From 2c8c7ca47374bd24f24d52a02addbbbc2409d5e4 Mon Sep 17 00:00:00 2001 From: Stephen Toub Date: Fri, 3 May 2024 11:49:32 -0400 Subject: [PATCH] Clean up some things in LiquidPromptTemplate and PromptyKernelExtensions - Liquid template parsing should happen during construction, not on each render - Liquid prompt template construction should fail for invalid templates - Default inputs should be evaluated once at Liquid template construction time - RenderAsync should capture any exceptions into returned Task - Role regex used in parsing rendered messages should be Compiled - LiquidPromptTemplateFactory should do arg validation and accommodate a PromptTemplateConfig whose TemplateFormat is null - Use XML comments instead of normal comments to describe properties in internal DOM - Remove unnecessary empty primary constructor - Use a regex to parse the components of a prompty template in order to a) more strictly validate contents but more importantly b) avoid losing part of the template when the separator appears in the contents itself - Clean up some XML comments - Set ModelId appropriately for openai - Avoid storing temperature/top_p in execution settings if they weren't specified - Add an OutputVariable if the prompty specifies one - Cache the default template factory rather than creating a new one on each construction --- .../LiquidTemplateFactoryTest.cs | 13 +- .../LiquidPromptTemplate.cs | 111 ++++++++------- .../LiquidPromptTemplateFactory.cs | 4 +- .../PromptyTest.cs | 107 ++++++++++++-- .../Core/PromptyModelParameters.cs | 32 +++-- .../Functions.Prompty/Core/PromptyYaml.cs | 2 +- .../Extensions/PromptyKernelExtensions.cs | 132 +++++++++++------- 7 files changed, 273 insertions(+), 128 deletions(-) diff --git a/dotnet/src/Extensions/PromptTemplates.Liquid.UnitTests/LiquidTemplateFactoryTest.cs b/dotnet/src/Extensions/PromptTemplates.Liquid.UnitTests/LiquidTemplateFactoryTest.cs index c02cc3514f3a..d16b081c3061 100644 ---
a/dotnet/src/Extensions/PromptTemplates.Liquid.UnitTests/LiquidTemplateFactoryTest.cs +++ b/dotnet/src/Extensions/PromptTemplates.Liquid.UnitTests/LiquidTemplateFactoryTest.cs @@ -8,18 +8,22 @@ namespace SemanticKernel.Extensions.PromptTemplates.Liquid.UnitTests; public class LiquidTemplateFactoryTest { - [Fact] - public void ItThrowsExceptionForUnknownPromptTemplateFormat() + [Theory] + [InlineData("unknown-format")] + [InlineData(null)] + public void ItThrowsExceptionForUnknownPromptTemplateFormat(string? format) { // Arrange var promptConfig = new PromptTemplateConfig("UnknownFormat") { - TemplateFormat = "unknown-format", + TemplateFormat = format, }; var target = new LiquidPromptTemplateFactory(); // Act & Assert + Assert.False(target.TryCreate(promptConfig, out IPromptTemplate? result)); + Assert.Null(result); Assert.Throws(() => target.Create(promptConfig)); } @@ -38,7 +42,6 @@ public void ItCreatesLiquidPromptTemplate() var result = target.Create(promptConfig); // Assert - Assert.NotNull(result); - Assert.True(result is LiquidPromptTemplate); + Assert.IsType(result); } } diff --git a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs index 699088099bf8..da89519a832a 100644 --- a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs +++ b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplate.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.Text; using System.Text.RegularExpressions; using System.Threading; @@ -15,14 +16,15 @@ namespace Microsoft.SemanticKernel.PromptTemplates.Liquid; /// internal sealed class LiquidPromptTemplate : IPromptTemplate { - private readonly PromptTemplateConfig _config; - private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function):[\s]+"); + private static readonly Regex s_roleRegex = new(@"(?system|assistant|user|function):\s+", 
RegexOptions.Compiled); - /// - /// Constructor for Liquid PromptTemplate. - /// + private readonly Template _liquidTemplate; + private readonly Dictionary _inputVariables; + + /// Initializes the . /// Prompt template configuration - /// throw if is not + /// is not . + /// The template in could not be parsed. public LiquidPromptTemplate(PromptTemplateConfig config) { if (config.TemplateFormat != LiquidPromptTemplateFactory.LiquidTemplateFormat) @@ -30,38 +32,54 @@ public LiquidPromptTemplate(PromptTemplateConfig config) throw new ArgumentException($"Invalid template format: {config.TemplateFormat}"); } - this._config = config; - } + // Parse the template now so we can check for errors, understand variable usage, and + // avoid having to parse on each render. + this._liquidTemplate = Template.ParseLiquid(config.Template); + if (this._liquidTemplate.HasErrors) + { + throw new ArgumentException($"The template could not be parsed:{Environment.NewLine}{string.Join(Environment.NewLine, this._liquidTemplate.Messages)}"); + } + Debug.Assert(this._liquidTemplate.Page is not null); - /// - public Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) - { - Verify.NotNull(kernel); + // TODO: Update config.InputVariables with any variables referenced by the template but that aren't explicitly defined in the front matter. - var template = this._config.Template; - var liquidTemplate = Template.ParseLiquid(template); - Dictionary nonEmptyArguments = new(); - foreach (var p in this._config.InputVariables) + // Configure _inputVariables with the default values from the config. This will be used + // in RenderAsync to seed the arguments used when evaluating the template. 
+ this._inputVariables = []; + foreach (var p in config.InputVariables) { - if (p.Default is null || (p.Default is string s && string.IsNullOrWhiteSpace(s))) + if (p.Default is not null) { - continue; + this._inputVariables[p.Name] = p.Default; } - - nonEmptyArguments[p.Name] = p.Default; } + } + + /// +#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously + public async Task RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) +#pragma warning restore CS1998 + { + Verify.NotNull(kernel); + cancellationToken.ThrowIfCancellationRequested(); - foreach (var p in arguments ?? new KernelArguments()) + Dictionary? nonEmptyArguments = null; + if (this._inputVariables.Count is > 0 || arguments?.Count is > 0) { - if (p.Value is null) + nonEmptyArguments = new(this._inputVariables); + if (arguments is not null) { - continue; + foreach (var p in arguments) + { + if (p.Value is not null) + { + nonEmptyArguments[p.Key] = p.Value; + } + } } - - nonEmptyArguments[p.Key] = p.Value; } - var renderedResult = liquidTemplate.Render(nonEmptyArguments); + var renderedResult = this._liquidTemplate.Render(nonEmptyArguments); // parse chat history // for every text like below @@ -72,35 +90,30 @@ public Task RenderAsync(Kernel kernel, KernelArguments? arguments = null // // xxxx // - var splits = s_roleRegex.Split(renderedResult); // if no role is found, return the entire text - if (splits.Length == 1) + if (splits.Length > 1) { - return Task.FromResult(renderedResult); - } + // otherwise, the split text chunks will be in the following format + // [0] = "" + // [1] = role information + // [2] = message content + // [3] = role information + // [4] = message content + // ... 
+ // we will iterate through the array and create a new string with the following format + var sb = new StringBuilder(); + for (var i = 1; i < splits.Length; i += 2) + { + sb.Append(""); + sb.AppendLine(splits[i + 1]); + sb.AppendLine(""); + } - // otherwise, the split text chunks will be in the following format - // [0] = "" - // [1] = role information - // [2] = message content - // [3] = role information - // [4] = message content - // ... - // we will iterate through the array and create a new string with the following format - var sb = new StringBuilder(); - for (var i = 1; i < splits.Length; i += 2) - { - var role = splits[i]; - var content = splits[i + 1]; - sb.Append(""); - sb.AppendLine(content); - sb.AppendLine(""); + renderedResult = sb.ToString(); } - renderedResult = sb.ToString(); - - return Task.FromResult(renderedResult); + return renderedResult; } } diff --git a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplateFactory.cs b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplateFactory.cs index daf2f2ce1115..57185f508ca3 100644 --- a/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplateFactory.cs +++ b/dotnet/src/Extensions/PromptTemplates.Liquid/LiquidPromptTemplateFactory.cs @@ -18,7 +18,9 @@ public sealed class LiquidPromptTemplateFactory : IPromptTemplateFactory /// public bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? 
result) { - if (templateConfig.TemplateFormat.Equals(LiquidTemplateFormat, StringComparison.Ordinal)) + Verify.NotNull(templateConfig); + + if (LiquidTemplateFormat.Equals(templateConfig.TemplateFormat, StringComparison.Ordinal)) { result = new LiquidPromptTemplate(templateConfig); return true; diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs index bcc2be283cd3..d90d0067f0a8 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs @@ -1,22 +1,27 @@ // Copyright (c) Microsoft. All rights reserved. +using System; +using System.Collections.Generic; using System.IO; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.OpenAI; +using Microsoft.SemanticKernel.TextGeneration; using Xunit; namespace SemanticKernel.Functions.Prompty.UnitTests; + public sealed class PromptyTest { [Fact] public void ChatPromptyTest() { // Arrange - var kernel = Kernel.CreateBuilder() - .Build(); - - var cwd = Directory.GetCurrentDirectory(); - var chatPromptyPath = Path.Combine(cwd, "TestData", "chat.prompty"); + Kernel kernel = new(); + var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); var promptyTemplate = File.ReadAllText(chatPromptyPath); // Act @@ -34,11 +39,8 @@ public void ChatPromptyTest() public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettings() { // Arrange - var kernel = Kernel.CreateBuilder() - .Build(); - - var cwd = Directory.GetCurrentDirectory(); - var chatPromptyPath = Path.Combine(cwd, "TestData", "chat.prompty"); + Kernel kernel = new(); + var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); // Act var kernelFunction = kernel.CreateFunctionFromPromptyFile(chatPromptyPath); @@ -70,10 +72,8 @@ 
public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettings() public void ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings() { // Arrange - var kernel = Kernel.CreateBuilder() - .Build(); - var cwd = Directory.GetCurrentDirectory(); - var promptyPath = Path.Combine(cwd, "TestData", "chatNoExecutionSettings.prompty"); + Kernel kernel = new(); + var promptyPath = Path.Combine("TestData", "chatNoExecutionSettings.prompty"); // Act var kernelFunction = kernel.CreateFunctionFromPromptyFile(promptyPath); @@ -83,6 +83,85 @@ public void ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings() Assert.Equal("prompty_with_no_execution_setting", kernelFunction.Name); Assert.Equal("prompty without execution setting", kernelFunction.Description); Assert.Single(kernelFunction.Metadata.Parameters); + Assert.Equal("prompt", kernelFunction.Metadata.Parameters[0].Name); Assert.Empty(kernelFunction.ExecutionSettings!); } + + [Theory] + [InlineData(""" + --- + name: SomePrompt + --- + Abc + """)] + [InlineData(""" + --- + name: SomePrompt + --- + Abc + """)] + [InlineData(""" + ---a + name: SomePrompt + --- + Abc + """)] + [InlineData(""" + --- + name: SomePrompt + ---b + Abc + """)] + public void ItRequiresStringSeparatorPlacement(string prompt) + { + // Arrange + Kernel kernel = new(); + + // Act / Assert + Assert.Throws(() => kernel.CreateFunctionFromPrompty(prompt)); + } + + [Fact] + public async Task ItSupportsSeparatorInContentAsync() + { + // Arrange + IKernelBuilder builder = Kernel.CreateBuilder(); + builder.Services.AddSingleton(_ => new EchoTextGenerationService()); + Kernel kernel = builder.Build(); + + // Act + var kernelFunction = kernel.CreateFunctionFromPrompty(""" + --- + name: SomePrompt + description: This is the description. 
+ --- + Abc---def + --- + Efg + """); + + // Assert + Assert.NotNull(kernelFunction); + Assert.Equal("SomePrompt", kernelFunction.Name); + Assert.Equal("This is the description.", kernelFunction.Description); + Assert.Equal(""" + Abc---def + --- + Efg + """, await kernelFunction.InvokeAsync(kernel)); + } + + private sealed class EchoTextGenerationService : ITextGenerationService + { + public IReadOnlyDictionary Attributes { get; } = new Dictionary(); + + public Task> GetTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, CancellationToken cancellationToken = default) => + Task.FromResult>([new TextContent(prompt)]); + + public async IAsyncEnumerable GetStreamingTextContentsAsync(string prompt, PromptExecutionSettings? executionSettings = null, Kernel? kernel = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await Task.Delay(0, cancellationToken); + yield return new StreamingTextContent(prompt); + } + } } diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs index 9605ff2cfb73..8a7e9ed3a4ef 100644 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyModelParameters.cs @@ -5,36 +5,46 @@ namespace Microsoft.SemanticKernel.Prompty.Core; +/// Parameters to be sent to the model. internal sealed class PromptyModelParameters { - // Parameters to be sent to the model + /// Specify the format for model output (e.g., JSON mode). [YamlMember(Alias = "response_format")] - public string? ResponseFormat { get; set; } // Specify the format for model output (e.g., JSON mode) + public string? ResponseFormat { get; set; } + /// Seed for deterministic sampling (Beta feature). [YamlMember(Alias = "seed")] - public int? Seed { get; set; } // Seed for deterministic sampling (Beta feature) + public int? 
Seed { get; set; } + /// Maximum number of tokens in chat completion. [YamlMember(Alias = "max_tokens")] - public int? MaxTokens { get; set; } // Maximum number of tokens in chat completion + public int? MaxTokens { get; set; } + /// Sampling temperature (0 means deterministic). [YamlMember(Alias = "temperature")] - public double? Temperature { get; set; } // Sampling temperature (0 means deterministic) + public double? Temperature { get; set; } + /// Controls which function the model calls (e.g., "none" or "auto"). [YamlMember(Alias = "tools_choice")] - public string? ToolsChoice { get; set; } // Controls which function the model calls (e.g., "none" or "auto") + public string? ToolsChoice { get; set; } + /// Array of tools (if applicable). [YamlMember(Alias = "tools")] - public List? Tools { get; set; } // Array of tools (if applicable) + public List? Tools { get; set; } + /// Frequency penalty for sampling. [YamlMember(Alias = "frequency_penalty")] - public double? FrequencyPenalty { get; set; } // Frequency penalty for sampling + public double? FrequencyPenalty { get; set; } + /// Presence penalty for sampling. [YamlMember(Alias = "presence_penalty")] - public double? PresencePenalty { get; set; } // Presence penalty for sampling + public double? PresencePenalty { get; set; } + /// Sequences where model stops generating tokens. [YamlMember(Alias = "stop")] - public List? Stop { get; set; } // Sequences where model stops generating tokens + public List? Stop { get; set; } + /// Nucleus sampling probability (0 means no tokens generated). [YamlMember(Alias = "top_p")] - public double? TopP { get; set; } // Nucleus sampling probability (0 means no tokens generated) + public double? 
TopP { get; set; } } diff --git a/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs b/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs index d4ac1ddbe1ce..4af70817e742 100644 --- a/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs +++ b/dotnet/src/Functions/Functions.Prompty/Core/PromptyYaml.cs @@ -8,7 +8,7 @@ namespace Microsoft.SemanticKernel.Prompty.Core; /// /// Schema: https://github.com/Azure/azureml_run_specification/blob/master/schemas/Prompty.yaml /// -internal sealed class PromptyYaml() +internal sealed class PromptyYaml { [YamlMember(Alias = "name")] public string? Name { get; set; } diff --git a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs index 2e649906f20e..6dbe54db1972 100644 --- a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs +++ b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs @@ -3,7 +3,8 @@ using System; using System.Collections.Generic; using System.IO; -using Microsoft.Extensions.Logging; +using System.Linq; +using System.Text.RegularExpressions; using Microsoft.SemanticKernel.PromptTemplates.Handlebars; using Microsoft.SemanticKernel.PromptTemplates.Liquid; using Microsoft.SemanticKernel.Prompty.Core; @@ -12,12 +13,25 @@ namespace Microsoft.SemanticKernel; /// -/// Extension methods for to create a from a Prompty file. +/// Provides extension methods for creating s from the Prompty template format. /// public static class PromptyKernelExtensions { + /// Default template factory to use when none is provided. + private static readonly AggregatorPromptTemplateFactory s_defaultTemplateFactory = + new(new LiquidPromptTemplateFactory(), new HandlebarsPromptTemplateFactory()); + + /// Regex for parsing the YAML frontmatter and content from the prompty template. 
+ private static readonly Regex s_promptyRegex = new(""" + ^---\s*$\n # Start of YAML front matter, a line beginning with "---" followed by optional whitespace + (?
.*?) # Capture the YAML front matter, everything up to the next "---" line + ^---\s*$\n # End of YAML front matter, a line beginning with "---" followed by optional whitespace + (?.*) # Capture the content after the YAML front matter + """, + RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.IgnorePatternWhitespace | RegexOptions.Compiled); + /// - /// Create a from a prompty file. + /// Create a from a prompty template file. /// /// The containing services, plugins, and other state for use throughout the operation. /// Path to the file containing the Prompty representation of a prompt based . @@ -25,51 +39,46 @@ public static class PromptyKernelExtensions /// The to use when interpreting the prompt template configuration into a . /// If null, a will be used with support for Liquid and Handlebars prompt templates. /// - /// The to use. - /// - /// - /// + /// The created . + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. public static KernelFunction CreateFunctionFromPromptyFile( this Kernel kernel, string promptyFilePath, - IPromptTemplateFactory? promptTemplateFactory = null, - ILoggerFactory? loggerFactory = null) + IPromptTemplateFactory? promptTemplateFactory = null) { Verify.NotNull(kernel); Verify.NotNullOrWhiteSpace(promptyFilePath); var promptyTemplate = File.ReadAllText(promptyFilePath); - return kernel.CreateFunctionFromPrompty(promptyTemplate, promptTemplateFactory, loggerFactory); + return kernel.CreateFunctionFromPrompty(promptyTemplate, promptTemplateFactory); } /// - /// Create a from a prompty file. + /// Create a from a prompty template. /// /// The containing services, plugins, and other state for use throughout the operation. - /// Prompty representation of a prompt based . + /// Prompty representation of a prompt-based . /// /// The to use when interpreting the prompt template configuration into a . /// If null, a will be used with support for Liquid and Handlebars prompt templates. 
/// - /// The to use. - /// - /// - /// + /// The created . + /// is null. + /// is null. + /// is empty or composed entirely of whitespace. public static KernelFunction CreateFunctionFromPrompty( this Kernel kernel, string promptyTemplate, - IPromptTemplateFactory? promptTemplateFactory = null, - ILoggerFactory? loggerFactory = null) + IPromptTemplateFactory? promptTemplateFactory = null) { Verify.NotNull(kernel); Verify.NotNullOrWhiteSpace(promptyTemplate); - promptTemplateFactory ??= new AggregatorPromptTemplateFactory(new HandlebarsPromptTemplateFactory(), new LiquidPromptTemplateFactory()); - - // create PromptTemplateConfig from text - // step 1 - // retrieve the header, which is in yaml format and put between --- - // + // Step 1: + // Create PromptTemplateConfig from text. + // Retrieve the header, which is in yaml format and put between --- // e.g // file: chat.prompty // --- @@ -81,8 +90,8 @@ public static KernelFunction CreateFunctionFromPrompty( // api: chat // configuration: // type: azure_openai - // azure_deployment: gpt - 35 - turbo - // api_version: 2023 - 07 - 01 - preview + // azure_deployment: gpt-35-turbo + // api_version: 2023-07-01-preview // parameters: // tools_choice: auto // tools: @@ -98,15 +107,20 @@ public static KernelFunction CreateFunctionFromPrompty( // --- // ... (rest of the prompty content) - var splits = promptyTemplate.Split(["---"], StringSplitOptions.RemoveEmptyEntries); - var yaml = splits[0]; - var content = splits[1]; + // Parse the YAML frontmatter and content from the prompty template + Match m = s_promptyRegex.Match(promptyTemplate); + if (!m.Success) + { + throw new ArgumentException("Invalid prompty template. 
Header and content could not be parsed."); + } + + var header = m.Groups["header"].Value; + var content = m.Groups["content"].Value; - var deserializer = new DeserializerBuilder().Build(); - var prompty = deserializer.Deserialize(yaml); + var prompty = new DeserializerBuilder().Build().Deserialize(header); - // step 2 - // create a prompt template config from the prompty object + // Step 2: + // Create a prompt template config from the prompty data. var promptTemplateConfig = new PromptTemplateConfig { Name = prompty.Name, // TODO: sanitize name @@ -115,16 +129,27 @@ public static KernelFunction CreateFunctionFromPrompty( }; PromptExecutionSettings? defaultExecutionSetting = null; - if (prompty.Model?.ModelConfiguration?.ModelType is ModelType.azure_openai || prompty.Model?.ModelConfiguration?.ModelType is ModelType.openai) + if (prompty.Model?.ModelConfiguration?.ModelType is ModelType.azure_openai or ModelType.openai) { defaultExecutionSetting = new PromptExecutionSettings { - ModelId = prompty.Model?.ModelConfiguration?.AzureDeployment, + ModelId = prompty.Model.ModelConfiguration.ModelType is ModelType.azure_openai ? + prompty.Model.ModelConfiguration.AzureDeployment : + prompty.Model.ModelConfiguration.Name }; var extensionData = new Dictionary(); - extensionData.Add("temperature", prompty.Model?.Parameters?.Temperature ?? 1.0); - extensionData.Add("top_p", prompty.Model?.Parameters?.TopP ?? 1.0); + + if (prompty.Model?.Parameters?.Temperature is double temperature) + { + extensionData.Add("temperature", temperature); + } + + if (prompty.Model?.Parameters?.TopP is double topP) + { + extensionData.Add("top_p", topP); + } + if (prompty.Model?.Parameters?.MaxTokens is int maxTokens) { extensionData.Add("max_tokens", maxTokens); @@ -159,28 +184,41 @@ public static KernelFunction CreateFunctionFromPrompty( promptTemplateConfig.AddExecutionSettings(defaultExecutionSetting); } - // step 3. 
add input variables - if (prompty.Inputs != null) + // Step 3: + // Add input and output variables. + if (prompty.Inputs is not null) { foreach (var input in prompty.Inputs) { if (input.Value is string description) { - var inputVariable = new InputVariable() + promptTemplateConfig.InputVariables.Add(new() { Name = input.Key, Description = description, - }; - - promptTemplateConfig.InputVariables.Add(inputVariable); + }); } } } - // step 4. update template format, if not provided, use Liquid as default - var templateFormat = prompty.Template ?? LiquidPromptTemplateFactory.LiquidTemplateFormat; - promptTemplateConfig.TemplateFormat = templateFormat; + if (prompty.Outputs is not null) + { + // PromptTemplateConfig supports only a single output variable. If the prompty template + // contains one and only one, use it. Otherwise, ignore any outputs. + if (prompty.Outputs.Count == 1 && + prompty.Outputs.First().Value is string description) + { + promptTemplateConfig.OutputVariable = new() { Description = description }; + } + } + + // Step 4: + // Update template format. If not provided, use Liquid as default. + promptTemplateConfig.TemplateFormat = prompty.Template ?? LiquidPromptTemplateFactory.LiquidTemplateFormat; - return KernelFunctionFactory.CreateFromPrompt(promptTemplateConfig, promptTemplateFactory, loggerFactory); + return KernelFunctionFactory.CreateFromPrompt( + promptTemplateConfig, + promptTemplateFactory ?? s_defaultTemplateFactory, + kernel.LoggerFactory); } }