diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj
index 891eea16c400..8299288d542e 100644
--- a/dotnet/samples/Concepts/Concepts.csproj
+++ b/dotnet/samples/Concepts/Concepts.csproj
@@ -62,9 +62,11 @@
+
+
diff --git a/dotnet/samples/Concepts/Prompty/PromptyFunction.cs b/dotnet/samples/Concepts/Prompty/PromptyFunction.cs
new file mode 100644
index 000000000000..9e4d2f45f823
--- /dev/null
+++ b/dotnet/samples/Concepts/Prompty/PromptyFunction.cs
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+
+namespace Prompty;
+
+public class PromptyFunction(ITestOutputHelper output) : BaseTest(output)
+{
+ [Fact]
+ public async Task InlineFunctionAsync()
+ {
+ Kernel kernel = Kernel.CreateBuilder()
+ .AddOpenAIChatCompletion(
+ modelId: TestConfiguration.OpenAI.ChatModelId,
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .Build();
+
+ string promptTemplate = """
+ ---
+ name: Contoso_Chat_Prompt
+ description: A sample prompt that responds with what Seattle is.
+ authors:
+ - ????
+ model:
+ api: chat
+ configuration:
+ type: openai
+ ---
+ system:
+ You are a helpful assistant who knows all about cities in the USA
+
+ user:
+ What is Seattle?
+ """;
+
+ var function = kernel.CreateFunctionFromPrompty(promptTemplate);
+
+ var result = await kernel.InvokeAsync(function);
+ Console.WriteLine(result);
+ }
+}
diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs
index 3376da6b3fae..bcc2be283cd3 100644
--- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs
+++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs
@@ -17,9 +17,10 @@ public void ChatPromptyTest()
var cwd = Directory.GetCurrentDirectory();
var chatPromptyPath = Path.Combine(cwd, "TestData", "chat.prompty");
+ var promptyTemplate = File.ReadAllText(chatPromptyPath);
// Act
- var kernelFunction = kernel.CreateFunctionFromPrompty(chatPromptyPath);
+ var kernelFunction = kernel.CreateFunctionFromPrompty(promptyTemplate);
// Assert
Assert.Equal("Contoso_Chat_Prompt", kernelFunction.Name);
@@ -40,7 +41,7 @@ public void ChatPromptyShouldSupportCreatingOpenAIExecutionSettings()
var chatPromptyPath = Path.Combine(cwd, "TestData", "chat.prompty");
// Act
- var kernelFunction = kernel.CreateFunctionFromPrompty(chatPromptyPath);
+ var kernelFunction = kernel.CreateFunctionFromPromptyFile(chatPromptyPath);
// Assert
// kernel function created from chat.prompty should have a single execution setting
@@ -75,7 +76,7 @@ public void ItShouldCreateFunctionFromPromptYamlWithNoExecutionSettings()
var promptyPath = Path.Combine(cwd, "TestData", "chatNoExecutionSettings.prompty");
// Act
- var kernelFunction = kernel.CreateFunctionFromPrompty(promptyPath);
+ var kernelFunction = kernel.CreateFunctionFromPromptyFile(promptyPath);
// Assert
Assert.NotNull(kernelFunction);
diff --git a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs
index 8b8219244552..2e649906f20e 100644
--- a/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs
+++ b/dotnet/src/Functions/Functions.Prompty/Extensions/PromptyKernelExtensions.cs
@@ -12,29 +12,57 @@
namespace Microsoft.SemanticKernel;
/// <summary>
-/// Extension methods for <see cref="Kernel"/> to create a <see cref="KernelFunction"/> from a prompty file.
+/// Extension methods for <see cref="Kernel"/> to create a <see cref="KernelFunction"/> from a Prompty file.
/// </summary>
public static class PromptyKernelExtensions
{
/// <summary>
/// Create a <see cref="KernelFunction"/> from a prompty file.
/// </summary>
- /// <param name="kernel">kernel</param>
- /// <param name="promptyPath">path to prompty file.</param>
- /// <param name="promptTemplateFactory">prompty template factory, if not provided, a <see cref="LiquidPromptTemplateFactory"/> will be used.</param>
- /// <param name="loggerFactory">logger factory</param>
+ /// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.</param>
+ /// <param name="promptyFilePath">Path to the file containing the Prompty representation of a prompt based <see cref="KernelFunction"/>.</param>
+ /// <param name="promptTemplateFactory">
+ /// The <see cref="IPromptTemplateFactory"/> to use when interpreting the prompt template configuration into a <see cref="IPromptTemplate"/>.
+ /// If null, a <see cref="AggregatorPromptTemplateFactory"/> will be used with support for Liquid and Handlebars prompt templates.
+ /// </param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use.</param>
///
///
///
- public static KernelFunction CreateFunctionFromPrompty(
+ public static KernelFunction CreateFunctionFromPromptyFile(
this Kernel kernel,
- string promptyPath,
+ string promptyFilePath,
IPromptTemplateFactory? promptTemplateFactory = null,
ILoggerFactory? loggerFactory = null)
{
Verify.NotNull(kernel);
+ Verify.NotNullOrWhiteSpace(promptyFilePath);
+
+ var promptyTemplate = File.ReadAllText(promptyFilePath);
+ return kernel.CreateFunctionFromPrompty(promptyTemplate, promptTemplateFactory, loggerFactory);
+ }
- var text = File.ReadAllText(promptyPath);
+ /// <summary>
+ /// Create a <see cref="KernelFunction"/> from a prompty template.
+ /// </summary>
+ /// <param name="kernel">The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.</param>
+ /// <param name="promptyTemplate">Prompty representation of a prompt based <see cref="KernelFunction"/>.</param>
+ /// <param name="promptTemplateFactory">
+ /// The <see cref="IPromptTemplateFactory"/> to use when interpreting the prompt template configuration into a <see cref="IPromptTemplate"/>.
+ /// If null, a <see cref="AggregatorPromptTemplateFactory"/> will be used with support for Liquid and Handlebars prompt templates.
+ /// </param>
+ /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> to use.</param>
+ ///
+ ///
+ ///
+ public static KernelFunction CreateFunctionFromPrompty(
+ this Kernel kernel,
+ string promptyTemplate,
+ IPromptTemplateFactory? promptTemplateFactory = null,
+ ILoggerFactory? loggerFactory = null)
+ {
+ Verify.NotNull(kernel);
+ Verify.NotNullOrWhiteSpace(promptyTemplate);
promptTemplateFactory ??= new AggregatorPromptTemplateFactory(new HandlebarsPromptTemplateFactory(), new LiquidPromptTemplateFactory());
@@ -70,7 +98,7 @@ public static KernelFunction CreateFunctionFromPrompty(
// ---
// ... (rest of the prompty content)
- var splits = text.Split(["---"], StringSplitOptions.RemoveEmptyEntries);
+ var splits = promptyTemplate.Split(["---"], StringSplitOptions.RemoveEmptyEntries);
var yaml = splits[0];
var content = splits[1];