16 changes: 16 additions & 0 deletions DotPrompt.Tests/PromptFileTests.cs
@@ -22,6 +22,9 @@ public void FromFile_BasicPrompt_ProducesValidPromptFile()

Assert.Equal("basic", promptFile.Name);

Assert.NotNull(promptFile.Model);
Assert.Equal("claude-3-5-sonnet-latest", promptFile.Model);

Assert.NotNull(promptFile.Config);
Assert.Equal(OutputFormat.Text, promptFile.Config.OutputFormat);
Assert.Equal(500, promptFile.Config.MaxTokens);
@@ -196,6 +199,19 @@ public void FromStream_WithInvalidName_ThrowsAnException()
Assert.Contains("once cleaned results in an empty string", exception.Message);
}

[Fact]
public void FromStream_WithMissingModelName_IsPersistedAsNullValue()
{
const string content = "name: missing-model\nmodel: \nprompts:\n system: System prompt\n user: User prompt";
using var ms = new MemoryStream(Encoding.UTF8.GetBytes(content));
ms.Seek(0, SeekOrigin.Begin);

var promptFile = PromptFile.FromStream("", ms);

Assert.Equal("missing-model", promptFile.Name);
Assert.Null(promptFile.Model);
}

[Fact]
public void GenerateUserPrompt_UsingDefaults_CorrectlyGeneratesPromptFromTemplate()
{
1 change: 1 addition & 0 deletions DotPrompt.Tests/SamplePrompts/basic.prompt
@@ -1,3 +1,4 @@
model: claude-3-5-sonnet-latest
config:
outputFormat: text
temperature: 0.9
5 changes: 5 additions & 0 deletions DotPrompt/PromptFile.cs
@@ -21,6 +21,11 @@ public partial class PromptFile
/// Gets, sets the name of the prompt
/// </summary>
public required string Name { get; set; }

/// <summary>
/// Gets, sets the name of the model (or deployment) the prompt should be executed using
/// </summary>
public string? Model { get; set; }

/// <summary>
/// Gets, sets the configuration to use for the prompt
23 changes: 21 additions & 2 deletions README.md
@@ -19,6 +19,7 @@ A complete prompt file would look like this.

```yaml
name: Example
model: gpt-4o
config:
outputFormat: text
temperature: 0.9
@@ -52,6 +53,16 @@ The `name` is optional in the configuration, if it's not provided then the name

If you use this property then when the file is loaded the name is converted to lowercase and spaces are replaced with hyphens. So a name of `My cool Prompt` would become `my-cool-prompt`. This is done to make sure the name is easily accessible from the code.
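For example, a file declaring the following name would be accessed from code as `my-cool-prompt`:

```yaml
name: My cool Prompt
```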

### Model

This is another optional item in the configuration which tells the consumer of the prompt file which model (or deployment, for Azure OpenAI) it should use. As this can be null if not specified, the consumer should make sure to check it before use. For example:

```csharp
var model = promptFile.Model ?? "my-default";
```

Using this option, though, allows the prompt engineer to be explicit about which model they intend to be used to produce the best results.

### Config

The `config` section has some top-level items which the client can use to set options on each LLM call. The `outputFormat` property takes a value of either `text` or `json` depending on how the LLM is intended to respond to the request. If `json` is specified then some LLMs require either the system or user prompt to state that the expected output is JSON as well. If the library does not detect the term `JSON` in the prompt then it will append a small statement to the system prompt requesting that the response be in JSON format.
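As an illustration, a minimal prompt file requesting JSON output might look like the following sketch (the name and prompt text here are invented for the example; the structure follows the sample at the top of this README). Because the system prompt already mentions JSON, the library would not need to append its own statement:

```yaml
name: json-example
config:
  outputFormat: json
prompts:
  system: You are a helpful assistant. Respond only in JSON.
  user: Describe today's weather as a JSON object.
```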
@@ -202,7 +213,6 @@ using Azure.AI.OpenAI;
using DotPrompt;

var openAiClient = new(new Uri("https://endpoint"), new ApiKeyCredential("abc123"));
var client = openAiClient.GetChatClient("model");

var promptManager = new PromptManager();
var promptFile = promptManager.GetPromptFile("example");
@@ -216,6 +226,8 @@ var userPrompt = promptFile.GetUserPrompt(new Dictionary<string, object>
{ "style", "used car salesman" }
});

var client = openAiClient.GetChatClient(promptFile.Model ?? "default-model");

var completion = await client.CompleteChatAsync(
[
new SystemChatMessage(systemPrompt),
@@ -239,7 +251,6 @@ using DotPrompt;
using DotPrompt.Extensions.OpenAi;

var openAiClient = new(new Uri("https://endpoint"), new ApiKeyCredential("abc123"));
var client = openAiClient.GetChatClient("model");

var promptManager = new PromptManager();
var promptFile = promptManager.GetPromptFile("example");
@@ -250,6 +261,8 @@ var promptValues = new Dictionary<string, object>
{ "style", "used car salesman" }
};

var client = openAiClient.GetChatClient(promptFile.Model ?? "default-model");

var completion = await client.CompleteChatAsync(
promptFile.ToOpenAiChatMessages(promptValues),
promptFile.ToOpenAiChatCompletionOptions()
@@ -332,6 +345,11 @@ public class PromptEntity : ITableEntity
/// </summary>
public ETag ETag { get; set; }

/// <summary>
/// Gets, sets the model to use
/// </summary>
public string? Model { get; set; }

/// <summary>
/// Gets, sets the output format
/// </summary>
@@ -395,6 +413,7 @@ public class PromptEntity : ITableEntity
var promptFile = new PromptFile
{
Name = RowKey,
Model = Model,
Config = new PromptConfig
{
OutputFormat = Enum.Parse<OutputFormat>(OutputFormat, true),