Skip to content

Commit 9c0c1ae

Browse files
Copilot authored and mudler committed
Move common finish reasons to constants
- Create constants.go with FinishReasonStop, FinishReasonToolCalls, FinishReasonFunctionCall - Replace all string literals with constants in chat.go, completion.go, realtime.go - Improves code maintainability and prevents typos Co-authored-by: mudler <2420543+mudler@users.noreply.github.com>
1 parent b38426f commit 9c0c1ae

4 files changed

Lines changed: 23 additions & 14 deletions

File tree

core/http/endpoints/openai/chat.go

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -383,7 +383,7 @@ func ChatEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator
383383
}
384384
log.Error().Msgf("Stream ended with error: %v", err)
385385

386-
stopReason := "stop"
386+
stopReason := FinishReasonStop
387387
resp := &schema.OpenAIResponse{
388388
ID: id,
389389
Created: created,
@@ -412,11 +412,11 @@ func ChatEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator
412412
}
413413
}
414414

415-
finishReason := "stop"
415+
finishReason := FinishReasonStop
416416
if toolsCalled && len(input.Tools) > 0 {
417-
finishReason = "tool_calls"
417+
finishReason = FinishReasonToolCalls
418418
} else if toolsCalled {
419-
finishReason = "function_call"
419+
finishReason = FinishReasonFunctionCall
420420
}
421421

422422
resp := &schema.OpenAIResponse{
@@ -448,7 +448,7 @@ func ChatEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator
448448
tokenCallback := func(s string, c *[]schema.Choice) {
449449
if !shouldUseFn {
450450
// no function is called, just reply and use stop as finish reason
451-
stopReason := "stop"
451+
stopReason := FinishReasonStop
452452
*c = append(*c, schema.Choice{FinishReason: &stopReason, Index: 0, Message: &schema.Message{Role: "assistant", Content: &s}})
453453
return
454454
}
@@ -467,12 +467,12 @@ func ChatEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator
467467
return
468468
}
469469

470-
stopReason := "stop"
470+
stopReason := FinishReasonStop
471471
*c = append(*c, schema.Choice{
472472
FinishReason: &stopReason,
473473
Message: &schema.Message{Role: "assistant", Content: &result}})
474474
default:
475-
toolCallsReason := "tool_calls"
475+
toolCallsReason := FinishReasonToolCalls
476476
toolChoice := schema.Choice{
477477
FinishReason: &toolCallsReason,
478478
Message: &schema.Message{
@@ -498,7 +498,7 @@ func ChatEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, evaluator
498498
)
499499
} else {
500500
// otherwise we return more choices directly (deprecated)
501-
functionCallReason := "function_call"
501+
functionCallReason := FinishReasonFunctionCall
502502
*c = append(*c, schema.Choice{
503503
FinishReason: &functionCallReason,
504504
Message: &schema.Message{

core/http/endpoints/openai/completion.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ func CompletionEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, eva
155155
}
156156
log.Error().Msgf("Stream ended with error: %v", err)
157157

158-
stopReason := "stop"
158+
stopReason := FinishReasonStop
159159
errorResp := schema.OpenAIResponse{
160160
ID: id,
161161
Created: created,
@@ -182,7 +182,7 @@ func CompletionEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, eva
182182
}
183183
}
184184

185-
stopReason := "stop"
185+
stopReason := FinishReasonStop
186186
resp := &schema.OpenAIResponse{
187187
ID: id,
188188
Created: created,
@@ -222,7 +222,7 @@ func CompletionEndpoint(cl *config.ModelConfigLoader, ml *model.ModelLoader, eva
222222

223223
r, tokenUsage, err := ComputeChoices(
224224
input, i, config, cl, appConfig, ml, func(s string, c *[]schema.Choice) {
225-
stopReason := "stop"
225+
stopReason := FinishReasonStop
226226
*c = append(*c, schema.Choice{Text: s, FinishReason: &stopReason, Index: k})
227227
}, nil)
228228
if err != nil {
core/http/endpoints/openai/constants.go (new file)

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
package openai
2+
3+
// Finish reason values emitted in OpenAI-compatible API responses.
// Shared by the chat, completion, and realtime endpoints so the
// literal strings appear in exactly one place.
const (
	FinishReasonStop         = "stop"
	FinishReasonToolCalls    = "tool_calls"
	FinishReasonFunctionCall = "function_call"
)

core/http/endpoints/openai/realtime.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1072,7 +1072,7 @@ func processTextResponse(config *config.ModelConfig, session *Session, prompt st
10721072
result, tokenUsage, err := ComputeChoices(input, prompt, config, startupOptions, ml, func(s string, c *[]schema.Choice) {
10731073
if !shouldUseFn {
10741074
// no function is called, just reply and use stop as finish reason
1075-
stopReason := "stop"
1075+
stopReason := FinishReasonStop
10761076
*c = append(*c, schema.Choice{FinishReason: &stopReason, Index: 0, Message: &schema.Message{Role: "assistant", Content: &s}})
10771077
return
10781078
}
@@ -1100,7 +1100,8 @@ func processTextResponse(config *config.ModelConfig, session *Session, prompt st
11001100
}
11011101
11021102
if len(input.Tools) > 0 {
1103-
toolChoice.FinishReason = "tool_calls"
1103+
toolCallsReason := FinishReasonToolCalls
1104+
toolChoice.FinishReason = &toolCallsReason
11041105
}
11051106
11061107
for _, ss := range results {
@@ -1121,7 +1122,7 @@ func processTextResponse(config *config.ModelConfig, session *Session, prompt st
11211122
)
11221123
} else {
11231124
// otherwise we return more choices directly
1124-
functionCallReason := "function_call"
1125+
functionCallReason := FinishReasonFunctionCall
11251126
*c = append(*c, schema.Choice{
11261127
FinishReason: &functionCallReason,
11271128
Message: &schema.Message{

0 commit comments

Comments (0)