Merge pull request #874 from MohammadErfan-Jabbari/fix/streaming-finish-reason-tool-calls
fix(antigravity): preserve finish_reason tool_calls across streaming chunks
+26 -10
@@ -22,8 +22,10 @@ import (
 // convertCliResponseToOpenAIChatParams holds parameters for response conversion.
 type convertCliResponseToOpenAIChatParams struct {
-    UnixTimestamp int64
-    FunctionIndex int
+    UnixTimestamp        int64
+    FunctionIndex        int
+    SawToolCall          bool   // Tracks if any tool call was seen in the entire stream
+    UpstreamFinishReason string // Caches the upstream finish reason for final chunk
 }

 // functionCallIDCounter provides a process-wide unique counter for function call identifiers.
@@ -79,10 +81,9 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq
         template, _ = sjson.Set(template, "id", responseIDResult.String())
     }

-    // Extract and set the finish reason.
+    // Cache the finish reason - do NOT set it in output yet (will be set on final chunk)
     if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
-        template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-        template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+        (*param).(*convertCliResponseToOpenAIChatParams).UpstreamFinishReason = strings.ToUpper(finishReasonResult.String())
     }

     // Extract and set usage metadata (token counts).
@@ -112,7 +113,6 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq

     // Process the main content part of the response.
     partsResult := gjson.GetBytes(rawJSON, "response.candidates.0.content.parts")
-    hasFunctionCall := false
     if partsResult.IsArray() {
         partResults := partsResult.Array()
         for i := 0; i < len(partResults); i++ {
@@ -148,7 +148,7 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq
                 template, _ = sjson.Set(template, "choices.0.delta.role", "assistant")
             } else if functionCallResult.Exists() {
                 // Handle function call content.
-                hasFunctionCall = true
+                (*param).(*convertCliResponseToOpenAIChatParams).SawToolCall = true // Persist across chunks
                 toolCallsResult := gjson.Get(template, "choices.0.delta.tool_calls")
                 functionCallIndex := (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex
                 (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex++
@@ -195,9 +195,25 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq
         }
     }

-    if hasFunctionCall {
-        template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
-        template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+    // Determine finish_reason only on the final chunk (has both finishReason and usage metadata)
+    params := (*param).(*convertCliResponseToOpenAIChatParams)
+    upstreamFinishReason := params.UpstreamFinishReason
+    sawToolCall := params.SawToolCall
+
+    usageExists := gjson.GetBytes(rawJSON, "response.usageMetadata").Exists()
+    isFinalChunk := upstreamFinishReason != "" && usageExists
+
+    if isFinalChunk {
+        var finishReason string
+        if sawToolCall {
+            finishReason = "tool_calls"
+        } else if upstreamFinishReason == "MAX_TOKENS" {
+            finishReason = "max_tokens"
+        } else {
+            finishReason = "stop"
+        }
+        template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+        template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(upstreamFinishReason))
     }

     return []string{template}
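For readers skimming the diff, the chunk-level behavior the new final-chunk logic aims for can be sketched in isolation. The snippet below is an illustrative, self-contained restatement of that decision, not code from the PR; the resolveFinishReason helper name and the standalone main are assumptions made for this sketch only. Intermediate chunks carry no finish_reason, and the final chunk reports tool_calls whenever any tool call was seen earlier in the stream, otherwise max_tokens or stop based on the upstream reason.

package main

import (
    "fmt"
    "strings"
)

// resolveFinishReason mirrors the final-chunk decision shown in the diff above.
// A chunk is treated as final only when the upstream finish reason and the
// usage metadata are both present; tool_calls takes precedence whenever a
// tool call was seen anywhere in the stream.
// (Hypothetical helper for illustration; not part of the PR.)
func resolveFinishReason(upstreamFinishReason string, usageExists, sawToolCall bool) (string, bool) {
    isFinalChunk := upstreamFinishReason != "" && usageExists
    if !isFinalChunk {
        return "", false // intermediate chunks carry no finish_reason
    }
    switch {
    case sawToolCall:
        return "tool_calls", true
    case strings.ToUpper(upstreamFinishReason) == "MAX_TOKENS":
        return "max_tokens", true
    default:
        return "stop", true
    }
}

func main() {
    // Intermediate chunk that streamed a function call: no finish_reason yet.
    fmt.Println(resolveFinishReason("", false, true)) //  false

    // Final chunk (upstream STOP plus usage metadata) after a tool call was seen.
    fmt.Println(resolveFinishReason("STOP", true, true)) // tool_calls true

    // Final chunk of a plain text response.
    fmt.Println(resolveFinishReason("STOP", true, false)) // stop true
}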