opencode-cursor-agent/internal/logic/chat/chat_completions_logic.go

237 lines
6.2 KiB
Go

// Code scaffolded by goctl. Safe to edit.
// goctl 1.10.1
package chat
import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"time"

	"cursor-api-proxy/internal/config"
	"cursor-api-proxy/internal/svc"
	apitypes "cursor-api-proxy/internal/types"
	"cursor-api-proxy/pkg/adapter/openai"
	"cursor-api-proxy/pkg/domain/types"
	"cursor-api-proxy/pkg/infrastructure/logger"
	"cursor-api-proxy/pkg/infrastructure/parser"
	"cursor-api-proxy/pkg/usecase"

	"github.com/google/uuid"
	"github.com/zeromicro/go-zero/core/logx"
)
// ChatCompletionsLogic handles OpenAI-compatible /chat/completions requests,
// both non-streaming and SSE streaming, on top of the shared service context.
type ChatCompletionsLogic struct {
	logx.Logger
	// ctx is the per-request context carried from the handler.
	ctx context.Context
	// svcCtx holds shared service dependencies, including the bridge config.
	svcCtx *svc.ServiceContext
}
// NewChatCompletionsLogic constructs a ChatCompletionsLogic bound to the
// given request context and shared service context.
func NewChatCompletionsLogic(ctx context.Context, svcCtx *svc.ServiceContext) *ChatCompletionsLogic {
	logic := &ChatCompletionsLogic{
		Logger: logx.WithContext(ctx),
		ctx:    ctx,
		svcCtx: svcCtx,
	}
	return logic
}
// ChatCompletions handles a non-streaming /chat/completions request.
// It normalizes the requested model ID, resolves it to a Cursor model,
// flattens messages (plus any tools/functions rendered as a synthetic
// leading system message) into a single prompt, and currently returns a
// placeholder response until the Cursor agent execution path is wired in.
func (l *ChatCompletionsLogic) ChatCompletions(req *apitypes.ChatCompletionRequest) (*apitypes.ChatCompletionResponse, error) {
	cfg := configToBridge(l.svcCtx.Config)

	// Map the OpenAI-style model ID onto a Cursor model, falling back to
	// the normalized ID when no mapping exists.
	model := openai.NormalizeModelID(req.Model)
	cursorModel := types.ResolveToCursorModel(model)
	if cursorModel == "" {
		cursorModel = model
	}

	messages := convertMessages(req.Messages)
	tools := convertTools(req.Tools)
	functions := convertFunctions(req.Functions)
	cleanMessages := usecase.SanitizeMessages(messages)

	// Tools/functions are surfaced to the model as a synthetic system
	// message prepended to the conversation rather than native tool-calling.
	toolsText := openai.ToolsToSystemText(tools, functions)
	messagesWithTools := cleanMessages
	if toolsText != "" {
		messagesWithTools = append([]interface{}{
			map[string]interface{}{"role": "system", "content": toolsText},
		}, cleanMessages...)
	}
	prompt := openai.BuildPromptFromMessages(messagesWithTools)

	// TODO: implement non-streaming execution against the Cursor agent.
	_ = cfg
	_ = cursorModel
	_ = prompt

	return &apitypes.ChatCompletionResponse{
		Id:     "chatcmpl_" + uuid.New().String(),
		Object: "chat.completion",
		// OpenAI requires a Unix timestamp here; 0 was a placeholder bug.
		Created: time.Now().Unix(),
		Model:   model,
		Choices: []apitypes.Choice{
			{
				Index: 0,
				Message: apitypes.RespMessage{
					Role:    "assistant",
					Content: "TODO: implement non-streaming response",
				},
				FinishReason: "stop",
			},
		},
	}, nil
}
// ChatCompletionsStream handles a streaming /chat/completions request,
// writing the reply as Server-Sent Events in the OpenAI
// chat.completion.chunk format, terminated by a "data: [DONE]" event.
// Currently emits a single placeholder content chunk plus a stop chunk
// until streaming via the Cursor agent is implemented.
func (l *ChatCompletionsLogic) ChatCompletionsStream(req *apitypes.ChatCompletionRequest, w http.ResponseWriter) error {
	cfg := configToBridge(l.svcCtx.Config)

	// Resolve the requested model to a Cursor model, falling back to the
	// normalized ID when no mapping exists.
	model := openai.NormalizeModelID(req.Model)
	cursorModel := types.ResolveToCursorModel(model)
	if cursorModel == "" {
		cursorModel = model
	}

	messages := convertMessages(req.Messages)
	tools := convertTools(req.Tools)
	functions := convertFunctions(req.Functions)
	cleanMessages := usecase.SanitizeMessages(messages)

	// Tools/functions are rendered into a synthetic leading system message.
	toolsText := openai.ToolsToSystemText(tools, functions)
	messagesWithTools := cleanMessages
	if toolsText != "" {
		messagesWithTools = append([]interface{}{
			map[string]interface{}{"role": "system", "content": toolsText},
		}, cleanMessages...)
	}
	prompt := openai.BuildPromptFromMessages(messagesWithTools)
	if l.svcCtx.Config.Verbose {
		logger.LogDebug("model=%s prompt_len=%d", cursorModel, len(prompt))
	}

	// SSE headers must be set before the first body write; previously none
	// were set, which breaks SSE clients expecting text/event-stream.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")

	id := "chatcmpl_" + uuid.New().String()
	// OpenAI requires a Unix timestamp here; 0 was a placeholder bug.
	created := time.Now().Unix()
	hasTools := len(tools) > 0 || len(functions) > 0
	// flusher is nil when the writer cannot flush; flushing is then skipped.
	flusher, _ := w.(http.Flusher)

	var accumulated string
	var chunkNum int
	var p parser.Parser

	// TODO: implement proper streaming with usecase.RunAgentStream.
	_ = cfg
	_ = prompt
	_ = hasTools
	_ = p
	_ = chunkNum
	_ = accumulated

	// emit marshals v and writes it as a single SSE data event, flushing
	// when possible. Marshal errors (previously discarded) are logged.
	emit := func(v interface{}) {
		data, err := json.Marshal(v)
		if err != nil {
			logger.LogDebug("marshal sse chunk: %v", err)
			return
		}
		fmt.Fprintf(w, "data: %s\n\n", data)
		if flusher != nil {
			flusher.Flush()
		}
	}

	// Placeholder content chunk.
	emit(map[string]interface{}{
		"id": id, "object": "chat.completion.chunk", "created": created, "model": model,
		"choices": []map[string]interface{}{
			{"index": 0, "delta": map[string]string{"content": "Streaming not yet implemented"}, "finish_reason": nil},
		},
	})
	// Terminal stop chunk with an empty delta.
	emit(map[string]interface{}{
		"id": id, "object": "chat.completion.chunk", "created": created, "model": model,
		"choices": []map[string]interface{}{
			{"index": 0, "delta": map[string]interface{}{}, "finish_reason": "stop"},
		},
	})
	fmt.Fprintf(w, "data: [DONE]\n\n")
	if flusher != nil {
		flusher.Flush()
	}
	return nil
}
// Stub implementations - TODO: full implementation
// convertMessages converts API request messages into the generic
// []interface{} of {role, content} maps consumed by the usecase/openai
// helpers. Returns nil for nil input, consistent with convertTools and
// convertFunctions (previously it returned a non-nil empty slice).
func convertMessages(msgs []apitypes.Message) []interface{} {
	if msgs == nil {
		return nil
	}
	result := make([]interface{}, len(msgs))
	for i, m := range msgs {
		result[i] = map[string]interface{}{
			"role":    m.Role,
			"content": m.Content,
		}
	}
	return result
}
// convertTools converts API tool definitions into generic maps matching
// the OpenAI tool schema: {type, function: {name, description, parameters}}.
// A nil input yields nil.
func convertTools(tools []apitypes.Tool) []interface{} {
	if tools == nil {
		return nil
	}
	out := make([]interface{}, 0, len(tools))
	for _, t := range tools {
		fn := map[string]interface{}{
			"name":        t.Function.Name,
			"description": t.Function.Description,
			"parameters":  t.Function.Parameters,
		}
		out = append(out, map[string]interface{}{
			"type":     t.Type,
			"function": fn,
		})
	}
	return out
}
// convertFunctions converts legacy API function definitions into generic
// {name, description, parameters} maps. A nil input yields nil.
func convertFunctions(funcs []apitypes.Function) []interface{} {
	if funcs == nil {
		return nil
	}
	out := make([]interface{}, 0, len(funcs))
	for _, f := range funcs {
		out = append(out, map[string]interface{}{
			"name":        f.Name,
			"description": f.Description,
			"parameters":  f.Parameters,
		})
	}
	return out
}
// configToBridge maps the service-level config.Config onto the
// config.BridgeConfig consumed by the bridge/agent layer. The mapping is
// one-to-one for every field except Mode, which is hard-coded to "ask"
// for chat-completions traffic.
func configToBridge(c config.Config) config.BridgeConfig {
	return config.BridgeConfig{
		AgentBin:             c.AgentBin,
		Host:                 c.Host,
		Port:                 c.Port,
		RequiredKey:          c.RequiredKey,
		DefaultModel:         c.DefaultModel,
		Mode:                 "ask", // chat endpoint always runs the agent in ask mode
		Provider:             c.Provider,
		Force:                c.Force,
		ApproveMcps:          c.ApproveMcps,
		StrictModel:          c.StrictModel,
		Workspace:            c.Workspace,
		TimeoutMs:            c.TimeoutMs,
		TLSCertPath:          c.TLSCertPath,
		TLSKeyPath:           c.TLSKeyPath,
		SessionsLogPath:      c.SessionsLogPath,
		ChatOnlyWorkspace:    c.ChatOnlyWorkspace,
		Verbose:              c.Verbose,
		MaxMode:              c.MaxMode,
		ConfigDirs:           c.ConfigDirs,
		MultiPort:            c.MultiPort,
		WinCmdlineMax:        c.WinCmdlineMax,
		GeminiAccountDir:     c.GeminiAccountDir,
		GeminiBrowserVisible: c.GeminiBrowserVisible,
		GeminiMaxSessions:    c.GeminiMaxSessions,
	}
}
// Compile-time reference that keeps the usecase import alive while the
// remaining usecase functions are not yet wired into this file.
var _ = usecase.AccountsDir