// Package handlers implements HTTP handlers for the cursor API proxy.
package handlers

import (
	"net/http"
	"sync"
	"time"

	"cursor-api-proxy/internal/config"
	"cursor-api-proxy/internal/httputil"
	"cursor-api-proxy/internal/models"
)

// modelCacheTTLMs is how long (in milliseconds) a fetched model list
// stays fresh before it must be re-fetched.
const modelCacheTTLMs = 5 * 60 * 1000

// ModelCache is a snapshot of the upstream model list and when it was taken.
type ModelCache struct {
	At     int64                   // fetch time, Unix milliseconds
	Models []models.CursorCliModel // cached model list
}

// ModelCacheRef guards a ModelCache and deduplicates concurrent refreshes:
// at most one request performs the upstream fetch while the others wait
// for its result.
type ModelCacheRef struct {
	mu       sync.Mutex
	cache    *ModelCache     // last successful fetch; nil until first success
	inflight bool            // true while a fetch is in progress
	waiters  []chan struct{} // requests blocked on the in-flight fetch
}

// HandleModels serves the model list, refreshing the cache when stale.
// Concurrent requests that arrive during a refresh block until the fetch
// completes instead of issuing duplicate upstream calls.
func (ref *ModelCacheRef) HandleModels(w http.ResponseWriter, r *http.Request, cfg config.BridgeConfig) {
	now := time.Now().UnixMilli()

	ref.mu.Lock()

	// Fast path: the cache exists and is still fresh.
	if ref.cache != nil && now-ref.cache.At <= modelCacheTTLMs {
		cached := ref.cache
		ref.mu.Unlock()
		writeModels(w, cached.Models)
		return
	}

	// Another request is already fetching; register as a waiter and block
	// until the fetcher broadcasts completion by closing our channel.
	if ref.inflight {
		ch := make(chan struct{})
		ref.waiters = append(ref.waiters, ch)
		ref.mu.Unlock()
		<-ch

		ref.mu.Lock()
		cached := ref.cache
		ref.mu.Unlock()

		// BUG FIX: if the fetch failed and there was never a successful
		// fetch, cache is nil. The previous code dereferenced it
		// unconditionally and panicked; report the failure instead.
		if cached == nil {
			httputil.WriteJSON(w, 500, map[string]interface{}{
				"error": map[string]string{"message": "models fetch failed", "code": "models_fetch_error"},
			}, nil)
			return
		}
		writeModels(w, cached.Models)
		return
	}

	// This request becomes the fetcher. Release the lock during the
	// (potentially slow) upstream call.
	ref.inflight = true
	ref.mu.Unlock()

	fetched, err := models.ListCursorCliModels(cfg.AgentBin, 60000)

	ref.mu.Lock()
	ref.inflight = false
	if err == nil {
		ref.cache = &ModelCache{At: time.Now().UnixMilli(), Models: fetched}
	}
	waiters := ref.waiters
	ref.waiters = nil
	ref.mu.Unlock()

	// Wake every waiter. close() broadcasts to all receivers and can
	// never block, unlike a per-channel send.
	for _, ch := range waiters {
		close(ch)
	}

	if err != nil {
		httputil.WriteJSON(w, 500, map[string]interface{}{
			"error": map[string]string{"message": err.Error(), "code": "models_fetch_error"},
		}, nil)
		return
	}
	writeModels(w, fetched)
}

// writeModels writes the OpenAI-style "list" response containing the cursor
// models plus Anthropic alias entries derived from the cursor model IDs.
func writeModels(w http.ResponseWriter, mods []models.CursorCliModel) {
	ids := make([]string, len(mods))
	cursorModels := make([]map[string]interface{}, len(mods), len(mods))
	for i, m := range mods {
		ids[i] = m.ID
		cursorModels[i] = map[string]interface{}{
			"id":       m.ID,
			"object":   "model",
			"owned_by": "cursor",
			"name":     m.Name,
		}
	}
	for _, a := range models.GetAnthropicModelAliases(ids) {
		cursorModels = append(cursorModels, map[string]interface{}{
			"id":       a.ID,
			"object":   "model",
			"owned_by": "cursor",
			"name":     a.Name,
		})
	}
	httputil.WriteJSON(w, 200, map[string]interface{}{
		"object": "list",
		"data":   cursorModels,
	}, nil)
}