Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 25 additions & 3 deletions internal/api/chat/create_conversation_message_stream_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -276,9 +276,31 @@ func (s *ChatServerV2) CreateConversationMessageStream(
return s.sendStreamError(stream, err)
}

// Check if user has an API key for requested model
var llmProvider *models.LLMProviderConfig
var customModel *models.CustomModel
customModel = nil
for _, m := range settings.CustomModels {
if m.Slug == modelSlug {
customModel = &m
break
}
}

// Usage is the same as ChatCompletion, just passing the stream parameter
llmProvider := &models.LLMProviderConfig{
APIKey: settings.OpenAIAPIKey,

if customModel == nil {
// User did not specify API key for this model
llmProvider = &models.LLMProviderConfig{
APIKey: "",
IsCustomModel: false,
}
} else {
llmProvider = &models.LLMProviderConfig{
APIKey: customModel.APIKey,
Endpoint: customModel.BaseUrl,
IsCustomModel: true,
}
}

openaiChatHistory, inappChatHistory, err := s.aiClientV2.ChatCompletionStreamV2(ctx, stream, conversation.ID.Hex(), modelSlug, conversation.OpenaiChatHistoryCompletion, llmProvider)
Expand Down Expand Up @@ -307,7 +329,7 @@ func (s *ChatServerV2) CreateConversationMessageStream(
for i, bsonMsg := range conversation.InappChatHistory {
protoMessages[i] = mapper.BSONToChatMessageV2(bsonMsg)
}
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider)
title, err := s.aiClientV2.GetConversationTitleV2(ctx, protoMessages, llmProvider, modelSlug)
if err != nil {
s.logger.Error("Failed to get conversation title", "error", err, "conversationID", conversation.ID.Hex())
return
Expand Down
45 changes: 35 additions & 10 deletions internal/api/chat/list_supported_models_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package chat

import (
"context"
"strings"

"paperdebugger/internal/libs/contextutil"
chatv2 "paperdebugger/pkg/gen/api/chat/v2"
Expand Down Expand Up @@ -220,37 +219,63 @@ func (s *ChatServerV2) ListSupportedModels(
return nil, err
}

hasOwnAPIKey := strings.TrimSpace(settings.OpenAIAPIKey) != ""

var models []*chatv2.SupportedModel
for _, config := range allModels {
// Check if user has set API key for this particular model
hasOwnAPIKey := false
for _, model := range settings.CustomModels {
if model.Slug == config.slugOpenRouter {
// User has API key for this model, use slugOpenAI instead of slugOpenRouter if applicable
// slug := config.slugOpenRouter
// if strings.TrimSpace(config.slugOpenAI) != "" {
// slug = config.slugOpenAI
// }

models = append(models, &chatv2.SupportedModel{
Name: model.Name,
Slug: model.Slug,
TotalContext: int64(model.ContextWindow),
MaxOutput: int64(model.MaxOutput),
InputPrice: int64(model.InputPrice),
OutputPrice: int64(model.OutputPrice),
IsCustom: true,
})
hasOwnAPIKey = true
continue
}
}

if hasOwnAPIKey {
continue
}

// Choose the appropriate slug based on whether user has their own API key.
//
// Some models are only available via OpenRouter; for those, slugOpenAI may be empty.
// In that case, keep using the OpenRouter slug to avoid returning an empty model slug.
slug := config.slugOpenRouter
if hasOwnAPIKey && strings.TrimSpace(config.slugOpenAI) != "" {
slug = config.slugOpenAI
}
// slug := config.slugOpenRouter
// if hasOwnAPIKey && strings.TrimSpace(config.slugOpenAI) != "" {
// slug = config.slugOpenAI
// }

model := &chatv2.SupportedModel{
Name: config.name,
Slug: slug,
Slug: config.slugOpenRouter,
TotalContext: config.totalContext,
MaxOutput: config.maxOutput,
InputPrice: config.inputPrice,
OutputPrice: config.outputPrice,
IsCustom: false,
}

// If model requires own key but user hasn't provided one, mark as disabled
if config.requireOwnKey && !hasOwnAPIKey {
if config.requireOwnKey {
model.Disabled = true
model.DisabledReason = stringPtr("Requires your own OpenAI API key. Configure it in Settings.")
}

models = append(models, model)
}

return &chatv2.ListSupportedModelsResponse{
Models: models,
}, nil
Expand Down
31 changes: 31 additions & 0 deletions internal/api/mapper/user.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,54 @@ import (
)

// MapProtoSettingsToModel converts client-facing proto settings into the
// persistence model stored for the user.
func MapProtoSettingsToModel(settings *userv1.Settings) *models.Settings {
	// Convert every proto custom model into its storage counterpart.
	customModels := make([]models.CustomModel, 0, len(settings.CustomModels))
	for _, cm := range settings.CustomModels {
		customModels = append(customModels, models.CustomModel{
			Slug:          cm.Slug,
			Name:          cm.Name,
			BaseUrl:       cm.BaseUrl,
			APIKey:        cm.ApiKey,
			ContextWindow: cm.ContextWindow,
			MaxOutput:     cm.MaxOutput,
			InputPrice:    cm.InputPrice,
			OutputPrice:   cm.OutputPrice,
		})
	}

	return &models.Settings{
		ShowShortcutsAfterSelection:  settings.ShowShortcutsAfterSelection,
		FullWidthPaperDebuggerButton: settings.FullWidthPaperDebuggerButton,
		EnableCompletion:             settings.EnableCompletion,
		FullDocumentRag:              settings.FullDocumentRag,
		ShowedOnboarding:             settings.ShowedOnboarding,
		OpenAIAPIKey:                 settings.OpenaiApiKey,
		CustomModels:                 customModels,
	}
}

// MapModelSettingsToProto converts persisted user settings back into their
// protobuf representation for returning to the client.
func MapModelSettingsToProto(settings *models.Settings) *userv1.Settings {
	// Map the stored custom models back to proto messages. This must mirror
	// MapProtoSettingsToModel field-for-field so values survive a round trip.
	customModels := make([]*userv1.CustomModel, len(settings.CustomModels))
	for i, m := range settings.CustomModels {
		customModels[i] = &userv1.CustomModel{
			Slug:          m.Slug,
			Name:          m.Name,
			BaseUrl:       m.BaseUrl,
			ApiKey:        m.APIKey,
			ContextWindow: m.ContextWindow,
			// Fix: MaxOutput was previously omitted here, silently losing the
			// user's configured max-output tokens on every read-back.
			MaxOutput:   m.MaxOutput,
			InputPrice:  m.InputPrice,
			OutputPrice: m.OutputPrice,
		}
	}

	return &userv1.Settings{
		ShowShortcutsAfterSelection:  settings.ShowShortcutsAfterSelection,
		FullWidthPaperDebuggerButton: settings.FullWidthPaperDebuggerButton,
		EnableCompletion:             settings.EnableCompletion,
		FullDocumentRag:              settings.FullDocumentRag,
		ShowedOnboarding:             settings.ShowedOnboarding,
		OpenaiApiKey:                 settings.OpenAIAPIKey,
		CustomModels:                 customModels,
	}
}
7 changes: 4 additions & 3 deletions internal/models/llm_provider.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,10 @@ package models
// LLMProviderConfig holds the configuration for LLM API calls.
// If both Endpoint and APIKey are empty, the system default will be used.
type LLMProviderConfig struct {
	Endpoint      string // provider base URL; empty means use the system default
	APIKey        string // user-supplied key; empty means use the server's key
	ModelName     string // optional model name override
	IsCustomModel bool   // true when this config comes from a user-defined custom model
}

// IsCustom returns true if the user has configured custom LLM provider settings.
Expand Down
24 changes: 18 additions & 6 deletions internal/models/user.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,25 @@ package models

import "go.mongodb.org/mongo-driver/v2/bson"

// CustomModel describes a user-configured LLM model, including the endpoint,
// credentials, and context/pricing metadata needed to call it.
type CustomModel struct {
	Slug          string `bson:"_id"`            // model slug; doubles as the Mongo document key
	Name          string `bson:"name"`           // human-readable display name
	BaseUrl       string `bson:"base_url"`       // OpenAI-compatible endpoint base URL
	APIKey        string `bson:"api_key"`        // user-supplied API key for this model
	ContextWindow int32  `bson:"context_window"` // total context size in tokens
	MaxOutput     int32  `bson:"max_output"`     // maximum completion tokens
	InputPrice    int32  `bson:"input_price"`    // input token price (units per project convention — TODO confirm)
	OutputPrice   int32  `bson:"output_price"`   // output token price (units per project convention — TODO confirm)
}

// Settings holds per-user preferences and LLM credentials.
type Settings struct {
	ShowShortcutsAfterSelection  bool          `bson:"show_shortcuts_after_selection"`
	FullWidthPaperDebuggerButton bool          `bson:"full_width_paper_debugger_button"`
	EnableCompletion             bool          `bson:"enable_completion"`
	FullDocumentRag              bool          `bson:"full_document_rag"`
	ShowedOnboarding             bool          `bson:"showed_onboarding"`
	OpenAIAPIKey                 string        `bson:"openai_api_key"`
	CustomModels                 []CustomModel `bson:"custom_models"` // user-configured models with their own keys
}

type User struct {
Expand Down
22 changes: 12 additions & 10 deletions internal/services/toolkit/client/client_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,18 +32,20 @@ func (a *AIClientV2) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *opena
var Endpoint string = llmConfig.Endpoint
var APIKey string = llmConfig.APIKey

if Endpoint == "" {
if APIKey != "" {
// User provided their own API key, use the OpenAI-compatible endpoint
Endpoint = a.cfg.OpenAIBaseURL // standard openai base url
} else {
// suffix needed for cloudflare gateway
Endpoint = a.cfg.InferenceBaseURL + "/openrouter"
if !llmConfig.IsCustomModel {
if Endpoint == "" {
if APIKey != "" {
// User provided their own API key, use the OpenAI-compatible endpoint
Endpoint = a.cfg.OpenAIBaseURL // standard openai base url
} else {
// suffix needed for cloudflare gateway
Endpoint = a.cfg.InferenceBaseURL + "/openrouter"
}
}
}

if APIKey == "" {
APIKey = a.cfg.InferenceAPIKey
if APIKey == "" {
APIKey = a.cfg.InferenceAPIKey
}
}

opts := []option.RequestOption{
Expand Down
5 changes: 5 additions & 0 deletions internal/services/toolkit/client/completion_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,11 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
streamHandler.SendFinalization()
}()

if llmProvider.IsCustomModel {
// e.g., Strip "google/" from "google/gemini-2.5-flash"
modelSlug = modelSlug[strings.Index(modelSlug, "/")+1:]
}

oaiClient := a.GetOpenAIClient(llmProvider)
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry)

Expand Down
10 changes: 8 additions & 2 deletions internal/services/toolkit/client/get_conversation_title_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ import (
"github.com/samber/lo"
)

func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig) (string, error) {
func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistory []*chatv2.Message, llmProvider *models.LLMProviderConfig, modelSlug string) (string, error) {
messages := lo.Map(inappChatHistory, func(message *chatv2.Message, _ int) string {
if _, ok := message.Payload.MessageType.(*chatv2.MessagePayload_Assistant); ok {
return fmt.Sprintf("Assistant: %s", message.Payload.GetAssistant().GetContent())
Expand All @@ -29,7 +29,13 @@ func (a *AIClientV2) GetConversationTitleV2(ctx context.Context, inappChatHistor
message := strings.Join(messages, "\n")
message = fmt.Sprintf("%s\nBased on above conversation, generate a short, clear, and descriptive title that summarizes the main topic or purpose of the discussion. The title should be concise, specific, and use natural language. Avoid vague or generic titles. Use abbreviation and short words if possible. Use 3-5 words if possible. Give me the title only, no other text including any other words.", message)

_, resp, err := a.ChatCompletionV2(ctx, "gpt-5-nano", OpenAIChatHistory{
// Default model if user is not using their own
modelToUse := "gpt-5-nano"
if llmProvider.IsCustomModel {
modelToUse = modelSlug
}

_, resp, err := a.ChatCompletionV2(ctx, modelToUse, OpenAIChatHistory{
openai.SystemMessage("You are a helpful assistant that generates a title for a conversation."),
openai.UserMessage(message),
}, llmProvider)
Expand Down
12 changes: 12 additions & 0 deletions internal/services/toolkit/client/utils_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,18 @@ func getDefaultParamsV2(modelSlug string, toolRegistry *registry.ToolRegistryV2)
"o1",
"codex-mini-latest",
}

// Gemini does not support Store param
if strings.HasPrefix(strings.ToLower(modelSlug), "gemini") {
return openaiv3.ChatCompletionNewParams{
Model: modelSlug,
Temperature: openaiv3.Float(0.7),
MaxCompletionTokens: openaiv3.Int(4000),
Tools: toolRegistry.GetTools(),
ParallelToolCalls: openaiv3.Bool(true),
}
}

for _, model := range reasoningModels {
if strings.Contains(modelSlug, model) {
return openaiv3.ChatCompletionNewParams{
Expand Down
2 changes: 1 addition & 1 deletion pkg/gen/api/auth/v1/auth_grpc.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pkg/gen/api/chat/v1/chat_grpc.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

19 changes: 13 additions & 6 deletions pkg/gen/api/chat/v2/chat.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pkg/gen/api/chat/v2/chat_grpc.pb.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading