diff --git a/src/app/components/ApiSettings.tsx b/src/app/components/ApiSettings.tsx
index c66044e..cdb3e39 100644
--- a/src/app/components/ApiSettings.tsx
+++ b/src/app/components/ApiSettings.tsx
@@ -3,26 +3,49 @@
 import React, { useState, useEffect } from 'react';
 import { SecureApiKeyManager } from '../lib/secureApiKey';
-export type ApiProvider = 'openai' | 'grok' | 'ollama' | 'deepseek' | 'custom';
+export type ApiProvider =
+  | 'openai'
+  | 'openai-compatible'
+  | 'grok'
+  | 'ollama'
+  | 'deepseek'
+  | 'gemini-vertex'
+  | 'custom';
 // API 提供商帮助信息
 const API_HELP: Record<ApiProvider, string> = {
   openai: '使用 OpenAI API,例如 GPT-4',
+  'openai-compatible': '连接任意兼容 OpenAI Chat Completions 的服务',
   grok: '使用 Grok API (X.AI)',
   ollama: '使用本地运行的 Ollama 服务',
   deepseek: '使用 DeepSeek API,例如 DeepSeek-V2',
+  'gemini-vertex': '使用 Google Vertex AI Gemini 模型(需要 GCP 项目)',
   custom: '配置自定义 API 端点'
 };
 // 默认 API URLs
 const API_URLS: Record<ApiProvider, string> = {
   openai: 'https://api.openai.com/v1/chat/completions',
-  grok: 'https://api.x.ai/v1/chat/completions',
+  'openai-compatible': 'https://your-openai-compatible-endpoint/v1/chat/completions',
+  grok: 'https://api.grok.ai/v1/chat/completions',
   ollama: 'http://localhost:11434/api/generate', // 确保使用 /api/generate 端点
   deepseek: 'https://api.deepseek.com/v1/chat/completions',
+  'gemini-vertex': 'https://{region}-aiplatform.googleapis.com/v1/projects/{project}/locations/{region}/publishers/google/models/gemini-1.5-pro:generateContent',
   custom: ''
 };
+const DEFAULT_PROVIDER_OPTIONS: ApiProvider[] = ['openai', 'grok', 'ollama', 'deepseek', 'custom'];
+
+const PROVIDER_LABELS: Record<ApiProvider, string> = {
+  openai: 'OpenAI',
+  'openai-compatible': 'OpenAI 兼容服务',
+  grok: 'Grok (xAI)',
+  ollama: 'Ollama (本地)',
+  deepseek: 'DeepSeek',
+  'gemini-vertex': 'Google Gemini (Vertex AI)',
+  custom: '自定义'
+};
+
 export interface ApiSettingsProps {
   showSettings: boolean;
   toggleSettings: () => void;
@@ -37,6 +60,9 @@ export interface ApiSettingsProps {
   // 仅在使用 Ollama 时需要
   availableModels?: string[];
   fetchModels?: () => Promise<void>;
+  providerOptions?: ApiProvider[];
+  geminiProjectId?: string;
+  setGeminiProjectId?: (projectId: string) => void;
 }
 export default function ApiSettings({
@@ -51,7 +77,10 @@ export default function ApiSettings({
   model,
   setModel,
   availableModels = [],
-  fetchModels
+  fetchModels,
+  providerOptions = DEFAULT_PROVIDER_OPTIONS,
+  geminiProjectId,
+  setGeminiProjectId
 }: ApiSettingsProps) {
   const [rememberMe, setRememberMe] = useState(false);
   const [showSecurityTip, setShowSecurityTip] = useState(false);
@@ -116,6 +145,8 @@ export default function ApiSettings({
     // 设置默认模型名称
     if (provider === 'openai') {
       setModel('gpt-4');
+    } else if (provider === 'openai-compatible') {
+      setModel('gpt-4o-mini');
     } else if (provider === 'grok') {
       setModel('grok-3-latest');
     } else if (provider === 'ollama') {
@@ -131,6 +162,11 @@ export default function ApiSettings({
       }
     } else if (provider === 'deepseek') {
       setModel('deepseek-chat');
+    } else if (provider === 'gemini-vertex') {
+      setModel('gemini-1.5-pro');
+      if (setGeminiProjectId) {
+        setGeminiProjectId('');
+      }
     }
     // 自定义提供商不设置默认模型
   };
@@ -166,15 +202,25 @@ export default function ApiSettings({
             onChange={handleApiProviderChange}
             className="block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 sm:text-sm"
           >
+            {providerOptions.map(option => (
+              <option key={option} value={option}>{PROVIDER_LABELS[option]}</option>
+            ))}

           {API_HELP[apiProvider]}
+          {apiProvider === 'openai-compatible' && (
+            请填写兼容 OpenAI Chat Completions 协议的完整接口地址。
+          )}
+          {apiProvider === 'gemini-vertex' && (
+            需要提供 Vertex AI REST 接口地址,并确保帐号具有访问权限。
+          )}
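For reference, the 'openai-compatible' option above only assumes the target service speaks the standard Chat Completions protocol. A minimal sketch of that request/response shape follows; the helper name is made up for illustration, and the endpoint, key and model are whatever the user enters in this panel (this PR only supplies the 'gpt-4o-mini' default):

```ts
// Illustrative only (not part of this PR): the request shape an
// 'openai-compatible' endpoint is expected to accept.
interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

async function callChatCompletions(
  apiUrl: string,        // e.g. the URL entered for the openai-compatible provider
  apiKey: string,        // user-supplied key
  model: string,         // e.g. the 'gpt-4o-mini' default set by this PR
  messages: ChatMessage[]
): Promise<string> {
  const res = await fetch(apiUrl, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`
    },
    body: JSON.stringify({ model, messages, stream: false })
  });
  if (!res.ok) {
    throw new Error(`Chat Completions request failed: ${res.status}`);
  }
  const data = await res.json();
  // Standard Chat Completions response shape: choices[0].message.content
  return data.choices?.[0]?.message?.content ?? '';
}
```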
@@ -194,7 +240,7 @@ export default function ApiSettings({
+          {apiProvider === 'gemini-vertex' && (
+            可使用 gcloud auth print-access-token 获取临时 Token。
+          )}
       )}
+      {apiProvider === 'gemini-vertex' && setGeminiProjectId && (
+        <input
+          value={geminiProjectId}
+          onChange={(e) => setGeminiProjectId(e.target.value)}
+          className="block w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-blue-500 focus:border-blue-500 sm:text-sm"
+          placeholder="例如 my-gcp-project"
+        />
+      )}
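For orientation, the gemini-vertex provider targets the Vertex AI generateContent REST endpoint, with {project} and {region} substituted into API_URLS['gemini-vertex'] and an OAuth access token such as the one printed by gcloud auth print-access-token. A minimal non-streaming call under those assumptions might look roughly like this (the helper below is illustrative, not part of the PR):

```ts
// Illustrative sketch of a non-streaming Vertex AI Gemini call.
// urlTemplate is API_URLS['gemini-vertex']; accessToken comes from
// `gcloud auth print-access-token`, as the settings hint suggests.
async function callGeminiVertex(
  urlTemplate: string,
  projectId: string,   // value of the new geminiProjectId field
  region: string,      // e.g. 'us-central1' (assumed; not configured by this PR)
  accessToken: string,
  prompt: string
): Promise<string> {
  const url = urlTemplate
    .replace(/\{project\}/g, projectId)
    .replace(/\{region\}/g, region);

  const res = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`
    },
    body: JSON.stringify({
      contents: [{ role: 'user', parts: [{ text: prompt }] }]
    })
  });
  if (!res.ok) {
    throw new Error(`Vertex AI request failed: ${res.status}`);
  }
  const data = await res.json();
  // generateContent returns candidates[].content.parts[].text
  return data.candidates?.[0]?.content?.parts?.[0]?.text ?? '';
}
```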
       {showApiSettings && (
         <ApiSettings
           toggleSettings={() => {}} // 这里已经控制显示了,所以传入空函数
           apiProvider={apiProvider}
           setApiProvider={(provider) => {
+            const previousProvider = apiProvider;
             setApiProvider(provider);
             // 当更改提供商时,直接更新URL(使用预定义的默认值)
             if (provider === 'openai') {
-              setLlmApiUrl('https://api.openai.com/v1/chat/completions');
+              setLlmApiUrl(API_URLS.openai);
               setModel('gpt-4');
+            } else if (provider === 'openai-compatible') {
+              setLlmApiUrl(API_URLS['openai-compatible']);
+              setModel('gpt-4o-mini');
             } else if (provider === 'grok') {
               setLlmApiUrl('https://api.grok.ai/v1/chat/completions');
               setModel('grok-3-latest');
@@ -351,6 +432,15 @@ export default function WritingAssistant() {
             } else if (provider === 'deepseek') {
               setLlmApiUrl('https://api.deepseek.com/v1/chat/completions');
               setModel('deepseek-chat');
+            } else if (provider === 'gemini-vertex') {
+              streamingPreferenceRef.current = useStreaming;
+              setUseStreaming(false);
+              setLlmApiUrl(API_URLS['gemini-vertex']);
+              setModel('gemini-1.5-pro');
+              setGeminiProjectId('');
+            }
+            if (previousProvider === 'gemini-vertex' && provider !== 'gemini-vertex') {
+              setUseStreaming(streamingPreferenceRef.current);
             }
             // 重置错误
             setError(null);
@@ -364,6 +454,9 @@ export default function WritingAssistant() {
           setModel={setModel}
           availableModels={availableModels}
           fetchModels={fetchOllamaModels}
+          providerOptions={['openai', 'openai-compatible', 'grok', 'ollama', 'deepseek', 'gemini-vertex', 'custom']}
+          geminiProjectId={geminiProjectId}
+          setGeminiProjectId={setGeminiProjectId}
         />
       )}
@@ -496,16 +589,16 @@ export default function WritingAssistant() {
           生成结果
           {/* 流式模式切换 */}
+              {apiProvider === 'gemini-vertex' && (
+                Gemini Vertex 暂不支持流式输出
+              )}
-        {output && isComplete && (
+        {articleContent && isComplete && (
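Side note: assuming API_HELP, API_URLS and PROVIDER_LABELS keep their Record<ApiProvider, string> annotations, widening ApiProvider means every one of those maps must carry the new keys, so a forgotten entry fails at compile time instead of rendering as undefined in the UI. A minimal illustration:

```ts
type ApiProvider =
  | 'openai'
  | 'openai-compatible'
  | 'grok'
  | 'ollama'
  | 'deepseek'
  | 'gemini-vertex'
  | 'custom';

// Every member of the union must appear as a key; deleting any line below
// makes tsc report "Property '...' is missing in type ...".
const PROVIDER_LABELS: Record<ApiProvider, string> = {
  openai: 'OpenAI',
  'openai-compatible': 'OpenAI 兼容服务',
  grok: 'Grok (xAI)',
  ollama: 'Ollama (本地)',
  deepseek: 'DeepSeek',
  'gemini-vertex': 'Google Gemini (Vertex AI)',
  custom: '自定义'
};
```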