Skip to content

Commit 776e841

Browse files
committed
Refactors AI model selection and endpoint handling
Streamlines AI model selection in the settings panel by removing model grouping and adding GPT-5 Pro and GPT-4.1. Normalizes endpoint handling for the OpenAI and DeepSeek providers to avoid duplicate '/v1' segments and ensures the correct API base URLs are used. Also removes the deepseek-coder option, updates the AIChatHeader test cases to reflect the changed status indicator, and refreshes the OpenAI model mocks in AIChat with current models.
1 parent b6042ad commit 776e841

File tree

9 files changed

+181
-135
lines changed

9 files changed

+181
-135
lines changed

frontend/src/components/AISettingsPanel.vue

Lines changed: 5 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -144,19 +144,11 @@
144144
</el-option>
145145
</template>
146146
<template v-else>
147-
<el-option-group label="GPT-5 Series (Latest 2025)">
148-
<el-option value="gpt-5" label="GPT-5 ⭐ Recommended" />
149-
<el-option value="gpt-5-mini" label="GPT-5 Mini" />
150-
<el-option value="gpt-5-nano" label="GPT-5 Nano" />
151-
</el-option-group>
152-
<el-option-group label="GPT-4.1 Series">
153-
<el-option value="gpt-4.1-2025-04-14" label="GPT-4.1" />
154-
<el-option value="gpt-4.1-mini-2025-04-14" label="GPT-4.1 Mini" />
155-
</el-option-group>
156-
<el-option-group label="GPT-4o Series">
157-
<el-option value="gpt-4o-2024-08-06" label="GPT-4o" />
158-
<el-option value="gpt-4o-mini-2024-07-18" label="GPT-4o Mini" />
159-
</el-option-group>
147+
<el-option value="gpt-5" label="GPT-5 ⭐ Recommended" />
148+
<el-option value="gpt-5-mini" label="GPT-5 Mini" />
149+
<el-option value="gpt-5-nano" label="GPT-5 Nano" />
150+
<el-option value="gpt-5-pro" label="GPT-5 Pro" />
151+
<el-option value="gpt-4.1" label="GPT-4.1" />
160152
</template>
161153
</el-select>
162154
<el-button
@@ -228,7 +220,6 @@
228220
<template v-else>
229221
<el-option value="deepseek-reasoner" label="DeepSeek Reasoner ⭐" />
230222
<el-option value="deepseek-chat" label="DeepSeek Chat" />
231-
<el-option value="deepseek-coder" label="DeepSeek Coder" />
232223
</template>
233224
</el-select>
234225
<el-button

frontend/src/utils/config.ts

Lines changed: 32 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -40,13 +40,14 @@ export function loadConfigForProvider(provider: Provider): AIConfig {
4040
provider,
4141
endpoint: (() => {
4242
const value = stored.endpoint ?? defaults.endpoint ?? ''
43+
const fallback = defaults.endpoint ?? ''
4344
if (provider !== 'ollama' && isLocalEndpoint(value)) {
44-
return defaults.endpoint ?? ''
45+
return normalizeEndpoint(provider, String(fallback))
4546
}
4647
if (!value) {
47-
return defaults.endpoint ?? ''
48+
return normalizeEndpoint(provider, String(fallback))
4849
}
49-
return value
50+
return normalizeEndpoint(provider, String(value))
5051
})(),
5152
model: stored.model ?? defaults.model ?? '',
5253
apiKey: stored.apiKey ?? defaults.apiKey ?? '',
@@ -74,7 +75,7 @@ export function saveConfig(config: AIConfig): void {
7475
const normalizedProvider = (provider === 'local' ? 'ollama' : provider) as Provider
7576
const defaults = getDefaultConfig(normalizedProvider)
7677
const providerConfig = {
77-
endpoint: (rest.endpoint && String(rest.endpoint).trim()) || defaults.endpoint || '',
78+
endpoint: normalizeEndpoint(normalizedProvider, (rest.endpoint && String(rest.endpoint).trim()) || defaults.endpoint || ''),
7879
model: rest.model ?? defaults.model ?? '',
7980
apiKey: rest.apiKey ?? defaults.apiKey ?? '',
8081
timeout: (typeof rest.timeout === 'number' && rest.timeout > 0 ? rest.timeout : defaults.timeout ?? 120),
@@ -94,7 +95,7 @@ export function saveConfig(config: AIConfig): void {
9495
export function getDefaultConfig(provider: string): Partial<AIConfig> {
9596
const defaults: Record<string, Partial<AIConfig>> = {
9697
ollama: { endpoint: 'http://localhost:11434', apiKey: '', timeout: 120 },
97-
openai: { endpoint: 'https://api.openai.com/v1', apiKey: '', timeout: 120 },
98+
openai: { endpoint: 'https://api.openai.com', apiKey: '', timeout: 120 },
9899
deepseek: { endpoint: 'https://api.deepseek.com', apiKey: '', timeout: 180 }
99100
}
100101

@@ -121,10 +122,8 @@ export function getMockModels(provider: string): Model[] {
121122
{ id: 'gpt-5', name: 'GPT-5 ⭐', size: 'Cloud' },
122123
{ id: 'gpt-5-mini', name: 'GPT-5 Mini', size: 'Cloud' },
123124
{ id: 'gpt-5-nano', name: 'GPT-5 Nano', size: 'Cloud' },
124-
{ id: 'gpt-4.1-2025-04-14', name: 'GPT-4.1', size: 'Cloud' },
125-
{ id: 'gpt-4.1-mini-2025-04-14', name: 'GPT-4.1 Mini', size: 'Cloud' },
126-
{ id: 'gpt-4o-2024-08-06', name: 'GPT-4o', size: 'Cloud' },
127-
{ id: 'gpt-4o-mini-2024-07-18', name: 'GPT-4o Mini', size: 'Cloud' }
125+
{ id: 'gpt-5-pro', name: 'GPT-5 Pro', size: 'Cloud' },
126+
{ id: 'gpt-4.1', name: 'GPT-4.1', size: 'Cloud' }
128127
],
129128
deepseek: [
130129
{ id: 'deepseek-chat', name: 'DeepSeek Chat', size: 'Cloud' },
@@ -140,3 +139,27 @@ export function getMockModels(provider: string): Model[] {
140139
export function generateId(): string {
141140
return `${Date.now()}-${Math.random().toString(36).slice(2, 9)}`
142141
}
142+
143+
function normalizeEndpoint(provider: Provider, endpoint: string): string {
144+
const normalizedProvider: Provider = (provider === 'local' ? 'ollama' : provider) as Provider
145+
if (!endpoint) {
146+
return endpoint
147+
}
148+
149+
let value = endpoint.trim()
150+
while (value.endsWith('/')) {
151+
value = value.slice(0, -1)
152+
}
153+
154+
if (normalizedProvider === 'openai' || normalizedProvider === 'deepseek') {
155+
const lower = value.toLowerCase()
156+
if (lower.endsWith('/v1')) {
157+
value = value.slice(0, value.length - 3)
158+
while (value.endsWith('/')) {
159+
value = value.slice(0, -1)
160+
}
161+
}
162+
}
163+
164+
return value
165+
}

frontend/tests/components/AIChatHeader.test.ts

Lines changed: 16 additions & 63 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ describe('AIChatHeader', () => {
1717
}
1818
})
1919

20-
it('should render title and subtitle', () => {
20+
it('renders title and subtitle', () => {
2121
const wrapper = mount(AIChatHeader, {
2222
props: {
2323
provider: 'ollama',
@@ -34,11 +34,11 @@ describe('AIChatHeader', () => {
3434
expect(wrapper.text()).toContain('ai.subtitle')
3535
})
3636

37-
it('should display correct status', () => {
37+
it('shows provider label and status indicator', () => {
3838
const wrapper = mount(AIChatHeader, {
3939
props: {
40-
provider: 'ollama',
41-
status: 'connected'
40+
provider: 'deepseek',
41+
status: 'connecting'
4242
},
4343
global: {
4444
provide: {
@@ -47,75 +47,28 @@ describe('AIChatHeader', () => {
4747
}
4848
})
4949

50-
expect(wrapper.text()).toContain('ai.status.connected')
50+
const indicator = wrapper.find('.status-indicator')
51+
expect(indicator.exists()).toBe(true)
52+
expect(indicator.classes()).toContain('connecting')
53+
expect(indicator.text()).toContain('ai.status.connecting')
54+
expect(wrapper.text()).toContain('ai.providerLabel')
5155
})
5256

53-
it('should emit open-settings when settings button clicked', async () => {
54-
const wrapper = mount(AIChatHeader, {
57+
it('applies correct status classes', () => {
58+
const createWrapper = (status: 'connected' | 'connecting' | 'disconnected') => mount(AIChatHeader, {
5559
props: {
56-
provider: 'ollama',
57-
status: 'disconnected'
60+
provider: 'openai',
61+
status
5862
},
5963
global: {
6064
provide: {
6165
appContext: mockContext
62-
},
63-
stubs: {
64-
'el-tag': true,
65-
'el-icon': true,
66-
'el-button': true
6766
}
6867
}
6968
})
7069

71-
// Find the el-button component and trigger its click event
72-
await wrapper.findComponent({ name: 'el-button' }).trigger('click')
73-
74-
expect(wrapper.emitted('open-settings')).toBeTruthy()
75-
expect(wrapper.emitted('open-settings')?.length).toBe(1)
76-
})
77-
78-
it('should display correct status type', () => {
79-
const globalConfig = {
80-
provide: {
81-
appContext: mockContext
82-
},
83-
stubs: {
84-
'el-tag': {
85-
template: '<span class="el-tag"><slot /></span>'
86-
},
87-
'el-icon': true,
88-
'el-button': true
89-
}
90-
}
91-
92-
const connectedWrapper = mount(AIChatHeader, {
93-
props: {
94-
provider: 'ollama',
95-
status: 'connected'
96-
},
97-
global: globalConfig
98-
})
99-
100-
const connectingWrapper = mount(AIChatHeader, {
101-
props: {
102-
provider: 'ollama',
103-
status: 'connecting'
104-
},
105-
global: globalConfig
106-
})
107-
108-
const disconnectedWrapper = mount(AIChatHeader, {
109-
props: {
110-
provider: 'ollama',
111-
status: 'disconnected'
112-
},
113-
global: globalConfig
114-
})
115-
116-
// Status badges should exist in all cases
117-
expect(connectedWrapper.find('.el-tag').exists()).toBe(true)
118-
expect(connectingWrapper.find('.el-tag').exists()).toBe(true)
119-
expect(disconnectedWrapper.find('.el-tag').exists()).toBe(true)
70+
expect(createWrapper('connected').find('.status-indicator').classes()).toContain('connected')
71+
expect(createWrapper('connecting').find('.status-indicator').classes()).toContain('connecting')
72+
expect(createWrapper('disconnected').find('.status-indicator').classes()).toContain('disconnected')
12073
})
12174
})

frontend/tests/composables/useAIChat.test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,8 @@ describe('useAIChat', () => {
6464
provider: 'openai'
6565
}))
6666
localStorage.setItem('atest-ai-config-openai', JSON.stringify({
67-
endpoint: 'https://api.openai.com/v1',
68-
model: 'gpt-4o',
67+
endpoint: 'https://api.openai.com',
68+
model: 'gpt-5',
6969
timeout: 200,
7070
maxTokens: 2048,
7171
apiKey: 'sk-test123',

frontend/tests/utils/config.test.ts

Lines changed: 42 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ describe('config utils', () => {
3232

3333
const config = loadConfig()
3434
expect(config.provider).toBe('openai')
35-
expect(config.endpoint).toBe('https://api.openai.com/v1')
35+
expect(config.endpoint).toBe('https://api.openai.com')
3636
expect(config.apiKey).toBe('')
3737
expect(config.timeout).toBe(120)
3838
})
@@ -74,7 +74,21 @@ describe('config utils', () => {
7474
expect(config.provider).toBe('deepseek')
7575
expect(config.endpoint).toBe('https://api.deepseek.com')
7676
expect(config.model).toBe('deepseek-chat')
77-
expect(config.timeout).toBe(180)
77+
expect(config.timeout).toBe(90)
78+
})
79+
80+
it('should normalize trailing version segment for openai endpoint', () => {
81+
localStorage.setItem('atest-ai-global-config', JSON.stringify({
82+
provider: 'openai'
83+
}))
84+
localStorage.setItem('atest-ai-config-openai', JSON.stringify({
85+
endpoint: 'https://api.openai.com/v1/',
86+
model: 'gpt-5',
87+
apiKey: 'sk-test'
88+
}))
89+
90+
const config = loadConfig()
91+
expect(config.endpoint).toBe('https://api.openai.com')
7892
})
7993
})
8094

@@ -122,6 +136,23 @@ describe('config utils', () => {
122136
expect(providerConfig.model).toBe('llama3.2:3b')
123137
expect(providerConfig.timeout).toBe(90)
124138
})
139+
140+
it('should normalize openai endpoint when saving', () => {
141+
const config: AIConfig = {
142+
provider: 'openai',
143+
endpoint: 'https://api.openai.com/v1',
144+
model: 'gpt-5',
145+
apiKey: 'sk-test',
146+
timeout: 120,
147+
maxTokens: 16384,
148+
status: 'connected'
149+
}
150+
151+
saveConfig(config)
152+
153+
const providerConfig = JSON.parse(localStorage.getItem('atest-ai-config-openai')!)
154+
expect(providerConfig.endpoint).toBe('https://api.openai.com')
155+
})
125156
})
126157

127158
describe('getDefaultConfig', () => {
@@ -137,7 +168,7 @@ describe('config utils', () => {
137168
it('should return openai default config', () => {
138169
const config = getDefaultConfig('openai')
139170

140-
expect(config.endpoint).toBe('https://api.openai.com/v1')
171+
expect(config.endpoint).toBe('https://api.openai.com')
141172
expect(config.apiKey).toBe('')
142173
expect(config.timeout).toBe(120)
143174
})
@@ -161,9 +192,14 @@ describe('config utils', () => {
161192
it('should return openai mock models', () => {
162193
const models = getMockModels('openai')
163194

164-
expect(models.length).toBeGreaterThanOrEqual(7)
165-
expect(models[0].id).toBe('gpt-5')
166-
expect(models.some(model => model.id === 'gpt-4o-2024-08-06')).toBe(true)
195+
expect(models).toHaveLength(5)
196+
expect(models.map(model => model.id)).toEqual([
197+
'gpt-5',
198+
'gpt-5-mini',
199+
'gpt-5-nano',
200+
'gpt-5-pro',
201+
'gpt-4.1'
202+
])
167203
})
168204

169205
it('should return empty array for unknown provider', () => {

pkg/ai/endpoints.go

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
package ai
2+
3+
import "strings"
4+
5+
// normalizeProviderEndpoint trims provider endpoints to a canonical form so that
6+
// universal clients can safely append API paths without duplicating segments like /v1.
7+
func normalizeProviderEndpoint(provider, endpoint string) string {
8+
trimmed := strings.TrimSpace(endpoint)
9+
if trimmed == "" {
10+
return ""
11+
}
12+
13+
trimmed = strings.TrimRight(trimmed, "/")
14+
normalized := strings.ToLower(strings.TrimSpace(provider))
15+
16+
if normalized == "openai" || normalized == "deepseek" {
17+
for strings.HasSuffix(trimmed, "/v1") {
18+
trimmed = strings.TrimSuffix(trimmed, "/v1")
19+
trimmed = strings.TrimRight(trimmed, "/")
20+
}
21+
}
22+
23+
return trimmed
24+
}

pkg/ai/generator.go

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@ import (
2626
"sync"
2727
"time"
2828

29-
"github.com/linuxsuren/atest-ext-ai/pkg/ai/providers/openai"
3029
"github.com/linuxsuren/atest-ext-ai/pkg/ai/providers/universal"
3130
"github.com/linuxsuren/atest-ext-ai/pkg/config"
3231
"github.com/linuxsuren/atest-ext-ai/pkg/interfaces"
@@ -723,33 +722,36 @@ func createRuntimeClient(provider string, runtimeConfig map[string]any) (interfa
723722
}
724723

725724
// Create client based on provider type
726-
switch provider {
725+
normalizedProvider := normalizeProviderName(provider)
726+
727+
switch normalizedProvider {
727728
case "openai", "deepseek", "custom":
728-
// Create OpenAI-compatible client
729-
config := &openai.Config{
729+
config := &universal.Config{
730+
Provider: normalizedProvider,
731+
Endpoint: normalizeProviderEndpoint(normalizedProvider, baseURL),
730732
APIKey: apiKey,
731-
BaseURL: baseURL,
732733
Model: model,
733734
MaxTokens: maxTokens,
734735
}
735736

736-
// Set default endpoints for known providers
737-
if provider == "deepseek" && config.BaseURL == "" {
738-
config.BaseURL = "https://api.deepseek.com/v1"
739-
}
740-
741-
// Custom provider requires endpoint
742-
if provider == "custom" && config.BaseURL == "" {
743-
return nil, fmt.Errorf("endpoint is required for custom provider")
737+
if config.Endpoint == "" {
738+
switch normalizedProvider {
739+
case "openai":
740+
config.Endpoint = "https://api.openai.com"
741+
case "deepseek":
742+
config.Endpoint = "https://api.deepseek.com"
743+
case "custom":
744+
return nil, fmt.Errorf("endpoint is required for custom provider")
745+
}
744746
}
745747

746-
return openai.NewClient(config)
748+
return universal.NewUniversalClient(config)
747749

748750
case "ollama":
749751
// Create Ollama client (using universal provider)
750752
config := &universal.Config{
751753
Provider: "ollama",
752-
Endpoint: baseURL,
754+
Endpoint: normalizeProviderEndpoint("ollama", baseURL),
753755
Model: model,
754756
MaxTokens: maxTokens,
755757
}

0 commit comments

Comments
 (0)