
Commit d53df17

Improvements and bug fixes: added support for o1-mini, o3-mini, o1, o1-preview
1 parent e3ffbcd commit d53df17

File tree: 18 files changed, +164 -46 lines

README.md

Lines changed: 8 additions & 0 deletions
```diff
@@ -77,6 +77,14 @@ The integrations available in this release include:
 - Parallel function calling, image generation and recognition are seamlessly supported for new models.
 - Updated documentation with these new models.
 
+- Version 1.3.2 released on Feb 08, 2025.
+- Added support for two newer language models, o1-mini and o3-mini, which became available in Azure OpenAI.
+- These Azure OpenAI models did not support streaming options; exemption logic was added for them.
+- Added support for the regular OpenAI language models o1-mini, o1-preview, o1, o1-2024-12-17, and o3-mini.
+- The full-scale models o1 and o1-2024-12-17 did not support streaming options; exemption logic was added for them.
+- Added an informational message to the progress shimmer: it displays a header when streaming is enabled but unsupported by the selected model.
+- Bug fix: voice output was read from the first entry instead of the selected one.
+
 ### Full-Scale Setup
 
 ![Data access diagram](docs/data-access-diagram.png "Data access diagram")
```
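The exemption logic referred to in the notes above amounts to a single guard before the completion call; the actual change is in the ContentPanel.tsx and ChatHelper.ts hunks further down. Below is a minimal, self-contained sketch of the pattern, using a hypothetical ChatService interface and a streamResponse callback as stand-ins for the web part's API service and its existing streaming path.

```typescript
// Sketch only: SendPayload, ChatService and streamResponse are hypothetical stand-ins.
// In the commit, the guard is ChatHelper.isStreamingSupported(payload.model, props).
interface SendPayload {
  model: string;
  prompt: string;
}

interface ChatService {
  // Non-streamed completion call; callQueryText is the method used in ContentPanel.tsx.
  callQueryText(payload: SendPayload): Promise<string>;
}

async function submit(
  service: ChatService,
  payload: SendPayload,
  streamingSupported: boolean,
  streamResponse: (payload: SendPayload) => Promise<string>
): Promise<string> {
  if (!streamingSupported) {
    // o1 and o1-2024-12-17 (and o1-mini/o3-mini on Azure OpenAI) reject streamed output,
    // so fall back to a plain, non-streamed completion request.
    return service.callQueryText(payload);
  }
  // All other models keep using the existing streaming path.
  return streamResponse(payload);
}
```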

spfx-latest/config/package-solution.json

Lines changed: 1 addition & 1 deletion
```diff
@@ -4,7 +4,7 @@
   "name": "azure-openai-chat",
   "title": "Azure OpenAI Chat Web Part",
   "id": "64e358ba-f88c-4fee-9a8a-6db06299d90a",
-  "version": "1.3.1.0",
+  "version": "1.3.2.0",
   "includeClientSideAssets": true,
   "skipFeatureDeployment": true,
   "isDomainIsolated": false,
```

spfx-latest/package-lock.json

Lines changed: 8 additions & 8 deletions
Some generated files are not rendered by default.

spfx-latest/package.json

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,6 +1,6 @@
 {
   "name": "azure-openai-chat",
-  "version": "1.3.1",
+  "version": "1.3.2",
   "private": true,
   "engines": {
     "node": ">=16.13.0 <17.0.0 || >=18.17.1 <19.0.0"
```

spfx-latest/src/components/AzureOpenAiChatLoader.ts

Lines changed: 3 additions & 1 deletion
```diff
@@ -196,7 +196,7 @@ export default class AzureOpenAiChatLoader extends BaseClientSideWebPart<IAzureO
   }
 
   protected get dataVersion(): Version {
-    return Version.parse('1.0');
+    return Version.parse('1.3.2');
   }
 
   protected getPropertyPaneConfiguration(): IPropertyPaneConfiguration {
@@ -274,6 +274,8 @@ export default class AzureOpenAiChatLoader extends BaseClientSideWebPart<IAzureO
           { key: 'gpt-4-1106-preview', text: `${strings.TextGpt4Turbo} (${strings.TextPreview})` },
           { key: 'gpt-4o-mini', text: strings.TextGpt4oMini },
           { key: 'gpt-4o', text: strings.TextGpt4o },
+          { key: 'o1-mini', text: strings.TextO1Mini },
+          { key: 'o3-mini', text: strings.TextO3Mini },
         ],
         properties: this.properties,
       }),
```

spfx-latest/src/components/ContentPanel.tsx

Lines changed: 17 additions & 5 deletions
```diff
@@ -314,7 +314,15 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
     setModel
   );
 
-  const panelContentPane = elements.getPanelContentPane(refContentPane, chatHistory, isCustomPanelOpen, rows, isProgress);
+  const panelContentPane = elements.getPanelContentPane(
+    refContentPane,
+    chatHistory,
+    isCustomPanelOpen,
+    rows,
+    isProgress,
+    props,
+    model
+  );
 
   const promptContainer = elements.getPromptContainer(
     refPromptArea,
@@ -407,6 +415,9 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
     const maxHeight = 100; // px
     const padding = 15; // px
 
+    // Strip trailing newline chars (added on copy-paste)
+    e.target.value = e.target.value.replace(/\n+$/, '');
+
     if (!e.target.value) {
       e.target.style.height = `${minHeight}px`;
       setIsSubmitDisabled(true);
@@ -569,7 +580,7 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
     };
 
     if (props.apiService.isConfigured()) {
-      if (!props.streaming) {
+      if (!ChatHelper.isStreamingSupported(payload.model, props)) {
         props.apiService.callQueryText(payload).then((response) => {
           unstable_batchedUpdates(() => {
             handleResponse(response);
@@ -714,8 +725,9 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
       ? `.${styles.customPanel} div[id='${chatMessageId}']`
       : `div[id='${chatMessageId}']`;
 
+    const inputText = HtmlHelper.stripHtml(r.content);
     const getAudio =
-      isAi && props.voiceOutput && ChatHelper.supportsTextToSpeech(props)
+      isAi && props.voiceOutput && ChatHelper.supportsTextToSpeech(props) //&& inputText.length <= 4096 // tts input supports max 4096 chars
         ? (text: string) => new SpeechService(props.apiService).callTextToSpeech(text)
         : undefined;
 
@@ -804,8 +816,8 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
         <>
           {isAi && props.voiceOutput ? (
             <VoiceOutput
-              querySelector={chatMessageIdSelector}
-              text={HtmlHelper.stripHtml(r.content)}
+              //querySelector={chatMessageIdSelector}
+              text={inputText}
               tooltip={strings.TextVoiceOutput}
               getAudio={getAudio}
             />
```

spfx-latest/src/components/ContentPanelElements.tsx

Lines changed: 34 additions & 10 deletions
```diff
@@ -13,6 +13,7 @@ import { IChatHistory } from 'shared/model/IChat';
 import SessionStorageService from 'shared/services/SessionStorageService';
 import { IChatProps } from './Chat';
 import styles from './Chat.module.scss';
+import { IAzureOpenAiChatProps } from './IAzureOpenAiChatProps';
 import * as Icons from './Icons';
 import Prompts from './Prompts';
 import UploadFiles from './UploadFiles';
@@ -86,7 +87,7 @@ export default class ContentPanelElements {
     const isVisionSupported = props.vision;
     const isPdfSupported = true;
     // Upload button should be visible only if Enable integrations is turned on in web part settings.
-    return props.functions && (isVisionSupported || isPdfSupported) ? (
+    return isVisionSupported || isPdfSupported ? (
       <>
         {getSimpleDialog(strings.TextUpload, strings.TextUploadFiles, showUploadDialog, setShowUploadDialog, [
           <UploadFiles
@@ -127,20 +128,31 @@ export default class ContentPanelElements {
     return star ? Icons.getStarIcon() : Icons.getLighteningIcon();
   }
 
+  private getModelText(languageModel: string, defaultModelText?: string) {
+    const lcLanguageModel = languageModel.toLocaleLowerCase();
+    const modelTexts = {
+      '4o': strings.TextGpt4o,
+      '4o-mini': strings.TextGpt4oMini,
+      'o1-mini': strings.TextO1Mini,
+      'o1-preview': strings.TextO1Preview,
+      o1: strings.TextO1,
+      'o3-mini': strings.TextO3Mini,
+    };
+    for (const key of Object.keys(modelTexts)) {
+      if (lcLanguageModel.endsWith(key)) return modelTexts[key];
+    }
+    return defaultModelText ?? languageModel;
+  }
+
   private getLanguageModelText(languageModel: string, isGpt3: boolean, isGpt4: boolean, isGpt4Turbo: boolean): string {
     if (isGpt3) {
       return strings.TextGpt35;
     } else if (isGpt4Turbo) {
       return strings.TextGpt4Turbo;
     } else if (isGpt4) {
-      if (languageModel.toLocaleLowerCase().endsWith('4o')) {
-        return strings.TextGpt4o;
-      } else if (languageModel.toLocaleLowerCase().endsWith('4o-mini')) {
-        return strings.TextGpt4oMini;
-      }
-      return strings.TextGpt4;
+      return this.getModelText(languageModel, strings.TextGpt4);
     } else {
-      return languageModel;
+      return this.getModelText(languageModel);
     }
   }
 
@@ -199,11 +211,23 @@
     chatHistory: IChatHistory[],
     isCustomPanelOpen: boolean,
     rows: JSX.Element[],
-    isProgress: boolean
+    isProgress: boolean,
+    contentProps: IAzureOpenAiChatProps,
+    model: string
   ): JSX.Element {
     const props = this.props;
 
     const noUpperLanguageSelector = !props.promptAtBottom && !(props.languageModels?.length > 1);
+    let shimmerInformationalHeader: React.ReactNode;
+    if (contentProps.streaming && !ChatHelper.isStreamingSupported(model, contentProps)) {
+      shimmerInformationalHeader = (
+        <div>
+          <strong>
+            {model}: {strings.TextStreamingUnsupported}
+          </strong>
+        </div>
+      );
+    }
     return (
       <div
         ref={refContentPane}
@@ -218,7 +242,7 @@
       >
         <div className={styles.responseRowsContainer}>
           {rows}
-          {isProgress && <CustomShimmer />}
+          {isProgress && <CustomShimmer header={shimmerInformationalHeader} />}
         </div>
       </div>
     );
```
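The new getModelText helper above resolves a display name by testing the deployment name against a fixed list of suffixes in declaration order. A small self-contained sketch of that lookup, with the display strings hard-coded here instead of being read from the localized strings resource:

```typescript
// Sketch of the suffix lookup performed by getModelText (display strings hard-coded for illustration).
const modelTexts: Record<string, string> = {
  '4o': 'GPT-4o',
  '4o-mini': 'GPT-4o Mini',
  'o1-mini': 'O1 Mini',
  'o1-preview': 'O1 Preview',
  o1: 'O1',
  'o3-mini': 'O3 Mini',
};

function getModelText(languageModel: string, defaultModelText?: string): string {
  const lc = languageModel.toLocaleLowerCase();
  for (const key of Object.keys(modelTexts)) {
    if (lc.endsWith(key)) return modelTexts[key];
  }
  return defaultModelText ?? languageModel;
}

console.log(getModelText('gpt-4o'));        // "GPT-4o"
console.log(getModelText('gpt-4o-mini'));   // "GPT-4o Mini" ('4o' is skipped because the name does not end with it)
console.log(getModelText('o3-mini'));       // "O3 Mini"
console.log(getModelText('o1-2024-12-17')); // no suffix matches, so the raw deployment name is returned
```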

spfx-latest/src/helpers/ChatHelper.ts

Lines changed: 32 additions & 7 deletions
```diff
@@ -34,29 +34,33 @@ export default class ChatHelper {
     const largeContentDeduction = 1500;
 
     let returnValue = Math.floor((4 * 1024 - responseTokens) * averageCharsPerToken);
-    if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model)) {
+    if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model) || /o1-mini|o1-preview/i.test(model)) {
       returnValue = Math.floor((128 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
     } else if (/32k/i.test(model)) {
       returnValue = Math.floor((32 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
     } else if (/16k|-1106/i.test(model)) {
       returnValue = Math.floor((16 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
     } else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
       returnValue = Math.floor((8 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
+    } else if (/o\d/i.test(model)) {
+      returnValue = Math.floor((200 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
     }
     return returnValue - 200; // 200 extra chars reserved for service needs (redundancy).
   }
 
   public static maxRequestLength(model: string, responseTokens: number, chatHistoryLength: number): number {
     // maxRequestLength = max allowed number of characters in the prompt.
-    let maxCharacters = 4000; // GPT-35-turbo, 4k
-    if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model)) {
-      maxCharacters = 125000; // ~ (128 * 1024 * 3.6) / 3.75 long questions - answers.
+    let maxCharacters = 4_000; // GPT-35-turbo, 4k
+    if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model) || /o1-mini|o1-preview/i.test(model)) {
+      maxCharacters = 125_000; // ~ (128 * 1024 * 3.6) / 3.75 long questions - answers.
     } else if (/32k/i.test(model)) {
-      maxCharacters = 30000; // ~ (32 * 1024 * 3.6) / 3.75 long questions - answers.
+      maxCharacters = 30_000; // ~ (32 * 1024 * 3.6) / 3.75 long questions - answers.
     } else if (/16k|-1106/i.test(model)) {
-      maxCharacters = 15000; // ~ (16 * 1024 * 3.6) / 3.75 long questions - answers.
+      maxCharacters = 15_000; // ~ (16 * 1024 * 3.6) / 3.75 long questions - answers.
     } else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
-      maxCharacters = 7500; // ~ (8 * 1024 * 3.6) / 3.75 long questions - answers.
+      maxCharacters = 7_500; // ~ (8 * 1024 * 3.6) / 3.75 long questions - answers.
+    } else if (/o\d/i.test(model)) {
+      maxCharacters = 195_000; // ~ (200 * 1024 * 3.6) / 3.75 long questions - answers.
     }
     const maxLength = this.maxContentLength(model, responseTokens);
     const allowedLength = maxLength - chatHistoryLength;
```
```diff
@@ -74,6 +78,12 @@ export default class ChatHelper {
       returnValue = defaultResponseTokens;
     } else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
       returnValue = defaultResponseTokens;
+    } else if (/o\d-mini/i.test(model)) {
+      returnValue = 65_536;
+    } else if (/o1-preview/i.test(model)) {
+      returnValue = 32_768;
+    } else if (/o\d/i.test(model)) {
+      returnValue = 100_000;
     }
     return returnValue;
   }
```
```diff
@@ -137,6 +147,21 @@ export default class ChatHelper {
     }
   }
 
+  public static isStreamingSupported = (model: string, props: IAzureOpenAiChatProps) => {
+    // As of February 2025, the Azure OpenAI model o1-mini did not support streaming or function calling.
+    // - At the same time, the native OpenAI models o1-mini, o3-mini and o1-preview supported streaming but not function calling.
+    // - The full-scale native OpenAI models o1 and o1-2024-12-17 supported neither streaming nor function calling.
+    // Attempts to use streaming output with these models threw errors. Note that this behaviour may change later.
+    if (/^o\d$|o\d-\d{4}-\d{2}-\d{2}/i.test(model?.toLocaleLowerCase())) return false;
+
+    return (
+      props.streaming &&
+      (!/o\d/.test(model) ||
+        props.apiService?.isNative(props.endpointBaseUrlForOpenAi) ||
+        props.apiService?.isOpenAiNativeUrl(props.endpointBaseUrlForOpenAi))
+    );
+  };
+
   public static formatDate(date: string | Date, locale: string): string {
     if (typeof date === 'string') date = new Date(date);
     return new Date().getFullYear() !== date.getFullYear()
```
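The net effect of the isStreamingSupported guard can be summarised with a standalone sketch. This is a simplification rather than the web part's code: isNativeOpenAiEndpoint below stands in for the two apiService checks (isNative and isOpenAiNativeUrl) shown above.

```typescript
// Simplified sketch of the streaming decision in ChatHelper.isStreamingSupported.
// isNativeOpenAiEndpoint stands in for apiService.isNative(...) || apiService.isOpenAiNativeUrl(...).
function streamingSupported(model: string, streamingEnabled: boolean, isNativeOpenAiEndpoint: boolean): boolean {
  // Full-scale o-models (o1, o1-2024-12-17, ...) never stream in this release.
  if (/^o\d$|o\d-\d{4}-\d{2}-\d{2}/i.test(model.toLocaleLowerCase())) return false;
  // Other o-models stream only against a native OpenAI endpoint; all remaining models follow the setting.
  return streamingEnabled && (!/o\d/.test(model) || isNativeOpenAiEndpoint);
}

console.assert(streamingSupported('gpt-4o', true, false) === true);        // unaffected by the exemption
console.assert(streamingSupported('o1', true, true) === false);            // full-scale o1: never streamed
console.assert(streamingSupported('o1-2024-12-17', true, true) === false); // dated full-scale variant: never streamed
console.assert(streamingSupported('o3-mini', true, false) === false);      // Azure OpenAI deployment: falls back to non-streamed calls
console.assert(streamingSupported('o3-mini', true, true) === true);        // native OpenAI endpoint: streaming allowed
```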

spfx-latest/src/loc/en-us.js

Lines changed: 5 additions & 0 deletions
```diff
@@ -73,6 +73,10 @@ define([], function () {
     TextGpt4o: 'GPT-4o',
     TextGpt4oMini: 'GPT-4o Mini',
     TextGpt4Turbo: 'GPT-4 Turbo',
+    TextO1Mini: 'O1 Mini',
+    TextO1Preview: 'O1 Preview',
+    TextO1: 'O1',
+    TextO3Mini: 'O3 Mini',
     TextHideMySharedChats: 'Hide my chats',
     TextInvalidListUrl: 'Invalid list URL. List with the same name already exists at the site',
     TextLanguage: 'Language',
@@ -102,6 +106,7 @@ define([], function () {
     TextSharedChats: 'Shared chats',
     TextShareWith: 'Specific people to share with (if you want to limit access to this chat, max 15 persons)',
     TextStop: 'Stop',
+    TextStreamingUnsupported: 'streaming option is unsupported for this model',
     TextSubmit: 'Submit',
     TextSummarizePdf: 'Summarise PDF content',
     TextUndeterminedError: 'Unexpected error',
```

spfx-latest/src/loc/fi-fi.js

Lines changed: 5 additions & 0 deletions
```diff
@@ -72,6 +72,10 @@ define([], function () {
     TextGpt4o: 'GPT-4o',
     TextGpt4oMini: 'GPT-4o Mini',
     TextGpt4Turbo: 'GPT-4 Turbo',
+    TextO1Mini: 'O1 Mini',
+    TextO1Preview: 'O1 Preview',
+    TextO1: 'O1',
+    TextO3Mini: 'O3 Mini',
     TextHideMySharedChats: 'Piilota chattini',
     TextInvalidListUrl: 'Virheellinen luettelon URL-osoite. Luettelo samalla nimella on jo olemassa sivustolla',
     TextLanguage: 'Kieli',
@@ -101,6 +105,7 @@ define([], function () {
     TextSharedChats: 'Jaetut chatit',
     TextShareWith: 'Tietyt ihmiset, joiden kanssa jakaa (jos haluat rajoittaa pääsyä tähän keskusteluun, enintään 15 henkilöä)',
     TextStop: 'Lopeta',
+    TextStreamingUnsupported: 'suoratoisto-ominaisuutta ei tueta tässä mallissa',
     TextSubmit: 'Lähetä',
     TextSummarizePdf: 'Tee yhteenveto PDF-sisällöstä',
     TextUndeterminedError: 'Odottamaton virhe',
```
