
Commit b62afdd

fix: azure openai api
Parent: 1c3df14

File tree: 3 files changed (36 additions, 13 deletions)


package-lock.json

Lines changed: 9 additions & 0 deletions
Some generated files are not rendered by default.

package.json

Lines changed: 1 addition & 0 deletions
@@ -22,6 +22,7 @@
     "@nem035/gpt-3-encoder": "^1.1.7",
     "@picocss/pico": "^1.5.7",
     "@primer/octicons-react": "^18.2.0",
+    "@vespaiach/axios-fetch-adapter": "^0.3.1",
     "azure-openai": "^0.9.4",
     "countries-list": "^2.6.1",
     "eventsource-parser": "^0.1.0",

src/background/apis/azure-openai-api.mjs

Lines changed: 26 additions & 13 deletions
@@ -2,6 +2,7 @@ import { Configuration, OpenAIApi } from 'azure-openai'
 import { getUserConfig, maxResponseTokenLength } from '../../config/index.mjs'
 import { getChatSystemPromptBase, pushRecord, setAbortController } from './shared.mjs'
 import { getConversationPairs } from '../../utils/get-conversation-pairs'
+import fetchAdapter from '@vespaiach/axios-fetch-adapter'

 /**
  * @param {Runtime.Port} port
@@ -27,7 +28,6 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
     }),
   )

-  let answer = ''
   const response = await openAiApi
     .createChatCompletion(
       {
@@ -38,37 +38,50 @@ export async function generateAnswersWithAzureOpenaiApi(port, question, session)
       {
         signal: controller.signal,
         responseType: 'stream',
+        adapter: fetchAdapter,
       },
     )
     .catch((err) => {
       port.onMessage.removeListener(messageListener)
       throw err
     })
+
+  let chunkData = ''
+  const step = 1500
+  let length = 0
   for await (const chunk of response.data) {
-    const lines = chunk
+    chunkData += chunk
+    length += 1
+    if (length % step !== 0 && !chunkData.endsWith('[DONE]')) continue
+
+    const lines = chunkData
       .toString('utf8')
       .split('\n')
       .filter((line) => line.trim().startsWith('data: '))

+    let answer = ''
+    let message = ''
+    let data
     for (const line of lines) {
-      const message = line.replace(/^data: /, '')
-      console.debug('sse message', message)
-      if (message === '[DONE]') {
-        pushRecord(session, question, answer)
-        console.debug('conversation history', { content: session.conversationRecords })
-        port.postMessage({ answer: null, done: true, session: session })
-        break
-      }
-      let data
+      message = line.replace(/^data: /, '')
       try {
         data = JSON.parse(message)
       } catch (error) {
-        console.debug('json error', error)
         continue
       }
-      answer += data.choices[0].text
+      if ('content' in data.choices[0].delta) answer += data.choices[0].delta.content
+    }
+    if (data) {
+      console.debug('sse message', data)
       port.postMessage({ answer: answer, done: false, session: null })
     }
+    if (message === '[DONE]') {
+      console.debug('sse message', '[DONE]')
+      pushRecord(session, question, answer)
+      console.debug('conversation history', { content: session.conversationRecords })
+      port.postMessage({ answer: null, done: true, session: session })
+      break
+    }
   }

   port.onMessage.removeListener(messageListener)
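For reference, a standalone sketch of the parsing approach the new code takes: raw SSE text is buffered across chunks, the data: lines are extracted, and the chat-completion delta.content pieces are concatenated into the running answer. This is a simplified illustration rather than the repository's function; extractAnswer and the sample payload are hypothetical.

// sse-parse-sketch.mjs — hypothetical helper mirroring the commit's parsing logic.
export function extractAnswer(chunkData) {
  // Keep only SSE data lines; partial JSON from an incomplete chunk is skipped below.
  const lines = chunkData
    .toString('utf8')
    .split('\n')
    .filter((line) => line.trim().startsWith('data: '))

  let answer = ''
  let done = false
  for (const line of lines) {
    const message = line.replace(/^data: /, '')
    if (message.trim() === '[DONE]') {
      done = true
      break
    }
    let data
    try {
      data = JSON.parse(message)
    } catch {
      continue // incomplete JSON; wait for more chunks
    }
    if ('content' in data.choices[0].delta) answer += data.choices[0].delta.content
  }
  return { answer, done }
}

// Example with a hand-made stream payload:
const sample =
  'data: {"choices":[{"delta":{"content":"Hello"}}]}\n' +
  'data: {"choices":[{"delta":{"content":" world"}}]}\n' +
  'data: [DONE]\n'
console.log(extractAnswer(sample)) // -> { answer: 'Hello world', done: true }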
