refactor: clean up API calls

Aesli 2024-11-12 14:39:11 -06:00
parent 208607eb0c
commit a626f939ca

@@ -102,8 +102,20 @@ function generateWithAi(defaultPrompt, onApply) {
resultArea.disabled = true;
if (model.includes("claude")) {
const baseUrl = "https://api.anthropic.com/v1/messages";
await generateWithClaude(key, model, prompt, temperature, resultArea);
} else {
await generateWithGPT(key, model, prompt, temperature, resultArea);
}
} catch (error) {
return tip(error.message, true, "error", 4000);
} finally {
button.disabled = false;
byId("aiGeneratorResult").disabled = false;
}
}
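// Streams a completion from the Anthropic Messages API into resultArea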
async function generateWithClaude(key, model, prompt, temperature, resultArea) {
const baseUrl = "https://api.anthropic.com/v1/messages";
const response = await fetch(baseUrl, {
method: "POST",
headers: {
@@ -116,7 +128,8 @@ function generateWithAi(defaultPrompt, onApply) {
model,
messages: [{role: "user", content: prompt}],
stream: true,
max_tokens: 4096
max_tokens: 4096,
temperature
})
});
@@ -125,33 +138,10 @@ function generateWithAi(defaultPrompt, onApply) {
throw new Error(json?.error?.message || "Failed to generate with Claude");
}
const reader = response.body.getReader();
const decoder = new TextDecoder("utf-8");
let buffer = "";
while (true) {
const {done, value} = await reader.read();
if (done) break;
buffer += decoder.decode(value, {stream: true});
const lines = buffer.split("\n");
for (let i = 0; i < lines.length - 1; i++) {
const line = lines[i].trim();
if (line.startsWith("data: ")) {
try {
const jsonData = JSON.parse(line.slice(6));
const content = jsonData.delta?.text;
if (content) resultArea.value += content;
} catch (jsonError) {
console.warn("Failed to parse Claude JSON:", jsonError, "Line:", line);
}
}
await handleStream(response, resultArea, true);
}
buffer = lines[lines.length - 1];
}
} else {
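// Streams a completion from the OpenAI Chat Completions API into resultArea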
async function generateWithGPT(key, model, prompt, temperature, resultArea) {
const response = await fetch("https://api.openai.com/v1/chat/completions", {
method: "POST",
headers: {
@@ -164,16 +154,20 @@ function generateWithAi(defaultPrompt, onApply) {
{role: "system", content: SYSTEM_MESSAGE},
{role: "user", content: prompt}
],
temperature: 1.2,
temperature,
stream: true
})
});
if (!response.ok) {
const json = await response.json();
throw new Error(json?.error?.message || "Failed to generate");
throw new Error(json?.error?.message || "Failed to generate with GPT");
}
await handleStream(response, resultArea, false);
}
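// Reads the SSE response body chunk by chunk and appends each parsed text delta to resultArea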
async function handleStream(response, resultArea, isClaude) {
const reader = response.body.getReader();
const decoder = new TextDecoder("utf-8");
let buffer = "";
@@ -187,13 +181,21 @@ function generateWithAi(defaultPrompt, onApply) {
for (let i = 0; i < lines.length - 1; i++) {
const line = lines[i].trim();
if (line.startsWith("data: ") && line !== "data: [DONE]") {
if (line.startsWith("data: ") && (!isClaude && line !== "data: [DONE]")) {
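// both providers emit "data: {json}" lines; parse each one and pull out the text delta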
try {
const jsonData = JSON.parse(line.slice(6));
const content = jsonData.choices[0].delta.content;
const content = isClaude
? jsonData.delta?.text
: jsonData.choices[0].delta.content;
if (content) resultArea.value += content;
} catch (jsonError) {
console.warn("Failed to parse JSON:", jsonError, "Line:", line);
console.warn(
`Failed to parse ${isClaude ? "Claude" : "OpenAI"} JSON:`,
jsonError,
"Line:",
line
);
}
}
}
@@ -201,11 +203,4 @@ function generateWithAi(defaultPrompt, onApply) {
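// carry the last, possibly incomplete line over to the next chunk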
buffer = lines[lines.length - 1];
}
}
} catch (error) {
return tip(error.message, true, "error", 4000);
} finally {
button.disabled = false;
byId("aiGeneratorResult").disabled = false;
}
}
}