refactor: ollama generation

Azgaar 2025-06-14 15:20:01 +02:00
parent fe2fa6d6b8
commit bba3587e50
3 changed files with 81 additions and 254 deletions


@@ -10,7 +10,7 @@ const PROVIDERS = {
generate: generateWithAnthropic
},
ollama: {
keyLink: "https://ollama.com/library",
keyLink: "https://github.com/Azgaar/Fantasy-Map-Generator/wiki/Ollama-text-generation",
generate: generateWithOllama
}
};
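Taken together, PROVIDERS supplies each backend's help link and generate function, and the MODELS map in the next hunk resolves a dropdown entry to its provider key. A minimal sketch of how the two maps are combined later in this file (all argument values here are placeholders):

  const model = "ollama (local models)";
  const provider = MODELS[model]; // -> "ollama"
  await PROVIDERS[provider].generate({
    key: "llama3", // for Ollama the key field carries the local model name
    model,
    prompt: "Describe this kingdom in two sentences", // placeholder prompt
    temperature: 1,
    onContent: chunk => console.log(chunk)
  });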
@@ -27,15 +27,11 @@ const MODELS = {
"claude-3-5-haiku-latest": "anthropic",
"claude-3-5-sonnet-latest": "anthropic",
"claude-3-opus-latest": "anthropic",
"Ollama (enter model in key field)": "ollama"
"ollama (local models)": "ollama"
};
const SYSTEM_MESSAGE = "I'm working on my fantasy map.";
if (typeof modules.generateWithAi_setupDone === 'undefined') {
modules.generateWithAi_setupDone = false;
}
async function generateWithOpenAI({key, model, prompt, temperature, onContent}) {
const headers = {
"Content-Type": "application/json",
@@ -58,7 +54,7 @@ async function generateWithOpenAI({key, model, prompt, temperature, onContent})
if (content) onContent(content);
};
await handleStream(response, getContent, "openai");
await handleStream(response, getContent);
}
async function generateWithAnthropic({key, model, prompt, temperature, onContent}) {
@@ -82,59 +78,38 @@ async function generateWithAnthropic({key, model, prompt, temperature, onContent
if (content) onContent(content);
};
await handleStream(response, getContent, "anthropic");
await handleStream(response, getContent);
}
async function generateWithOllama({key, model, prompt, temperature, onContent}) {
// For Ollama, 'key' is the actual model name entered by the user.
// 'model' is the value from the dropdown, e.g., "Ollama (enter model in key field)".
const ollamaModelName = key;
const headers = {
"Content-Type": "application/json"
};
const body = {
model: ollamaModelName,
prompt: prompt,
system: SYSTEM_MESSAGE,
options: {
temperature: temperature
},
stream: true
};
const ollamaModelName = key; // for Ollama, 'key' is the actual model name entered by the user
const response = await fetch("http://localhost:11434/api/generate", {
method: "POST",
headers,
body: JSON.stringify(body)
headers: {"Content-Type": "application/json"},
body: JSON.stringify({
model: ollamaModelName,
prompt,
system: SYSTEM_MESSAGE,
options: {temperature},
stream: true
})
});
const getContent = json => {
// Ollama streams JSON objects with a "response" field for content
// and "done": true in the final message (which might have an empty response).
if (json.response) {
onContent(json.response);
}
if (json.response) onContent(json.response);
};
await handleStream(response, getContent, "ollama");
await handleStream(response, getContent);
}
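As the comment above says, /api/generate streams newline-delimited JSON rather than SSE. A response stream looks roughly like this (field values are illustrative); only the "response" field is forwarded to onContent, and the final object carries "done": true with an empty response, which is why getContent can ignore it:

  {"model":"llama3","created_at":"2025-06-14T13:20:01Z","response":"Once","done":false}
  {"model":"llama3","created_at":"2025-06-14T13:20:01Z","response":" upon a time","done":false}
  {"model":"llama3","created_at":"2025-06-14T13:20:02Z","response":"","done":true}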
async function handleStream(response, getContent, providerType) {
async function handleStream(response, getContent) {
if (!response.ok) {
let errorMessage = `Failed to generate (${response.status} ${response.statusText})`;
try {
const json = await response.json();
if (providerType === "ollama" && json?.error) {
errorMessage = json.error;
} else {
errorMessage = json?.error?.message || json?.error || `Failed to generate (${response.status} ${response.statusText})`;
}
} catch (e) {
ERROR && console.error("Failed to parse error response JSON:", e)
}
errorMessage = json.error?.message || json.error || errorMessage;
} catch {}
throw new Error(errorMessage);
}
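Collapsing the per-provider branches into json.error?.message || json.error works because the providers disagree only on the shape of the error payload: OpenAI and Anthropic wrap the message in an object, while Ollama returns a bare string. Roughly (the exact wording of the messages varies by provider and is only illustrative):

  {"error": {"message": "invalid api key", "type": "authentication_error"}}  // OpenAI/Anthropic-style
  {"error": "model 'llama3' not found, try pulling it first"}                // Ollama-style

In the first case optional chaining picks up error.message; in the second the || fallback returns the string itself.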
@@ -151,24 +126,14 @@ async function handleStream(response, getContent, providerType) {
for (let i = 0; i < lines.length - 1; i++) {
const line = lines[i].trim();
if (providerType === "ollama") {
if (line) {
try {
const json = JSON.parse(line);
getContent(json);
} catch (jsonError) {
ERROR && console.error(`Failed to parse JSON from Ollama:`, jsonError, `Line: ${line}`);
}
}
} else {
if (line.startsWith("data: ") && line !== "data: [DONE]") {
try {
const json = JSON.parse(line.slice(6));
getContent(json);
} catch (jsonError) {
ERROR && console.error(`Failed to parse JSON:`, jsonError, `Line: ${line}`);
}
}
if (!line) continue;
if (line === "data: [DONE]") break;
try {
const parsed = line.startsWith("data: ") ? JSON.parse(line.slice(6)) : JSON.parse(line);
getContent(parsed);
} catch (error) {
ERROR && console.error("Failed to parse line:", line, error);
}
}
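With the providerType flag gone, every buffered line goes through the same path: the "data: [DONE]" sentinel ends the loop, SSE-framed lines have their "data: " prefix stripped before JSON.parse, and Ollama's bare NDJSON lines are parsed as-is. Typical lines from either source (content abbreviated, shapes are approximate):

  data: {"choices":[{"delta":{"content":"Hello"}}]}   // OpenAI-style SSE frame
  data: [DONE]                                        // SSE terminator
  {"response":"Hello","done":false}                   // Ollama NDJSON line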
@@ -177,59 +142,65 @@ async function handleStream(response, getContent, providerType) {
}
function generateWithAi(defaultPrompt, onApply) {
updateValues();
function updateDialogElements() {
$("#aiGenerator").dialog({
title: "AI Text Generator",
position: {my: "center", at: "center", of: "svg"},
resizable: false,
buttons: {
Generate: function (e) {
generate(e.target);
},
Apply: function () {
const result = byId("aiGeneratorResult").value;
if (!result) return tip("No result to apply", true, "error", 4000);
onApply(result);
$(this).dialog("close");
},
Close: function () {
$(this).dialog("close");
}
}
});
if (modules.generateWithAi) return;
modules.generateWithAi = true;
byId("aiGeneratorKeyHelp").on("click", function (e) {
const model = byId("aiGeneratorModel").value;
const provider = MODELS[model];
openURL(PROVIDERS[provider].keyLink);
});
function updateValues() {
byId("aiGeneratorResult").value = "";
byId("aiGeneratorPrompt").value = defaultPrompt;
byId("aiGeneratorTemperature").value = localStorage.getItem("fmg-ai-temperature") || "1";
const select = byId("aiGeneratorModel");
const currentModelVal = select.value;
select.options.length = 0;
Object.keys(MODELS).forEach(model => select.options.add(new Option(model, model)));
const storedModel = localStorage.getItem("fmg-ai-model");
if (storedModel && MODELS[storedModel]) {
select.value = storedModel;
} else if (currentModelVal && MODELS[currentModelVal]) {
select.value = currentModelVal;
} else {
select.value = DEFAULT_MODEL;
}
if (!select.value || !MODELS[select.value]) select.value = DEFAULT_MODEL;
select.value = localStorage.getItem("fmg-ai-model");
if (!select.value || !MODELS[select.value]) select.value = DEFAULT_MODEL;
const provider = MODELS[select.value];
const keyInput = byId("aiGeneratorKey");
if (keyInput) {
keyInput.value = localStorage.getItem(`fmg-ai-kl-${provider}`) || "";
if (provider === "ollama") {
keyInput.placeholder = "Enter Ollama model name (e.g., llama3)";
} else {
keyInput.placeholder = "Enter API Key";
}
} else {
ERROR && console.error("AI Generator: Could not find 'aiGeneratorKey' element in updateDialogElements.");
}
byId("aiGeneratorKey").value = localStorage.getItem(`fmg-ai-kl-${provider}`) || "";
}
async function doGenerate(button) {
async function generate(button) {
const key = byId("aiGeneratorKey").value;
const modelValue = byId("aiGeneratorModel").value;
const provider = MODELS[modelValue];
if (!key) return tip("Please enter an API key", true, "error", 4000);
if (provider !== "ollama" && !key) {
return tip("Please enter an API key", true, "error", 4000);
}
if (provider === "ollama" && !key) {
return tip("Please enter the Ollama model name in the key field", true, "error", 4000);
}
if (!modelValue) return tip("Please select a model", true, "error", 4000);
localStorage.setItem("fmg-ai-model", modelValue);
const model = byId("aiGeneratorModel").value;
if (!model) return tip("Please select a model", true, "error", 4000);
localStorage.setItem("fmg-ai-model", model);
const provider = MODELS[model];
localStorage.setItem(`fmg-ai-kl-${provider}`, key);
const promptText = byId("aiGeneratorPrompt").value;
if (!promptText) return tip("Please enter a prompt", true, "error", 4000);
const prompt = byId("aiGeneratorPrompt").value;
if (!prompt) return tip("Please enter a prompt", true, "error", 4000);
const temperature = byId("aiGeneratorTemperature").valueAsNumber;
if (isNaN(temperature)) return tip("Temperature must be a number", true, "error", 4000);
@@ -240,83 +211,14 @@ function generateWithAi(defaultPrompt, onApply) {
const resultArea = byId("aiGeneratorResult");
resultArea.disabled = true;
resultArea.value = "";
const onContentCallback = content => (resultArea.value += content);
const onContent = content => (resultArea.value += content);
await PROVIDERS[provider].generate({key: key, model: modelValue, prompt: promptText, temperature, onContent: onContentCallback});
await PROVIDERS[provider].generate({key, model, prompt, temperature, onContent});
} catch (error) {
tip(error.message, true, "error", 4000);
return tip(error.message, true, "error", 4000);
} finally {
button.disabled = false;
byId("aiGeneratorResult").disabled = false;
}
}
$("#aiGenerator").dialog({
title: "AI Text Generator",
position: {my: "center", at: "center", of: "svg"},
resizable: false,
width: Math.min(600, window.innerWidth - 20),
modal: true,
open: function() {
if (!modules.generateWithAi_setupDone) {
const keyHelpButton = byId("aiGeneratorKeyHelp");
if (keyHelpButton) {
keyHelpButton.addEventListener("click", function () {
const modelValue = byId("aiGeneratorModel").value;
const provider = MODELS[modelValue];
if (provider === "ollama") {
openURL(PROVIDERS.ollama.keyLink);
} else if (provider && PROVIDERS[provider] && PROVIDERS[provider].keyLink) {
openURL(PROVIDERS[provider].keyLink);
}
});
} else {
ERROR && console.error("AI Generator: Could not find 'aiGeneratorKeyHelp' element for event listener.");
}
const modelSelect = byId("aiGeneratorModel");
if (modelSelect) {
modelSelect.addEventListener("change", function() {
const newModelValue = this.value;
const newProvider = MODELS[newModelValue];
const keyInput = byId("aiGeneratorKey");
if (keyInput) {
if (newProvider === "ollama") {
keyInput.placeholder = "Enter Ollama model name (e.g., llama3)";
} else {
keyInput.placeholder = "Enter API Key";
}
keyInput.value = localStorage.getItem(`fmg-ai-kl-${newProvider}`) || "";
} else {
ERROR && console.error("AI Generator: Could not find 'aiGeneratorKey' element during model change listener.");
}
});
} else {
ERROR && console.error("AI Generator: Could not find 'aiGeneratorModel' element for event listener.");
}
modules.generateWithAi_setupDone = true;
}
updateDialogElements();
},
buttons: {
"Generate": function (e) {
doGenerate(e.currentTarget || e.target);
},
"Apply": function () {
const result = byId("aiGeneratorResult").value;
if (!result) return tip("No result to apply", true, "error", 4000);
onApply(result);
$(this).dialog("close");
},
"Close": function () {
$(this).dialog("close");
}
}
});
}
window.generateWithAi = generateWithAi;
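Since generateWithAi is exposed on window, a caller passes the default prompt and an onApply callback that receives the generated text once the user clicks Apply. A minimal, hypothetical call site (the prompt text and element id are placeholders, not taken from this commit):

  generateWithAi("Write a short description of the selected state.", text => {
    // runs when the user clicks Apply in the dialog
    byId("stateDescription").value = text; // "stateDescription" is a placeholder element id
  });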