Temperature parameter can be customized

Dyxang 2024-11-04 20:46:37 +08:00
parent 91dc16878e
commit 8b6566ac59
2 changed files with 20 additions and 2 deletions
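
In short, the commit lets the user set the sampling temperature for the AI text generator: the value is read from a new input in the dialog, validated against the 0–2 range the OpenAI API accepts, remembered in localStorage, and passed through to the chat completions request instead of the previously hard-coded 1.2. A minimal sketch of that flow, reusing the byId and tip helpers and the "fmg-ai-temperature" storage key visible in the diff below (the rest is illustrative, not the project's exact code):

// Sketch only: mirrors the flow introduced by this commit.
function readTemperature() {
  // read the raw value from the new aiGeneratorTemperature input
  const temperature = parseFloat(byId("aiGeneratorTemperature").value);

  // the OpenAI chat API accepts temperatures between 0 and 2
  if (isNaN(temperature) || temperature < 0 || temperature > 2) {
    tip("Temperature must be a number between 0 and 2", true, "error", 4000);
    return null;
  }

  // persist the choice so it is restored the next time the dialog opens
  localStorage.setItem("fmg-ai-temperature", temperature.toString());
  return temperature;
}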


@@ -4951,7 +4951,18 @@
>Model:
<select id="aiGeneratorModel"></select>
</label>
<label for="aiGeneratorTemperature"
>Temperature:
<input id="aiGeneratorTemperature" placeholder="1.2" class="icon-key" />
<a
href="https://platform.openai.com/docs/api-reference/chat/create#chat-create-temperature"
target="_blank"
rel="noreferrer"
class="icon-help-circled"
style="text-decoration: none"
data-tip="Between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic."
></a>
</label>
<label for="aiGeneratorKey"
>Key:
<input id="aiGeneratorKey" placeholder="Enter OpenAI API key" class="icon-key" />


@@ -33,6 +33,7 @@ function geneateWithAi(defaultPrompt, onApply) {
byId("aiGeneratorResult").value = "";
byId("aiGeneratorPrompt").value = defaultPrompt;
byId("aiGeneratorKey").value = localStorage.getItem("fmg-ai-kl") || "";
byId("aiGeneratorTemperature").value = localStorage.getItem("fmg-ai-temperature") || "1.2";
const select = byId("aiGeneratorModel");
select.options.length = 0;
@@ -52,6 +53,12 @@ function geneateWithAi(defaultPrompt, onApply) {
const prompt = byId("aiGeneratorPrompt").value;
if (!prompt) return tip("Please enter a prompt", true, "error", 4000);
const temperature = parseFloat(byId("aiGeneratorTemperature").value);
if (isNaN(temperature) || temperature < 0 || temperature > 2) {
return tip("Temperature must be a number between 0 and 2", true, "error", 4000);
}
localStorage.setItem("fmg-ai-temperature", temperature.toString());
try {
button.disabled = true;
const resultArea = byId("aiGeneratorResult");
@@ -70,7 +77,7 @@ function geneateWithAi(defaultPrompt, onApply) {
{role: "system", content: SYSTEM_MESSAGE},
{role: "user", content: prompt}
],
temperature: 1.2,
temperature: temperature,
stream: true // Enable streaming
})
});
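
The last hunk only shows the fragment of the request body where the hard-coded 1.2 is replaced by the validated value. For context, a hedged reconstruction of what the surrounding streaming request likely looks like: the messages, temperature, and stream fields appear in the diff, while the endpoint, headers, and the model/key variables are assumptions rather than lines copied from the file.

// Assumed shape of the request edited by the hunk above; only the fields
// shown in the diff are confirmed, the rest is illustrative.
const response = await fetch("https://api.openai.com/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${key}` // key presumably read from the aiGeneratorKey input
  },
  body: JSON.stringify({
    model, // presumably the value of the aiGeneratorModel select
    messages: [
      {role: "system", content: SYSTEM_MESSAGE},
      {role: "user", content: prompt}
    ],
    temperature, // user-supplied value, validated to be within [0, 2]
    stream: true // stream tokens into the result area as they arrive
  })
});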