feat(Index.tsx): enable Ollama configuration via settings and state variables

Date: 2025-08-29 12:44:33 +02:00
parent 571f333719
commit 482012c0fb


@@ -49,9 +49,7 @@ const NOTE_INDEX = 'notes';
const SCRATCH_INDEX = 'scratch';
const SETTINGS_INDEX = 'settings';
-// Ollama configuration
-const OLLAMA_ENDPOINT = 'http://localhost:11434';
-const OLLAMA_MODEL = 'gemma3:4b-it-qat';
+// Ollama configuration - now using state variables
const meiliHeaders = {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
@@ -96,13 +94,17 @@ const Index = () => {
const [debugInfo, setDebugInfo] = useState<string[]>([]);
const [showDebugPanel, setShowDebugPanel] = useState(false);
const [autoGenerateTags, setAutoGenerateTags] = useState(true);
const [ollamaStatus, setOllamaStatus] = useState<'unknown' | 'online' | 'offline'>('unknown');
const [includeTagsInSearch, setIncludeTagsInSearch] = useState(true);
const [tagGenerationTimeout, setTagGenerationTimeout] = useState<NodeJS.Timeout>();
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
-const [systemPrompt, setSystemPrompt] = useState(`You are a helpful assistant that generates searchable tags for journal entries.
-Your task is to analyze the content and generate 1-3 relevant tags that would help the author find this note later.
+const [ollamaEndpoint, setOllamaEndpoint] = useState('http://localhost:11434');
+const [ollamaModel, setOllamaModel] = useState('gemma3:4b-it-qat');
+const [ollamaTemperature, setOllamaTemperature] = useState(0.2);
+const [ollamaKeepAlive, setOllamaKeepAlive] = useState(-1);
+const [systemPrompt, setSystemPrompt] = useState(`You are a helpful assistant that generates searchable tags for journal entries.
+Your task is to analyze the content and generate 1-3 relevant tags that would help the author find this note later.
Focus on the quality of tags, not the quantity.
Focus on:
@@ -114,7 +116,11 @@ Focus on:
Return ONLY a comma-separated list of tags, no other text. Example: golang, testing, array-comparison, cmp-library
-Keep tags concise, use lowercase, and separate words with hyphens if needed.`);
+Keep tags concise, use lowercase, and separate words with hyphens if needed.
+Available variables:
+- $current: The current note content
+- $previous: Previous notes with dates, separated by "--------------------"`);
const [contextSize, setContextSize] = useState(3);
const { resolvedTheme, setTheme } = useTheme();
@@ -242,7 +248,7 @@ Keep tags concise, use lowercase, and separate words with hyphens if needed.`);
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 500);
-const response = await fetch(`${OLLAMA_ENDPOINT}/api/tags`, {
+const response = await fetch(`${ollamaEndpoint}/api/tags`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
@@ -307,42 +313,47 @@ Keep tags concise, use lowercase, and separate words with hyphens if needed.`);
// Generate tags using Ollama
const generateTags = async (content: string, noteIndex?: number): Promise<string[]> => {
try {
// Get context from surrounding notes
-let context = '';
+let previousNotes = '';
if (noteIndex !== undefined && noteCache.length > 0) {
const contextNotes = [];
const start = Math.max(0, noteIndex - contextSize);
const end = Math.min(noteCache.length, noteIndex + contextSize + 1);
for (let i = start; i < end; i++) {
if (i !== noteIndex) {
const note = noteCache[i];
const date = new Date(note.epochTime).toLocaleDateString();
-contextNotes.push(`[${date}] ${note.content.substring(0, 200)}${note.content.length > 200 ? '...' : ''}`);
+contextNotes.push(`[${date}] ${note.content}`);
}
}
if (contextNotes.length > 0) {
-context = `\n\nContext from surrounding notes:\n${contextNotes.join('\n\n')}`;
+previousNotes = contextNotes.join('\n\n--------------------\n\n');
}
}
+// Replace variables in system prompt
+let processedSystemPrompt = systemPrompt
+.replace(/\$current/g, content)
+.replace(/\$previous/g, previousNotes);
const userPrompt = `Generate tags for this journal entry:
-${content}${context}`;
+${content}`;
-const response = await fetchWithTiming(`${OLLAMA_ENDPOINT}/api/generate`, {
+const response = await fetchWithTiming(`${ollamaEndpoint}/api/generate`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
-model: OLLAMA_MODEL,
+model: ollamaModel,
-system: systemPrompt,
+system: processedSystemPrompt,
prompt: userPrompt,
stream: false,
-keep_alive: -1,
+keep_alive: ollamaKeepAlive,
-temperature: 0.2,
+temperature: ollamaTemperature,
}),
}, 'Generate Tags');
@@ -1540,6 +1551,7 @@ ${content}${context}`;
await loadSystemPromptSetting();
await loadContextSizeSetting();
await loadIncludeTagsSetting();
+await loadAllOllamaSettings();
await checkOllamaStatus();
// Retry Ollama connection after a delay if it failed
@@ -1712,6 +1724,122 @@ ${content}${context}`;
}
};
// Load Ollama endpoint setting
const loadOllamaEndpointSetting = async () => {
try {
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify({
q: 'ollamaEndpoint',
filter: 'key = "ollamaEndpoint"',
limit: 1,
}),
}, 'Load Ollama Endpoint Setting');
if (response.ok) {
const data = await response.json();
if (data.hits.length > 0) {
setOllamaEndpoint(data.hits[0].value);
}
}
} catch (error) {
console.error('Error loading Ollama endpoint setting:', error);
}
};
// Load Ollama model setting
const loadOllamaModelSetting = async () => {
try {
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify({
q: 'ollamaModel',
filter: 'key = "ollamaModel"',
limit: 1,
}),
}, 'Load Ollama Model Setting');
if (response.ok) {
const data = await response.json();
if (data.hits.length > 0) {
setOllamaModel(data.hits[0].value);
}
}
} catch (error) {
console.error('Error loading Ollama model setting:', error);
}
};
// Load Ollama temperature setting
const loadOllamaTemperatureSetting = async () => {
try {
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify({
q: 'ollamaTemperature',
filter: 'key = "ollamaTemperature"',
limit: 1,
}),
}, 'Load Ollama Temperature Setting');
if (response.ok) {
const data = await response.json();
if (data.hits.length > 0) {
setOllamaTemperature(Number(data.hits[0].value));
}
}
} catch (error) {
console.error('Error loading Ollama temperature setting:', error);
}
};
// Load Ollama keep_alive setting
const loadOllamaKeepAliveSetting = async () => {
try {
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify({
q: 'ollamaKeepAlive',
filter: 'key = "ollamaKeepAlive"',
limit: 1,
}),
}, 'Load Ollama Keep Alive Setting');
if (response.ok) {
const data = await response.json();
if (data.hits.length > 0) {
setOllamaKeepAlive(Number(data.hits[0].value));
}
}
} catch (error) {
console.error('Error loading Ollama keep alive setting:', error);
}
};
// Load all Ollama settings
const loadAllOllamaSettings = async () => {
await loadOllamaEndpointSetting();
await loadOllamaModelSetting();
await loadOllamaTemperatureSetting();
await loadOllamaKeepAliveSetting();
};
// Save font size setting
const saveFontSizeSetting = async (newFontSize: string) => {
try {
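The four loaders added above share one query shape: search the settings index with a filter on the setting key, take the first hit, and fall back to the in-memory default on any error. A minimal generic sketch of that pattern follows; the loadSettingValue helper and the plain fetch call are illustrative only and are not part of the commit, which uses per-setting functions and fetchWithTiming.

// Illustrative sketch, not part of the commit. Assumes the same MEILISEARCH_ENDPOINT,
// SETTINGS_INDEX and MEILISEARCH_API_KEY constants and the { key, value } documents used above.
const loadSettingValue = async (key: string): Promise<string | undefined> => {
  const response = await fetch(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
    },
    // Same query shape as the loaders above: match the key, keep the first hit.
    body: JSON.stringify({ q: key, filter: `key = "${key}"`, limit: 1 }),
  });
  if (!response.ok) return undefined;
  const data = await response.json();
  return data.hits.length > 0 ? data.hits[0].value : undefined;
};

// Hypothetical usage mirroring loadOllamaTemperatureSetting:
// const temperature = await loadSettingValue('ollamaTemperature');
// if (temperature !== undefined) setOllamaTemperature(Number(temperature));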
@@ -1897,6 +2025,154 @@ ${content}${context}`;
}
};
// Save Ollama endpoint setting
const saveOllamaEndpointSetting = async (endpoint: string) => {
try {
const document = {
key: 'ollamaEndpoint',
value: endpoint,
updatedAt: new Date().getTime(),
};
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify(document),
}, 'Save Ollama Endpoint Setting');
if (response.status !== 202) {
throw new Error('Failed to save Ollama endpoint setting');
}
setOllamaEndpoint(endpoint);
toast({
title: "Ollama endpoint updated",
description: `Endpoint changed to ${endpoint}.`,
});
} catch (error) {
console.error('Error saving Ollama endpoint setting:', error);
toast({
title: "Error",
description: "Failed to save Ollama endpoint setting.",
variant: "destructive",
});
}
};
// Save Ollama model setting
const saveOllamaModelSetting = async (model: string) => {
try {
const document = {
key: 'ollamaModel',
value: model,
updatedAt: new Date().getTime(),
};
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify(document),
}, 'Save Ollama Model Setting');
if (response.status !== 202) {
throw new Error('Failed to save Ollama model setting');
}
setOllamaModel(model);
toast({
title: "Ollama model updated",
description: `Model changed to ${model}.`,
});
} catch (error) {
console.error('Error saving Ollama model setting:', error);
toast({
title: "Error",
description: "Failed to save Ollama model setting.",
variant: "destructive",
});
}
};
// Save Ollama temperature setting
const saveOllamaTemperatureSetting = async (temperature: number) => {
try {
const document = {
key: 'ollamaTemperature',
value: temperature.toString(),
updatedAt: new Date().getTime(),
};
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify(document),
}, 'Save Ollama Temperature Setting');
if (response.status !== 202) {
throw new Error('Failed to save Ollama temperature setting');
}
setOllamaTemperature(temperature);
toast({
title: "Ollama temperature updated",
description: `Temperature changed to ${temperature}.`,
});
} catch (error) {
console.error('Error saving Ollama temperature setting:', error);
toast({
title: "Error",
description: "Failed to save Ollama temperature setting.",
variant: "destructive",
});
}
};
// Save Ollama keep_alive setting
const saveOllamaKeepAliveSetting = async (keepAlive: number) => {
try {
const document = {
key: 'ollamaKeepAlive',
value: keepAlive.toString(),
updatedAt: new Date().getTime(),
};
const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
},
body: JSON.stringify(document),
}, 'Save Ollama Keep Alive Setting');
if (response.status !== 202) {
throw new Error('Failed to save Ollama keep alive setting');
}
setOllamaKeepAlive(keepAlive);
toast({
title: "Ollama keep alive updated",
description: `Keep alive changed to ${keepAlive}.`,
});
} catch (error) {
console.error('Error saving Ollama keep alive setting:', error);
toast({
title: "Error",
description: "Failed to save Ollama keep alive setting.",
variant: "destructive",
});
}
};
return (
<div className={`min-h-screen bg-background text-foreground flex flex-col ${getTextClass('base')}`}>
{/* Header */}
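Each saver added above writes a single { key, value, updatedAt } document to the settings index and treats any status other than 202 as a failure, since Meilisearch acknowledges document writes with 202 Accepted and processes them as asynchronous tasks. A generic sketch of that write path; the saveSettingValue name is illustrative and not part of the commit.

// Illustrative sketch, not part of the commit. Mirrors the single-document POST
// used by the per-setting save functions above.
const saveSettingValue = async (key: string, value: string): Promise<void> => {
  const response = await fetch(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
    },
    body: JSON.stringify({ key, value, updatedAt: new Date().getTime() }),
  });
  // Meilisearch enqueues the write as a background task and replies 202 Accepted.
  if (response.status !== 202) {
    throw new Error(`Failed to save setting "${key}"`);
  }
};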
@@ -2420,13 +2696,59 @@ ${content}${context}`;
</div>
<div>
-<label className={`${getTextClass('base')} font-medium`}>Endpoint:</label>
-<p className={`${getTextClass('base')} text-muted-foreground`}>{OLLAMA_ENDPOINT}</p>
+<label className={`${getTextClass('xl')} font-medium mb-2 block`}>Endpoint</label>
+<Input
value={ollamaEndpoint}
onChange={(e) => setOllamaEndpoint(e.target.value)}
onBlur={() => saveOllamaEndpointSetting(ollamaEndpoint)}
className={`${getTextClass('base')}`}
placeholder="http://localhost:11434"
/>
</div>
<div>
-<label className={`${getTextClass('base')} font-medium`}>Model:</label>
-<p className={`${getTextClass('base')} text-muted-foreground`}>{OLLAMA_MODEL}</p>
+<label className={`${getTextClass('xl')} font-medium mb-2 block`}>Model</label>
+<Input
value={ollamaModel}
onChange={(e) => setOllamaModel(e.target.value)}
onBlur={() => saveOllamaModelSetting(ollamaModel)}
className={`${getTextClass('base')}`}
placeholder="gemma3:4b-it-qat"
/>
</div>
<div>
<label className={`${getTextClass('xl')} font-medium mb-2 block`}>
Temperature: {ollamaTemperature}
</label>
<Slider
value={[ollamaTemperature]}
onValueChange={(value) => saveOllamaTemperatureSetting(value[0])}
max={2}
min={0}
step={0.1}
className="w-full h-8"
/>
<p className={`${getTextClass('base')} text-muted-foreground mt-2`}>
Controls randomness in tag generation (0 = deterministic, 2 = very random)
</p>
</div>
<div>
<label className={`${getTextClass('xl')} font-medium mb-2 block`}>
Keep Alive: {ollamaKeepAlive === -1 ? 'Infinite' : `${ollamaKeepAlive}s`}
</label>
<Slider
value={[ollamaKeepAlive === -1 ? 0 : Math.min(ollamaKeepAlive, 3600)]}
onValueChange={(value) => saveOllamaKeepAliveSetting(value[0] === 0 ? -1 : value[0])}
max={3600}
min={0}
step={60}
className="w-full h-8"
/>
<p className={`${getTextClass('base')} text-muted-foreground mt-2`}>
How long to keep the model loaded in memory (-1 = infinite, 0 = unload immediately)
</p>
</div>
</div>
</div>