feat(Index.tsx): add user prompt setting for AI tag generation
@@ -116,11 +116,10 @@ Focus on:

Return ONLY a comma-separated list of tags, no other text. Example: golang, testing, array-comparison, cmp-library

Keep tags concise, use lowercase, and separate words with hyphens if needed.
Keep tags concise, use lowercase, and separate words with hyphens if needed.`);
const [userPrompt, setUserPrompt] = useState(`Generate tags for this journal entry:

Available variables:
- $current: The current note content
- $previous: Previous notes with dates, separated by "--------------------"`);
$current`);
const [contextSize, setContextSize] = useState(3);

const { resolvedTheme, setTheme } = useTheme();
@@ -313,35 +312,29 @@ Available variables:
// Generate tags using Ollama
const generateTags = async (content: string, noteIndex?: number): Promise<string[]> => {
  try {
    // Get context from surrounding notes
    // Get context from previous notes only
    let previousNotes = '';
    if (noteIndex !== undefined && noteCache.length > 0) {
      const contextNotes = [];
      const start = Math.max(0, noteIndex - contextSize);
      const end = Math.min(noteCache.length, noteIndex + contextSize + 1);
      const end = noteIndex; // Only previous notes, not future ones

      for (let i = start; i < end; i++) {
        if (i !== noteIndex) {
          const note = noteCache[i];
          const date = new Date(note.epochTime).toLocaleDateString();
          contextNotes.push(`[${date}] ${note.content}`);
        }
      }

      if (contextNotes.length > 0) {
        previousNotes = contextNotes.join('\n\n--------------------\n\n');
      }
    }

    // Replace variables in system prompt
    let processedSystemPrompt = systemPrompt
    // Replace variables in user prompt only (system prompt should be instructions, not content)
    let processedUserPrompt = userPrompt
      .replace(/\$current/g, content)
      .replace(/\$previous/g, previousNotes);

    const userPrompt = `Generate tags for this journal entry:

${content}`;

    const response = await fetchWithTiming(`${ollamaEndpoint}/api/generate`, {
      method: 'POST',
      headers: {
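For readers skimming the diff, the changed context logic reduces to roughly the standalone sketch below. The `Note` shape, the `buildUserPrompt` name, and its parameter list are illustrative; `noteCache`, `contextSize`, the date prefix, and the `--------------------` separator come from the component.

```typescript
// Sketch of the new behaviour: collect only notes *before* the current one,
// then substitute the $current / $previous placeholders into the user prompt.
interface Note {
  epochTime: number;
  content: string;
}

function buildUserPrompt(
  userPromptTemplate: string, // e.g. "Generate tags for this journal entry:\n\n$current"
  content: string,            // the note being tagged
  noteCache: Note[],
  noteIndex: number,
  contextSize: number,
): string {
  const start = Math.max(0, noteIndex - contextSize);
  const previousNotes = noteCache
    .slice(start, noteIndex) // previous notes only, never future ones
    .map((n) => `[${new Date(n.epochTime).toLocaleDateString()}] ${n.content}`)
    .join('\n\n--------------------\n\n');

  return userPromptTemplate
    .replace(/\$current/g, content)
    .replace(/\$previous/g, previousNotes);
}
```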
@@ -349,8 +342,8 @@ ${content}`;
      },
      body: JSON.stringify({
        model: ollamaModel,
        system: processedSystemPrompt,
        prompt: userPrompt,
        system: systemPrompt,
        prompt: processedUserPrompt,
        stream: false,
        keep_alive: ollamaKeepAlive,
        temperature: ollamaTemperature,
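The request targets Ollama's /api/generate endpoint, with the system and user roles now cleanly separated. A minimal sketch of such a call is below; `requestTags` and its parameters are illustrative, error handling is reduced, and the temperature is shown nested under `options`, which is where Ollama's documented schema places sampling parameters. Adjust to match the component if needed.

```typescript
// Minimal sketch of the tag-generation request, assuming Ollama's /api/generate
// endpoint. systemPrompt stays pure instructions; processedUserPrompt already
// has $current/$previous filled in.
async function requestTags(
  endpoint: string,            // e.g. "http://localhost:11434"
  model: string,
  systemPrompt: string,
  processedUserPrompt: string,
): Promise<string[]> {
  const res = await fetch(`${endpoint}/api/generate`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model,
      system: systemPrompt,
      prompt: processedUserPrompt,
      stream: false,
      keep_alive: '5m',              // keep the model loaded between calls
      options: { temperature: 0.2 }, // sampling options are nested in Ollama's schema
    }),
  });
  if (!res.ok) throw new Error(`Ollama request failed: ${res.status}`);
  const data = await res.json();
  // The system prompt asks for a comma-separated list, so split it into tags.
  return data.response
    .split(',')
    .map((t: string) => t.trim())
    .filter((t: string) => t.length > 0);
}
```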
@@ -1549,6 +1542,7 @@ ${content}`;
await loadFontSizeSetting();
await loadAutoGenerateTagsSetting();
await loadSystemPromptSetting();
await loadUserPromptSetting();
await loadContextSizeSetting();
await loadIncludeTagsSetting();
await loadAllOllamaSettings();
@@ -1670,6 +1664,33 @@ ${content}`;
  }
};

// Load user prompt setting
const loadUserPromptSetting = async () => {
  try {
    const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
      },
      body: JSON.stringify({
        q: 'userPrompt',
        filter: 'key = "userPrompt"',
        limit: 1,
      }),
    }, 'Load User Prompt Setting');

    if (response.ok) {
      const data = await response.json();
      if (data.hits.length > 0) {
        setUserPrompt(data.hits[0].value);
      }
    }
  } catch (error) {
    console.error('Error loading user prompt setting:', error);
  }
};

// Load context size setting
const loadContextSizeSetting = async () => {
  try {
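loadUserPromptSetting follows the same settings-as-documents pattern as the existing loaders: one Meilisearch document per setting with `key` / `value` fields. A generic version might look like the sketch below, assuming the component's MEILISEARCH_ENDPOINT, SETTINGS_INDEX, and MEILISEARCH_API_KEY constants are in scope and that `key` is listed in the index's filterableAttributes so the filter works.

```typescript
// Hypothetical generic loader for the {key, value, updatedAt} settings documents.
async function loadSetting<T>(key: string): Promise<T | undefined> {
  const res = await fetch(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/search`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
    },
    body: JSON.stringify({ q: key, filter: `key = "${key}"`, limit: 1 }),
  });
  if (!res.ok) return undefined;
  const data = await res.json();
  return data.hits.length > 0 ? (data.hits[0].value as T) : undefined;
}

// Roughly equivalent usage to loadUserPromptSetting:
// const saved = await loadSetting<string>('userPrompt');
// if (saved !== undefined) setUserPrompt(saved);
```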
@@ -1951,6 +1972,43 @@ ${content}`;
  }
};

// Save user prompt setting
const saveUserPromptSetting = async (newPrompt: string) => {
  try {
    const document = {
      key: 'userPrompt',
      value: newPrompt,
      updatedAt: new Date().getTime(),
    };

    const response = await fetchWithTiming(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
      },
      body: JSON.stringify(document),
    }, 'Save User Prompt Setting');

    if (response.status !== 202) {
      throw new Error('Failed to save user prompt setting');
    }

    setUserPrompt(newPrompt);
    toast({
      title: "User prompt updated",
      description: "User prompt has been saved.",
    });
  } catch (error) {
    console.error('Error saving user prompt setting:', error);
    toast({
      title: "Error",
      description: "Failed to save user prompt setting.",
      variant: "destructive",
    });
  }
};

// Save context size setting
const saveContextSizeSetting = async (newSize: number) => {
  try {
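The save path is the mirror image: upsert a single document and treat anything other than Meilisearch's 202 Accepted (the document addition was enqueued as a task) as a failure. A generic counterpart to the loader, under the same assumptions plus `key` serving as the index's primary key, could be:

```typescript
// Hypothetical generic saver for settings documents; repeated saves with the
// same key overwrite the earlier document when `key` is the primary key.
async function saveSetting(key: string, value: unknown): Promise<void> {
  const res = await fetch(`${MEILISEARCH_ENDPOINT}/indexes/${SETTINGS_INDEX}/documents`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${MEILISEARCH_API_KEY}`,
    },
    body: JSON.stringify({ key, value, updatedAt: Date.now() }),
  });
  if (res.status !== 202) {
    throw new Error(`Failed to save setting "${key}"`);
  }
}
```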
@@ -2655,6 +2713,22 @@ ${content}`;
              This prompt tells the AI how to generate tags for your notes
            </p>
          </div>

          <div>
            <label className={`${getTextClass('xl')} font-medium mb-2 block`}>
              User Prompt
            </label>
            <Textarea
              value={userPrompt}
              onChange={(e) => setUserPrompt(e.target.value)}
              onBlur={() => saveUserPromptSetting(userPrompt)}
              className={`min-h-32 ${getTextClass('base')}`}
              placeholder="Enter the user prompt for tag generation..."
            />
            <p className={`${getTextClass('base')} text-muted-foreground mt-2`}>
              This prompt is sent to the AI with $current and $previous variables
            </p>
          </div>
        </div>
      </div>
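Because the prompt is stored as plain text with placeholders, users can customize it in the new textarea. An illustrative value (not a default shipped by this commit) that pulls in the previous-notes context as well:

```typescript
// Hypothetical customised user prompt; $previous expands to earlier notes
// separated by "--------------------", $current to the note being tagged.
const customUserPrompt = `Generate tags for this journal entry.

Previous entries for context:
$previous

Current entry:
$current`;
```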