feat: immutable entries + full task logging

Entries are now immutable once a journal has been generated:
- Edit/delete returns an ENTRY_IMMUTABLE error if a journal exists for the entry's date
- Frontend shows a lock message and hides the delete button
- Delete Journal button unlocks the entries again (client-side handling sketched below)
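A minimal client-side sketch of how the frontend can react to the new error. The endpoint and response envelope come from the entries route diff below; updateEntry and showLockMessage are hypothetical helpers, and the PUT body shape is assumed:

// Hypothetical sketch: detect ENTRY_IMMUTABLE on edit and switch the UI to
// its locked state. Endpoint and error envelope match the diff below;
// showLockMessage is an assumed helper, not part of this commit.
declare function showLockMessage(message: string): void;

async function updateEntry(id: string, content: string): Promise<void> {
  const res = await fetch(`/api/v1/entries/${id}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ content }), // assumed request body shape
  });
  const body = await res.json() as {
    data: unknown;
    error: { code: string; message: string } | null;
  };
  if (body.error?.code === 'ENTRY_IMMUTABLE') {
    // A journal exists for this entry's date: show the lock message and
    // point the user at the Delete Journal button to unlock.
    showLockMessage(body.error.message);
    return;
  }
  // ...handle success and other errors
}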

Task logging now stores the full exchange (result shape sketched below):
- request: full JSON request sent to the AI provider
- response: full JSON response from the AI provider
- prompt: formatted human-readable prompt (system prompt + user prompt)
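Every provider's generate() now returns this shape (copied from the provider interface diff at the bottom of this commit); on completion the generate route JSON.stringify-s the request and response halves into the task's columns:

// Result shape each provider returns after this change (see provider.ts diff).
interface AIProviderResult {
  content: string;                   // extracted text used for the journal
  request: Record<string, unknown>;  // full JSON body sent to the provider
  response: Record<string, unknown>; // full JSON body received back
}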

Prompt structure (assembly sketched below):
1. System prompt
2. Previous diary entries (journals)
3. Today's entries
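A minimal sketch of the assembly, following the template strings in the generate route diff below. buildPrompts is a hypothetical helper (the route inlines this logic) and all inputs are illustrative:

// Hypothetical helper mirroring the inline prompt assembly in the route diff.
function buildPrompts(
  journalPrompt: string | undefined,  // settings?.journalPrompt
  previousJournalsText: string,       // '' when journalContextDays is 0
  date: string,
  entriesText: string,
) {
  const systemPrompt = journalPrompt || 'You are a thoughtful journal writer.';
  const userPrompt = `${previousJournalsText}ENTRIES FROM TODAY (${date}):\n${entriesText}\n\nWrite a thoughtful, reflective journal entry based on the entries above.`;
  // The task's human-readable prompt column stores both parts joined:
  const prompt = `${systemPrompt}\n\n---\n\n${userPrompt}`;
  return { systemPrompt, userPrompt, prompt };
}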
Author: lotherk
Date: 2026-03-26 22:05:52 +00:00
Parent: 5c217853de
Commit: 754fea73c6
10 changed files with 197 additions and 98 deletions

View File

@@ -247,6 +247,11 @@ app.put('/api/v1/entries/:id', async (c) => {
const existing = await prisma.entry.findFirst({ where: { id, userId } });
if (!existing) return c.json({ data: null, error: { code: 'NOT_FOUND', message: 'Entry not found' } }, 404);
+const journal = await prisma.journal.findFirst({ where: { userId, date: existing.date } });
+if (journal) {
+return c.json({ data: null, error: { code: 'ENTRY_IMMUTABLE', message: 'Cannot edit entry: journal already generated. Delete the journal first.' } }, 400);
+}
const entry = await prisma.entry.update({
where: { id },
data: {
@@ -266,6 +271,11 @@ app.delete('/api/v1/entries/:id', async (c) => {
const existing = await prisma.entry.findFirst({ where: { id, userId } });
if (!existing) return c.json({ data: null, error: { code: 'NOT_FOUND', message: 'Entry not found' } }, 404);
+const journal = await prisma.journal.findFirst({ where: { userId, date: existing.date } });
+if (journal) {
+return c.json({ data: null, error: { code: 'ENTRY_IMMUTABLE', message: 'Cannot delete entry: journal already generated. Delete the journal first.' } }, 400);
+}
await prisma.entry.delete({ where: { id } });
return c.json({ data: { deleted: true }, error: null });
});
@@ -292,6 +302,7 @@ app.post('/api/v1/journal/generate/:date', async (c) => {
return c.json({ data: null, error: { code: 'NO_AI_CONFIG', message: 'AI not configured. Please set up your API key in settings.' } }, 400);
}
+// Build entries text
const entriesText = entries.map(entry => {
let text = `[${entry.type.toUpperCase()}] ${entry.createdAt.toISOString()}\n${entry.content}`;
if (entry.metadata) {
@@ -304,7 +315,7 @@ app.post('/api/v1/journal/generate/:date', async (c) => {
return text;
}).join('\n\n');
-// Get previous journals for context
+// Build previous journals text
const contextDays = settings?.journalContextDays || 0;
let previousJournalsText = '';
@@ -321,23 +332,19 @@ app.post('/api/v1/journal/generate/:date', async (c) => {
},
},
orderBy: { date: 'desc' },
-select: { date: true, content: true, generatedAt: true },
+select: { date: true, content: true },
});
if (previousJournals.length > 0) {
-previousJournalsText = `\n\nPREVIOUS JOURNAL SUMMARY (last ${contextDays} days for context):\n${previousJournals.map(j =>
+previousJournalsText = `PREVIOUS DIARIES:\n${previousJournals.map(j =>
`[${j.date}]\n${j.content}`
-).join('\n\n')}\n`;
+).join('\n\n')}\n\n`;
}
}
+// Build prompts: 1. system prompt, 2. previous journals, 3. today's entries
const systemPrompt = settings?.journalPrompt || 'You are a thoughtful journal writer.';
-const userPrompt = `${previousJournalsText}The following entries were captured throughout the day (${date}). Write a thoughtful, reflective journal entry.
-ENTRIES:
-${entriesText}
-JOURNAL:`;
+const userPrompt = `${previousJournalsText}ENTRIES FROM TODAY (${date}):\n${entriesText}\n\nWrite a thoughtful, reflective journal entry based on the entries above.`;
console.log(`[Journal Generate] Date: ${date}, Context days: ${contextDays}, Entries: ${entries.length}`);
@@ -354,8 +361,9 @@ JOURNAL:`;
status: 'pending',
provider,
model: settings?.aiModel,
-prompt: userPrompt,
-request: systemPrompt,
+prompt: `${systemPrompt}\n\n---\n\n${userPrompt}`,
+request: '',
response: '',
},
});
@@ -365,8 +373,6 @@ JOURNAL:`;
data: { id: placeholderJournal.id },
});
-let content = '';
try {
console.log(`[Journal Generate] Using provider: ${provider}`);
@@ -379,20 +385,21 @@ JOURNAL:`;
console.log(`[Journal Generate] AI Provider created: ${aiProvider.provider}`);
-content = await aiProvider.generate(userPrompt, systemPrompt);
+const result = await aiProvider.generate(userPrompt, systemPrompt);
-if (!content) {
+if (!result.content) {
throw new Error('No content generated from AI');
}
-console.log(`[Journal Generate] Success! Content length: ${content.length}`);
+console.log(`[Journal Generate] Success! Content length: ${result.content.length}`);
-// Update task with success - store full prompt and response
+// Update task with success - store full request and response JSON
await prisma.task.update({
where: { id: task.id },
data: {
status: 'completed',
-response: content,
+request: JSON.stringify(result.request, null, 2),
+response: JSON.stringify(result.response, null, 2),
completedAt: new Date(),
},
});
@@ -400,7 +407,7 @@ JOURNAL:`;
// Update journal with content
const journal = await prisma.journal.update({
where: { id: placeholderJournal.id },
-data: { content, generatedAt: new Date() },
+data: { content: result.content, generatedAt: new Date() },
});
return c.json({ data: { journal, task }, error: null });
@@ -545,9 +552,9 @@ app.post('/api/v1/ai/test', async (c) => {
const result = await aiProvider.generate('Say "OK" if you can read this.', 'You are a test assistant. Respond with just "OK".');
-console.log(`[AI Test] Success! Response length: ${result.length}`);
+console.log(`[AI Test] Success! Response length: ${result.content.length}`);
-if (result.toLowerCase().includes('ok')) {
+if (result.content.toLowerCase().includes('ok')) {
return c.json({ data: { valid: true, message: 'Connection successful!' }, error: null });
} else {
return c.json({ data: { valid: false }, error: { code: 'TEST_FAILED', message: 'Model responded but with unexpected output' } });

View File

@@ -65,6 +65,11 @@ entriesRoutes.put('/:id', async (c) => {
return c.json({ data: null, error: { code: 'NOT_FOUND', message: 'Entry not found' } }, 404);
}
+const journal = await prisma.journal.findFirst({ where: { userId, date: existing.date } });
+if (journal) {
+return c.json({ data: null, error: { code: 'ENTRY_IMMUTABLE', message: 'Cannot edit entry: journal already generated. Delete the journal first.' } }, 400);
+}
const entry = await prisma.entry.update({
where: { id },
data: {
@@ -89,6 +94,11 @@ entriesRoutes.delete('/:id', async (c) => {
return c.json({ data: null, error: { code: 'NOT_FOUND', message: 'Entry not found' } }, 404);
}
+const journal = await prisma.journal.findFirst({ where: { userId, date: existing.date } });
+if (journal) {
+return c.json({ data: null, error: { code: 'ENTRY_IMMUTABLE', message: 'Cannot delete entry: journal already generated. Delete the journal first.' } }, 400);
+}
await prisma.entry.delete({ where: { id } });
return c.json({ data: { deleted: true }, error: null });

View File

@@ -1,4 +1,4 @@
-import type { AIProvider, AIProviderConfig } from './provider';
+import type { AIProvider, AIProviderConfig, AIProviderResult } from './provider';
export class AnthropicProvider implements AIProvider {
provider = 'anthropic' as const;
@@ -12,7 +12,16 @@ export class AnthropicProvider implements AIProvider {
this.baseUrl = config.baseUrl || 'https://api.anthropic.com/v1';
}
-async generate(prompt: string, systemPrompt?: string): Promise<string> {
+async generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult> {
+const requestBody = {
+model: this.model,
+max_tokens: 2000,
+system: systemPrompt,
+messages: [
+{ role: 'user', content: prompt }
+],
+};
const response = await fetch(`${this.baseUrl}/messages`, {
method: 'POST',
headers: {
@@ -21,23 +30,22 @@ export class AnthropicProvider implements AIProvider {
'anthropic-version': '2023-06-01',
'anthropic-dangerous-direct-browser-access': 'true',
},
-body: JSON.stringify({
-model: this.model,
-max_tokens: 2000,
-system: systemPrompt,
-messages: [
-{ role: 'user', content: prompt }
-],
-}),
+body: JSON.stringify(requestBody),
});
+const responseData = await response.json();
if (!response.ok) {
-const error = await response.text();
-throw new Error(`Anthropic API error: ${response.status} ${error}`);
+throw new Error(`Anthropic API error: ${response.status} ${JSON.stringify(responseData)}`);
}
-const data = await response.json() as { content: Array<{ text: string }> };
-return data.content[0]?.text || '';
+const content = responseData.content?.[0]?.text || '';
+return {
+content,
+request: requestBody,
+response: responseData,
+};
}
async validate(): Promise<boolean> {

View File

@@ -1,4 +1,4 @@
-import type { AIProvider, AIProviderConfig } from './provider';
+import type { AIProvider, AIProviderConfig, AIProviderResult } from './provider';
export class GroqProvider implements AIProvider {
provider = 'groq' as const;
@@ -12,7 +12,7 @@ export class GroqProvider implements AIProvider {
this.baseUrl = config.baseUrl || 'https://api.groq.com/openai/v1';
}
-async generate(prompt: string, systemPrompt?: string): Promise<string> {
+async generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult> {
const messages: Array<{ role: string; content: string }> = [];
if (systemPrompt) {
@@ -21,27 +21,36 @@ export class GroqProvider implements AIProvider {
messages.push({ role: 'user', content: prompt });
+const requestBody = {
+model: this.model,
+messages,
+temperature: 0.7,
+max_tokens: 2000,
+};
const response = await fetch(`${this.baseUrl}/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${this.apiKey}`,
},
-body: JSON.stringify({
-model: this.model,
-messages,
-temperature: 0.7,
-max_tokens: 2000,
-}),
+body: JSON.stringify(requestBody),
});
+const responseText = await response.text();
if (!response.ok) {
-const error = await response.text();
-throw new Error(`Groq API error: ${response.status} ${error}`);
+throw new Error(`Groq API error: ${response.status} ${responseText}`);
}
-const data = await response.json() as { choices: Array<{ message: { content: string } }> };
-return data.choices[0]?.message?.content || '';
+const responseData = JSON.parse(responseText);
+const content = responseData.choices?.[0]?.message?.content || '';
+return {
+content,
+request: requestBody,
+response: responseData,
+};
}
async validate(): Promise<boolean> {

View File

@@ -1,4 +1,4 @@
-import type { AIProvider, AIProviderConfig } from './provider';
+import type { AIProvider, AIProviderConfig, AIProviderResult } from './provider';
export class LMStudioProvider implements AIProvider {
provider = 'lmstudio' as const;
@@ -10,7 +10,7 @@ export class LMStudioProvider implements AIProvider {
this.model = config.model || 'local-model';
}
-async generate(prompt: string, systemPrompt?: string): Promise<string> {
+async generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult> {
const messages: Array<{ role: string; content: string }> = [];
if (systemPrompt) {
@@ -19,26 +19,34 @@ export class LMStudioProvider implements AIProvider {
messages.push({ role: 'user', content: prompt });
+const requestBody = {
+model: this.model,
+messages,
+temperature: 0.7,
+max_tokens: 2000,
+};
const response = await fetch(`${this.baseUrl}/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
-body: JSON.stringify({
-model: this.model,
-messages,
-temperature: 0.7,
-max_tokens: 2000,
-}),
+body: JSON.stringify(requestBody),
});
+const responseData = await response.json();
if (!response.ok) {
-const error = await response.text();
-throw new Error(`LM Studio API error: ${response.status} ${error}`);
+throw new Error(`LM Studio API error: ${response.status} ${JSON.stringify(responseData)}`);
}
-const data = await response.json() as { choices: Array<{ message: { content: string } }> };
-return data.choices[0]?.message?.content || '';
+const content = responseData.choices?.[0]?.message?.content || '';
+return {
+content,
+request: requestBody,
+response: responseData,
+};
}
async validate(): Promise<boolean> {

View File

@@ -1,4 +1,4 @@
-import type { AIProvider, AIProviderConfig } from './provider';
+import type { AIProvider, AIProviderConfig, AIProviderResult } from './provider';
export class OllamaProvider implements AIProvider {
provider = 'ollama' as const;
@@ -10,29 +10,37 @@ export class OllamaProvider implements AIProvider {
this.model = config.model || 'llama3.2';
}
-async generate(prompt: string, systemPrompt?: string): Promise<string> {
+async generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult> {
+const requestBody = {
+model: this.model,
+stream: false,
+messages: [
+...(systemPrompt ? [{ role: 'system', content: systemPrompt }] : []),
+{ role: 'user', content: prompt },
+],
+};
const response = await fetch(`${this.baseUrl}/api/chat`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
-body: JSON.stringify({
-model: this.model,
-stream: false,
-messages: [
-...(systemPrompt ? [{ role: 'system', content: systemPrompt }] : []),
-{ role: 'user', content: prompt },
-],
-}),
+body: JSON.stringify(requestBody),
});
+const responseData = await response.json();
if (!response.ok) {
-const error = await response.text();
-throw new Error(`Ollama API error: ${response.status} ${error}`);
+throw new Error(`Ollama API error: ${response.status} ${JSON.stringify(responseData)}`);
}
-const data = await response.json() as { message: { content: string } };
-return data.message?.content || '';
+const content = responseData.message?.content || '';
+return {
+content,
+request: requestBody,
+response: responseData,
+};
}
async validate(): Promise<boolean> {

View File

@@ -1,4 +1,4 @@
-import type { AIProvider, AIProviderConfig } from './provider';
+import type { AIProvider, AIProviderConfig, AIProviderResult } from './provider';
export class OpenAIProvider implements AIProvider {
provider = 'openai' as const;
@@ -12,7 +12,7 @@ export class OpenAIProvider implements AIProvider {
this.baseUrl = config.baseUrl || 'https://api.openai.com/v1';
}
-async generate(prompt: string, systemPrompt?: string): Promise<string> {
+async generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult> {
const messages: Array<{ role: string; content: string }> = [];
if (systemPrompt) {
@@ -21,27 +21,35 @@ export class OpenAIProvider implements AIProvider {
messages.push({ role: 'user', content: prompt });
+const requestBody = {
+model: this.model,
+messages,
+temperature: 0.7,
+max_tokens: 2000,
+};
const response = await fetch(`${this.baseUrl}/chat/completions`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${this.apiKey}`,
},
-body: JSON.stringify({
-model: this.model,
-messages,
-temperature: 0.7,
-max_tokens: 2000,
-}),
+body: JSON.stringify(requestBody),
});
+const responseData = await response.json();
if (!response.ok) {
-const error = await response.text();
-throw new Error(`OpenAI API error: ${response.status} ${error}`);
+throw new Error(`OpenAI API error: ${response.status} ${JSON.stringify(responseData)}`);
}
-const data = await response.json() as { choices: Array<{ message: { content: string } }> };
-return data.choices[0]?.message?.content || '';
+const content = responseData.choices?.[0]?.message?.content || '';
+return {
+content,
+request: requestBody,
+response: responseData,
+};
}
async validate(): Promise<boolean> {

View File

@@ -1,6 +1,12 @@
+export interface AIProviderResult {
+content: string;
+request: Record<string, unknown>;
+response: Record<string, unknown>;
+}
export interface AIProvider {
provider: 'openai' | 'anthropic' | 'ollama' | 'lmstudio' | 'groq';
-generate(prompt: string, systemPrompt?: string): Promise<string>;
+generate(prompt: string, systemPrompt?: string): Promise<AIProviderResult>;
validate?(): Promise<boolean>;
}
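
For context, a sketch of the call-site pattern this enables, mirroring the generate route and /api/v1/ai/test changes above. generateAndLog is a hypothetical wrapper, not part of this commit:

// Hypothetical wrapper showing the new call-site pattern: destructure the
// result instead of receiving a bare string, and serialize both sides of
// the exchange for the task log.
async function generateAndLog(
  provider: AIProvider,
  userPrompt: string,
  systemPrompt: string,
) {
  const result = await provider.generate(userPrompt, systemPrompt);
  if (!result.content) {
    throw new Error('No content generated from AI');
  }
  return {
    content: result.content,                            // journal text
    request: JSON.stringify(result.request, null, 2),   // task.request column
    response: JSON.stringify(result.response, null, 2), // task.response column
  };
}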