Added: - Groq AI provider (free, fast with llama-3.3-70b-versatile) - Timezone setting (22 timezones) - Journal context: include previous journals (3/7/14/30 days) - Test connection button for AI providers - Per-provider settings (API key, model, base URL remembered) - Detailed task logging (full prompts and responses) - Tasks page with expandable details - Progress modal with steps and AI output details Fixed: - Groq API endpoint (https://api.groq.com/openai/v1/chat/completions) - Ollama baseUrl leaking to other providers - Database schema references - Proper Prisma migrations (data-safe) Changed: - Default AI: OpenAI → Groq - Project renamed: TotalRecall → DearDiary - Strict anti-hallucination prompt - Docker uses prisma migrate deploy (non-destructive)
36 lines
1.1 KiB
TypeScript
export interface AIProvider {
|
|
provider: 'openai' | 'anthropic' | 'ollama' | 'lmstudio' | 'groq';
|
|
generate(prompt: string, systemPrompt?: string): Promise<string>;
|
|
validate?(): Promise<boolean>;
|
|
}
|
|
|
|
export interface AIProviderConfig {
|
|
provider: 'openai' | 'anthropic' | 'ollama' | 'lmstudio' | 'groq';
|
|
apiKey: string;
|
|
model?: string;
|
|
baseUrl?: string;
|
|
}
|
|
|
|
import { OpenAIProvider } from './openai';
|
|
import { AnthropicProvider } from './anthropic';
|
|
import { OllamaProvider } from './ollama';
|
|
import { LMStudioProvider } from './lmstudio';
|
|
import { GroqProvider } from './groq';
|
|
|
|
export function createAIProvider(config: AIProviderConfig): AIProvider {
|
|
switch (config.provider) {
|
|
case 'openai':
|
|
return new OpenAIProvider(config);
|
|
case 'anthropic':
|
|
return new AnthropicProvider(config);
|
|
case 'ollama':
|
|
return new OllamaProvider(config);
|
|
case 'lmstudio':
|
|
return new LMStudioProvider(config);
|
|
case 'groq':
|
|
return new GroqProvider(config);
|
|
default:
|
|
throw new Error(`Unknown AI provider: ${config.provider}`);
|
|
}
|
|
}
|