Files
clicktrack/lib/analysis/providers/ollama.ts
AJ Avezzano 8b9d72bc9d feat: analysis providers, settings UI, song search, WAV duration fix
- Multi-provider AI analysis (Anthropic, OpenAI, Ollama, Algorithmic)
- server-only guards on all provider files; client bundle fix
- /settings page with provider status, Ollama model picker, preferences
- Song search box on /analyze replacing raw MBID input (debounced, keyboard nav)
- Auto-register song via MusicBrainz on POST /api/tracks (no more 404)
- Fix WAV duration bug: last section songEnd was double-counting elapsed time
- Registry sync comment updated for self-hosted HTTPS git servers

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-03 18:46:17 -04:00

158 lines
5.2 KiB
TypeScript

import 'server-only';
import type { CTPDocument } from "@/lib/ctp/schema";
import type { AnalysisInput, AnalysisProvider } from "@/lib/analysis/providers";
import { SYSTEM_PROMPT } from "./anthropic";
/**
 * Resolve the base URL of the Ollama server.
 *
 * Honors the OLLAMA_BASE_URL environment variable when set; otherwise falls
 * back to the default local Ollama install.
 */
function getBaseUrl(): string {
  const { OLLAMA_BASE_URL } = process.env;
  return OLLAMA_BASE_URL ?? "http://localhost:11434";
}
// ─── Model list ───────────────────────────────────────────────────────────────

/**
 * Minimal shape of the response from Ollama's `GET /api/tags` endpoint.
 * Only the fields this module reads are declared; the real payload carries
 * more per-model detail (size, digest, etc.) that we ignore.
 */
interface OllamaTagsResponse {
// Locally installed models; absent/empty when none are pulled.
models?: Array<{ name: string }>;
}
/**
 * List the names of models installed on the local Ollama server.
 *
 * Queries `/api/tags` with a 5-second timeout. Any failure — server down,
 * non-2xx status, timeout, malformed body — is treated as "no models" and
 * yields an empty array rather than throwing.
 */
export async function getOllamaModels(): Promise<string[]> {
  try {
    const response = await fetch(`${getBaseUrl()}/api/tags`, {
      signal: AbortSignal.timeout(5000),
    });
    if (!response.ok) {
      return [];
    }
    const body = (await response.json()) as OllamaTagsResponse;
    const models = body.models ?? [];
    return models.map(({ name }) => name);
  } catch {
    return [];
  }
}
// ─── Chat completions helper ──────────────────────────────────────────────────
async function callOllama(model: string, userMessage: string): Promise<string> {
const baseUrl = getBaseUrl();
const response = await fetch(`${baseUrl}/v1/chat/completions`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
model,
messages: [
{ role: "system", content: SYSTEM_PROMPT },
{
role: "user",
content:
userMessage +
"\n\nRespond with valid JSON only. Do not add any explanation or markdown. Your entire response must be a single valid JSON object matching the schema.",
},
],
stream: false,
}),
signal: AbortSignal.timeout(120000), // 2-minute timeout for slow local models
});
if (!response.ok) {
const text = await response.text().catch(() => "");
throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
}
const json = await response.json() as {
choices?: Array<{ message?: { content?: string } }>;
};
const content = json.choices?.[0]?.message?.content;
if (!content) {
throw new Error("Ollama did not return a message content");
}
return content;
}
// ─── Provider implementation ──────────────────────────────────────────────────

/**
 * Analysis provider backed by a locally running Ollama server.
 *
 * Availability is probed via `GET /api/tags`; generation goes through the
 * OpenAI-compatible chat endpoint and expects the model to return a CTP
 * document as bare JSON (markdown code fences are tolerated and stripped).
 */
export const ollamaProvider: AnalysisProvider = {
  id: "ollama",
  label: "Ollama",
  type: "local-ai",

  /** Reachability check: a 5s-timeout probe against the tags endpoint. */
  async isAvailable() {
    const url = getBaseUrl();
    try {
      const response = await fetch(`${url}/api/tags`, {
        signal: AbortSignal.timeout(5000),
      });
      if (response.ok) {
        return { available: true };
      }
      return { available: false, reason: `Ollama not reachable at ${url}` };
    } catch {
      return { available: false, reason: `Ollama not reachable at ${url}` };
    }
  },

  /**
   * Generate a CTP document for a track by prompting the configured model.
   *
   * @throws Error when `input.ollamaModel` is missing, when the HTTP request
   *   fails, or when two consecutive responses cannot be parsed as CTP JSON.
   */
  async generateCTP(input: AnalysisInput): Promise<CTPDocument> {
    const { ollamaModel, bpm, duration, title, artist, mbid, contributed_by } = input;
    if (!ollamaModel) {
      throw new Error("ollamaModel is required for Ollama provider");
    }

    // Rough bar count assuming 4/4 — gives the model a sense of song length.
    const approxBars = Math.round((duration * bpm) / 60 / 4);
    const userMessage = `\
Generate a CTP document for the following song:
Title: ${title ?? "Unknown Title"}
Artist: ${artist ?? "Unknown Artist"}
MusicBrainz ID: ${mbid ?? "unknown"}
Detected BPM: ${bpm}
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
Contributed by: ${contributed_by}
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;

    // Up to two attempts: small local models frequently emit malformed JSON.
    let lastRaw = "";
    for (let attempt = 0; attempt < 2; attempt++) {
      let raw: string;
      try {
        raw = await callOllama(ollamaModel, userMessage);
      } catch (err) {
        const label = attempt === 0 ? "Ollama request failed" : "Ollama retry request failed";
        throw new Error(`${label}: ${err instanceof Error ? err.message : String(err)}`);
      }
      lastRaw = raw;
      const doc = parseCTPResponse(raw);
      if (doc) return doc;
    }

    // Fix: previously this previewed the FIRST response even though the retry
    // was the last attempt to fail parsing; preview the most recent response.
    throw new Error(
      `Ollama (${ollamaModel}) returned a response that could not be parsed as a valid CTP document. ` +
      `Response preview: ${lastRaw.slice(0, 200)}`
    );
  },
};

/**
 * Best-effort parse of a raw model response into a CTPDocument.
 *
 * Strips markdown code fences, then requires a JSON object carrying a
 * `metadata` object — an explicit structural guard instead of the previous
 * reliance on a TypeError escaping into the catch block for malformed shapes.
 * Backfills a real `created_at` when the model omitted it, left a
 * placeholder, or produced a non-string value.
 *
 * @returns The parsed document, or null when the response is unusable
 *   (caller retries or reports failure).
 */
function parseCTPResponse(raw: string): CTPDocument | null {
  const stripped = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```\s*$/, "").trim();
  let parsed: unknown;
  try {
    parsed = JSON.parse(stripped);
  } catch {
    return null;
  }
  if (typeof parsed !== "object" || parsed === null) return null;
  const doc = parsed as CTPDocument;
  if (typeof doc.metadata !== "object" || doc.metadata === null) return null;
  const created = doc.metadata.created_at;
  if (typeof created !== "string" || created === "" || created.includes("placeholder")) {
    doc.metadata.created_at = new Date().toISOString();
  }
  return doc;
}