feat: analysis providers, settings UI, song search, WAV duration fix
- Multi-provider AI analysis (Anthropic, OpenAI, Ollama, Algorithmic); server-only guards on all provider files; client bundle fix
- /settings page with provider status, Ollama model picker, preferences
- Song search box on /analyze replacing raw MBID input (debounced, keyboard nav)
- Auto-register song via MusicBrainz on POST /api/tracks (no more 404)
- Fix WAV duration bug: last section songEnd was double-counting elapsed time
- Registry sync comment updated for self-hosted HTTPS git servers

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
123
app/(web)/settings/page.tsx
Normal file
123
app/(web)/settings/page.tsx
Normal file
@@ -0,0 +1,123 @@
|
||||
"use client";

import { useEffect, useState, useCallback } from "react";
import type { ProviderInfo } from "@/lib/analysis/providers";
import ProviderStatus from "@/components/settings/ProviderStatus";
import OllamaModelPicker from "@/components/settings/OllamaModelPicker";
import PreferencesPanel from "@/components/settings/PreferencesPanel";

// localStorage keys — preferences are client-side only, nothing is persisted
// server-side.
const PROVIDER_KEY = "clicktrack_analysis_provider";
const MODEL_KEY = "clicktrack_ollama_model";

/** Response shape of GET /api/analyze/providers. */
interface ProvidersResponse {
  providers: ProviderInfo[];
  ollamaModels: string[];
}

/**
 * Settings page.
 *
 * Shows the status of every analysis provider, lets the user pick a default
 * provider and (when Ollama is reachable) an Ollama model, and hosts the
 * general preferences panel. Provider/model choices live in localStorage.
 */
export default function SettingsPage() {
  const [providers, setProviders] = useState<ProviderInfo[]>([]);
  const [ollamaModels, setOllamaModels] = useState<string[]>([]);
  const [loading, setLoading] = useState(true);
  const [refreshing, setRefreshing] = useState(false);
  const [defaultProvider, setDefaultProvider] = useState<string>("");
  const [selectedOllamaModel, setSelectedOllamaModel] = useState<string>("");

  const fetchProviders = useCallback(async (isRefresh = false) => {
    if (isRefresh) setRefreshing(true);
    try {
      const res = await fetch("/api/analyze/providers");
      // FIX: a non-2xx response previously fell straight through to
      // res.json(), which rejects on an HTML error page and escaped as an
      // unhandled promise rejection (there was no catch).
      if (!res.ok) {
        throw new Error(`providers endpoint returned ${res.status}`);
      }
      const data = await res.json() as ProvidersResponse;
      // FIX: guard against a malformed payload so a missing field degrades
      // to an empty list instead of crashing downstream .map()/.length.
      setProviders(data.providers ?? []);
      const models = data.ollamaModels ?? [];
      setOllamaModels(models);

      // Initialise model selection: prefer the saved model if it is still
      // installed, otherwise fall back to the first available one.
      if (models.length > 0) {
        const saved = localStorage.getItem(MODEL_KEY);
        if (saved && models.includes(saved)) {
          setSelectedOllamaModel(saved);
        } else {
          setSelectedOllamaModel(models[0]);
        }
      }
    } catch (err) {
      // FIX: network/parse failures were previously unhandled. Log and keep
      // the last known provider list rather than taking the page down.
      console.error("[settings] failed to load providers:", err);
    } finally {
      setLoading(false);
      setRefreshing(false);
    }
  }, []);

  useEffect(() => {
    const saved = localStorage.getItem(PROVIDER_KEY);
    if (saved) setDefaultProvider(saved);
    fetchProviders();
  }, [fetchProviders]);

  // Persist the default-provider choice.
  function handleSetDefault(id: string) {
    setDefaultProvider(id);
    localStorage.setItem(PROVIDER_KEY, id);
  }

  // Persist the Ollama model choice.
  function handleModelChange(model: string) {
    setSelectedOllamaModel(model);
    localStorage.setItem(MODEL_KEY, model);
  }

  const ollamaProvider = providers.find((p) => p.id === "ollama");
  const ollamaAvailable = ollamaProvider?.available === true;

  if (loading) {
    return (
      <div className="space-y-8">
        <h1 className="text-2xl font-bold">Settings</h1>
        <p className="text-zinc-500">Loading…</p>
      </div>
    );
  }

  return (
    <div className="space-y-10 max-w-2xl">
      <h1 className="text-2xl font-bold">Settings</h1>

      {/* Analysis Providers */}
      <section className="space-y-4">
        <h2 className="text-lg font-semibold">Analysis Providers</h2>
        <div className="space-y-3">
          {providers.map((provider) => (
            <ProviderStatus
              key={provider.id}
              provider={provider}
              selectedOllamaModel={selectedOllamaModel}
              isDefault={defaultProvider === provider.id}
              onSetDefault={handleSetDefault}
            />
          ))}
        </div>
      </section>

      {/* Ollama Models — only rendered when the Ollama provider is reachable */}
      {ollamaAvailable && (
        <section className="space-y-4">
          <h2 className="text-lg font-semibold">Ollama Models</h2>
          <OllamaModelPicker
            models={ollamaModels}
            value={selectedOllamaModel}
            onChange={handleModelChange}
            onRefresh={() => fetchProviders(true)}
            refreshing={refreshing}
          />
          {ollamaProvider?.ollamaBaseUrl && (
            <p className="text-xs text-zinc-600">
              Base URL (operator-configured):{" "}
              <code className="text-zinc-500">{ollamaProvider.ollamaBaseUrl}</code>
            </p>
          )}
        </section>
      )}

      {/* Preferences */}
      <section className="space-y-4">
        <h2 className="text-lg font-semibold">Preferences</h2>
        <PreferencesPanel />
      </section>
    </div>
  );
}
|
||||
20
app/api/analyze/providers/route.ts
Normal file
20
app/api/analyze/providers/route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { getProviderInfoList, getOllamaModels } from "@/lib/analysis/providers/registry";
|
||||
|
||||
/**
|
||||
* GET /api/analyze/providers
|
||||
*
|
||||
* Returns all providers (available and unavailable) and the list of
|
||||
* locally available Ollama models (empty array if Ollama is unreachable).
|
||||
*/
|
||||
export async function GET() {
|
||||
const [providers, ollamaModels] = await Promise.all([
|
||||
getProviderInfoList(),
|
||||
getOllamaModels(),
|
||||
]);
|
||||
|
||||
return NextResponse.json(
|
||||
{ providers, ollamaModels },
|
||||
{ headers: { "Cache-Control": "no-store" } }
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { z } from "zod";
|
||||
import { generateCTPWithAI } from "@/lib/analysis/ai-ctp";
|
||||
import { getProvider, getAvailableProviders } from "@/lib/analysis/providers/registry";
|
||||
import { validateCTP } from "@/lib/ctp/validate";
|
||||
|
||||
// ─── Request schema ───────────────────────────────────────────────────────────
|
||||
@@ -12,16 +12,18 @@ const AnalyzeRequestSchema = z.object({
|
||||
artist: z.string().min(1).max(256).optional(),
|
||||
mbid: z.string().uuid().optional().nullable(),
|
||||
contributed_by: z.string().min(1).max(64).optional(),
|
||||
provider: z.string().optional(),
|
||||
ollamaModel: z.string().optional(),
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/analyze
|
||||
*
|
||||
* Accepts BPM detection results from the browser and uses Claude to generate
|
||||
* a draft CTP document for human review.
|
||||
* Accepts BPM detection results from the browser and uses the selected provider
|
||||
* to generate a draft CTP document for human review.
|
||||
*
|
||||
* Body (JSON):
|
||||
* { bpm, duration, title?, artist?, mbid?, contributed_by? }
|
||||
* { bpm, duration, title?, artist?, mbid?, contributed_by?, provider?, ollamaModel? }
|
||||
*
|
||||
* Returns:
|
||||
* { ctp: CTPDocument, warnings: string[] }
|
||||
@@ -42,40 +44,67 @@ export async function POST(req: NextRequest) {
|
||||
);
|
||||
}
|
||||
|
||||
const { bpm, duration, title, artist, mbid, contributed_by } = parsed.data;
|
||||
const { bpm, duration, title, artist, mbid, contributed_by, provider: providerId, ollamaModel } =
|
||||
parsed.data;
|
||||
|
||||
if (!process.env.ANTHROPIC_API_KEY) {
|
||||
// Validate Ollama-specific requirement
|
||||
if (providerId === "ollama" && (!ollamaModel || ollamaModel.trim() === "")) {
|
||||
return NextResponse.json(
|
||||
{ error: "ANTHROPIC_API_KEY is not configured on this server" },
|
||||
{ status: 503 }
|
||||
{ error: "ollamaModel is required when using the Ollama provider" },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Resolve provider
|
||||
let provider;
|
||||
if (providerId) {
|
||||
try {
|
||||
provider = await getProvider(providerId);
|
||||
} catch (err) {
|
||||
return NextResponse.json(
|
||||
{ error: err instanceof Error ? err.message : String(err) },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
} else {
|
||||
const available = await getAvailableProviders();
|
||||
provider = available[0];
|
||||
if (!provider) {
|
||||
return NextResponse.json(
|
||||
{ error: "No analysis providers are currently available" },
|
||||
{ status: 503 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const input = {
|
||||
bpm,
|
||||
duration,
|
||||
title,
|
||||
artist,
|
||||
mbid: mbid ?? null,
|
||||
contributed_by: contributed_by ?? "anonymous",
|
||||
ollamaModel,
|
||||
};
|
||||
|
||||
let ctpDoc;
|
||||
try {
|
||||
ctpDoc = await generateCTPWithAI({
|
||||
bpm,
|
||||
duration,
|
||||
title,
|
||||
artist,
|
||||
mbid: mbid ?? null,
|
||||
contributedBy: contributed_by ?? "anonymous",
|
||||
});
|
||||
ctpDoc = await provider.generateCTP(input);
|
||||
} catch (err) {
|
||||
console.error("[analyze] AI generation failed:", err);
|
||||
console.error(`[analyze] Provider '${provider.label}' failed:`, err);
|
||||
return NextResponse.json(
|
||||
{ error: "Failed to generate CTP document", detail: String(err) },
|
||||
{ status: 500 }
|
||||
{
|
||||
error: `Provider '${provider.label}' failed: ${err instanceof Error ? err.message : String(err)}`,
|
||||
},
|
||||
{ status: 502 }
|
||||
);
|
||||
}
|
||||
|
||||
// Validate the AI output against the CTP schema
|
||||
// Validate the output against the CTP schema
|
||||
const validation = validateCTP(ctpDoc);
|
||||
const warnings: string[] = [];
|
||||
|
||||
if (!validation.success) {
|
||||
// Rather than 500-ing, return the draft with validation warnings so the user
|
||||
// can still see and manually correct it.
|
||||
warnings.push(...validation.errors.issues.map((i) => `${i.path.join(".")}: ${i.message}`));
|
||||
}
|
||||
|
||||
|
||||
134
app/api/analyze/test/route.ts
Normal file
134
app/api/analyze/test/route.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { z } from "zod";
|
||||
|
||||
const TestRequestSchema = z.object({
|
||||
provider: z.string(),
|
||||
ollamaModel: z.string().optional(),
|
||||
});
|
||||
|
||||
/**
|
||||
* POST /api/analyze/test
|
||||
*
|
||||
* Runs a lightweight probe for a provider to confirm it is working.
|
||||
* Always returns 200 — use the `ok` field to check success/failure.
|
||||
*
|
||||
* Body: { provider: string, ollamaModel?: string }
|
||||
* Response: { ok: true } | { ok: false, error: string }
|
||||
*/
|
||||
export async function POST(req: NextRequest) {
|
||||
let body: unknown;
|
||||
try {
|
||||
body = await req.json();
|
||||
} catch {
|
||||
return NextResponse.json({ ok: false, error: "Invalid JSON body" });
|
||||
}
|
||||
|
||||
const parsed = TestRequestSchema.safeParse(body);
|
||||
if (!parsed.success) {
|
||||
return NextResponse.json({ ok: false, error: "Missing required field: provider" });
|
||||
}
|
||||
|
||||
const { provider, ollamaModel } = parsed.data;
|
||||
|
||||
try {
|
||||
switch (provider) {
|
||||
case "algorithmic": {
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
|
||||
case "anthropic": {
|
||||
const apiKey = process.env.ANTHROPIC_API_KEY;
|
||||
if (!apiKey) {
|
||||
return NextResponse.json({ ok: false, error: "ANTHROPIC_API_KEY not set" });
|
||||
}
|
||||
// Minimal 1-token probe
|
||||
const response = await fetch("https://api.anthropic.com/v1/messages", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"x-api-key": apiKey,
|
||||
"anthropic-version": "2023-06-01",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: process.env.ANTHROPIC_MODEL ?? "claude-opus-4-6",
|
||||
max_tokens: 10,
|
||||
messages: [{ role: "user", content: "Reply with the word OK" }],
|
||||
}),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (response.ok) {
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
const text = await response.text().catch(() => "");
|
||||
return NextResponse.json({
|
||||
ok: false,
|
||||
error: `Anthropic API returned ${response.status}: ${text.slice(0, 200)}`,
|
||||
});
|
||||
}
|
||||
|
||||
case "openai": {
|
||||
const apiKey = process.env.OPENAI_API_KEY;
|
||||
if (!apiKey) {
|
||||
return NextResponse.json({ ok: false, error: "OPENAI_API_KEY not set" });
|
||||
}
|
||||
const baseUrl = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
|
||||
const model = process.env.OPENAI_MODEL ?? "gpt-4o";
|
||||
const response = await fetch(`${baseUrl}/chat/completions`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model,
|
||||
max_tokens: 10,
|
||||
messages: [{ role: "user", content: "Reply with the word OK" }],
|
||||
}),
|
||||
signal: AbortSignal.timeout(15000),
|
||||
});
|
||||
if (response.ok) {
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
const text = await response.text().catch(() => "");
|
||||
return NextResponse.json({
|
||||
ok: false,
|
||||
error: `OpenAI API returned ${response.status}: ${text.slice(0, 200)}`,
|
||||
});
|
||||
}
|
||||
|
||||
case "ollama": {
|
||||
const baseUrl = process.env.OLLAMA_BASE_URL ?? "http://localhost:11434";
|
||||
if (!ollamaModel) {
|
||||
return NextResponse.json({ ok: false, error: "ollamaModel is required for Ollama test" });
|
||||
}
|
||||
const response = await fetch(`${baseUrl}/v1/chat/completions`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
model: ollamaModel,
|
||||
max_tokens: 10,
|
||||
messages: [{ role: "user", content: "Reply with the word OK" }],
|
||||
stream: false,
|
||||
}),
|
||||
signal: AbortSignal.timeout(30000),
|
||||
});
|
||||
if (response.ok) {
|
||||
return NextResponse.json({ ok: true });
|
||||
}
|
||||
const text = await response.text().catch(() => "");
|
||||
return NextResponse.json({
|
||||
ok: false,
|
||||
error: `Ollama returned ${response.status}: ${text.slice(0, 200)}`,
|
||||
});
|
||||
}
|
||||
|
||||
default:
|
||||
return NextResponse.json({ ok: false, error: `Unknown provider: ${provider}` });
|
||||
}
|
||||
} catch (err) {
|
||||
return NextResponse.json({
|
||||
ok: false,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,8 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { z } from "zod";
|
||||
import { getTempoMapsForSong, insertTempoMap, query } from "@/lib/db/client";
|
||||
import { getTempoMapsForSong, getSongByMbid, insertTempoMap, upsertSong } from "@/lib/db/client";
|
||||
import { validateCTP } from "@/lib/ctp/validate";
|
||||
import { lookupRecording, formatArtistCredit, mbDurationToSeconds } from "@/lib/musicbrainz/client";
|
||||
|
||||
// ─── GET /api/tracks?mbid=<uuid> ─────────────────────────────────────────────
|
||||
|
||||
@@ -55,19 +56,33 @@ export async function POST(req: NextRequest) {
|
||||
);
|
||||
}
|
||||
|
||||
// Ensure the song exists
|
||||
const { rowCount } = await query("SELECT 1 FROM songs WHERE mbid = $1", [
|
||||
doc.metadata.mbid,
|
||||
]);
|
||||
// Ensure the song exists — auto-register it if not
|
||||
const existing = await getSongByMbid(doc.metadata.mbid);
|
||||
|
||||
if (!rowCount || rowCount === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: "Song not found. Search for the song first to register it.",
|
||||
if (!existing) {
|
||||
try {
|
||||
const rec = await lookupRecording(doc.metadata.mbid);
|
||||
await upsertSong({
|
||||
mbid: doc.metadata.mbid,
|
||||
},
|
||||
{ status: 404 }
|
||||
);
|
||||
title: rec.title,
|
||||
artist: formatArtistCredit(rec["artist-credit"]),
|
||||
duration_seconds: mbDurationToSeconds(rec.length),
|
||||
acousticbrainz_bpm: null,
|
||||
acousticbrainz_time_sig_num: null,
|
||||
source: "musicbrainz",
|
||||
});
|
||||
} catch {
|
||||
// MusicBrainz unreachable — fall back to CTP metadata
|
||||
await upsertSong({
|
||||
mbid: doc.metadata.mbid,
|
||||
title: doc.metadata.title,
|
||||
artist: doc.metadata.artist,
|
||||
duration_seconds: doc.metadata.duration_seconds,
|
||||
acousticbrainz_bpm: null,
|
||||
acousticbrainz_time_sig_num: null,
|
||||
source: "manual",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const map = await insertTempoMap({
|
||||
|
||||
@@ -30,7 +30,10 @@ export default function RootLayout({
|
||||
<a href="/analyze" className="hover:text-zinc-100 transition-colors">
|
||||
Analyze
|
||||
</a>
|
||||
<
|
||||
<a href="/settings" className="hover:text-zinc-100 transition-colors">
|
||||
Settings
|
||||
</a>
|
||||
<a
|
||||
href="https://github.com/your-org/clicktrack"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
|
||||
Reference in New Issue
Block a user