feat: analysis providers, settings UI, song search, WAV duration fix

- Multi-provider AI analysis (Anthropic, OpenAI, Ollama, Algorithmic)
- server-only guards on all provider files; client bundle fix
- /settings page with provider status, Ollama model picker, preferences
- Song search box on /analyze replacing raw MBID input (debounced, keyboard nav)
- Auto-register song via MusicBrainz on POST /api/tracks (no more 404)
- Fix WAV duration bug: last section songEnd was double-counting elapsed time
- Registry sync comment updated for self-hosted HTTPS git servers

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
AJ Avezzano
2026-04-03 18:46:17 -04:00
parent 51f67f0aeb
commit 8b9d72bc9d
22 changed files with 1803 additions and 293 deletions

View File

@@ -10,16 +10,33 @@ POSTGRES_PASSWORD=clicktrack
# ── Redis ────────────────────────────────────────────────────────────────────
REDIS_URL=redis://localhost:6379
# ── Community registry ───────────────────────────────────────────────────────
# ── Community registry (optional) ────────────────────────────────────────────
# HTTPS URL of a git repository containing .ctp.json tempo map files.
# Compatible with any self-hosted git server (Gitea, Forgejo, GitLab, etc.).
# To authenticate, embed a personal access token in the URL:
# REGISTRY_REPO=https://user:token@git.yourdomain.com/org/clicktrack-registry
REGISTRY_REPO=
REGISTRY_BRANCH=main
REGISTRY_SYNC_INTERVAL=3600
# ── AI Tempo Analysis ────────────────────────────────────────────────────────
# Required for the /analyze feature (AI tempo map generation).
# Get a key at https://console.anthropic.com
# BPM detection is client-side and works without this key.
# BPM detection is client-side and works without any of these keys.
# --- Cloud AI: Anthropic (existing) ---
# Required to enable Anthropic provider. Get a key at https://console.anthropic.com
ANTHROPIC_API_KEY=
ANTHROPIC_MODEL=claude-opus-4-6 # optional model override
# --- Cloud AI: OpenAI-compatible ---
OPENAI_API_KEY= # required to enable OpenAI provider
OPENAI_BASE_URL=https://api.openai.com/v1 # override for Groq, Together, Fireworks, etc.
OPENAI_MODEL=gpt-4o # optional model override
# --- Local AI: Ollama ---
OLLAMA_BASE_URL=http://localhost:11434 # required to enable Ollama provider
# Model is selected by the user in the UI — no OLLAMA_MODEL env var needed
# Algorithmic provider is always available — no config needed.
# ── App ──────────────────────────────────────────────────────────────────────
NEXT_PUBLIC_APP_NAME=ClickTrack

123
app/(web)/settings/page.tsx Normal file
View File

@@ -0,0 +1,123 @@
"use client";
import { useEffect, useState, useCallback } from "react";
import type { ProviderInfo } from "@/lib/analysis/providers";
import ProviderStatus from "@/components/settings/ProviderStatus";
import OllamaModelPicker from "@/components/settings/OllamaModelPicker";
import PreferencesPanel from "@/components/settings/PreferencesPanel";
const PROVIDER_KEY = "clicktrack_analysis_provider";
const MODEL_KEY = "clicktrack_ollama_model";
interface ProvidersResponse {
providers: ProviderInfo[];
ollamaModels: string[];
}
export default function SettingsPage() {
const [providers, setProviders] = useState<ProviderInfo[]>([]);
const [ollamaModels, setOllamaModels] = useState<string[]>([]);
const [loading, setLoading] = useState(true);
const [refreshing, setRefreshing] = useState(false);
const [defaultProvider, setDefaultProvider] = useState<string>("");
const [selectedOllamaModel, setSelectedOllamaModel] = useState<string>("");
const fetchProviders = useCallback(async (isRefresh = false) => {
if (isRefresh) setRefreshing(true);
try {
const res = await fetch("/api/analyze/providers");
const data = await res.json() as ProvidersResponse;
setProviders(data.providers);
setOllamaModels(data.ollamaModels);
// Initialise model selection
if (data.ollamaModels.length > 0) {
const saved = localStorage.getItem(MODEL_KEY);
if (saved && data.ollamaModels.includes(saved)) {
setSelectedOllamaModel(saved);
} else {
setSelectedOllamaModel(data.ollamaModels[0]);
}
}
} finally {
setLoading(false);
setRefreshing(false);
}
}, []);
useEffect(() => {
const saved = localStorage.getItem(PROVIDER_KEY);
if (saved) setDefaultProvider(saved);
fetchProviders();
}, [fetchProviders]);
function handleSetDefault(id: string) {
setDefaultProvider(id);
localStorage.setItem(PROVIDER_KEY, id);
}
function handleModelChange(model: string) {
setSelectedOllamaModel(model);
localStorage.setItem(MODEL_KEY, model);
}
const ollamaProvider = providers.find((p) => p.id === "ollama");
const ollamaAvailable = ollamaProvider?.available === true;
if (loading) {
return (
<div className="space-y-8">
<h1 className="text-2xl font-bold">Settings</h1>
<p className="text-zinc-500">Loading</p>
</div>
);
}
return (
<div className="space-y-10 max-w-2xl">
<h1 className="text-2xl font-bold">Settings</h1>
{/* Analysis Providers */}
<section className="space-y-4">
<h2 className="text-lg font-semibold">Analysis Providers</h2>
<div className="space-y-3">
{providers.map((provider) => (
<ProviderStatus
key={provider.id}
provider={provider}
selectedOllamaModel={selectedOllamaModel}
isDefault={defaultProvider === provider.id}
onSetDefault={handleSetDefault}
/>
))}
</div>
</section>
{/* Ollama Models */}
{ollamaAvailable && (
<section className="space-y-4">
<h2 className="text-lg font-semibold">Ollama Models</h2>
<OllamaModelPicker
models={ollamaModels}
value={selectedOllamaModel}
onChange={handleModelChange}
onRefresh={() => fetchProviders(true)}
refreshing={refreshing}
/>
{ollamaProvider?.ollamaBaseUrl && (
<p className="text-xs text-zinc-600">
Base URL (operator-configured):{" "}
<code className="text-zinc-500">{ollamaProvider.ollamaBaseUrl}</code>
</p>
)}
</section>
)}
{/* Preferences */}
<section className="space-y-4">
<h2 className="text-lg font-semibold">Preferences</h2>
<PreferencesPanel />
</section>
</div>
);
}

View File

@@ -0,0 +1,20 @@
import { NextResponse } from "next/server";
import { getProviderInfoList, getOllamaModels } from "@/lib/analysis/providers/registry";

/**
 * GET /api/analyze/providers
 *
 * Returns all providers (available and unavailable) and the list of
 * locally available Ollama models (empty array if Ollama is unreachable).
 */
export async function GET() {
  // Kick off both lookups concurrently, then await each result.
  const providersPromise = getProviderInfoList();
  const modelsPromise = getOllamaModels();

  const payload = {
    providers: await providersPromise,
    ollamaModels: await modelsPromise,
  };

  // Provider availability is live state — never cache it.
  return NextResponse.json(payload, {
    headers: { "Cache-Control": "no-store" },
  });
}

View File

@@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";
import { generateCTPWithAI } from "@/lib/analysis/ai-ctp";
import { getProvider, getAvailableProviders } from "@/lib/analysis/providers/registry";
import { validateCTP } from "@/lib/ctp/validate";
// ─── Request schema ───────────────────────────────────────────────────────────
@@ -12,16 +12,18 @@ const AnalyzeRequestSchema = z.object({
artist: z.string().min(1).max(256).optional(),
mbid: z.string().uuid().optional().nullable(),
contributed_by: z.string().min(1).max(64).optional(),
provider: z.string().optional(),
ollamaModel: z.string().optional(),
});
/**
* POST /api/analyze
*
* Accepts BPM detection results from the browser and uses Claude to generate
* a draft CTP document for human review.
* Accepts BPM detection results from the browser and uses the selected provider
* to generate a draft CTP document for human review.
*
* Body (JSON):
* { bpm, duration, title?, artist?, mbid?, contributed_by? }
* { bpm, duration, title?, artist?, mbid?, contributed_by?, provider?, ollamaModel? }
*
* Returns:
* { ctp: CTPDocument, warnings: string[] }
@@ -42,40 +44,67 @@ export async function POST(req: NextRequest) {
);
}
const { bpm, duration, title, artist, mbid, contributed_by } = parsed.data;
const { bpm, duration, title, artist, mbid, contributed_by, provider: providerId, ollamaModel } =
parsed.data;
if (!process.env.ANTHROPIC_API_KEY) {
// Validate Ollama-specific requirement
if (providerId === "ollama" && (!ollamaModel || ollamaModel.trim() === "")) {
return NextResponse.json(
{ error: "ANTHROPIC_API_KEY is not configured on this server" },
{ status: 503 }
{ error: "ollamaModel is required when using the Ollama provider" },
{ status: 400 }
);
}
let ctpDoc;
// Resolve provider
let provider;
if (providerId) {
try {
ctpDoc = await generateCTPWithAI({
provider = await getProvider(providerId);
} catch (err) {
return NextResponse.json(
{ error: err instanceof Error ? err.message : String(err) },
{ status: 400 }
);
}
} else {
const available = await getAvailableProviders();
provider = available[0];
if (!provider) {
return NextResponse.json(
{ error: "No analysis providers are currently available" },
{ status: 503 }
);
}
}
const input = {
bpm,
duration,
title,
artist,
mbid: mbid ?? null,
contributedBy: contributed_by ?? "anonymous",
});
contributed_by: contributed_by ?? "anonymous",
ollamaModel,
};
let ctpDoc;
try {
ctpDoc = await provider.generateCTP(input);
} catch (err) {
console.error("[analyze] AI generation failed:", err);
console.error(`[analyze] Provider '${provider.label}' failed:`, err);
return NextResponse.json(
{ error: "Failed to generate CTP document", detail: String(err) },
{ status: 500 }
{
error: `Provider '${provider.label}' failed: ${err instanceof Error ? err.message : String(err)}`,
},
{ status: 502 }
);
}
// Validate the AI output against the CTP schema
// Validate the output against the CTP schema
const validation = validateCTP(ctpDoc);
const warnings: string[] = [];
if (!validation.success) {
// Rather than 500-ing, return the draft with validation warnings so the user
// can still see and manually correct it.
warnings.push(...validation.errors.issues.map((i) => `${i.path.join(".")}: ${i.message}`));
}

View File

@@ -0,0 +1,134 @@
import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";

const TestRequestSchema = z.object({
  provider: z.string(),
  ollamaModel: z.string().optional(),
});

/**
 * Sends a minimal (max_tokens: 10) chat-completion request to a provider
 * endpoint and maps the outcome onto the `{ ok, error? }` response shape.
 * Shared by every remote-provider probe below.
 */
async function probeChat(opts: {
  url: string;
  /** Auth / API-version headers; Content-Type is always added. */
  extraHeaders: Record<string, string>;
  model: string;
  /** Name used in the error message, e.g. "Anthropic API". */
  label: string;
  timeoutMs: number;
  /** When set, a `stream` field is added to the request body (Ollama). */
  stream?: boolean;
}): Promise<NextResponse> {
  const body: Record<string, unknown> = {
    model: opts.model,
    max_tokens: 10,
    messages: [{ role: "user", content: "Reply with the word OK" }],
  };
  if (opts.stream !== undefined) body.stream = opts.stream;

  const response = await fetch(opts.url, {
    method: "POST",
    headers: { "Content-Type": "application/json", ...opts.extraHeaders },
    body: JSON.stringify(body),
    signal: AbortSignal.timeout(opts.timeoutMs),
  });
  if (response.ok) {
    return NextResponse.json({ ok: true });
  }
  const text = await response.text().catch(() => "");
  return NextResponse.json({
    ok: false,
    error: `${opts.label} returned ${response.status}: ${text.slice(0, 200)}`,
  });
}

/**
 * POST /api/analyze/test
 *
 * Runs a lightweight probe for a provider to confirm it is working.
 * Always returns 200 — use the `ok` field to check success/failure.
 *
 * Body: { provider: string, ollamaModel?: string }
 * Response: { ok: true } | { ok: false, error: string }
 */
export async function POST(req: NextRequest) {
  let body: unknown;
  try {
    body = await req.json();
  } catch {
    return NextResponse.json({ ok: false, error: "Invalid JSON body" });
  }

  const parsed = TestRequestSchema.safeParse(body);
  if (!parsed.success) {
    // Report the actual schema failure — the previous fixed message claimed
    // `provider` was missing even when a different field was at fault.
    const detail = parsed.error.issues
      .map((i) => `${i.path.join(".") || "body"}: ${i.message}`)
      .join("; ");
    return NextResponse.json({ ok: false, error: `Invalid request: ${detail}` });
  }
  const { provider, ollamaModel } = parsed.data;

  try {
    switch (provider) {
      case "algorithmic":
        // Always available — nothing to probe.
        return NextResponse.json({ ok: true });

      case "anthropic": {
        const apiKey = process.env.ANTHROPIC_API_KEY;
        if (!apiKey) {
          return NextResponse.json({ ok: false, error: "ANTHROPIC_API_KEY not set" });
        }
        return await probeChat({
          url: "https://api.anthropic.com/v1/messages",
          extraHeaders: { "x-api-key": apiKey, "anthropic-version": "2023-06-01" },
          model: process.env.ANTHROPIC_MODEL ?? "claude-opus-4-6",
          label: "Anthropic API",
          timeoutMs: 15000,
        });
      }

      case "openai": {
        const apiKey = process.env.OPENAI_API_KEY;
        if (!apiKey) {
          return NextResponse.json({ ok: false, error: "OPENAI_API_KEY not set" });
        }
        const baseUrl = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
        return await probeChat({
          url: `${baseUrl}/chat/completions`,
          extraHeaders: { Authorization: `Bearer ${apiKey}` },
          model: process.env.OPENAI_MODEL ?? "gpt-4o",
          label: "OpenAI API",
          timeoutMs: 15000,
        });
      }

      case "ollama": {
        const baseUrl = process.env.OLLAMA_BASE_URL ?? "http://localhost:11434";
        if (!ollamaModel) {
          return NextResponse.json({ ok: false, error: "ollamaModel is required for Ollama test" });
        }
        return await probeChat({
          url: `${baseUrl}/v1/chat/completions`,
          extraHeaders: {},
          model: ollamaModel,
          label: "Ollama",
          // Longer timeout: local models may need to load into memory first.
          timeoutMs: 30000,
          stream: false,
        });
      }

      default:
        return NextResponse.json({ ok: false, error: `Unknown provider: ${provider}` });
    }
  } catch (err) {
    // Network failures / timeouts also come back as ok:false with HTTP 200.
    return NextResponse.json({
      ok: false,
      error: err instanceof Error ? err.message : String(err),
    });
  }
}

View File

@@ -1,7 +1,8 @@
import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";
import { getTempoMapsForSong, insertTempoMap, query } from "@/lib/db/client";
import { getTempoMapsForSong, getSongByMbid, insertTempoMap, upsertSong } from "@/lib/db/client";
import { validateCTP } from "@/lib/ctp/validate";
import { lookupRecording, formatArtistCredit, mbDurationToSeconds } from "@/lib/musicbrainz/client";
// ─── GET /api/tracks?mbid=<uuid> ─────────────────────────────────────────────
@@ -55,19 +56,33 @@ export async function POST(req: NextRequest) {
);
}
// Ensure the song exists
const { rowCount } = await query("SELECT 1 FROM songs WHERE mbid = $1", [
doc.metadata.mbid,
]);
// Ensure the song exists — auto-register it if not
const existing = await getSongByMbid(doc.metadata.mbid);
if (!rowCount || rowCount === 0) {
return NextResponse.json(
{
error: "Song not found. Search for the song first to register it.",
if (!existing) {
try {
const rec = await lookupRecording(doc.metadata.mbid);
await upsertSong({
mbid: doc.metadata.mbid,
},
{ status: 404 }
);
title: rec.title,
artist: formatArtistCredit(rec["artist-credit"]),
duration_seconds: mbDurationToSeconds(rec.length),
acousticbrainz_bpm: null,
acousticbrainz_time_sig_num: null,
source: "musicbrainz",
});
} catch {
// MusicBrainz unreachable — fall back to CTP metadata
await upsertSong({
mbid: doc.metadata.mbid,
title: doc.metadata.title,
artist: doc.metadata.artist,
duration_seconds: doc.metadata.duration_seconds,
acousticbrainz_bpm: null,
acousticbrainz_time_sig_num: null,
source: "manual",
});
}
}
const map = await insertTempoMap({

View File

@@ -30,7 +30,10 @@ export default function RootLayout({
<a href="/analyze" className="hover:text-zinc-100 transition-colors">
Analyze
</a>
<
<a href="/settings" className="hover:text-zinc-100 transition-colors">
Settings
</a>
<a
href="https://github.com/your-org/clicktrack"
target="_blank"
rel="noopener noreferrer"

View File

@@ -7,18 +7,31 @@
* 1. User drops / selects an audio file (MP3, WAV, AAC, OGG, etc.)
* 2. Browser decodes the audio and runs BPM detection (Web Audio API)
* 3. Optional: user provides song title, artist, MusicBrainz ID
* 4. Client sends { bpm, duration, … } to POST /api/analyze
* 5. Server calls Claude → returns a CTP document draft
* 6. User can review the sections, download the .ctp.json, or submit to DB
* 4. User selects an analysis provider
* 5. Client sends { bpm, duration, provider, … } to POST /api/analyze
* 6. Server returns a CTP document draft
* 7. User can review the sections, download the .ctp.json, or submit to DB
*/
import { useState, useRef, useCallback } from "react";
import { useState, useRef, useCallback, useEffect } from "react";
import { detectBPM, type BPMDetectionResult } from "@/lib/analysis/bpm-detect";
import TempoMapEditor from "@/components/TempoMapEditor";
import type { CTPDocument } from "@/lib/ctp/schema";
import type { ProviderInfo } from "@/lib/analysis/providers";
import { RECOMMENDED_OLLAMA_MODELS } from "@/lib/analysis/constants";
const PROVIDER_KEY = "clicktrack_analysis_provider";
const MODEL_KEY = "clicktrack_ollama_model";
// ─── Types ────────────────────────────────────────────────────────────────────
interface SongResult {
mbid: string;
title: string;
artist: string;
duration_seconds: number | null;
}
type Stage =
| "idle"
| "decoding"
@@ -36,12 +49,10 @@ interface AnalyzerState {
ctp: CTPDocument | null;
warnings: string[];
errorMsg: string;
// Optional metadata the user may fill in before AI generation
title: string;
artist: string;
mbid: string;
contributedBy: string;
// Toggle: use halfTimeBpm instead of primary bpm
useHalfTime: boolean;
}
@@ -81,10 +92,167 @@ export default function TempoAnalyzer() {
const dropRef = useRef<HTMLDivElement>(null);
const [isDragging, setIsDragging] = useState(false);
// Provider state
const [providers, setProviders] = useState<ProviderInfo[]>([]);
const [ollamaModels, setOllamaModels] = useState<string[]>([]);
const [selectedProvider, setSelectedProvider] = useState<string>("");
const [selectedOllamaModel, setSelectedOllamaModel] = useState<string>("");
const [unavailableNotice, setUnavailableNotice] = useState<string>("");
const [unavailableModelNotice, setUnavailableModelNotice] = useState<string>("");
// Song search
const [songQuery, setSongQuery] = useState("");
const [songResults, setSongResults] = useState<SongResult[]>([]);
const [songDropdownOpen, setSongDropdownOpen] = useState(false);
const [songHighlightIdx, setSongHighlightIdx] = useState(-1);
const [songSearchFailed, setSongSearchFailed] = useState(false);
const [selectedSongLabel, setSelectedSongLabel] = useState("");
const searchDebounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const songDropdownRef = useRef<HTMLDivElement>(null);
const update = (patch: Partial<AnalyzerState>) =>
setState((prev) => ({ ...prev, ...patch }));
// ── File handling ────────────────────────────────────────────────────────
// ── Load providers on mount ───────────────────────────────────────────────
useEffect(() => {
async function loadProviders() {
try {
const res = await fetch("/api/analyze/providers");
const data = await res.json() as { providers: ProviderInfo[]; ollamaModels: string[] };
const available = data.providers.filter((p) => p.available);
setProviders(available);
setOllamaModels(data.ollamaModels);
// Restore saved provider preference
const savedProvider = localStorage.getItem(PROVIDER_KEY);
if (savedProvider) {
const found = available.find((p) => p.id === savedProvider);
if (found) {
setSelectedProvider(found.id);
} else {
// Saved provider no longer available
const unavailable = data.providers.find((p) => p.id === savedProvider);
const label = unavailable?.label ?? savedProvider;
setUnavailableNotice(
`Your previous provider (${label}) is not currently available.`
);
setSelectedProvider(available[0]?.id ?? "");
}
} else {
setSelectedProvider(available[0]?.id ?? "");
}
// Restore saved Ollama model
if (data.ollamaModels.length > 0) {
const savedModel = localStorage.getItem(MODEL_KEY);
if (savedModel && data.ollamaModels.includes(savedModel)) {
setSelectedOllamaModel(savedModel);
} else {
if (savedModel) {
setUnavailableModelNotice(
`Your previous model (${savedModel}) is no longer available.`
);
}
setSelectedOllamaModel(data.ollamaModels[0]);
}
}
} catch {
// Provider fetch failure — app remains functional, generate will fail gracefully
}
}
loadProviders();
}, []);
useEffect(() => {
function onMouseDown(e: MouseEvent) {
if (songDropdownRef.current && !songDropdownRef.current.contains(e.target as Node)) {
setSongDropdownOpen(false);
}
}
document.addEventListener("mousedown", onMouseDown);
return () => document.removeEventListener("mousedown", onMouseDown);
}, []);
// ── Song search ───────────────────────────────────────────────────────────
function handleSongQueryChange(value: string) {
setSongQuery(value);
setSongHighlightIdx(-1);
if (searchDebounceRef.current) clearTimeout(searchDebounceRef.current);
if (!value.trim()) {
setSongResults([]);
setSongDropdownOpen(false);
return;
}
searchDebounceRef.current = setTimeout(async () => {
try {
const res = await fetch(`/api/songs?q=${encodeURIComponent(value)}&limit=5`);
if (!res.ok) throw new Error("Search failed");
const data = await res.json() as { songs: SongResult[] };
setSongResults(data.songs);
setSongDropdownOpen(true);
setSongSearchFailed(false);
} catch {
setSongSearchFailed(true);
setSongDropdownOpen(false);
}
}, 350);
}
function handleSongSelect(song: SongResult) {
update({ mbid: song.mbid, title: song.title, artist: song.artist });
const label = `${song.title} — ${song.artist}`;
setSelectedSongLabel(label);
setSongQuery(label);
setSongDropdownOpen(false);
setSongResults([]);
setSongHighlightIdx(-1);
}
function handleSongClear() {
update({ mbid: "", title: "", artist: "" });
setSelectedSongLabel("");
setSongQuery("");
setSongResults([]);
setSongDropdownOpen(false);
setSongHighlightIdx(-1);
if (searchDebounceRef.current) clearTimeout(searchDebounceRef.current);
}
function handleSongKeyDown(e: React.KeyboardEvent<HTMLInputElement>) {
if (e.key === "ArrowDown") {
e.preventDefault();
setSongHighlightIdx((i) => Math.min(i + 1, songResults.length - 1));
setSongDropdownOpen(true);
} else if (e.key === "ArrowUp") {
e.preventDefault();
setSongHighlightIdx((i) => Math.max(i - 1, 0));
} else if (e.key === "Enter") {
e.preventDefault();
if (songHighlightIdx >= 0 && songResults[songHighlightIdx]) {
handleSongSelect(songResults[songHighlightIdx]);
}
} else if (e.key === "Escape") {
setSongDropdownOpen(false);
}
}
function handleProviderChange(id: string) {
setSelectedProvider(id);
setUnavailableNotice("");
localStorage.setItem(PROVIDER_KEY, id);
}
function handleOllamaModelChange(model: string) {
setSelectedOllamaModel(model);
setUnavailableModelNotice("");
localStorage.setItem(MODEL_KEY, model);
}
// ── File handling ─────────────────────────────────────────────────────────
const handleFile = useCallback(async (file: File) => {
if (!file.type.startsWith("audio/") && !file.name.match(/\.(mp3|wav|aac|ogg|flac|m4a|aiff)$/i)) {
@@ -96,7 +264,6 @@ export default function TempoAnalyzer() {
const abort = new AbortController();
abortRef.current = abort;
// Try to pre-fill title/artist from filename: "Artist - Title.mp3"
const base = file.name.replace(/\.[^.]+$/, "");
const dashIdx = base.indexOf(" - ");
const autoTitle = dashIdx > -1 ? base.slice(dashIdx + 3) : base;
@@ -116,7 +283,7 @@ export default function TempoAnalyzer() {
try {
update({ stage: "detecting" });
const detection = await detectBPM(file, abort.signal);
update({ detection, stage: "idle" }); // wait for user to confirm/edit metadata
update({ detection, stage: "idle" });
} catch (err) {
if ((err as Error).name === "AbortError") return;
update({
@@ -136,12 +303,12 @@ export default function TempoAnalyzer() {
function handleFileInput(e: React.ChangeEvent<HTMLInputElement>) {
const file = e.target.files?.[0];
if (file) handleFile(file);
e.target.value = ""; // reset so re-selecting same file works
e.target.value = "";
}
// ── AI generation ────────────────────────────────────────────────────────
// ── Generation ────────────────────────────────────────────────────────────
async function handleGenerate() {
async function runGenerate(providerId: string, ollamaModel: string) {
if (!state.detection) return;
const effectiveBpm =
@@ -149,38 +316,60 @@ export default function TempoAnalyzer() {
? state.detection.halfTimeBpm
: state.detection.bpm;
update({ stage: "generating", ctp: null, warnings: [] });
update({ stage: "generating", ctp: null, warnings: [], errorMsg: "" });
try {
const res = await fetch("/api/analyze", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
const body: Record<string, unknown> = {
bpm: effectiveBpm,
duration: state.detection.duration,
title: state.title || undefined,
artist: state.artist || undefined,
mbid: state.mbid || undefined,
contributed_by: state.contributedBy || undefined,
}),
provider: providerId,
};
if (providerId === "ollama") {
body.ollamaModel = ollamaModel;
}
const res = await fetch("/api/analyze", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify(body),
});
const data = await res.json();
const data = await res.json() as { ctp?: CTPDocument; warnings?: string[]; error?: string };
if (!res.ok) {
throw new Error(data.error ?? `Server error ${res.status}`);
}
update({ ctp: data.ctp, warnings: data.warnings ?? [], stage: "review" });
update({ ctp: data.ctp ?? null, warnings: data.warnings ?? [], stage: "review" });
}
async function handleGenerate() {
try {
await runGenerate(selectedProvider, selectedOllamaModel);
} catch (err) {
update({
stage: "error",
errorMsg: `Generation failed: ${err instanceof Error ? err.message : String(err)}`,
errorMsg: err instanceof Error ? err.message : String(err),
});
}
}
// ── Submit to DB ─────────────────────────────────────────────────────────
async function handleRetry() {
try {
await runGenerate(selectedProvider, selectedOllamaModel);
} catch (err) {
update({
stage: "error",
errorMsg: err instanceof Error ? err.message : String(err),
});
}
}
// ── Submit to DB ──────────────────────────────────────────────────────────
async function handleSubmit() {
if (!state.ctp) return;
@@ -193,7 +382,7 @@ export default function TempoAnalyzer() {
body: JSON.stringify(state.ctp),
});
const data = await res.json();
const data = await res.json() as { error?: string };
if (!res.ok) {
throw new Error(data.error ?? `Server error ${res.status}`);
}
@@ -207,7 +396,7 @@ export default function TempoAnalyzer() {
}
}
// ── Download CTP file ────────────────────────────────────────────────────
// ── Download CTP file ────────────────────────────────────────────────────
function handleDownload() {
if (!state.ctp) return;
@@ -225,17 +414,26 @@ export default function TempoAnalyzer() {
URL.revokeObjectURL(url);
}
// ── Reset ────────────────────────────────────────────────────────────────
// ── Reset ────────────────────────────────────────────────────────────────
function handleReset() {
abortRef.current?.abort();
setState(INITIAL_STATE);
setSongQuery("");
setSongResults([]);
setSongDropdownOpen(false);
setSelectedSongLabel("");
setSongHighlightIdx(-1);
if (searchDebounceRef.current) clearTimeout(searchDebounceRef.current);
}
// ─── Render ───────────────────────────────────────────────────────────────
const { stage, file, detection, ctp, warnings, errorMsg, useHalfTime } = state;
const isProcessing = stage === "decoding" || stage === "detecting" || stage === "generating" || stage === "saving";
const isProcessing =
stage === "decoding" || stage === "detecting" || stage === "generating" || stage === "saving";
const selectedProviderInfo = providers.find((p) => p.id === selectedProvider);
return (
<div className="space-y-8">
@@ -279,29 +477,80 @@ export default function TempoAnalyzer() {
<p className="font-medium text-zinc-200">
{stage === "decoding" && "Decoding audio…"}
{stage === "detecting" && "Detecting tempo…"}
{stage === "generating" && "Generating tempo map with AI…"}
{stage === "generating" && "Generating tempo map…"}
{stage === "saving" && "Saving to database…"}
</p>
{stage === "generating" && (
{stage === "generating" && selectedProviderInfo?.id === "ollama" && (
<p className="mt-1 text-sm text-zinc-500">
Claude is analysing the song structure — this takes ~5–15 seconds.
Local AI generation may take 30–90 seconds depending on your hardware.
</p>
)}
{stage === "generating" && selectedProviderInfo?.id !== "ollama" && (
<p className="mt-1 text-sm text-zinc-500">
Analysing song structure — this takes ~5–15 seconds.
</p>
)}
</div>
)}
{/* ── Error ─────────────────────────────────────────────────────── */}
{/* ── Error panel ────────────────────────────────────────────────── */}
{stage === "error" && (
<div className="rounded-xl border border-red-800 bg-red-950/30 px-6 py-5">
<p className="text-red-400 font-medium mb-1">Error</p>
<div className="rounded-xl border border-red-800 bg-red-950/30 px-6 py-5 space-y-4">
<div>
<p className="text-red-400 font-medium mb-1">Generation failed</p>
<p className="text-sm text-red-300">{errorMsg}</p>
</div>
{/* Provider selector in error state */}
{providers.length >= 2 && (
<div className="space-y-2">
<label className="block text-xs text-zinc-500">Analysis provider</label>
<select
value={selectedProvider}
onChange={(e) => handleProviderChange(e.target.value)}
className="rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 focus:border-green-500 focus:outline-none"
>
{providers.map((p) => (
<option key={p.id} value={p.id}>
{p.label}
</option>
))}
</select>
</div>
)}
{selectedProvider === "ollama" && ollamaModels.length > 0 && (
<div className="space-y-2">
<label className="block text-xs text-zinc-500">Ollama model</label>
<select
value={selectedOllamaModel}
onChange={(e) => handleOllamaModelChange(e.target.value)}
className="rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 focus:border-green-500 focus:outline-none"
>
{ollamaModels.map((m) => (
<option key={m} value={m}>
{RECOMMENDED_OLLAMA_MODELS.includes(m) ? `${m}` : m}
</option>
))}
</select>
</div>
)}
<div className="flex gap-3">
<button
onClick={handleRetry}
className="rounded-lg bg-red-800/60 px-4 py-2 text-sm font-medium text-red-200 hover:bg-red-700/60 transition-colors"
>
Retry
</button>
<button
onClick={handleReset}
className="mt-4 text-sm text-zinc-400 hover:text-zinc-200 underline"
className="text-sm text-zinc-400 hover:text-zinc-200 underline self-center"
>
Try again
Start over
</button>
</div>
</div>
)}
{/* ── Detection results + metadata form ─────────────────────────── */}
@@ -371,12 +620,83 @@ export default function TempoAnalyzer() {
</p>
)}
{/* Metadata form */}
{/* Metadata form + provider selector */}
{stage === "idle" && (
<>
{/* Song search */}
<div ref={songDropdownRef} className="relative">
<div className="flex items-center justify-between mb-1">
<label className="block text-xs text-zinc-500">Song</label>
{selectedSongLabel && (
<button
type="button"
onClick={handleSongClear}
className="text-xs text-zinc-500 hover:text-zinc-300"
>
Clear
</button>
)}
</div>
{songSearchFailed ? (
<div className="space-y-1">
<p className="text-xs text-amber-400">Search unavailable — enter MusicBrainz ID manually:</p>
<input
value={state.mbid}
onChange={(e) => update({ mbid: e.target.value })}
placeholder="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
className="w-full rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm font-mono text-zinc-100 placeholder:text-zinc-600 focus:border-green-500 focus:outline-none"
/>
</div>
) : (
<input
value={songQuery}
onChange={(e) => {
const val = e.target.value;
if (selectedSongLabel) {
update({ mbid: "", title: "", artist: "" });
setSelectedSongLabel("");
}
handleSongQueryChange(val);
}}
onKeyDown={handleSongKeyDown}
placeholder="Search by title or artist…"
autoComplete="off"
className="w-full rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 placeholder:text-zinc-600 focus:border-green-500 focus:outline-none"
/>
)}
{songDropdownOpen && (
<div className="absolute z-10 mt-1 w-full rounded-lg border border-zinc-700 bg-zinc-900 shadow-xl overflow-hidden">
{songResults.length === 0 ? (
<p className="px-3 py-2 text-sm text-zinc-500">
No songs found — try the search page first to register the song.
</p>
) : (
songResults.map((song, i) => (
<button
key={song.mbid}
type="button"
onMouseDown={(e) => { e.preventDefault(); handleSongSelect(song); }}
className={`w-full text-left px-3 py-2 text-sm transition-colors ${
i === songHighlightIdx
? "bg-zinc-700 text-zinc-100"
: "text-zinc-300 hover:bg-zinc-800"
}`}
>
<span className="font-medium">{song.title}</span>
<span className="text-zinc-500"> — {song.artist}</span>
</button>
))
)}
</div>
)}
</div>
{/* Title + artist manual overrides */}
<div className="grid gap-3 sm:grid-cols-2">
<div>
<label className="block text-xs text-zinc-500 mb-1">Song title</label>
<label className="block text-xs text-zinc-500 mb-1">
Song title <span className="text-zinc-600">(override)</span>
</label>
<input
value={state.title}
onChange={(e) => update({ title: e.target.value })}
@@ -385,7 +705,9 @@ export default function TempoAnalyzer() {
/>
</div>
<div>
<label className="block text-xs text-zinc-500 mb-1">Artist</label>
<label className="block text-xs text-zinc-500 mb-1">
Artist <span className="text-zinc-600">(override)</span>
</label>
<input
value={state.artist}
onChange={(e) => update({ artist: e.target.value })}
@@ -393,18 +715,9 @@ export default function TempoAnalyzer() {
className="w-full rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 placeholder:text-zinc-600 focus:border-green-500 focus:outline-none"
/>
</div>
<div>
<label className="block text-xs text-zinc-500 mb-1">
MusicBrainz ID{" "}
<span className="text-zinc-600">(optional)</span>
</label>
<input
value={state.mbid}
onChange={(e) => update({ mbid: e.target.value })}
placeholder="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
className="w-full rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm font-mono text-zinc-100 placeholder:text-zinc-600 focus:border-green-500 focus:outline-none"
/>
</div>
{/* Contributed by */}
<div>
<label className="block text-xs text-zinc-500 mb-1">Your name / handle</label>
<input
@@ -414,13 +727,76 @@ export default function TempoAnalyzer() {
className="w-full rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 placeholder:text-zinc-600 focus:border-green-500 focus:outline-none"
/>
</div>
{/* Provider selector */}
{unavailableNotice && (
<div className="rounded-lg border border-amber-800/50 bg-amber-950/20 px-4 py-3">
<p className="text-sm text-amber-300">{unavailableNotice}</p>
</div>
)}
{providers.length >= 2 && (
<div className="space-y-2">
<label className="block text-xs text-zinc-500">Analysis provider</label>
<select
value={selectedProvider}
onChange={(e) => handleProviderChange(e.target.value)}
className="rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 focus:border-green-500 focus:outline-none"
>
{providers.map((p) => (
<option key={p.id} value={p.id}>
{p.label}
</option>
))}
</select>
</div>
)}
{/* Ollama model selector */}
{selectedProvider === "ollama" && ollamaModels.length > 0 && (
<div className="space-y-2">
{unavailableModelNotice && (
<div className="rounded-lg border border-amber-800/50 bg-amber-950/20 px-3 py-2">
<p className="text-xs text-amber-300">{unavailableModelNotice}</p>
</div>
)}
<label className="block text-xs text-zinc-500">Ollama model</label>
<select
value={selectedOllamaModel}
onChange={(e) => handleOllamaModelChange(e.target.value)}
className="rounded-lg border border-zinc-700 bg-zinc-800 px-3 py-2 text-sm text-zinc-100 focus:border-green-500 focus:outline-none"
>
{ollamaModels.map((m) => (
<option key={m} value={m}>
{RECOMMENDED_OLLAMA_MODELS.includes(m) ? `${m}` : m}
</option>
))}
</select>
</div>
)}
{/* Context-aware callouts */}
{selectedProvider === "algorithmic" && (
<div className="rounded-lg border border-zinc-700 bg-zinc-800/40 px-4 py-3">
<p className="text-sm text-zinc-400">
Algorithmic mode estimates song structure from BPM and duration. Section labels and bar counts are approximate — review carefully before submitting.
</p>
</div>
)}
{selectedProvider === "ollama" && (
<div className="rounded-lg border border-zinc-700 bg-zinc-800/40 px-4 py-3">
<p className="text-sm text-zinc-400">
Local AI generation may take 30–90 seconds depending on your hardware.
</p>
</div>
)}
<button
onClick={handleGenerate}
className="w-full rounded-lg bg-green-600 py-3 font-semibold text-white hover:bg-green-500 transition-colors"
>
Generate tempo map with AI
Generate tempo map
</button>
</>
)}

View File

@@ -0,0 +1,63 @@
"use client";
import { RECOMMENDED_OLLAMA_MODELS } from "@/lib/analysis/constants";
// Props for the Ollama model radio list.
interface OllamaModelPickerProps {
  models: string[]; // installed model names as reported by the server
  value: string; // currently selected model name
  onChange: (model: string) => void; // called with the newly selected model name
  onRefresh: () => void; // re-fetches the installed model list
  refreshing: boolean; // true while a refresh is in flight; disables the button
}
/**
 * Radio-button picker for locally installed Ollama models.
 *
 * Shows a "pull a model" hint when nothing is installed, tags models that
 * appear in RECOMMENDED_OLLAMA_MODELS, and exposes a refresh action.
 */
export default function OllamaModelPicker({
  models,
  value,
  onChange,
  onRefresh,
  refreshing,
}: OllamaModelPickerProps) {
  // Empty state: no models installed — point the user at `ollama pull`.
  if (models.length === 0) {
    return (
      <p className="text-sm text-zinc-500">
        No models found. Pull a model with{" "}
        <code className="text-zinc-400">ollama pull qwen2.5:7b</code> and refresh.
      </p>
    );
  }
  return (
    <div className="space-y-3">
      <div className="flex flex-col gap-2">
        {models.map((model) => {
          // Tag models from the curated recommended list.
          const isRecommended = RECOMMENDED_OLLAMA_MODELS.includes(model);
          return (
            <label key={model} className="flex items-center gap-3 cursor-pointer group">
              <input
                type="radio"
                name="ollama-model"
                value={model}
                checked={value === model}
                onChange={() => onChange(model)}
                className="accent-green-500"
              />
              <span className="text-sm text-zinc-300 group-hover:text-zinc-100 transition-colors">
                {model}
              </span>
              {isRecommended && (
                <span className="text-xs text-amber-400"> recommended</span>
              )}
            </label>
          );
        })}
      </div>
      <button
        onClick={onRefresh}
        disabled={refreshing}
        className="text-xs text-zinc-500 hover:text-zinc-300 underline disabled:opacity-50"
      >
        {refreshing ? "Refreshing…" : "Refresh model list"}
      </button>
    </div>
  );
}

View File

@@ -0,0 +1,79 @@
"use client";
import { useState, useEffect } from "react";
// localStorage keys written by the analyze page when the user picks a provider/model.
const PROVIDER_KEY = "clicktrack_analysis_provider";
const MODEL_KEY = "clicktrack_ollama_model";
/**
 * Settings panel for the "remember my last …" localStorage preferences.
 *
 * A checkbox reads as ON when its backing key currently exists. Turning a
 * checkbox OFF removes the key immediately; turning it ON writes nothing here
 * (the key reappears the next time a provider/model is selected elsewhere).
 */
export default function PreferencesPanel() {
  const [providerRemembered, setProviderRemembered] = useState(true);
  const [modelRemembered, setModelRemembered] = useState(true);
  const [showCleared, setShowCleared] = useState(false);
  // On mount, reflect what is actually persisted right now.
  useEffect(() => {
    setProviderRemembered(localStorage.getItem(PROVIDER_KEY) !== null);
    setModelRemembered(localStorage.getItem(MODEL_KEY) !== null);
  }, []);
  // Toggling OFF deletes the stored value; toggling ON only updates the UI.
  const toggleProvider = (on: boolean) => {
    setProviderRemembered(on);
    if (!on) localStorage.removeItem(PROVIDER_KEY);
  };
  const toggleModel = (on: boolean) => {
    setModelRemembered(on);
    if (!on) localStorage.removeItem(MODEL_KEY);
  };
  // Remove both keys and flash a confirmation message for 3 seconds.
  const clearAll = () => {
    localStorage.removeItem(PROVIDER_KEY);
    localStorage.removeItem(MODEL_KEY);
    setProviderRemembered(false);
    setModelRemembered(false);
    setShowCleared(true);
    setTimeout(() => setShowCleared(false), 3000);
  };
  return (
    <div className="space-y-4">
      <label className="flex items-center gap-3 cursor-pointer">
        <input
          type="checkbox"
          checked={providerRemembered}
          onChange={(e) => toggleProvider(e.target.checked)}
          className="accent-green-500 w-4 h-4"
        />
        <span className="text-sm text-zinc-300">Remember my last provider selection</span>
      </label>
      <label className="flex items-center gap-3 cursor-pointer">
        <input
          type="checkbox"
          checked={modelRemembered}
          onChange={(e) => toggleModel(e.target.checked)}
          className="accent-green-500 w-4 h-4"
        />
        <span className="text-sm text-zinc-300">Remember my last Ollama model</span>
      </label>
      <div className="pt-1">
        <button
          onClick={clearAll}
          className="rounded-lg border border-zinc-700 px-4 py-2 text-sm text-zinc-400 hover:border-zinc-500 hover:text-zinc-200 transition-colors"
        >
          Clear saved preferences
        </button>
        {showCleared && (
          <span className="ml-3 text-sm text-green-400">Preferences cleared.</span>
        )}
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,114 @@
"use client";
import { useState } from "react";
import type { ProviderInfo } from "@/lib/analysis/providers";
// Props for a single provider card on the settings page.
interface ProviderStatusProps {
  provider: ProviderInfo;
  selectedOllamaModel?: string; // forwarded to the test endpoint for the ollama provider
  isDefault: boolean; // whether this provider is the current default
  onSetDefault: (id: string) => void; // called with the provider id to make it default
}
// Human-readable badge text per provider type.
const TYPE_LABELS: Record<ProviderInfo["type"], string> = {
  "cloud-ai": "Cloud AI",
  "local-ai": "Local AI",
  "algorithmic": "No AI",
};
/**
 * Settings-page card showing one analysis provider's availability, type and
 * default status, with a "Test" button that round-trips POST /api/analyze/test.
 */
export default function ProviderStatus({
  provider,
  selectedOllamaModel,
  isDefault,
  onSetDefault,
}: ProviderStatusProps) {
  // Lifecycle of the most recent "Test" click; testError is only meaningful
  // while testStatus === "error".
  const [testStatus, setTestStatus] = useState<"idle" | "testing" | "ok" | "error">("idle");
  const [testError, setTestError] = useState("");
  // POST to the test endpoint and reflect the result in local state.
  // Never throws: network/parse failures land in the "error" state.
  async function handleTest() {
    setTestStatus("testing");
    setTestError("");
    try {
      const res = await fetch("/api/analyze/test", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          provider: provider.id,
          // The model only matters for ollama; omit it otherwise.
          ollamaModel: provider.id === "ollama" ? selectedOllamaModel : undefined,
        }),
      });
      const data = await res.json() as { ok: boolean; error?: string };
      if (data.ok) {
        setTestStatus("ok");
      } else {
        setTestStatus("error");
        setTestError(data.error ?? "Unknown error");
      }
    } catch (err) {
      setTestStatus("error");
      setTestError(err instanceof Error ? err.message : String(err));
    }
  }
  return (
    <div className="rounded-lg border border-zinc-800 bg-zinc-900/60 p-4">
      <div className="flex items-start justify-between gap-3">
        <div className="flex-1 min-w-0">
          <div className="flex items-center gap-2 flex-wrap">
            <span className="font-medium text-zinc-200">{provider.label}</span>
            <span className="rounded px-1.5 py-0.5 text-xs bg-zinc-800 text-zinc-400">
              {TYPE_LABELS[provider.type]}
            </span>
            {provider.available ? (
              <span className="rounded px-1.5 py-0.5 text-xs bg-green-900/50 text-green-400">
                Available
              </span>
            ) : (
              <span className="rounded px-1.5 py-0.5 text-xs bg-zinc-800 text-zinc-500">
                Unavailable
              </span>
            )}
            {isDefault && (
              <span className="rounded px-1.5 py-0.5 text-xs bg-blue-900/50 text-blue-400">
                Default
              </span>
            )}
          </div>
          {!provider.available && provider.unavailableReason && (
            <p className="mt-1 text-xs text-zinc-600">{provider.unavailableReason}</p>
          )}
          {provider.id === "ollama" && provider.available && provider.ollamaBaseUrl && (
            <p className="mt-1 text-xs text-zinc-600 font-mono">{provider.ollamaBaseUrl}</p>
          )}
        </div>
        {provider.available && (
          <div className="flex items-center gap-2 shrink-0">
            {!isDefault && (
              <button
                onClick={() => onSetDefault(provider.id)}
                className="text-xs text-zinc-500 hover:text-zinc-300 underline"
              >
                Set as default
              </button>
            )}
            <button
              onClick={handleTest}
              disabled={testStatus === "testing"}
              className="rounded px-3 py-1 text-xs font-medium border border-zinc-700 text-zinc-300 hover:border-zinc-500 hover:text-zinc-100 transition-colors disabled:opacity-50"
            >
              {testStatus === "testing" ? "Testing…" : "Test"}
            </button>
          </div>
        )}
      </div>
      {testStatus === "ok" && (
        <p className="mt-2 text-xs text-green-400"> Working</p>
      )}
      {testStatus === "error" && (
        <p className="mt-2 text-xs text-red-400"> {testError}</p>
      )}
    </div>
  );
}

View File

@@ -1,179 +1,17 @@
/**
* AI-assisted CTP document generation
*
* Takes the results of BPM detection (and optional song metadata) and uses
* Claude to produce a plausible, well-structured CTP document.
*
* Claude is asked to:
* - Divide the song into typical sections (Intro, Verse, Chorus, Bridge…)
* - Assign realistic start bars for each section
* - Note any tempo changes it would expect for the song/genre
* - Return a fully valid CTP 1.0 JSON document
*
* The caller should treat the result as a *draft* — the generated sections
* are educated guesses and should be verified against the recording.
* Re-exports from the Anthropic provider for backwards compatibility.
* @deprecated Import directly from @/lib/analysis/providers/anthropic instead.
*/
export type { AnalysisInput } from "@/lib/analysis/providers";
export { anthropicProvider as default } from "@/lib/analysis/providers/anthropic";
import Anthropic from "@anthropic-ai/sdk";
import type { CTPDocument } from "@/lib/ctp/schema";
// Legacy named export for any remaining callers
import { anthropicProvider } from "@/lib/analysis/providers/anthropic";
import type { AnalysisInput } from "@/lib/analysis/providers";
const client = new Anthropic();
// ─── Input / output types ─────────────────────────────────────────────────────
export interface AnalysisInput {
bpm: number;
duration: number; // seconds
title?: string;
artist?: string;
mbid?: string | null;
contributedBy?: string;
}
// ─── JSON Schema for structured output ───────────────────────────────────────
// Must be strict (no additionalProperties, all required fields present).
const CTP_SCHEMA = {
type: "object",
additionalProperties: false,
required: ["version", "metadata", "count_in", "sections"],
properties: {
version: { type: "string", enum: ["1.0"] },
metadata: {
type: "object",
additionalProperties: false,
required: [
"title", "artist", "mbid", "duration_seconds",
"contributed_by", "verified", "created_at",
],
properties: {
title: { type: "string" },
artist: { type: "string" },
mbid: { type: ["string", "null"] },
duration_seconds: { type: "number" },
contributed_by: { type: "string" },
verified: { type: "boolean" },
created_at: { type: "string" },
},
},
count_in: {
type: "object",
additionalProperties: false,
required: ["enabled", "bars", "use_first_section_tempo"],
properties: {
enabled: { type: "boolean" },
bars: { type: "integer", minimum: 1, maximum: 8 },
use_first_section_tempo: { type: "boolean" },
},
},
sections: {
type: "array",
minItems: 1,
items: {
type: "object",
additionalProperties: false,
required: ["label", "start_bar", "time_signature", "transition"],
// bpm is required for step, bpm_start/bpm_end for ramp — handled via oneOf
// but we keep this schema simple (strict mode) and validate downstream with Zod.
properties: {
label: { type: "string" },
start_bar: { type: "integer", minimum: 1 },
bpm: { type: "number" },
bpm_start: { type: "number" },
bpm_end: { type: "number" },
transition: { type: "string", enum: ["step", "ramp"] },
time_signature: {
type: "object",
additionalProperties: false,
required: ["numerator", "denominator"],
properties: {
numerator: { type: "integer", minimum: 1, maximum: 32 },
denominator: { type: "integer", enum: [1, 2, 4, 8, 16, 32] },
},
},
},
},
},
},
};
// ─── System prompt ────────────────────────────────────────────────────────────
const SYSTEM_PROMPT = `\
You are an expert music producer and session musician assisting cover bands with click tracks.
You will receive automated BPM detection results for a song and must generate a CTP (Click Track Protocol) document describing the song's full tempo map.
CTP rules:
- "version" must be "1.0"
- sections[0].start_bar must be 1
- sections must be sorted by start_bar ascending, with no gaps
- Step sections have a single "bpm" field; ramp sections have "bpm_start" and "bpm_end" (no "bpm" field)
- All BPM values must be between 20 and 400
- time_signature.denominator must be a power of 2 (1, 2, 4, 8, 16, or 32)
- metadata.verified must be false (this is AI-generated, not human-verified)
- metadata.created_at must be an ISO 8601 datetime string
Guidelines for section layout:
- Use typical pop/rock section names: Intro, Verse, Pre-Chorus, Chorus, Bridge, Outro
- Estimate bar counts based on song duration and BPM (bars = duration_seconds × BPM / 60 / beats_per_bar)
- Most songs are 4/4; note any unusual meters if you know the song
- If you know the song has a tempo change (ritardando, double-time feel, key change with tempo shift), model it with a ramp or step section
- If unsure about sections, use a single constant-tempo section covering the whole song
- Use the detected BPM as the primary tempo — do not invent a different BPM unless the song is well-known to have a different tempo
The output is a draft for human review. Add reasonable section structure based on the song's typical arrangement.`;
// ─── Main function ────────────────────────────────────────────────────────────
export async function generateCTPWithAI(input: AnalysisInput): Promise<CTPDocument> {
const { bpm, duration, title, artist, mbid, contributedBy } = input;
const approxBars = Math.round((duration * bpm) / 60 / 4); // assuming 4/4
const userMessage = `\
Generate a CTP document for the following song:
Title: ${title ?? "Unknown Title"}
Artist: ${artist ?? "Unknown Artist"}
MusicBrainz ID: ${mbid ?? "unknown"}
Detected BPM: ${bpm}
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
Contributed by: ${contributedBy ?? "anonymous"}
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;
const response = await client.messages.create({
model: "claude-opus-4-6",
max_tokens: 2048,
thinking: { type: "adaptive" },
system: SYSTEM_PROMPT,
messages: [{ role: "user", content: userMessage }],
output_config: {
format: {
type: "json_schema",
schema: CTP_SCHEMA,
},
},
export async function generateCTPWithAI(input: AnalysisInput & { contributedBy?: string }) {
return anthropicProvider.generateCTP({
...input,
contributed_by: input.contributed_by ?? input.contributedBy ?? "anonymous",
});
const textBlock = response.content.find((b) => b.type === "text");
if (!textBlock || textBlock.type !== "text") {
throw new Error("Claude did not return a text block");
}
let parsed: unknown;
try {
parsed = JSON.parse(textBlock.text);
} catch {
throw new Error(`Claude returned invalid JSON: ${textBlock.text.slice(0, 200)}`);
}
// Stamp the current timestamp if Claude left a placeholder
const doc = parsed as CTPDocument;
if (!doc.metadata.created_at || doc.metadata.created_at.includes("placeholder")) {
doc.metadata.created_at = new Date().toISOString();
}
return doc;
}

View File

@@ -0,0 +1,3 @@
// Safe to import from client components — no SDK dependencies.
/** Models surfaced as "recommended" in the Ollama model pickers. */
export const RECOMMENDED_OLLAMA_MODELS: string[] = [
  "qwen2.5:7b",
  "llama3.1:8b",
  "mistral:7b",
];

29
lib/analysis/providers.ts Normal file
View File

@@ -0,0 +1,29 @@
import type { CTPDocument } from '@/lib/ctp/schema';
/**
 * Input to every analysis provider: client-side BPM detection results plus
 * optional song metadata.
 */
export interface AnalysisInput {
  bpm: number;
  duration: number; // seconds
  title?: string;
  artist?: string;
  mbid?: string | null; // MusicBrainz recording id, when known
  contributed_by: string; // attribution stamped into the generated document
  ollamaModel?: string; // required when provider id is "ollama"
}
/**
 * Serializable provider description safe to send to client components
 * (e.g. the settings page's ProviderStatus card).
 */
export interface ProviderInfo {
  id: string;
  label: string;
  type: 'cloud-ai' | 'local-ai' | 'algorithmic';
  available: boolean;
  unavailableReason?: string; // present only when available === false
  ollamaBaseUrl?: string; // present only for the ollama provider
}
/**
 * Contract implemented by each tempo-map analysis backend
 * (anthropic, openai, ollama, algorithmic).
 */
export interface AnalysisProvider {
  id: string;
  label: string;
  type: 'cloud-ai' | 'local-ai' | 'algorithmic';
  /** Returns true if this provider is configured and reachable. Must not throw. */
  isAvailable(): Promise<{ available: boolean; reason?: string }>;
  /** Generate a draft CTP document; may reject on backend failure. */
  generateCTP(input: AnalysisInput): Promise<CTPDocument>;
}

View File

@@ -0,0 +1,131 @@
import type { CTPDocument } from "@/lib/ctp/schema";
import type { AnalysisInput, AnalysisProvider } from "@/lib/analysis/providers";
// Section templates keyed by duration bucket
interface SectionTemplate {
  labels: string[]; // section names, in playback order
  weights: number[]; // fraction of total bars given to each section (sums to ~1)
}
/**
 * Choose a section layout template for a song of the given duration (seconds).
 * Longer songs get progressively richer structures.
 */
function getTemplate(duration: number): SectionTemplate {
  if (duration < 120) {
    return {
      labels: ["Intro", "Verse / Chorus", "Outro"],
      weights: [0.12, 0.76, 0.12],
    };
  }
  if (duration < 240) {
    return {
      labels: ["Intro", "Verse", "Chorus", "Verse + Chorus", "Outro"],
      weights: [0.08, 0.22, 0.20, 0.38, 0.12],
    };
  }
  if (duration < 360) {
    return {
      labels: ["Intro", "Verse", "Chorus", "Verse + Chorus", "Bridge", "Outro"],
      weights: [0.07, 0.20, 0.18, 0.33, 0.10, 0.12],
    };
  }
  return {
    labels: ["Intro", "Verse", "Chorus", "Verse + Chorus", "Instrumental", "Bridge", "Outro"],
    weights: [0.06, 0.18, 0.16, 0.30, 0.10, 0.10, 0.10],
  };
}
/**
 * Minimal single-section 4/4 document used when section allocation fails:
 * one constant-tempo "Song" section at the detected BPM with a 2-bar count-in.
 */
function buildFallback(input: AnalysisInput): CTPDocument {
  return {
    version: "1.0",
    metadata: {
      title: input.title ?? "Unknown Title",
      artist: input.artist ?? "Unknown Artist",
      mbid: input.mbid ?? null,
      duration_seconds: input.duration,
      contributed_by: input.contributed_by,
      verified: false,
      created_at: new Date().toISOString(),
    },
    count_in: {
      enabled: true,
      bars: 2,
      use_first_section_tempo: true,
    },
    sections: [
      {
        label: "Song",
        start_bar: 1,
        bpm: input.bpm,
        time_signature: { numerator: 4, denominator: 4 },
        transition: "step",
      },
    ],
  };
}
export const algorithmicProvider: AnalysisProvider = {
  id: "algorithmic",
  label: "Algorithmic (no AI)",
  type: "algorithmic",
  // Pure computation — always usable, no configuration required.
  async isAvailable() {
    return { available: true };
  },
  /**
   * Build a draft CTP document from BPM and duration alone, distributing bars
   * across a duration-bucketed section template. Never rejects: any internal
   * error falls back to a single-section document.
   */
  async generateCTP(input: AnalysisInput): Promise<CTPDocument> {
    try {
      const { bpm, duration, title } = input;
      // Crude meter guess from the title; everything else is assumed 4/4.
      const lowerTitle = title?.toLowerCase() ?? "";
      const numerator = lowerTitle.includes("waltz") || lowerTitle.includes("3/4") ? 3 : 4;
      const timeSignature = { numerator, denominator: 4 as const };
      // bars = duration_seconds × BPM / 60 / beats_per_bar.
      // Fix: previously hard-coded 4 beats per bar (/240), which undercounted
      // bars for 3/4 songs; use the guessed numerator. Clamp to ≥1 so very
      // short inputs still produce a valid document.
      const totalBars = Math.max(1, Math.floor((duration * bpm) / (60 * numerator)));
      const { labels, weights } = getTemplate(duration);
      // Allocate bars per section proportionally to the template weights.
      const rawBars = weights.map((w) => Math.round(totalBars * w));
      // Absorb rounding drift into the final section so the total is exact,
      // keeping it at least one bar long.
      const allocatedSum = rawBars.reduce((a, b) => a + b, 0);
      rawBars[rawBars.length - 1] = Math.max(
        1,
        rawBars[rawBars.length - 1] + (totalBars - allocatedSum)
      );
      // Build sections with cumulative start_bar. Sections that rounded down
      // to zero bars are dropped — duplicate start_bars would violate the CTP
      // rule that sections ascend with no gaps. The first emitted section
      // always starts at bar 1.
      let currentBar = 1;
      const sections: CTPDocument["sections"] = [];
      for (let i = 0; i < labels.length; i++) {
        if (rawBars[i] < 1) continue;
        sections.push({
          label: labels[i],
          start_bar: currentBar,
          bpm,
          time_signature: timeSignature,
          transition: "step" as const,
        });
        currentBar += rawBars[i];
      }
      return {
        version: "1.0",
        metadata: {
          title: input.title ?? "Unknown Title",
          artist: input.artist ?? "Unknown Artist",
          mbid: input.mbid ?? null,
          duration_seconds: duration,
          contributed_by: input.contributed_by,
          verified: false,
          created_at: new Date().toISOString(),
        },
        count_in: {
          enabled: true,
          bars: 2,
          use_first_section_tempo: true,
        },
        sections,
      };
    } catch {
      // Algorithmic provider must never surface an error.
      return buildFallback(input);
    }
  },
};

View File

@@ -0,0 +1,181 @@
import 'server-only';
import Anthropic from "@anthropic-ai/sdk";
import type { CTPDocument } from "@/lib/ctp/schema";
import type { AnalysisInput, AnalysisProvider } from "@/lib/analysis/providers";
// Extract the non-streaming Message type from the SDK without relying on internal paths
type AnthropicMessage = Extract<
  Awaited<ReturnType<Anthropic["messages"]["create"]>>,
  { content: unknown[] }
>;
// Pass the key explicitly (defaulting to "") instead of letting the SDK read
// the environment itself: the SDK constructor throws when ANTHROPIC_API_KEY is
// unset, which would crash this module at import time even when callers only
// need isAvailable() to report the provider as unavailable. With an empty key,
// construction succeeds and any actual request fails with an auth error — and
// isAvailable() already gates requests on the env var being set.
const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY ?? "" });
// ─── JSON Schema for structured output ───────────────────────────────────────
// Strict-mode schema (no additionalProperties, every required field listed)
// describing a CTP 1.0 document, fed to the model as a structured-output
// format. Step sections need "bpm" while ramp sections need
// "bpm_start"/"bpm_end", but that conditionality is not expressible in a
// strict flat schema, so all three fields are optional here — presumably
// validated downstream against the real CTP schema (confirm).
// Also imported by the OpenAI-compatible provider.
export const CTP_SCHEMA = {
  type: "object",
  additionalProperties: false,
  required: ["version", "metadata", "count_in", "sections"],
  properties: {
    version: { type: "string", enum: ["1.0"] },
    metadata: {
      type: "object",
      additionalProperties: false,
      required: [
        "title", "artist", "mbid", "duration_seconds",
        "contributed_by", "verified", "created_at",
      ],
      properties: {
        title: { type: "string" },
        artist: { type: "string" },
        mbid: { type: ["string", "null"] },
        duration_seconds: { type: "number" },
        contributed_by: { type: "string" },
        verified: { type: "boolean" },
        created_at: { type: "string" },
      },
    },
    count_in: {
      type: "object",
      additionalProperties: false,
      required: ["enabled", "bars", "use_first_section_tempo"],
      properties: {
        enabled: { type: "boolean" },
        bars: { type: "integer", minimum: 1, maximum: 8 },
        use_first_section_tempo: { type: "boolean" },
      },
    },
    sections: {
      type: "array",
      minItems: 1,
      items: {
        type: "object",
        additionalProperties: false,
        required: ["label", "start_bar", "time_signature", "transition"],
        properties: {
          label: { type: "string" },
          start_bar: { type: "integer", minimum: 1 },
          bpm: { type: "number" },
          bpm_start: { type: "number" },
          bpm_end: { type: "number" },
          transition: { type: "string", enum: ["step", "ramp"] },
          time_signature: {
            type: "object",
            additionalProperties: false,
            required: ["numerator", "denominator"],
            properties: {
              numerator: { type: "integer", minimum: 1, maximum: 32 },
              denominator: { type: "integer", enum: [1, 2, 4, 8, 16, 32] },
            },
          },
        },
      },
    },
  },
};
// ─── System prompt ────────────────────────────────────────────────────────────
// Shared by the Ollama and OpenAI-compatible providers as well (they import it
// from this module), so edits here affect every AI-backed provider.
export const SYSTEM_PROMPT = `\
You are an expert music producer and session musician assisting cover bands with click tracks.
You will receive automated BPM detection results for a song and must generate a CTP (Click Track Protocol) document describing the song's full tempo map.
CTP rules:
- "version" must be "1.0"
- sections[0].start_bar must be 1
- sections must be sorted by start_bar ascending, with no gaps
- Step sections have a single "bpm" field; ramp sections have "bpm_start" and "bpm_end" (no "bpm" field)
- All BPM values must be between 20 and 400
- time_signature.denominator must be a power of 2 (1, 2, 4, 8, 16, or 32)
- metadata.verified must be false (this is AI-generated, not human-verified)
- metadata.created_at must be an ISO 8601 datetime string
Guidelines for section layout:
- Use typical pop/rock section names: Intro, Verse, Pre-Chorus, Chorus, Bridge, Outro
- Estimate bar counts based on song duration and BPM (bars = duration_seconds × BPM / 60 / beats_per_bar)
- Most songs are 4/4; note any unusual meters if you know the song
- If you know the song has a tempo change (ritardando, double-time feel, key change with tempo shift), model it with a ramp or step section
- If unsure about sections, use a single constant-tempo section covering the whole song
- Use the detected BPM as the primary tempo — do not invent a different BPM unless the song is well-known to have a different tempo
The output is a draft for human review. Add reasonable section structure based on the song's typical arrangement.`;
// ─── Provider implementation ──────────────────────────────────────────────────
/**
 * Cloud AI provider backed by Anthropic's Messages API with structured
 * (JSON-schema) output. The result is a draft for human review.
 */
export const anthropicProvider: AnalysisProvider = {
  id: "anthropic",
  label: "Claude (Anthropic)",
  type: "cloud-ai",
  // Configuration-only check; does not verify the key against the API.
  async isAvailable() {
    if (process.env.ANTHROPIC_API_KEY) {
      return { available: true };
    }
    return { available: false, reason: "ANTHROPIC_API_KEY not set" };
  },
  /**
   * Generate a draft CTP document with Claude. Model defaults to
   * claude-opus-4-6, overridable via ANTHROPIC_MODEL. Throws when the
   * response contains no text block or the text is not valid JSON.
   */
  async generateCTP(input: AnalysisInput): Promise<CTPDocument> {
    const { bpm, duration, title, artist, mbid, contributed_by } = input;
    const model = process.env.ANTHROPIC_MODEL ?? "claude-opus-4-6";
    // Rough bar estimate shown to the model, assuming 4/4.
    const approxBars = Math.round((duration * bpm) / 60 / 4);
    const userMessage = `\
Generate a CTP document for the following song:
Title: ${title ?? "Unknown Title"}
Artist: ${artist ?? "Unknown Artist"}
MusicBrainz ID: ${mbid ?? "unknown"}
Detected BPM: ${bpm}
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
Contributed by: ${contributed_by}
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;
    // thinking and output_config are not yet in the SDK type definitions;
    // cast through the base param type to avoid type errors.
    type ExtendedParams = Parameters<typeof client.messages.create>[0] & {
      thinking?: { type: string };
      output_config?: { format: { type: string; schema: unknown } };
    };
    const params: ExtendedParams = {
      model,
      max_tokens: 2048,
      thinking: { type: "adaptive" },
      system: SYSTEM_PROMPT,
      messages: [{ role: "user", content: userMessage }],
      output_config: {
        format: {
          type: "json_schema",
          schema: CTP_SCHEMA,
        },
      },
    };
    const response = (await client.messages.create(
      params as Parameters<typeof client.messages.create>[0]
    )) as AnthropicMessage;
    // Structured output still arrives as a text block containing JSON.
    const textBlock = response.content.find((b) => b.type === "text");
    if (!textBlock || textBlock.type !== "text") {
      throw new Error("Claude did not return a text block");
    }
    let parsed: unknown;
    try {
      parsed = JSON.parse(textBlock.text);
    } catch {
      throw new Error(`Claude returned invalid JSON: ${textBlock.text.slice(0, 200)}`);
    }
    // Stamp the current timestamp if the model left a placeholder.
    const doc = parsed as CTPDocument;
    if (!doc.metadata.created_at || doc.metadata.created_at.includes("placeholder")) {
      doc.metadata.created_at = new Date().toISOString();
    }
    return doc;
  },
};

View File

@@ -0,0 +1,157 @@
import 'server-only';
import type { CTPDocument } from "@/lib/ctp/schema";
import type { AnalysisInput, AnalysisProvider } from "@/lib/analysis/providers";
import { SYSTEM_PROMPT } from "./anthropic";
/** Resolve the Ollama server base URL: OLLAMA_BASE_URL, defaulting to localhost. */
function getBaseUrl(): string {
  const configured = process.env.OLLAMA_BASE_URL;
  return configured === undefined ? "http://localhost:11434" : configured;
}
// ─── Model list ───────────────────────────────────────────────────────────────
// Shape of the /api/tags response we care about.
interface OllamaTagsResponse {
  models?: Array<{ name: string }>;
}
/**
 * Fetch the names of locally installed models from Ollama's /api/tags.
 * Returns an empty list on any failure (unreachable server, non-OK status,
 * 5-second timeout) rather than throwing.
 */
export async function getOllamaModels(): Promise<string[]> {
  const endpoint = `${getBaseUrl()}/api/tags`;
  try {
    const res = await fetch(endpoint, { signal: AbortSignal.timeout(5000) });
    if (!res.ok) return [];
    const body = (await res.json()) as OllamaTagsResponse;
    const installed = body.models ?? [];
    return installed.map(({ name }) => name);
  } catch {
    return [];
  }
}
// ─── Chat completions helper ──────────────────────────────────────────────────
async function callOllama(model: string, userMessage: string): Promise<string> {
const baseUrl = getBaseUrl();
const response = await fetch(`${baseUrl}/v1/chat/completions`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
model,
messages: [
{ role: "system", content: SYSTEM_PROMPT },
{
role: "user",
content:
userMessage +
"\n\nRespond with valid JSON only. Do not add any explanation or markdown. Your entire response must be a single valid JSON object matching the schema.",
},
],
stream: false,
}),
signal: AbortSignal.timeout(120000), // 2-minute timeout for slow local models
});
if (!response.ok) {
const text = await response.text().catch(() => "");
throw new Error(`Ollama API error ${response.status}: ${text.slice(0, 200)}`);
}
const json = await response.json() as {
choices?: Array<{ message?: { content?: string } }>;
};
const content = json.choices?.[0]?.message?.content;
if (!content) {
throw new Error("Ollama did not return a message content");
}
return content;
}
// ─── Provider implementation ──────────────────────────────────────────────────
/**
 * Local AI provider backed by an Ollama server. Requires input.ollamaModel to
 * name an installed model.
 */
export const ollamaProvider: AnalysisProvider = {
  id: "ollama",
  label: "Ollama",
  type: "local-ai",
  // Available iff the server answers /api/tags within 5 seconds.
  async isAvailable() {
    const url = getBaseUrl();
    try {
      const response = await fetch(`${url}/api/tags`, {
        signal: AbortSignal.timeout(5000),
      });
      if (response.ok) {
        return { available: true };
      }
      return { available: false, reason: `Ollama not reachable at ${url}` };
    } catch {
      return { available: false, reason: `Ollama not reachable at ${url}` };
    }
  },
  /**
   * Generate a draft CTP document with a local model. The response is parsed
   * leniently (markdown fences stripped); on a parse failure the identical
   * prompt is retried once before giving up. Throws when the model name is
   * missing, a request fails, or both responses fail to parse.
   */
  async generateCTP(input: AnalysisInput): Promise<CTPDocument> {
    const { ollamaModel, bpm, duration, title, artist, mbid, contributed_by } = input;
    if (!ollamaModel) {
      throw new Error("ollamaModel is required for Ollama provider");
    }
    // Rough bar estimate shown to the model, assuming 4/4.
    const approxBars = Math.round((duration * bpm) / 60 / 4);
    const userMessage = `\
Generate a CTP document for the following song:
Title: ${title ?? "Unknown Title"}
Artist: ${artist ?? "Unknown Artist"}
MusicBrainz ID: ${mbid ?? "unknown"}
Detected BPM: ${bpm}
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
Contributed by: ${contributed_by}
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;
    // Attempt parse with one retry on failure
    let content: string;
    try {
      content = await callOllama(ollamaModel, userMessage);
    } catch (err) {
      throw new Error(
        `Ollama request failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
    // Lenient parse: strip a surrounding markdown code fence, JSON-parse, and
    // stamp created_at when missing. Returns null if the text is not valid
    // JSON (or lacks the expected metadata object — that access is inside the
    // try as well).
    const tryParse = (raw: string): CTPDocument | null => {
      // Strip markdown code fences if present
      const stripped = raw.replace(/^```(?:json)?\s*/i, "").replace(/\s*```\s*$/, "").trim();
      try {
        const doc = JSON.parse(stripped) as CTPDocument;
        if (!doc.metadata.created_at || doc.metadata.created_at.includes("placeholder")) {
          doc.metadata.created_at = new Date().toISOString();
        }
        return doc;
      } catch {
        return null;
      }
    };
    const firstAttempt = tryParse(content);
    if (firstAttempt) return firstAttempt;
    // Retry once
    let retryContent: string;
    try {
      retryContent = await callOllama(ollamaModel, userMessage);
    } catch (err) {
      throw new Error(
        `Ollama retry request failed: ${err instanceof Error ? err.message : String(err)}`
      );
    }
    const secondAttempt = tryParse(retryContent);
    if (secondAttempt) return secondAttempt;
    // NOTE(review): the preview shows the FIRST response, not the retry —
    // consider surfacing both when debugging parse failures.
    throw new Error(
      `Ollama (${ollamaModel}) returned a response that could not be parsed as a valid CTP document. ` +
      `Response preview: ${content.slice(0, 200)}`
    );
  },
};

View File

@@ -0,0 +1,108 @@
import 'server-only';
import type { CTPDocument } from "@/lib/ctp/schema";
import type { AnalysisInput, AnalysisProvider } from "@/lib/analysis/providers";
import { CTP_SCHEMA, SYSTEM_PROMPT } from "./anthropic";
/**
 * Build a human-readable provider label. Custom OpenAI-compatible endpoints
 * (anything other than api.openai.com) show their hostname, e.g.
 * "gpt-4o (llm.example.com)"; otherwise the label is "<model> (OpenAI)".
 */
function buildLabel(): string {
  const model = process.env.OPENAI_MODEL ?? "GPT-4o";
  const baseUrl = process.env.OPENAI_BASE_URL ?? "";
  const isCustomEndpoint = baseUrl !== "" && !baseUrl.includes("api.openai.com");
  if (isCustomEndpoint) {
    try {
      return `${model} (${new URL(baseUrl).hostname})`;
    } catch {
      // Malformed URL — fall back to the generic label below.
    }
  }
  return `${model} (OpenAI)`;
}
/**
 * Analysis provider backed by any OpenAI-compatible chat-completions API.
 *
 * Configuration (environment):
 *   OPENAI_API_KEY  — required for the provider to report available.
 *   OPENAI_BASE_URL — optional; defaults to https://api.openai.com/v1.
 *                     Any compatible endpoint may be substituted.
 *   OPENAI_MODEL    — optional; defaults to gpt-4o.
 */
export const openaiProvider: AnalysisProvider = {
  id: "openai",
  label: buildLabel(),
  type: "cloud-ai",

  // Availability is a pure configuration check; no network round-trip here.
  async isAvailable() {
    if (process.env.OPENAI_API_KEY) {
      return { available: true };
    }
    return { available: false, reason: "OPENAI_API_KEY not set" };
  },

  /**
   * Generates a CTP document for the given song by requesting structured
   * JSON constrained to CTP_SCHEMA (strict json_schema response format).
   *
   * @throws Error if the API key is missing, the HTTP call returns non-2xx,
   *         the reply has no message content, the content is not valid JSON,
   *         or the parsed payload lacks a metadata object.
   */
  async generateCTP(input: AnalysisInput): Promise<CTPDocument> {
    const apiKey = process.env.OPENAI_API_KEY;
    if (!apiKey) {
      throw new Error("OPENAI_API_KEY not set");
    }
    const baseUrl = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
    const model = process.env.OPENAI_MODEL ?? "gpt-4o";

    const { bpm, duration, title, artist, mbid, contributed_by } = input;
    // Rough bar count assuming 4/4 throughout; gives the model a size hint.
    const approxBars = Math.round((duration * bpm) / 60 / 4);
    const userMessage = `\
Generate a CTP document for the following song:
Title: ${title ?? "Unknown Title"}
Artist: ${artist ?? "Unknown Artist"}
MusicBrainz ID: ${mbid ?? "unknown"}
Detected BPM: ${bpm}
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
Contributed by: ${contributed_by}
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;

    const response = await fetch(`${baseUrl}/chat/completions`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`,
      },
      body: JSON.stringify({
        model,
        messages: [
          { role: "system", content: SYSTEM_PROMPT },
          { role: "user", content: userMessage },
        ],
        response_format: {
          type: "json_schema",
          json_schema: {
            strict: true,
            name: "CTPDocument",
            schema: CTP_SCHEMA,
          },
        },
        max_tokens: 2048,
      }),
    });
    if (!response.ok) {
      const text = await response.text().catch(() => "");
      throw new Error(`OpenAI API error ${response.status}: ${text.slice(0, 200)}`);
    }

    const json = await response.json() as {
      choices?: Array<{ message?: { content?: string } }>;
    };
    const content = json.choices?.[0]?.message?.content;
    if (!content) {
      throw new Error("OpenAI did not return a message content");
    }

    let parsed: unknown;
    try {
      parsed = JSON.parse(content);
    } catch {
      throw new Error(`OpenAI returned invalid JSON: ${content.slice(0, 200)}`);
    }
    // Guard against servers that ignore the json_schema response format and
    // return JSON without the required metadata object — without this check
    // the created_at access below crashes with an opaque TypeError.
    if (
      parsed === null ||
      typeof parsed !== "object" ||
      (parsed as { metadata?: unknown }).metadata === null ||
      typeof (parsed as { metadata?: unknown }).metadata !== "object"
    ) {
      throw new Error(
        `OpenAI returned JSON that is not a CTP document: ${content.slice(0, 200)}`
      );
    }
    const doc = parsed as CTPDocument;
    // Models sometimes echo the schema's placeholder timestamp; stamp now.
    if (!doc.metadata.created_at || doc.metadata.created_at.includes("placeholder")) {
      doc.metadata.created_at = new Date().toISOString();
    }
    return doc;
  },
};

View File

@@ -0,0 +1,78 @@
import 'server-only';
import type { AnalysisProvider, ProviderInfo } from "@/lib/analysis/providers";
import { anthropicProvider } from "./anthropic";
import { openaiProvider } from "./openai";
import { ollamaProvider, getOllamaModels } from "./ollama";
import { algorithmicProvider } from "./algorithmic";
import { RECOMMENDED_OLLAMA_MODELS } from "@/lib/analysis/constants";
export { getOllamaModels, RECOMMENDED_OLLAMA_MODELS };
// Registration order determines the default when the user has no saved preference.
// NOTE(review): algorithmic is listed last as the local fallback — presumably
// its isAvailable() always reports true; confirm in ./algorithmic.
const ALL_PROVIDERS: AnalysisProvider[] = [
  anthropicProvider,
  openaiProvider,
  ollamaProvider,
  algorithmicProvider,
];
/**
 * Returns every registered provider together with its current availability
 * status. All isAvailable() checks are issued concurrently.
 */
export async function getProviderInfoList(): Promise<ProviderInfo[]> {
  return Promise.all(
    ALL_PROVIDERS.map(async (provider) => {
      const status = await provider.isAvailable();
      const entry: ProviderInfo = {
        id: provider.id,
        label: provider.label,
        type: provider.type,
        available: status.available,
      };
      if (!status.available && status.reason) {
        entry.unavailableReason = status.reason;
      }
      if (provider.id === "ollama") {
        entry.ollamaBaseUrl = process.env.OLLAMA_BASE_URL ?? "http://localhost:11434";
      }
      return entry;
    })
  );
}
/**
 * Filters the registry down to providers whose isAvailable() currently
 * reports true (checks run concurrently).
 * NOTE(review): callers appear to rely on the algorithmic provider always
 * passing this filter — verify its isAvailable() in ./algorithmic.
 */
export async function getAvailableProviders(): Promise<AnalysisProvider[]> {
  const statuses = await Promise.all(ALL_PROVIDERS.map((p) => p.isAvailable()));
  return ALL_PROVIDERS.filter((_, index) => statuses[index].available);
}
/**
 * Resolves a provider by id and verifies it is currently usable.
 *
 * @throws Error when the id is unknown, or when the provider's
 *         isAvailable() check reports it unavailable.
 */
export async function getProvider(id: string): Promise<AnalysisProvider> {
  const match = ALL_PROVIDERS.find((p) => p.id === id);
  if (match === undefined) {
    const known = ALL_PROVIDERS.map((p) => p.id).join(", ");
    throw new Error(`Unknown provider '${id}'. Available providers: ${known}`);
  }
  const status = await match.isAvailable();
  if (status.available) {
    return match;
  }
  throw new Error(
    `Provider '${id}' is not available: ${status.reason ?? "unknown reason"}`
  );
}

View File

@@ -149,7 +149,10 @@ function calculateBeats(doc: CTPDocument): Beat[] {
}
} else {
// Last section: generate beats until we exceed duration_seconds
const songEnd = cursor + doc.metadata.duration_seconds;
const countInSeconds = doc.count_in.enabled
? (doc.count_in.bars * firstNumerator * 60) / firstBpm
: 0;
const songEnd = countInSeconds + doc.metadata.duration_seconds;
// Estimate bars remaining
const approxBarsRemaining = Math.ceil(
(doc.metadata.duration_seconds / 60) * section.bpm / numerator + 2

View File

@@ -68,6 +68,11 @@ export interface SongRow {
updated_at: Date;
}
/** Looks up a song row by its MusicBrainz recording id; null when absent. */
export async function getSongByMbid(mbid: string): Promise<SongRow | null> {
  const result = await query<SongRow>("SELECT * FROM songs WHERE mbid = $1", [mbid]);
  const [first] = result.rows;
  return first ?? null;
}
export async function searchSongs(q: string, limit = 20): Promise<SongRow[]> {
const { rows } = await query<SongRow>(
`SELECT * FROM songs

View File

@@ -1,14 +1,18 @@
/**
* Git Registry Sync
*
* Pulls CTP files from a remote GitHub repository (the "community registry")
* and upserts them into the local database.
* Pulls CTP files from a remote git repository (the "community registry")
* served over HTTPS. Compatible with any self-hosted git server
* (Gitea, Forgejo, GitLab CE, Gogs, etc.) or any public git host.
*
* The registry repo is expected to contain CTP JSON files at:
* <repo-root>/<artist-initial>/<artist-slug>/<recording-mbid>.ctp.json
*
* Configuration:
* REGISTRY_REPO — GitHub repo URL, e.g. https://github.com/org/clicktrack-registry
* REGISTRY_REPO — HTTPS URL of the registry repo,
* e.g. https://git.yourdomain.com/org/clicktrack-registry
* To authenticate, embed credentials in the URL:
* https://user:token@git.yourdomain.com/org/clicktrack-registry
* REGISTRY_BRANCH — branch to pull from (default: main)
*/