feat: analysis providers, settings UI, song search, WAV duration fix
- Multi-provider AI analysis (Anthropic, OpenAI, Ollama, Algorithmic) — server-only guards on all provider files; client bundle fix
- /settings page with provider status, Ollama model picker, preferences
- Song search box on /analyze replacing raw MBID input (debounced, keyboard nav)
- Auto-register song via MusicBrainz on POST /api/tracks (no more 404)
- Fix WAV duration bug: last section songEnd was double-counting elapsed time
- Registry sync comment updated for self-hosted HTTPS git servers

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,179 +1,17 @@
|
||||
/**
 * AI-assisted CTP document generation
 *
 * Takes the results of BPM detection (and optional song metadata) and uses
 * Claude to produce a plausible, well-structured CTP document.
 *
 * Claude is asked to:
 * - Divide the song into typical sections (Intro, Verse, Chorus, Bridge…)
 * - Assign realistic start bars for each section
 * - Note any tempo changes it would expect for the song/genre
 * - Return a fully valid CTP 1.0 JSON document
 *
 * The caller should treat the result as a *draft* — the generated sections
 * are educated guesses and should be verified against the recording.
 *
 * This module is now a thin compatibility shim: the implementation lives in
 * the Anthropic provider, and these lines only preserve the old import paths.
 * Re-exports from the Anthropic provider for backwards compatibility.
 *
 * @deprecated Import directly from @/lib/analysis/providers/anthropic instead.
 */

// Type-only re-export so legacy callers keep their old `AnalysisInput` path.
export type { AnalysisInput } from "@/lib/analysis/providers";
// Default export preserved for callers that did `import generateCTP from` this file.
export { anthropicProvider as default } from "@/lib/analysis/providers/anthropic";
|
||||
|
||||
import Anthropic from "@anthropic-ai/sdk";
import type { CTPDocument } from "@/lib/ctp/schema";
// Legacy named export for any remaining callers
import { anthropicProvider } from "@/lib/analysis/providers/anthropic";
// NOTE(review): this type import shares the name `AnalysisInput` with the
// interface declared later in this file — a diff-merge artifact; only one of
// the two can exist in the compiled module. Confirm which version is current.
import type { AnalysisInput } from "@/lib/analysis/providers";

// Zero-arg construction: the SDK uses its default credential lookup —
// NOTE(review): confirm ANTHROPIC_API_KEY is set in the server environment.
const client = new Anthropic();
|
||||
|
||||
// ─── Input / output types ─────────────────────────────────────────────────────

/**
 * Input for AI CTP generation: the BPM-detection result plus whatever song
 * metadata the caller was able to resolve. All fields except bpm/duration
 * are optional and fall back to "unknown"/"anonymous" in the prompt.
 */
export interface AnalysisInput {
  // Detected tempo in beats per minute.
  bpm: number;
  duration: number; // seconds
  // Song title, if known.
  title?: string;
  // Artist name, if known.
  artist?: string;
  // MusicBrainz recording ID; null when lookup explicitly failed.
  mbid?: string | null;
  // Attribution string stamped into the generated document's metadata.
  contributedBy?: string;
}
|
||||
|
||||
// ─── JSON Schema for structured output ───────────────────────────────────────
// Must be strict (no additionalProperties, all required fields present).
// This shape is handed to the model as a json_schema output format; it is the
// *coarse* contract — finer invariants (step vs ramp fields) are validated
// downstream (see the note on `sections.items` below).

const CTP_SCHEMA = {
  type: "object",
  additionalProperties: false,
  required: ["version", "metadata", "count_in", "sections"],
  properties: {
    // Protocol version — only "1.0" exists.
    version: { type: "string", enum: ["1.0"] },
    metadata: {
      type: "object",
      additionalProperties: false,
      required: [
        "title", "artist", "mbid", "duration_seconds",
        "contributed_by", "verified", "created_at",
      ],
      properties: {
        title: { type: "string" },
        artist: { type: "string" },
        // null allowed: song may not have a resolved MusicBrainz ID.
        mbid: { type: ["string", "null"] },
        duration_seconds: { type: "number" },
        contributed_by: { type: "string" },
        // Always false for AI output (enforced by the system prompt).
        verified: { type: "boolean" },
        // ISO 8601 datetime string (enforced by the system prompt).
        created_at: { type: "string" },
      },
    },
    count_in: {
      type: "object",
      additionalProperties: false,
      required: ["enabled", "bars", "use_first_section_tempo"],
      properties: {
        enabled: { type: "boolean" },
        bars: { type: "integer", minimum: 1, maximum: 8 },
        use_first_section_tempo: { type: "boolean" },
      },
    },
    sections: {
      type: "array",
      minItems: 1,
      items: {
        type: "object",
        additionalProperties: false,
        required: ["label", "start_bar", "time_signature", "transition"],
        // bpm is required for step, bpm_start/bpm_end for ramp — handled via oneOf
        // but we keep this schema simple (strict mode) and validate downstream with Zod.
        properties: {
          label: { type: "string" },
          start_bar: { type: "integer", minimum: 1 },
          bpm: { type: "number" },
          bpm_start: { type: "number" },
          bpm_end: { type: "number" },
          transition: { type: "string", enum: ["step", "ramp"] },
          time_signature: {
            type: "object",
            additionalProperties: false,
            required: ["numerator", "denominator"],
            properties: {
              numerator: { type: "integer", minimum: 1, maximum: 32 },
              // Power-of-two denominators only.
              denominator: { type: "integer", enum: [1, 2, 4, 8, 16, 32] },
            },
          },
        },
      },
    },
  },
};
|
||||
|
||||
// ─── System prompt ────────────────────────────────────────────────────────────
// Runtime string sent verbatim to the model — do not reflow or "fix" wording
// casually; it encodes the CTP invariants the schema alone cannot express
// (section ordering, step-vs-ramp field rules, verified=false). The leading
// `\` suppresses the newline after the opening backtick.

const SYSTEM_PROMPT = `\
You are an expert music producer and session musician assisting cover bands with click tracks.

You will receive automated BPM detection results for a song and must generate a CTP (Click Track Protocol) document describing the song's full tempo map.

CTP rules:
- "version" must be "1.0"
- sections[0].start_bar must be 1
- sections must be sorted by start_bar ascending, with no gaps
- Step sections have a single "bpm" field; ramp sections have "bpm_start" and "bpm_end" (no "bpm" field)
- All BPM values must be between 20 and 400
- time_signature.denominator must be a power of 2 (1, 2, 4, 8, 16, or 32)
- metadata.verified must be false (this is AI-generated, not human-verified)
- metadata.created_at must be an ISO 8601 datetime string

Guidelines for section layout:
- Use typical pop/rock section names: Intro, Verse, Pre-Chorus, Chorus, Bridge, Outro
- Estimate bar counts based on song duration and BPM (bars = duration_seconds × BPM / 60 / beats_per_bar)
- Most songs are 4/4; note any unusual meters if you know the song
- If you know the song has a tempo change (ritardando, double-time feel, key change with tempo shift), model it with a ramp or step section
- If unsure about sections, use a single constant-tempo section covering the whole song
- Use the detected BPM as the primary tempo — do not invent a different BPM unless the song is well-known to have a different tempo

The output is a draft for human review. Add reasonable section structure based on the song's typical arrangement.`;
|
||||
|
||||
// ─── Main function ────────────────────────────────────────────────────────────
|
||||
|
||||
export async function generateCTPWithAI(input: AnalysisInput): Promise<CTPDocument> {
|
||||
const { bpm, duration, title, artist, mbid, contributedBy } = input;
|
||||
|
||||
const approxBars = Math.round((duration * bpm) / 60 / 4); // assuming 4/4
|
||||
|
||||
const userMessage = `\
|
||||
Generate a CTP document for the following song:
|
||||
|
||||
Title: ${title ?? "Unknown Title"}
|
||||
Artist: ${artist ?? "Unknown Artist"}
|
||||
MusicBrainz ID: ${mbid ?? "unknown"}
|
||||
Detected BPM: ${bpm}
|
||||
Duration: ${duration.toFixed(1)} seconds (~${approxBars} bars at 4/4)
|
||||
Contributed by: ${contributedBy ?? "anonymous"}
|
||||
|
||||
Create a plausible section layout for this song. If this is a well-known song, use your knowledge of its actual arrangement. If not, use a sensible generic structure.`;
|
||||
|
||||
const response = await client.messages.create({
|
||||
model: "claude-opus-4-6",
|
||||
max_tokens: 2048,
|
||||
thinking: { type: "adaptive" },
|
||||
system: SYSTEM_PROMPT,
|
||||
messages: [{ role: "user", content: userMessage }],
|
||||
output_config: {
|
||||
format: {
|
||||
type: "json_schema",
|
||||
schema: CTP_SCHEMA,
|
||||
},
|
||||
},
|
||||
export async function generateCTPWithAI(input: AnalysisInput & { contributedBy?: string }) {
|
||||
return anthropicProvider.generateCTP({
|
||||
...input,
|
||||
contributed_by: input.contributed_by ?? input.contributedBy ?? "anonymous",
|
||||
});
|
||||
|
||||
const textBlock = response.content.find((b) => b.type === "text");
|
||||
if (!textBlock || textBlock.type !== "text") {
|
||||
throw new Error("Claude did not return a text block");
|
||||
}
|
||||
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(textBlock.text);
|
||||
} catch {
|
||||
throw new Error(`Claude returned invalid JSON: ${textBlock.text.slice(0, 200)}`);
|
||||
}
|
||||
|
||||
// Stamp the current timestamp if Claude left a placeholder
|
||||
const doc = parsed as CTPDocument;
|
||||
if (!doc.metadata.created_at || doc.metadata.created_at.includes("placeholder")) {
|
||||
doc.metadata.created_at = new Date().toISOString();
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user