import { NextRequest, NextResponse } from "next/server";
import { z } from "zod";

const TestRequestSchema = z.object({
  provider: z.string(),
  ollamaModel: z.string().optional(),
});

/** Shorthand for the uniform success payload (route always answers HTTP 200). */
function ok() {
  return NextResponse.json({ ok: true });
}

/** Shorthand for the uniform failure payload (still HTTP 200; see route docs). */
function fail(error: string) {
  return NextResponse.json({ ok: false, error });
}

/**
 * Sends one minimal chat-style request to a provider endpoint and maps the
 * HTTP outcome onto the route's ok/fail payload. Non-2xx responses are
 * reported with the status code and the first 200 chars of the body.
 */
async function probeEndpoint(opts: {
  /** Fully-qualified endpoint URL. */
  url: string;
  /** Provider-specific auth/version headers (Content-Type is added here). */
  headers: Record<string, string>;
  /** JSON request body for the probe. */
  body: Record<string, unknown>;
  /** Abort the probe after this many milliseconds. */
  timeoutMs: number;
  /** Human-readable provider name used in error messages, e.g. "OpenAI API". */
  label: string;
}): Promise<NextResponse> {
  const response = await fetch(opts.url, {
    method: "POST",
    headers: { "Content-Type": "application/json", ...opts.headers },
    body: JSON.stringify(opts.body),
    signal: AbortSignal.timeout(opts.timeoutMs),
  });
  if (response.ok) {
    return ok();
  }
  // Body may be unreadable (connection reset mid-stream) — degrade to "".
  const text = await response.text().catch(() => "");
  return fail(`${opts.label} returned ${response.status}: ${text.slice(0, 200)}`);
}

/** Cheap deterministic prompt that forces a one-word completion. */
const PROBE_MESSAGES = [{ role: "user", content: "Reply with the word OK" }];

/**
 * POST /api/analyze/test
 *
 * Runs a lightweight probe for a provider to confirm it is working.
 * Always returns 200 — use the `ok` field to check success/failure.
 *
 * Body: { provider: string, ollamaModel?: string }
 * Response: { ok: true } | { ok: false, error: string }
 */
export async function POST(req: NextRequest) {
  let body: unknown;
  try {
    body = await req.json();
  } catch {
    return fail("Invalid JSON body");
  }

  const parsed = TestRequestSchema.safeParse(body);
  if (!parsed.success) {
    // Report the actual validation problem instead of a hard-coded
    // "missing provider" message, which was wrong when e.g. provider
    // was present but not a string.
    const issue = parsed.error.issues[0];
    return fail(
      issue
        ? `Invalid request body: ${issue.path.join(".") || "body"}: ${issue.message}`
        : "Invalid request body",
    );
  }
  const { provider, ollamaModel } = parsed.data;

  try {
    switch (provider) {
      case "algorithmic": {
        // Local algorithm — nothing external to probe.
        return ok();
      }

      case "anthropic": {
        const apiKey = process.env.ANTHROPIC_API_KEY;
        if (!apiKey) {
          return fail("ANTHROPIC_API_KEY not set");
        }
        // Minimal 1-token probe against the Messages API.
        return probeEndpoint({
          url: "https://api.anthropic.com/v1/messages",
          headers: {
            "x-api-key": apiKey,
            "anthropic-version": "2023-06-01",
          },
          body: {
            model: process.env.ANTHROPIC_MODEL ?? "claude-opus-4-6",
            max_tokens: 10,
            messages: PROBE_MESSAGES,
          },
          timeoutMs: 15000,
          label: "Anthropic API",
        });
      }

      case "openai": {
        const apiKey = process.env.OPENAI_API_KEY;
        if (!apiKey) {
          return fail("OPENAI_API_KEY not set");
        }
        const baseUrl = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";
        const model = process.env.OPENAI_MODEL ?? "gpt-4o";
        return probeEndpoint({
          url: `${baseUrl}/chat/completions`,
          headers: { Authorization: `Bearer ${apiKey}` },
          body: {
            model,
            max_tokens: 10,
            messages: PROBE_MESSAGES,
          },
          timeoutMs: 15000,
          label: "OpenAI API",
        });
      }

      case "ollama": {
        const baseUrl = process.env.OLLAMA_BASE_URL ?? "http://localhost:11434";
        if (!ollamaModel) {
          return fail("ollamaModel is required for Ollama test");
        }
        // Ollama exposes an OpenAI-compatible endpoint; no auth header.
        // Longer timeout: a cold model may need to load into memory first.
        return probeEndpoint({
          url: `${baseUrl}/v1/chat/completions`,
          headers: {},
          body: {
            model: ollamaModel,
            max_tokens: 10,
            messages: PROBE_MESSAGES,
            stream: false,
          },
          timeoutMs: 30000,
          label: "Ollama",
        });
      }

      default:
        return fail(`Unknown provider: ${provider}`);
    }
  } catch (err) {
    // Network failures, timeouts (AbortSignal), DNS errors, etc. all land here.
    return fail(err instanceof Error ? err.message : String(err));
  }
}