// llm.ts — BYOK LLM provider abstraction
// Calls OpenAI, Anthropic, or Google Gemini directly from the browser.
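//
// Shared types (Provider, ProviderConfig, ChatMsg) are assumed to be declared
// elsewhere in the app; a minimal sketch of the shapes this module relies on:
//
//   type Provider = 'openai' | 'anthropic' | 'gemini';
//   interface ProviderConfig { provider: Provider; apiKey: string; model?: string; }
//   interface ChatMsg { role: 'user' | 'assistant' | 'system'; content: string; }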

// Default model per provider, plus the model options exposed on window.LLM.
const LLM_DEFAULT_MODELS: Record<Provider, string> = {
  openai: 'gpt-4o-mini',
  anthropic: 'claude-haiku-4-5',
  gemini: 'gemini-2.0-flash',
};

const LLM_MODEL_OPTIONS: Record<Provider, string[]> = {
  openai: ['gpt-4o-mini', 'gpt-4o', 'gpt-4.1-mini', 'gpt-4.1'],
  anthropic: ['claude-haiku-4-5', 'claude-sonnet-4-5', 'claude-3-5-haiku-latest', 'claude-3-5-sonnet-latest'],
  gemini: ['gemini-2.0-flash', 'gemini-2.5-flash', 'gemini-1.5-flash', 'gemini-1.5-pro'],
};

async function llmCall(
  cfg: ProviderConfig,
  systemPrompt: string,
  history: ChatMsg[]
): Promise<string> {
  if (!cfg.apiKey || cfg.apiKey.length < 6) {
    throw new Error('No API key configured. Open Settings (lower-right) and paste your key.');
  }
  if (cfg.provider === 'openai') return await callOpenAI(cfg, systemPrompt, history);
  if (cfg.provider === 'anthropic') return await callAnthropic(cfg, systemPrompt, history);
  if (cfg.provider === 'gemini') return await callGemini(cfg, systemPrompt, history);
  throw new Error(`Unknown provider: ${cfg.provider}`);
}

// OpenAI Chat Completions endpoint; the system prompt is sent as the first
// entry of the messages array.
async function callOpenAI(
  cfg: ProviderConfig,
  system: string,
  history: ChatMsg[]
): Promise<string> {
  const messages = [
    { role: 'system', content: system },
    ...history.map((m) => ({ role: m.role, content: m.content })),
  ];
  const resp = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${cfg.apiKey}`,
    },
    body: JSON.stringify({
      model: cfg.model || LLM_DEFAULT_MODELS.openai,
      messages,
      temperature: 0.85,
      max_tokens: 700,
    }),
  });
  if (!resp.ok) {
    const t = await resp.text();
    throw new Error(`OpenAI error ${resp.status}: ${t.slice(0, 300)}`);
  }
  const data = await resp.json();
  return data.choices?.[0]?.message?.content ?? '(no content)';
}

async function callAnthropic(
  cfg: ProviderConfig,
  system: string,
  history: ChatMsg[]
): Promise<string> {
  // Anthropic blocks direct browser requests by default; the
  // anthropic-dangerous-direct-browser-access header opts in for BYOK use cases.
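  // History entries with role 'system' are dropped here; Anthropic takes the
  // system prompt as a top-level `system` field instead.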
  const messages = history
    .filter((m) => m.role !== 'system')
    .map((m) => ({ role: m.role, content: m.content }));
  const resp = await fetch('https://api.anthropic.com/v1/messages', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': cfg.apiKey,
      'anthropic-version': '2023-06-01',
      'anthropic-dangerous-direct-browser-access': 'true',
    },
    body: JSON.stringify({
      model: cfg.model || LLM_DEFAULT_MODELS.anthropic,
      system,
      messages,
      max_tokens: 700,
      temperature: 0.85,
    }),
  });
  if (!resp.ok) {
    const t = await resp.text();
    throw new Error(`Anthropic error ${resp.status}: ${t.slice(0, 300)}`);
  }
  const data = await resp.json();
  // Anthropic returns the reply as an array of content blocks; concatenate the text blocks.
  const blocks = data.content || [];
  const txt = blocks.map((b: any) => (b.type === 'text' ? b.text : '')).join('');
  return txt || '(no content)';
}

// Gemini generateContent endpoint. The API key is passed as a query parameter
// (so it may show up in request logs); assistant turns map to role 'model' and
// the system prompt goes in systemInstruction.
async function callGemini(
  cfg: ProviderConfig,
  system: string,
  history: ChatMsg[]
): Promise<string> {
  const model = cfg.model || LLM_DEFAULT_MODELS.gemini;
  const url = `https://generativelanguage.googleapis.com/v1beta/models/${model}:generateContent?key=${encodeURIComponent(cfg.apiKey)}`;
  const contents = history.map((m) => ({
    role: m.role === 'assistant' ? 'model' : 'user',
    parts: [{ text: m.content }],
  }));
  const body: any = {
    systemInstruction: { parts: [{ text: system }] },
    contents,
    generationConfig: { temperature: 0.85, maxOutputTokens: 700 },
  };
  const resp = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  if (!resp.ok) {
    const t = await resp.text();
    throw new Error(`Gemini error ${resp.status}: ${t.slice(0, 300)}`);
  }
  const data = await resp.json();
  const parts = data.candidates?.[0]?.content?.parts || [];
  return parts.map((p: any) => p.text || '').join('') || '(no content)';
}

// Expose to window
(window as any).LLM = { call: llmCall, defaults: LLM_DEFAULT_MODELS, options: LLM_MODEL_OPTIONS };
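
// Example usage (illustrative sketch; assumes a config object persisted by the
// Settings panel, with fields matching the ProviderConfig sketch above):
//
//   const cfg: ProviderConfig = { provider: 'anthropic', apiKey: '<user key>', model: '' };
//   const reply = await (window as any).LLM.call(
//     cfg,
//     'You are a helpful assistant.',
//     [{ role: 'user', content: 'Hello!' }]
//   );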
