/* global React */
/*
  GitHub helpers for the Letters Metadata Editor.

  Auth:
    - Personal Access Token (PAT) — stored in localStorage. Fully client-side,
      works on GitHub Pages with no extra setup. Token must have
      'contents:write' and 'pull_requests:write' on the target repo
      (a fine-grained PAT scoped to the repo is recommended).
    - GitHub OAuth Device Flow — UX is "click to sign in"; user enters a code
      on github.com/login/device. Requires a registered OAuth App's client_id
      AND a small CORS proxy for the token exchange (GitHub's
      /login/oauth/access_token endpoint does NOT allow CORS from a browser).
      We expose the inputs (client_id, proxy_base) so the deployer can wire it
      up; if absent, we fall back to the PAT flow.

  Save flow (per user request):
    Click Save -> create a branch from default -> commit final.json ->
    open a Pull Request -> show PR link.
*/

// Base URL for all GitHub REST v3 calls made by ghFetch().
const GH_API = "https://api.github.com";

// ------------- config storage -------------
// localStorage keys used by the load/save helpers below.
// All stored values are strings; config and user are JSON-serialized.
const LS_KEYS = {
  config: "lme:gh:config",      // { repo, branch, authMode, clientId, proxyBase }
  token:  "lme:gh:token",       // PAT or OAuth access token
  user:   "lme:gh:user",        // { login, avatar_url, name } cached
};

// Read the persisted editor config from localStorage.
// Returns the parsed object, or null when absent/unparseable/storage unavailable.
function loadGHConfig() {
  try {
    const stored = localStorage.getItem(LS_KEYS.config);
    if (!stored) return null;
    return JSON.parse(stored);
  } catch {
    return null;
  }
}
// Persist the editor config to localStorage, overwriting any previous value.
function saveGHConfig(cfg) {
  const serialized = JSON.stringify(cfg);
  localStorage.setItem(LS_KEYS.config, serialized);
}
// Read the stored access token; "" when unset or storage is unavailable.
function loadGHToken() {
  try {
    const token = localStorage.getItem(LS_KEYS.token);
    return token || "";
  } catch {
    return "";
  }
}
// Store the token, or clear it entirely when given a falsy value
// (so sign-out leaves no credential behind).
function saveGHToken(t) {
  if (!t) {
    localStorage.removeItem(LS_KEYS.token);
    return;
  }
  localStorage.setItem(LS_KEYS.token, t);
}
// Read the cached user record ({ login, avatar_url, name }).
// Returns null when absent/unparseable/storage unavailable.
function loadGHUser() {
  try {
    const stored = localStorage.getItem(LS_KEYS.user);
    if (!stored) return null;
    return JSON.parse(stored);
  } catch {
    return null;
  }
}
// Cache the user record, or clear the cache when given a falsy value.
function saveGHUser(u) {
  if (!u) {
    localStorage.removeItem(LS_KEYS.user);
    return;
  }
  localStorage.setItem(LS_KEYS.user, JSON.stringify(u));
}

// ------------- helpers -------------
// Parse a repo reference into { owner, repo } or null.
// Accepts "owner/repo" shorthand, https://github.com/owner/repo[.git] URLs,
// and git@github.com:owner/repo[.git] SSH remotes.
function parseRepoUrl(input) {
  if (!input) return null;
  let s = String(input).trim();
  // Order matters: drop query/hash and trailing slashes FIRST so a ".git"
  // suffix is actually at the end of the string when we strip it.
  // (The old order left "r.git" as the repo name for ".../r.git/" or
  // ".../r.git?tab=readme".)
  s = s.replace(/[?#].*$/, "").replace(/\/+$/, "").replace(/\.git$/i, "");
  // owner/repo shorthand.
  const short = /^([\w.-]+)\/([\w.-]+)$/.exec(s);
  if (short) return { owner: short[1], repo: short[2] };
  // github.com URL or SSH remote.
  const url = /github\.com[/:]([\w.-]+)\/([\w.-]+)/i.exec(s);
  if (url) return { owner: url[1], repo: url[2] };
  return null;
}

// Base64-encode a JS string as UTF-8. btoa() only accepts Latin-1, so we
// serialize the UTF-8 bytes into a one-byte-per-char binary string first.
function b64encodeUtf8(str) {
  const bytes = new TextEncoder().encode(str);
  const chars = Array.from(bytes, (b) => String.fromCharCode(b));
  return btoa(chars.join(""));
}

// Decode base64 into a UTF-8 string. Whitespace is stripped first because
// the GitHub Contents API wraps base64 payloads with newlines.
function b64decodeUtf8(b64) {
  const bin = atob(b64.replace(/\s+/g, ""));
  const bytes = Uint8Array.from(bin, (ch) => ch.charCodeAt(0));
  return new TextDecoder().decode(bytes);
}

// Thin wrapper over fetch() for the GitHub REST API.
// Adds the standard Accept/version headers, bearer auth when a token is
// present, and a JSON Content-Type for bodies unless the caller set one.
// Throws Error("GitHub <status>: <message>") on non-2xx responses.
// Returns the parsed JSON body, or null for 204 No Content.
async function ghFetch(token, path, init = {}) {
  const headers = {
    "Accept": "application/vnd.github+json",
    "X-GitHub-Api-Version": "2022-11-28",
    ...(init.headers || {}),
  };
  if (token) headers["Authorization"] = `Bearer ${token}`;
  if (init.body && !headers["Content-Type"]) {
    headers["Content-Type"] = "application/json";
  }

  const res = await fetch(GH_API + path, { ...init, headers });
  if (!res.ok) {
    // Prefer GitHub's own error message when the body carries one.
    let detail = "";
    try {
      const payload = await res.json();
      detail = payload?.message || "";
    } catch {
      // Non-JSON error body; fall back to statusText below.
    }
    throw new Error(`GitHub ${res.status}: ${detail || res.statusText}`);
  }
  return res.status === 204 ? null : res.json();
}

// ------------- API surface -------------
// Fetch the authenticated user's profile (GET /user).
async function getViewer(token) {
  const viewer = await ghFetch(token, "/user");
  return viewer;
}

// Fetch repository metadata (GET /repos/{owner}/{repo}).
async function getRepoInfo(token, { owner, repo }) {
  const info = await ghFetch(token, `/repos/${owner}/${repo}`);
  return info;
}

// List entries of a repo directory. Returns [{ name, type, path }].
// A non-array response (the path is a file, not a directory) yields [].
async function listDir(token, { owner, repo, path, ref }) {
  let suffix = "";
  if (ref) suffix = `?ref=${encodeURIComponent(ref)}`;
  const entries = await ghFetch(
    token,
    `/repos/${owner}/${repo}/contents/${encodePath(path)}${suffix}`
  );
  if (!Array.isArray(entries)) return [];
  return entries.map(({ name, type, path: entryPath }) => ({
    name,
    type,
    path: entryPath,
  }));
}

// Fetch a single file via the Contents API.
// Returns { sha, content (decoded text), encoding, html_url },
// or null when the file does not exist (GitHub 404).
async function getFile(token, { owner, repo, path, ref }) {
  const query = ref ? `?ref=${encodeURIComponent(ref)}` : "";
  let data;
  try {
    data = await ghFetch(
      token,
      `/repos/${owner}/${repo}/contents/${encodePath(path)}${query}`
    );
  } catch (e) {
    // A missing file is an expected outcome, not an error.
    if (/^GitHub 404/.test(e.message)) return null;
    throw e;
  }
  const content =
    data.encoding === "base64" ? b64decodeUtf8(data.content) : data.content;
  return { sha: data.sha, content, encoding: data.encoding, html_url: data.html_url };
}

// Latest commit SHA that touched `path` on `ref` (default branch when ref
// is omitted), or null when there is none — used for change detection.
async function getLatestCommitForPath(token, { owner, repo, path, ref }) {
  const params = new URLSearchParams({ path, per_page: "1" });
  if (ref) params.set("sha", ref);
  const commits = await ghFetch(token, `/repos/${owner}/${repo}/commits?${params}`);
  const [latest] = commits || [];
  return latest ? latest.sha : null;
}

// URL-encode each segment of a repo path while keeping "/" separators.
function encodePath(p) {
  const segments = p.split("/");
  const encoded = segments.map((seg) => encodeURIComponent(seg));
  return encoded.join("/");
}

// Create `newBranch` pointing at the tip of `baseBranch`.
// Returns the base tip's commit SHA.
async function createBranch(token, { owner, repo, baseBranch, newBranch }) {
  // Resolve the base branch ref to its tip commit.
  const refInfo = await ghFetch(
    token,
    `/repos/${owner}/${repo}/git/ref/heads/${encodeURIComponent(baseBranch)}`
  );
  const tipSha = refInfo.object.sha;
  // Point a fresh ref at that commit.
  const payload = { ref: `refs/heads/${newBranch}`, sha: tipSha };
  await ghFetch(token, `/repos/${owner}/${repo}/git/refs`, {
    method: "POST",
    body: JSON.stringify(payload),
  });
  return tipSha;
}

// Create or update one file on `branch` via the Contents API (one commit).
// Pass `sha` of the existing blob when updating; omit it for new files —
// GitHub rejects the write if the provided sha no longer matches.
async function putFileOnBranch(token, { owner, repo, branch, path, content, message, sha }) {
  const payload = { message, content: b64encodeUtf8(content), branch };
  if (sha) payload.sha = sha;
  return ghFetch(token, `/repos/${owner}/${repo}/contents/${encodePath(path)}`, {
    method: "PUT",
    body: JSON.stringify(payload),
  });
}

// Open a pull request from `head` into `base`. Returns the PR object.
async function openPR(token, { owner, repo, head, base, title, body }) {
  const payload = { title, head, base, body, maintainer_can_modify: true };
  return ghFetch(token, `/repos/${owner}/${repo}/pulls`, {
    method: "POST",
    body: JSON.stringify(payload),
  });
}

// ------------- High-level: save a letter via PR -------------
// Branch from base -> commit letters/<id>/final.json -> open a PR.
// Returns the PR object ({ html_url, number, ... }).
async function saveLetterViaPR({
  token, repo, baseBranch, letterId, jsonString, baseFileSha, viewerLogin,
}) {
  // Branch names must be ref-safe: strip punctuation from the login and
  // flatten the ISO timestamp (no ":", ".", or trailing "Z").
  const login = viewerLogin || "anon";
  const safeUser = login.replace(/[^a-z0-9-]/gi, "").toLowerCase() || "anon";
  const ts = new Date().toISOString().replace(/[:.]/g, "-").replace(/Z$/, "");
  const branchName = `edit/${letterId}/${safeUser}-${ts}`;
  const filePath = `letters/${letterId}/final.json`;
  const { owner, repo: repoName } = repo;

  // 1. Branch from the tip of the base branch.
  await createBranch(token, {
    owner,
    repo: repoName,
    baseBranch,
    newBranch: branchName,
  });

  // 2. Commit. Passing the original blob sha makes GitHub reject the write
  //    if the file moved on the base branch — but we just branched, so it
  //    will match.
  await putFileOnBranch(token, {
    owner,
    repo: repoName,
    branch: branchName,
    path: filePath,
    content: jsonString,
    message: `edit(${letterId}): update final.json`,
    sha: baseFileSha,
  });

  // 3. Open the PR back into the base branch.
  const prBody =
    `Edits to \`${filePath}\` from the Letters Metadata Editor.\n\n` +
    `Submitted by @${login} at ${new Date().toUTCString()}.\n\n` +
    `Review the diff and merge to publish.`;
  return openPR(token, {
    owner,
    repo: repoName,
    head: branchName,
    base: baseBranch,
    title: `Edits: ${letterId}`,
    body: prBody,
  });
}

// ------------- OAuth Device Flow (optional) -------------
// Requires:
//   - clientId of a GitHub OAuth App (Settings -> Developer settings -> OAuth Apps)
//   - proxyBase: a CORS-friendly endpoint forwarding POSTs to
//     https://github.com/login/device/code and /login/oauth/access_token
//     (e.g. a tiny Cloudflare Worker) — GitHub's OAuth endpoints send no
//     CORS headers, so the browser cannot call them directly.
// Returns { device_code, user_code, verification_uri, expires_in, interval }.
async function deviceFlowStart({ clientId, proxyBase }) {
  if (!clientId || !proxyBase) throw new Error("OAuth not configured (clientId/proxyBase missing).");
  const base = proxyBase.replace(/\/$/, "");
  const res = await fetch(`${base}/device/code`, {
    method: "POST",
    headers: { "Content-Type": "application/json", "Accept": "application/json" },
    body: JSON.stringify({ client_id: clientId, scope: "repo" }),
  });
  if (!res.ok) throw new Error(`Device code request failed: ${res.status}`);
  return res.json();
}

// Poll the token-exchange proxy until the user approves the device code.
// Resolves with the access token; throws on denial, expiry, cancellation
// (via `signal`), or after 15 minutes of polling. Honors GitHub's
// "slow_down" hint by stretching the poll interval by 5s each time.
async function deviceFlowPoll({ clientId, proxyBase, deviceCode, interval, signal }) {
  const tokenUrl = `${proxyBase.replace(/\/$/, "")}/oauth/access_token`;
  const deadline = Date.now() + 15 * 60 * 1000;
  let waitMs = (interval || 5) * 1000;
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  while (Date.now() < deadline) {
    if (signal?.aborted) throw new Error("Cancelled");
    await sleep(waitMs);

    const res = await fetch(tokenUrl, {
      method: "POST",
      headers: { "Content-Type": "application/json", "Accept": "application/json" },
      body: JSON.stringify({
        client_id: clientId,
        device_code: deviceCode,
        grant_type: "urn:ietf:params:oauth:grant-type:device_code",
      }),
    });
    let data;
    try {
      data = await res.json();
    } catch {
      data = {};
    }

    if (data.access_token) return data.access_token;
    switch (data.error) {
      case "authorization_pending":
        break; // user hasn't approved yet — keep polling
      case "slow_down":
        waitMs += 5000; // back off per GitHub's hint
        break;
      case "expired_token":
        throw new Error("Sign-in code expired. Try again.");
      case "access_denied":
        throw new Error("Sign-in cancelled.");
      default:
        // Any other truthy error is fatal; falsy (absent) means poll again.
        if (data.error) throw new Error(data.error_description || data.error);
        break;
    }
  }
  throw new Error("Sign-in timed out.");
}

// ------------- Expose -------------
// Loaded as a plain <script> (no module system), so the public API hangs
// off a single window.GH namespace object for the editor UI to consume.
window.GH = {
  // storage (localStorage-backed config / token / cached user)
  loadGHConfig, saveGHConfig,
  loadGHToken, saveGHToken,
  loadGHUser, saveGHUser,
  // helpers
  parseRepoUrl,
  // api
  getViewer, getRepoInfo, getFile, getLatestCommitForPath, listDir,
  saveLetterViaPR,
  // oauth
  deviceFlowStart, deviceFlowPoll,
};
