#!/usr/bin/env python3
"""
Backfill GA_Tracking_ID, SERVER y Cpanel_User en Airtable 2.0 Projects
desde datos de la base vieja.
Uso: python3 scripts/migrate-projects.py [--dry-run]
"""

import json
import os
import re
import sys
import time
import urllib.parse
import urllib.request

# Airtable credentials and target base/table.
# SECURITY NOTE(review): a personal access token was committed in source.
# Prefer the AIRTABLE_TOKEN environment variable; the literal below is kept
# only as a backward-compatible fallback — rotate and remove it. TODO.
TOKEN = os.environ.get(
    "AIRTABLE_TOKEN",
    "patUFXGCZ4lEcCQDx.88ed15dafa2a89c89d70b405bb86f97fdb9e097737ed5e5b0b3c33b252de51cb",
)
NEW_BASE = "appRxvpzqCmNsw2JN"          # Airtable 2.0 base id
PROJECTS_TABLE = "tblzCgJZCbbt5j13Q"    # Projects table id in the 2.0 base

# When set, report what would change without PATCHing anything.
DRY_RUN = "--dry-run" in sys.argv

# Server name normalization from old base values; unknown names pass through.
SERVER_MAP = {
    "ESP - Hosting Rapido": "Hosting Rapido",
    "BR - BewPro":          "VPS Hostinger 1",
}

def normalize_name(name):
    """Reduce *name* to a canonical form for fuzzy matching.

    Lower-cases, collapses runs of whitespace to single spaces, and strips
    every character outside [a-z0-9 ].
    """
    collapsed = " ".join(name.lower().split())
    return re.sub(r'[^a-z0-9 ]', '', collapsed)

def normalize_domain(url):
    """Reduce a URL to its bare host for matching.

    Lower-cases, strips trailing slashes, the http(s) scheme and a leading
    "www.", then drops any path/query/fragment suffix so e.g.
    "https://www.Example.com/about?x=1" and "example.com" compare equal.
    Returns "" for falsy input.
    """
    if not url:
        return ""
    url = url.lower().strip().rstrip('/')
    url = re.sub(r'^https?://', '', url)
    url = re.sub(r'^www\.', '', url)
    # Fix: previously "example.com/about" kept its path and never matched
    # the bare domain stored in the 2.0 base.
    return re.split(r'[/?#]', url)[0]

def api_get(url):
    """GET *url* with the Airtable bearer token; return the parsed JSON body."""
    headers = {"Authorization": f"Bearer {TOKEN}"}
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request) as response:
        body = response.read()
    return json.loads(body)

def api_patch(table, record_id, fields):
    """PATCH *fields* onto one record in *table*; return the API's JSON reply."""
    endpoint = f"https://api.airtable.com/v0/{NEW_BASE}/{table}/{record_id}"
    payload = json.dumps({"fields": fields}).encode()
    headers = {
        "Authorization": f"Bearer {TOKEN}",
        "Content-Type": "application/json",
    }
    request = urllib.request.Request(
        endpoint, data=payload, method="PATCH", headers=headers
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())

# Load old projects from the local backup dump.
with open("docs/airtable-backup/bewpro-old/projects.json", encoding="utf-8") as f:
    old_projects = json.load(f)

# Fetch ALL 2.0 projects, following the pagination offset.
# Fix: the previous query used maxRecords=200, which caps the TOTAL number of
# records Airtable returns (not the page size) — bases with more than 200
# projects were silently truncated. pageSize (max 100) is the paging knob.
new_projects = []
offset = None
while True:
    url = f"https://api.airtable.com/v0/{NEW_BASE}/{PROJECTS_TABLE}?pageSize=100"
    if offset:
        # Offsets may contain characters like '/' — quote to keep the URL valid.
        url += f"&offset={urllib.parse.quote(offset)}"
    data = api_get(url)
    new_projects.extend(data.get("records", []))
    offset = data.get("offset")
    if not offset:
        break

# Build lookup maps for the 2.0 records, keyed by normalized name and domain.
# Fix: skip empty keys — a 2.0 record with no Name used to insert a "" entry
# into new_by_name, letting blank-named old records "match" it. (Duplicate
# keys still keep only the last record seen, as before.)
new_by_name = {}
new_by_domain = {}
for record in new_projects:  # renamed from `f`, which shadowed the file handle
    record_fields = record.get("fields", {})
    name_key = normalize_name(record_fields.get("Name", ""))
    domain_key = normalize_domain(record_fields.get("Domain", ""))
    if name_key:
        new_by_name[name_key] = record
    if domain_key:
        new_by_domain[domain_key] = record

print(f"{'DRY RUN — ' if DRY_RUN else ''}Backfill Projects: {len(old_projects)} old → {len(new_projects)} en 2.0\n")

# Counters plus the names of old projects with no 2.0 counterpart.
matched = 0
updated = 0
unmatched = []

for legacy in old_projects:
    legacy_fields = legacy.get("fields", {})
    old_name = legacy_fields.get("Name", "")
    old_url = legacy_fields.get("URL", "")
    old_ga = legacy_fields.get("GA Tracking", "").strip()
    old_server = legacy_fields.get("Server", "").strip()
    old_cpanel = legacy_fields.get("Cpanel User", "").strip()
    old_amount = legacy_fields.get("Amount")

    # Match by normalized name first; fall back to the domain index.
    target = new_by_name.get(normalize_name(old_name))
    if target is None and old_url:
        target = new_by_domain.get(normalize_domain(old_url))

    if target is None:
        unmatched.append(old_name)
        continue

    matched += 1
    current = target.get("fields", {})
    updates = {}

    # Only backfill fields that are still empty in the 2.0 record.
    if old_ga and not current.get("GA_Tracking_ID"):
        updates["GA_Tracking_ID"] = old_ga

    if old_server and not current.get("SERVER"):
        # Map legacy server labels to the 2.0 names; unknown labels pass through.
        updates["SERVER"] = SERVER_MAP.get(old_server, old_server)

    if old_cpanel and not current.get("Cpanel_User"):
        updates["Cpanel_User"] = old_cpanel

    if old_amount and not current.get("Notes"):
        updates["Notes"] = f"Tarifa histórica: ${old_amount}/mes"

    if not updates:
        print(f"  OK   {old_name}")
        continue

    print(f"  UPD  {old_name} → {list(updates.keys())}")
    if not DRY_RUN:
        api_patch(PROJECTS_TABLE, target["id"], updates)
        time.sleep(0.2)  # stay under Airtable's rate limit
    updated += 1

print(f"\nMatched: {matched}/{len(old_projects)} | {'Hubiera actualizado' if DRY_RUN else 'Actualizados'}: {updated}")
if unmatched:
    print(f"\nSin match en 2.0 ({len(unmatched)}):")
    for n in unmatched:
        print(f"  - {n}")
