#!/usr/bin/env python3
"""
Importa los shop products faltantes a Airtable 2.0.
- Lee catalog.json (144 productos)
- Detecta cuáles no existen en Airtable Shop Products
- Crea los faltantes con: Name, Slug, Category, Price_USD, Core_Product (link)
- Crea los registros correspondientes en Shop Products Copy
- Solo crea, nunca sobreescribe

Uso: python3 scripts/import-shop-products.py [--dry-run]
"""

import json, sys, time, urllib.request, urllib.parse

DRY_RUN = "--dry-run" in sys.argv  # preview mode: report what would be created, write nothing

# SECURITY(review): a personal access token is committed in source. Rotate it
# and load it from an environment variable (e.g. os.environ["AIRTABLE_TOKEN"])
# before sharing or publishing this script.
TOKEN = "patUFXGCZ4lEcCQDx.88ed15dafa2a89c89d70b405bb86f97fdb9e097737ed5e5b0b3c33b252de51cb"
NEW_BASE = "appRxvpzqCmNsw2JN"      # target Airtable base ("Airtable 2.0")
SHOP_PRODUCTS  = "tblczbF5tQnq7SjZy"  # Shop Products table id
SHOP_COPY      = "tbldxXvyo1f7vIQp5"  # Shop Products Copy table id
PRODUCTS_TABLE = "tbleVlExtIu9ONsQd"  # core products

# Price per core preset (USD/month)
# Presets not listed here fall back to 25 at the lookup sites
# (CORE_PRICE.get(core, 25) below).
CORE_PRICE = {
    "bp-dinamic":          15,
    "petite-website":      15,
    "insurance-advisor":   15,
    "catalogue-ai":        20,
    "standard-website":    25,
    "foundations-ong":     25,
    "nutritionist":        25,
    "restaurant-bar":      25,
    "business-catalogue":  25,
    "sitio-web-profesional": 25,
    "corporative":         30,
    "art-design":          30,
    "photography":         30,
    "personal-brand":      30,
    "construction":        30,
    "financial-wealth":    35,
    "concierge":           35,
    "law-firm-digital":    35,
    "real-estate":         40,
    "agency":              40,
    "website-reseller":    45,
}

# Category per core preset (valid Airtable singleSelect values)
# NOTE(review): lookups below fall back to "Standard" for unknown presets —
# confirm "Standard" is an existing option of the Category singleSelect,
# otherwise Airtable rejects the whole batch insert.
CORE_CATEGORY = {
    "bp-dinamic":            "Services",
    "petite-website":        "Petite Website",
    "insurance-advisor":     "Professional",
    "catalogue-ai":          "E-Commerce",
    "standard-website":      "Standard Web App",
    "foundations-ong":       "Corporate",
    "nutritionist":          "Health and Wellness",
    "restaurant-bar":        "Services",
    "business-catalogue":    "E-Commerce",
    "sitio-web-profesional": "Professional",
    "corporative":           "Corporate",
    "art-design":            "Art",
    "photography":           "Art",
    "personal-brand":        "Personal",
    "construction":          "Corporate",
    "financial-wealth":      "Professional",
    "concierge":             "Services",
    "law-firm-digital":      "Professional",
    "real-estate":           "Professional",
    "agency":                "Corporate",
    "website-reseller":      "Corporate",
}

def api_get(url, timeout=30):
    """GET *url* with the Airtable bearer token and return the parsed JSON body.

    timeout: seconds before the request aborts. Without it, urlopen blocks
    indefinitely on a stalled connection, hanging the whole import run.
    Raises urllib.error.URLError / HTTPError on failure.
    """
    req = urllib.request.Request(url, headers={"Authorization": f"Bearer {TOKEN}"})
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        return json.loads(resp.read())

def api_post(table, records_data, timeout=30):
    """POST a batch of records to *table* in NEW_BASE and return the parsed JSON.

    records_data: list of {"fields": {...}} dicts (Airtable accepts at most
    10 per request — callers chunk accordingly, see batch()).
    timeout: seconds before the request aborts instead of blocking forever.
    Raises urllib.error.URLError / HTTPError on failure.
    """
    url = f"https://api.airtable.com/v0/{NEW_BASE}/{table}"
    body = json.dumps({"records": records_data}).encode()
    req = urllib.request.Request(url, data=body, method="POST",
        headers={"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"})
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        return json.loads(resp.read())

# ── Load source data ──────────────────────────────────────────
# Open with explicit UTF-8: the copy contains Spanish text ("48hs", accents),
# and the platform default encoding (e.g. cp1252 on Windows) could corrupt
# or reject it.
with open("database/seeders/products/catalog.json", encoding="utf-8") as f:
    CATALOG = json.load(f)
with open("database/seeders/products/shop-copy.json", encoding="utf-8") as f:
    SHOP_COPY_DATA = json.load(f)

# ── Fetch existing Airtable data ──────────────────────────────
# Paginate through ALL Shop Products. pageSize controls records per page
# (max 100); the previous maxRecords=200 parameter caps the TOTAL records
# returned across all pages, so any products beyond 200 would be silently
# missed here and then re-created as duplicates below.
print("Fetching Airtable Shop Products...")
existing_slugs = set()       # slugs already present → skipped by the import
existing_records = {}        # slug → Airtable record id
offset = None
while True:
    url = f"https://api.airtable.com/v0/{NEW_BASE}/{SHOP_PRODUCTS}?pageSize=100"
    if offset:
        url += f"&offset={urllib.parse.quote(offset)}"
    data = api_get(url)
    for r in data.get("records", []):
        slug = r.get("fields", {}).get("Slug", "").strip()
        if slug:
            existing_slugs.add(slug)
            existing_records[slug] = r["id"]
    # Airtable returns an offset token while more pages remain.
    offset = data.get("offset")
    if not offset:
        break

print(f"  → {len(existing_slugs)} products already in Airtable")

# Fetch Core Products to get their record IDs (used for the Core_Product link).
# Paginate like the Shop Products fetch above: the previous single request
# with maxRecords=100 would silently truncate if the table ever grows past
# 100 records, leaving new products unlinked.
print("Fetching Core Products...")
core_product_ids = {}        # core slug → Airtable record id
offset = None
while True:
    url = f"https://api.airtable.com/v0/{NEW_BASE}/{PRODUCTS_TABLE}?pageSize=100"
    if offset:
        url += f"&offset={urllib.parse.quote(offset)}"
    data = api_get(url)
    for r in data.get("records", []):
        slug = r.get("fields", {}).get("Slug", "").strip()
        if slug:
            core_product_ids[slug] = r["id"]
    offset = data.get("offset")
    if not offset:
        break

print(f"  → {len(core_product_ids)} core products found")

# ── Find missing products ─────────────────────────────────────
# Keep every catalog entry whose slug is not already in Airtable;
# slugs starting with "_" are metadata entries, not products.
missing = {
    slug: entry
    for slug, entry in CATALOG.items()
    if not slug.startswith("_") and slug not in existing_slugs
}

print(f"\nMissing from Airtable: {len(missing)}")

if DRY_RUN:
    # Preview mode: show a sample of what would be created, then exit
    # without touching Airtable.
    print("\nDRY RUN — first 10:")
    preview = list(missing.items())[:10]
    for slug, entry in preview:
        core_slug = entry["core"]
        print(f"  {slug} → core:{core_slug} price:${CORE_PRICE.get(core_slug, 25)}")
        copy_entry = SHOP_COPY_DATA.get(slug, {})
        if copy_entry:
            print(f"    H1: {copy_entry.get('headline','')}")
    print(f"\nTotal to create: {len(missing)} Shop Products + {len(missing)} Copy records")
    sys.exit(0)

# ── Create in batches of 10 (Airtable limit) ─────────────────
def batch(lst, n):
    """Yield consecutive slices of *lst*, each holding at most *n* items."""
    start = 0
    while start < len(lst):
        yield lst[start:start + n]
        start += n

# Work list and running tallies for the two creation passes below.
shop_items = list(missing.items())
created_shop = 0
created_copy = 0
failed = []   # slugs whose Shop Products insert failed; Copy pass skips them

# Create Shop Products first
print(f"\nCreating {len(missing)} Shop Products...")
for chunk in batch(shop_items, 10):
    records = []
    for shop_slug, entry in chunk:
        core_slug = entry["core"]
        # Fall back to a title-cased slug when the catalog has no display name.
        product_name = entry.get("name", shop_slug.replace("-", " ").title())
        price = CORE_PRICE.get(core_slug, 25)
        # NOTE(review): "Standard" fallback — confirm it is a valid option of
        # the Category singleSelect; an invalid value fails the whole batch.
        category = CORE_CATEGORY.get(core_slug, "Standard")

        fields = {
            "Name": product_name,
            "Slug": shop_slug,
            "Price_USD": price,
            "Category": category,
        }
        # Link to core product if exists
        if core_slug in core_product_ids:
            fields["Core_Product"] = [core_product_ids[core_slug]]

        records.append({"fields": fields})

    try:
        result = api_post(SHOP_PRODUCTS, records)
        created_shop += len(result.get("records", []))
        # Store new IDs for Copy creation
        for rec in result.get("records", []):
            slug = rec["fields"].get("Slug", "")
            existing_records[slug] = rec["id"]
        time.sleep(0.3)  # throttle between batch writes
    except Exception as e:
        # Best-effort: log and mark the whole chunk failed, keep importing.
        print(f"  ERROR batch: {e}")
        failed.extend([s for s, _ in chunk])

print(f"  → Created {created_shop} Shop Products")

# Create Shop Products Copy records for the new products
print(f"\nCreating {len(missing)} Shop Products Copy records...")
for chunk in batch(shop_items, 10):
    records = []
    for shop_slug, entry in chunk:
        # Skip products whose Shop Products insert failed in the pass above.
        if shop_slug in failed:
            continue
        copy = SHOP_COPY_DATA.get(shop_slug, {})
        product_name = entry.get("name", shop_slug.replace("-", " ").title())

        fields = {
            "Slug": shop_slug,
            "Name": product_name,
            # Generic Spanish headline used when no curated copy exists.
            "Headline": copy.get("headline", f"{product_name} — sitio web profesional en 48hs"),
            "Subheadline": copy.get("subheadline", ""),
            "Keywords": copy.get("keywords", ""),
        }
        if copy.get("modules_aliased"):
            fields["Value_Props"] = " · ".join(copy["modules_aliased"])

        records.append({"fields": fields})

    # A chunk can be empty when every product in it failed above.
    if not records:
        continue
    try:
        result = api_post(SHOP_COPY, records)
        created_copy += len(result.get("records", []))
        time.sleep(0.3)  # throttle between batch writes
    except Exception as e:
        # Best-effort: log and continue with the remaining chunks.
        print(f"  ERROR batch copy: {e}")

print(f"  → Created {created_copy} Copy records")

# Final stats
# The Copy table's prior record count is never fetched, so report only the
# verified delta instead of the previous hardcoded "100 →" baseline, which
# went stale as soon as the table changed.
total_shop = len(existing_slugs) + created_shop
print("\n✅ Done!")
print(f"   Shop Products: {len(existing_slugs)} → {total_shop} (+{created_shop})")
print(f"   Shop Products Copy: +{created_copy} created")
if failed:
    print(f"   Failed: {failed}")
