#!/usr/bin/env python3
"""Smart X Engagement - Reuses persistent browser session"""
import asyncio
import json
import base64
import os
import random
import re
import sys
import urllib.request
from datetime import datetime
from pathlib import Path

os.environ.setdefault("DISPLAY", ":99")

from patchright.async_api import async_playwright

# Filesystem locations for the persistent session and reply-tracking state.
PROFILE_DIR = Path("/workspace/browser_profile/x_headed")    # persistent Chromium user-data dir
COOKIES_PATH = Path("/workspace/scripts/x_cookies.json")     # exported x.com session cookies
REPLIED_USERS_PATH = Path("/workspace/replied_users.json")   # username -> last-reply unix timestamp

# System prompt prepended to every Gemini request (see analyze_image).
VOICE_PROFILE = """You are a Florida medical cannabis patient. Friendly, genuine, helpful.
Lowercase twitter speak, 1-2 emojis max. React to what they ACTUALLY posted.

CRITICAL: Look at the IMAGE. If flower=smoke, if concentrate=dab, if unclear=generic.
NEVER mention products you don't see. Keep under 140 chars."""

# SECURITY: a hardcoded API key is committed here as a fallback — it should be
# rotated and removed. Prefer supplying GEMINI_API_KEY via the environment.
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "AIzaSyBSGlZewG3RfqKJsPJr-ZR_BTJdLoxpqg")

# One query is picked at random per run (see run()).
SEARCH_QUERIES = ["Florida cannabis", "Florida dispensary pickup", "Florida medical marijuana", "FL weed"]


def load_replied_users():
    """Return {username_lower: last_reply_unix_ts} pruned to the last 7 days.

    Returns an empty dict when the tracking file is absent or unreadable —
    a corrupt file must not crash the run (and would otherwise also break
    save_replied_user, which calls this first).
    """
    if not REPLIED_USERS_PATH.exists():
        return {}
    try:
        data = json.loads(REPLIED_USERS_PATH.read_text())
    except (json.JSONDecodeError, OSError):
        # Corrupt or unreadable tracking file: treat as "no history".
        return {}
    # Entries older than one rolling week make the user eligible again.
    cutoff = datetime.now().timestamp() - (7 * 24 * 3600)
    return {k: v for k, v in data.items() if v > cutoff}

def save_replied_user(username):
    """Record the current time as our most recent reply to *username*."""
    now_ts = datetime.now().timestamp()
    # Re-load (and thereby prune) existing history before appending.
    history = load_replied_users()
    history[username.lower()] = now_ts
    REPLIED_USERS_PATH.write_text(json.dumps(history))

def already_replied_to(username):
    """True when *username* received a reply from us within the 7-day window."""
    recent = load_replied_users()
    return username.lower() in recent

def is_tweet_recent(time_text, max_hours=24):
    """Decide whether a tweet's relative-timestamp label is fresh enough.

    X renders fresh tweets as "30s", "5m" or "3h" and older ones as a date
    like "Mar 5". Seconds- and minutes-old tweets are always recent; hour
    stamps are recent up to *max_hours*; anything else — dates, empty or
    None input, unrecognized text — is treated as stale.

    Fix: the original only matched minutes ("\\d+m"), so a seconds stamp
    such as "30s" was wrongly classified as stale.
    """
    if not time_text:
        return False
    t = time_text.strip().lower()
    # Seconds- or minutes-old: always recent.
    if re.match(r'^\d+[sm]$', t):
        return True
    m = re.match(r'^(\d+)h$', t)
    if m:
        return int(m.group(1)) <= max_hours
    # Month-name dates and anything unrecognized fall through to stale.
    return False

def load_cookies():
    """Build Playwright cookie dicts from the exported x.com cookie file.

    Returns [] when the file is missing; otherwise one entry per known
    cookie name that is present and non-empty in the export.
    """
    if not COOKIES_PATH.exists():
        return []
    raw = json.loads(COOKIES_PATH.read_text())
    wanted = ["auth_token", "ct0", "twid", "kdt", "guest_id", "personalization_id"]
    cookies = []
    for name in wanted:
        value = raw.get(name)
        if not value:
            continue
        cookies.append({
            "name": name,
            "value": value,
            "domain": ".x.com",
            "path": "/",
            "secure": True,
            # sameSite is "Lax" for ct0 and "None" for every other cookie.
            "sameSite": "Lax" if name == "ct0" else "None",
        })
    return cookies

async def analyze_image(tweet_text: str, img_b64: str, username: str):
    """Ask Gemini to classify the tweet's image and draft a short reply.

    Sends VOICE_PROFILE + the tweet text + a base64-encoded PNG to the
    gemini-2.0-flash generateContent endpoint, then parses the labelled
    "PRODUCT_TYPE:" / "REPLY:" lines out of the response text.

    Returns the reply string, or None when the API call fails, the reply
    length is implausible (<=5 or >280 chars), or the model classified the
    image as flower but the reply mentions dabbing.
    """
    # Prompt asks the model to emit two labelled lines we can parse below.
    prompt = f"""{VOICE_PROFILE}
Tweet from @{username}: "{tweet_text}"
Analyze image. Format: PRODUCT_TYPE: [flower/concentrate/unclear]
REPLY: [reply]"""
    try:
        payload = json.dumps({"contents": [{"parts": [
            {"text": prompt}, {"inline_data": {"mime_type": "image/png", "data": img_b64}}
        ]}], "generationConfig": {"temperature": 0.8, "maxOutputTokens": 100}}).encode()
        req = urllib.request.Request(
            f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key={GEMINI_API_KEY}",
            data=payload, headers={"Content-Type": "application/json"})
        with urllib.request.urlopen(req, timeout=30) as resp:
            # First candidate's first text part carries the whole model output.
            r = json.loads(resp.read().decode())["candidates"][0]["content"]["parts"][0]["text"]
            ptype, reply = "unclear", None
            for ln in r.split("\n"):
                if ln.startswith("PRODUCT_TYPE:"): ptype = ln.split(":",1)[1].strip().lower()
                elif ln.startswith("REPLY:"): reply = ln.split(":",1)[1].strip().strip('"\'')
            # Model sometimes omits the labels; fall back to the raw text.
            if not reply: reply = r.strip().strip('"\'')
            print(f"  🔍 {ptype}", flush=True)
            # Guard against replies that talk about dabbing flower.
            if ptype == "flower" and "dab" in reply.lower(): return None
            # Sanity bounds: long enough to be real, short enough to post.
            return reply if 5 < len(reply) <= 280 else None
    except Exception as e:
        # Network/API/parsing failures are non-fatal; caller treats None as "skip".
        print(f"  Gemini err: {e}", flush=True)
    return None

def is_news_or_legislative(text):
    r"""Return True when the tweet text looks like news / legislative /
    political coverage rather than a personal post.

    Fix: keywords are now matched on whole words (``\b`` boundaries) so
    that e.g. "lawn"/"flaw" no longer trips 'law' and "billboard" no
    longer trips 'bill'; multi-word phrases still match as phrases.
    """
    text_lower = text.lower()
    news_keywords = [
        'senate', 'committee', 'bill', 'legislation', 'vote', 'law', 'regulation',
        'political', 'governor', 'senator', 'representative', 'house of representatives',
        'passed', 'approved', 'rejected', 'measure', 'amendment', 'hearing',
        'news', 'breaking', 'report', 'announced', 'according to', 'officials',
        'fox news', 'cnn', 'msnbc', 'reuters', 'associated press'
    ]
    # Whole-word match avoids substring false positives on casual posts.
    return any(re.search(r'\b' + re.escape(kw) + r'\b', text_lower)
               for kw in news_keywords)

async def run(reply_count: int = 1, like_count: int = 3) -> dict:
    """Run one engagement pass against a random live x.com search.

    Opens the persistent browser profile, verifies the session, then scans
    up to 8 tweets from a live search: likes up to *like_count* and posts
    at most one image-grounded reply (skipping news/legislative posts,
    stale tweets, and users replied to within the last week).

    Returns a stats dict: {"likes": int, "replies": int}.
    """
    print("🧠 Smart Engagement", flush=True)
    stats = {"likes": 0, "replies": 0}
    PROFILE_DIR.mkdir(parents=True, exist_ok=True)
    
    async with async_playwright() as p:
        # Persistent context reuses the logged-in profile between runs;
        # headless=False relies on the DISPLAY env var set at import time.
        ctx = await p.chromium.launch_persistent_context(
            str(PROFILE_DIR), headless=False, 
            viewport={"width": 1366, "height": 768},
            args=["--no-sandbox", "--disable-dev-shm-usage"])
        
        await ctx.add_cookies(load_cookies())
        page = ctx.pages[0] if ctx.pages else await ctx.new_page()
        
        await page.goto("https://x.com/home", wait_until="domcontentloaded")
        await asyncio.sleep(3)
        # Landing on the login page means the cookies/profile are stale.
        if "login" in page.url.lower():
            print("❌ Not logged in", flush=True)
            await ctx.close()
            return stats
        
        # Live ("Latest", f=live) search for a random query keeps results fresh.
        q = random.choice(SEARCH_QUERIES)
        print(f"🔍 {q}", flush=True)
        await page.goto(f"https://x.com/search?q={q.replace(' ','%20')}&f=live", wait_until="domcontentloaded")
        await asyncio.sleep(4)
        # Small scroll nudges lazy-loaded tweets into the DOM.
        await page.mouse.wheel(0, 400)
        await asyncio.sleep(2)
        
        # Examine at most 8 tweets per pass.
        for tweet in (await page.locator('[data-testid="tweet"]').all())[:8]:
            if stats["replies"] >= reply_count and stats["likes"] >= like_count: break
            try:
                # Skip tweets whose relative timestamp says they are too old.
                time_el = tweet.locator('time').first
                if await time_el.count():
                    if not is_tweet_recent(await time_el.inner_text()): continue
                
                txt = await tweet.locator('[data-testid="tweetText"]').first.inner_text() if await tweet.locator('[data-testid="tweetText"]').count() else ""
                author = "unknown"
                try:
                    # Author handle = first path segment of the profile link href.
                    a = tweet.locator('[data-testid="User-Name"] a[role="link"]').first
                    if await a.count(): author = (await a.get_attribute("href")).strip("/").split("/")[0]
                except: pass
                
                print(f"👤 @{author}: {txt[:35]}...", flush=True)
                
                # Skip news/legislative content
                if is_news_or_legislative(txt):
                    print(f"  ⏭️  Skipping news/legislative content", flush=True)
                    continue
                
                # At most one reply per user per rolling 7-day window.
                if already_replied_to(author): continue
                
                if stats["likes"] < like_count:
                    btn = tweet.locator('[data-testid="like"]')
                    if await btn.is_visible():
                        # Like failures (e.g. already liked) are non-fatal.
                        try: await btn.click(timeout=3000); stats["likes"] += 1; await asyncio.sleep(1)
                        except: pass
                
                if stats["replies"] < reply_count:
                    imgs = await tweet.locator('[data-testid="tweetPhoto"] img').all()
                    reply = None
                    if imgs:
                        # Screenshot the first photo and let Gemini draft a reply.
                        try: reply = await analyze_image(txt, base64.b64encode(await imgs[0].screenshot()).decode(), author)
                        except: pass
                    
                    # CRITICAL FIX: Don't reply if no image and no contextual reply generated
                    if not reply:
                        print(f"  ⏭️  No image, skipping reply", flush=True)
                        continue
                    
                    print(f"  📝 {reply}", flush=True)
                    # Open the tweet detail view (click the timestamp's parent link),
                    # then focus the reply composer via the "r" keyboard shortcut.
                    await tweet.locator('time').locator('..').first.click(timeout=3000)
                    await asyncio.sleep(2)
                    await page.keyboard.press("r")
                    await asyncio.sleep(2)
                    ta = page.locator('[data-testid="tweetTextarea_0"]').first
                    if await ta.is_visible(timeout=3000):
                        # Type with per-key delay; Ctrl+Enter submits the reply.
                        await ta.fill(""); await ta.type(reply, delay=50); await asyncio.sleep(1)
                        await page.keyboard.press("Control+Enter"); await asyncio.sleep(3)
                        stats["replies"] += 1; save_replied_user(author)
                        print("  ✅ Posted!", flush=True)
                    await page.go_back(); await asyncio.sleep(2)
                    # NOTE(review): this break caps replies at one per pass even
                    # when reply_count > 1 — confirm that is intended.
                    break
            except Exception as e: print(f"  err: {str(e)[:40]}", flush=True)
        
        print(f"📊 L:{stats['likes']} R:{stats['replies']}", flush=True)
        await ctx.close()
    return stats

if __name__ == "__main__":
    # Defaults: 1 reply, 3 likes; overridable via "--replies N" / "--likes N".
    replies_target = 1
    likes_target = 3
    argv = sys.argv
    for idx, arg in enumerate(argv):
        has_value = idx + 1 < len(argv)
        if arg == "--replies" and has_value:
            replies_target = int(argv[idx + 1])
        elif arg == "--likes" and has_value:
            likes_target = int(argv[idx + 1])
    asyncio.run(run(replies_target, likes_target))
