from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import mysql.connector
from datetime import datetime
import pytz
import time
import logging
from dotenv import load_dotenv
import os

# -----------------------
# Config
# -----------------------
# Pull DB credentials from a .env file / environment; any missing variable
# becomes None and will surface as a connect error at runtime.
load_dotenv()
DB_CONFIG = {
    "host": os.getenv("DB_HOST"),
    "user": os.getenv("DB_USER"),
    "password": os.getenv("DB_PASSWORD"),
    "database": os.getenv("DB_NAME")
}
# Local timezone used for slot boundaries and the scraped_at timestamp.
TIMEZONE = "America/Los_Angeles"
LOG_FILE = "scraper_cron.log"

# -----------------------
# Logging setup
# -----------------------
# Log to both a file (for cron runs) and stderr (for interactive runs).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(message)s',
    handlers=[
        logging.FileHandler(LOG_FILE),
        logging.StreamHandler()
    ]
)

# -----------------------
# DB Setup
# -----------------------
def init_db():
    """Ensure the ``psychic_stats`` table exists.

    Connects to MySQL using ``DB_CONFIG`` and issues an idempotent
    ``CREATE TABLE IF NOT EXISTS``.

    Raises:
        mysql.connector.Error: if the connection or the DDL fails
            (logged before re-raising so the cron log captures it).
    """
    conn = None
    try:
        conn = mysql.connector.connect(**DB_CONFIG)
        cursor = conn.cursor()
        try:
            cursor.execute("""
            CREATE TABLE IF NOT EXISTS psychic_stats (
                id INT AUTO_INCREMENT PRIMARY KEY,
                available INT,
                busy INT,
                slot VARCHAR(10),
                scraped_at DATETIME
            )
            """)
            conn.commit()
        finally:
            # Close the cursor even if execute/commit raised; the original
            # code leaked both cursor and connection on error.
            cursor.close()
        logging.info("Connected to MySQL and ensured table exists.")
    except mysql.connector.Error as e:
        logging.error(f" MySQL init failed: {e}")
        raise
    finally:
        if conn is not None:
            conn.close()

def slot_exists(slot_str):
    """Return True if a row for *slot_str* was already stored today.

    "Today" is evaluated in the configured local ``TIMEZONE``.

    Args:
        slot_str: Slot label such as ``"08:00"``.

    Returns:
        bool: True when a matching row exists; False when none exists
        OR when the DB check fails (failure is logged — best-effort,
        so a DB outage may permit a duplicate insert).
    """
    tz = pytz.timezone(TIMEZONE)
    today_str = datetime.now(tz).strftime("%Y-%m-%d")

    conn = None
    try:
        conn = mysql.connector.connect(**DB_CONFIG)
        cursor = conn.cursor()
        try:
            cursor.execute(
                "SELECT 1 FROM psychic_stats WHERE slot = %s AND DATE(scraped_at) = %s LIMIT 1",
                (slot_str, today_str)
            )
            return cursor.fetchone() is not None
        finally:
            # Always release the cursor; the original leaked it (and the
            # connection) whenever the query raised.
            cursor.close()
    except mysql.connector.Error as e:
        logging.error(f" DB check failed: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()

# -----------------------
# Scraping Logic
# -----------------------
def _insert_row(available, busy, slot_str, scraped_at):
    """Insert one scraped measurement into ``psychic_stats``.

    Best-effort: DB errors are logged, not raised, matching the
    original behavior. The connection is always closed.
    """
    conn = None
    try:
        conn = mysql.connector.connect(**DB_CONFIG)
        cursor = conn.cursor()
        try:
            cursor.execute(
                "INSERT INTO psychic_stats (available, busy, slot, scraped_at) VALUES (%s, %s, %s, %s)",
                (available, busy, slot_str, scraped_at)
            )
            conn.commit()
        finally:
            # Close cursor even on failure; the original leaked the
            # connection when execute/commit raised.
            cursor.close()
        logging.info(f"[{slot_str}] Data inserted successfully.")
    except mysql.connector.Error as e:
        logging.error(f" Failed to insert data: {e}")
    finally:
        if conn is not None:
            conn.close()

def scrape_and_store():
    """Scrape psychic availability counts and store one row per 2-hour slot.

    Runs only when the local time is exactly on an even-hour boundary
    (HH:00, HH % 2 == 0) and no row for today's slot exists yet.
    Scraping errors are logged, never raised, so a cron run always exits
    cleanly.
    """
    tz = pytz.timezone(TIMEZONE)
    now = datetime.now(tz)

    # Skip if not on 2-hour boundary
    if now.hour % 2 != 0 or now.minute != 0:
        logging.info(f"Current time {now.strftime('%H:%M')} is not a 2-hour boundary. Skipping scraping.")
        return

    slot_str = f"{now.hour:02d}:00"

    if slot_exists(slot_str):
        logging.info(f"[{slot_str}] Data already exists for today. Skipping scraping.")
        return

    # Headless local Chrome; --no-sandbox is typically required when the
    # cron job runs as root or inside a container.
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--disable-gpu")
    chrome_options.add_argument("--no-sandbox")

    # Initialize before try so the finally block can't hit a NameError
    # when webdriver.Chrome() itself fails (the original masked that
    # with a bare `except:`).
    driver = None
    try:
        driver = webdriver.Chrome(options=chrome_options)  # Local Chrome
        driver.get("https://www.californiapsychics.com/psychic-readings")
        # Fixed wait for the JS-rendered counters; a WebDriverWait on the
        # element would be more robust — TODO consider.
        time.sleep(5)

        available = int(driver.find_element(By.CLASS_NAME, "totalPsychicsAvailable").text.strip())
        busy = int(driver.find_element(By.CLASS_NAME, "totalPsychicsOnCall").text.strip())
        scraped_at = now.strftime("%Y-%m-%d %H:%M:%S")

        logging.info(f"Slot: {slot_str}, Available: {available}, Busy: {busy}")

        _insert_row(available, busy, slot_str, scraped_at)
    except Exception as e:
        logging.error(f" Error during scraping: {e}")
    finally:
        if driver is not None:
            driver.quit()

# -----------------------
# Main
# -----------------------
# Intended to be invoked by cron; scrape_and_store() self-skips when the
# current time is not an even-hour boundary.
if __name__ == "__main__":
    init_db()
    scrape_and_store()

