import json

from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from webdriver_manager.chrome import ChromeDriverManager

from models.models import Locations, Partners
from notify import send_email

def get_json_data():
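    """Scrape parking availability from grr.org for the Gerald R. Ford
    International Airport partner and upsert it into the Locations collection.

    Returns a JSON string of the scraped entries, or a message dict when the
    partner record or the expected page markup is missing.
    """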
    site_name = "Gerald R. Ford International Airport"
    partners = Partners.objects(fsBusinessName=site_name)

    if not partners:
        return {'message': 'No partners found'}
    
    partnerid = str(partners[0].id)
    
    # URL of the webpage to scrape
    url = "https://www.grr.org/"

    # Run Chrome headless so the scraper works on a server without a display
    options = Options()
    options.add_argument('--headless')
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-dev-shm-usage')
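    # webdriver-manager downloads a matching ChromeDriver binary and returns its path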
    driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)
    
    try:
        # Load the webpage
        driver.get(url)
        # Get the page source
        page_source = driver.page_source
        # Parse the HTML
        soup = BeautifulSoup(page_source, 'html.parser')

        # Find the container that holds the parking-availability table; its
        # absence means the page markup changed or the data did not load
        parking_section = soup.find('div', class_="parking-spaces")
        if parking_section is None:
            send_email(site_name)
            return {'message': 'Data not received'}
        
        # Initialize list to store data
        parking_data = []

        # Find all rows in the availability table (scoped to the parking
        # section so unrelated tables on the page are ignored)
        rows = parking_section.find_all('tr')

        # The header row names each parking area; data headings start at index 1
        headings = rows[0].find_all('th') if rows else []
        # Extract the row title and the available-space counts for each area
        for row in rows[1:]:
            columns = row.find_all('td')
            if len(columns) < 4:
                continue  # Skip rows that do not have enough columns

            title = columns[0].text.strip()
            garage_spaces = columns[1].text.strip()
            north_spaces = columns[2].text.strip()
            economy_spaces = columns[3].text.strip()

            # Record one entry per parking area (heading) for this row's type
            for idx, spaces in enumerate((garage_spaces, north_spaces, economy_spaces), start=1):
                parking_data.append({
                    'Title': headings[idx].text.strip() if len(headings) > idx else 'Unknown',
                    'type': title,
                    'Available Spots': spaces,
                })

        # Upsert each scraped entry once the full table has been processed
        for data in parking_data:
            location_name = data['type'] + ' ' + data['Title']
            slots = data['Available Spots'] + ' Available' if data['Available Spots'] != "" else None

            # Check if the location already exists in the database
            existing_location = Locations.objects.filter(fsLocationName=location_name, fsPartnerId=partnerid).first()
            if existing_location:
                # Update the existing record's availability
                existing_location.fiParkingSlots = slots
                existing_location.save()
            else:
                print(location_name)
                location = Locations(
                    fsPartnerId=partnerid,
                    fsLocationName=location_name,
                    fstype_field=data['type'],
                    fiParkingSlots=slots,
                )
                location.save()

        # Convert the list of dictionaries to JSON
        json_data = json.dumps(parking_data)
        return json_data

    finally:
        driver.quit()
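

# A minimal usage sketch (assumption: this module is run directly, e.g. by a
# cron job or scheduler; this entry point is illustrative and not part of the
# original module).
if __name__ == "__main__":
    print(get_json_data())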
