from flask import Flask, jsonify, request, send_from_directory
from datetime import datetime
import re
import urllib.request
import urllib.parse
import http.cookiejar
from bs4 import BeautifulSoup
import os
from dotenv import load_dotenv

# Load environment variables (e.g. PORT) from a local .env file, if present.
load_dotenv()

# Serve the bundled frontend as static files; static_url_path='' mounts the
# frontend directory at the site root so '/' can serve index.html directly.
app = Flask(__name__, static_folder='../frontend', static_url_path='')

def fetch_html_with_optional_password(url, password=None, timeout=30):
    """Fetch *url*; if the page presents a password form, submit it and
    return the post-login HTML.

    A single cookie jar is shared between the initial fetch and the form
    submission so any session cookie set by the first response is carried
    into the login request.

    Args:
        url: Page URL to fetch.
        password: Optional password. When given and the page contains an
            ``<input type="password" name="password">`` field, the enclosing
            form is re-submitted with this password filled in.
        timeout: Per-request socket timeout in seconds. Previously no
            timeout was set, so an unresponsive server could hang forever.

    Returns:
        Decoded HTML of the page (post-login page when a form was submitted).

    Raises:
        ValueError: A password field exists but no enclosing <form> was found.
        urllib.error.URLError: On network failure.
    """
    cookie_jar = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie_jar))

    def _read_html(target):
        # Single fetch-and-decode step; 'ignore' tolerates stray bytes
        # outside UTF-8 in the scraped pages.
        with opener.open(target, timeout=timeout) as response:
            return response.read().decode('utf-8', 'ignore')

    html_content = _read_html(url)

    if not password:
        return html_content

    soup = BeautifulSoup(html_content, 'lxml')
    password_input = soup.find('input', {'type': 'password', 'name': 'password'})
    if not password_input:
        # Page did not ask for a password; return it as-is.
        return html_content

    form = password_input.find_parent('form') or soup.find('form')
    if not form:
        raise ValueError("Password field was found, but form was not found.")

    # Resolve the submission target relative to the page URL; HTML default
    # method is GET, but these pages use POST when method is omitted.
    action = form.get('action') or url
    submit_url = urllib.parse.urljoin(url, action)
    method = (form.get('method') or 'post').lower()

    # Carry over every named input (hidden CSRF tokens etc.), then set the
    # password last so it wins over any pre-filled value.
    # NOTE(review): unchecked checkboxes/radios are also included here —
    # browsers would omit them; harmless for these login forms.
    form_data = {}
    for input_tag in form.find_all('input'):
        input_name = input_tag.get('name')
        if not input_name:
            continue
        form_data[input_name] = input_tag.get('value', '')
    form_data['password'] = password

    encoded_data = urllib.parse.urlencode(form_data)

    if method == 'get':
        separator = '&' if urllib.parse.urlparse(submit_url).query else '?'
        request_obj = urllib.request.Request(
            f"{submit_url}{separator}{encoded_data}", method='GET')
        return _read_html(request_obj)

    request_obj = urllib.request.Request(
        submit_url,
        data=encoded_data.encode('utf-8'),
        method='POST'
    )
    return _read_html(request_obj)


def _parse_schedule_tables(html_content, target_date, url):
    """Extract attendance tables for *target_date* from a densuke page.

    Args:
        html_content: Raw HTML of the schedule page.
        target_date: Date string in "M/D" form (no leading zeros) to match
            against the first cell of each row.
        url: Source URL, echoed into each result for the frontend.

    Returns:
        A list of table dicts (one per matching date row that has at least
        one '○' or '△' member), or an error dict when the expected
        '#listtable' layout is missing.
    """
    soup = BeautifulSoup(html_content, 'lxml')
    table = soup.find('table', id='listtable')
    if not table:
        return {"error": "Table with id 'listtable' not found."}

    rows = table.find_all('tr')
    if not rows:
        # Previously rows[0] raised IndexError here; fail with a clear message.
        return {"error": "Table with id 'listtable' has no rows."}

    # First row holds member names; first cell is the date column header.
    header_cells = rows[0].find_all('td')
    headers = [cell.get_text(strip=True) for cell in header_cells[1:]]

    tables = []
    for row in rows[1:]:
        cells = [cell.get_text(strip=True) for cell in row.find_all('td')]
        if not cells:
            continue

        # Date cells look like "6/21 (Sat)"; match only the M/D prefix.
        date_cell_text = cells[0]
        date_match = re.match(r'(\d{1,2}/\d{1,2})', date_cell_text)
        if not date_match or date_match.group(1) != target_date:
            continue

        statuses = cells[1:]
        maru_members = []      # '○' = attending
        sankaku_members = []   # '△' = maybe
        for i, status in enumerate(statuses):
            if i < len(headers):
                member_name = headers[i]
                if status == '○':
                    maru_members.append(member_name)
                elif status == '△':
                    sankaku_members.append(member_name)

        final_rows = []
        if maru_members:
            final_rows.append(['○'] + maru_members)
        if sankaku_members:
            final_rows.append(['△'] + sankaku_members)

        if final_rows:
            tables.append({
                "title": date_cell_text,
                "header": ["Status", "Members"],
                "rows": final_rows,
                "url": url
            })

    return tables


def get_schedule_data(url, target_date, password=None):
    """Fetch a densuke schedule page and return attendance for *target_date*.

    Args:
        url: Schedule page URL.
        target_date: "M/D" date string to look up.
        password: Optional page password, forwarded to the fetcher.

    Returns:
        A list of table dicts on success, or a dict with an "error" key on
        any fetch or parse failure (never raises).
    """
    try:
        html_content = fetch_html_with_optional_password(url, password)
    except Exception as e:
        return {"error": f"Failed to fetch data: {str(e)}"}

    try:
        return _parse_schedule_tables(html_content, target_date, url)
    except Exception as e:
        # Do not leak a full traceback to API clients (the previous code
        # returned traceback.format_exc() in the JSON response).
        return {"error": f"Failed to parse data: {str(e)}"}

@app.route('/api')
def api():
    """Return attendance tables for the requested date across all schedules.

    Query params:
        date: ISO date (YYYY-MM-DD); defaults to today.

    Returns:
        200 with a JSON list of table dicts, 500 if any schedule fetch/parse
        fails, 400 on a malformed date.
    """
    date_str = request.args.get('date', default=datetime.now().strftime('%Y-%m-%d'))
    try:
        # Build "M/D" without leading zeros. strftime('%-m/%-d') is a
        # glibc-only extension and raises ValueError on Windows, so use
        # the datetime attributes directly.
        dt = datetime.strptime(date_str, '%Y-%m-%d')
        target_date = f"{dt.month}/{dt.day}"

        # Page password is configurable via .env; the literal default keeps
        # existing deployments working.
        url_configs = [
            {"url": "https://densuke.biz/list?cd=uQYExVaQLJ4Dd2vB", "password": None},
            {"url": "https://densuke.biz/list?cd=GzQhE6yQPXY3usad",
             "password": os.environ.get('DENSUKE_PASSWORD', 'H2728')},
            {"url": "https://densuke.biz/list?cd=tfXDcX2hEfTkdW6t", "password": None},
        ]

        all_tables = []
        for config in url_configs:
            result = get_schedule_data(
                config["url"],
                target_date,
                config.get("password")
            )
            # Error dicts from any source abort the whole response.
            if isinstance(result, dict) and "error" in result:
                return jsonify(result), 500
            all_tables.extend(result)

        return jsonify(all_tables)
    except Exception as e:
        return jsonify({"error": str(e)}), 400

@app.route('/')
def index():
    """Serve the frontend entry page from the static folder."""
    frontend_dir = app.static_folder
    return send_from_directory(frontend_dir, 'index.html')

if __name__ == '__main__':
    # Bind on all interfaces; port comes from the environment (default 8000).
    listen_port = int(os.getenv('PORT', '8000'))
    app.run(host='0.0.0.0', port=listen_port)
