import os, re, json, time, datetime
from pathlib import Path
import requests

BASE = 'https://api.clickup.com/api/v2'  # ClickUp REST API v2 root URL
WS = Path('/Users/ahmad/.openclaw/workspace')  # local workspace directory for backups/reports
CREDS = WS / 'clickup_credentials.md'  # markdown file that contains the personal API token

# Pull the first ClickUp personal token (they start with "pk_") out of the
# credentials file; fail fast with a clear message if none is present.
m = re.search(r'(pk_[A-Za-z0-9_]+)', CREDS.read_text())
if not m:
    raise SystemExit('Token not found in clickup_credentials.md')
TOKEN = m.group(1)
# ClickUp personal tokens go in the Authorization header verbatim (no "Bearer " prefix).
HEADERS = {'Authorization': TOKEN, 'Content-Type': 'application/json'}


def get(url, params=None):
    """GET *url* with the module auth headers and return the parsed JSON body.

    Raises requests.HTTPError (via raise_for_status) on any non-2xx response.
    """
    response = requests.get(url, headers=HEADERS, params=params, timeout=60)
    response.raise_for_status()
    return response.json()


def post(url, data=None):
    """POST *data* (falsy -> empty object) as JSON to *url*; return parsed JSON.

    Raises requests.HTTPError (via raise_for_status) on any non-2xx response.
    """
    body = data or {}
    response = requests.post(url, headers=HEADERS, json=body, timeout=60)
    response.raise_for_status()
    return response.json()


def put(url, data=None):
    """PUT *data* (falsy -> empty object) as JSON to *url*; return parsed JSON.

    Raises requests.HTTPError (via raise_for_status) on any non-2xx response.
    """
    body = data or {}
    response = requests.put(url, headers=HEADERS, json=body, timeout=60)
    response.raise_for_status()
    return response.json()


def fetch_all_list_tasks(list_id, include_closed=True):
    """Page through every task (including subtasks) of one ClickUp list.

    Tasks are de-duplicated by id across pages. Iteration stops on the
    first of: an empty page, the API's ``last_page`` flag, a page that
    contributes no new tasks, or a hard safety cap of 200 pages.
    """
    collected = []
    seen_ids = set()
    page = 0
    while page <= 200:  # safety cap so a misbehaving API can't loop forever
        query = {
            'page': page,
            'include_closed': str(include_closed).lower(),
            'subtasks': 'true',
        }
        payload = get(f'{BASE}/list/{list_id}/task', params=query)
        batch = payload.get('tasks', [])
        new_count = 0
        for task in batch:
            task_id = task.get('id')
            if task_id and task_id in seen_ids:
                continue
            if task_id:
                seen_ids.add(task_id)
            collected.append(task)
            new_count += 1
        # Stop on an empty page, an explicit last-page marker, or a page
        # that was entirely duplicates of tasks we already collected.
        if not batch or payload.get('last_page') is True or new_count == 0:
            break
        page += 1
    return collected


def norm_title(s):
    """Normalise a task title for duplicate detection.

    Lowercases, trims, and collapses internal whitespace runs to a
    single space. ``None`` is treated as the empty string.
    """
    text = (s or '').strip().lower()
    return re.sub(r'\s+', ' ', text)


def date_key(task):
    """Return an ISO date string used (with the title) as a duplicate key.

    Prefers due_date, then start_date, then date_created — each a ClickUp
    epoch-milliseconds value. Malformed/missing values are skipped; if no
    field yields a date, returns the sentinel string 'none'.
    """
    for k in ['due_date', 'start_date', 'date_created']:
        v = task.get(k)
        if v:
            try:
                ts = int(v)
                # fromtimestamp(..., tz=utc) replaces the deprecated
                # utcfromtimestamp (removed-in-future API, same UTC result).
                d = datetime.datetime.fromtimestamp(
                    ts / 1000, tz=datetime.timezone.utc
                ).date().isoformat()
                return d
            except Exception:
                # Non-numeric / out-of-range value: fall through to next field.
                pass
    return 'none'


def choose_target(task):
    """Pick the destination list name for a legacy task.

    Routing rules, first match wins:
      1. Meeting-ish title            -> 'Meetings & Follow-ups'
      2. Recurring-ish title          -> 'Recurring Ops'
      3. Blocked/waiting status/title -> 'Waiting / Blocked'
      4. Completed status             -> 'Archive'
      5. Due within ~1.5 days -> 'Today'; within ~7.5 days -> 'This Week'
      6. High priority                -> 'Today'
      7. Everything else              -> 'Pipeline'
    """
    name = (task.get('name') or '').lower()
    status = ((task.get('status') or {}).get('status') or '').lower()
    # NOTE(review): ClickUp's priority object typically carries a label
    # ('urgent', 'high') under 'priority' and the numeric string under
    # 'id' — confirm which field actually holds '1'/'2' here.
    # (Dropped the original's redundant double "or {}".)
    priority = (task.get('priority') or {}).get('priority')
    due = task.get('due_date')

    if any(x in name for x in ['meeting', 'call', 'follow up', 'follow-up', '1:1']):
        return 'Meetings & Follow-ups'
    if any(x in name for x in ['recurring', 'weekly', 'daily', 'monthly', 'routine']):
        return 'Recurring Ops'
    if 'block' in status or 'wait' in status or any(x in name for x in ['waiting', 'blocked', 'dependency']):
        return 'Waiting / Blocked'
    if status in ['complete', 'done', 'closed']:
        return 'Archive'

    if due:
        # Tolerate malformed timestamps (date_key() does too); a bad value
        # simply skips the due-date rules instead of crashing the migration.
        try:
            days = (int(due) / 1000 - time.time()) / 86400
        except (TypeError, ValueError):
            days = None
        if days is not None:
            if days <= 1.5:
                return 'Today'
            if days <= 7.5:
                return 'This Week'

    if priority in ['1', '2', 1, 2]:
        return 'Today'
    return 'Pipeline'


def main() -> None:
    """Back up the ClickUp workspace, ensure the target structure, and
    migrate legacy tasks into it.

    High-level flow:
      1. Snapshot every space/folder/list/task to a timestamped backup dir.
      2. Ensure a 'BigALC OS' space exists (falling back to 'Ops' or the
         first space if creation fails).
      3. Ensure the seven target lists exist in that space.
      4. Copy each legacy task into the list chosen by choose_target(),
         skipping (target list, normalised title, date) duplicates.
      5. Write migration results JSON, a markdown report, and print a
         JSON summary to stdout.

    Note: migration COPIES tasks (source tasks are left untouched); the
    copy's description links back to the legacy task.
    """
    # Timestamped backup directory, e.g. .../clickup_restructure_backup/20240101_1200
    now = datetime.datetime.now().strftime('%Y%m%d_%H%M')
    bdir = WS / 'clickup_restructure_backup' / now
    bdir.mkdir(parents=True, exist_ok=True)

    # Operate on the first team (workspace) the token can see.
    teams = get(f'{BASE}/team').get('teams', [])
    if not teams:
        raise SystemExit('No ClickUp teams found')
    team = teams[0]
    team_id = team['id']

    spaces = get(f'{BASE}/team/{team_id}/space', params={'archived': 'false'}).get('spaces', [])

    # Inventory backup: full snapshot of spaces, folders, lists, and tasks
    # (tasks keyed by list id) before anything is created or migrated.
    inv = {'team': team, 'spaces': [], 'folders': [], 'lists': [], 'tasks': {}}
    for sp in spaces:
        sp_id = sp['id']
        print(f'Backing up space {sp.get("name")} ({sp_id})', flush=True)
        inv['spaces'].append(sp)
        folders = get(f'{BASE}/space/{sp_id}/folder', params={'archived': 'false'}).get('folders', [])
        inv['folders'].extend(folders)

        # folderless lists (lists attached directly to the space)
        fl_lists = get(f'{BASE}/space/{sp_id}/list', params={'archived': 'false'}).get('lists', [])
        for lst in fl_lists:
            inv['lists'].append(lst)
            ts = fetch_all_list_tasks(lst['id'], include_closed=True)
            inv['tasks'][lst['id']] = ts
            print(f"  List {lst.get('name')} ({lst['id']}): {len(ts)} tasks", flush=True)

        # lists nested inside folders
        for f in folders:
            l = get(f"{BASE}/folder/{f['id']}/list", params={'archived': 'false'}).get('lists', [])
            for lst in l:
                inv['lists'].append(lst)
                ts = fetch_all_list_tasks(lst['id'], include_closed=True)
                inv['tasks'][lst['id']] = ts
                print(f"  List {lst.get('name')} ({lst['id']}): {len(ts)} tasks", flush=True)

    # Persist the snapshot so the migration is reversible/auditable.
    (bdir / 'team.json').write_text(json.dumps(team, indent=2))
    (bdir / 'spaces.json').write_text(json.dumps(inv['spaces'], indent=2))
    (bdir / 'folders.json').write_text(json.dumps(inv['folders'], indent=2))
    (bdir / 'lists.json').write_text(json.dumps(inv['lists'], indent=2))
    (bdir / 'tasks_by_list.json').write_text(json.dumps(inv['tasks'], indent=2))

    # Ensure target space: reuse 'BigALC OS' if present, otherwise create it;
    # on failure fall back to 'Ops' (or the first space) and record a blocker.
    blockers = []
    target_space = next((s for s in spaces if s.get('name') == 'BigALC OS'), None)
    created_space = None
    if not target_space:
        try:
            created_space = post(f'{BASE}/team/{team_id}/space', {'name': 'BigALC OS', 'multiple_assignees': True, 'features': {}})
            target_space = created_space
        except Exception as e:
            blockers.append(f"Could not create space 'BigALC OS': {e}")
            target_space = next((s for s in spaces if s.get('name') == 'Ops'), spaces[0])
            print(f"WARNING: Falling back to existing space {target_space.get('name')} ({target_space.get('id')})", flush=True)

    # Ensure the seven destination lists exist in the target space.
    target_lists = ['Today', 'This Week', 'Pipeline', 'Waiting / Blocked', 'Recurring Ops', 'Meetings & Follow-ups', 'Archive']
    existing_target_lists = get(f"{BASE}/space/{target_space['id']}/list", params={'archived': 'false'}).get('lists', [])
    list_map = {l['name']: l for l in existing_target_lists}
    created_lists = []
    for ln in target_lists:
        if ln not in list_map:
            nl = post(f"{BASE}/space/{target_space['id']}/list", {'name': ln, 'content': ''})
            list_map[ln] = nl
            created_lists.append(nl)

    # Build duplicate index from existing target lists so reruns of this
    # script don't create second copies: key is (list id, normalised title, date).
    dup_index = set()
    for ln in target_lists:
        lid = list_map[ln]['id']
        for t in fetch_all_list_tasks(lid, include_closed=True):
            dup_index.add((lid, norm_title(t.get('name')), date_key(t)))

    # Determine legacy lists to migrate (exclude new lists in target space)
    target_list_ids = {list_map[n]['id'] for n in target_lists}
    all_lists = inv['lists']
    legacy_lists = [l for l in all_lists if l['id'] not in target_list_ids]

    # ensure specified list id included if accessible and missing from inventory
    # (hard-coded list known to live outside the inventoried spaces)
    explicit_id = '186337367'
    if explicit_id not in {l['id'] for l in legacy_lists} and explicit_id not in target_list_ids:
        try:
            el = get(f'{BASE}/list/{explicit_id}')
            legacy_lists.append(el)
            if explicit_id not in inv['tasks']:
                inv['tasks'][explicit_id] = fetch_all_list_tasks(explicit_id, include_closed=True)
        except Exception:
            pass

    migrated = []            # successfully copied tasks (source/target ids)
    skipped_duplicates = []  # tasks skipped by the title+date heuristic
    unresolved = []          # lists/tasks that errored or had no target list

    processed = 0
    for lst in legacy_lists:
        lid = lst['id']
        tasks = inv['tasks'].get(lid)
        print(f"Migrating list {lst.get('name')} ({lid})", flush=True)
        # Lists appended after the backup pass (e.g. explicit_id) have no
        # cached tasks yet — fetch them now, or record the failure and move on.
        if tasks is None:
            try:
                tasks = fetch_all_list_tasks(lid, include_closed=True)
            except Exception as e:
                unresolved.append({'list_id': lid, 'list_name': lst.get('name'), 'error': str(e)})
                continue

        for t in tasks:
            processed += 1
            if processed % 100 == 0:
                print(f'Processed {processed} tasks | migrated {len(migrated)} | duplicates {len(skipped_duplicates)} | unresolved {len(unresolved)}', flush=True)
            tgt_name = choose_target(t)
            tgt = list_map.get(tgt_name)
            if not tgt:
                unresolved.append({'task_id': t.get('id'), 'task_name': t.get('name'), 'reason': f'missing target list {tgt_name}'})
                continue
            # Skip if an equivalent task already exists in the target list.
            k = (tgt['id'], norm_title(t.get('name')), date_key(t))
            if k in dup_index:
                skipped_duplicates.append({'source_task_id': t.get('id'), 'name': t.get('name'), 'target_list': tgt_name})
                continue

            # Prefix the copy's description with a link back to the source task.
            src_url = t.get('url') or f'https://app.clickup.com/t/{t.get("id")}'
            desc = (t.get('description') or '').strip()
            prefix = f"[Migrated from legacy task {t.get('id')}]({src_url})\n\n"
            new_desc = prefix + desc

            # Tags may arrive as dicts ({'name': ...}) or bare strings.
            src_tags = []
            for tg in (t.get('tags') or []):
                if isinstance(tg, dict) and tg.get('name'):
                    src_tags.append(tg['name'])
                elif isinstance(tg, str):
                    src_tags.append(tg)
            payload = {
                'name': t.get('name'),
                'description': new_desc,
                'assignees': [a['id'] for a in t.get('assignees', []) if a.get('id')],
                'tags': sorted(set(src_tags + ['migrated-from-legacy'])),
                'priority': (t.get('priority') or {}).get('priority') if t.get('priority') else None,
                'due_date': int(t['due_date']) if t.get('due_date') else None,
                'start_date': int(t['start_date']) if t.get('start_date') else None,
                'notify_all': False,
                'time_estimate': int(t['time_estimate']) if t.get('time_estimate') else None,
            }
            # Drop None values so the API doesn't receive explicit nulls.
            payload = {k2: v2 for k2, v2 in payload.items() if v2 is not None}
            try:
                created = post(f"{BASE}/list/{tgt['id']}/task", payload)
                migrated.append({
                    'source_task_id': t.get('id'),
                    'source_task_name': t.get('name'),
                    'source_list_id': lid,
                    'source_list_name': lst.get('name'),
                    'target_task_id': created.get('id'),
                    'target_task_url': created.get('url'),
                    'target_list': tgt_name,
                })
                # Register the new copy so later legacy lists can't re-create it.
                dup_index.add(k)
            except Exception as e:
                unresolved.append({'task_id': t.get('id'), 'task_name': t.get('name'), 'error': str(e)})

    (bdir / 'migration_results.json').write_text(json.dumps({
        'migrated': migrated,
        'skipped_duplicates': skipped_duplicates,
        'unresolved': unresolved,
    }, indent=2))

    # Report: human-readable markdown summary written to the workspace root.
    report = []
    report.append('# ClickUp Restructure Report')
    report.append(f'- Timestamp: {datetime.datetime.now().isoformat()}')
    report.append(f'- Team: {team.get("name")} (ID: {team_id})')
    report.append(f'- Backup directory: `{bdir}`')
    report.append('')
    report.append('## Created/Ensured Entities')
    report.append(f"- Space: {target_space.get('name')} (ID: {target_space.get('id')}) - {'CREATED' if created_space else 'EXISTING/FALLBACK'}")
    report.append('- Lists:')
    for ln in target_lists:
        li = list_map[ln]
        c = any(x['id'] == li['id'] for x in created_lists)
        report.append(f"  - {ln} (ID: {li['id']}) - {'CREATED' if c else 'EXISTING'} - {li.get('url','')}")

    report.append('')
    report.append('## Migration Summary')
    report.append(f'- Legacy lists considered: {len(legacy_lists)}')
    report.append(f'- Tasks migrated (new copies created): {len(migrated)}')
    report.append(f'- Tasks skipped as duplicates (title+date heuristic): {len(skipped_duplicates)}')
    report.append(f'- Unresolved items: {len(unresolved)}')

    # Per-list migration counts.
    by_target = {}
    for mrec in migrated:
        by_target[mrec['target_list']] = by_target.get(mrec['target_list'], 0) + 1
    report.append('')
    report.append('### Migrated counts by target list')
    for ln in target_lists:
        report.append(f'- {ln}: {by_target.get(ln,0)}')

    report.append('')
    report.append('## Sample Migrated Tasks (up to 30)')
    for mrec in migrated[:30]:
        report.append(f"- {mrec['source_task_name']} | src `{mrec['source_task_id']}` -> [{mrec['target_task_id']}]({mrec['target_task_url']}) | {mrec['target_list']}")

    if blockers:
        report.append('')
        report.append('## Blockers')
        for b in blockers:
            report.append(f'- {b}')

    if unresolved:
        report.append('')
        report.append('## Unresolved Items')
        for u in unresolved[:200]:
            report.append(f"- {json.dumps(u, ensure_ascii=False)}")

    if skipped_duplicates:
        report.append('')
        report.append('## Skipped Duplicates (first 100)')
        for d in skipped_duplicates[:100]:
            report.append(f"- {d['name']} (src {d['source_task_id']}) -> {d['target_list']}")

    (WS / 'clickup_restructure_report.md').write_text('\n'.join(report))

    # Machine-readable summary on stdout for whatever invoked this script.
    summary = {
        'backup_dir': str(bdir),
        'team_id': team_id,
        'team_name': team.get('name'),
        'space_id': target_space.get('id'),
        'space_name': target_space.get('name'),
        'created_space': bool(created_space),
        'created_lists': [x['name'] for x in created_lists],
        'migrated_count': len(migrated),
        'skipped_duplicates': len(skipped_duplicates),
        'unresolved_count': len(unresolved),
        'blockers': blockers,
    }
    print(json.dumps(summary, indent=2))


# Run the backup + migration only when executed as a script (not on import).
if __name__ == '__main__':
    main()
