#!/usr/bin/env python3
"""
eBay Research Data Collector
Uses saved session to fetch research data from eBay API
"""
import asyncio
import json
import os
from datetime import datetime
from login import EbayLogin
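# EbayLogin (login.py) is expected to provide: user_data_dir, start_browser(),
# check_login_status(), fetch_multiple_pages(), and close_browser(); only these
# members are used here.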


class EbayResearchCollector(EbayLogin):
    def __init__(self):
        super().__init__()
        self.results_dir = f"{self.user_data_dir}/research_results"

    async def setup(self):
        """Setup the research collector"""
        # Create results directory
        os.makedirs(self.results_dir, exist_ok=True)

        # Start browser with saved session
        await self.start_browser(headless=True)  # Run headless for data collection

        # Verify login status
        if not await self.check_login_status():
            print("❌ Not logged in. Please run login.py first to authenticate.")
            return False

        print("✅ Research collector ready!")
        return True

    async def collect_research_data(self, search_config):
        """
        Collect research data based on configuration

        Args:
            search_config (dict): Configuration for the search
                - keywords: Search terms
                - day_range: Number of days to search
                - total_items: Total items to collect
                - items_per_page: Items per API request
                - category_id: eBay category ID (optional; read but not yet
                  forwarded to fetch_multiple_pages)

        Returns:
            dict: Collection metadata plus the fetched data, or None on failure.
        """
        try:
            keywords = search_config.get('keywords', 'iphone case')
            day_range = search_config.get('day_range', 90)
            total_items = search_config.get('total_items', 200)
            items_per_page = search_config.get('items_per_page', 50)
            # Read but not forwarded below; wire through once
            # fetch_multiple_pages accepts a category filter.
            category_id = search_config.get('category_id', 0)

            print(f"\n🔬 Research Collection Started")
            print(f"📝 Keywords: {keywords}")
            print(f"📅 Date range: {day_range} days")
            print(f"📊 Target items: {total_items}")
            print("-" * 50)

            # Collect data using multi-page fetch
            result = await self.fetch_multiple_pages(
                keywords=keywords,
                day_range=day_range,
                total_items=total_items,
                items_per_page=items_per_page
            )

            if result:
                # Build a descriptive filename in the results directory
                timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
                safe_keywords = keywords.replace(' ', '_').replace('+', '_')
                new_filename = f"{safe_keywords}_{day_range}d_{len(result.get('items', []))}items_{timestamp}.json"
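                # e.g. "iphone_case_30d_100items_20240101_120000.json"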
                new_path = f"{self.results_dir}/{new_filename}"

                # Save with additional metadata
                enhanced_result = {
                    "collection_info": {
                        "timestamp": datetime.now().isoformat(),
                        "collector_version": "1.0",
                        "search_config": search_config
                    },
                    **result
                }
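                # enhanced_result layout, assuming fetch_multiple_pages returned
                # a dict (e.g. with an "items" list):
                #   collection_info: timestamp, collector_version, search_config
                #   ...plus every top-level key from the fetch result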

                with open(new_path, 'w', encoding='utf-8') as f:
                    json.dump(enhanced_result, f, indent=2, ensure_ascii=False)

                print(f"\n🎉 Research collection completed!")
                print(f"📁 Results saved to: {new_path}")
                print(f"📊 Items collected: {len(result.get('items', []))}")

                return enhanced_result

            return None

        except Exception as e:
            print(f"❌ Error in research collection: {str(e)}")
            return None

    async def batch_collect(self, batch_config):
        """
        Collect multiple research datasets in batch

        Args:
            batch_config (list): List of search configurations
        """
        results = []

        print(f"🔄 Starting batch collection of {len(batch_config)} searches...")

        for i, config in enumerate(batch_config, 1):
            print(f"\n📊 Batch {i}/{len(batch_config)}")

            result = await self.collect_research_data(config)
            if result:
                results.append({
                    "config": config,
                    "result_summary": {
                        "items_count": len(result.get('items', [])),
                        "collection_time": result.get('collection_info', {}).get('timestamp'),
                        "success": True
                    }
                })
            else:
                results.append({
                    "config": config,
                    "result_summary": {
                        "items_count": 0,
                        "success": False
                    }
                })

            # Delay between batch items to be respectful
            if i < len(batch_config):
                print("⏳ Waiting 30 seconds before next collection...")
                await asyncio.sleep(30)

        # Save batch summary
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        batch_summary_path = f"{self.results_dir}/batch_summary_{timestamp}.json"

        with open(batch_summary_path, 'w', encoding='utf-8') as f:
            json.dump({
                "batch_info": {
                    "timestamp": datetime.now().isoformat(),
                    "total_searches": len(batch_config),
                    "successful_searches": sum(1 for r in results if r['result_summary']['success'])
                },
                "results": results
            }, f, indent=2, ensure_ascii=False)

        print(f"\n🎉 Batch collection completed!")
        print(f"📁 Batch summary: {batch_summary_path}")

        return results


async def main():
    """Example usage of the research collector"""
    collector = EbayResearchCollector()

    try:
        # Setup the collector
        if not await collector.setup():
            return

        # Example 1: Single search
        search_config = {
            "keywords": "iphone case",
            "day_range": 30,
            "total_items": 100,
            "items_per_page": 50,
            "category_id": 0
        }

        result = await collector.collect_research_data(search_config)

        # Example 2: Batch collection (uncomment to use)
        # batch_configs = [
        #     {"keywords": "iphone case", "day_range": 30, "total_items": 100},
        #     {"keywords": "samsung case", "day_range": 30, "total_items": 100},
        #     {"keywords": "phone charger", "day_range": 30, "total_items": 100},
        # ]
        #
        # batch_results = await collector.batch_collect(batch_configs)
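        #
        # Example 3: Load a saved result file for offline analysis
        # (the filename below is illustrative; pick a real file from
        # collector.results_dir)
        #
        # path = f"{collector.results_dir}/iphone_case_30d_100items_<timestamp>.json"
        # with open(path, encoding="utf-8") as f:
        #     data = json.load(f)
        # print(f"Loaded {len(data.get('items', []))} items")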

    except Exception as e:
        print(f"Error: {str(e)}")
    finally:
        await collector.close_browser()


if __name__ == "__main__":
    asyncio.run(main())
