"""Back up a Makerworld user's models, comments, and likes.

Note: Makerworld does not document a public API, so the endpoints used here
(https://api.makerworld.com/v1/...) are assumptions; the base URL, response
shapes, and any required authentication may need adjusting against the real
service.

Usage:
    python makerworld_backup.py USERNAME [--output-dir DIR]
        [--no-comments] [--no-likes] [--no-metadata]
"""
import argparse
import json
import logging
import os
import time
from datetime import datetime

import requests

class MakerworldBackup:
    def __init__(self, username, output_dir=None, include_comments=True, include_likes=True, include_metadata=True):
        self.username = username
        self.include_comments = include_comments
        self.include_likes = include_likes
        self.include_metadata = include_metadata
        
        # Set up output directory
        if output_dir is None:
            self.output_dir = f"makerworld_backup_{username}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        else:
            self.output_dir = output_dir
            
        # Create necessary directories
        self.files_dir = os.path.join(self.output_dir, "files")
        self.comments_dir = os.path.join(self.output_dir, "comments")
        self.metadata_dir = os.path.join(self.output_dir, "metadata")
        
        os.makedirs(self.output_dir, exist_ok=True)
        os.makedirs(self.files_dir, exist_ok=True)
        os.makedirs(self.comments_dir, exist_ok=True)
        os.makedirs(self.metadata_dir, exist_ok=True)
        
        # Set up logging to both a file in the output directory and the console
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s - %(levelname)s - %(message)s',
            handlers=[
                logging.FileHandler(os.path.join(self.output_dir, "backup_log.txt")),
                logging.StreamHandler()
            ]
        )
        self.logger = logging.getLogger(__name__)
        
        # Base URL and request headers (see the module docstring: these
        # endpoints are assumed, not officially documented)
        self.base_url = "https://api.makerworld.com/v1"
        self.headers = {
            "User-Agent": "MakerworldBackupTool/1.0"
        }
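
    # All three list endpoints below appear to share the same pagination
    # scheme (page/limit query params, with a short page signalling the end),
    # so they delegate to this shared helper. The response shape, a JSON
    # object with the items under `key`, is an assumption carried over from
    # the rest of this script; Makerworld's actual API may differ.
    def _get_paginated(self, url, key, page_size=50):
        """Fetch every page of a list endpoint and return the items under `key`."""
        items = []
        page = 1
        while True:
            params = {"page": page, "limit": page_size}
            try:
                response = requests.get(url, params=params, headers=self.headers, timeout=30)
                response.raise_for_status()
                batch = response.json().get(key) or []
                if not batch:
                    break
                items.extend(batch)
                self.logger.info(f"Fetched page {page}, got {len(batch)} {key}")
                if len(batch) < page_size:
                    break  # A short page means this was the last one
                page += 1
                time.sleep(1)  # Throttle between pages to stay polite
            except requests.RequestException as e:
                self.logger.error(f"Error fetching {key} from {url}: {e}")
                break
        return items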
        
    def get_user_models(self):
        """Fetch every model published by the user."""
        self.logger.info(f"Fetching models for user: {self.username}")
        models = self._get_paginated(f"{self.base_url}/users/{self.username}/models", "models")
        self.logger.info(f"Total models found: {len(models)}")
        return models
        
    def download_file(self, url, save_path):
        """Download a file from `url` and save it to `save_path`."""
        try:
            response = requests.get(url, stream=True, headers=self.headers, timeout=60)
            response.raise_for_status()
            
            with open(save_path, 'wb') as f:
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:  # Skip keep-alive chunks
                        f.write(chunk)
                        
            return True
            
        except requests.RequestException as e:
            self.logger.error(f"Error downloading {url}: {e}")
            return False
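
    # File downloads are the step most likely to fail transiently, so
    # backup_model routes them through this retry wrapper. A minimal sketch:
    # the retry count and exponential backoff factor are illustrative
    # defaults, not values mandated by any documented Makerworld behavior.
    def download_file_with_retry(self, url, save_path, retries=3, backoff=2.0):
        """Retry a failed download with exponential backoff between attempts."""
        for attempt in range(1, retries + 1):
            if self.download_file(url, save_path):
                return True
            if attempt < retries:
                wait = backoff ** attempt
                self.logger.warning(f"Retrying {url} in {wait:.0f}s (attempt {attempt + 1} of {retries})")
                time.sleep(wait)
        return False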
            
    def get_model_comments(self, model_id):
        """Fetch every comment on a model."""
        return self._get_paginated(f"{self.base_url}/models/{model_id}/comments", "comments")
        
    def get_model_likes(self, model_id):
        """Fetch every like on a model."""
        return self._get_paginated(f"{self.base_url}/models/{model_id}/likes", "likes")
        
    def backup_model(self, model):
        """Back up a single model: its files, metadata, comments, and likes."""
        model_id = model["id"]
        model_name = model["name"]
        # Build a filesystem-safe directory name from the model name
        safe_name = "".join(c if c.isalnum() else "_" for c in model_name)
        
        self.logger.info(f"Backing up model: {model_name} (ID: {model_id})")
        
        # Create model directory
        model_dir = os.path.join(self.files_dir, f"{safe_name}_{model_id}")
        os.makedirs(model_dir, exist_ok=True)
        
        # Save model metadata
        if self.include_metadata:
            with open(os.path.join(self.metadata_dir, f"{safe_name}_{model_id}.json"), 'w', encoding='utf-8') as f:
                json.dump(model, f, indent=2)
        
        # Download files, tracking whether any of them fail
        all_ok = True
        for file_info in model.get("files", []):
            file_url = file_info.get("download_url")
            if not file_url:
                continue
                
            # basename() guards against path traversal in server-supplied names
            file_name = os.path.basename(file_info.get("name", "unknown_file"))
            file_path = os.path.join(model_dir, file_name)
            
            self.logger.info(f"Downloading file: {file_name}")
            if self.download_file_with_retry(file_url, file_path):
                self.logger.info(f"Downloaded: {file_name}")
            else:
                self.logger.error(f"Failed to download: {file_name}")
                all_ok = False
                
        # Get and save comments
        if self.include_comments:
            comments = self.get_model_comments(model_id)
            if comments:
                with open(os.path.join(self.comments_dir, f"{safe_name}_{model_id}_comments.json"), 'w', encoding='utf-8') as f:
                    json.dump(comments, f, indent=2)
                self.logger.info(f"Saved {len(comments)} comments for model: {model_name}")
                
        # Get and save likes
        if self.include_likes:
            likes = self.get_model_likes(model_id)
            if likes:
                with open(os.path.join(self.metadata_dir, f"{safe_name}_{model_id}_likes.json"), 'w', encoding='utf-8') as f:
                    json.dump(likes, f, indent=2)
                self.logger.info(f"Saved {len(likes)} likes for model: {model_name}")
                
        # Report failure if any file download failed, so the run summary
        # reflects partial backups
        return all_ok
        
    def run_backup(self):
        """Run the full backup process"""
        start_time = time.time()
        self.logger.info(f"Starting Makerworld backup for user: {self.username}")
        
        # Get all user models
        models = self.get_user_models()
        
        if not models:
            self.logger.warning(f"No models found for user: {self.username}")
            return False
            
        # Back up each model; one bad model should not abort the whole run
        success_count = 0
        for model in models:
            try:
                if self.backup_model(model):
                    success_count += 1
            except Exception as e:
                self.logger.error(f"Unexpected error backing up model {model.get('id', '?')}: {e}")
                
        # Create a summary file
        summary = {
            "username": self.username,
            "backup_date": datetime.now().isoformat(),
            "total_models": len(models),
            "successful_backups": success_count,
            "backup_options": {
                "include_comments": self.include_comments,
                "include_likes": self.include_likes,
                "include_metadata": self.include_metadata
            }
        }
        
        with open(os.path.join(self.output_dir, "backup_summary.json"), 'w', encoding='utf-8') as f:
            json.dump(summary, f, indent=2)
            
        elapsed_time = time.time() - start_time
        self.logger.info(f"Backup completed in {elapsed_time:.2f} seconds")
        self.logger.info(f"Successfully backed up {success_count} of {len(models)} models")
        self.logger.info(f"Backup stored in: {os.path.abspath(self.output_dir)}")
        
        return True

def main():
    parser = argparse.ArgumentParser(description="Back up a Makerworld user's models and data")
    parser.add_argument("username", help="Makerworld username to back up")
    parser.add_argument("--output-dir", help="Output directory for backup (default: auto-generated)")
    parser.add_argument("--no-comments", action="store_false", dest="include_comments", help="Skip backing up comments")
    parser.add_argument("--no-likes", action="store_false", dest="include_likes", help="Skip backing up likes")
    parser.add_argument("--no-metadata", action="store_false", dest="include_metadata", help="Skip backing up metadata")
    
    args = parser.parse_args()
    
    backup = MakerworldBackup(
        username=args.username,
        output_dir=args.output_dir,
        include_comments=args.include_comments,
        include_likes=args.include_likes,
        include_metadata=args.include_metadata
    )
    
    backup.run_backup()

if __name__ == "__main__":
    main()