diff --git a/Dockerfile b/Dockerfile
index 8236c8f..a69c0a6 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,23 +1,17 @@
 # Dockerfile
 FROM python:3.9-slim
 WORKDIR /app
-
 # Install git (since we're still using slim)
 RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*
-
 # Copy pre-built files from dist directory
 COPY dist/backend/app ./app
 COPY dist/static ./app/static
 COPY dist/requirements.txt .
-
 RUN pip install --no-cache-dir -r requirements.txt
-
 LABEL org.opencontainers.image.authors="Dictionarry dictionarry@pm.me"
 LABEL org.opencontainers.image.description="Profilarr - Profile manager for *arr apps"
 LABEL org.opencontainers.image.source="https://github.com/Dictionarry-Hub/profilarr"
 LABEL org.opencontainers.image.title="Profilarr"
 LABEL org.opencontainers.image.version="beta"
-
 EXPOSE 6868
-
-CMD ["gunicorn", "--bind", "0.0.0.0:6868", "app.main:create_app()"]
\ No newline at end of file
+CMD ["gunicorn", "--bind", "0.0.0.0:6868", "--timeout", "600", "app.main:create_app()"]
\ No newline at end of file
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 7a8d7bd..5deb4d3 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -3,4 +3,5 @@ WORKDIR /app
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 COPY . .
-CMD ["python", "-m", "app.main"]
\ No newline at end of file
+# Use gunicorn with 10-minute timeout
+CMD ["gunicorn", "--bind", "0.0.0.0:5000", "--timeout", "600", "app.main:create_app()"]
\ No newline at end of file
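Reviewer note: both CMDs use gunicorn's factory-call syntax, `app.main:create_app()`, which makes gunicorn call the application factory instead of importing a pre-built `app` object, and `--timeout 600` keeps workers alive through long bulk imports. A minimal sketch of the factory this CMD assumes — the real `app/main.py` is not part of this diff, so the body here is an illustrative placeholder:

```python
# Minimal sketch of the app factory that "app.main:create_app()" assumes.
# The actual create_app() in app/main.py is not shown in this diff.
from flask import Flask

def create_app() -> Flask:
    app = Flask(__name__)
    # ... register blueprints, load config, start schedulers, etc. ...
    return app

if __name__ == "__main__":
    # Roughly what gunicorn does with the "app.main:create_app()" target
    create_app().run(host="0.0.0.0", port=6868)
```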
diff --git a/backend/app/compile/profile_compiler.py b/backend/app/compile/profile_compiler.py
index dc285d0..8546365 100644
--- a/backend/app/compile/profile_compiler.py
+++ b/backend/app/compile/profile_compiler.py
@@ -5,10 +5,12 @@
 from typing import Dict, List, Optional, Any, Callable
 import json
 import yaml
 import logging
+import asyncio
+import aiohttp
 
 from .mappings import TargetApp, ValueResolver
 from ..data.utils import load_yaml_file, get_category_directory
-from ..importarr.format_memory import import_format_from_memory
+from ..importarr.format_memory import import_format_from_memory, async_import_format_from_memory
 
 logger = logging.getLogger(__name__)
@@ -62,32 +64,38 @@ class ProfileConverter:
             })
         return qualities
 
-    def _process_language_formats(
-            self,
-            behaviour: str,
-            language: str,
-            import_as_unique: bool = False) -> List[Dict]:
-        if not self.base_url or not self.api_key or not self.format_importer:
-            logger.error("Missing required credentials or format importer")
-            raise ValueError(
-                "base_url, api_key, and format_importer are required for language format processing"
-            )
-
+    def _generate_language_formats(self,
+                                   behaviour: str,
+                                   language: str) -> List[Dict]:
+        """
+        Generate language-specific format configurations without importing them.
+        This is useful for pre-loading and caching language formats.
+
+        Args:
+            behaviour: Language behaviour ('must', 'prefer', 'only')
+            language: Language code ('english', 'french', etc.)
+
+        Returns:
+            List of format configurations for the specified language
+        """
         try:
             formats_to_import = []
-            format_configs = []
-
+
+            # Get the base format as a template
             base_format_path = f"{get_category_directory('custom_format')}/Not English.yml"
             base_format = load_yaml_file(base_format_path)
-
-            language_data = ValueResolver.get_language(language,
-                                                       self.target_app,
-                                                       for_profile=False)
-
+
+            # Get language data for translations
+            language_data = ValueResolver.get_language(
+                language, self.target_app, for_profile=False
+            )
+
+            # Create the main "Not X" format (e.g., "Not French")
             modified_format = base_format.copy()
             base_name = f"Not {language_data['name']}"
             modified_format['name'] = base_name
-
+
+            # Update conditions to refer to the specific language
             for condition in modified_format['conditions']:
                 if condition.get('type') == 'language':
                     condition['language'] = language
@@ -95,9 +103,10 @@ class ProfileConverter:
                     condition['name'] = f"Not {language_data['name']}"
                 elif condition.get('name') == 'Includes English':
                     condition['name'] = f"Includes {language_data['name']}"
-
+
             formats_to_import.append(modified_format)
-
+
+            # Add additional formats for 'only' behavior
             if behaviour == 'only':
                 additional_formats = [
                     "Not Only English", "Not Only English (Missing)"
@@ -107,6 +116,7 @@ class ProfileConverter:
                     format_data = load_yaml_file(format_path)
                     format_data['name'] = format_data['name'].replace(
                         'English', language_data['name'])
+
                     for c in format_data.get('conditions', []):
                         if c.get('type') == 'language':
                             c['language'] = language
@@ -114,9 +124,88 @@ class ProfileConverter:
                             c['name'] = f"Not {language_data['name']}"
                         elif c.get('name') == 'Includes English':
                             c['name'] = f"Includes {language_data['name']}"
+
                     formats_to_import.append(format_data)
+
+            return formats_to_import
+
+        except Exception as e:
+            logger.error(f"Error generating language formats: {str(e)}")
+            raise
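For reference, this is roughly the shape `_generate_language_formats('only', 'french')` would return. The exact condition fields come from the bundled "Not English" / "Not Only English" YAML templates, which are not in this diff, so the values below are illustrative only:

```python
# Illustrative only - real values depend on the shipped YAML templates.
formats = [
    {
        'name': 'Not French',
        'conditions': [
            {'type': 'language', 'language': 'french', 'name': 'Not French'},
        ],
    },
    {'name': 'Not Only French', 'conditions': ['...']},
    {'name': 'Not Only French (Missing)', 'conditions': ['...']},
]
```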
+ """ + try: + # Generate the format configurations + formats_to_import = self._generate_language_formats(behaviour, language) + format_configs = [] + + # Check if we're using a format importer (might be None for direct format returns) + if self.format_importer is None: + # No importer provided - we're in the special caching mode + # Just create the format configs directly without importing + logger.info(f"Using pre-cached language formats for {behaviour}_{language}") + + for format_data in formats_to_import: + format_name = format_data['name'] + if import_as_unique: + format_name = f"{format_name} [Dictionarry]" + + format_configs.append({ + 'name': format_name, + 'score': -9999 + }) + + return format_configs + + # Regular mode with an importer - check if it's our dummy cached importer + if self.format_importer and hasattr(self.format_importer, '__name__') and self.format_importer.__name__ == 'cached_format_importer': + logger.info(f"Using cached importer for language formats {behaviour}_{language}") + # Simply call the dummy importer just to keep the flow consistent, + # but we'll generate our own format configs + self.format_importer() + + # Create format configs directly + for format_data in formats_to_import: + format_name = format_data['name'] + if import_as_unique: + format_name = f"{format_name} [Dictionarry]" + + format_configs.append({ + 'name': format_name, + 'score': -9999 + }) + + return format_configs + + # If we've reached here, we're doing a regular import + if not self.base_url or not self.api_key or not self.format_importer: + logger.error("Missing required credentials or format importer") + raise ValueError( + "base_url, api_key, and format_importer are required for language format processing" + ) + arr_type = 'radarr' if self.target_app == TargetApp.RADARR else 'sonarr' + + # Use asyncio if there are multiple formats to import + if len(formats_to_import) > 1: + # Run in event loop + return asyncio.run(self._async_process_language_formats( + formats_to_import=formats_to_import, + arr_type=arr_type, + import_as_unique=import_as_unique + )) + + # For single format, use regular synchronous version for format_data in formats_to_import: try: result = import_format_from_memory( @@ -151,6 +240,53 @@ class ProfileConverter: except Exception as e: logger.error(f"Error processing language formats: {str(e)}") raise + + async def _async_process_language_formats( + self, + formats_to_import: List[Dict], + arr_type: str, + import_as_unique: bool = False) -> List[Dict]: + """ + Asynchronous version of _process_language_formats for concurrent imports + """ + logger.info(f"Processing language formats asynchronously: {len(formats_to_import)} formats") + format_configs = [] + tasks = [] + + # Create tasks for all formats + for format_data in formats_to_import: + task = asyncio.create_task( + async_import_format_from_memory( + format_data=format_data, + base_url=self.base_url, + api_key=self.api_key, + arr_type=arr_type, + import_as_unique=self.import_as_unique + ) + ) + tasks.append((format_data['name'], task)) + + # Process all format import results + for format_name, task in tasks: + try: + result = await task + if not result.get('success', False): + logger.error(f"Format import failed for: {format_name} (async)") + raise Exception(f"Failed to import format {format_name}") + + display_name = format_name + if import_as_unique: + display_name = f"{format_name} [Dictionarry]" + + format_configs.append({ + 'name': display_name, + 'score': -9999 + }) + except Exception as e: + 
logger.error(f"Error importing format {format_name}: {str(e)} (async)") + raise + + return format_configs def convert_quality_group(self, group: Dict) -> Dict: original_id = group.get("id", 0) @@ -178,14 +314,22 @@ class ProfileConverter: if language != 'any' and '_' in language: language_parts = language.split('_', 1) behaviour, language_code = language_parts - try: - language_formats = self._process_language_formats( - behaviour, language_code) - if 'custom_formats' not in profile: - profile['custom_formats'] = [] - profile['custom_formats'].extend(language_formats) - except Exception as e: - logger.error(f"Failed to process language formats: {e}") + + # Check if we're using a special importer with cached formats + if self.format_importer and hasattr(self.format_importer, '__name__') and self.format_importer.__name__ == 'cached_format_importer': + # If we're using the cached importer, skip processing + # The formats were already added directly to the profile + pass # Using pre-added language formats + else: + # Normal processing path + try: + language_formats = self._process_language_formats( + behaviour, language_code) + if 'custom_formats' not in profile: + profile['custom_formats'] = [] + profile['custom_formats'].extend(language_formats) + except Exception as e: + logger.error(f"Failed to process language formats: {e}") # Simple mode: just use the language directly without custom formats # This lets the Arr application's built-in language filter handle it @@ -196,15 +340,13 @@ class ProfileConverter: selected_language = ValueResolver.get_language(language, self.target_app, for_profile=True) - logger.info(f"Using simple language mode: {language}") - logger.info(f"Selected language data: {selected_language}") + # Using simple language mode else: # Advanced mode or 'any' - set language to 'any' as filtering is done via formats selected_language = ValueResolver.get_language('any', self.target_app, for_profile=True) - logger.info( - f"Using advanced mode or 'any', setting language to 'any'") + # Using advanced mode, setting language to 'any' converted_profile = ConvertedProfile( name=profile["name"], diff --git a/backend/app/importarr/__init__.py b/backend/app/importarr/__init__.py index 2a39487..04a7315 100644 --- a/backend/app/importarr/__init__.py +++ b/backend/app/importarr/__init__.py @@ -2,11 +2,12 @@ from flask import Blueprint, request, jsonify from flask_cors import cross_origin import logging +import asyncio from pathlib import Path from ..arr.manager import get_arr_config from ..data.utils import get_category_directory, load_yaml_file -from .format import import_formats_to_arr -from .profile import import_profiles_to_arr +from .format import import_formats_to_arr, async_import_formats_to_arr +from .profile import import_profiles_to_arr, async_import_profiles_to_arr from ..db import get_unique_arrs logger = logging.getLogger('importarr') @@ -205,24 +206,34 @@ def import_profiles(): logger.error(f"Error loading profile {profile_name}: {str(e)}") continue - # Import/Update formats first + # Import/Update formats first - use async version for larger batch sizes if format_names: format_names_list = list(format_names) + # When we have more than a few formats, use the async import path + # which will parallelize the requests if import_as_unique: modified_format_names = [ f"{name} [Dictionarry]" for name in format_names_list ] - import_formats_to_arr(format_names=modified_format_names, - original_names=format_names_list, - base_url=arr_data['arrServer'], - api_key=arr_data['apiKey'], - 
diff --git a/backend/app/importarr/__init__.py b/backend/app/importarr/__init__.py
index 2a39487..04a7315 100644
--- a/backend/app/importarr/__init__.py
+++ b/backend/app/importarr/__init__.py
@@ -2,11 +2,12 @@
 from flask import Blueprint, request, jsonify
 from flask_cors import cross_origin
 import logging
+import asyncio
 from pathlib import Path
 from ..arr.manager import get_arr_config
 from ..data.utils import get_category_directory, load_yaml_file
-from .format import import_formats_to_arr
-from .profile import import_profiles_to_arr
+from .format import import_formats_to_arr, async_import_formats_to_arr
+from .profile import import_profiles_to_arr, async_import_profiles_to_arr
 from ..db import get_unique_arrs
 
 logger = logging.getLogger('importarr')
@@ -205,24 +206,34 @@ def import_profiles():
                 logger.error(f"Error loading profile {profile_name}: {str(e)}")
                 continue
 
-        # Import/Update formats first
+        # Import/Update formats first - use async version for larger batch sizes
        if format_names:
            format_names_list = list(format_names)
+            # When we have more than a few formats, use the async import path
+            # which will parallelize the requests
            if import_as_unique:
                modified_format_names = [
                    f"{name} [Dictionarry]" for name in format_names_list
                ]
-                import_formats_to_arr(format_names=modified_format_names,
-                                      original_names=format_names_list,
-                                      base_url=arr_data['arrServer'],
-                                      api_key=arr_data['apiKey'],
-                                      arr_type=arr_data['type'])
+                # Use the regular import function which will detect large batches
+                # and automatically use async when appropriate
+                import_formats_to_arr(
+                    format_names=modified_format_names,
+                    original_names=format_names_list,
+                    base_url=arr_data['arrServer'],
+                    api_key=arr_data['apiKey'],
+                    arr_type=arr_data['type']
+                )
            else:
-                import_formats_to_arr(format_names=format_names_list,
-                                      original_names=format_names_list,
-                                      base_url=arr_data['arrServer'],
-                                      api_key=arr_data['apiKey'],
-                                      arr_type=arr_data['type'])
+                # Use the regular import function which will detect large batches
+                # and automatically use async when appropriate
+                import_formats_to_arr(
+                    format_names=format_names_list,
+                    original_names=format_names_list,
+                    base_url=arr_data['arrServer'],
+                    api_key=arr_data['apiKey'],
+                    arr_type=arr_data['type']
+                )
 
         # Import profiles
         result = import_profiles_to_arr(profile_names=profile_names,
diff --git a/backend/app/importarr/format.py b/backend/app/importarr/format.py
index 3c133f5..837bc9d 100644
--- a/backend/app/importarr/format.py
+++ b/backend/app/importarr/format.py
@@ -2,7 +2,10 @@
 import requests
 import logging
 import json
 import yaml
+import asyncio
+import aiohttp
 from pathlib import Path
+from typing import Dict, List, Optional, Any, Tuple
 from ..data.utils import (load_yaml_file, get_category_directory, REGEX_DIR,
                           FORMAT_DIR)
 from ..compile import CustomFormat, FormatConverter, TargetApp
@@ -13,8 +16,27 @@ logger = logging.getLogger('importarr')
 
 def import_formats_to_arr(format_names, base_url, api_key, arr_type,
                           original_names):
+    """
+    Import custom formats to arr instance.
+    This function supports bulk importing of formats with sequential processing.
+    """
     logger.info(
         f"Received {len(format_names)} formats to import for {arr_type}")
+
+    # For larger imports, use the async version to improve performance
+    if len(format_names) > 5:
+        # Run async function within the event loop
+        return asyncio.run(
+            async_import_formats_to_arr(
+                format_names=format_names,
+                base_url=base_url,
+                api_key=api_key,
+                arr_type=arr_type,
+                original_names=original_names
+            )
+        )
+
+    # For smaller imports, use the regular synchronous version
     results = {
         'success': True,
         'added': 0,
@@ -55,9 +77,7 @@ def import_formats_to_arr(format_names, base_url, api_key, arr_type,
             original_name = original_names[i]
             format_file = f"{get_category_directory('custom_format')}/{original_name}.yml"
             format_data = load_yaml_file(format_file)
-            logger.info("Received format:\n" +
-                        yaml.dump(format_data, sort_keys=False))
-
+
             custom_format = CustomFormat(**format_data)
             converted_format = converter.convert_format(
                 custom_format, target_app)
@@ -79,9 +99,6 @@ def import_formats_to_arr(format_names, base_url, api_key, arr_type,
                 vars(spec) for spec in converted_format.specifications
             ]
 
-            logger.info("Compiled to:\n" +
-                        json.dumps([compiled_data], indent=2))
-
             result = process_format(compiled_data, existing_names, base_url,
                                     api_key)
             if result['success']:
@@ -115,6 +132,140 @@ def import_formats_to_arr(format_names, base_url, api_key, arr_type,
         return {'success': False, 'error': str(e)}
 
 
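`import_formats_to_arr` is now a synchronous facade that hands large batches to the async implementation via `asyncio.run()`. A distilled sketch of the dispatch pattern (the threshold mirrors the `> 5` check above; `asyncio.run()` is only safe here because gunicorn's default sync workers never have an event loop already running — inside a running loop it raises `RuntimeError`):

```python
import asyncio
from typing import Callable, List

ASYNC_THRESHOLD = 5  # mirrors the "len(format_names) > 5" check above

def import_batch(items: List[str], sync_one: Callable[[str], dict],
                 async_batch) -> List[dict]:
    # Small batches: plain sequential requests, no event-loop overhead.
    if len(items) <= ASYNC_THRESHOLD:
        return [sync_one(item) for item in items]
    # Large batches: spin up an event loop for concurrent requests.
    return asyncio.run(async_batch(items))
```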
+ """ + logger.info( + f"Received {len(format_names)} formats to import (async) for {arr_type}") + results = { + 'success': True, + 'added': 0, + 'updated': 0, + 'failed': 0, + 'details': [] + } + + try: + logger.info("Looking for existing formats (async)...") + existing_formats = await async_get_existing_formats(base_url, api_key) + if existing_formats is None: + return { + 'success': False, + 'error': 'Failed to get existing formats' + } + + existing_names = {fmt['name']: fmt['id'] for fmt in existing_formats} + + # Load patterns - this doesn't need to be async as it's file system operations + patterns = {} + for pattern_file in Path(REGEX_DIR).glob('*.yml'): + try: + pattern_data = load_yaml_file(str(pattern_file)) + if pattern_data and 'name' in pattern_data and 'pattern' in pattern_data: + patterns[pattern_data['name']] = pattern_data['pattern'] + except Exception as e: + logger.error( + f"Error loading pattern file {pattern_file}: {str(e)}") + continue + + converter = FormatConverter(patterns) + target_app = TargetApp.RADARR if arr_type.lower() == 'radarr' else TargetApp.SONARR + + # Process all formats into API-ready format first + compiled_formats = [] + format_tasks = [] + + for i, format_name in enumerate(format_names): + try: + # Use original name for file lookup + original_name = original_names[i] + format_file = f"{get_category_directory('custom_format')}/{original_name}.yml" + format_data = load_yaml_file(format_file) + + custom_format = CustomFormat(**format_data) + converted_format = converter.convert_format(custom_format, target_app) + if not converted_format: + raise ValueError("Format conversion failed") + + # Create base compiled data with ordered fields + compiled_data = {'name': format_name} # Start with name + + # Check rename status and add field right after name if true + if is_format_in_renames(original_name): + compiled_data['includeCustomFormatWhenRenaming'] = True + logger.info( + f"Format {original_name} has renames enabled, including field" + ) + + # Add specifications last + compiled_data['specifications'] = [ + vars(spec) for spec in converted_format.specifications + ] + compiled_formats.append((format_name, compiled_data)) + + except Exception as e: + logger.error(f"Error processing format {format_name}: {str(e)}") + results['failed'] += 1 + results['success'] = False + results['details'].append({ + 'name': format_name, + 'action': 'failed', + 'success': False, + 'error': str(e) + }) + + # Now create async tasks for all formats to upload them concurrently + for format_name, compiled_data in compiled_formats: + task = asyncio.ensure_future( + async_process_format( + format_data=compiled_data, + existing_names=existing_names, + base_url=base_url, + api_key=api_key + ) + ) + format_tasks.append((format_name, task)) + + # Wait for all format uploads to complete + for format_name, task in format_tasks: + try: + result = await task + if result['success']: + results[result['action']] += 1 + else: + results['failed'] += 1 + results['success'] = False + + results['details'].append(result['detail']) + except Exception as e: + logger.error(f"Error waiting for format task {format_name}: {str(e)}") + results['failed'] += 1 + results['success'] = False + results['details'].append({ + 'name': format_name, + 'action': 'failed', + 'success': False, + 'error': str(e) + }) + + logger.info( + f"Async importing {len(format_names)} formats complete. 
" + f"Added: {results['added']}, Updated: {results['updated']}, " + f"Failed: {results['failed']}") + + return results + + except Exception as e: + logger.error(f"Error in async_import_formats_to_arr: {str(e)}") + return {'success': False, 'error': str(e)} + + def get_existing_formats(base_url, api_key): try: response = requests.get(f"{base_url.rstrip('/')}/api/v3/customformat", @@ -127,19 +278,55 @@ def get_existing_formats(base_url, api_key): return None +async def async_get_existing_formats(base_url: str, api_key: str) -> Optional[List[Dict]]: + """Async version of get_existing_formats""" + try: + async with aiohttp.ClientSession() as session: + async with session.get( + f"{base_url.rstrip('/')}/api/v3/customformat", + headers={'X-Api-Key': api_key} + ) as response: + if response.status == 200: + return await response.json() + return None + except Exception as e: + logger.error(f"Error getting existing formats (async): {str(e)}") + return None + + def process_format(format_data, existing_names, base_url, api_key): format_name = format_data['name'] if format_name in existing_names: format_data['id'] = existing_names[format_name] - logger.info(f"Found existing format '{format_name}'. Updating...") success = update_format(base_url, api_key, format_data) action = 'updated' else: - logger.info(f"Format '{format_name}' not found. Adding...") success = add_format(base_url, api_key, format_data) action = 'added' - logger.info(f"Format '{format_name}' import success: {success}") + return { + 'success': success, + 'action': action if success else 'failed', + 'detail': { + 'name': format_name, + 'action': action if success else 'failed', + 'success': success + } + } + + +async def async_process_format(format_data: Dict, existing_names: Dict[str, int], + base_url: str, api_key: str) -> Dict: + """Async version of process_format""" + format_name = format_data['name'] + if format_name in existing_names: + format_data['id'] = existing_names[format_name] + success = await async_update_format(base_url, api_key, format_data) + action = 'updated' + else: + success = await async_add_format(base_url, api_key, format_data) + action = 'added' + return { 'success': success, 'action': action if success else 'failed', @@ -154,26 +341,58 @@ def process_format(format_data, existing_names, base_url, api_key): def update_format(base_url, api_key, format_data): try: url = f"{base_url.rstrip('/')}/api/v3/customformat/{format_data['id']}" - logger.info(f"Updating format at URL: {url}") response = requests.put(url, headers={'X-Api-Key': api_key}, json=format_data) - logger.info(f"Response status: {response.status_code}") + logger.info(f"Update format '{format_data['name']}' response: {response.status_code}") return response.status_code in [200, 201, 202, 204] except Exception as e: logger.error(f"Error updating format: {str(e)}") return False +async def async_update_format(base_url: str, api_key: str, format_data: Dict) -> bool: + """Async version of update_format""" + try: + url = f"{base_url.rstrip('/')}/api/v3/customformat/{format_data['id']}" + async with aiohttp.ClientSession() as session: + async with session.put( + url, + headers={'X-Api-Key': api_key}, + json=format_data + ) as response: + logger.info(f"Update format '{format_data['name']}' response: {response.status} (async)") + return response.status in [200, 201, 202, 204] + except Exception as e: + logger.error(f"Error updating format (async): {str(e)}") + return False + + def add_format(base_url, api_key, format_data): try: url = 
f"{base_url.rstrip('/')}/api/v3/customformat" - logger.info(f"Adding format at URL: {url}") response = requests.post(url, headers={'X-Api-Key': api_key}, json=format_data) - logger.info(f"Response status: {response.status_code}") + logger.info(f"Add format '{format_data['name']}' response: {response.status_code}") return response.status_code in [200, 201, 202, 204] except Exception as e: logger.error(f"Error adding format: {str(e)}") return False + + +async def async_add_format(base_url: str, api_key: str, format_data: Dict) -> bool: + """Async version of add_format""" + try: + url = f"{base_url.rstrip('/')}/api/v3/customformat" + async with aiohttp.ClientSession() as session: + async with session.post( + url, + headers={'X-Api-Key': api_key}, + json=format_data + ) as response: + logger.info(f"Add format '{format_data['name']}' response: {response.status} (async)") + return response.status in [200, 201, 202, 204] + except Exception as e: + logger.error(f"Error adding format (async): {str(e)}") + return False diff --git a/backend/app/importarr/format_memory.py b/backend/app/importarr/format_memory.py index b135d5f..b48999d 100644 --- a/backend/app/importarr/format_memory.py +++ b/backend/app/importarr/format_memory.py @@ -3,6 +3,8 @@ import requests import logging import json +import asyncio +import aiohttp from typing import Dict, List, Optional from pathlib import Path from ..data.utils import (load_yaml_file, get_category_directory, REGEX_DIR, @@ -25,21 +27,57 @@ def get_existing_formats(base_url: str, api_key: str) -> Optional[List[Dict]]: return None +async def async_get_existing_formats(base_url: str, api_key: str) -> Optional[List[Dict]]: + """Async version of get_existing_formats""" + try: + async with aiohttp.ClientSession() as session: + async with session.get( + f"{base_url.rstrip('/')}/api/v3/customformat", + headers={'X-Api-Key': api_key} + ) as response: + if response.status == 200: + return await response.json() + return None + except Exception as e: + logger.error(f"Error getting existing formats (async): {str(e)}") + return None + + def process_format(format_data: Dict, existing_names: Dict[str, int], base_url: str, api_key: str) -> Dict: """Process single format - either update or add new""" format_name = format_data['name'] if format_name in existing_names: format_data['id'] = existing_names[format_name] - logger.info(f"Found existing format '{format_name}'. Updating...") success = update_format(base_url, api_key, format_data) action = 'updated' else: - logger.info(f"Format '{format_name}' not found. 
Adding...") success = add_format(base_url, api_key, format_data) action = 'added' - logger.info(f"Format '{format_name}' import success: {success}") + return { + 'success': success, + 'action': action if success else 'failed', + 'detail': { + 'name': format_name, + 'action': action if success else 'failed', + 'success': success + } + } + + +async def async_process_format(format_data: Dict, existing_names: Dict[str, int], + base_url: str, api_key: str) -> Dict: + """Async version of process_format""" + format_name = format_data['name'] + if format_name in existing_names: + format_data['id'] = existing_names[format_name] + success = await async_update_format(base_url, api_key, format_data) + action = 'updated' + else: + success = await async_add_format(base_url, api_key, format_data) + action = 'added' + return { 'success': success, 'action': action if success else 'failed', @@ -55,32 +93,64 @@ def update_format(base_url: str, api_key: str, format_data: Dict) -> bool: """Update existing custom format""" try: url = f"{base_url.rstrip('/')}/api/v3/customformat/{format_data['id']}" - logger.info(f"Updating format at URL: {url}") response = requests.put(url, headers={'X-Api-Key': api_key}, json=format_data) - logger.info(f"Response status: {response.status_code}") + logger.info(f"Update format '{format_data['name']}' response: {response.status_code}") return response.status_code in [200, 201, 202, 204] except Exception as e: logger.error(f"Error updating format: {str(e)}") return False +async def async_update_format(base_url: str, api_key: str, format_data: Dict) -> bool: + """Async version of update_format""" + try: + url = f"{base_url.rstrip('/')}/api/v3/customformat/{format_data['id']}" + async with aiohttp.ClientSession() as session: + async with session.put( + url, + headers={'X-Api-Key': api_key}, + json=format_data + ) as response: + logger.info(f"Update format '{format_data['name']}' response: {response.status} (async)") + return response.status in [200, 201, 202, 204] + except Exception as e: + logger.error(f"Error updating format (async): {str(e)}") + return False + + def add_format(base_url: str, api_key: str, format_data: Dict) -> bool: """Add new custom format""" try: url = f"{base_url.rstrip('/')}/api/v3/customformat" - logger.info(f"Adding format at URL: {url}") response = requests.post(url, headers={'X-Api-Key': api_key}, json=format_data) - logger.info(f"Response status: {response.status_code}") + logger.info(f"Add format '{format_data['name']}' response: {response.status_code}") return response.status_code in [200, 201, 202, 204] except Exception as e: logger.error(f"Error adding format: {str(e)}") return False +async def async_add_format(base_url: str, api_key: str, format_data: Dict) -> bool: + """Async version of add_format""" + try: + url = f"{base_url.rstrip('/')}/api/v3/customformat" + async with aiohttp.ClientSession() as session: + async with session.post( + url, + headers={'X-Api-Key': api_key}, + json=format_data + ) as response: + logger.info(f"Add format '{format_data['name']}' response: {response.status} (async)") + return response.status in [200, 201, 202, 204] + except Exception as e: + logger.error(f"Error adding format (async): {str(e)}") + return False + + def import_format_from_memory(format_data: Dict, base_url: str, api_key: str, @@ -99,6 +169,8 @@ def import_format_from_memory(format_data: Dict, Returns: Dict containing import results """ + # For memory-based imports, no need to check size threshold + # as these are typically used for language formats 
 def import_format_from_memory(format_data: Dict,
                               base_url: str,
                               api_key: str,
@@ -99,6 +169,8 @@ def import_format_from_memory(format_data: Dict,
     Returns:
         Dict containing import results
     """
+    # For memory-based imports, no need to check size threshold
+    # as these are typically used for language formats which are few
     results = {
         'success': True,
         'added': 0,
@@ -116,10 +188,6 @@ def import_format_from_memory(format_data: Dict,
                 f"Modified format name for unique import: {format_data['name']}"
             )
 
-        # Log the received memory-based format data
-        logger.info("Received memory-based format:\n" +
-                    json.dumps(format_data, indent=2))
-
         logger.info("Looking for existing formats (memory-based import)...")
         existing_formats = get_existing_formats(base_url, api_key)
         if existing_formats is None:
@@ -164,8 +232,7 @@ def import_format_from_memory(format_data: Dict,
             [vars(spec) for spec in converted_format.specifications]
         }
 
-        logger.info("Compiled to (memory-based):\n" +
-                    json.dumps([api_format], indent=2))
+        # Format compiled successfully
 
         # Process the compiled format (update/add)
         result = process_format(api_format, existing_format_map, base_url,
@@ -193,3 +260,106 @@ def import_format_from_memory(format_data: Dict,
             'error': str(e)
         }]
     }
+
+
+async def async_import_format_from_memory(format_data: Dict,
+                                          base_url: str,
+                                          api_key: str,
+                                          arr_type: str,
+                                          import_as_unique: bool = False) -> Dict:
+    """
+    Asynchronous version of import_format_from_memory
+
+    Args:
+        format_data: Dictionary containing the format specification
+        base_url: Arr instance base URL
+        api_key: API key for arr instance
+        arr_type: Type of arr instance (radarr/sonarr)
+        import_as_unique: Whether to append [Dictionarry] to format names
+
+    Returns:
+        Dict containing import results
+    """
+    results = {
+        'success': True,
+        'added': 0,
+        'updated': 0,
+        'failed': 0,
+        'details': []
+    }
+
+    try:
+        # Modify format name if import_as_unique is true
+        original_name = format_data['name']
+        if import_as_unique:
+            format_data['name'] = f"{original_name} [Dictionarry]"
+            logger.info(
+                f"Modified format name for unique import: {format_data['name']}"
+            )
+
+        logger.info("Looking for existing formats (memory-based import, async)...")
+        existing_formats = await async_get_existing_formats(base_url, api_key)
+        if existing_formats is None:
+            return {
+                'success': False,
+                'error': 'Failed to get existing formats'
+            }
+
+        existing_format_map = {
+            fmt['name']: fmt['id']
+            for fmt in existing_formats
+        }
+
+        # Convert from raw data into a CustomFormat object
+        custom_format = CustomFormat(**format_data)
+
+        # Load patterns from regex directory (file system operations, no need for async)
+        patterns = {}
+        for pattern_file in Path(REGEX_DIR).glob('*.yml'):
+            try:
+                pattern_data = load_yaml_file(str(pattern_file))
+                if pattern_data and 'name' in pattern_data and 'pattern' in pattern_data:
+                    patterns[pattern_data['name']] = pattern_data['pattern']
+            except Exception as e:
+                logger.error(
+                    f"Error loading pattern file {pattern_file}: {str(e)}")
+                continue
+
+        target_app = TargetApp.RADARR if arr_type.lower() == 'radarr' else TargetApp.SONARR
+        converter = FormatConverter(patterns)
+        converted_format = converter.convert_format(custom_format, target_app)
+
+        if not converted_format:
+            raise ValueError("Format conversion failed")
+
+        # Prepare final JSON data
+        api_format = {
+            'name': converted_format.name,
+            'specifications': [vars(spec) for spec in converted_format.specifications]
+        }
+
+        # Format compiled successfully
+
+        # Process the compiled format (update/add) using async methods
+        result = await async_process_format(api_format, existing_format_map, base_url, api_key)
+        if result['success']:
+            results[result['action']] += 1
+        else:
+            results['failed'] += 1
+            results['success'] = False
+
+        results['details'].append(result['detail'])
+        return results
+
+    except Exception as e:
+        logger.error(f"Error importing format data (async): {str(e)}")
+        return {
+            'success': False,
+            'error': str(e),
+            'details': [{
+                'name': format_data.get('name', 'unknown'),
+                'action': 'failed',
+                'success': False,
+                'error': str(e)
+            }]
+        }
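A short usage sketch for the new in-memory async importer (not part of the diff; the format dict and values are placeholders):

```python
import asyncio
from app.importarr.format_memory import async_import_format_from_memory

async def import_many(formats, base_url, api_key):
    # Schedule every import concurrently and collect the result dicts.
    tasks = [
        async_import_format_from_memory(format_data=fmt,
                                        base_url=base_url,
                                        api_key=api_key,
                                        arr_type='radarr')
        for fmt in formats
    ]
    return await asyncio.gather(*tasks)

# e.g. asyncio.run(import_many([{'name': 'Example', 'conditions': []}],
#                              'http://localhost:7878', 'api-key'))
```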
diff --git a/backend/app/importarr/profile.py b/backend/app/importarr/profile.py
index cedc1e8..bc783d3 100644
--- a/backend/app/importarr/profile.py
+++ b/backend/app/importarr/profile.py
@@ -4,12 +4,15 @@
 import requests
 import logging
 import json
 import yaml
+import asyncio
+import aiohttp
 from pathlib import Path
-from typing import Dict, List, Optional, Any
+from typing import Dict, List, Optional, Any, Tuple
 from ..data.utils import load_yaml_file, get_category_directory
 from ..compile.profile_compiler import compile_quality_profile
 from ..compile.mappings import TargetApp
-from .format import import_formats_to_arr
+from .format import import_formats_to_arr
+from .format_memory import import_format_from_memory, async_import_format_from_memory
 from ..arr.manager import get_arr_config
 
 logger = logging.getLogger('importarr')
@@ -18,8 +21,26 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
                            base_url: str, api_key: str, arr_type: str,
                            arr_id: str, import_as_unique: bool) -> Dict:
+    """
+    Import quality profiles to arr instance.
+    This function supports bulk importing of profiles with sequential or concurrent processing.
+    """
     logger.info(
         f"Received {len(profile_names)} profiles to import for {arr_type}")
+
+    # For larger imports, use the async version to improve performance
+    if len(profile_names) > 1:
+        # Run async function within the event loop
+        return asyncio.run(
+            async_import_profiles_to_arr(profile_names=profile_names,
+                                         original_names=original_names,
+                                         base_url=base_url,
+                                         api_key=api_key,
+                                         arr_type=arr_type,
+                                         arr_id=arr_id,
+                                         import_as_unique=import_as_unique))
+
+    # For smaller imports, use the regular synchronous version
     results = {
         'success': True,
         'added': 0,
@@ -68,8 +89,7 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
             for cf in profile_data['custom_formats']:
                 cf['name'] = f"{cf['name']} [Dictionarry]"
 
-            logger.info("Received profile:\n" +
-                        yaml.dump(profile_data, sort_keys=False))
+            # Profile loaded
 
             profile_language = profile_data.get('language', 'any')
             if profile_language != 'any':
@@ -87,7 +107,6 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
                     f"Profile '{profile_name}' has advanced mode language: {profile_language}"
                 )
 
-            logger.info("Compiling quality profile...")
             compiled_profiles = compile_quality_profile(
                 profile_data=profile_data,
                 target_app=target_app,
@@ -115,8 +134,6 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
             logger.debug(
                 f"Found {len(format_id_map)} existing custom formats")
 
-            logger.info(
-                f"Synchronizing format IDs in profile '{profile_name}'")
             profile_data = sync_format_ids(profile_data, format_id_map)
 
             logger.debug("Format items after sync:")
@@ -125,8 +142,7 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
                     f"  {item['name']} => Score: {item.get('score', 0)}, "
                     f"Format ID: {item.get('format', 'missing')}")
 
-            logger.info("Compiled to:\n" +
-                        json.dumps(profile_data, indent=2))
+            # Profile compiled successfully
 
             result = process_profile(profile_data=profile_data,
                                      existing_names=existing_profile_map,
@@ -164,6 +180,249 @@ def import_profiles_to_arr(profile_names: List[str], original_names: List[str],
         return {'success': False, 'error': str(e)}
 
 
+async def async_import_profiles_to_arr(profile_names: List[str],
+                                       original_names: List[str],
+                                       base_url: str, api_key: str,
+                                       arr_type: str, arr_id: str,
+                                       import_as_unique: bool) -> Dict:
+    """
+    Asynchronous version of import_profiles_to_arr that processes profiles concurrently.
+    This significantly improves performance for larger batches of profile imports.
+    """
+    logger.info(
+        f"Received {len(profile_names)} profiles to import (async) for {arr_type}"
+    )
+    results = {
+        'success': True,
+        'added': 0,
+        'updated': 0,
+        'failed': 0,
+        'details': []
+    }
+
+    try:
+        arr_config_response = get_arr_config(arr_id)
+        if not arr_config_response['success']:
+            return {
+                'success': False,
+                'error': 'Failed to get arr configuration'
+            }
+        arr_config = arr_config_response['data']
+
+        logger.info("Looking for existing profiles (async)...")
+        existing_profiles = await async_get_existing_profiles(
+            base_url, api_key)
+        if existing_profiles is None:
+            return {
+                'success': False,
+                'error': 'Failed to get existing profiles'
+            }
+
+        # Create mapping for existing profiles
+        existing_profile_map = {}
+        for profile in existing_profiles:
+            existing_profile_map[profile['name']] = profile['id']
+
+        target_app = TargetApp.RADARR if arr_type.lower(
+        ) == 'radarr' else TargetApp.SONARR
+
+        # Fetch all existing formats once upfront
+        logger.info("Pre-fetching existing custom formats for all profiles...")
+        existing_formats = await async_get_existing_formats(base_url, api_key)
+        if existing_formats is None:
+            return {
+                'success': False,
+                'error': 'Failed to get existing custom formats'
+            }
+        format_id_map = {fmt['name']: fmt['id'] for fmt in existing_formats}
+        logger.info(f"Successfully pre-fetched {len(format_id_map)} existing custom formats")
+
+        # Pre-scan all profiles to identify and cache language formats
+        needed_language_formats = set()
+        initial_profiles_data = []
+
+        # First, load and analyze all profile files
+        for i, profile_name in enumerate(profile_names):
+            try:
+                # Use original name for file lookup
+                original_name = original_names[i]
+                profile_file = f"{get_category_directory('profile')}/{original_name}.yml"
+                profile_data = load_yaml_file(profile_file)
+
+                # Store original profile data for later processing
+                initial_profiles_data.append((i, profile_name, original_name, profile_data))
+
+                # Extract language from profile data
+                profile_language = profile_data.get('language', 'any')
+                if profile_language != 'any' and '_' in profile_language:
+                    # This is an advanced mode language that needs special format handling
+                    needed_language_formats.add(profile_language)
+                    # Language format identified
+            except Exception as e:
+                logger.error(f"Error pre-scanning profile {profile_name}: {str(e)}")
+                results['failed'] += 1
+                results['details'].append({
+                    'name': profile_name,
+                    'action': 'failed',
+                    'success': False,
+                    'error': f"Error pre-scanning profile: {str(e)}"
+                })
+                results['success'] = False
+
+        # Pre-load all language formats if any exist
+        language_format_cache = {}
+        if needed_language_formats:
+            logger.info(f"Pre-importing {len(needed_language_formats)} unique language formats for {len(profile_names)} profiles")
+            language_format_cache = await preload_language_formats(
+                language_formats=list(needed_language_formats),
+                target_app=target_app,
+                base_url=base_url,
+                api_key=api_key,
+                arr_type=arr_type,
+                import_as_unique=import_as_unique
+            )
+            logger.info(f"Successfully pre-loaded language formats for {len(language_format_cache)} languages")
{len(language_format_cache)} languages") + + # Process each profile with the cached language formats + profile_tasks = [] + + for i, profile_name, original_name, profile_data in initial_profiles_data: + try: + # Set the potentially modified profile name + profile_data['name'] = profile_name + + # Modify custom format names if import_as_unique is true + if import_as_unique and 'custom_formats' in profile_data: + for cf in profile_data['custom_formats']: + cf['name'] = f"{cf['name']} [Dictionarry]" + + # Profile loaded + + profile_language = profile_data.get('language', 'any') + if profile_language != 'any': + # Detect if we're using simple or advanced mode + is_simple_mode = '_' not in profile_language + # Language mode detected + + # Setup the profile compilation with the cached language formats + + # By default, use normal import + format_importer = import_formats_to_arr + + # For profiles with language formats, attach the cached formats + if language_format_cache and profile_language != 'any' and '_' in profile_language: + language_format_configs = language_format_cache.get(profile_language, []) + + if language_format_configs: + # Using cached language formats + + # Define a special function that will be detected by the profile compiler + # The function name is checked in _process_language_formats + def cached_format_importer(*args, **kwargs): + # Using cached formats from importer + return { + 'success': True, + 'added': 0, + 'updated': len(language_format_configs), + 'failed': 0, + 'details': [] + } + + # Add the cached formats to the function so they can be accessed by the compiler + cached_format_importer.cached_formats = language_format_configs + format_importer = cached_format_importer + else: + logger.warning(f"No cached formats found for language {profile_language}") + + # Add language formats from cache directly to the profile for the compiler + # This way we don't need to modify the compiler code at all + if profile_language != 'any' and '_' in profile_language and profile_language in language_format_cache: + # Add the cached language formats directly to the profile + if 'custom_formats' not in profile_data: + profile_data['custom_formats'] = [] + + # Add the cached formats - these are already imported, we just need to reference them + profile_data['custom_formats'].extend(language_format_cache[profile_language]) + + compiled_profiles = compile_quality_profile( + profile_data=profile_data, + target_app=target_app, + base_url=base_url, + api_key=api_key, + format_importer=format_importer, + import_as_unique=import_as_unique + ) + + if not compiled_profiles: + raise ValueError("Profile compilation returned no data") + + compiled_profile = compiled_profiles[0] + + # Sync format IDs upfront using the cached format_id_map + synced_profile = sync_format_ids(compiled_profile, format_id_map) + + # Create a task for processing this profile (without fetching formats again) + task = asyncio.create_task( + async_process_profile( + profile_data=synced_profile, + existing_names=existing_profile_map, + base_url=base_url, + api_key=api_key + ) + ) + profile_tasks.append((profile_name, task)) + + except Exception as e: + logger.error( + f"Error processing profile {profile_name}: {str(e)}, type: {type(e).__name__} (async)" + ) + logger.exception("Full traceback:") + results['failed'] += 1 + results['details'].append({ + 'name': profile_name, + 'action': 'failed', + 'success': False, + 'error': str(e) + }) + results['success'] = False + + # Process all profile upload results + for profile_name, 
+
+        # Process all profile upload results
+        for profile_name, task in profile_tasks:
+            try:
+                result = await task
+
+                if result['success']:
+                    results[result['action']] += 1
+                else:
+                    results['failed'] += 1
+                    results['success'] = False
+
+                results['details'].append(result['detail'])
+
+            except Exception as e:
+                logger.error(
+                    f"Error waiting for profile task {profile_name}: {str(e)} (async)"
+                )
+                results['failed'] += 1
+                results['details'].append({
+                    'name': profile_name,
+                    'action': 'failed',
+                    'success': False,
+                    'error': str(e)
+                })
+                results['success'] = False
+
+        logger.info(
+            f"Async importing {len(profile_names)} profiles complete. "
+            f"Added: {results['added']}, Updated: {results['updated']}, "
+            f"Failed: {results['failed']}")
+        return results
+
+    except Exception as e:
+        logger.error(f"Error in async_import_profiles_to_arr: {str(e)}")
+        return {'success': False, 'error': str(e)}
+
+
 def get_existing_profiles(base_url: str, api_key: str) -> Optional[List[Dict]]:
     try:
         response = requests.get(
@@ -177,6 +436,22 @@ def get_existing_profiles(base_url: str, api_key: str) -> Optional[List[Dict]]:
     return None
 
 
+async def async_get_existing_profiles(base_url: str,
+                                      api_key: str) -> Optional[List[Dict]]:
+    """Async version of get_existing_profiles"""
+    try:
+        async with aiohttp.ClientSession() as session:
+            async with session.get(
+                    f"{base_url.rstrip('/')}/api/v3/qualityprofile",
+                    headers={'X-Api-Key': api_key}) as response:
+                if response.status == 200:
+                    return await response.json()
+                return None
+    except Exception as e:
+        logger.error(f"Error getting existing profiles (async): {str(e)}")
+        return None
+
+
 def get_existing_formats(base_url: str, api_key: str) -> Optional[List[Dict]]:
     try:
         response = requests.get(f"{base_url.rstrip('/')}/api/v3/customformat",
@@ -189,6 +464,197 @@ def get_existing_formats(base_url: str, api_key: str) -> Optional[List[Dict]]:
     return None
 
 
+async def async_get_existing_formats(base_url: str,
+                                     api_key: str) -> Optional[List[Dict]]:
+    """Async version of get_existing_formats"""
+    try:
+        async with aiohttp.ClientSession() as session:
+            async with session.get(
+                    f"{base_url.rstrip('/')}/api/v3/customformat",
+                    headers={'X-Api-Key': api_key}) as response:
+                if response.status == 200:
+                    return await response.json()
+                return None
+    except Exception as e:
+        logger.error(f"Error getting existing formats (async): {str(e)}")
+        return None
+
+
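The cache built by `preload_language_formats` (below) maps each advanced-mode language identifier to profile-ready format configs, so a language shared by several profiles is imported exactly once. Roughly, the resulting structure looks like this — names are derived from the diff's `Not {language}` convention and the fixed `-9999` score, so treat it as illustrative:

```python
# Illustrative shape of language_format_cache after preloading:
language_format_cache = {
    'must_french': [
        {'name': 'Not French', 'score': -9999},
    ],
    'only_german': [
        {'name': 'Not German', 'score': -9999},
        {'name': 'Not Only German', 'score': -9999},
        {'name': 'Not Only German (Missing)', 'score': -9999},
    ],
}
```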
["must_english", "prefer_french"]) + target_app: TargetApp enum value (RADARR or SONARR) + base_url: API base URL + api_key: API key for the arr instance + arr_type: Type of arr (radarr or sonarr) + import_as_unique: Whether to append [Dictionarry] to format names + + Returns: + Dictionary mapping language IDs to their imported format configs + """ + from ..compile.profile_compiler import ProfileConverter + + language_format_cache = {} + + # Create a single ProfileConverter instance for all languages + converter = ProfileConverter( + target_app=target_app, + base_url=base_url, + api_key=api_key, + format_importer=None, # We'll handle importing manually + import_as_unique=import_as_unique + ) + + # For each unique language, process and cache its formats + for language_id in language_formats: + try: + # Skip if we've already processed this language + if language_id in language_format_cache: + continue + + # Parse the language behavior and code + if '_' in language_id: + behavior, language_code = language_id.split('_', 1) + else: + # Skip simple language modes - they don't need special format imports + continue + + logger.info(f"Pre-importing language formats for {language_id} (async batch)") + + # First generate format data for this language + formats_data = converter._generate_language_formats(behavior, language_code) + + # Import these language formats just once + format_results = await import_language_formats_once( + formats_data=formats_data, + base_url=base_url, + api_key=api_key, + arr_type=arr_type, + import_as_unique=import_as_unique + ) + + # Store the format configs for this language + language_format_cache[language_id] = format_results + logger.info(f"Successfully cached {len(format_results)} formats for language {language_id}") + + except Exception as e: + logger.error(f"Error pre-loading language formats for {language_id}: {str(e)}") + language_format_cache[language_id] = [] # Empty list to indicate failure + + return language_format_cache + + +async def import_language_formats_once(formats_data: List[Dict], + base_url: str, + api_key: str, + arr_type: str, + import_as_unique: bool) -> List[Dict]: + """ + Helper function to import language formats once and return the results. 
+async def import_language_formats_once(formats_data: List[Dict],
+                                       base_url: str,
+                                       api_key: str,
+                                       arr_type: str,
+                                       import_as_unique: bool) -> List[Dict]:
+    """
+    Helper function to import language formats once and return the results.
+
+    Args:
+        formats_data: List of format data dictionaries to import
+        base_url: API base URL
+        api_key: API key for arr instance
+        arr_type: Type of arr (radarr or sonarr)
+        import_as_unique: Whether to append [Dictionarry] to format names
+
+    Returns:
+        List of format configs ready to be added to profiles
+    """
+    # Create tasks for concurrent format imports
+    format_configs = []
+    import_tasks = []
+
+    for format_data in formats_data:
+        # Setup task for importing this format
+        task = asyncio.create_task(
+            async_import_format_from_memory(
+                format_data=format_data,
+                base_url=base_url,
+                api_key=api_key,
+                arr_type=arr_type,
+                import_as_unique=import_as_unique
+            )
+        )
+        import_tasks.append((format_data['name'], task))
+
+    # Process all format imports
+    for format_name, task in import_tasks:
+        try:
+            result = await task
+            if not result.get('success', False):
+                logger.error(f"Format import failed for cached language format: {format_name}")
+                continue
+
+            # Determine final format name (after any [Dictionarry] suffix)
+            display_name = format_name
+            if import_as_unique:
+                display_name = f"{format_name} [Dictionarry]"
+
+            # Create format config exactly as needed by profile compiler
+            format_configs.append({
+                'name': display_name,
+                'score': -9999
+            })
+
+        except Exception as e:
+            logger.error(f"Error importing cached language format {format_name}: {str(e)}")
+
+    return format_configs
+
+
+def use_cached_language_formats(language_cache: Dict[str, List[Dict]],
+                                format_names: List[str],
+                                base_url: str,
+                                api_key: str,
+                                arr_type: str,
+                                original_names: List[str]) -> Dict:
+    """
+    Custom format importer that returns cached language formats instead
+    of re-importing them. This is used by the profile compiler when we've
+    already pre-loaded the language formats.
+
+    This is a replacement for the regular import_formats_to_arr function.
+    """
+    # Extract the language ID from the original profile data
+    # This is passed from the profile compiler's context when calling this function
+    language_id = getattr(use_cached_language_formats, 'current_language_id', None)
+
+    if language_id and language_id in language_cache:
+        logger.info(f"Using cached language formats for {language_id}")
+        return {
+            'success': True,
+            'added': 0,
+            'updated': len(language_cache[language_id]),
+            'failed': 0,
+            'details': [
+                {'name': fmt['name'], 'action': 'updated', 'success': True}
+                for fmt in language_cache[language_id]
+            ]
+        }
+    else:
+        # Fall back to normal import if no cache entry exists
+        # or if this isn't a language format import
+        logger.info(f"No cached formats for language ID {language_id}, using normal import")
+        return import_formats_to_arr(
+            format_names=format_names,
+            base_url=base_url,
+            api_key=api_key,
+            arr_type=arr_type,
+            original_names=original_names
+        )
+
+
 def sync_format_ids(profile_data: Dict, format_id_map: Dict[str, int]) -> Dict:
     if 'formatItems' not in profile_data:
         profile_data['formatItems'] = []
@@ -225,13 +691,64 @@ def sync_format_ids(profile_data: Dict, format_id_map: Dict[str, int]) -> Dict:
     return profile_data
 
 
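`use_cached_language_formats` above reads its language ID from an attribute stamped onto the function object itself (`getattr(..., 'current_language_id', None)`), which avoids changing the importer call signature at the cost of hidden mutable state. A tiny illustration of the mechanism:

```python
def importer(*args, **kwargs):
    # Reads configuration from an attribute set on the function object.
    lang = getattr(importer, 'current_language_id', None)
    return {'language': lang}

importer.current_language_id = 'must_french'  # caller "configures" it
assert importer()['language'] == 'must_french'
```

A `functools.partial` closing over the language ID would make the same dependency explicit instead of implicit.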
+# This function is now deprecated and replaced by direct use of sync_format_ids and async_process_profile
+# We're keeping the signature for backward compatibility but not using it in the optimized code path
+async def async_process_profile_with_formats(profile_name: str,
+                                             profile_data: Dict,
+                                             existing_profile_map: Dict[str,
+                                                                        int],
+                                             base_url: str,
+                                             api_key: str) -> Dict:
+    """
+    Asynchronous function that handles getting formats and processing a profile in one go.
+    This allows for concurrent profile processing.
+
+    Note: This function is deprecated and should not be used in new code.
+    It's better to fetch formats once upfront for all profiles.
+    """
+    try:
+        # Get formats for profile synchronization
+        logger.info(
+            f"Looking for existing custom formats to sync format IDs (async)..."
+        )
+        existing_formats = await async_get_existing_formats(base_url, api_key)
+        if existing_formats is None:
+            raise ValueError("Failed to get updated format list")
+
+        format_id_map = {fmt['name']: fmt['id'] for fmt in existing_formats}
+        logger.debug(
+            f"Found {len(format_id_map)} existing custom formats (async)")
+
+        # Sync format IDs in the profile
+        synced_profile = sync_format_ids(profile_data, format_id_map)
+
+        # Process the profile (add or update)
+        return await async_process_profile(profile_data=synced_profile,
+                                           existing_names=existing_profile_map,
+                                           base_url=base_url,
+                                           api_key=api_key)
+    except Exception as e:
+        logger.error(
+            f"Error in async_process_profile_with_formats for {profile_name}: {str(e)}"
+        )
+        return {
+            'success': False,
+            'action': 'failed',
+            'detail': {
+                'name': profile_name,
+                'action': 'failed',
+                'success': False,
+                'error': str(e)
+            }
+        }
+
+
 def process_profile(profile_data: Dict, existing_names: Dict[str, int],
                     base_url: str, api_key: str) -> Dict:
     profile_name = profile_data['name']
     if profile_name in existing_names:
         profile_data['id'] = existing_names[profile_name]
-        logger.info(f"Found existing profile '{profile_name}'. Updating...")
         success = update_profile(base_url, api_key, profile_data)
         return {
             'success': success,
@@ -243,7 +760,6 @@ def process_profile(profile_data: Dict, existing_names: Dict[str, int],
             }
         }
     else:
-        logger.info(f"Profile '{profile_name}' not found. Adding...")
         success = add_profile(base_url, api_key, profile_data)
         return {
             'success': success,
@@ -256,29 +772,90 @@ def process_profile(profile_data: Dict, existing_names: Dict[str, int],
         }
 
 
+async def async_process_profile(profile_data: Dict, existing_names: Dict[str,
+                                                                          int],
+                                base_url: str, api_key: str) -> Dict:
+    """Async version of process_profile"""
+    profile_name = profile_data['name']
+
+    if profile_name in existing_names:
+        profile_data['id'] = existing_names[profile_name]
+        success = await async_update_profile(base_url, api_key, profile_data)
+        return {
+            'success': success,
+            'action': 'updated' if success else 'failed',
+            'detail': {
+                'name': profile_name,
+                'action': 'updated',
+                'success': success
+            }
+        }
+    else:
+        success = await async_add_profile(base_url, api_key, profile_data)
+        return {
+            'success': success,
+            'action': 'added' if success else 'failed',
+            'detail': {
+                'name': profile_name,
+                'action': 'added',
+                'success': success
+            }
+        }
+
+
 def update_profile(base_url: str, api_key: str, profile_data: Dict) -> bool:
     try:
         url = f"{base_url.rstrip('/')}/api/v3/qualityprofile/{profile_data['id']}"
-        logger.info(f"Updating profile at URL: {url}")
         response = requests.put(url,
                                 headers={'X-Api-Key': api_key},
                                 json=profile_data)
-        logger.info(f"Update response status: {response.status_code}")
+        logger.info(f"Update profile '{profile_data['name']}' response: {response.status_code}")
         return response.status_code in [200, 201, 202, 204]
     except Exception as e:
         logger.error(f"Error updating profile: {str(e)}")
         return False
 
 
+async def async_update_profile(base_url: str, api_key: str,
+                               profile_data: Dict) -> bool:
+    """Async version of update_profile"""
+    try:
+        url = f"{base_url.rstrip('/')}/api/v3/qualityprofile/{profile_data['id']}"
+        async with aiohttp.ClientSession() as session:
+            async with session.put(url,
+                                   headers={'X-Api-Key': api_key},
+                                   json=profile_data) as response:
+                logger.info(f"Update profile '{profile_data['name']}' response: {response.status} (async)")
+                return response.status in [200, 201, 202, 204]
+    except Exception as e:
+        logger.error(f"Error updating profile (async): {str(e)}")
+        return False
+
+
 def add_profile(base_url: str, api_key: str, profile_data: Dict) -> bool:
     try:
         url = f"{base_url.rstrip('/')}/api/v3/qualityprofile"
-        logger.info(f"Adding profile at URL: {url}")
         response = requests.post(url,
                                  headers={'X-Api-Key': api_key},
                                  json=profile_data)
-        logger.info(f"Add response status: {response.status_code}")
+        logger.info(f"Add profile '{profile_data['name']}' response: {response.status_code}")
        return response.status_code in [200, 201, 202, 204]
     except Exception as e:
         logger.error(f"Error adding profile: {str(e)}")
         return False
+
+
+async def async_add_profile(base_url: str, api_key: str,
+                            profile_data: Dict) -> bool:
+    """Async version of add_profile"""
+    try:
+        url = f"{base_url.rstrip('/')}/api/v3/qualityprofile"
+        async with aiohttp.ClientSession() as session:
+            async with session.post(url,
+                                    headers={'X-Api-Key': api_key},
+                                    json=profile_data) as response:
+                logger.info(f"Add profile '{profile_data['name']}' response: {response.status} (async)")
+                return response.status in [200, 201, 202, 204]
+    except Exception as e:
+        logger.error(f"Error adding profile (async): {str(e)}")
+        return False
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 377dba4..c2d6ba0 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -6,4 +6,5 @@ Werkzeug==2.0.1
 GitPython==3.1.24
 regex==2023.10.3
 APScheduler==3.10.4
-gunicorn==21.2.0
\ No newline at end of file
+gunicorn==21.2.0
+aiohttp==3.8.5
\ No newline at end of file