diff --git a/backend/app/data/utils.py b/backend/app/data/utils.py
index 819a69f..884a664 100644
--- a/backend/app/data/utils.py
+++ b/backend/app/data/utils.py
@@ -7,6 +7,7 @@ from typing import Dict, List, Any, Tuple, Union
import git
import regex
import logging
+from ..db.queries.arr import update_arr_config_on_rename, update_arr_config_on_delete
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
@@ -165,8 +166,20 @@ def update_yaml_file(file_path: str, data: Dict[str, Any],
# Update references before performing the rename
try:
+ # Update regular references
updated_files = update_references(category, old_name, new_name)
logger.info(f"Updated references in: {updated_files}")
+
+ # Update arr configs if this is a format or profile
+ if category in ['custom_format', 'profile']:
+ arr_category = 'customFormats' if category == 'custom_format' else 'profiles'
+ updated_configs = update_arr_config_on_rename(
+ arr_category, old_name, new_name)
+ if updated_configs:
+ logger.info(
+ f"Updated arr configs for {category} rename: {updated_configs}"
+ )
+
except Exception as e:
logger.error(f"Failed to update references: {e}")
raise Exception(f"Failed to update references: {str(e)}")
@@ -262,9 +275,9 @@ def check_delete_constraints(category: str, name: str) -> Tuple[bool, str]:
format_data = load_yaml_file(format_path)
# Check each condition in the format
for condition in format_data.get('conditions', []):
- if (condition['type'] in [
+ if condition['type'] in [
'release_title', 'release_group', 'edition'
- ] and condition.get('pattern') == check_name):
+ ] and condition.get('pattern') == check_name:
references.append(
f"custom format: {format_data['name']}")
except Exception as e:
@@ -299,6 +312,14 @@ def check_delete_constraints(category: str, name: str) -> Tuple[bool, str]:
f"Error checking profile file {profile_file}: {e}")
continue
+ # Update arr configs for formats and profiles
+ if category in ['custom_format', 'profile']:
+ arr_category = 'customFormats' if category == 'custom_format' else 'profiles'
+ updated_configs = update_arr_config_on_delete(arr_category, name)
+ if updated_configs:
+ logger.info(
+ f"Removed {name} from arr configs: {updated_configs}")
+
if references:
error_msg = f"Cannot delete - item is referenced in:\n" + "\n".join(
f"- {ref}" for ref in references)
diff --git a/backend/app/db/__init__.py b/backend/app/db/__init__.py
index a8b4f80..3bc49f6 100644
--- a/backend/app/db/__init__.py
+++ b/backend/app/db/__init__.py
@@ -1,12 +1,15 @@
-# backend/app/db/__init__.py
from .connection import get_db
from .queries.settings import get_settings, get_secret_key, save_settings
-from .queries.arr import get_unique_arrs
-from .queries.format_renames import add_format_to_renames, remove_format_from_renames, is_format_in_renames
+from .queries.arr import (get_unique_arrs, update_arr_config_on_rename,
+ update_arr_config_on_delete)
+from .queries.format_renames import (add_format_to_renames,
+ remove_format_from_renames,
+ is_format_in_renames)
from .migrations.runner import run_migrations
__all__ = [
'get_db', 'get_settings', 'get_secret_key', 'save_settings',
- 'get_unique_arrs', 'run_migrations', 'add_format_to_renames',
+ 'get_unique_arrs', 'update_arr_config_on_rename',
+ 'update_arr_config_on_delete', 'run_migrations', 'add_format_to_renames',
'remove_format_from_renames', 'is_format_in_renames'
]
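
A quick usage sketch of the newly re-exported helpers; this assumes the backend package is importable as app and that an arr_config table is already populated, and the names passed in are purely illustrative:

from app.db import update_arr_config_on_rename, update_arr_config_on_delete

# Both helpers return the IDs of arr_config rows whose data_to_sync was rewritten.
renamed_ids = update_arr_config_on_rename("customFormats", "Old Format", "New Format")
removed_ids = update_arr_config_on_delete("profiles", "Unused Profile")
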
diff --git a/backend/app/db/queries/arr.py b/backend/app/db/queries/arr.py
index 5b62c1e..592bc02 100644
--- a/backend/app/db/queries/arr.py
+++ b/backend/app/db/queries/arr.py
@@ -1,14 +1,15 @@
-# backend/app/db/queries/arr.py
from ..connection import get_db
+import json
+import logging
+
+logger = logging.getLogger(__name__)
def get_unique_arrs(arr_ids):
"""
Get import_as_unique settings for a list of arr IDs.
-
Args:
arr_ids (list): List of arr configuration IDs
-
Returns:
dict: Dictionary mapping arr IDs to their import_as_unique settings and names
"""
@@ -18,12 +19,12 @@ def get_unique_arrs(arr_ids):
with get_db() as conn:
placeholders = ','.join('?' * len(arr_ids))
query = f'''
- SELECT id, name, import_as_unique
- FROM arr_config
- WHERE id IN ({placeholders})
+ SELECT id, name, import_as_unique
+ FROM arr_config
+ WHERE id IN ({placeholders})
'''
-
results = conn.execute(query, arr_ids).fetchall()
+
return {
row['id']: {
'import_as_unique': bool(row['import_as_unique']),
@@ -31,3 +32,88 @@ def get_unique_arrs(arr_ids):
}
for row in results
}
+
+
+def update_arr_config_on_rename(category, old_name, new_name):
+ """
+ Update arr_config data_to_sync when a format or profile is renamed.
+ Args:
+ category (str): Either 'customFormats' or 'profiles'
+ old_name (str): Original name being changed
+ new_name (str): New name to change to
+ Returns:
+ list: IDs of arr_config rows that were updated
+ """
+ updated_ids = []
+
+ with get_db() as conn:
+ # Get all configs that might reference this name
+ rows = conn.execute(
+ 'SELECT id, data_to_sync FROM arr_config WHERE data_to_sync IS NOT NULL'
+ ).fetchall()
+
+ for row in rows:
+ try:
+ data = json.loads(row['data_to_sync'])
+ # Check if this config has the relevant category data
+ if category in data:
+ # Update any matching names
+ if old_name in data[category]:
+ # Replace old name with new name
+ data[category] = [
+ new_name if x == old_name else x
+ for x in data[category]
+ ]
+ # Save changes back to database
+ conn.execute(
+ 'UPDATE arr_config SET data_to_sync = ? WHERE id = ?',
+ (json.dumps(data), row['id']))
+ updated_ids.append(row['id'])
+ except json.JSONDecodeError:
+ logger.error(f"Invalid JSON in arr_config id={row['id']}")
+ continue
+
+ if updated_ids:
+ conn.commit()
+
+ return updated_ids
+
+
+def update_arr_config_on_delete(category, name):
+ """
+ Update arr_config data_to_sync when a format or profile is deleted.
+ Args:
+ category (str): Either 'customFormats' or 'profiles'
+ name (str): Name being deleted
+ Returns:
+ list: IDs of arr_config rows that were updated
+ """
+ updated_ids = []
+
+ with get_db() as conn:
+ # Get all configs that might reference this name
+ rows = conn.execute(
+ 'SELECT id, data_to_sync FROM arr_config WHERE data_to_sync IS NOT NULL'
+ ).fetchall()
+
+ for row in rows:
+ try:
+ data = json.loads(row['data_to_sync'])
+ # Check if this config has the relevant category data
+ if category in data:
+ # Remove any matching names
+ if name in data[category]:
+ data[category].remove(name)
+ # Save changes back to database
+ conn.execute(
+ 'UPDATE arr_config SET data_to_sync = ? WHERE id = ?',
+ (json.dumps(data), row['id']))
+ updated_ids.append(row['id'])
+ except json.JSONDecodeError:
+ logger.error(f"Invalid JSON in arr_config id={row['id']}")
+ continue
+
+ if updated_ids:
+ conn.commit()
+
+ return updated_ids
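
One design note on update_arr_config_on_delete: list.remove() drops only the first occurrence of the name. If duplicates could ever appear in data_to_sync, a filter comprehension removes them all; a small sketch under that assumption, using the same JSON shape as above:

# Hypothetical payload containing a duplicated entry (illustrative names).
data = {"customFormats": ["x265", "x265", "DV"]}

# Remove every occurrence of the deleted name, not just the first.
name = "x265"
data["customFormats"] = [x for x in data["customFormats"] if x != name]

print(data)  # {'customFormats': ['DV']}
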
diff --git a/frontend/src/components/settings/arrs/ArrModal.jsx b/frontend/src/components/settings/arrs/ArrModal.jsx
index 7c79ed2..88b558c 100644
--- a/frontend/src/components/settings/arrs/ArrModal.jsx
+++ b/frontend/src/components/settings/arrs/ArrModal.jsx
@@ -1,8 +1,10 @@
+// ArrModal.jsx
+
import React from 'react';
import {Plus, TestTube, Loader, Save, X, Trash, Check} from 'lucide-react';
import Modal from '@ui/Modal';
import {useArrModal} from '@hooks/useArrModal';
-import DataSelectorModal from './DataSelectorModal';
+import DataSelector from './DataSelector';
import SyncModal from './SyncModal';
const ArrModal = ({isOpen, onClose, onSubmit, editingArr}) => {
@@ -44,43 +46,34 @@ const ArrModal = ({isOpen, onClose, onSubmit, editingArr}) => {
{value: 'schedule', label: 'Scheduled'}
];
- // Ensure data_to_sync always has the required structure
const safeSelectedData = {
profiles: formData.data_to_sync?.profiles || [],
customFormats: formData.data_to_sync?.customFormats || []
};
- // Handle sync method change
- const handleSyncMethodChange = e => {
- const newMethod = e.target.value;
- handleInputChange({
- target: {
- id: 'sync_method',
- value: newMethod
- }
- });
-
- // Reset data_to_sync when switching to manual
- if (newMethod === 'manual') {
- handleInputChange({
- target: {
- id: 'data_to_sync',
- value: {profiles: [], customFormats: []}
- }
- });
- }
+ const handleFormSubmit = e => {
+ e.preventDefault();
+ e.stopPropagation();
+ handleSubmit(e);
};
- const inputClasses = errorKey =>
- `w-full px-3 py-2 text-sm rounded-lg border ${
- errors[errorKey]
- ? 'border-red-500'
- : 'border-gray-300 dark:border-gray-600'
- } bg-white dark:bg-gray-700 text-gray-900 dark:text-white focus:ring-2 ${
- errors[errorKey]
- ? 'focus:ring-red-500 focus:border-red-500'
- : 'focus:ring-blue-500 focus:border-blue-500'
- } placeholder-gray-400 dark:placeholder-gray-500 transition-all`;
+ const inputClasses = errorKey => `
+ w-full px-3 py-2 text-sm rounded-lg border ${
+ errors[errorKey]
+ ? 'border-red-500'
+ : 'border-gray-300 dark:border-gray-600'
+ } bg-white dark:bg-gray-800 text-gray-900 dark:text-white focus:ring-2 ${
+ errors[errorKey]
+ ? 'focus:ring-red-500 focus:border-red-500'
+ : 'focus:ring-blue-500 focus:border-blue-500'
+ } placeholder-gray-400 dark:placeholder-gray-500 transition-all
+ `;
+
+ const handleSyncMethodChange = e => {
+ e.preventDefault();
+ e.stopPropagation();
+ handleInputChange(e);
+ };
return (
+                                                        Note: Custom formats used in selected quality
+                                                        profiles are automatically imported and don't
+                                                        need to be selected here.
+
+{error}
+
-                                                        Note: Custom formats used in selected
-                                                        quality profiles are automatically imported
-                                                        and don't need to be selected here.
-
-{error}
-