From 9b1d69014aa9504ebf9a82ae32a33f4d8c859bca Mon Sep 17 00:00:00 2001 From: Sam Chau Date: Tue, 26 Nov 2024 16:14:29 +1030 Subject: [PATCH] feature: quality profile improvements (#9) - refactored backend for general data endpoints - removed ID based files - overhauled quality profile creation - qualities, tags, scores, languages, upgrades have all been added --- backend/app/__init__.py | 2 + backend/app/data/__init__.py | 131 +++ backend/app/data/utils.py | 165 ++++ backend/app/db.py | 1 + backend/app/git/operations/resolve.py | 43 +- backend/app/git/repo/clone.py | 28 +- backend/app/git/status/commit_history.py | 212 ++-- backend/app/git/status/incoming_changes.py | 364 +++++-- backend/app/git/status/merge_conflicts.py | 277 +++++- backend/app/git/status/outgoing_changes.py | 526 ++++++---- docker-compose.yml | 2 +- frontend/package-lock.json | 70 ++ frontend/package.json | 3 + frontend/src/App.jsx | 88 +- frontend/src/api/api.js | 106 +- frontend/src/api/arr.js | 15 +- frontend/src/api/data.js | 97 ++ frontend/src/assets/logo/Radarr.svg | 16 + frontend/src/assets/logo/Sonarr.svg | 9 + .../components/profile/CreateGroupModal.jsx | 170 ++++ .../src/components/profile/ProfileCard.jsx | 267 +++-- .../components/profile/ProfileGeneralTab.jsx | 194 ++++ .../profile/ProfileLangaugesTab.jsx | 119 +++ .../src/components/profile/ProfileModal.jsx | 908 ++++++++++-------- .../src/components/profile/ProfilePage.jsx | 76 +- .../profile/ProfileQualitiesTab.jsx | 560 +++++++++++ .../components/profile/ProfileScoringTab.jsx | 416 ++++++++ .../src/components/profile/QualityItem.jsx | 124 +++ .../src/components/settings/SettingsPage.jsx | 2 +- .../settings/git/StatusContainer.jsx | 2 +- .../settings/git/modal/ResolveConflicts.jsx | 316 +++--- .../settings/git/modal/ViewChanges.jsx | 34 +- .../settings/git/modal/ViewCommits.jsx | 10 +- frontend/src/components/ui/Modal.jsx | 108 ++- frontend/src/components/ui/Navbar.jsx | 2 +- frontend/src/components/ui/SortDropdown.jsx | 61 ++ 
frontend/src/components/ui/TabViewer.jsx | 63 ++ frontend/src/{utils => constants}/messages.js | 0 frontend/src/constants/qualities.js | 229 +++++ frontend/vite.config.js | 36 +- 40 files changed, 4572 insertions(+), 1280 deletions(-) create mode 100644 backend/app/data/__init__.py create mode 100644 backend/app/data/utils.py create mode 100644 frontend/src/api/data.js create mode 100644 frontend/src/assets/logo/Radarr.svg create mode 100644 frontend/src/assets/logo/Sonarr.svg create mode 100644 frontend/src/components/profile/CreateGroupModal.jsx create mode 100644 frontend/src/components/profile/ProfileGeneralTab.jsx create mode 100644 frontend/src/components/profile/ProfileLangaugesTab.jsx create mode 100644 frontend/src/components/profile/ProfileQualitiesTab.jsx create mode 100644 frontend/src/components/profile/ProfileScoringTab.jsx create mode 100644 frontend/src/components/profile/QualityItem.jsx create mode 100644 frontend/src/components/ui/SortDropdown.jsx create mode 100644 frontend/src/components/ui/TabViewer.jsx rename frontend/src/{utils => constants}/messages.js (100%) create mode 100644 frontend/src/constants/qualities.js diff --git a/backend/app/__init__.py b/backend/app/__init__.py index 2fc5418..3cd32fe 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -6,6 +6,7 @@ from .format import bp as format_bp from .profile import bp as profile_bp from .git import bp as git_bp from .arr import bp as arr_bp +from .data import bp as data_bp from .settings_utils import create_empty_settings_if_not_exists, load_settings from .db import init_db @@ -30,6 +31,7 @@ def create_app(): app.register_blueprint(format_bp) app.register_blueprint(profile_bp) app.register_blueprint(git_bp) + app.register_blueprint(data_bp) app.register_blueprint(arr_bp) # Add settings route diff --git a/backend/app/data/__init__.py b/backend/app/data/__init__.py new file mode 100644 index 0000000..4461129 --- /dev/null +++ b/backend/app/data/__init__.py @@ -0,0 +1,131 @@ 
+from flask import Blueprint, request, jsonify +import logging +import os +import yaml +from .utils import (get_category_directory, load_yaml_file, validate, + save_yaml_file, update_yaml_file, get_file_created_date, + get_file_modified_date) + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +bp = Blueprint('data', __name__, url_prefix='/data') + + +@bp.route('/', methods=['GET']) +def retrieve_all(category): + try: + directory = get_category_directory(category) + files = [f for f in os.listdir(directory) if f.endswith('.yml')] + logger.info(f"Files found: {files}") + + if not files: + return jsonify([]), 200 + + result = [] + for file_name in files: + file_path = os.path.join(directory, file_name) + logger.info(f"Processing file: {file_path}") + try: + content = load_yaml_file(file_path) + result.append({ + "file_name": + file_name, + "content": + content, + "created_date": + get_file_created_date(file_path), + "modified_date": + get_file_modified_date(file_path) + }) + except yaml.YAMLError: + result.append({ + "file_name": file_name, + "error": "Failed to parse YAML" + }) + + return jsonify(result), 200 + + except ValueError as ve: + logger.error(ve) + return jsonify({"error": str(ve)}), 400 + except FileNotFoundError as fnfe: + logger.error(fnfe) + return jsonify({"error": str(fnfe)}), 404 + except Exception as e: + logger.exception("Unexpected error occurred") + return jsonify({"error": "An unexpected error occurred"}), 500 + + +@bp.route('//', + methods=['GET', 'POST', 'PUT', 'DELETE']) +def handle_item(category, name): + try: + directory = get_category_directory(category) + file_name = f"{name}.yml" if not name.endswith('.yml') else name + file_path = os.path.join(directory, file_name) + + if request.method == 'GET': + try: + content = load_yaml_file(file_path) + return jsonify({ + "file_name": + file_name, + "content": + content, + "created_date": + get_file_created_date(file_path), + "modified_date": + get_file_modified_date(file_path) 
+ }), 200 + except FileNotFoundError: + return jsonify({"error": f"File {file_name} not found"}), 404 + except yaml.YAMLError: + return jsonify( + {"error": f"Failed to parse YAML file {file_name}"}), 500 + + elif request.method == 'DELETE': + if not os.path.exists(file_path): + return jsonify({"error": f"File {file_name} not found"}), 404 + try: + os.remove(file_path) + return jsonify( + {"message": f"Successfully deleted {file_name}"}), 200 + except OSError as e: + logger.error(f"Error deleting file {file_path}: {e}") + return jsonify({"error": f"Failed to delete {file_name}"}), 500 + + elif request.method == 'POST': + if os.path.exists(file_path): + return jsonify({"error": + f"File {file_name} already exists"}), 409 + + try: + data = request.get_json() + if validate(data, category): + save_yaml_file(file_path, data, category) + return jsonify( + {"message": f"Successfully created {file_name}"}), 201 + return jsonify({"error": "Validation failed"}), 400 + except Exception as e: + logger.error(f"Error creating file: {e}") + return jsonify({"error": str(e)}), 500 + + elif request.method == 'PUT': + if not os.path.exists(file_path): + return jsonify({"error": f"File {file_name} not found"}), 404 + + try: + data = request.get_json() + update_yaml_file(file_path, data, category) + return jsonify( + {"message": f"Successfully updated {file_name}"}), 200 + except Exception as e: + logger.error(f"Error updating file: {e}") + return jsonify({"error": str(e)}), 500 + + except ValueError as ve: + logger.error(ve) + return jsonify({"error": str(ve)}), 400 + except Exception as e: + logger.exception("Unexpected error occurred") + return jsonify({"error": "An unexpected error occurred"}), 500 diff --git a/backend/app/data/utils.py b/backend/app/data/utils.py new file mode 100644 index 0000000..e2c17c9 --- /dev/null +++ b/backend/app/data/utils.py @@ -0,0 +1,165 @@ +import os +import yaml +import shutil +import logging +from typing import Dict, Any, Tuple +from datetime import 
datetime +import git + +logger = logging.getLogger(__name__) + +# Directory constants +REPO_PATH = '/app/data/db' +REGEX_DIR = '/app/data/db/regex_patterns' +FORMAT_DIR = '/app/data/db/custom_formats' +PROFILE_DIR = '/app/data/db/profiles' + +# Expected fields for each category +REGEX_FIELDS = ["name", "pattern", "flags"] +FORMAT_FIELDS = ["name", "format", "description"] +PROFILE_FIELDS = [ + "name", + "description", + "tags", + "upgradesAllowed", + "minCustomFormatScore", + "upgradeUntilScore", + "minScoreIncrement", + "custom_formats", # Array of {name, score} objects + "qualities", # Array of strings + "upgrade_until", + "language" +] + +# Category mappings +CATEGORY_MAP = { + "custom_format": (FORMAT_DIR, FORMAT_FIELDS), + "regex_pattern": (REGEX_DIR, REGEX_FIELDS), + "profile": (PROFILE_DIR, PROFILE_FIELDS) +} + + +def _setup_yaml_quotes(): + """Configure YAML to quote string values""" + + def str_presenter(dumper, data): + return dumper.represent_scalar('tag:yaml.org,2002:str', + data, + style="'") + + yaml.add_representer(str, str_presenter) + + +def get_file_created_date(file_path: str) -> str: + """Get file creation date in ISO format""" + try: + stats = os.stat(file_path) + return datetime.fromtimestamp(stats.st_ctime).isoformat() + except Exception as e: + logger.error(f"Error getting creation date for {file_path}: {e}") + return None + + +def get_file_modified_date(file_path: str) -> str: + """Get file last modified date in ISO format""" + try: + stats = os.stat(file_path) + return datetime.fromtimestamp(stats.st_mtime).isoformat() + except Exception as e: + logger.error(f"Error getting modified date for {file_path}: {e}") + return None + + +def get_category_directory(category: str) -> str: + try: + directory, _ = CATEGORY_MAP[category] + except KeyError: + logger.error(f"Invalid category requested: {category}") + raise ValueError(f"Invalid category: {category}") + + if not os.path.exists(directory): + logger.error(f"Directory not found: {directory}") 
+ raise FileNotFoundError(f"Directory not found: {directory}") + + return directory + + +def load_yaml_file(file_path: str) -> Dict[str, Any]: + if not os.path.exists(file_path): + logger.error(f"File not found: {file_path}") + raise FileNotFoundError(f"File not found: {file_path}") + + try: + with open(file_path, 'r') as f: + content = yaml.safe_load(f) + return content + except yaml.YAMLError as e: + logger.error(f"Error parsing YAML file {file_path}: {e}") + raise + except Exception as e: + logger.error(f"Unexpected error reading file {file_path}: {e}") + raise + + +def validate(data: Dict[str, Any], category: str) -> bool: + if not isinstance(data, dict): + return False + + _, fields = CATEGORY_MAP[category] + return all(field in data for field in fields) + + +def save_yaml_file(file_path: str, data: Dict[str, Any], + category: str) -> None: + if not validate(data, category): + raise ValueError("Invalid data format") + + _, fields = CATEGORY_MAP[category] + ordered_data = {field: data[field] for field in fields} + + _setup_yaml_quotes() # Configure YAML for quoted strings + + with open(file_path, 'w') as f: + yaml.safe_dump(ordered_data, f, sort_keys=False) + + +def update_yaml_file(file_path: str, data: Dict[str, Any], + category: str) -> None: + try: + # Check if this is a rename operation + if 'rename' in data: + new_name = data['rename'] + directory = os.path.dirname(file_path) + new_file_path = os.path.join(directory, f"{new_name}.yml") + + # Remove rename field before saving + data_to_save = {k: v for k, v in data.items() if k != 'rename'} + + # First save the updated content to the current file + save_yaml_file(file_path, data_to_save, category) + + # Then use git mv for the rename + repo = git.Repo(REPO_PATH) + # Convert to relative paths for git + rel_old_path = os.path.relpath(file_path, REPO_PATH) + rel_new_path = os.path.relpath(new_file_path, REPO_PATH) + + try: + repo.git.mv(rel_old_path, rel_new_path) + except git.GitCommandError as e: + 
logger.error(f"Git mv failed: {e}") + raise Exception("Failed to rename file using git mv") + + else: + # Normal update without rename + backup_path = f"{file_path}.bak" + shutil.copy2(file_path, backup_path) + try: + save_yaml_file(file_path, data, category) + os.remove(backup_path) + except Exception as e: + shutil.move(backup_path, file_path) + raise + + except Exception as e: + raise diff --git a/backend/app/db.py b/backend/app/db.py index d777633..5b15f25 100644 --- a/backend/app/db.py +++ b/backend/app/db.py @@ -1,3 +1,4 @@ +# db.py import sqlite3 import os diff --git a/backend/app/git/operations/resolve.py b/backend/app/git/operations/resolve.py index 30bb185..b993e34 100644 --- a/backend/app/git/operations/resolve.py +++ b/backend/app/git/operations/resolve.py @@ -1,15 +1,24 @@ -# git/operations/resolve.py - import yaml from git import GitCommandError import logging from typing import Dict, Any import os from copy import deepcopy +from ...data.utils import CATEGORY_MAP logger = logging.getLogger(__name__) +def determine_type(file_path): + if 'regex_patterns' in file_path: + return 'Regex Pattern' + elif 'custom_formats' in file_path: + return 'Custom Format' + elif 'profiles' in file_path: + return 'Quality Profile' + return 'Unknown' + + def get_version_data(repo, ref, file_path): """Get YAML data from a specific version of a file.""" try: @@ -158,20 +167,16 @@ def resolve_conflicts( # Handle each resolution field for field, choice in field_resolutions.items(): if field.startswith('custom_format_'): - try: - cf_id = int(field.split('_')[-1]) - except ValueError: - raise Exception( - f"Invalid custom_format ID in field: {field}") + format_name = field[len('custom_format_'):] ours_cf = next( (item for item in ours_data.get('custom_formats', []) - if item['id'] == cf_id), None) + if item['name'] == format_name), None) theirs_cf = next( (item for item in theirs_data.get('custom_formats', []) - if item['id'] == cf_id), None) + if item['name'] == format_name), 
None) if choice == 'local' and ours_cf: resolved_cf = ours_cf @@ -183,13 +188,13 @@ def resolve_conflicts( discarded_values[field] = ours_cf else: raise Exception( - f"Invalid choice or missing custom_format ID {cf_id}" + f"Invalid choice or missing custom format {format_name}" ) resolved_cf_list = resolved_data.get( 'custom_formats', []) for idx, item in enumerate(resolved_cf_list): - if item['id'] == cf_id: + if item['name'] == format_name: resolved_cf_list[idx] = resolved_cf break else: @@ -241,6 +246,22 @@ def resolve_conflicts( raise Exception( f"Invalid choice for field: {field}") + # Get file type and apply appropriate field ordering + file_type = determine_type(file_path) + if file_type == 'Quality Profile': + _, fields = CATEGORY_MAP['profile'] + elif file_type == 'Custom Format': + _, fields = CATEGORY_MAP['custom_format'] + elif file_type == 'Regex Pattern': + _, fields = CATEGORY_MAP['regex_pattern'] + + # Order the fields according to the category's field order + ordered_data = { + field: resolved_data.get(field) + for field in fields if field in resolved_data + } + resolved_data = ordered_data + # Write resolved version full_path = os.path.join(repo.working_dir, file_path) with open(full_path, 'w') as f: diff --git a/backend/app/git/repo/clone.py b/backend/app/git/repo/clone.py index a50b9a9..22df88a 100644 --- a/backend/app/git/repo/clone.py +++ b/backend/app/git/repo/clone.py @@ -53,31 +53,39 @@ def clone_repository(repo_url, repo_path): logger.info(f"Creating missing folder: {folder_name}") os.makedirs(folder_path) - cloned_files = [ - f for f in os.listdir(folder_path) if f.endswith('.yml') - ] - cloned_ids = set(int(f.split('.')[0]) for f in cloned_files) + # Get existing files from cloned repo + cloned_files = set( + f.replace('.yml', '') for f in os.listdir(folder_path) + if f.endswith('.yml')) if os.path.exists(backup_folder_path): local_files = [ f for f in os.listdir(backup_folder_path) if f.endswith('.yml') ] + for file_name in local_files: 
old_file_path = os.path.join(backup_folder_path, file_name) with open(old_file_path, 'r') as file: data = yaml.safe_load(file) - while data['id'] in cloned_ids: - data['id'] += 1 + # Use name as the identifier + base_name = data['name'] + new_name = base_name + counter = 1 - cloned_ids.add(data['id']) + # If name exists, append a number + while new_name in cloned_files: + new_name = f"{base_name} ({counter})" + counter += 1 - new_file_name = f"{data['id']}_{data['name'].replace(' ', '_').lower()}.yml" - new_file_path = os.path.join(folder_path, new_file_name) + cloned_files.add(new_name) + + new_file_path = os.path.join(folder_path, + f"{new_name}.yml") with open(new_file_path, 'w') as file: yaml.dump(data, file) - logger.info(f"Merged local file: {new_file_name}") + logger.info(f"Merged local file: {new_name}.yml") if os.path.exists(backup_dir): logger.info(f"Removing backup directory: {backup_dir}") diff --git a/backend/app/git/status/commit_history.py b/backend/app/git/status/commit_history.py index ac7fdbb..9d37a54 100644 --- a/backend/app/git/status/commit_history.py +++ b/backend/app/git/status/commit_history.py @@ -7,9 +7,68 @@ import logging logger = logging.getLogger(__name__) +def format_commit(commit, repo, tracking_branch=None): + """Helper function to format a single commit's information""" + # Check if it's a merge commit + is_merge = len(commit.parents) > 1 + + # Get the remote URL for the commit if possible + remote_url = None + if tracking_branch: + remote_url = repo.remote().url + if remote_url.endswith('.git'): + remote_url = remote_url[:-4] + remote_url += f"/commit/{commit.hexsha}" + + commit_info = { + 'hash': commit.hexsha, + 'message': commit.message.strip(), + 'author': f"{commit.author.name} <{commit.author.email}>", + 'date': commit.committed_datetime.isoformat(), + 'isMerge': is_merge, + 'remoteUrl': remote_url, + 'details': { + 'files_changed': [], + 'insertions': 0, + 'deletions': 0 + } + } + + # Get detailed stats + try: + if 
len(commit.parents) > 0: + # Get the diff between this commit and its first parent + diff = commit.parents[0].diff(commit) + + # Initialize stats + stats = {'files_changed': [], 'insertions': 0, 'deletions': 0} + + # Get the total diff stats using git diff --numstat + raw_stats = repo.git.diff(commit.parents[0].hexsha, + commit.hexsha, + numstat=True).splitlines() + + for line in raw_stats: + if not line.strip(): + continue + adds, dels, file_path = line.split('\t') + # Handle binary files which show up as '-' in numstat + if adds != '-' and dels != '-': + stats['insertions'] += int(adds) + stats['deletions'] += int(dels) + stats['files_changed'].append(file_path) + + commit_info['details'] = stats + + except Exception as e: + logger.debug(f"Error getting commit details: {e}") + + return commit_info + + def get_git_commit_history(repo_path, branch=None): """ - Get the commit history for the repository, optionally for a specific branch. + Get both local and remote commit history for the repository. 
Args: repo_path (str): Path to the git repository @@ -19,8 +78,11 @@ def get_git_commit_history(repo_path, branch=None): tuple: (success: bool, result: dict/str) On success, returns (True, { 'local_commits': [...], + 'remote_commits': [...], 'ahead_count': int, - 'behind_count': int + 'behind_count': int, + 'branch': str, + 'has_remote': bool }) On failure, returns (False, error_message) """ @@ -36,113 +98,61 @@ def get_git_commit_history(repo_path, branch=None): except Exception as e: logger.debug(f"No tracking branch found: {e}") - # Get local commits - commits = [] - try: - # If we have a tracking branch, get commits since the divergence point - if tracking_branch: + local_commits = [] + remote_commits = [] + ahead_count = 0 + behind_count = 0 + + if tracking_branch: + try: + # Find the merge base (common ancestor) merge_base = repo.merge_base(tracking_branch, current_branch)[0] - commits = list( - repo.iter_commits( - f"{merge_base.hexsha}..{current_branch.name}")) - else: - # If no tracking branch, get recent commits (last 50) - commits = list( - repo.iter_commits(current_branch.name, max_count=50)) - # Format commit information - formatted_commits = [] - for commit in commits: - # Check if it's a merge commit - is_merge = len(commit.parents) > 1 + # Get commits that are in local but not in remote (ahead) + local_commits = [ + format_commit(commit, repo, tracking_branch) + for commit in repo.iter_commits( + f"{tracking_branch.name}..{current_branch.name}") + ] + ahead_count = len(local_commits) - # Get the remote URL for the commit if possible - remote_url = None - if tracking_branch: - remote_url = repo.remote().url - if remote_url.endswith('.git'): - remote_url = remote_url[:-4] - remote_url += f"/commit/{commit.hexsha}" + # Get commits that are in remote but not in local (behind) + remote_commits = [ + format_commit(commit, repo, tracking_branch) + for commit in repo.iter_commits( + f"{current_branch.name}..{tracking_branch.name}") + ] + behind_count = 
len(remote_commits) - commit_info = { - 'hash': commit.hexsha, - 'message': commit.message.strip(), - 'author': f"{commit.author.name} <{commit.author.email}>", - 'date': commit.committed_datetime.isoformat(), - 'isMerge': is_merge, - 'remoteUrl': remote_url, - 'details': { - 'files_changed': [], - 'insertions': 0, - 'deletions': 0 - } - } + # If no divergence, get recent commits from current branch + if not local_commits and not remote_commits: + local_commits = [ + format_commit(commit, repo, tracking_branch) + for commit in repo.iter_commits(current_branch.name, + max_count=50) + ] - # Get detailed stats - try: - if len(commit.parents) > 0: - # Get the diff between this commit and its first parent - diff = commit.parents[0].diff(commit) + except git.GitCommandError as e: + logger.error(f"Git command error while getting commits: {e}") + return False, f"Error getting commits: {str(e)}" - # Initialize stats - stats = { - 'files_changed': [], - 'insertions': 0, - 'deletions': 0 - } + else: + # If no tracking branch, just get recent local commits + local_commits = [ + format_commit(commit, repo) + for commit in repo.iter_commits(current_branch.name, + max_count=50) + ] - # Get the total diff stats using git diff --numstat - raw_stats = repo.git.diff(commit.parents[0].hexsha, - commit.hexsha, - numstat=True).splitlines() - - for line in raw_stats: - if not line.strip(): - continue - adds, dels, file_path = line.split('\t') - # Handle binary files which show up as '-' in numstat - if adds != '-' and dels != '-': - stats['insertions'] += int(adds) - stats['deletions'] += int(dels) - stats['files_changed'].append(file_path) - - commit_info['details'] = stats - - except Exception as e: - logger.debug(f"Error getting commit details: {e}") - commit_info['details'] = { - 'files_changed': [], - 'insertions': 0, - 'deletions': 0 - } - - formatted_commits.append(commit_info) - - # Get ahead/behind counts - ahead_count = 0 - behind_count = 0 - if tracking_branch: - ahead_count = 
len( - list( - repo.iter_commits( - f"{tracking_branch.name}..{current_branch.name}"))) - behind_count = len( - list( - repo.iter_commits( - f"{current_branch.name}..{tracking_branch.name}"))) - - return True, { - 'local_commits': formatted_commits, - 'ahead_count': ahead_count, - 'behind_count': behind_count, - 'branch': branch_to_check, - 'has_remote': tracking_branch is not None - } - - except git.GitCommandError as e: - logger.error(f"Git command error while getting commits: {e}") - return False, f"Error getting commits: {str(e)}" + return True, { + 'local_commits': local_commits, + 'remote_commits': remote_commits, + 'ahead_count': ahead_count, + 'behind_count': behind_count, + 'branch': branch_to_check, + 'has_remote': tracking_branch is not None + } except Exception as e: logger.exception("Error getting commit history") diff --git a/backend/app/git/status/incoming_changes.py b/backend/app/git/status/incoming_changes.py index 4325b49..d937df0 100644 --- a/backend/app/git/status/incoming_changes.py +++ b/backend/app/git/status/incoming_changes.py @@ -9,27 +9,20 @@ logger = logging.getLogger(__name__) def check_merge_conflict(repo, branch, file_path): - """ - Checks if an incoming change will conflict with local changes. - Returns True if there would be a merge conflict, False otherwise. - """ + """Checks if an incoming change will conflict with local changes.""" try: - # Check for both uncommitted and committed changes has_changes = False - # 1. Check uncommitted changes + # Check uncommitted changes status = repo.git.status('--porcelain', file_path).strip() if status: status_code = status[:2] if len(status) >= 2 else '' has_changes = 'M' in status_code or 'A' in status_code or 'D' in status_code - # 2. 
Check committed changes not in remote + # Check committed changes not in remote try: - # Get the merge-base (common ancestor) of local and remote merge_base = repo.git.merge_base('HEAD', f'origin/{branch}').strip() - - # Check if there are any commits affecting this file between merge-base and HEAD committed_changes = repo.git.log(f'{merge_base}..HEAD', '--', file_path, @@ -40,25 +33,22 @@ def check_merge_conflict(repo, branch, file_path): if has_changes: try: - # Use correct merge-tree syntax merge_test = repo.git.merge_tree('--write-tree', 'HEAD', f'origin/{branch}') - - # Check if this specific file has conflicts in the merge result return any( line.startswith('<<<<<<< ') for line in merge_test.splitlines() if file_path in line) except GitCommandError as e: logger.warning( f"Merge tree test failed, assuming conflict: {str(e)}") - return True # If merge-tree fails, assume there's a conflict + return True return False except Exception as e: logger.error( f"Error checking merge conflict for {file_path}: {str(e)}") - return False # Default to no conflict if we can't determine + return False def get_file_data(repo, file_path, ref): @@ -75,7 +65,6 @@ def get_incoming_changes(repo, branch): incoming_changes = [] try: - # Get changed files between local and remote diff_index = repo.git.diff(f'HEAD...origin/{branch}', '--name-only').split('\n') untracked = repo.git.ls_files('--others', @@ -90,17 +79,14 @@ def get_incoming_changes(repo, branch): continue try: - # Get both versions of the file local_data = get_file_data(repo, file_path, 'HEAD') remote_data = get_file_data(repo, file_path, f'origin/{branch}') if local_data == remote_data: continue - # Check for potential merge conflicts will_conflict = check_merge_conflict(repo, branch, file_path) - # Get commit message try: raw_commit_message = repo.git.show(f'HEAD...origin/{branch}', '--format=%B', '-s', '--', @@ -119,18 +105,17 @@ def get_incoming_changes(repo, branch): status = 'New' local_name = remote_data.get('name') 
incoming_name = None - changes = [{ - 'key': key, - 'change': 'added', - 'value': value - } for key, value in remote_data.items()] + changes = [{'key': 'File', 'change': 'added'}] else: status = 'Modified' local_name = local_data.get( 'name') if local_data else os.path.basename(file_path) incoming_name = remote_data.get( 'name') if remote_data else None - changes = compare_data(local_data, remote_data) + if file_path.startswith('profiles/'): + changes = compare_quality_profile(local_data, remote_data) + else: + changes = compare_generic(local_data, remote_data) if not changes: continue @@ -150,8 +135,7 @@ def get_incoming_changes(repo, branch): 'status': status, 'type': file_type, 'changes': changes, - 'will_conflict': - will_conflict # Added conflict status per file + 'will_conflict': will_conflict }) except Exception as e: @@ -163,14 +147,252 @@ def get_incoming_changes(repo, branch): return incoming_changes -def compare_data(local_data, remote_data): +def compare_quality_profile(local_data, remote_data): + """Compare quality profile specific changes""" if local_data is None and remote_data is not None: - # File is entirely new - return [{'key': 'file', 'change': 'added'}] + return [{'key': 'File', 'change': 'added'}] if local_data is not None and remote_data is None: - # File has been deleted - return [{'key': 'file', 'change': 'deleted'}] + return [{'key': 'File', 'change': 'deleted'}] + + changes = [] + + # Simple fields with consistent capitalization + simple_fields = { + 'name': 'Name', + 'description': 'Description', + 'language': 'Language', + 'minCustomFormatScore': 'Minimum Custom Format Score', + 'minScoreIncrement': 'Minimum Score Increment', + 'upgradeUntilScore': 'Upgrade Until Score', + 'upgradesAllowed': 'Upgrades Allowed' + } + + for field, display_name in simple_fields.items(): + local_value = local_data.get(field) + remote_value = remote_data.get(field) + if local_value != remote_value: + changes.append({ + 'key': display_name, + 'change': 
'modified', + 'from': local_value, + 'to': remote_value + }) + + # Compare qualities + if local_data.get('qualities') != remote_data.get('qualities'): + changes.extend( + compare_qualities(local_data.get('qualities', []), + remote_data.get('qualities', []))) + + # Compare profile-specific custom formats + if local_data.get('custom_formats') != remote_data.get('custom_formats'): + changes.extend( + compare_profile_formats(local_data.get('custom_formats', []), + remote_data.get('custom_formats', []))) + + # Compare tags + if local_data.get('tags') != remote_data.get('tags'): + changes.extend( + compare_tags(local_data.get('tags', []), + remote_data.get('tags', []))) + + # Compare upgrade_until + if local_data.get('upgrade_until') != remote_data.get('upgrade_until'): + changes.extend( + compare_upgrade_until(local_data.get('upgrade_until', {}), + remote_data.get('upgrade_until', {}))) + + return changes + + +def compare_qualities(local_qualities, remote_qualities): + """Compare quality groups and their qualities""" + if not local_qualities and not remote_qualities: + return [] + + changes = [] + + # Create lookup dictionaries + local_dict = {quality.get('name'): quality for quality in local_qualities} + remote_dict = { + quality.get('name'): quality + for quality in remote_qualities + } + + # Find added/removed qualities + local_names = set(local_dict.keys()) + remote_names = set(remote_dict.keys()) + + # Track additions + for name in (remote_names - local_names): + changes.append({ + 'key': 'Quality Group', + 'change': 'added', + 'value': name + }) + + # Track removals + for name in (local_names - remote_names): + changes.append({ + 'key': 'Quality Group', + 'change': 'removed', + 'value': name + }) + + # Compare common qualities + for name in (local_names & remote_names): + local_quality = local_dict[name] + remote_quality = remote_dict[name] + + # Compare description + if local_quality.get('description') != remote_quality.get( + 'description'): + changes.append({ + 
'key': f'Quality Group: {name}: Description', + 'change': 'modified', + 'from': local_quality.get('description'), + 'to': remote_quality.get('description') + }) + + # Compare nested qualities + local_nested = { + q.get('name'): q + for q in local_quality.get('qualities', []) + } + remote_nested = { + q.get('name'): q + for q in remote_quality.get('qualities', []) + } + + nested_local = set(local_nested.keys()) + nested_remote = set(remote_nested.keys()) + + for nested_name in (nested_remote - nested_local): + changes.append({ + 'key': f'Quality Group: {name}: Quality', + 'change': 'added', + 'value': nested_name + }) + + for nested_name in (nested_local - nested_remote): + changes.append({ + 'key': f'Quality Group: {name}: Quality', + 'change': 'removed', + 'value': nested_name + }) + + return changes + + +def compare_profile_formats(local_formats, remote_formats): + """Compare custom formats within a quality profile""" + if not local_formats and not remote_formats: + return [] + + changes = [] + + # Create lookup dictionaries + local_dict = {fmt.get('name'): fmt.get('score') for fmt in local_formats} + remote_dict = {fmt.get('name'): fmt.get('score') for fmt in remote_formats} + + local_names = set(local_dict.keys()) + remote_names = set(remote_dict.keys()) + + # Track additions + for name in (remote_names - local_names): + changes.append({ + 'key': 'Custom Format', + 'change': 'added', + 'value': { + 'name': name, + 'score': remote_dict[name] + } + }) + + # Track removals + for name in (local_names - remote_names): + changes.append({ + 'key': 'Custom Format', + 'change': 'removed', + 'value': { + 'name': name, + 'score': local_dict[name] + } + }) + + # Compare scores for existing formats + for name in (local_names & remote_names): + if local_dict[name] != remote_dict[name]: + changes.append({ + 'key': f'Custom Format: {name}: Score', + 'change': 'modified', + 'from': local_dict[name], + 'to': remote_dict[name] + }) + + return changes + + +def 
compare_tags(local_tags, remote_tags): + """Compare tag lists""" + local_set = set(local_tags or []) + remote_set = set(remote_tags or []) + + changes = [] + + if added := (remote_set - local_set): + changes.append({ + 'key': 'Tags', + 'change': 'added', + 'value': sorted(list(added)) + }) + + if removed := (local_set - remote_set): + changes.append({ + 'key': 'Tags', + 'change': 'removed', + 'value': sorted(list(removed)) + }) + + return changes + + +def compare_upgrade_until(local_upgrade, remote_upgrade): + """Compare upgrade_until objects""" + if not local_upgrade and not remote_upgrade: + return [] + + changes = [] + + # Compare name + if local_upgrade.get('name') != remote_upgrade.get('name'): + changes.append({ + 'key': 'Upgrade Until: Name', + 'change': 'modified', + 'from': local_upgrade.get('name'), + 'to': remote_upgrade.get('name') + }) + + # Compare description + if local_upgrade.get('description') != remote_upgrade.get('description'): + changes.append({ + 'key': 'Upgrade Until: Description', + 'change': 'modified', + 'from': local_upgrade.get('description'), + 'to': remote_upgrade.get('description') + }) + + return changes + + +def compare_generic(local_data, remote_data): + """Process changes for non-profile files""" + if local_data is None and remote_data is not None: + return [{'key': 'File', 'change': 'added'}] + + if local_data is not None and remote_data is None: + return [{'key': 'File', 'change': 'deleted'}] changes = [] all_keys = set(local_data.keys()).union(set(remote_data.keys())) @@ -180,77 +402,11 @@ def compare_data(local_data, remote_data): remote_value = remote_data.get(key) if local_value != remote_value: - if key == 'tags': - changes.extend(compare_tags(local_value, remote_value)) - elif key == 'custom_formats': - changes.extend( - compare_custom_formats(local_value, remote_value)) - else: - changes.append({ - 'key': key, - 'change': 'modified', - 'from': local_value, - 'to': remote_value - }) - - return changes - - -def 
compare_tags(local_tags, remote_tags): - local_tags = set(local_tags or []) - remote_tags = set(remote_tags or []) - - added = remote_tags - local_tags - removed = local_tags - remote_tags - - changes = [] - if added: - changes.append({ - 'key': 'tags', - 'change': 'added', - 'value': list(added) - }) - if removed: - changes.append({ - 'key': 'tags', - 'change': 'removed', - 'value': list(removed) - }) - - return changes - - -def compare_custom_formats(local_cfs, remote_cfs): - local_cfs = {cf['id']: cf for cf in local_cfs or []} - remote_cfs = {cf['id']: cf for cf in remote_cfs or []} - - all_ids = set(local_cfs.keys()).union(set(remote_cfs.keys())) - changes = [] - - for cf_id in all_ids: - local_cf = local_cfs.get(cf_id) - remote_cf = remote_cfs.get(cf_id) - - if local_cf != remote_cf: - if local_cf and remote_cf: - if local_cf['score'] != remote_cf['score']: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 'modified', - 'from': local_cf['score'], - 'to': remote_cf['score'] - }) - elif local_cf and not remote_cf: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 'removed', - 'value': local_cf['score'] - }) - elif not local_cf and remote_cf: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 'added', - 'value': remote_cf['score'] - }) + changes.append({ + 'key': key.title(), # Capitalize generic keys + 'change': 'modified', + 'from': local_value, + 'to': remote_value + }) return changes diff --git a/backend/app/git/status/merge_conflicts.py b/backend/app/git/status/merge_conflicts.py index f74c7a8..4b0c476 100644 --- a/backend/app/git/status/merge_conflicts.py +++ b/backend/app/git/status/merge_conflicts.py @@ -99,7 +99,7 @@ def process_modify_delete_conflict(repo, file_path, deleted_in_head): conflict_details = { 'conflicting_parameters': [{ 'parameter': - 'file', + 'File', 'local_value': 'deleted' if deleted_in_head else existing_data, 'incoming_value': @@ -142,26 +142,16 @@ def process_conflict_file(repo, 
file_path): conflict_details = {'conflicting_parameters': []} - # Find conflicting fields - for key in set(ours_data.keys()) | set(theirs_data.keys()): - if key == 'date_modified': - continue - - ours_value = ours_data.get(key) - theirs_value = theirs_data.get(key) - - if ours_value != theirs_value: - logger.debug( - f"Found conflict in {key} - Local: {ours_value}, Incoming: {theirs_value}" - ) - conflict_details['conflicting_parameters'].append({ - 'parameter': - key, - 'local_value': - ours_value, - 'incoming_value': - theirs_value - }) + # Process based on file type + if file_path.startswith('profiles/'): + detailed_conflicts = compare_quality_profile( + ours_data, theirs_data) + conflict_details['conflicting_parameters'].extend( + detailed_conflicts) + else: + detailed_conflicts = compare_generic(ours_data, theirs_data) + conflict_details['conflicting_parameters'].extend( + detailed_conflicts) # Check if file still has unmerged status status_output = repo.git.status('--porcelain', file_path) @@ -185,6 +175,251 @@ def process_conflict_file(repo, file_path): return None +def compare_quality_profile(ours_data, theirs_data): + """Compare quality profile fields for conflicts""" + conflicts = [] + + # Simple fields with consistent capitalization + simple_fields = { + 'name': 'Name', + 'description': 'Description', + 'language': 'Language', + 'minCustomFormatScore': 'Minimum Custom Format Score', + 'minScoreIncrement': 'Minimum Score Increment', + 'upgradeUntilScore': 'Upgrade Until Score', + 'upgradesAllowed': 'Upgrades Allowed' + } + + for field, display_name in simple_fields.items(): + ours_value = ours_data.get(field) + theirs_value = theirs_data.get(field) + if ours_value != theirs_value: + conflicts.append({ + 'parameter': display_name, + 'local_value': ours_value, + 'incoming_value': theirs_value + }) + + # Compare qualities + ours_qualities = ours_data.get('qualities', []) + theirs_qualities = theirs_data.get('qualities', []) + if ours_qualities != 
theirs_qualities: + conflicts.extend(compare_qualities(ours_qualities, theirs_qualities)) + + # Compare custom formats + ours_formats = ours_data.get('custom_formats', []) + theirs_formats = theirs_data.get('custom_formats', []) + if ours_formats != theirs_formats: + conflicts.extend(compare_custom_formats(ours_formats, theirs_formats)) + + # Compare tags + ours_tags = ours_data.get('tags', []) + theirs_tags = theirs_data.get('tags', []) + if ours_tags != theirs_tags: + conflicts.extend(compare_tags(ours_tags, theirs_tags)) + + # Compare upgrade_until + ours_upgrade = ours_data.get('upgrade_until', {}) + theirs_upgrade = theirs_data.get('upgrade_until', {}) + if ours_upgrade != theirs_upgrade: + conflicts.extend(compare_upgrade_until(ours_upgrade, theirs_upgrade)) + + return conflicts + + +def compare_qualities(ours_qualities, theirs_qualities): + """Compare quality groups for conflicts""" + conflicts = [] + + # Create lookup dictionaries + ours_dict = {quality.get('name'): quality for quality in ours_qualities} + theirs_dict = { + quality.get('name'): quality + for quality in theirs_qualities + } + + # Find added/removed qualities + ours_names = set(ours_dict.keys()) + theirs_names = set(theirs_dict.keys()) + + # Track additions + for name in (theirs_names - ours_names): + conflicts.append({ + 'parameter': 'Quality Group', + 'local_value': None, + 'incoming_value': name + }) + + # Track removals + for name in (ours_names - theirs_names): + conflicts.append({ + 'parameter': 'Quality Group', + 'local_value': name, + 'incoming_value': None + }) + + # Compare common qualities + for name in (ours_names & theirs_names): + ours_quality = ours_dict[name] + theirs_quality = theirs_dict[name] + + # Compare description + if ours_quality.get('description') != theirs_quality.get( + 'description'): + conflicts.append({ + 'parameter': + f'Quality Group: {name}: Description', + 'local_value': + ours_quality.get('description'), + 'incoming_value': + 
theirs_quality.get('description') + }) + + # Compare nested qualities + ours_nested = { + q.get('name'): q + for q in ours_quality.get('qualities', []) + } + theirs_nested = { + q.get('name'): q + for q in theirs_quality.get('qualities', []) + } + + nested_ours = set(ours_nested.keys()) + nested_theirs = set(theirs_nested.keys()) + + for nested_name in (nested_theirs - nested_ours): + conflicts.append({ + 'parameter': f'Quality Group: {name}: Quality', + 'local_value': None, + 'incoming_value': nested_name + }) + + for nested_name in (nested_ours - nested_theirs): + conflicts.append({ + 'parameter': f'Quality Group: {name}: Quality', + 'local_value': nested_name, + 'incoming_value': None + }) + + return conflicts + + +def compare_custom_formats(ours_formats, theirs_formats): + """Compare custom formats for conflicts""" + conflicts = [] + + # Create lookup dictionaries + ours_dict = {fmt.get('name'): fmt.get('score') for fmt in ours_formats} + theirs_dict = {fmt.get('name'): fmt.get('score') for fmt in theirs_formats} + + ours_names = set(ours_dict.keys()) + theirs_names = set(theirs_dict.keys()) + + # Track additions + for name in (theirs_names - ours_names): + conflicts.append({ + 'parameter': 'Custom Format', + 'local_value': None, + 'incoming_value': { + 'name': name, + 'score': theirs_dict[name] + } + }) + + # Track removals + for name in (ours_names - theirs_names): + conflicts.append({ + 'parameter': 'Custom Format', + 'local_value': { + 'name': name, + 'score': ours_dict[name] + }, + 'incoming_value': None + }) + + # Compare scores for existing formats + for name in (ours_names & theirs_names): + if ours_dict[name] != theirs_dict[name]: + conflicts.append({ + 'parameter': f'Custom Format: {name}: Score', + 'local_value': ours_dict[name], + 'incoming_value': theirs_dict[name] + }) + + return conflicts + + +def compare_tags(ours_tags, theirs_tags): + """Compare tags for conflicts""" + conflicts = [] + ours_set = set(ours_tags or []) + theirs_set = 
set(theirs_tags or []) + + if added := (theirs_set - ours_set): + for tag in sorted(added): + conflicts.append({ + 'parameter': f'Tags: {tag}', + 'local_value': False, + 'incoming_value': True + }) + + if removed := (ours_set - theirs_set): + for tag in sorted(removed): + conflicts.append({ + 'parameter': f'Tags: {tag}', + 'local_value': True, + 'incoming_value': False + }) + + return conflicts + + +def compare_upgrade_until(ours_upgrade, theirs_upgrade): + """Compare upgrade_until objects for conflicts""" + conflicts = [] + + # Compare name + if ours_upgrade.get('name') != theirs_upgrade.get('name'): + conflicts.append({ + 'parameter': 'Upgrade Until: Name', + 'local_value': ours_upgrade.get('name'), + 'incoming_value': theirs_upgrade.get('name') + }) + + # Compare description + if ours_upgrade.get('description') != theirs_upgrade.get('description'): + conflicts.append({ + 'parameter': 'Upgrade Until: Description', + 'local_value': ours_upgrade.get('description'), + 'incoming_value': theirs_upgrade.get('description') + }) + + return conflicts + + +def compare_generic(ours_data, theirs_data): + """Compare generic files for conflicts""" + conflicts = [] + all_keys = set(ours_data.keys()).union(set(theirs_data.keys())) + + for key in all_keys: + if key == 'date_modified': + continue + + ours_value = ours_data.get(key) + theirs_value = theirs_data.get(key) + + if ours_value != theirs_value: + conflicts.append({ + 'parameter': key.title(), + 'local_value': ours_value, + 'incoming_value': theirs_value + }) + + return conflicts + + def get_version_data(repo, ref, file_path): """Get YAML data from a specific version of a file.""" try: diff --git a/backend/app/git/status/outgoing_changes.py b/backend/app/git/status/outgoing_changes.py index 0afe94b..82c1955 100644 --- a/backend/app/git/status/outgoing_changes.py +++ b/backend/app/git/status/outgoing_changes.py @@ -1,5 +1,3 @@ -# git/status/outgoing_changes.py - import os import yaml import logging @@ -10,58 +8,123 @@ 
logger = logging.getLogger(__name__) def get_outgoing_changes(repo): + """Get list of changes in working directory""" + # Use --porcelain=1 format for consistent output status = repo.git.status('--porcelain', '-z').split('\0') logger.debug(f"Raw porcelain status: {status}") changes = [] - for item in status: + i = 0 + while i < len(status): + item = status[i] if not item: + i += 1 continue logger.debug(f"Processing status item: {item}") if len(item) < 4: logger.warning(f"Unexpected status item format: {item}") + i += 1 continue - x, y, file_path = item[0], item[1], item[3:] + x, y = item[0], item[1] + file_path = item[3:] logger.debug(f"Parsed status: x={x}, y={y}, file_path={file_path}") # Skip files in conflict state if x == 'U' or y == 'U': + i += 1 continue is_staged = x != ' ' and x != '?' - is_deleted = x == 'D' or y == 'D' - if is_deleted: - changes.append(process_deleted_file(repo, file_path, is_staged)) - else: - changes.append( - process_modified_file(repo, file_path, x, y, is_staged)) + try: + # Handle different file statuses + if x == 'R': + if i + 1 < len(status) and status[i + 1]: + old_path = status[i + 1] + changes.append(handle_rename(repo, old_path, file_path)) + i += 2 # Skip the old filename entry + continue + elif x == 'D' or y == 'D': + changes.append(handle_delete(repo, file_path, is_staged)) + else: + changes.append(handle_modification(repo, file_path, is_staged)) + except Exception as e: + logger.error(f"Error processing change for {file_path}: {str(e)}") + + i += 1 - logger.debug(f"Final changes: {changes}") return changes -def process_deleted_file(repo, file_path, is_staged): +def handle_rename(repo, old_path, new_path): + """Handle a renamed file""" + try: + # Get old content for name + old_content = repo.git.show(f'HEAD:{old_path}') + old_data = yaml.safe_load(old_content) + prior_name = old_data.get('name') if old_data else None + + # Get new content for name + with open(os.path.join(repo.working_dir, new_path), 'r') as f: + new_data = 
yaml.safe_load(f.read()) + current_name = new_data.get('name') if new_data else None + except Exception as e: + logger.warning( + f"Could not get content for renamed file, using file names: {str(e)}" + ) + prior_name = os.path.basename(old_path) + current_name = os.path.basename(new_path) + + # Only set outgoing_name if it changed + outgoing_name = current_name if current_name != prior_name else None + + return { + 'name': + current_name or os.path.basename(new_path), + 'prior_name': + prior_name, + 'outgoing_name': + outgoing_name, + 'type': + determine_type(new_path), + 'status': + 'Renamed', + 'file_path': + new_path, + 'old_file_path': + old_path, + 'staged': + False, # Renames are always unstaged in the working directory + 'modified': + True, + 'deleted': + False, + 'changes': [{ + 'key': 'File', + 'change': 'renamed', + 'from': old_path, + 'to': new_path + }] + } + + +def handle_delete(repo, file_path, is_staged): + """Handle a deleted file""" try: file_content = repo.git.show(f'HEAD:{file_path}') yaml_content = yaml.safe_load(file_content) - original_name = yaml_content.get('name', 'Unknown') - original_id = yaml_content.get('id', '') + original_name = yaml_content.get('name', os.path.basename(file_path)) except Exception as e: - logger.warning( - f"Could not retrieve original content for deleted file {file_path}: {str(e)}" - ) - original_name = "Unknown" - original_id = "" + logger.warning(f"Could not get content for deleted file: {str(e)}") + original_name = os.path.basename(file_path) return { 'name': original_name, 'prior_name': original_name, - 'outgoing_name': None, - 'id': original_id, + 'outgoing_name': None, # Deleted files have no outgoing name 'type': determine_type(file_path), 'status': 'Deleted', 'file_path': file_path, @@ -69,56 +132,298 @@ def process_deleted_file(repo, file_path, is_staged): 'modified': False, 'deleted': True, 'changes': [{ - 'key': 'file', + 'key': 'File', 'change': 'deleted' }] } -def process_modified_file(repo, file_path, 
x, y, is_staged): +def handle_modification(repo, file_path, is_staged): + """Handle a modified or new file""" try: - # Get the content of the file from the last commit - old_content = repo.git.show(f'HEAD:{file_path}') - old_data = yaml.safe_load(old_content) - except GitCommandError: - old_data = None + # Get old content if file exists in HEAD + try: + old_content = repo.git.show(f'HEAD:{file_path}') + old_data = yaml.safe_load(old_content) + prior_name = old_data.get('name') + status = 'Modified' + except GitCommandError: + old_data = None + prior_name = None + status = 'New' - # Get the current content of the file - with open(os.path.join(repo.working_dir, file_path), 'r') as f: - new_content = f.read() - new_data = yaml.safe_load(new_content) + # Get new content + with open(os.path.join(repo.working_dir, file_path), 'r') as f: + new_data = yaml.safe_load(f.read()) + current_name = new_data.get('name') - detailed_changes = compare_data(old_data, new_data) + # Only set outgoing_name if it changed + outgoing_name = current_name if current_name != prior_name else None - # Determine prior_name and outgoing_name - prior_name = old_data.get('name') if old_data else None - outgoing_name = new_data.get('name') if new_data else None + # Process changes based on file type + if file_path.startswith('profiles/'): + detailed_changes = process_quality_profile(old_data, new_data) + else: + detailed_changes = process_generic(old_data, new_data) - # If there's no name change, set outgoing_name to None - if prior_name == outgoing_name: - outgoing_name = None - - return { - 'name': new_data.get('name', os.path.basename(file_path)), - 'prior_name': prior_name, - 'outgoing_name': outgoing_name, - 'id': new_data.get('id', ''), - 'type': determine_type(file_path), - 'status': 'Modified' if old_data else 'New', - 'file_path': file_path, - 'staged': is_staged, - 'modified': y != ' ', - 'deleted': False, - 'changes': detailed_changes - } + return { + 'name': current_name or 
os.path.basename(file_path), + 'prior_name': prior_name, + 'outgoing_name': outgoing_name, + 'type': determine_type(file_path), + 'status': status, + 'file_path': file_path, + 'staged': is_staged, + 'modified': True, + 'deleted': False, + 'changes': detailed_changes + } + except Exception as e: + logger.error(f"Error processing modified file {file_path}: {str(e)}") + raise -def compare_data(old_data, new_data): +def process_quality_profile(old_data, new_data): + """Process changes in quality profile files""" if old_data is None and new_data is not None: - return [{'key': 'file', 'change': 'added'}] + return [{'key': 'File', 'change': 'added'}] if old_data is not None and new_data is None: - return [{'key': 'file', 'change': 'deleted'}] + return [{'key': 'File', 'change': 'deleted'}] + + changes = [] + + # Simple fields with consistent capitalization + simple_fields = { + 'name': 'Name', + 'description': 'Description', + 'language': 'Language', + 'minCustomFormatScore': 'Minimum Custom Format Score', + 'minScoreIncrement': 'Minimum Score Increment', + 'upgradeUntilScore': 'Upgrade Until Score', + 'upgradesAllowed': 'Upgrades Allowed' + } + + for field, display_name in simple_fields.items(): + old_value = old_data.get(field) + new_value = new_data.get(field) + if old_value != new_value: + changes.append({ + 'key': display_name, + 'change': 'modified', + 'from': old_value, + 'to': new_value + }) + + # Compare qualities + if old_data.get('qualities') != new_data.get('qualities'): + changes.extend( + compare_qualities(old_data.get('qualities', []), + new_data.get('qualities', []))) + + # Compare profile-specific custom formats + if old_data.get('custom_formats') != new_data.get('custom_formats'): + changes.extend( + compare_profile_formats(old_data.get('custom_formats', []), + new_data.get('custom_formats', []))) + + # Compare tags + if old_data.get('tags') != new_data.get('tags'): + changes.extend( + compare_tags(old_data.get('tags', []), new_data.get('tags', []))) + + 
# Compare upgrade_until + if old_data.get('upgrade_until') != new_data.get('upgrade_until'): + changes.extend( + compare_upgrade_until(old_data.get('upgrade_until', {}), + new_data.get('upgrade_until', {}))) + + return changes + + +def compare_qualities(old_qualities, new_qualities): + """Compare quality groups and their qualities""" + if not old_qualities and not new_qualities: + return [] + + changes = [] + + # Create lookup dictionaries + old_dict = {quality.get('name'): quality for quality in old_qualities} + new_dict = {quality.get('name'): quality for quality in new_qualities} + + # Find added/removed qualities + old_names = set(old_dict.keys()) + new_names = set(new_dict.keys()) + + # Track additions + for name in (new_names - old_names): + changes.append({ + 'key': 'Quality Group', + 'change': 'added', + 'value': name + }) + + # Track removals + for name in (old_names - new_names): + changes.append({ + 'key': 'Quality Group', + 'change': 'removed', + 'value': name + }) + + # Compare common qualities + for name in (old_names & new_names): + old_quality = old_dict[name] + new_quality = new_dict[name] + + # Compare description + if old_quality.get('description') != new_quality.get('description'): + changes.append({ + 'key': f'Quality Group: {name}: Description', + 'change': 'modified', + 'from': old_quality.get('description'), + 'to': new_quality.get('description') + }) + + # Compare nested qualities + old_nested = { + q.get('name'): q + for q in old_quality.get('qualities', []) + } + new_nested = { + q.get('name'): q + for q in new_quality.get('qualities', []) + } + + nested_old = set(old_nested.keys()) + nested_new = set(new_nested.keys()) + + for nested_name in (nested_new - nested_old): + changes.append({ + 'key': f'Quality Group: {name}: Quality', + 'change': 'added', + 'value': nested_name + }) + + for nested_name in (nested_old - nested_new): + changes.append({ + 'key': f'Quality Group: {name}: Quality', + 'change': 'removed', + 'value': nested_name + 
}) + + return changes + + +def compare_profile_formats(old_formats, new_formats): + """Compare custom formats within a quality profile""" + if not old_formats and not new_formats: + return [] + + changes = [] + + # Create lookup dictionaries + old_dict = {fmt.get('name'): fmt.get('score') for fmt in old_formats} + new_dict = {fmt.get('name'): fmt.get('score') for fmt in new_formats} + + old_names = set(old_dict.keys()) + new_names = set(new_dict.keys()) + + # Track additions + for name in (new_names - old_names): + changes.append({ + 'key': 'Custom Format', + 'change': 'added', + 'value': { + 'name': name, + 'score': new_dict[name] + } + }) + + # Track removals + for name in (old_names - new_names): + changes.append({ + 'key': 'Custom Format', + 'change': 'removed', + 'value': { + 'name': name, + 'score': old_dict[name] + } + }) + + # Compare scores for existing formats + for name in (old_names & new_names): + if old_dict[name] != new_dict[name]: + changes.append({ + 'key': f'Custom Format: {name}: Score', + 'change': 'modified', + 'from': old_dict[name], + 'to': new_dict[name] + }) + + return changes + + +def compare_tags(old_tags, new_tags): + """Compare tag lists""" + old_set = set(old_tags or []) + new_set = set(new_tags or []) + + changes = [] + + if added := (new_set - old_set): + changes.append({ + 'key': 'Tags', + 'change': 'added', + 'value': sorted(list(added)) + }) + + if removed := (old_set - new_set): + changes.append({ + 'key': 'Tags', + 'change': 'removed', + 'value': sorted(list(removed)) + }) + + return changes + + +def compare_upgrade_until(old_upgrade, new_upgrade): + """Compare upgrade_until objects""" + if not old_upgrade and not new_upgrade: + return [] + + changes = [] + + # Compare name + if old_upgrade.get('name') != new_upgrade.get('name'): + changes.append({ + 'key': 'Upgrade Until: Name', + 'change': 'modified', + 'from': old_upgrade.get('name'), + 'to': new_upgrade.get('name') + }) + + # Compare description + if 
old_upgrade.get('description') != new_upgrade.get('description'): + changes.append({ + 'key': 'Upgrade Until: Description', + 'change': 'modified', + 'from': old_upgrade.get('description'), + 'to': new_upgrade.get('description') + }) + + return changes + + +def process_generic(old_data, new_data): + """Process changes for non-profile files""" + if old_data is None and new_data is not None: + return [{'key': 'File', 'change': 'added'}] + + if old_data is not None and new_data is None: + return [{'key': 'File', 'change': 'deleted'}] changes = [] all_keys = set(old_data.keys()).union(set(new_data.keys())) @@ -128,112 +433,11 @@ def compare_data(old_data, new_data): new_value = new_data.get(key) if old_value != new_value: - if key == 'tags': - changes.extend(compare_tags(old_value, new_value)) - elif key == 'custom_formats': - changes.extend(compare_custom_formats(old_value, new_value)) - elif key == 'conditions': - changes.extend(compare_conditions(old_value, new_value)) - else: - changes.append({ - 'key': key, - 'change': 'modified', - 'from': old_value, - 'to': new_value - }) - - return changes - - -def compare_tags(old_tags, new_tags): - old_tags = set(old_tags or []) - new_tags = set(new_tags or []) - - added = new_tags - old_tags - removed = old_tags - new_tags - - changes = [] - if added: - changes.append({ - 'key': 'tags', - 'change': 'added', - 'value': list(added) - }) - if removed: - changes.append({ - 'key': 'tags', - 'change': 'removed', - 'value': list(removed) - }) - - return changes - - -def compare_custom_formats(old_cfs, new_cfs): - old_cfs = {cf['id']: cf for cf in old_cfs or []} - new_cfs = {cf['id']: cf for cf in new_cfs or []} - - all_ids = set(old_cfs.keys()).union(set(new_cfs.keys())) - changes = [] - - for cf_id in all_ids: - old_cf = old_cfs.get(cf_id) - new_cf = new_cfs.get(cf_id) - - if old_cf != new_cf: - if old_cf and new_cf: - if old_cf['score'] != new_cf['score']: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 
'modified', - 'from': old_cf['score'], - 'to': new_cf['score'] - }) - elif old_cf and not new_cf: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 'removed', - 'value': old_cf['score'] - }) - elif not old_cf and new_cf: - changes.append({ - 'key': f'custom_format_{cf_id}', - 'change': 'added', - 'value': new_cf['score'] - }) - - return changes - - -def compare_conditions(old_conditions, new_conditions): - changes = [] - old_conditions = old_conditions or [] - new_conditions = new_conditions or [] - - # Check for removed or modified conditions - for i, old_cond in enumerate(old_conditions): - if i >= len(new_conditions): changes.append({ - 'key': f'conditions[{i}]', - 'change': 'removed', - 'value': old_cond + 'key': key.title(), # Capitalize generic keys + 'change': 'modified', + 'from': old_value, + 'to': new_value }) - elif old_cond != new_conditions[i]: - for key in old_cond.keys(): - if old_cond.get(key) != new_conditions[i].get(key): - changes.append({ - 'key': f'conditions[{i}].{key}', - 'change': 'modified', - 'from': old_cond.get(key), - 'to': new_conditions[i].get(key) - }) - - # Check for added conditions - for i in range(len(old_conditions), len(new_conditions)): - changes.append({ - 'key': f'conditions[{i}]', - 'change': 'added', - 'value': new_conditions[i] - }) return changes diff --git a/docker-compose.yml b/docker-compose.yml index 491665e..9abe0a7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,7 +8,6 @@ services: - ./frontend:/app - /app/node_modules environment: - - VITE_API_URL=http://localhost:5000 - CHOKIDAR_USEPOLLING=true backend: @@ -22,5 +21,6 @@ services: - FLASK_ENV=development env_file: - .env.1 + restart: always volumes: backend_data: diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 7032838..2d0e70f 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,6 +8,9 @@ "name": "frontend", "version": "0.0.0", "dependencies": { + "@dnd-kit/core": "^6.1.0", + 
"@dnd-kit/modifiers": "^7.0.0", + "@dnd-kit/sortable": "^8.0.0", "@radix-ui/react-slot": "^1.1.0", "axios": "^0.21.1", "class-variance-authority": "^0.7.0", @@ -358,6 +361,68 @@ "node": ">=6.9.0" } }, + "node_modules/@dnd-kit/accessibility": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/accessibility/-/accessibility-3.1.0.tgz", + "integrity": "sha512-ea7IkhKvlJUv9iSHJOnxinBcoOI3ppGnnL+VDJ75O45Nss6HtZd8IdN8touXPDtASfeI2T2LImb8VOZcL47wjQ==", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/core": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/core/-/core-6.1.0.tgz", + "integrity": "sha512-J3cQBClB4TVxwGo3KEjssGEXNJqGVWx17aRTZ1ob0FliR5IjYgTxl5YJbKTzA6IzrtelotH19v6y7uoIRUZPSg==", + "dependencies": { + "@dnd-kit/accessibility": "^3.1.0", + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/modifiers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/modifiers/-/modifiers-7.0.0.tgz", + "integrity": "sha512-BG/ETy3eBjFap7+zIti53f0PCLGDzNXyTmn6fSdrudORf+OH04MxrW4p5+mPu4mgMk9kM41iYONjc3DOUWTcfg==", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.1.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/sortable": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@dnd-kit/sortable/-/sortable-8.0.0.tgz", + "integrity": "sha512-U3jk5ebVXe1Lr7c2wU7SBZjcWdQP+j7peHJfCspnA81enlu88Mgd7CC8Q+pub9ubP7eKVETzJW+IBAhsqbSu/g==", + "dependencies": { + "@dnd-kit/utilities": "^3.2.2", + "tslib": "^2.0.0" + }, + "peerDependencies": { + "@dnd-kit/core": "^6.1.0", + "react": ">=16.8.0" + } + }, + "node_modules/@dnd-kit/utilities": { + "version": "3.2.2", + "resolved": 
"https://registry.npmjs.org/@dnd-kit/utilities/-/utilities-3.2.2.tgz", + "integrity": "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg==", + "dependencies": { + "tslib": "^2.0.0" + }, + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/@esbuild/android-arm": { "version": "0.18.20", "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.20.tgz", @@ -2825,6 +2890,11 @@ "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", "dev": true }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==" + }, "node_modules/update-browserslist-db": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index cad409a..5f844c3 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -9,6 +9,9 @@ "preview": "vite preview" }, "dependencies": { + "@dnd-kit/core": "^6.1.0", + "@dnd-kit/modifiers": "^7.0.0", + "@dnd-kit/sortable": "^8.0.0", "@radix-ui/react-slot": "^1.1.0", "axios": "^0.21.1", "class-variance-authority": "^0.7.0", diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 838648a..59b1057 100644 --- a/frontend/src/App.jsx +++ b/frontend/src/App.jsx @@ -1,51 +1,51 @@ -import { BrowserRouter as Router, Routes, Route } from "react-router-dom"; -import { useState, useEffect } from "react"; -import RegexPage from "./components/regex/RegexPage"; -import FormatPage from "./components/format/FormatPage"; -import ProfilePage from "./components/profile/ProfilePage"; -import SettingsPage from "./components/settings/SettingsPage"; -import Navbar from "./components/ui/Navbar"; -import { ToastContainer } from "react-toastify"; -import 
"react-toastify/dist/ReactToastify.css"; +import {BrowserRouter as Router, Routes, Route} from 'react-router-dom'; +import {useState, useEffect} from 'react'; +import RegexPage from './components/regex/RegexPage'; +import FormatPage from './components/format/FormatPage'; +import ProfilePage from './components/profile/ProfilePage'; +import SettingsPage from './components/settings/SettingsPage'; +import Navbar from './components/ui/Navbar'; +import {ToastContainer} from 'react-toastify'; +import 'react-toastify/dist/ReactToastify.css'; function App() { - const [darkMode, setDarkMode] = useState(true); + const [darkMode, setDarkMode] = useState(true); - useEffect(() => { - if (darkMode) { - document.documentElement.classList.add("dark"); - } else { - document.documentElement.classList.remove("dark"); - } - }, [darkMode]); + useEffect(() => { + if (darkMode) { + document.documentElement.classList.add('dark'); + } else { + document.documentElement.classList.remove('dark'); + } + }, [darkMode]); - return ( - -
- -
- - } /> - } /> - } /> - } /> - } /> - -
-
- -
- ); + return ( + +
+ +
+ + } /> + } /> + } /> + } /> + } /> + +
+
+ +
+ ); } export default App; diff --git a/frontend/src/api/api.js b/frontend/src/api/api.js index 175e74e..82e6392 100644 --- a/frontend/src/api/api.js +++ b/frontend/src/api/api.js @@ -1,10 +1,8 @@ import axios from 'axios'; -const API_BASE_URL = import.meta.env.VITE_API_URL || 'http://localhost:5000'; - export const getRegexes = async () => { try { - const response = await axios.get(`${API_BASE_URL}/regex`); + const response = await axios.get(`/api/regex`); return response.data; } catch (error) { console.error('Error fetching regexes:', error); @@ -14,7 +12,7 @@ export const getRegexes = async () => { export const saveRegex = async regex => { try { - const response = await axios.post(`${API_BASE_URL}/regex`, regex); + const response = await axios.post(`/api/regex`, regex); return response.data; } catch (error) { console.error('Error saving regex:', error); @@ -24,7 +22,7 @@ export const saveRegex = async regex => { export const updateRegex = async (id, regex) => { try { - const response = await axios.put(`${API_BASE_URL}/regex/${id}`, regex); + const response = await axios.put(`/api/regex/${id}`, regex); return response.data; } catch (error) { console.error('Error updating regex:', error); @@ -35,7 +33,7 @@ export const updateRegex = async (id, regex) => { export const deleteRegex = async (id, force = false) => { try { const response = await axios.delete( - `${API_BASE_URL}/regex/${id}${force ? '?force=true' : ''}`, + `/api/regex/${id}${force ? 
'?force=true' : ''}`, { validateStatus: status => { return ( @@ -55,7 +53,7 @@ export const deleteRegex = async (id, force = false) => { export const getFormats = async () => { try { - const response = await axios.get(`${API_BASE_URL}/format`); + const response = await axios.get(`/api/format`); return response.data; } catch (error) { console.error('Error fetching formats:', error); @@ -65,7 +63,7 @@ export const getFormats = async () => { export const saveFormat = async format => { try { - const response = await axios.post(`${API_BASE_URL}/format`, format); + const response = await axios.post(`/api/format`, format); return response.data; } catch (error) { console.error('Error saving format:', error); @@ -75,10 +73,7 @@ export const saveFormat = async format => { export const updateFormat = async (id, format) => { try { - const response = await axios.put( - `${API_BASE_URL}/format/${id}`, - format - ); + const response = await axios.put(`/api/format/${id}`, format); return response.data; } catch (error) { console.error('Error updating format:', error); @@ -89,7 +84,7 @@ export const updateFormat = async (id, format) => { export const deleteFormat = async (id, force = false) => { try { const response = await axios.delete( - `${API_BASE_URL}/format/${id}${force ? '?force=true' : ''}`, + `/api/format/${id}${force ? 
'?force=true' : ''}`, { validateStatus: status => { return ( @@ -109,10 +104,7 @@ export const deleteFormat = async (id, force = false) => { export const createRegex101Link = async regexData => { try { - const response = await axios.post( - `${API_BASE_URL}/regex/regex101`, - regexData - ); + const response = await axios.post(`/api/regex/regex101`, regexData); return response.data; } catch (error) { console.error('Error creating regex101 link:', error); @@ -122,7 +114,7 @@ export const createRegex101Link = async regexData => { export const getSettings = async () => { try { - const response = await axios.get(`${API_BASE_URL}/settings`); + const response = await axios.get(`/api/settings`); return response.data; } catch (error) { console.error('Error fetching settings:', error); @@ -132,7 +124,7 @@ export const getSettings = async () => { export const getGitStatus = async () => { try { - const response = await axios.get(`${API_BASE_URL}/git/status`); + const response = await axios.get(`/api/git/status`); // Ensure has_unpushed_commits is included in the response return { ...response.data, @@ -150,7 +142,7 @@ export const getGitStatus = async () => { export const getBranches = async () => { try { - const response = await axios.get(`${API_BASE_URL}/git/branches`); + const response = await axios.get(`/api/git/branches`); return response.data; } catch (error) { console.error('Error fetching branches:', error); @@ -161,7 +153,7 @@ export const getBranches = async () => { export const checkoutBranch = async branchName => { try { const response = await axios.post( - `${API_BASE_URL}/git/checkout`, + `/api/git/checkout`, { branch: branchName }, @@ -185,7 +177,7 @@ export const checkoutBranch = async branchName => { export const createBranch = async (branchName, baseBranch) => { try { const response = await axios.post( - `${API_BASE_URL}/git/branch`, + `/api/git/branch`, { name: branchName, base: baseBranch @@ -209,18 +201,15 @@ export const createBranch = async (branchName, 
baseBranch) => { export const deleteBranch = async branchName => { try { - const response = await axios.delete( - `${API_BASE_URL}/git/branch/${branchName}`, - { - validateStatus: status => { - return ( - (status >= 200 && status < 300) || - status === 400 || - status === 409 - ); - } + const response = await axios.delete(`/api/git/branch/${branchName}`, { + validateStatus: status => { + return ( + (status >= 200 && status < 300) || + status === 400 || + status === 409 + ); } - ); + }); return response.data; } catch (error) { console.error('Error deleting branch:', error); @@ -231,7 +220,7 @@ export const deleteBranch = async branchName => { export const pushBranchToRemote = async branchName => { try { const response = await axios.post( - `${API_BASE_URL}/git/branch/push`, + `/api/git/branch/push`, { branch: branchName }, @@ -254,7 +243,7 @@ export const pushBranchToRemote = async branchName => { export const addFiles = async files => { try { - const response = await axios.post(`${API_BASE_URL}/git/stage`, {files}); + const response = await axios.post(`/api/git/stage`, {files}); return response.data; } catch (error) { console.error('Error staging files:', error); @@ -264,7 +253,7 @@ export const addFiles = async files => { export const unstageFiles = async files => { try { - const response = await axios.post(`${API_BASE_URL}/git/unstage`, { + const response = await axios.post(`/api/git/unstage`, { files }); return response.data; @@ -276,7 +265,7 @@ export const unstageFiles = async files => { export const commitFiles = async (files, commitMessage) => { try { - const response = await axios.post(`${API_BASE_URL}/git/commit`, { + const response = await axios.post(`/api/git/commit`, { files, commit_message: commitMessage }); @@ -289,7 +278,7 @@ export const commitFiles = async (files, commitMessage) => { export const pushFiles = async () => { try { - const response = await axios.post(`${API_BASE_URL}/git/push`); + const response = await axios.post(`/api/git/push`); 
return response.data; } catch (error) { // Pass through the structured error from the backend @@ -307,7 +296,7 @@ export const pushFiles = async () => { }; export const revertFile = async filePath => { try { - const response = await axios.post(`${API_BASE_URL}/git/revert`, { + const response = await axios.post(`/api/git/revert`, { file_path: filePath }); return response.data; @@ -319,7 +308,7 @@ export const revertFile = async filePath => { export const revertAll = async () => { try { - const response = await axios.post(`${API_BASE_URL}/git/revert-all`); + const response = await axios.post(`/api/git/revert-all`); return response.data; } catch (error) { console.error('Error reverting all changes:', error); @@ -329,7 +318,7 @@ export const revertAll = async () => { export const deleteFile = async filePath => { try { - const response = await axios.delete(`${API_BASE_URL}/git/file`, { + const response = await axios.delete(`/api/git/file`, { data: {file_path: filePath} }); return response.data; @@ -341,7 +330,7 @@ export const deleteFile = async filePath => { export const pullBranch = async branchName => { try { - const response = await axios.post(`${API_BASE_URL}/git/pull`, { + const response = await axios.post(`/api/git/pull`, { branch: branchName }); return response.data; @@ -364,7 +353,7 @@ export const pullBranch = async branchName => { export const cloneRepo = async gitRepo => { try { - const response = await axios.post(`${API_BASE_URL}/git/clone`, { + const response = await axios.post(`/api/git/clone`, { gitRepo }); return response.data; @@ -376,7 +365,7 @@ export const cloneRepo = async gitRepo => { export const getProfiles = async () => { try { - const response = await axios.get(`${API_BASE_URL}/profile`); + const response = await axios.get(`/api/profile`); return response.data; } catch (error) { console.error('Error fetching profiles:', error); @@ -386,7 +375,7 @@ export const getProfiles = async () => { export const saveProfile = async profile => { try { - 
const response = await axios.post(`${API_BASE_URL}/profile`, profile); + const response = await axios.post(`/api/profile`, profile); return response.data; } catch (error) { console.error('Error saving profile:', error); @@ -396,10 +385,7 @@ export const saveProfile = async profile => { export const updateProfile = async (id, profile) => { try { - const response = await axios.put( - `${API_BASE_URL}/profile/${id}`, - profile - ); + const response = await axios.put(`/api/profile/${id}`, profile); return response.data; } catch (error) { console.error('Error updating profile:', error); @@ -409,7 +395,7 @@ export const updateProfile = async (id, profile) => { export const deleteProfile = async id => { try { - const response = await axios.delete(`${API_BASE_URL}/profile/${id}`); + const response = await axios.delete(`/api/profile/${id}`); return response.data; } catch (error) { console.error('Error deleting profile:', error); @@ -419,7 +405,7 @@ export const deleteProfile = async id => { export const unlinkRepo = async (removeFiles = false) => { try { - const response = await axios.post(`${API_BASE_URL}/git/unlink`, { + const response = await axios.post(`/api/git/unlink`, { removeFiles }); return response.data; @@ -431,7 +417,7 @@ export const unlinkRepo = async (removeFiles = false) => { export const checkDevMode = async () => { try { - const response = await axios.get(`${API_BASE_URL}/git/dev`); + const response = await axios.get(`/api/git/dev`); return response.data; } catch (error) { console.error('Error checking dev mode:', error); @@ -441,7 +427,7 @@ export const checkDevMode = async () => { export const resolveConflict = async resolutions => { try { - const response = await axios.post(`${API_BASE_URL}/git/resolve`, { + const response = await axios.post(`/api/git/resolve`, { resolutions }); return response.data; @@ -453,7 +439,7 @@ export const resolveConflict = async resolutions => { export const finalizeMerge = async () => { try { - const response = await 
axios.post(`${API_BASE_URL}/git/merge/finalize`); + const response = await axios.post(`/api/git/merge/finalize`); return response.data; } catch (error) { console.error('Error finalizing merge:', error); @@ -472,7 +458,7 @@ export const finalizeMerge = async () => { export const abortMerge = async () => { try { - const response = await axios.post(`${API_BASE_URL}/git/merge/abort`); + const response = await axios.post(`/api/git/merge/abort`); return response.data; } catch (error) { console.error('Error aborting merge:', error); @@ -480,19 +466,13 @@ export const abortMerge = async () => { } }; -export const getCommitHistory = async (branch = null) => { +export const getCommitHistory = async () => { try { - const url = new URL(`${API_BASE_URL}/git/commits`); - if (branch) { - url.searchParams.append('branch', branch); - } - - const response = await axios.get(url.toString(), { + const response = await axios.get('/api/git/commits', { validateStatus: status => { return (status >= 200 && status < 300) || status === 400; } }); - return response.data; } catch (error) { console.error('Error fetching commit history:', error); diff --git a/frontend/src/api/arr.js b/frontend/src/api/arr.js index 61c303b..b899fe6 100644 --- a/frontend/src/api/arr.js +++ b/frontend/src/api/arr.js @@ -1,11 +1,9 @@ import axios from 'axios'; -const API_BASE_URL = import.meta.env.VITE_API_URL || 'http://localhost:5000'; - export const pingService = async (url, apiKey, type) => { try { const response = await axios.post( - `${API_BASE_URL}/arr/ping`, + `/api/arr/ping`, { url, apiKey, @@ -35,7 +33,7 @@ export const pingService = async (url, apiKey, type) => { export const saveArrConfig = async config => { try { - const response = await axios.post(`${API_BASE_URL}/arr/config`, config); + const response = await axios.post(`/api/arr/config`, config); return response.data; } catch (error) { console.error('Error saving arr config:', error); @@ -45,7 +43,7 @@ export const saveArrConfig = async config => { 
export const getArrConfigs = async () => { try { - const response = await axios.get(`${API_BASE_URL}/arr/config`); + const response = await axios.get(`/api/arr/config`); console.log('Raw axios response:', response); console.log('Response data:', response.data); return response.data; // This is correct - don't change this @@ -57,10 +55,7 @@ export const getArrConfigs = async () => { export const updateArrConfig = async (id, config) => { try { - const response = await axios.put( - `${API_BASE_URL}/arr/config/${id}`, - config - ); + const response = await axios.put(`/api/arr/config/${id}`, config); return response.data; } catch (error) { console.error('Error updating arr config:', error); @@ -70,7 +65,7 @@ export const updateArrConfig = async (id, config) => { export const deleteArrConfig = async id => { try { - const response = await axios.delete(`${API_BASE_URL}/arr/config/${id}`); + const response = await axios.delete(`/api/arr/config/${id}`); return response.data; } catch (error) { console.error('Error deleting arr config:', error); diff --git a/frontend/src/api/data.js b/frontend/src/api/data.js new file mode 100644 index 0000000..5ff6df6 --- /dev/null +++ b/frontend/src/api/data.js @@ -0,0 +1,97 @@ +import axios from 'axios'; + +const BASE_URL = '/api/data'; + +const handleError = (error, operation) => { + console.error(`Error ${operation}:`, error); + if (error.response?.data) { + return { + success: false, + message: error.response.data.error + }; + } + return { + success: false, + message: `Failed to ${operation}` + }; +}; + +// Get all items for a category +export const getAllItems = async category => { + try { + const response = await axios.get(`${BASE_URL}/${category}`); + return response.data; + } catch (error) { + return handleError(error, `fetch ${category} items`); + } +}; + +// Get single item +export const getItem = async (category, name) => { + try { + const response = await axios.get(`${BASE_URL}/${category}/${name}`); + return response.data; + } 
catch (error) { + return handleError(error, `fetch ${category} item ${name}`); + } +}; + +// Create new item +export const createItem = async (category, data) => { + try { + const response = await axios.post( + `${BASE_URL}/${category}/${data.name}`, + data + ); + return response.data; + } catch (error) { + return handleError(error, `create ${category} item`); + } +}; + +// Update existing item +export const updateItem = async (category, name, data, newName) => { + try { + const response = await axios.put(`${BASE_URL}/${category}/${name}`, { + ...data, + ...(newName && {rename: newName}) // Only add rename field if newName exists + }); + return response.data; + } catch (error) { + return handleError(error, `update ${category} item ${name}`); + } +}; + +// Delete item +export const deleteItem = async (category, name) => { + try { + const response = await axios.delete(`${BASE_URL}/${category}/${name}`); + return response.data; + } catch (error) { + return handleError(error, `delete ${category} item ${name}`); + } +}; + +export const Profiles = { + getAll: () => getAllItems('profile'), + get: name => getItem('profile', name), + create: data => createItem('profile', data), + update: (name, data, newName) => updateItem('profile', name, data, newName), + delete: name => deleteItem('profile', name) +}; + +export const CustomFormats = { + getAll: () => getAllItems('custom_format'), + get: name => getItem('custom_format', name), + create: data => createItem('custom_format', data), + update: (name, data) => updateItem('custom_format', name, data), + delete: name => deleteItem('custom_format', name) +}; + +export const RegexPatterns = { + getAll: () => getAllItems('regex_pattern'), + get: name => getItem('regex_pattern', name), + create: data => createItem('regex_pattern', data), + update: (name, data) => updateItem('regex_pattern', name, data), + delete: name => deleteItem('regex_pattern', name) +}; diff --git a/frontend/src/assets/logo/Radarr.svg 
b/frontend/src/assets/logo/Radarr.svg new file mode 100644 index 0000000..2f06f69 --- /dev/null +++ b/frontend/src/assets/logo/Radarr.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/frontend/src/assets/logo/Sonarr.svg b/frontend/src/assets/logo/Sonarr.svg new file mode 100644 index 0000000..b0a7218 --- /dev/null +++ b/frontend/src/assets/logo/Sonarr.svg @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/frontend/src/components/profile/CreateGroupModal.jsx b/frontend/src/components/profile/CreateGroupModal.jsx new file mode 100644 index 0000000..5a778f9 --- /dev/null +++ b/frontend/src/components/profile/CreateGroupModal.jsx @@ -0,0 +1,170 @@ +import React, {useState, useEffect} from 'react'; +import Modal from '../ui/Modal'; +import Tooltip from '@ui/Tooltip'; +import {InfoIcon} from 'lucide-react'; + +const CreateGroupModal = ({ + isOpen, + onClose, + availableQualities, + onCreateGroup, + editingGroup = null +}) => { + const [selectedQualities, setSelectedQualities] = useState([]); + const [groupName, setGroupName] = useState(''); + const [description, setDescription] = useState(''); + + useEffect(() => { + if (isOpen && editingGroup) { + setGroupName(editingGroup.name); + setDescription(editingGroup.description || ''); + + // Set selected qualities from the editing group + const existingQualities = editingGroup.qualities.map(quality => { + // Find the quality in availableQualities to get the most up-to-date version + return ( + availableQualities.find(q => q.id === quality.id) || quality + ); + }); + setSelectedQualities(existingQualities); + } else if (!isOpen) { + // Reset state when modal closes + setGroupName(''); + setDescription(''); + setSelectedQualities([]); + } + }, [isOpen, editingGroup, availableQualities]); + + const getValidationMessage = () => { + if (!groupName) return 'Please enter a group name'; + if (selectedQualities.length === 0) + return 'Select at least one quality'; + 
return null; + }; + + const handleSave = () => { + if (groupName && selectedQualities.length > 0) { + const groupData = { + // If editing, keep the same ID; otherwise generate new one + id: editingGroup ? editingGroup.id : Date.now(), + name: groupName, + description, + qualities: selectedQualities, + // Preserve enabled state if editing, default to true for new groups + enabled: editingGroup ? editingGroup.enabled : true, + // Preserve radarr/sonarr settings if editing + radarr: editingGroup?.radarr, + sonarr: editingGroup?.sonarr + }; + + onCreateGroup(groupData); + } + }; + + const isValid = groupName && selectedQualities.length > 0; + + const isQualitySelected = quality => { + return selectedQualities.some(sq => sq.id === quality.id); + }; + + return ( + + + + + + }> +
+
+ +

+ Groups allow you to combine multiple qualities that are + considered equivalent. Items matching any quality in the + group will be treated equally. +

+
+ +
+ + setGroupName(e.target.value)} + className='mt-1 block w-full rounded-md border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-700 px-2.5 py-1.5 text-xs text-gray-900 dark:text-gray-100 focus:border-blue-500 dark:focus:border-blue-400 focus:outline-none focus:ring-1 focus:ring-blue-500 dark:focus:ring-blue-400' + placeholder='Enter group name' + /> +
+ +
+ +