diff --git a/.gitignore b/.gitignore index 15f1723..ea2b7bf 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,8 @@ __pycache__/ # Environment variables .env +.env.1 +.env.2 # OS files .DS_Store \ No newline at end of file diff --git a/backend/app/__init__.py b/backend/app/__init__.py index a522a04..3376edf 100644 --- a/backend/app/__init__.py +++ b/backend/app/__init__.py @@ -1,5 +1,3 @@ -# app/__init__.py - import os from flask import Flask, jsonify from flask_cors import CORS @@ -12,15 +10,17 @@ from .settings_utils import create_empty_settings_if_not_exists, load_settings REGEX_DIR = os.path.join('data', 'db', 'regex_patterns') FORMAT_DIR = os.path.join('data', 'db', 'custom_formats') PROFILE_DIR = os.path.join('data', 'db', 'profiles') +DATA_DIR = '/app/data' + def create_app(): app = Flask(__name__) CORS(app, resources={r"/*": {"origins": "*"}}) - + # Initialize directories and create empty settings file if it doesn't exist initialize_directories() create_empty_settings_if_not_exists() - + # Register Blueprints app.register_blueprint(regex_bp) app.register_blueprint(format_bp) @@ -35,7 +35,9 @@ def create_app(): return app + def initialize_directories(): os.makedirs(REGEX_DIR, exist_ok=True) os.makedirs(FORMAT_DIR, exist_ok=True) os.makedirs(PROFILE_DIR, exist_ok=True) + os.makedirs(DATA_DIR, exist_ok=True) diff --git a/backend/app/git/__init__.py b/backend/app/git/__init__.py index 45a3091..197991f 100644 --- a/backend/app/git/__init__.py +++ b/backend/app/git/__init__.py @@ -1,6 +1,5 @@ from flask import Blueprint, request, jsonify from .status.status import get_git_status -from .status.diff import get_diff from .branches.manager import Branch_Manager from .operations.manager import GitOperations from .repo.unlink import unlink_repository @@ -74,6 +73,8 @@ def create_branch(): return jsonify({'success': True, **result}), 200 else: logger.error(f"Failed to create branch: {result}") + if 'merging' in result.get('error', '').lower(): + return 
jsonify({'success': False, 'error': result}), 409 return jsonify({'success': False, 'error': result}), 400 @@ -99,6 +100,8 @@ def checkout_branch(): return jsonify({'success': True, **result}), 200 else: logger.error(f"Failed to checkout branch: {result}") + if 'merging' in result.get('error', '').lower(): + return jsonify({'success': False, 'error': result}), 409 return jsonify({'success': False, 'error': result}), 400 @@ -111,6 +114,8 @@ def delete_branch(branch_name): return jsonify({'success': True, **result}), 200 else: logger.error(f"Failed to delete branch: {result}") + if 'merging' in result.get('error', '').lower(): + return jsonify({'success': False, 'error': result}), 409 return jsonify({'success': False, 'error': result}), 400 @@ -129,23 +134,43 @@ def push_branch(): if success: return jsonify({"success": True, "data": result}), 200 else: - return jsonify({"success": False, "error": result["error"]}), 500 + if 'merging' in result.get('error', '').lower(): + return jsonify({'success': False, 'error': result}), 409 + return jsonify({'success': False, 'error': result["error"]}), 500 + + +@bp.route('/commit', methods=['POST']) +def commit_files(): + files = request.json.get('files', []) + user_commit_message = request.json.get('commit_message', "Commit changes") + logger.debug(f"Received request to commit files: {files}") + + commit_message = generate_commit_message(user_commit_message, files) + success, message = git_operations.commit(files, commit_message) + + if success: + logger.debug("Successfully committed files") + return jsonify({'success': True, 'message': message}), 200 + else: + logger.error(f"Error committing files: {message}") + return jsonify({'success': False, 'error': message}), 400 @bp.route('/push', methods=['POST']) def push_files(): - files = request.json.get('files', []) - user_commit_message = request.json.get('commit_message', - "Commit and push staged files") - logger.debug(f"Received request to push files: {files}") - commit_message 
= generate_commit_message(user_commit_message, files) - success, message = git_operations.push(files, commit_message) + logger.debug("Received request to push changes") + success, message = git_operations.push() + if success: - logger.debug("Successfully committed and pushed files") + logger.debug("Successfully pushed changes") return jsonify({'success': True, 'message': message}), 200 else: - logger.error(f"Error pushing files: {message}") - return jsonify({'success': False, 'error': message}), 400 + logger.error(f"Error pushing changes: {message}") + # If message is a dict, it's a structured error + if isinstance(message, dict): + return jsonify({'success': False, 'error': message}), 400 + # Otherwise it's a string error + return jsonify({'success': False, 'error': str(message)}), 400 @bp.route('/revert', methods=['POST']) @@ -193,28 +218,49 @@ def delete_file(): @bp.route('/pull', methods=['POST']) def pull_branch(): branch_name = request.json.get('branch') - success, message = git_operations.pull(branch_name) + success, response = git_operations.pull(branch_name) + + # Handle different response types + if isinstance(response, dict): + if response.get('state') == 'resolve': + # Merge conflict is now a success case with state='resolve' + return jsonify({ + 'success': True, + 'state': 'resolve', + 'message': response['message'], + 'details': response['details'] + }), 200 + elif response.get('state') == 'error': + # Handle error states + return jsonify({ + 'success': False, + 'state': 'error', + 'message': response['message'], + 'details': response.get('details', {}) + }), 409 if response.get('type') in [ + 'merge_conflict', 'uncommitted_changes' + ] else 400 + elif response.get('state') == 'complete': + # Normal success case + return jsonify({ + 'success': True, + 'state': 'complete', + 'message': response['message'], + 'details': response.get('details', {}) + }), 200 + + # Fallback for string responses or unexpected formats if success: - return 
jsonify({'success': True, 'message': message}), 200 - else: - logger.error(f"Error pulling branch: {message}") - return jsonify({'success': False, 'error': message}), 400 - - -@bp.route('/diff', methods=['POST']) -def diff_file(): - file_path = request.json.get('file_path') - try: - diff = get_diff(REPO_PATH, file_path) - logger.debug(f"Diff for file {file_path}: {diff}") - return jsonify({'success': True, 'diff': diff if diff else ""}), 200 - except Exception as e: - logger.error(f"Error getting diff for file {file_path}: {str(e)}", - exc_info=True) return jsonify({ - 'success': False, - 'error': f"Error getting diff for file: {str(e)}" - }), 400 + 'success': True, + 'state': 'complete', + 'message': response + }), 200 + return jsonify({ + 'success': False, + 'state': 'error', + 'message': str(response) + }), 400 @bp.route('/stage', methods=['POST']) @@ -227,6 +273,16 @@ def handle_stage_files(): return jsonify({'success': False, 'error': message}), 400 +@bp.route('/unstage', methods=['POST']) +def handle_unstage_files(): + files = request.json.get('files', []) + success, message = git_operations.unstage(files) + if success: + return jsonify({'success': True, 'message': message}), 200 + else: + return jsonify({'success': False, 'error': message}), 400 + + @bp.route('/unlink', methods=['POST']) def unlink(): data = request.get_json() @@ -239,20 +295,67 @@ def unlink(): def generate_commit_message(user_message, files): - file_changes = [] - for file in files: - if 'regex_patterns' in file: - file_changes.append(f"Update regex pattern: {file.split('/')[-1]}") - elif 'custom_formats' in file: - file_changes.append(f"Update custom format: {file.split('/')[-1]}") - else: - file_changes.append(f"Update: {file}") - - commit_message = f"{user_message}\n\nChanges:\n" + "\n".join(file_changes) - return commit_message + return user_message @bp.route('/dev', methods=['GET']) def dev_mode(): is_dev_mode = check_dev_mode() return jsonify({'devMode': is_dev_mode}), 200 + + 
+@bp.route('/resolve', methods=['POST']) +def resolve_conflicts(): + logger.debug("Received request to resolve conflicts") + resolutions = request.json.get('resolutions') + + if not resolutions: + return jsonify({ + 'success': False, + 'error': "Resolutions are required" + }), 400 + + result = git_operations.resolve(resolutions) + + if result.get('success'): + logger.debug("Successfully resolved conflicts") + return jsonify(result), 200 + else: + logger.error(f"Error resolving conflicts: {result.get('error')}") + return jsonify(result), 400 + + +@bp.route('/merge/finalize', methods=['POST']) +def finalize_merge(): + """ + Route to finalize a merge after all conflicts have been resolved. + Expected to be called only after all conflicts are resolved and changes are staged. + """ + logger.debug("Received request to finalize merge") + + result = git_operations.finalize_merge() + + if result.get('success'): + logger.debug( + f"Successfully finalized merge with files: {result.get('committed_files', [])}" + ) + return jsonify({ + 'success': True, + 'message': result.get('message'), + 'committed_files': result.get('committed_files', []) + }), 200 + else: + logger.error(f"Error finalizing merge: {result.get('error')}") + return jsonify({'success': False, 'error': result.get('error')}), 400 + + +@bp.route('/merge/abort', methods=['POST']) +def abort_merge(): + logger.debug("Received request to abort merge") + success, message = git_operations.abort_merge() + if success: + logger.debug("Successfully aborted merge") + return jsonify({'success': True, 'message': message}), 200 + else: + logger.error(f"Error aborting merge: {message}") + return jsonify({'success': False, 'error': message}), 400 diff --git a/backend/app/git/branches/manager.py b/backend/app/git/branches/manager.py index cf95482..31f959e 100644 --- a/backend/app/git/branches/manager.py +++ b/backend/app/git/branches/manager.py @@ -1,23 +1,45 @@ # git/branches/branches.py import git +import os from .create import 
create_branch from .checkout import checkout_branch from .delete import delete_branch from .get import get_branches, get_current_branch from .push import push_branch_to_remote + class Branch_Manager: + def __init__(self, repo_path): self.repo_path = repo_path + def is_merging(self): + repo = git.Repo(self.repo_path) + return os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD')) + def create(self, branch_name, base_branch='main'): + if self.is_merging(): + return False, { + 'error': + 'Cannot create branch while merging. Resolve conflicts first.' + } return create_branch(self.repo_path, branch_name, base_branch) def checkout(self, branch_name): + if self.is_merging(): + return False, { + 'error': + 'Cannot checkout while merging. Resolve conflicts first.' + } return checkout_branch(self.repo_path, branch_name) def delete(self, branch_name): + if self.is_merging(): + return False, { + 'error': + 'Cannot delete branch while merging. Resolve conflicts first.' + } return delete_branch(self.repo_path, branch_name) def get_all(self): @@ -25,6 +47,10 @@ class Branch_Manager: def get_current(self): return get_current_branch(self.repo_path) - + def push(self, branch_name): - return push_branch_to_remote(self.repo_path, branch_name) \ No newline at end of file + if self.is_merging(): + return False, { + 'error': 'Cannot push while merging. Resolve conflicts first.' + } + return push_branch_to_remote(self.repo_path, branch_name) diff --git a/backend/app/git/operations/commit.py b/backend/app/git/operations/commit.py index a109d17..acfe6ea 100644 --- a/backend/app/git/operations/commit.py +++ b/backend/app/git/operations/commit.py @@ -1,10 +1,10 @@ # git/operations/commit.py - import git import logging logger = logging.getLogger(__name__) + def commit_changes(repo_path, files, message): try: repo = git.Repo(repo_path) @@ -13,4 +13,4 @@ def commit_changes(repo_path, files, message): return True, "Successfully committed changes." 
except Exception as e: logger.error(f"Error committing changes: {str(e)}", exc_info=True) - return False, f"Error committing changes: {str(e)}" \ No newline at end of file + return False, f"Error committing changes: {str(e)}" diff --git a/backend/app/git/operations/manager.py b/backend/app/git/operations/manager.py index 627e8da..0c923bd 100644 --- a/backend/app/git/operations/manager.py +++ b/backend/app/git/operations/manager.py @@ -1,5 +1,3 @@ -# git/operations/operations.py - import git from .stage import stage_files from .commit import commit_changes @@ -7,19 +5,50 @@ from .push import push_changes from .revert import revert_file, revert_all from .delete import delete_file from .pull import pull_branch +from .unstage import unstage_files +from .merge import abort_merge, finalize_merge +from .resolve import resolve_conflicts +import os +import logging + +logger = logging.getLogger(__name__) + class GitOperations: + def __init__(self, repo_path): self.repo_path = repo_path + self.configure_git() + + def configure_git(self): + try: + repo = git.Repo(self.repo_path) + # Get user info from env variables + git_name = os.environ.get('GITHUB_USER_NAME') + git_email = os.environ.get('GITHUB_USER_EMAIL') + + logger.debug(f"Git config - Name: {git_name}, Email: {git_email}" + ) # Add this + + if git_name and git_email: + with repo.config_writer() as config: + config.set_value('user', 'name', git_name) + config.set_value('user', 'email', git_email) + logger.debug("Git identity configured successfully") + except Exception as e: + logger.error(f"Error configuring git user: {str(e)}") def stage(self, files): return stage_files(self.repo_path, files) + def unstage(self, files): + return unstage_files(self.repo_path, files) + def commit(self, files, message): return commit_changes(self.repo_path, files, message) - def push(self, files, message): - return push_changes(self.repo_path, files, message) + def push(self): + return push_changes(self.repo_path) def revert(self, 
file_path): return revert_file(self.repo_path, file_path) @@ -31,4 +60,15 @@ class GitOperations: return delete_file(self.repo_path, file_path) def pull(self, branch_name): - return pull_branch(self.repo_path, branch_name) \ No newline at end of file + return pull_branch(self.repo_path, branch_name) + + def finalize_merge(self): + repo = git.Repo(self.repo_path) + return finalize_merge(repo) + + def abort_merge(self): + return abort_merge(self.repo_path) + + def resolve(self, resolutions): + repo = git.Repo(self.repo_path) + return resolve_conflicts(repo, resolutions) diff --git a/backend/app/git/operations/merge.py b/backend/app/git/operations/merge.py new file mode 100644 index 0000000..8b8a2f8 --- /dev/null +++ b/backend/app/git/operations/merge.py @@ -0,0 +1,96 @@ +# git/operations/merge.py +import git +import logging +import os +from typing import Dict, Any, Tuple +from .commit import commit_changes + +logger = logging.getLogger(__name__) + + +def finalize_merge(repo) -> Dict[str, Any]: + """ + Finalize a merge by committing all staged files after conflict resolution. 
+ """ + try: + if not os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD')): + return { + 'success': False, + 'error': 'Not currently in a merge state' + } + + # Get unmerged files + unmerged_files = [] + status = repo.git.status('--porcelain', '-z').split('\0') + for item in status: + if item and len(item) >= 4: + x, y, file_path = item[0], item[1], item[3:] + if 'U' in (x, y): + unmerged_files.append(file_path) + + # Force update the index for unmerged files + for file_path in unmerged_files: + # Remove from index first + try: + repo.git.execute(['git', 'reset', '--', file_path]) + except git.GitCommandError: + pass + + # Add back to index + try: + repo.git.execute(['git', 'add', '--', file_path]) + except git.GitCommandError as e: + logger.error(f"Error adding file {file_path}: {str(e)}") + return { + 'success': False, + 'error': f"Failed to stage resolved file {file_path}" + } + + # Create commit message + commit_message = "Merge complete: resolved conflicts" + + # Commit + try: + repo.git.commit('-m', commit_message) + logger.info("Successfully finalized merge") + return {'success': True, 'message': 'Merge completed successfully'} + except git.GitCommandError as e: + logger.error(f"Git command error during commit: {str(e)}") + return { + 'success': False, + 'error': f"Failed to commit merge: {str(e)}" + } + + except Exception as e: + logger.error(f"Failed to finalize merge: {str(e)}") + return { + 'success': False, + 'error': f"Failed to finalize merge: {str(e)}" + } + + +def abort_merge(repo_path): + try: + repo = git.Repo(repo_path) + + # Try aborting the merge using git merge --abort + try: + repo.git.execute(['git', 'merge', '--abort']) + return True, "Merge aborted successfully" + except git.GitCommandError as e: + logger.warning( + "Error aborting merge with 'git merge --abort'. Trying 'git reset --hard'." 
+ ) + + # If git merge --abort fails, try resetting to the previous commit using git reset --hard + try: + repo.git.execute(['git', 'reset', '--hard']) + return True, "Merge aborted and repository reset to the previous commit" + except git.GitCommandError as e: + logger.exception( + "Error resetting repository with 'git reset --hard'") + return False, str(e) + + except Exception as e: + logger.exception("Unexpected error aborting merge") + return False, str(e) diff --git a/backend/app/git/operations/pull.py b/backend/app/git/operations/pull.py index 51fad14..e1c0ee4 100644 --- a/backend/app/git/operations/pull.py +++ b/backend/app/git/operations/pull.py @@ -1,15 +1,49 @@ -# git/operations/pull.py - import git import logging +from git import GitCommandError +from ..status.status import get_git_status logger = logging.getLogger(__name__) + def pull_branch(repo_path, branch_name): try: repo = git.Repo(repo_path) - repo.git.pull('origin', branch_name) - return True, f"Successfully pulled changes for branch {branch_name}." + + # Check for uncommitted changes first + if repo.is_dirty(untracked_files=True): + return False, { + 'type': 'uncommitted_changes', + 'message': + 'Cannot pull: You have uncommitted local changes that would be lost', + 'details': 'Please commit or stash your changes before pulling' + } + + try: + # Fetch first to get remote changes + repo.remotes.origin.fetch() + + try: + # Try to pull with explicit merge strategy + repo.git.pull('origin', branch_name, '--no-rebase') + return True, f"Successfully pulled changes for branch {branch_name}" + except GitCommandError as e: + if "CONFLICT" in str(e): + # Don't reset - let Git stay in merge conflict state + return True, { + 'state': 'resolve', + 'type': 'merge_conflict', + 'message': + 'Repository is now in conflict resolution state. 
Please resolve conflicts to continue merge.', + 'details': 'Please resolve conflicts to continue merge' + } + raise e + + except GitCommandError as e: + logger.error(f"Git command error pulling branch: {str(e)}", + exc_info=True) + return False, f"Error pulling branch: {str(e)}" + except Exception as e: logger.error(f"Error pulling branch: {str(e)}", exc_info=True) - return False, f"Error pulling branch: {str(e)}" \ No newline at end of file + return False, f"Error pulling branch: {str(e)}" diff --git a/backend/app/git/operations/push.py b/backend/app/git/operations/push.py index cc0c90c..8f5faf2 100644 --- a/backend/app/git/operations/push.py +++ b/backend/app/git/operations/push.py @@ -1,16 +1,14 @@ # git/operations/push.py - import git import logging -from .commit import commit_changes from ..auth.authenticate import check_dev_mode, get_github_token logger = logging.getLogger(__name__) -def push_changes(repo_path, files, message): +def push_changes(repo_path): try: - # Check if we're in dev mode + # Check if we're in dev mode - keep this check for push operations if not check_dev_mode(): logger.warning("Not in dev mode. Push operation not allowed.") return False, "Push operation not allowed in production mode." 
@@ -22,37 +20,36 @@ def push_changes(repo_path, files, message): return False, "GitHub token not available" repo = git.Repo(repo_path) - - # Commit changes - commit_success, commit_message = commit_changes( - repo_path, files, message) - if not commit_success: - return False, commit_message - - # Modify the remote URL to include the token origin = repo.remote(name='origin') auth_repo_url = origin.url.replace('https://', f'https://{github_token}@') origin.set_url(auth_repo_url) - # Push changes - push_info = origin.push() - - # Restore the original remote URL (without the token) - origin.set_url( - origin.url.replace(f'https://{github_token}@', 'https://')) - - # Check if the push was successful - if push_info and push_info[0].flags & push_info[0].ERROR: - raise git.GitCommandError("git push", push_info[0].summary) - - return True, "Successfully pushed changes." + try: + # Push changes + push_info = origin.push() + if push_info and push_info[0].flags & push_info[0].ERROR: + raise git.GitCommandError("git push", push_info[0].summary) + return True, "Successfully pushed changes." + except git.GitCommandError as e: + error_msg = str(e) + if "non-fast-forward" in error_msg: + return False, { + "type": + "non_fast_forward", + "message": + "Push rejected: Remote contains work that you do not have locally. Please pull the latest changes first." 
+ } + raise e + finally: + # Always restore the original URL (without token) + origin.set_url( + origin.url.replace(f'https://{github_token}@', 'https://')) except git.GitCommandError as e: logger.error(f"Git command error pushing changes: {str(e)}", exc_info=True) - return False, f"Error pushing changes: {str(e)}" - + return False, str(e) except Exception as e: logger.error(f"Error pushing changes: {str(e)}", exc_info=True) return False, f"Error pushing changes: {str(e)}" diff --git a/backend/app/git/operations/resolve.py b/backend/app/git/operations/resolve.py new file mode 100644 index 0000000..ca10c83 --- /dev/null +++ b/backend/app/git/operations/resolve.py @@ -0,0 +1,223 @@ +# git/operations/resolve.py + +import yaml +from git import GitCommandError +import logging +from typing import Dict, Any +import os +from copy import deepcopy + +logger = logging.getLogger(__name__) + + +def get_version_data(repo, ref, file_path): + """Get YAML data from a specific version of a file.""" + try: + content = repo.git.show(f'{ref}:{file_path}') + return yaml.safe_load(content) if content else None + except GitCommandError: + return None + + +def resolve_conflicts( + repo, resolutions: Dict[str, Dict[str, str]]) -> Dict[str, Any]: + logger.debug(f"Received resolutions for files: {list(resolutions.keys())}") + """ + Resolve merge conflicts based on provided resolutions. 
+ """ + # Get list of conflicting files + try: + status = repo.git.status('--porcelain', '-z').split('\0') + conflicts = [] + for item in status: + if not item or len(item) < 4: + continue + x, y, file_path = item[0], item[1], item[3:] + if 'U' in (x, y) or (x == 'D' and y == 'D'): + conflicts.append(file_path) + + # Validate resolutions are for actual conflicting files + for file_path in resolutions: + if file_path not in conflicts: + return { + 'success': False, + 'error': f"File not in conflict: {file_path}" + } + + except Exception as e: + return { + 'success': False, + 'error': f"Failed to get conflicts: {str(e)}" + } + + # Store initial states for rollback + initial_states = {} + for file_path in resolutions: + try: + # Join with repo path + full_path = os.path.join(repo.working_dir, file_path) + with open(full_path, 'r') as f: + initial_states[file_path] = f.read() + except Exception as e: + return { + 'success': False, + 'error': f"Couldn't read file {file_path}: {str(e)}" + } + + try: + results = {} + for file_path, field_resolutions in resolutions.items(): + # Get all three versions + base_data = get_version_data(repo, 'HEAD^', file_path) + ours_data = get_version_data(repo, 'HEAD', file_path) + theirs_data = get_version_data(repo, 'MERGE_HEAD', file_path) + + if not base_data or not ours_data or not theirs_data: + raise Exception(f"Couldn't get all versions of {file_path}") + + # Start with a deep copy of ours_data to preserve all fields + resolved_data = deepcopy(ours_data) + + # Track changes + kept_values = {} + discarded_values = {} + + # Handle each resolution field + for field, choice in field_resolutions.items(): + if field.startswith('custom_format_'): + # Extract the custom_format ID + try: + cf_id = int(field.split('_')[-1]) + except ValueError: + raise Exception( + f"Invalid custom_format ID in field: {field}") + + # Find the custom_format in ours and theirs + ours_cf = next( + (item for item in ours_data.get('custom_formats', []) + if 
item['id'] == cf_id), None) + theirs_cf = next( + (item + for item in theirs_data.get('custom_formats', []) + if item['id'] == cf_id), None) + + if choice == 'local' and ours_cf: + resolved_cf = ours_cf + kept_values[field] = ours_cf + discarded_values[field] = theirs_cf + elif choice == 'incoming' and theirs_cf: + resolved_cf = theirs_cf + kept_values[field] = theirs_cf + discarded_values[field] = ours_cf + else: + raise Exception( + f"Invalid choice or missing custom_format ID {cf_id} for field: {field}" + ) + + # Update the resolved_data's custom_formats + resolved_cf_list = resolved_data.get('custom_formats', []) + for idx, item in enumerate(resolved_cf_list): + if item['id'] == cf_id: + resolved_cf_list[idx] = resolved_cf + break + else: + # If not found, append it + resolved_cf_list.append(resolved_cf) + resolved_data['custom_formats'] = resolved_cf_list + + elif field.startswith('tag_'): + # Extract the tag name + tag_name = field[len('tag_'):] + current_tags = set(resolved_data.get('tags', [])) + + if choice == 'local': + # Assume 'local' means keeping the tag from ours + if tag_name in ours_data.get('tags', []): + current_tags.add(tag_name) + kept_values[field] = 'local' + discarded_values[field] = 'incoming' + else: + current_tags.discard(tag_name) + kept_values[field] = 'none' + discarded_values[field] = 'incoming' + elif choice == 'incoming': + # Assume 'incoming' means keeping the tag from theirs + if tag_name in theirs_data.get('tags', []): + current_tags.add(tag_name) + kept_values[field] = 'incoming' + discarded_values[field] = 'local' + else: + current_tags.discard(tag_name) + kept_values[field] = 'none' + discarded_values[field] = 'local' + else: + raise Exception( + f"Invalid choice for tag field: {field}") + + resolved_data['tags'] = sorted(current_tags) + + else: + # Handle other fields + field_key = field + if choice == 'local': + resolved_data[field_key] = ours_data.get(field_key) + kept_values[field_key] = ours_data.get(field_key) + 
discarded_values[field_key] = theirs_data.get( + field_key) + elif choice == 'incoming': + resolved_data[field_key] = theirs_data.get(field_key) + kept_values[field_key] = theirs_data.get(field_key) + discarded_values[field_key] = ours_data.get(field_key) + else: + raise Exception(f"Invalid choice for field: {field}") + + # Write resolved version using full path + full_path = os.path.join(repo.working_dir, file_path) + with open(full_path, 'w') as f: + yaml.safe_dump(resolved_data, f, default_flow_style=False) + + # Stage the resolved file + repo.index.add([file_path]) + + results[file_path] = { + 'kept_values': kept_values, + 'discarded_values': discarded_values + } + + # Log the base, ours, theirs, and resolved versions + logger.info(f"Successfully resolved {file_path}") + logger.info( + f"Base version:\n{yaml.safe_dump(base_data, default_flow_style=False)}" + ) + logger.info( + f"Ours version:\n{yaml.safe_dump(ours_data, default_flow_style=False)}" + ) + logger.info( + f"Theirs version:\n{yaml.safe_dump(theirs_data, default_flow_style=False)}" + ) + logger.info( + f"Resolved version:\n{yaml.safe_dump(resolved_data, default_flow_style=False)}" + ) + + logger.debug("==== Status after resolve_conflicts ====") + status_output = repo.git.status('--porcelain', '-z').split('\0') + for item in status_output: + if item: + logger.debug(f"File status: {item}") + logger.debug("=======================================") + + return {'success': True, 'results': results} + + except Exception as e: + # Rollback on any error using full paths + for file_path, initial_state in initial_states.items(): + try: + full_path = os.path.join(repo.working_dir, file_path) + with open(full_path, 'w') as f: + f.write(initial_state) + except Exception as rollback_error: + logger.error( + f"Failed to rollback {file_path}: {str(rollback_error)}") + + logger.error(f"Failed to resolve conflicts: {str(e)}") + return {'success': False, 'error': str(e)} diff --git a/backend/app/git/operations/stage.py 
b/backend/app/git/operations/stage.py index 6b90ebd..6b9e1b4 100644 --- a/backend/app/git/operations/stage.py +++ b/backend/app/git/operations/stage.py @@ -1,36 +1,20 @@ # git/operations/stage.py - import git import logging -from ..auth.authenticate import check_dev_mode, get_github_token logger = logging.getLogger(__name__) def stage_files(repo_path, files): try: - # Check if we're in dev mode - if not check_dev_mode(): - logger.warning("Not in dev mode. Staging operation not allowed.") - return False, "Staging operation not allowed in production mode." - - # Get the GitHub token - github_token = get_github_token() - if not github_token: - logger.error("GitHub token not available") - return False, "GitHub token not available" - repo = git.Repo(repo_path) - # Authenticate with GitHub token - with repo.git.custom_environment(GIT_ASKPASS='echo', - GIT_USERNAME=github_token): - if not files: - repo.git.add(A=True) - message = "All changes have been staged." - else: - repo.index.add(files) - message = "Specified files have been staged." + if not files: + repo.git.add(A=True) + message = "All changes have been staged." + else: + repo.index.add(files) + message = "Specified files have been staged." 
return True, message @@ -38,7 +22,6 @@ def stage_files(repo_path, files): logger.error(f"Git command error staging files: {str(e)}", exc_info=True) return False, f"Error staging files: {str(e)}" - except Exception as e: logger.error(f"Error staging files: {str(e)}", exc_info=True) return False, f"Error staging files: {str(e)}" diff --git a/backend/app/git/operations/types.py b/backend/app/git/operations/types.py new file mode 100644 index 0000000..f7bf94c --- /dev/null +++ b/backend/app/git/operations/types.py @@ -0,0 +1,52 @@ +from dataclasses import dataclass +from typing import List, Dict, Optional, Literal +from enum import Enum + + +class FileType(str, Enum): + REGEX = "regex" + CUSTOM_FORMAT = "custom format" + QUALITY_PROFILE = "quality profile" + + +class ResolutionChoice(str, Enum): + LOCAL = "local" + INCOMING = "incoming" + + +@dataclass +class TagConflict: + tag: str + local_status: Literal["Present", "Absent"] + incoming_status: Literal["Present", "Absent"] + resolution: Optional[ResolutionChoice] = None + + +@dataclass +class FormatConflict: + format_id: str + local_score: Optional[int] + incoming_score: Optional[int] + resolution: Optional[ResolutionChoice] = None + + +@dataclass +class GeneralConflict: + key: str + local_value: any + incoming_value: any + resolution: Optional[ResolutionChoice] = None + + +@dataclass +class FileResolution: + file_type: FileType + filename: str + tags: List[TagConflict] + formats: List[FormatConflict] + general: List[GeneralConflict] + + +@dataclass +class ResolutionRequest: + resolutions: Dict[str, FileResolution] diff --git a/backend/app/git/operations/unstage.py b/backend/app/git/operations/unstage.py new file mode 100644 index 0000000..7890ea7 --- /dev/null +++ b/backend/app/git/operations/unstage.py @@ -0,0 +1,15 @@ +# git/operations/unstage.py +import git +import logging + +logger = logging.getLogger(__name__) + + +def unstage_files(repo_path, files): + try: + repo = git.Repo(repo_path) + 
repo.index.reset(files=files) + return True, "Successfully unstaged files." + except Exception as e: + logger.error(f"Error unstaging files: {str(e)}", exc_info=True) + return False, f"Error unstaging files: {str(e)}" diff --git a/backend/app/git/status/diff.py b/backend/app/git/status/diff.py deleted file mode 100644 index f65f16d..0000000 --- a/backend/app/git/status/diff.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import git -import logging - -logger = logging.getLogger(__name__) - - -def get_diff(repo_path, file_path): - try: - repo = git.Repo(repo_path) - branch = repo.active_branch.name - remote_branch = f'origin/{branch}' # Assuming the remote is 'origin' - - # Fetch the latest changes from the remote - repo.git.fetch() - - # Check if the file is untracked - untracked_files = repo.untracked_files - if file_path in untracked_files: - with open(os.path.join(repo.working_dir, file_path), 'r') as file: - content = file.read() - diff = "\n".join([f"+{line}" for line in content.splitlines()]) - else: - # Check if the file is deleted - if not os.path.exists(os.path.join(repo.working_dir, file_path)): - diff = "-Deleted File" - else: - # Get the diff between the local and the remote branch - diff = repo.git.diff(f'{remote_branch}', file_path) - - return diff - except Exception as e: - logger.error(f"Error getting diff for file {file_path}: {str(e)}", - exc_info=True) - raise e diff --git a/backend/app/git/status/incoming_changes.py b/backend/app/git/status/incoming_changes.py index ae770f6..4325b49 100644 --- a/backend/app/git/status/incoming_changes.py +++ b/backend/app/git/status/incoming_changes.py @@ -1,51 +1,256 @@ # git/status/incoming_changes.py - import os import logging -from .utils import extract_data_from_yaml, determine_type, parse_commit_message +import yaml +from git import GitCommandError +from .utils import determine_type, parse_commit_message, extract_data_from_yaml logger = logging.getLogger(__name__) +def check_merge_conflict(repo, branch, 
file_path): + """ + Checks if an incoming change will conflict with local changes. + Returns True if there would be a merge conflict, False otherwise. + """ + try: + # Check for both uncommitted and committed changes + has_changes = False + + # 1. Check uncommitted changes + status = repo.git.status('--porcelain', file_path).strip() + if status: + status_code = status[:2] if len(status) >= 2 else '' + has_changes = 'M' in status_code or 'A' in status_code or 'D' in status_code + + # 2. Check committed changes not in remote + try: + # Get the merge-base (common ancestor) of local and remote + merge_base = repo.git.merge_base('HEAD', + f'origin/{branch}').strip() + + # Check if there are any commits affecting this file between merge-base and HEAD + committed_changes = repo.git.log(f'{merge_base}..HEAD', + '--', + file_path, + ignore_missing=True).strip() + has_changes = has_changes or bool(committed_changes) + except GitCommandError as e: + logger.warning(f"Error checking committed changes: {str(e)}") + + if has_changes: + try: + # Use correct merge-tree syntax + merge_test = repo.git.merge_tree('--write-tree', 'HEAD', + f'origin/{branch}') + + # Check if this specific file has conflicts in the merge result + return any( + line.startswith('<<<<<<< ') + for line in merge_test.splitlines() if file_path in line) + except GitCommandError as e: + logger.warning( + f"Merge tree test failed, assuming conflict: {str(e)}") + return True # If merge-tree fails, assume there's a conflict + + return False + + except Exception as e: + logger.error( + f"Error checking merge conflict for {file_path}: {str(e)}") + return False # Default to no conflict if we can't determine + + +def get_file_data(repo, file_path, ref): + try: + content = repo.git.show(f'{ref}:{file_path}') + return yaml.safe_load(content) + except GitCommandError: + logger.warning( + f"Failed to retrieve content for file: {file_path} at {ref}") + return None + + def get_incoming_changes(repo, branch): incoming_changes 
= [] - diff = repo.git.diff(f'HEAD...origin/{branch}', name_only=True) - changed_files = diff.split('\n') if diff else [] + + try: + # Get changed files between local and remote + diff_index = repo.git.diff(f'HEAD...origin/{branch}', + '--name-only').split('\n') + untracked = repo.git.ls_files('--others', + '--exclude-standard').split('\n') + changed_files = list(filter(None, set(diff_index + untracked))) + except GitCommandError as e: + logger.error(f"Error getting changed files: {str(e)}") + return [] for file_path in changed_files: - if file_path: - full_path = os.path.join(repo.working_dir, file_path) - file_data = extract_data_from_yaml(full_path) if os.path.exists( - full_path) else None + if not file_path: + continue - # Correcting the git show command - raw_commit_message = repo.git.show(f'HEAD...origin/{branch}', - '--format=%B', '-s', - file_path).strip() - parsed_commit_message = parse_commit_message( - raw_commit_message - ) # Parse commit message using the util function + try: + # Get both versions of the file + local_data = get_file_data(repo, file_path, 'HEAD') + remote_data = get_file_data(repo, file_path, f'origin/{branch}') + + if local_data == remote_data: + continue + + # Check for potential merge conflicts + will_conflict = check_merge_conflict(repo, branch, file_path) + + # Get commit message + try: + raw_commit_message = repo.git.show(f'HEAD...origin/{branch}', + '--format=%B', '-s', '--', + file_path).strip() + commit_message = parse_commit_message(raw_commit_message) + except GitCommandError: + commit_message = { + "body": "", + "footer": "", + "scope": "", + "subject": "Unable to retrieve commit message", + "type": "" + } + + if not local_data and remote_data: + status = 'New' + local_name = remote_data.get('name') + incoming_name = None + changes = [{ + 'key': key, + 'change': 'added', + 'value': value + } for key, value in remote_data.items()] + else: + status = 'Modified' + local_name = local_data.get( + 'name') if local_data else 
os.path.basename(file_path) + incoming_name = remote_data.get( + 'name') if remote_data else None + changes = compare_data(local_data, remote_data) + + if not changes: + continue + + file_type = determine_type(file_path) + file_id = remote_data.get('id') if remote_data else None incoming_changes.append({ - 'name': - file_data.get('name', os.path.basename(file_path)) - if file_data else os.path.basename(file_path), - 'id': - file_data.get('id') if file_data else None, - 'type': - determine_type(file_path), - 'status': - 'Incoming', - 'file_path': - file_path, - 'commit_message': - parsed_commit_message, # Use parsed commit message - 'staged': - False, - 'modified': - True, - 'deleted': - False + 'commit_message': commit_message, + 'deleted': False, + 'file_path': file_path, + 'id': file_id, + 'modified': True, + 'local_name': local_name, + 'incoming_name': incoming_name, + 'staged': False, + 'status': status, + 'type': file_type, + 'changes': changes, + 'will_conflict': + will_conflict # Added conflict status per file }) + except Exception as e: + logger.error( + f"Error processing incoming change for {file_path}: {str(e)}") + continue + + logger.info(f"Found {len(incoming_changes)} incoming changes") return incoming_changes + + +def compare_data(local_data, remote_data): + if local_data is None and remote_data is not None: + # File is entirely new + return [{'key': 'file', 'change': 'added'}] + + if local_data is not None and remote_data is None: + # File has been deleted + return [{'key': 'file', 'change': 'deleted'}] + + changes = [] + all_keys = set(local_data.keys()).union(set(remote_data.keys())) + + for key in all_keys: + local_value = local_data.get(key) + remote_value = remote_data.get(key) + + if local_value != remote_value: + if key == 'tags': + changes.extend(compare_tags(local_value, remote_value)) + elif key == 'custom_formats': + changes.extend( + compare_custom_formats(local_value, remote_value)) + else: + changes.append({ + 'key': key, + 'change': 
'modified', + 'from': local_value, + 'to': remote_value + }) + + return changes + + +def compare_tags(local_tags, remote_tags): + local_tags = set(local_tags or []) + remote_tags = set(remote_tags or []) + + added = remote_tags - local_tags + removed = local_tags - remote_tags + + changes = [] + if added: + changes.append({ + 'key': 'tags', + 'change': 'added', + 'value': list(added) + }) + if removed: + changes.append({ + 'key': 'tags', + 'change': 'removed', + 'value': list(removed) + }) + + return changes + + +def compare_custom_formats(local_cfs, remote_cfs): + local_cfs = {cf['id']: cf for cf in local_cfs or []} + remote_cfs = {cf['id']: cf for cf in remote_cfs or []} + + all_ids = set(local_cfs.keys()).union(set(remote_cfs.keys())) + changes = [] + + for cf_id in all_ids: + local_cf = local_cfs.get(cf_id) + remote_cf = remote_cfs.get(cf_id) + + if local_cf != remote_cf: + if local_cf and remote_cf: + if local_cf['score'] != remote_cf['score']: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'modified', + 'from': local_cf['score'], + 'to': remote_cf['score'] + }) + elif local_cf and not remote_cf: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'removed', + 'value': local_cf['score'] + }) + elif not local_cf and remote_cf: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'added', + 'value': remote_cf['score'] + }) + + return changes diff --git a/backend/app/git/status/merge_conflicts.py b/backend/app/git/status/merge_conflicts.py new file mode 100644 index 0000000..f46ff86 --- /dev/null +++ b/backend/app/git/status/merge_conflicts.py @@ -0,0 +1,115 @@ +import os +import yaml +import logging +from git import GitCommandError +from .utils import determine_type + +logger = logging.getLogger(__name__) + +# Define the possible states +UNRESOLVED = "UNRESOLVED" # File is still in conflict, hasn't been resolved and not added +RESOLVED = "RESOLVED" # File is no longer in conflict, been resolved and has been added + + 
+def get_merge_conflicts(repo): + """Get all merge conflicts in the repository.""" + try: + if not os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD')): + logger.debug("No MERGE_HEAD found - not in merge state") + return [] + + conflicts = [] + status = repo.git.status('--porcelain', '-z').split('\0') + + logger.debug(f"Raw status output: {[s for s in status if s]}") + + for item in status: + if not item or len(item) < 4: + continue + + x, y, file_path = item[0], item[1], item[3:] + logger.debug( + f"Processing status item - X: {x}, Y: {y}, Path: {file_path}") + + if 'U' in (x, y) or (x == 'D' and y == 'D'): + conflict = process_conflict_file(repo, file_path) + if conflict: + conflicts.append(conflict) + + logger.debug(f"Found {len(conflicts)} conflicts") + return conflicts + + except Exception as e: + logger.error(f"Error getting merge conflicts: {str(e)}", exc_info=True) + return [] + + +def process_conflict_file(repo, file_path): + """Process a single conflict file and return its conflict information.""" + try: + logger.debug(f"Processing conflict file: {file_path}") + + # Get current and incoming versions + ours_data = get_version_data(repo, 'HEAD', file_path) + theirs_data = get_version_data(repo, 'MERGE_HEAD', file_path) + + if not ours_data or not theirs_data: + logger.warning( + f"Missing data for {file_path} - Ours: {bool(ours_data)}, Theirs: {bool(theirs_data)}" + ) + return None + + conflict_details = {'conflicting_parameters': []} + + # Find conflicting fields + for key in set(ours_data.keys()) | set(theirs_data.keys()): + if key == 'date_modified': + continue + + ours_value = ours_data.get(key) + theirs_value = theirs_data.get(key) + + if ours_value != theirs_value: + logger.debug( + f"Found conflict in {key} - Local: {ours_value}, Incoming: {theirs_value}" + ) + conflict_details['conflicting_parameters'].append({ + 'parameter': + key, + 'local_value': + ours_value, + 'incoming_value': + theirs_value + }) + + # Check if file still has unmerged (UU) 
status + status_output = repo.git.status('--porcelain', file_path) + logger.debug(f"Status output for {file_path}: {status_output}") + status = UNRESOLVED if status_output.startswith('UU') else RESOLVED + + result = { + 'file_path': file_path, + 'type': determine_type(file_path), + 'name': ours_data.get('name'), + 'status': status, + 'conflict_details': conflict_details + } + + logger.debug(f"Processed conflict result: {result}") + return result + + except Exception as e: + logger.error(f"Error processing conflict file {file_path}: {str(e)}", + exc_info=True) + return None + + +def get_version_data(repo, ref, file_path): + """Get YAML data from a specific version of a file.""" + try: + content = repo.git.show(f'{ref}:{file_path}') + return yaml.safe_load(content) if content else None + except GitCommandError as e: + logger.error( + f"Error getting version data for {ref}:{file_path}: {str(e)}") + return None diff --git a/backend/app/git/status/outgoing_changes.py b/backend/app/git/status/outgoing_changes.py index ae54333..0afe94b 100644 --- a/backend/app/git/status/outgoing_changes.py +++ b/backend/app/git/status/outgoing_changes.py @@ -1,13 +1,14 @@ # git/status/outgoing_changes.py import os -import json import yaml import logging -from .utils import extract_data_from_yaml, determine_type, interpret_git_status +from git import GitCommandError +from .utils import determine_type, parse_commit_message logger = logging.getLogger(__name__) + def get_outgoing_changes(repo): status = repo.git.status('--porcelain', '-z').split('\0') logger.debug(f"Raw porcelain status: {status}") @@ -26,81 +27,213 @@ def get_outgoing_changes(repo): x, y, file_path = item[0], item[1], item[3:] logger.debug(f"Parsed status: x={x}, y={y}, file_path={file_path}") + # Skip files in conflict state + if x == 'U' or y == 'U': + continue + is_staged = x != ' ' and x != '?' 
is_deleted = x == 'D' or y == 'D' - full_path = os.path.join(repo.working_dir, file_path) - if is_deleted: - try: - # Get the content of the file from the last commit - file_content = repo.git.show(f'HEAD:{file_path}') - yaml_content = yaml.safe_load(file_content) - original_name = yaml_content.get('name', 'Unknown') - original_id = yaml_content.get('id', '') - except Exception as e: - logger.warning(f"Could not retrieve original name for deleted file {file_path}: {str(e)}") - original_name = "Unknown" - original_id = "" - - changes.append({ - 'name': original_name, - 'id': original_id, - 'type': determine_type(file_path), - 'status': 'Deleted', - 'file_path': file_path, - 'staged': is_staged, - 'modified': False, - 'deleted': True - }) - elif os.path.isdir(full_path): - logger.debug(f"Found directory: {file_path}, going through folder.") - for root, dirs, files in os.walk(full_path): - for file in files: - if file.endswith('.yml') or file.endswith('.yaml'): - file_full_path = os.path.join(root, file) - logger.debug(f"Found file: {file_full_path}, going through file.") - file_data = extract_data_from_yaml(file_full_path) - if file_data: - logger.debug(f"File contents: {file_data}") - logger.debug(f"Found ID: {file_data.get('id')}") - logger.debug(f"Found Name: {file_data.get('name')}") - changes.append({ - 'name': file_data.get('name', ''), - 'id': file_data.get('id', ''), - 'type': determine_type(file_path), - 'status': interpret_git_status(x, y), - 'file_path': os.path.relpath(file_full_path, repo.working_dir), - 'staged': x != '?' 
and x != ' ', - 'modified': y == 'M', - 'deleted': False - }) - else: - logger.debug(f"No data extracted from file: {file_full_path}") + changes.append(process_deleted_file(repo, file_path, is_staged)) else: - file_data = extract_data_from_yaml(full_path) if os.path.exists(full_path) else None - if file_data: - changes.append({ - 'name': file_data.get('name', ''), - 'id': file_data.get('id', ''), - 'type': determine_type(file_path), - 'status': interpret_git_status(x, y), - 'file_path': file_path, - 'staged': is_staged, - 'modified': y != ' ', - 'deleted': False - }) + changes.append( + process_modified_file(repo, file_path, x, y, is_staged)) + + logger.debug(f"Final changes: {changes}") + return changes + + +def process_deleted_file(repo, file_path, is_staged): + try: + file_content = repo.git.show(f'HEAD:{file_path}') + yaml_content = yaml.safe_load(file_content) + original_name = yaml_content.get('name', 'Unknown') + original_id = yaml_content.get('id', '') + except Exception as e: + logger.warning( + f"Could not retrieve original content for deleted file {file_path}: {str(e)}" + ) + original_name = "Unknown" + original_id = "" + + return { + 'name': original_name, + 'prior_name': original_name, + 'outgoing_name': None, + 'id': original_id, + 'type': determine_type(file_path), + 'status': 'Deleted', + 'file_path': file_path, + 'staged': is_staged, + 'modified': False, + 'deleted': True, + 'changes': [{ + 'key': 'file', + 'change': 'deleted' + }] + } + + +def process_modified_file(repo, file_path, x, y, is_staged): + try: + # Get the content of the file from the last commit + old_content = repo.git.show(f'HEAD:{file_path}') + old_data = yaml.safe_load(old_content) + except GitCommandError: + old_data = None + + # Get the current content of the file + with open(os.path.join(repo.working_dir, file_path), 'r') as f: + new_content = f.read() + new_data = yaml.safe_load(new_content) + + detailed_changes = compare_data(old_data, new_data) + + # Determine prior_name and 
outgoing_name + prior_name = old_data.get('name') if old_data else None + outgoing_name = new_data.get('name') if new_data else None + + # If there's no name change, set outgoing_name to None + if prior_name == outgoing_name: + outgoing_name = None + + return { + 'name': new_data.get('name', os.path.basename(file_path)), + 'prior_name': prior_name, + 'outgoing_name': outgoing_name, + 'id': new_data.get('id', ''), + 'type': determine_type(file_path), + 'status': 'Modified' if old_data else 'New', + 'file_path': file_path, + 'staged': is_staged, + 'modified': y != ' ', + 'deleted': False, + 'changes': detailed_changes + } + + +def compare_data(old_data, new_data): + if old_data is None and new_data is not None: + return [{'key': 'file', 'change': 'added'}] + + if old_data is not None and new_data is None: + return [{'key': 'file', 'change': 'deleted'}] + + changes = [] + all_keys = set(old_data.keys()).union(set(new_data.keys())) + + for key in all_keys: + old_value = old_data.get(key) + new_value = new_data.get(key) + + if old_value != new_value: + if key == 'tags': + changes.extend(compare_tags(old_value, new_value)) + elif key == 'custom_formats': + changes.extend(compare_custom_formats(old_value, new_value)) + elif key == 'conditions': + changes.extend(compare_conditions(old_value, new_value)) else: changes.append({ - 'name': os.path.basename(file_path).replace('.yml', ''), - 'id': '', - 'type': determine_type(file_path), - 'status': interpret_git_status(x, y), - 'file_path': file_path, - 'staged': is_staged, - 'modified': y != ' ', - 'deleted': False + 'key': key, + 'change': 'modified', + 'from': old_value, + 'to': new_value }) - logger.debug(f"Final changes: {json.dumps(changes, indent=2)}") - return changes \ No newline at end of file + return changes + + +def compare_tags(old_tags, new_tags): + old_tags = set(old_tags or []) + new_tags = set(new_tags or []) + + added = new_tags - old_tags + removed = old_tags - new_tags + + changes = [] + if added: + 
changes.append({ + 'key': 'tags', + 'change': 'added', + 'value': list(added) + }) + if removed: + changes.append({ + 'key': 'tags', + 'change': 'removed', + 'value': list(removed) + }) + + return changes + + +def compare_custom_formats(old_cfs, new_cfs): + old_cfs = {cf['id']: cf for cf in old_cfs or []} + new_cfs = {cf['id']: cf for cf in new_cfs or []} + + all_ids = set(old_cfs.keys()).union(set(new_cfs.keys())) + changes = [] + + for cf_id in all_ids: + old_cf = old_cfs.get(cf_id) + new_cf = new_cfs.get(cf_id) + + if old_cf != new_cf: + if old_cf and new_cf: + if old_cf['score'] != new_cf['score']: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'modified', + 'from': old_cf['score'], + 'to': new_cf['score'] + }) + elif old_cf and not new_cf: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'removed', + 'value': old_cf['score'] + }) + elif not old_cf and new_cf: + changes.append({ + 'key': f'custom_format_{cf_id}', + 'change': 'added', + 'value': new_cf['score'] + }) + + return changes + + +def compare_conditions(old_conditions, new_conditions): + changes = [] + old_conditions = old_conditions or [] + new_conditions = new_conditions or [] + + # Check for removed or modified conditions + for i, old_cond in enumerate(old_conditions): + if i >= len(new_conditions): + changes.append({ + 'key': f'conditions[{i}]', + 'change': 'removed', + 'value': old_cond + }) + elif old_cond != new_conditions[i]: + for key in old_cond.keys(): + if old_cond.get(key) != new_conditions[i].get(key): + changes.append({ + 'key': f'conditions[{i}].{key}', + 'change': 'modified', + 'from': old_cond.get(key), + 'to': new_conditions[i].get(key) + }) + + # Check for added conditions + for i in range(len(old_conditions), len(new_conditions)): + changes.append({ + 'key': f'conditions[{i}]', + 'change': 'added', + 'value': new_conditions[i] + }) + + return changes diff --git a/backend/app/git/status/status.py b/backend/app/git/status/status.py index 
8fba0f2..94fa9ce 100644 --- a/backend/app/git/status/status.py +++ b/backend/app/git/status/status.py @@ -1,44 +1,95 @@ # git/status/status.py - import git from git.exc import GitCommandError, InvalidGitRepositoryError import logging -import json from .incoming_changes import get_incoming_changes from .outgoing_changes import get_outgoing_changes +from .merge_conflicts import get_merge_conflicts +from .utils import determine_type +import os +import yaml logger = logging.getLogger(__name__) + def get_commits_ahead(repo, branch): return list(repo.iter_commits(f'origin/{branch}..{branch}')) + def get_commits_behind(repo, branch): return list(repo.iter_commits(f'{branch}..origin/{branch}')) + +def get_unpushed_changes(repo, branch): + """Get detailed info about files modified in unpushed commits""" + try: + # Get the file paths + unpushed_files = repo.git.diff(f'origin/{branch}..{branch}', + '--name-only').split('\n') + unpushed_files = [f for f in unpushed_files if f] + + detailed_changes = [] + for file_path in unpushed_files: + try: + # Get the current content of the file to extract name + with open(os.path.join(repo.working_dir, file_path), 'r') as f: + content = yaml.safe_load(f.read()) + + detailed_changes.append({ + 'type': + determine_type(file_path), + 'name': + content.get('name', os.path.basename(file_path)), + 'file_path': + file_path + }) + except Exception as e: + logger.warning( + f"Could not get details for {file_path}: {str(e)}") + # Fallback to basic info if we can't read the file + detailed_changes.append({ + 'type': determine_type(file_path), + 'name': os.path.basename(file_path), + 'file_path': file_path + }) + + return detailed_changes + except Exception as e: + logger.error(f"Error getting unpushed changes: {str(e)}") + return [] + + def get_git_status(repo_path): try: logger.debug(f"Attempting to get status for repo at {repo_path}") repo = git.Repo(repo_path) - logger.debug(f"Successfully created Repo object") + branch = repo.active_branch.name 
+ remote_branch_exists = f"origin/{branch}" in [ + ref.name for ref in repo.remotes.origin.refs + ] + # Check for merge state + is_merging = os.path.exists(os.path.join(repo.git_dir, 'MERGE_HEAD')) + + # Get merge conflicts if we're in a merge state + merge_conflicts = get_merge_conflicts(repo) if is_merging else [] + + # Get all changes first outgoing_changes = get_outgoing_changes(repo) logger.debug(f"Outgoing changes detected: {outgoing_changes}") - branch = repo.active_branch.name - remote_branch_exists = f"origin/{branch}" in [ref.name for ref in repo.remotes.origin.refs] - if remote_branch_exists: repo.remotes.origin.fetch() commits_behind = get_commits_behind(repo, branch) commits_ahead = get_commits_ahead(repo, branch) - logger.debug(f"Commits behind: {len(commits_behind)}, Commits ahead: {len(commits_ahead)}") - incoming_changes = get_incoming_changes(repo, branch) + unpushed_files = get_unpushed_changes( + repo, branch) if commits_ahead else [] else: commits_behind = [] commits_ahead = [] incoming_changes = [] - logger.debug("Remote branch does not exist, skipping commits ahead/behind and incoming changes calculation.") + unpushed_files = [] status = { "branch": branch, @@ -47,15 +98,13 @@ def get_git_status(repo_path): "commits_behind": len(commits_behind), "commits_ahead": len(commits_ahead), "incoming_changes": incoming_changes, + "has_unpushed_commits": len(commits_ahead) > 0, + "unpushed_files": unpushed_files, + "is_merging": is_merging, + "merge_conflicts": merge_conflicts, + "has_conflicts": bool(merge_conflicts) } - logger.debug(f"Final status object: {json.dumps(status, indent=2)}") return True, status - except GitCommandError as e: - logger.error(f"GitCommandError: {str(e)}") - return False, f"Git error: {str(e)}" - except InvalidGitRepositoryError: - logger.error(f"InvalidGitRepositoryError for path: {repo_path}") - return False, "Invalid Git repository" except Exception as e: - logger.error(f"Unexpected error in get_git_status: {str(e)}", 
exc_info=True) - return False, f"Unexpected error: {str(e)}" \ No newline at end of file + logger.error(f"Error in get_git_status: {str(e)}", exc_info=True) + return False, str(e) diff --git a/docker-compose.yml b/docker-compose.yml index 408a654..491665e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,7 +8,7 @@ services: - ./frontend:/app - /app/node_modules environment: - - VITE_API_URL=http://192.168.1.111:5000 # Replace with your host machine's IP + - VITE_API_URL=http://localhost:5000 - CHOKIDAR_USEPOLLING=true backend: @@ -21,6 +21,6 @@ services: environment: - FLASK_ENV=development env_file: - - .env + - .env.1 volumes: backend_data: diff --git a/docs/diagrams/conflict-resolution.mmd b/docs/diagrams/conflict-resolution.mmd new file mode 100644 index 0000000..b25e76f --- /dev/null +++ b/docs/diagrams/conflict-resolution.mmd @@ -0,0 +1,41 @@ +stateDiagram-v2 + [*] --> CheckingForUpdates: User Initiates Pull + + CheckingForUpdates --> NormalPull: No Conflicts Detected + CheckingForUpdates --> ConflictDetected: Conflicts Found + + NormalPull --> [*]: Pull Complete + + ConflictDetected --> ResolutionState: Enter Resolution Mode + note right of ResolutionState + System returns conflict object + containing all conflicted files + end note + + state ResolutionState { + [*] --> FileSelection + + FileSelection --> FileResolution: Select Unresolved File + + FileResolution --> ConflictChoice + + state ConflictChoice { + [*] --> DecisionMaking + DecisionMaking --> KeepLocal: User Keeps Local + DecisionMaking --> AcceptIncoming: User Accepts Incoming + DecisionMaking --> CustomMerge: User Combines/Modifies + + KeepLocal --> MarkResolved + AcceptIncoming --> MarkResolved + CustomMerge --> MarkResolved + } + + ConflictChoice --> AddFile: File Resolved + + AddFile --> FileSelection: More Files\nto Resolve + AddFile --> AllFilesResolved: No More\nConflicts + } + + ResolutionState --> CommitChanges: All Files Resolved + + CommitChanges --> [*]: Resolution 
Complete \ No newline at end of file diff --git a/docs/diagrams/sync-flow.md b/docs/diagrams/sync-flow.md new file mode 100644 index 0000000..01c9e00 --- /dev/null +++ b/docs/diagrams/sync-flow.md @@ -0,0 +1,24 @@ +Profilarr Sync Flow + +```mermaid +flowchart TD + A[User Opens App] --> B[Check Git Status] + B --> C{Changes Detected?} + C -->|No Changes| D[Up to Date] + C -->|Changes Exist| E{Type of Change} + E -->|Incoming Only| F[Fast Forward Available] + E -->|Outgoing Only| G[Push Available*] + E -->|Both| H{Conflicts?} + H -->|Yes| I[Show Conflict UI] + H -->|No| J[Auto-merge] + I --> K[User Resolves] + K --> L[Apply Resolution] + L --> M[Update Git State] + J --> M + F --> M + G --> M + + %% Add note about push restrictions + N[*Push only available for developers
on specific branches] + N -.- G +``` diff --git a/frontend/index.html b/frontend/index.html index 4824950..f734373 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -1,16 +1,14 @@ - + + + + + + Profilarr + - - - - - Regexerr - - - -
- - - - \ No newline at end of file + +
+ + + diff --git a/frontend/src/api/api.js b/frontend/src/api/api.js index 3f2656c..ec2bb70 100644 --- a/frontend/src/api/api.js +++ b/frontend/src/api/api.js @@ -1,6 +1,6 @@ import axios from 'axios'; -const API_BASE_URL = 'http://localhost:5000'; +const API_BASE_URL = import.meta.env.VITE_API_URL || 'http://localhost:5000'; export const getRegexes = async () => { try { @@ -133,7 +133,15 @@ export const getSettings = async () => { export const getGitStatus = async () => { try { const response = await axios.get(`${API_BASE_URL}/git/status`); - return response.data; + // Ensure has_unpushed_commits is included in the response + return { + ...response.data, + data: { + ...response.data.data, + has_unpushed_commits: + response.data.data.has_unpushed_commits || false + } + }; } catch (error) { console.error('Error fetching Git status:', error); throw error; @@ -152,9 +160,21 @@ export const getBranches = async () => { export const checkoutBranch = async branchName => { try { - const response = await axios.post(`${API_BASE_URL}/git/checkout`, { - branch: branchName - }); + const response = await axios.post( + `${API_BASE_URL}/git/checkout`, + { + branch: branchName + }, + { + validateStatus: status => { + return ( + (status >= 200 && status < 300) || + status === 400 || + status === 409 + ); + } + } + ); return response.data; } catch (error) { console.error('Error checking out branch:', error); @@ -164,10 +184,22 @@ export const checkoutBranch = async branchName => { export const createBranch = async (branchName, baseBranch) => { try { - const response = await axios.post(`${API_BASE_URL}/git/branch`, { - name: branchName, - base: baseBranch - }); + const response = await axios.post( + `${API_BASE_URL}/git/branch`, + { + name: branchName, + base: baseBranch + }, + { + validateStatus: status => { + return ( + (status >= 200 && status < 300) || + status === 400 || + status === 409 + ); + } + } + ); return response.data; } catch (error) { console.error('Error creating 
branch:', error); @@ -178,7 +210,16 @@ export const createBranch = async (branchName, baseBranch) => { export const deleteBranch = async branchName => { try { const response = await axios.delete( - `${API_BASE_URL}/git/branch/${branchName}` + `${API_BASE_URL}/git/branch/${branchName}`, + { + validateStatus: status => { + return ( + (status >= 200 && status < 300) || + status === 400 || + status === 409 + ); + } + } ); return response.data; } catch (error) { @@ -187,6 +228,30 @@ export const deleteBranch = async branchName => { } }; +export const pushBranchToRemote = async branchName => { + try { + const response = await axios.post( + `${API_BASE_URL}/git/branch/push`, + { + branch: branchName + }, + { + validateStatus: status => { + return ( + (status >= 200 && status < 300) || + status === 400 || + status === 409 + ); + } + } + ); + return response.data; + } catch (error) { + console.error('Error pushing branch to remote:', error); + throw error; + } +}; + export const addFiles = async files => { try { const response = await axios.post(`${API_BASE_URL}/git/stage`, {files}); @@ -197,19 +262,49 @@ export const addFiles = async files => { } }; -export const pushFiles = async (files, commitMessage) => { +export const unstageFiles = async files => { try { - const response = await axios.post(`${API_BASE_URL}/git/push`, { + const response = await axios.post(`${API_BASE_URL}/git/unstage`, { + files + }); + return response.data; + } catch (error) { + console.error('Error unstaging files:', error); + throw error; + } +}; + +export const commitFiles = async (files, commitMessage) => { + try { + const response = await axios.post(`${API_BASE_URL}/git/commit`, { files, commit_message: commitMessage }); return response.data; } catch (error) { - console.error('Error pushing files:', error); + console.error('Error committing files:', error); throw error; } }; +export const pushFiles = async () => { + try { + const response = await axios.post(`${API_BASE_URL}/git/push`); + return 
response.data; + } catch (error) { + // Pass through the structured error from the backend + if (error.response?.data) { + return { + success: false, + error: error.response.data.error + }; + } + return { + success: false, + error: 'Failed to push changes' + }; + } +}; export const revertFile = async filePath => { try { const response = await axios.post(`${API_BASE_URL}/git/revert`, { @@ -251,20 +346,19 @@ export const pullBranch = async branchName => { }); return response.data; } catch (error) { - console.error('Error pulling branch:', error); - throw error; - } -}; - -export const getDiff = async filePath => { - try { - const response = await axios.post(`${API_BASE_URL}/git/diff`, { - file_path: filePath - }); - return response.data; - } catch (error) { - console.error('Error fetching diff:', error); - throw error; + if (error.response?.data) { + return { + success: false, + state: error.response.data.state || 'error', + message: error.response.data.message, + details: error.response.data.details + }; + } + return { + success: false, + state: 'error', + message: 'Failed to pull changes' + }; } }; @@ -335,18 +429,6 @@ export const unlinkRepo = async (removeFiles = false) => { } }; -export const pushBranchToRemote = async branchName => { - try { - const response = await axios.post(`${API_BASE_URL}/git/branch/push`, { - branch: branchName - }); - return response.data; - } catch (error) { - console.error('Error pushing branch to remote:', error); - throw error; - } -}; - export const checkDevMode = async () => { try { const response = await axios.get(`${API_BASE_URL}/git/dev`); @@ -356,3 +438,44 @@ export const checkDevMode = async () => { throw error; } }; + +export const resolveConflict = async resolutions => { + try { + const response = await axios.post(`${API_BASE_URL}/git/resolve`, { + resolutions + }); + return response.data; + } catch (error) { + console.error('Error resolving conflicts:', error); + throw error; + } +}; + +export const finalizeMerge = async () 
=> { + try { + const response = await axios.post(`${API_BASE_URL}/git/merge/finalize`); + return response.data; + } catch (error) { + console.error('Error finalizing merge:', error); + if (error.response?.data) { + return { + success: false, + error: error.response.data.error + }; + } + return { + success: false, + error: 'Failed to finalize merge' + }; + } +}; + +export const abortMerge = async () => { + try { + const response = await axios.post(`${API_BASE_URL}/git/merge/abort`); + return response.data; + } catch (error) { + console.error('Error aborting merge:', error); + throw error; + } +}; diff --git a/frontend/src/components/format/FormatPage.jsx b/frontend/src/components/format/FormatPage.jsx index 45996b1..c772809 100644 --- a/frontend/src/components/format/FormatPage.jsx +++ b/frontend/src/components/format/FormatPage.jsx @@ -1,150 +1,203 @@ -import React, { useState, useEffect } from "react"; -import FormatCard from "./FormatCard"; -import FormatModal from "./FormatModal"; -import AddNewCard from "../ui/AddNewCard"; -import { getFormats } from "../../api/api"; -import FilterMenu from "../ui/FilterMenu"; -import SortMenu from "../ui/SortMenu"; -import { Loader } from "lucide-react"; +import React, {useState, useEffect} from 'react'; +import {useNavigate} from 'react-router-dom'; +import FormatCard from './FormatCard'; +import FormatModal from './FormatModal'; +import AddNewCard from '../ui/AddNewCard'; +import {getFormats, getGitStatus} from '../../api/api'; +import FilterMenu from '../ui/FilterMenu'; +import SortMenu from '../ui/SortMenu'; +import {Loader} from 'lucide-react'; function FormatPage() { - const [formats, setFormats] = useState([]); - const [isModalOpen, setIsModalOpen] = useState(false); - const [selectedFormat, setSelectedFormat] = useState(null); - const [sortBy, setSortBy] = useState("title"); - const [filterType, setFilterType] = useState("none"); - const [filterValue, setFilterValue] = useState(""); - const [allTags, setAllTags] = 
useState([]); - const [isCloning, setIsCloning] = useState(false); - const [isLoading, setIsLoading] = useState(true); + const [formats, setFormats] = useState([]); + const [isModalOpen, setIsModalOpen] = useState(false); + const [selectedFormat, setSelectedFormat] = useState(null); + const [sortBy, setSortBy] = useState('title'); + const [filterType, setFilterType] = useState('none'); + const [filterValue, setFilterValue] = useState(''); + const [allTags, setAllTags] = useState([]); + const [isCloning, setIsCloning] = useState(false); + const [isLoading, setIsLoading] = useState(true); + const [mergeConflicts, setMergeConflicts] = useState([]); - const loadingMessages = [ - "Decoding the custom format matrix...", - "Parsing the digital alphabet soup...", - "Untangling the format spaghetti...", - "Calibrating the format-o-meter...", - "Indexing your media DNA...", - ]; + const navigate = useNavigate(); - useEffect(() => { - fetchFormats(); - }, []); + const loadingMessages = [ + 'Decoding the custom format matrix...', + 'Parsing the digital alphabet soup...', + 'Untangling the format spaghetti...', + 'Calibrating the format-o-meter...', + 'Indexing your media DNA...' 
+ ]; - const fetchFormats = async () => { - try { - const fetchedFormats = await getFormats(); - setFormats(fetchedFormats); - const tags = [ - ...new Set(fetchedFormats.flatMap((format) => format.tags || [])), - ]; - setAllTags(tags); - } catch (error) { - console.error("Error fetching formats:", error); - } finally { - setIsLoading(false); - } - }; + useEffect(() => { + fetchGitStatus(); + }, []); - const handleOpenModal = (format = null) => { - setSelectedFormat(format); - setIsModalOpen(true); - setIsCloning(false); - }; - - const handleCloseModal = () => { - setSelectedFormat(null); - setIsModalOpen(false); - setIsCloning(false); - }; - - const handleCloneFormat = (format) => { - const clonedFormat = { - ...format, - id: 0, - name: `${format.name} [COPY]`, + const fetchFormats = async () => { + try { + const fetchedFormats = await getFormats(); + setFormats(fetchedFormats); + const tags = [ + ...new Set(fetchedFormats.flatMap(format => format.tags || [])) + ]; + setAllTags(tags); + } catch (error) { + console.error('Error fetching formats:', error); + } finally { + setIsLoading(false); + } }; - setSelectedFormat(clonedFormat); - setIsModalOpen(true); - setIsCloning(true); - }; - const handleSaveFormat = () => { - fetchFormats(); - handleCloseModal(); - }; + const fetchGitStatus = async () => { + try { + const result = await getGitStatus(); + if (result.success) { + setMergeConflicts(result.data.merge_conflicts || []); + if (result.data.merge_conflicts.length === 0) { + fetchFormats(); + } else { + setIsLoading(false); + } + } + } catch (error) { + console.error('Error fetching Git status:', error); + setIsLoading(false); + } + }; - const formatDate = (dateString) => { - return new Date(dateString).toLocaleString(); - }; + const handleOpenModal = (format = null) => { + setSelectedFormat(format); + setIsModalOpen(true); + setIsCloning(false); + }; - const sortedAndFilteredFormats = formats - .filter((format) => { - if (filterType === "tag") { - return 
format.tags && format.tags.includes(filterValue); - } - if (filterType === "date") { - const formatDate = new Date(format.date_modified); - const filterDate = new Date(filterValue); - return formatDate.toDateString() === filterDate.toDateString(); - } - return true; - }) - .sort((a, b) => { - if (sortBy === "title") return a.name.localeCompare(b.name); - if (sortBy === "dateCreated") - return new Date(b.date_created) - new Date(a.date_created); - if (sortBy === "dateModified") - return new Date(b.date_modified) - new Date(a.date_modified); - return 0; - }); + const handleCloseModal = () => { + setSelectedFormat(null); + setIsModalOpen(false); + setIsCloning(false); + }; + + const handleCloneFormat = format => { + const clonedFormat = { + ...format, + id: 0, + name: `${format.name} [COPY]` + }; + setSelectedFormat(clonedFormat); + setIsModalOpen(true); + setIsCloning(true); + }; + + const handleSaveFormat = () => { + fetchFormats(); + handleCloseModal(); + }; + + const formatDate = dateString => { + return new Date(dateString).toLocaleString(); + }; + + const sortedAndFilteredFormats = formats + .filter(format => { + if (filterType === 'tag') { + return format.tags && format.tags.includes(filterValue); + } + if (filterType === 'date') { + const formatDate = new Date(format.date_modified); + const filterDate = new Date(filterValue); + return formatDate.toDateString() === filterDate.toDateString(); + } + return true; + }) + .sort((a, b) => { + if (sortBy === 'title') return a.name.localeCompare(b.name); + if (sortBy === 'dateCreated') + return new Date(b.date_created) - new Date(a.date_created); + if (sortBy === 'dateModified') + return new Date(b.date_modified) - new Date(a.date_modified); + return 0; + }); + + const hasConflicts = mergeConflicts.length > 0; + + if (isLoading) { + return ( +
+ +

+ { + loadingMessages[ + Math.floor(Math.random() * loadingMessages.length) + ] + } +

+
+ ); + } + + if (hasConflicts) { + return ( +
+
+

+ Merge Conflicts Detected +

+ +
+ +
+

What Happened?

+

+ This page is locked because there are unresolved merge + conflicts. You need to address these conflicts in the + settings page before continuing. +

+
+
+ ); + } - if (isLoading) { return ( -
- -

- {loadingMessages[Math.floor(Math.random() * loadingMessages.length)]} -

-
+
+

Manage Custom Formats

+
+ + +
+
+ {sortedAndFilteredFormats.map(format => ( + handleOpenModal(format)} + onClone={handleCloneFormat} + showDate={sortBy !== 'title'} + formatDate={formatDate} + /> + ))} + handleOpenModal()} /> +
+ +
); - } - - return ( -
-

Manage Custom Formats

-
- - -
-
- {sortedAndFilteredFormats.map((format) => ( - handleOpenModal(format)} - onClone={handleCloneFormat} // Pass the clone handler - showDate={sortBy !== "title"} - formatDate={formatDate} - /> - ))} - handleOpenModal()} /> -
- -
- ); } export default FormatPage; diff --git a/frontend/src/components/profile/ProfileModal.jsx b/frontend/src/components/profile/ProfileModal.jsx index 55c2888..623afac 100644 --- a/frontend/src/components/profile/ProfileModal.jsx +++ b/frontend/src/components/profile/ProfileModal.jsx @@ -265,12 +265,7 @@ function ProfileModal({ }; return ( - + {loading ? (
diff --git a/frontend/src/components/profile/ProfilePage.jsx b/frontend/src/components/profile/ProfilePage.jsx index 74f1a8f..b2d25fd 100644 --- a/frontend/src/components/profile/ProfilePage.jsx +++ b/frontend/src/components/profile/ProfilePage.jsx @@ -1,169 +1,223 @@ -import React, { useState, useEffect } from "react"; -import ProfileCard from "./ProfileCard"; -import ProfileModal from "./ProfileModal"; -import AddNewCard from "../ui/AddNewCard"; -import { getProfiles, getFormats } from "../../api/api"; -import FilterMenu from "../ui/FilterMenu"; -import SortMenu from "../ui/SortMenu"; -import { Loader } from "lucide-react"; +import React, {useState, useEffect} from 'react'; +import {useNavigate} from 'react-router-dom'; +import ProfileCard from './ProfileCard'; +import ProfileModal from './ProfileModal'; +import AddNewCard from '../ui/AddNewCard'; +import {getProfiles, getFormats, getGitStatus} from '../../api/api'; +import FilterMenu from '../ui/FilterMenu'; +import SortMenu from '../ui/SortMenu'; +import {Loader} from 'lucide-react'; function ProfilePage() { - const [profiles, setProfiles] = useState([]); - const [formats, setFormats] = useState([]); - const [isModalOpen, setIsModalOpen] = useState(false); - const [selectedProfile, setSelectedProfile] = useState(null); - const [sortBy, setSortBy] = useState("title"); - const [filterType, setFilterType] = useState("none"); - const [filterValue, setFilterValue] = useState(""); - const [allTags, setAllTags] = useState([]); - const [isCloning, setIsCloning] = useState(false); - const [isLoading, setIsLoading] = useState(true); + const [profiles, setProfiles] = useState([]); + const [formats, setFormats] = useState([]); + const [isModalOpen, setIsModalOpen] = useState(false); + const [selectedProfile, setSelectedProfile] = useState(null); + const [sortBy, setSortBy] = useState('title'); + const [filterType, setFilterType] = useState('none'); + const [filterValue, setFilterValue] = useState(''); + const [allTags, 
setAllTags] = useState([]); + const [isCloning, setIsCloning] = useState(false); + const [isLoading, setIsLoading] = useState(true); + const [mergeConflicts, setMergeConflicts] = useState([]); - const loadingMessages = [ - "Profiling your media collection...", - "Organizing your digital hoard...", - "Calibrating the flux capacitor...", - "Synchronizing with the movie matrix...", - "Optimizing your binge-watching potential...", - ]; + const navigate = useNavigate(); - useEffect(() => { - fetchProfiles(); - fetchFormats(); - }, []); + const loadingMessages = [ + 'Profiling your media collection...', + 'Organizing your digital hoard...', + 'Calibrating the flux capacitor...', + 'Synchronizing with the movie matrix...', + 'Optimizing your binge-watching potential...' + ]; - const fetchProfiles = async () => { - try { - const fetchedProfiles = await getProfiles(); - setProfiles(fetchedProfiles); - const tags = [ - ...new Set(fetchedProfiles.flatMap((profile) => profile.tags || [])), - ]; - setAllTags(tags); - } catch (error) { - console.error("Error fetching profiles:", error); - } finally { - setIsLoading(false); - } - }; + useEffect(() => { + fetchGitStatus(); + }, []); - const fetchFormats = async () => { - try { - const fetchedFormats = await getFormats(); - setFormats(fetchedFormats); - } catch (error) { - console.error("Error fetching formats:", error); - } - }; - - const handleOpenModal = (profile = null) => { - const safeProfile = profile - ? 
{ - ...profile, - custom_formats: profile.custom_formats || [], + const fetchProfiles = async () => { + try { + const fetchedProfiles = await getProfiles(); + setProfiles(fetchedProfiles); + const tags = [ + ...new Set( + fetchedProfiles.flatMap(profile => profile.tags || []) + ) + ]; + setAllTags(tags); + } catch (error) { + console.error('Error fetching profiles:', error); + } finally { + setIsLoading(false); } - : null; - setSelectedProfile(safeProfile); - setIsModalOpen(true); - setIsCloning(false); - }; - - const handleCloseModal = () => { - setSelectedProfile(null); - setIsModalOpen(false); - setIsCloning(false); - }; - - const handleCloneProfile = (profile) => { - const clonedProfile = { - ...profile, - id: 0, - name: `${profile.name} [COPY]`, - custom_formats: profile.custom_formats || [], }; - setSelectedProfile(clonedProfile); - setIsModalOpen(true); - setIsCloning(true); - }; - const handleSaveProfile = () => { - fetchProfiles(); - handleCloseModal(); - }; + const fetchFormats = async () => { + try { + const fetchedFormats = await getFormats(); + setFormats(fetchedFormats); + } catch (error) { + console.error('Error fetching formats:', error); + } + }; - // Define the missing formatDate function - const formatDate = (dateString) => { - return new Date(dateString).toLocaleString(); - }; + const fetchGitStatus = async () => { + try { + const result = await getGitStatus(); + if (result.success) { + setMergeConflicts(result.data.merge_conflicts || []); + if (result.data.merge_conflicts.length === 0) { + fetchProfiles(); + fetchFormats(); + } else { + setIsLoading(false); + } + } + } catch (error) { + console.error('Error fetching Git status:', error); + setIsLoading(false); + } + }; - const sortedAndFilteredProfiles = profiles - .filter((profile) => { - if (filterType === "tag") { - return profile.tags && profile.tags.includes(filterValue); - } - if (filterType === "date") { - const profileDate = new Date(profile.date_modified); - const filterDate = new 
Date(filterValue); - return profileDate.toDateString() === filterDate.toDateString(); - } - return true; - }) - .sort((a, b) => { - if (sortBy === "name") return a.name.localeCompare(b.name); - if (sortBy === "dateCreated") - return new Date(b.date_created) - new Date(a.date_created); - if (sortBy === "dateModified") - return new Date(b.date_modified) - new Date(a.date_modified); - return 0; - }); + const handleOpenModal = (profile = null) => { + const safeProfile = profile + ? { + ...profile, + custom_formats: profile.custom_formats || [] + } + : null; + setSelectedProfile(safeProfile); + setIsModalOpen(true); + setIsCloning(false); + }; + + const handleCloseModal = () => { + setSelectedProfile(null); + setIsModalOpen(false); + setIsCloning(false); + }; + + const handleCloneProfile = profile => { + const clonedProfile = { + ...profile, + id: 0, + name: `${profile.name} [COPY]`, + custom_formats: profile.custom_formats || [] + }; + setSelectedProfile(clonedProfile); + setIsModalOpen(true); + setIsCloning(true); + }; + + const handleSaveProfile = () => { + fetchProfiles(); + handleCloseModal(); + }; + + const formatDate = dateString => { + return new Date(dateString).toLocaleString(); + }; + + const sortedAndFilteredProfiles = profiles + .filter(profile => { + if (filterType === 'tag') { + return profile.tags && profile.tags.includes(filterValue); + } + if (filterType === 'date') { + const profileDate = new Date(profile.date_modified); + const filterDate = new Date(filterValue); + return profileDate.toDateString() === filterDate.toDateString(); + } + return true; + }) + .sort((a, b) => { + if (sortBy === 'name') return a.name.localeCompare(b.name); + if (sortBy === 'dateCreated') + return new Date(b.date_created) - new Date(a.date_created); + if (sortBy === 'dateModified') + return new Date(b.date_modified) - new Date(a.date_modified); + return 0; + }); + + const hasConflicts = mergeConflicts.length > 0; + + if (isLoading) { + return ( +
+ +

+ { + loadingMessages[ + Math.floor(Math.random() * loadingMessages.length) + ] + } +

+
+ ); + } + + if (hasConflicts) { + return ( +
+
+

+ Merge Conflicts Detected +

+ +
+ +
+

What Happened?

+

+ This page is locked because there are unresolved merge + conflicts. You need to address these conflicts in the + settings page before continuing. +

+
+
+ ); + } - if (isLoading) { return ( -
- -

- {loadingMessages[Math.floor(Math.random() * loadingMessages.length)]} -

-
+
+

Manage Profiles

+
+ + +
+
+ {sortedAndFilteredProfiles.map(profile => ( + handleOpenModal(profile)} + onClone={handleCloneProfile} + showDate={sortBy !== 'name'} + formatDate={formatDate} + /> + ))} + handleOpenModal()} /> +
+ +
); - } - - return ( -
-

Manage Profiles

-
- - -
-
- {sortedAndFilteredProfiles.map((profile) => ( - handleOpenModal(profile)} - onClone={handleCloneProfile} - showDate={sortBy !== "name"} - formatDate={formatDate} // Pass the formatDate function to the ProfileCard - /> - ))} - handleOpenModal()} /> -
- -
- ); } export default ProfilePage; diff --git a/frontend/src/components/regex/RegexPage.jsx b/frontend/src/components/regex/RegexPage.jsx index a7a7cbf..fa6f8bd 100644 --- a/frontend/src/components/regex/RegexPage.jsx +++ b/frontend/src/components/regex/RegexPage.jsx @@ -1,151 +1,208 @@ -import React, { useState, useEffect } from "react"; -import RegexCard from "./RegexCard"; -import RegexModal from "./RegexModal"; -import AddNewCard from "../ui/AddNewCard"; -import { getRegexes } from "../../api/api"; -import FilterMenu from "../ui/FilterMenu"; -import SortMenu from "../ui/SortMenu"; -import { Loader } from "lucide-react"; +import React, {useState, useEffect} from 'react'; +import {useNavigate} from 'react-router-dom'; +import RegexCard from './RegexCard'; +import RegexModal from './RegexModal'; +import AddNewCard from '../ui/AddNewCard'; +import {getRegexes} from '../../api/api'; +import FilterMenu from '../ui/FilterMenu'; +import SortMenu from '../ui/SortMenu'; +import {Loader} from 'lucide-react'; +import {getGitStatus} from '../../api/api'; function RegexPage() { - const [regexes, setRegexes] = useState([]); - const [isModalOpen, setIsModalOpen] = useState(false); - const [selectedRegex, setSelectedRegex] = useState(null); - const [sortBy, setSortBy] = useState("title"); - const [filterType, setFilterType] = useState("none"); - const [filterValue, setFilterValue] = useState(""); - const [allTags, setAllTags] = useState([]); - const [isCloning, setIsCloning] = useState(false); - const [isLoading, setIsLoading] = useState(true); + const [regexes, setRegexes] = useState([]); + const [isModalOpen, setIsModalOpen] = useState(false); + const [selectedRegex, setSelectedRegex] = useState(null); + const [sortBy, setSortBy] = useState('title'); + const [filterType, setFilterType] = useState('none'); + const [filterValue, setFilterValue] = useState(''); + const [allTags, setAllTags] = useState([]); + const [isCloning, setIsCloning] = useState(false); + const [isLoading, 
setIsLoading] = useState(true); + const [mergeConflicts, setMergeConflicts] = useState([]); - const loadingMessages = [ - "Matching patterns in the digital universe...", - "Capturing groups of binary brilliance...", - "Escaping special characters in the wild...", - "Quantifying the unquantifiable...", - "Regex-ing the un-regex-able...", - ]; + const navigate = useNavigate(); - useEffect(() => { - fetchRegexes(); - }, []); + const loadingMessages = [ + 'Compiling complex patterns...', + 'Analyzing regex efficiency...', + 'Optimizing search algorithms...', + 'Testing pattern boundaries...', + 'Loading regex libraries...', + 'Parsing intricate expressions...', + 'Detecting pattern conflicts...', + 'Refactoring nested groups...' + ]; - const fetchRegexes = async () => { - try { - const fetchedRegexes = await getRegexes(); - setRegexes(fetchedRegexes); - const tags = [ - ...new Set(fetchedRegexes.flatMap((regex) => regex.tags || [])), - ]; - setAllTags(tags); - } catch (error) { - console.error("Error fetching regexes:", error); - } finally { - setIsLoading(false); - } - }; + useEffect(() => { + fetchGitStatus(); + }, []); - const handleOpenModal = (regex = null) => { - setSelectedRegex(regex); - setIsModalOpen(true); - setIsCloning(false); - }; - - const handleCloseModal = () => { - setSelectedRegex(null); - setIsModalOpen(false); - setIsCloning(false); - }; - - const handleCloneRegex = (regex) => { - const clonedRegex = { - ...regex, - id: 0, - name: `${regex.name} [COPY]`, - regex101Link: "", + const fetchRegexes = async () => { + try { + const fetchedRegexes = await getRegexes(); + setRegexes(fetchedRegexes); + const tags = [ + ...new Set(fetchedRegexes.flatMap(regex => regex.tags || [])) + ]; + setAllTags(tags); + } catch (error) { + console.error('Error fetching regexes:', error); + } finally { + setIsLoading(false); + } }; - setSelectedRegex(clonedRegex); - setIsModalOpen(true); - setIsCloning(true); - }; - const handleSaveRegex = () => { - fetchRegexes(); - 
handleCloseModal(); - }; + const fetchGitStatus = async () => { + try { + const result = await getGitStatus(); + if (result.success) { + setMergeConflicts(result.data.merge_conflicts || []); + if (result.data.merge_conflicts.length === 0) { + fetchRegexes(); + } else { + setIsLoading(false); + } + } + } catch (error) { + console.error('Error fetching Git status:', error); + setIsLoading(false); + } + }; - const formatDate = (dateString) => { - return new Date(dateString).toLocaleString(); - }; + const handleOpenModal = (regex = null) => { + setSelectedRegex(regex); + setIsModalOpen(true); + setIsCloning(false); + }; - const sortedAndFilteredRegexes = regexes - .filter((regex) => { - if (filterType === "tag") { - return regex.tags && regex.tags.includes(filterValue); - } - if (filterType === "date") { - const regexDate = new Date(regex.date_modified); - const filterDate = new Date(filterValue); - return regexDate.toDateString() === filterDate.toDateString(); - } - return true; - }) - .sort((a, b) => { - if (sortBy === "title") return a.name.localeCompare(b.name); - if (sortBy === "dateCreated") - return new Date(b.date_created) - new Date(a.date_created); - if (sortBy === "dateModified") - return new Date(b.date_modified) - new Date(a.date_modified); - return 0; - }); + const handleCloseModal = () => { + setSelectedRegex(null); + setIsModalOpen(false); + setIsCloning(false); + }; + + const handleCloneRegex = regex => { + const clonedRegex = { + ...regex, + id: 0, + name: `${regex.name} [COPY]`, + regex101Link: '' + }; + setSelectedRegex(clonedRegex); + setIsModalOpen(true); + setIsCloning(true); + }; + + const handleSaveRegex = () => { + fetchRegexes(); + handleCloseModal(); + }; + + const formatDate = dateString => { + return new Date(dateString).toLocaleString(); + }; + + const sortedAndFilteredRegexes = regexes + .filter(regex => { + if (filterType === 'tag') { + return regex.tags && regex.tags.includes(filterValue); + } + if (filterType === 'date') { + const 
regexDate = new Date(regex.date_modified); + const filterDate = new Date(filterValue); + return regexDate.toDateString() === filterDate.toDateString(); + } + return true; + }) + .sort((a, b) => { + if (sortBy === 'title') return a.name.localeCompare(b.name); + if (sortBy === 'dateCreated') + return new Date(b.date_created) - new Date(a.date_created); + if (sortBy === 'dateModified') + return new Date(b.date_modified) - new Date(a.date_modified); + return 0; + }); + + const hasConflicts = mergeConflicts.length > 0; + + if (isLoading) { + return ( +
+ +

+ { + loadingMessages[ + Math.floor(Math.random() * loadingMessages.length) + ] + } +

+
+ ); + } + + if (hasConflicts) { + return ( +
+
+

+ Merge Conflicts Detected +

+ +
+ +
+

What Happened?

+

+ This page is locked because there are unresolved merge + conflicts. You need to address these conflicts in the + settings page before continuing. +

+
+
+ ); + } - if (isLoading) { return ( -
- -

- {loadingMessages[Math.floor(Math.random() * loadingMessages.length)]} -

-
+
+

Manage Regex Patterns

+
+ + +
+
+ {sortedAndFilteredRegexes.map(regex => ( + handleOpenModal(regex)} + onClone={handleCloneRegex} // Pass the clone handler + showDate={sortBy !== 'title'} + formatDate={formatDate} + /> + ))} + handleOpenModal()} /> +
+ +
); - } - - return ( -
-

Manage Regex Patterns

-
- - -
-
- {sortedAndFilteredRegexes.map((regex) => ( - handleOpenModal(regex)} - onClone={handleCloneRegex} // Pass the clone handler - showDate={sortBy !== "title"} - formatDate={formatDate} - /> - ))} - handleOpenModal()} /> -
- -
- ); } export default RegexPage; diff --git a/frontend/src/components/settings/SettingsPage.jsx b/frontend/src/components/settings/SettingsPage.jsx index d156b6b..56a0050 100644 --- a/frontend/src/components/settings/SettingsPage.jsx +++ b/frontend/src/components/settings/SettingsPage.jsx @@ -3,6 +3,8 @@ import { getSettings, getGitStatus, addFiles, + unstageFiles, + commitFiles, pushFiles, revertFile, pullBranch, @@ -32,6 +34,7 @@ const SettingsPage = () => { const [noChangesMessage, setNoChangesMessage] = useState(''); const [activeTab, setActiveTab] = useState('git'); const tabsRef = useRef({}); + const [mergeConflicts, setMergeConflicts] = useState([]); useEffect(() => { fetchSettings(); @@ -67,10 +70,11 @@ const SettingsPage = () => { setStatusLoading(true); setStatusLoadingMessage(getRandomMessage(statusLoadingMessages)); setNoChangesMessage(getRandomMessage(noChangesMessages)); + try { const result = await getGitStatus(); if (result.success) { - setChanges({ + const gitStatus = { ...result.data, outgoing_changes: Array.isArray( result.data.outgoing_changes @@ -81,8 +85,16 @@ const SettingsPage = () => { result.data.incoming_changes ) ? result.data.incoming_changes + : [], + merge_conflicts: Array.isArray(result.data.merge_conflicts) + ? result.data.merge_conflicts : [] - }); + }; + + setChanges(gitStatus); + setMergeConflicts(gitStatus.merge_conflicts); + + console.log('Git Status:', JSON.stringify(gitStatus, null, 2)); } } catch (error) { console.error('Error fetching Git status:', error); @@ -114,13 +126,33 @@ const SettingsPage = () => { } }; + const handleUnstageSelectedChanges = async selectedChanges => { + setLoadingAction('unstage_selected'); + try { + const response = await unstageFiles(selectedChanges); + if (response.success) { + await fetchGitStatus(); + Alert.success(response.message); + } else { + Alert.error(response.error); + } + } catch (error) { + Alert.error( + 'An unexpected error occurred while unstaging changes.' 
+ ); + console.error('Error unstaging changes:', error); + } finally { + setLoadingAction(''); + } + }; + const handleCommitSelectedChanges = async ( selectedChanges, commitMessage ) => { setLoadingAction('commit_selected'); try { - const response = await pushFiles(selectedChanges, commitMessage); + const response = await commitFiles(selectedChanges, commitMessage); if (response.success) { await fetchGitStatus(); Alert.success(response.message); @@ -137,6 +169,64 @@ const SettingsPage = () => { } }; + const handlePushChanges = async () => { + setLoadingAction('push_changes'); + try { + const response = await pushFiles(); + + if (response.success) { + await fetchGitStatus(); + Alert.success(response.message); + } else { + if (typeof response.error === 'object' && response.error.type) { + // Handle structured errors + Alert.error(response.error.message); + } else { + // Handle string errors + Alert.error(response.error); + } + } + } catch (error) { + console.error('Error in handlePushChanges:', error); + Alert.error('An unexpected error occurred while pushing changes.'); + } finally { + setLoadingAction(''); + } + }; + const handlePullSelectedChanges = async () => { + setLoadingAction('pull_changes'); + try { + const response = await pullBranch(changes.branch); + + // First update status regardless of what happened + await fetchGitStatus(); + + if (response.success) { + if (response.state === 'resolve') { + Alert.info( + response.message || + 'Repository is now in conflict resolution state. Please resolve conflicts to continue. 
', + { + autoClose: true, + closeOnClick: true + } + ); + } else { + Alert.success( + response.message || 'Successfully pulled changes' + ); + } + } else { + Alert.error(response.message || 'Failed to pull changes'); + } + } catch (error) { + console.error('Error in pullBranch:', error); + Alert.error('Failed to pull changes'); + } finally { + setLoadingAction(''); + } + }; + const handleRevertSelectedChanges = async selectedChanges => { setLoadingAction('revert_selected'); try { @@ -164,24 +254,6 @@ const SettingsPage = () => { } }; - const handlePullSelectedChanges = async selectedChanges => { - setLoadingAction('pull_changes'); - try { - const response = await pullBranch(changes.branch, selectedChanges); - if (response.success) { - await fetchGitStatus(); - Alert.success(response.message); - } else { - Alert.error(response.error); - } - } catch (error) { - Alert.error('An unexpected error occurred while pulling changes.'); - console.error('Error pulling changes:', error); - } finally { - setLoadingAction(''); - } - }; - return (