Merge pull request #285 from Dictionarry-Hub/dev

Author: santiagosayshey
Date: 2026-01-30 01:23:11 +10:30
Committed by: GitHub
29 changed files with 834 additions and 262 deletions

View File

@@ -1,13 +1,28 @@
# Dockerfile
FROM python:3.9-slim
WORKDIR /app
# Install git and gosu for user switching
RUN apt-get update && apt-get install -y git gosu && rm -rf /var/lib/apt/lists/*
# Install git, gosu, and PowerShell Core
RUN apt-get update && apt-get install -y \
git \
gosu \
wget \
ca-certificates \
libicu-dev \
&& wget -O /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v7.4.0/powershell-7.4.0-linux-x64.tar.gz \
&& mkdir -p /opt/microsoft/powershell/7 \
&& tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 \
&& chmod +x /opt/microsoft/powershell/7/pwsh \
&& ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh \
&& rm /tmp/powershell.tar.gz \
&& rm -rf /var/lib/apt/lists/*
# Copy pre-built files from dist directory
COPY dist/backend/app ./app
COPY dist/backend/scripts ./app/scripts
COPY dist/static ./app/static
COPY dist/requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
# Ensure scripts are executable
RUN chmod +x /app/scripts/*.ps1 || true
# Copy and setup entrypoint script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

View File

@@ -1,7 +1,21 @@
FROM python:3.9
WORKDIR /app
# Install PowerShell Core
RUN apt-get update && apt-get install -y \
wget \
ca-certificates \
libicu-dev \
&& wget -O /tmp/powershell.tar.gz https://github.com/PowerShell/PowerShell/releases/download/v7.4.0/powershell-7.4.0-linux-x64.tar.gz \
&& mkdir -p /opt/microsoft/powershell/7 \
&& tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 \
&& chmod +x /opt/microsoft/powershell/7/pwsh \
&& ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh \
&& rm /tmp/powershell.tar.gz \
&& rm -rf /var/lib/apt/lists/*
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
# Ensure scripts are executable
RUN chmod +x /app/scripts/*.ps1 || true
# Use gunicorn with 10-minute timeout
CMD ["python", "-m", "app.main"]

View File

@@ -7,6 +7,7 @@ from .utils import (get_category_directory, load_yaml_file, validate,
test_regex_pattern, test_format_conditions,
check_delete_constraints, filename_to_display)
from ..db import add_format_to_renames, remove_format_from_renames, is_format_in_renames
from .cache import data_cache
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
@@ -16,43 +17,19 @@ bp = Blueprint('data', __name__)
@bp.route('/<string:category>', methods=['GET'])
def retrieve_all(category):
try:
directory = get_category_directory(category)
files = [f for f in os.listdir(directory) if f.endswith('.yml')]
logger.debug(f"Found {len(files)} files in {category}")
if not files:
return jsonify([]), 200
result = []
errors = 0
for file_name in files:
file_path = os.path.join(directory, file_name)
try:
content = load_yaml_file(file_path)
# Add metadata for custom formats
if category == 'custom_format':
content['metadata'] = {
'includeInRename':
is_format_in_renames(content['name'])
# Use cache instead of reading from disk
items = data_cache.get_all(category)
# Add metadata for custom formats
if category == 'custom_format':
for item in items:
if 'content' in item and 'name' in item['content']:
item['content']['metadata'] = {
'includeInRename': is_format_in_renames(item['content']['name'])
}
result.append({
"file_name":
file_name,
"content":
content,
"modified_date":
get_file_modified_date(file_path)
})
except yaml.YAMLError:
errors += 1
result.append({
"file_name": file_name,
"error": "Failed to parse YAML"
})
logger.info(
f"Processed {len(files)} {category} files ({errors} errors)")
return jsonify(result), 200
logger.info(f"Retrieved {len(items)} {category} items from cache")
return jsonify(items), 200
except ValueError as ve:
logger.error(ve)
@@ -127,6 +104,10 @@ def handle_item(category, name):
# Then delete the file
os.remove(file_path)
# Update cache
data_cache.remove_item(category, file_name)
return jsonify(
{"message": f"Successfully deleted {file_name}"}), 200
except OSError as e:
@@ -226,6 +207,32 @@ def handle_item(category, name):
return jsonify({"error": "An unexpected error occurred"}), 500
@bp.route('/regex/verify', methods=['POST'])
def verify_regex():
"""Verify a regex pattern using .NET regex engine via PowerShell"""
try:
data = request.get_json()
if not data:
return jsonify({"error": "No JSON data provided"}), 400
pattern = data.get('pattern')
if not pattern:
return jsonify({"error": "Pattern is required"}), 400
from .utils import verify_dotnet_regex
success, message = verify_dotnet_regex(pattern)
if success:
return jsonify({"valid": True, "message": "Pattern is valid"}), 200
else:
return jsonify({"valid": False, "error": message}), 200
except Exception as e:
logger.exception("Error verifying regex pattern")
return jsonify({"valid": False, "error": str(e)}), 500
@bp.route('/<string:category>/test', methods=['POST'])
def run_tests(category):
logger.info(f"Received test request for category: {category}")
@@ -233,25 +240,29 @@ def run_tests(category):
try:
data = request.get_json()
if not data:
logger.warning("Rejected test request - no JSON data provided")
logger.warning("Test request rejected: no JSON data")
return jsonify({"error": "No JSON data provided"}), 400
tests = data.get('tests', [])
if not tests:
logger.warning("Rejected test request - no test cases provided")
logger.warning("Test request rejected: no tests provided")
return jsonify({"error":
"At least one test case is required"}), 400
if category == 'regex_pattern':
pattern = data.get('pattern')
logger.info(f"Processing regex test request - Pattern: {pattern}")
if not pattern:
logger.warning("Rejected test request - missing pattern")
logger.warning("Test request rejected: missing pattern")
return jsonify({"error": "Pattern is required"}), 400
success, message, updated_tests = test_regex_pattern(
pattern, tests)
if success and updated_tests:
passed = sum(1 for t in updated_tests if t.get('passes'))
total = len(updated_tests)
logger.info(f"Tests completed: {passed}/{total} passed")
elif category == 'custom_format':
conditions = data.get('conditions', [])
@@ -274,10 +285,8 @@ def run_tests(category):
return jsonify(
{"error": "Testing not supported for this category"}), 400
logger.info(f"Test execution completed - Success: {success}")
if not success:
logger.warning(f"Test execution failed - {message}")
logger.error(f"Test execution failed: {message}")
return jsonify({"success": False, "message": message}), 400
return jsonify({"success": True, "tests": updated_tests}), 200

backend/app/data/cache.py (new file, 117 lines)
View File

@@ -0,0 +1,117 @@
import os
import yaml
import logging
from typing import Dict, List, Any, Optional
from datetime import datetime
import threading
from .utils import get_category_directory, get_file_modified_date, filename_to_display
logger = logging.getLogger(__name__)
class DataCache:
"""In-memory cache for YAML data"""
def __init__(self):
self._cache = {
'regex_pattern': {},
'custom_format': {},
'profile': {}
}
self._lock = threading.RLock()
self._initialized = False
def initialize(self, force_reload=False):
"""Load all data into memory on startup
Args:
force_reload: If True, force a reload even if already initialized
"""
with self._lock:
if self._initialized and not force_reload:
return
logger.info("Initializing data cache..." if not force_reload else "Reloading data cache...")
for category in self._cache.keys():
self._load_category(category)
self._initialized = True
logger.info("Data cache initialized successfully" if not force_reload else "Data cache reloaded successfully")
def _load_category(self, category: str):
"""Load all items from a category into cache"""
try:
directory = get_category_directory(category)
items = {}
for filename in os.listdir(directory):
if not filename.endswith('.yml'):
continue
file_path = os.path.join(directory, filename)
try:
with open(file_path, 'r') as f:
content = yaml.safe_load(f)
if content:
# Store with metadata
items[filename] = {
'file_name': filename,
'modified_date': get_file_modified_date(file_path),
'content': content
}
except Exception as e:
logger.error(f"Error loading {file_path}: {e}")
self._cache[category] = items
logger.info(f"Loaded {len(items)} items for category {category}")
except Exception as e:
logger.error(f"Error loading category {category}: {e}")
def get_all(self, category: str) -> List[Dict[str, Any]]:
"""Get all items from a category"""
with self._lock:
if not self._initialized:
self.initialize()
return list(self._cache.get(category, {}).values())
def get_item(self, category: str, name: str) -> Optional[Dict[str, Any]]:
"""Get a specific item"""
with self._lock:
if not self._initialized:
self.initialize()
# Convert name to filename
filename = f"{name.replace('[', '(').replace(']', ')')}.yml"
return self._cache.get(category, {}).get(filename)
def update_item(self, category: str, filename: str, content: Dict[str, Any]):
"""Update an item in cache"""
with self._lock:
if category in self._cache:
file_path = os.path.join(get_category_directory(category), filename)
self._cache[category][filename] = {
'file_name': filename,
'modified_date': get_file_modified_date(file_path),
'content': content
}
logger.debug(f"Updated cache for {category}/{filename}")
def remove_item(self, category: str, filename: str):
"""Remove an item from cache"""
with self._lock:
if category in self._cache and filename in self._cache[category]:
del self._cache[category][filename]
logger.debug(f"Removed from cache: {category}/{filename}")
def rename_item(self, category: str, old_filename: str, new_filename: str):
"""Rename an item in cache"""
with self._lock:
if category in self._cache and old_filename in self._cache[category]:
item = self._cache[category].pop(old_filename)
item['file_name'] = new_filename
self._cache[category][new_filename] = item
logger.debug(f"Renamed in cache: {category}/{old_filename} -> {new_filename}")
# Global cache instance
data_cache = DataCache()
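
The cache is a lock-guarded two-level dict (category → filename → record) with lazy, idempotent initialization; the RLock matters because the read methods call initialize() while already holding the lock. A standalone sketch of that pattern, with illustrative names (the real class also loads YAML and file metadata from disk):

```python
import threading
from typing import Any, Callable, Dict, List


class ReadThroughCache:
    """Minimal sketch of DataCache's locking and lazy-load pattern.

    An RLock (not a plain Lock) is used because get_all() calls initialize()
    while already holding the lock.
    """

    def __init__(self, categories: List[str], loader: Callable[[str], Dict[str, Any]]):
        self._cache: Dict[str, Dict[str, Any]] = {c: {} for c in categories}
        self._loader = loader
        self._lock = threading.RLock()
        self._initialized = False

    def initialize(self, force_reload: bool = False) -> None:
        with self._lock:
            if self._initialized and not force_reload:
                return
            for category in self._cache:
                self._cache[category] = self._loader(category)
            self._initialized = True

    def get_all(self, category: str) -> List[Any]:
        with self._lock:
            if not self._initialized:
                self.initialize()  # re-entrant acquire, hence RLock
            return list(self._cache.get(category, {}).values())


cache = ReadThroughCache(['regex_pattern', 'custom_format', 'profile'],
                         loader=lambda category: {})  # a real loader would read the category's *.yml files
print(cache.get_all('custom_format'))  # -> []
```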

View File

@@ -7,6 +7,8 @@ from typing import Dict, List, Any, Tuple, Union
import git
import regex
import logging
import subprocess
import json
from ..db.queries.arr import update_arr_config_on_rename, update_arr_config_on_delete
logger = logging.getLogger(__name__)
@@ -152,6 +154,11 @@ def save_yaml_file(file_path: str,
with open(safe_file_path, 'w') as f:
yaml.safe_dump(ordered_data, f, sort_keys=False)
# Update cache
from .cache import data_cache
filename = os.path.basename(safe_file_path)
data_cache.update_item(category, filename, ordered_data)
def update_yaml_file(file_path: str, data: Dict[str, Any],
@@ -216,6 +223,12 @@ def update_yaml_file(file_path: str, data: Dict[str, Any],
os.rename(file_path, new_file_path)
# Stage the new file
repo.index.add([rel_new_path])
# Update cache for rename
from .cache import data_cache
old_filename = os.path.basename(file_path)
new_filename = os.path.basename(new_file_path)
data_cache.rename_item(category, old_filename, new_filename)
except git.GitCommandError as e:
logger.error(f"Git operation failed: {e}")
@@ -360,6 +373,68 @@ def check_delete_constraints(category: str, name: str) -> Tuple[bool, str]:
return False, f"Error checking references: {str(e)}"
def verify_dotnet_regex(pattern: str) -> Tuple[bool, str]:
"""
Verify a regex pattern using .NET regex engine via PowerShell.
Returns (success, message) tuple.
"""
try:
# Get the path to the validate.ps1 script
# In Docker, the structure is /app/app/data/utils.py and script is at /app/scripts/validate.ps1
script_path = os.path.join('/app', 'scripts', 'validate.ps1')
if not os.path.exists(script_path):
# Fallback for local development
script_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'scripts', 'validate.ps1')
# Run PowerShell script, passing pattern via stdin to avoid shell escaping issues
result = subprocess.run(
['pwsh', '-File', script_path],
input=pattern,
capture_output=True,
text=True,
timeout=5
)
if result.returncode != 0 and not result.stdout:
logger.error(f"PowerShell script failed: {result.stderr}")
return False, "Failed to validate pattern"
# Log the raw output for debugging
logger.debug(f"PowerShell output: {result.stdout}")
# Parse JSON output
try:
output = json.loads(result.stdout.strip())
except json.JSONDecodeError:
# Try to find JSON in the output
lines = result.stdout.strip().split('\n')
for line in reversed(lines):
if line.strip():
try:
output = json.loads(line)
break
except json.JSONDecodeError:
continue
else:
logger.error(f"No valid JSON found in output: {result.stdout}")
return False, "Failed to parse validation result"
if output.get('valid'):
return True, output.get('message', 'Pattern is valid')
else:
return False, output.get('error', 'Invalid pattern')
except subprocess.TimeoutExpired:
logger.error("Pattern validation timed out")
return False, "Pattern validation timed out"
except FileNotFoundError:
logger.error("PowerShell (pwsh) not found")
return False, "PowerShell is not available"
except Exception as e:
logger.error(f"Error validating pattern: {e}")
return False, f"Validation error: {str(e)}"
def update_references(category: str, old_name: str,
new_name: str) -> List[str]:
"""
@@ -478,76 +553,67 @@ def test_regex_pattern(
pattern: str,
tests: List[Dict[str, Any]]) -> Tuple[bool, str, List[Dict[str, Any]]]:
"""
Test a regex pattern against a list of test cases using PCRE2 compatible engine.
Test a regex pattern against a list of test cases using .NET regex engine via PowerShell.
Returns match information along with test results.
"""
logger.info(f"Starting regex pattern test - Pattern: {pattern}")
try:
try:
compiled_pattern = regex.compile(pattern,
regex.V1 | regex.IGNORECASE)
logger.info(
"Pattern compiled successfully with PCRE2 compatibility")
except regex.error as e:
logger.warning(f"Invalid regex pattern: {str(e)}")
return False, f"Invalid regex pattern: {str(e)}", tests
current_time = datetime.now().isoformat()
logger.info(f"Processing {len(tests)} test cases")
for test in tests:
test_id = test.get('id', 'unknown')
test_input = test.get('input', '')
expected = test.get('expected', False)
try:
match = compiled_pattern.search(test_input)
matches = bool(match)
# Update test result with basic fields
test['passes'] = matches == expected
test['lastRun'] = current_time
# Add match information
if match:
test['matchedContent'] = match.group(0)
test['matchSpan'] = {
'start': match.start(),
'end': match.end()
}
# Get all capture groups if they exist
test['matchedGroups'] = [g for g in match.groups()
] if match.groups() else []
else:
test['matchedContent'] = None
test['matchSpan'] = None
test['matchedGroups'] = []
logger.info(
f"Test {test_id} {'passed' if test['passes'] else 'failed'} - Match: {matches}, Expected: {expected}"
)
except Exception as e:
logger.error(f"Error running test {test_id}: {str(e)}")
test['passes'] = False
test['lastRun'] = current_time
test['matchedContent'] = None
test['matchSpan'] = None
test['matchedGroups'] = []
# Log overall results
passed_tests = sum(1 for test in tests if test.get('passes', False))
logger.info(
f"Test execution complete - {passed_tests}/{len(tests)} tests passed"
# Get the path to the test.ps1 script
script_path = os.path.join('/app', 'scripts', 'test.ps1')
if not os.path.exists(script_path):
# Fallback for local development
script_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'scripts', 'test.ps1')
# Prepare the input data
input_data = {
'pattern': pattern,
'tests': tests
}
# Run PowerShell script
result = subprocess.run(
['pwsh', '-File', script_path],
input=json.dumps(input_data),
capture_output=True,
text=True,
timeout=10
)
return True, "", tests
if result.returncode != 0 and not result.stdout:
logger.error(f"PowerShell script failed: {result.stderr}")
return False, "Failed to run tests", tests
# Parse JSON output
try:
output = json.loads(result.stdout.strip())
except json.JSONDecodeError:
# Try to find JSON in the output
lines = result.stdout.strip().split('\n')
for line in reversed(lines):
if line.strip():
try:
output = json.loads(line)
break
except json.JSONDecodeError:
continue
else:
logger.error(f"No valid JSON found in output: {result.stdout}")
return False, "Failed to parse test results", tests
if output.get('success'):
return True, "Tests completed successfully", output.get('tests', tests)
else:
return False, output.get('message', 'Tests failed'), tests
except subprocess.TimeoutExpired:
logger.error("Test execution timed out")
return False, "Test execution timed out", tests
except FileNotFoundError:
logger.error("PowerShell (pwsh) not found")
return False, "PowerShell is not available", tests
except Exception as e:
logger.error(f"Unexpected error in test_regex_pattern: {str(e)}",
exc_info=True)
return False, str(e), tests
logger.error(f"Error running tests: {e}")
return False, f"Test error: {str(e)}", tests
def test_format_conditions(conditions: List[Dict],

View File

@@ -44,6 +44,12 @@ def checkout_branch(repo_path, branch_name):
return False, f"Branch '{branch_name}' does not exist locally or in any remote."
logger.debug(f"Successfully checked out branch: {branch_name}")
# Reload cache after branch checkout since files may have changed
from ...data.cache import data_cache
logger.info("Reloading data cache after branch checkout")
data_cache.initialize(force_reload=True)
return True, {
"message": f"Checked out branch: {branch_name}",
"current_branch": branch_name

View File

@@ -11,6 +11,11 @@ def delete_file(repo_path, file_path):
if os.path.exists(full_file_path):
os.remove(full_file_path)
# Reload cache after file deletion
from ...data.cache import data_cache
data_cache.initialize(force_reload=True)
message = f"File {file_path} has been deleted."
return True, message
else:

View File

@@ -60,6 +60,11 @@ def finalize_merge(repo) -> Dict[str, Any]:
if status_manager:
status_manager.update_remote_status()
# Reload cache for modified data files
from ...data.cache import data_cache
logger.info("Reloading data cache after merge completion")
data_cache.initialize(force_reload=True) # This will reload all data
return {'success': True, 'message': 'Merge completed successfully'}
except git.GitCommandError as e:
logger.error(f"Git command error during commit: {str(e)}")

View File

@@ -35,6 +35,11 @@ def pull_branch(repo_path, branch_name):
if status_manager:
status_manager.update_remote_status()
# Reload cache for updated data files
from ...data.cache import data_cache
logger.info("Reloading data cache after pull")
data_cache.initialize(force_reload=True) # This will reload all data
# -------------------------------
# *** "On pull" ARR import logic using new importer:
# 1) Query all ARR configs that have sync_method="pull"

View File

@@ -310,6 +310,11 @@ def resolve_conflicts(
logger.debug(f"File status: {item}")
logger.debug("=======================================")
# Reload cache after conflict resolution
from ...data.cache import data_cache
logger.info("Reloading data cache after conflict resolution")
data_cache.initialize(force_reload=True)
return {'success': True, 'results': results}
except Exception as e:

View File

@@ -26,6 +26,11 @@ def revert_file(repo_path, file_path):
untracked_files = repo.untracked_files
is_untracked = any(f == file_path for f in untracked_files)
# Check if file is staged for deletion
staged_deletions = repo.index.diff("HEAD", R=True)
is_staged_for_deletion = any(d.a_path == file_path
for d in staged_deletions)
if is_untracked:
# For untracked files, we need to remove them
try:
@@ -33,14 +38,7 @@ def revert_file(repo_path, file_path):
message = f"New file {file_path} has been removed."
except FileNotFoundError:
message = f"File {file_path} was already removed."
return True, message
# Check if file is staged for deletion
staged_deletions = repo.index.diff("HEAD", R=True)
is_staged_for_deletion = any(d.a_path == file_path
for d in staged_deletions)
if is_staged_for_deletion:
elif is_staged_for_deletion:
# Restore file staged for deletion
repo.git.reset("--", file_path)
repo.git.checkout('HEAD', "--", file_path)
@@ -51,6 +49,10 @@ def revert_file(repo_path, file_path):
repo.git.restore('--staged', "--", file_path)
message = f"File {file_path} has been reverted."
# Reload cache after ANY revert operation
from ...data.cache import data_cache
data_cache.initialize(force_reload=True)
return True, message
except git.exc.GitCommandError as e:
@@ -98,6 +100,10 @@ def revert_all(repo_path):
message += f" and {len(untracked_files)} new file(s) have been removed"
message += "."
# Reload cache after reverting all
from ...data.cache import data_cache
data_cache.initialize(force_reload=True)
return True, message
except git.exc.GitCommandError as e:

View File

@@ -116,6 +116,11 @@ def clone_repository(repo_url, repo_path):
logger.info("Removing backup directory")
shutil.rmtree(backup_dir)
# Reload cache after clone operation
from ...data.cache import data_cache
logger.info("Reloading data cache after clone")
data_cache.initialize(force_reload=True)
logger.info("Clone operation completed successfully")
return True, "Repository cloned and local files merged successfully"

View File

@@ -68,6 +68,12 @@ def unlink_repository(repo_path, remove_files=False):
save_settings({'gitRepo': None})
logger.info("Updated settings to remove git information")
# Reload cache if files were removed
if remove_files:
from ...data.cache import data_cache
logger.info("Reloading data cache after removing repository files")
data_cache.initialize(force_reload=True)
return True, "Repository successfully unlinked"
except Exception as e:
logger.error(f"Error unlinking repository: {str(e)}", exc_info=True)

View File

@@ -9,20 +9,11 @@ from .logger import get_import_logger
logger = logging.getLogger(__name__)
# Cache patterns at module level to avoid reloading
_CACHED_PATTERNS = None
def get_cached_patterns():
"""Get cached regex patterns, loading them once on first access."""
global _CACHED_PATTERNS
if _CACHED_PATTERNS is None:
_CACHED_PATTERNS = load_regex_patterns()
return _CACHED_PATTERNS
def compile_format_to_api_structure(
format_yaml: Dict[str, Any],
arr_type: str
arr_type: str,
patterns: Dict[str, str] = None
) -> Dict[str, Any]:
"""
Compile a format from YAML to Arr API structure.
@@ -30,12 +21,15 @@ def compile_format_to_api_structure(
Args:
format_yaml: Format data from YAML file
arr_type: 'radarr' or 'sonarr'
patterns: Pre-loaded regex patterns (if None, will load from disk)
Returns:
Compiled format ready for API
"""
target_app = TargetApp.RADARR if arr_type.lower() == 'radarr' else TargetApp.SONARR
patterns = get_cached_patterns()
# Only load patterns if not provided
if patterns is None:
patterns = load_regex_patterns()
compiled = {
'name': format_yaml.get('name', 'Unknown')

View File

@@ -22,6 +22,11 @@ class FormatStrategy(ImportStrategy):
Returns:
Dictionary with 'formats' key containing compiled formats
"""
from ..utils import load_regex_patterns
# Load all regex patterns once at the start
patterns = load_regex_patterns()
formats = []
failed = []
import_logger = get_import_logger()
@@ -35,7 +40,7 @@ class FormatStrategy(ImportStrategy):
format_yaml = load_yaml(f"custom_format/{filename}.yml")
# Compile to API structure
compiled = compile_format_to_api_structure(format_yaml, self.arr_type)
compiled = compile_format_to_api_structure(format_yaml, self.arr_type, patterns)
# Add unique suffix if needed
if self.import_as_unique:

View File

@@ -22,6 +22,11 @@ class ProfileStrategy(ImportStrategy):
Returns:
Dictionary with 'profiles' and 'formats' keys
"""
from ..utils import load_regex_patterns
# Load all regex patterns once at the start
patterns = load_regex_patterns()
profiles = []
all_formats = []
processed_formats: Set[str] = set()
@@ -49,7 +54,7 @@ class ProfileStrategy(ImportStrategy):
try:
format_yaml = load_yaml(f"custom_format/{format_name}.yml")
compiled_format = compile_format_to_api_structure(format_yaml, self.arr_type)
compiled_format = compile_format_to_api_structure(format_yaml, self.arr_type, patterns)
if self.import_as_unique:
compiled_format['name'] = self.add_unique_suffix(compiled_format['name'])
@@ -72,7 +77,7 @@ class ProfileStrategy(ImportStrategy):
for lang_format in language_formats:
lang_name = lang_format.get('name', 'Language format')
compiled_lang = compile_format_to_api_structure(lang_format, self.arr_type)
compiled_lang = compile_format_to_api_structure(lang_format, self.arr_type, patterns)
if self.import_as_unique:
compiled_lang['name'] = self.add_unique_suffix(compiled_lang['name'])

View File

@@ -18,6 +18,7 @@ from .logs import bp as logs_bp
from .media_management import media_management_bp
from .middleware import init_middleware
from .init import setup_logging, init_app_config, init_git_user
from .data.cache import data_cache
def create_app():
@@ -48,6 +49,10 @@ def create_app():
# Initialize Git user configuration
logger.info("Initializing Git user")
success, message = init_git_user()
# Initialize data cache
logger.info("Initializing data cache")
data_cache.initialize()
if not success:
logger.warning(f"Git user initialization issue: {message}")
else:

backend/scripts/test.ps1 (new executable file, 107 lines)
View File

@@ -0,0 +1,107 @@
#!/usr/bin/env pwsh
# Run regex tests against a pattern
# Set output encoding to UTF-8
[Console]::OutputEncoding = [System.Text.Encoding]::UTF8
$ErrorActionPreference = "Stop"
# Read from stdin
$inputText = $input
if (-not $inputText) {
$inputText = [System.Console]::In.ReadToEnd()
}
if (-not $inputText) {
Write-Output (ConvertTo-Json @{
success = $false
message = "No input provided"
} -Compress)
exit 0
}
try {
$data = $inputText | ConvertFrom-Json
$Pattern = $data.pattern
$tests = $data.tests
}
catch {
Write-Output (ConvertTo-Json @{
success = $false
message = "Failed to parse input JSON: $_"
} -Compress)
exit 0
}
# Ensure we have required inputs
if ([string]::IsNullOrWhiteSpace($Pattern)) {
Write-Output (ConvertTo-Json @{
success = $false
message = "No pattern provided"
} -Compress)
exit 0
}
if (-not $tests -or $tests.Count -eq 0) {
Write-Output (ConvertTo-Json @{
success = $false
message = "No tests provided"
} -Compress)
exit 0
}
try {
# Create the regex object with case-insensitive option
$regex = [System.Text.RegularExpressions.Regex]::new($Pattern, [System.Text.RegularExpressions.RegexOptions]::IgnoreCase)
# Process each test
$results = @()
foreach ($test in $tests) {
$match = $regex.Match($test.input)
$passes = ($match.Success -eq $test.expected)
$result = @{
id = $test.id
input = $test.input
expected = $test.expected
passes = $passes
}
if ($match.Success) {
# Include match details for highlighting (using original format)
$result.matchedContent = $match.Value
$result.matchSpan = @{
start = $match.Index
end = $match.Index + $match.Length
}
# Include capture groups if any
$groups = @()
for ($i = 1; $i -lt $match.Groups.Count; $i++) {
if ($match.Groups[$i].Success) {
$groups += $match.Groups[$i].Value
}
}
$result.matchedGroups = $groups
}
else {
$result.matchedContent = $null
$result.matchSpan = $null
$result.matchedGroups = @()
}
$results += $result
}
Write-Output (ConvertTo-Json @{
success = $true
tests = $results
} -Compress -Depth 10)
}
catch {
Write-Output (ConvertTo-Json @{
success = $false
message = $_.Exception.Message
} -Compress)
}

backend/scripts/validate.ps1 (new executable file, 73 lines)
View File

@@ -0,0 +1,73 @@
#!/usr/bin/env pwsh
# Validate a .NET regex pattern
param(
[Parameter(Mandatory=$false)]
[string]$Pattern
)
# Set output encoding to UTF-8
[Console]::OutputEncoding = [System.Text.Encoding]::UTF8
$ErrorActionPreference = "Stop"
# Read pattern from stdin if not provided as parameter
if (-not $Pattern) {
$Pattern = [System.Console]::In.ReadToEnd()
}
# Ensure we have a pattern
if ([string]::IsNullOrWhiteSpace($Pattern)) {
$result = @{
valid = $false
error = "No pattern provided"
}
Write-Output (ConvertTo-Json $result -Compress)
exit 0
}
try {
# Attempt to create a .NET Regex object with the pattern
# Using IgnoreCase option as per requirement
$regex = [System.Text.RegularExpressions.Regex]::new($Pattern, [System.Text.RegularExpressions.RegexOptions]::IgnoreCase)
# If we get here, the pattern is valid
$result = @{
valid = $true
message = "Pattern is valid .NET regex"
}
Write-Output (ConvertTo-Json $result -Compress)
exit 0
}
catch {
# Pattern is invalid, extract the meaningful part of the error message
$errorMessage = $_.Exception.Message
# Try to extract just the useful part of .NET regex errors
if ($errorMessage -match "Invalid pattern '.*?' at offset (\d+)\. (.+)") {
$errorMessage = "At position $($matches[1]): $($matches[2])"
}
elseif ($errorMessage -match 'parsing ".*?" - (.+)') {
$errorMessage = $matches[1]
}
elseif ($errorMessage -match 'Exception calling .* with .* argument\(s\): "(.+)"') {
$innerError = $matches[1]
if ($innerError -match "Invalid pattern '.*?' at offset (\d+)\. (.+)") {
$errorMessage = "At position $($matches[1]): $($matches[2])"
}
else {
$errorMessage = $innerError
}
}
# Remove any trailing quotes or periods followed by quotes
$errorMessage = $errorMessage -replace '\."$', '.' -replace '"$', ''
$result = @{
valid = $false
error = $errorMessage
}
Write-Output (ConvertTo-Json $result -Compress)
exit 0
}

View File

@@ -301,5 +301,15 @@ export const RegexPatterns = {
update: (name, data, newName) =>
updateItem('regex_pattern', name, data, newName),
delete: name => deleteItem('regex_pattern', name),
runTests: createSpecialEndpoint('regex_pattern', 'test')
runTests: createSpecialEndpoint('regex_pattern', 'test'),
verify: async pattern => {
try {
const response = await axios.post(`${BASE_URL}/regex/verify`, {
pattern
});
return response.data;
} catch (error) {
throw handleError(error, 'verify regex pattern');
}
}
};

View File

@@ -1,4 +1,4 @@
import React, {useState} from 'react';
import React, {useState, useEffect, useRef} from 'react';
import PropTypes from 'prop-types';
import {Copy, Check, FlaskConical, FileText, ListFilter} from 'lucide-react';
import Tooltip from '@ui/Tooltip';
@@ -14,6 +14,8 @@ function FormatCard({
willBeSelected,
onSelect
}) {
const [isVisible, setIsVisible] = useState(false);
const cardRef = useRef(null);
const [showDescription, setShowDescription] = useState(() => {
const saved = localStorage.getItem(`format-view-${format.file_name}`);
return saved !== null ? JSON.parse(saved) : true;
@@ -64,8 +66,27 @@ function FormatCard({
}
};
useEffect(() => {
const observer = new IntersectionObserver(
([entry]) => {
setIsVisible(entry.isIntersecting);
},
{
threshold: 0,
rootMargin: '100px' // Keep cards rendered 100px outside viewport
}
);
if (cardRef.current) {
observer.observe(cardRef.current);
}
return () => observer.disconnect();
}, []);
return (
<div
ref={cardRef}
className={`w-full h-[12rem] bg-gradient-to-br from-gray-800/95 to-gray-900 border ${
isSelected
? 'border-blue-500'
@@ -81,7 +102,8 @@ function FormatCard({
} transition-all cursor-pointer relative`}
onClick={handleClick}
onMouseDown={handleMouseDown}>
<div className='p-4 flex flex-col h-full'>
{isVisible ? (
<div className='p-4 flex flex-col h-full'>
{/* Header Section */}
<div className='flex justify-between items-start'>
<div className='flex flex-col min-w-0 flex-1'>
@@ -237,6 +259,15 @@ function FormatCard({
)}
</div>
</div>
) : (
<div className='p-4 flex items-center justify-center h-full'>
<div className='w-full space-y-2'>
<div className='h-5 bg-gray-700/50 rounded animate-pulse'/>
<div className='h-3 bg-gray-700/50 rounded animate-pulse w-3/4'/>
<div className='h-3 bg-gray-700/50 rounded animate-pulse w-1/2'/>
</div>
</div>
)}
</div>
);
}

View File

@@ -23,15 +23,13 @@ const AddUnitTestModal = ({isOpen, onClose, onAdd, tests, editTest = null}) => {
const handleSubmit = () => {
const getNextTestId = testArray => {
if (!testArray || testArray.length === 0) return 1;
return Math.max(...testArray.map(test => test.id)) + 1;
return Math.max(...testArray.map(test => test.id || 0)) + 1;
};
const testData = {
id: editTest ? editTest.id : getNextTestId(tests),
input,
expected: shouldMatch,
passes: false,
lastRun: null
expected: shouldMatch
};
onAdd(testData);

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, {useState, useEffect, useRef} from 'react';
import PropTypes from 'prop-types';
import {Copy, Check, FlaskConical} from 'lucide-react';
import Tooltip from '@ui/Tooltip';
@@ -15,6 +15,9 @@ const RegexCard = ({
willBeSelected,
onSelect
}) => {
const [isVisible, setIsVisible] = useState(false);
const cardRef = useRef(null);
const totalTests = pattern.tests?.length || 0;
const passedTests = pattern.tests?.filter(t => t.passes)?.length || 0;
const passRate =
@@ -46,8 +49,27 @@ const RegexCard = ({
return 'text-red-400';
};
useEffect(() => {
const observer = new IntersectionObserver(
([entry]) => {
setIsVisible(entry.isIntersecting);
},
{
threshold: 0,
rootMargin: '100px' // Keep cards rendered 100px outside viewport
}
);
if (cardRef.current) {
observer.observe(cardRef.current);
}
return () => observer.disconnect();
}, []);
return (
<div
ref={cardRef}
className={`w-full h-[20rem] bg-gradient-to-br from-gray-800/95 to-gray-900 border ${
isSelected
? 'border-blue-500'
@@ -63,7 +85,8 @@ const RegexCard = ({
} transition-all cursor-pointer overflow-hidden`}
onClick={handleClick}
onMouseDown={handleMouseDown}>
<div className='p-6 flex flex-col h-full'>
{isVisible ? (
<div className='p-6 flex flex-col h-full'>
{/* Header Section */}
<div className='flex-none'>
<div className='flex justify-between items-start'>
@@ -183,6 +206,15 @@ const RegexCard = ({
)}
</div>
</div>
) : (
<div className='p-6 flex items-center justify-center h-full'>
<div className='w-full space-y-3'>
<div className='h-6 bg-gray-700/50 rounded animate-pulse'/>
<div className='h-20 bg-gray-700/50 rounded animate-pulse'/>
<div className='h-4 bg-gray-700/50 rounded animate-pulse w-3/4'/>
</div>
</div>
)}
</div>
);
};

View File

@@ -2,7 +2,9 @@ import React, {useState} from 'react';
import PropTypes from 'prop-types';
import MarkdownEditor from '@ui/MarkdownEditor';
import AddButton from '@ui/DataBar/AddButton';
import {InfoIcon} from 'lucide-react';
import {Regex, Loader} from 'lucide-react';
import {RegexPatterns} from '@api/data';
import Alert from '@ui/Alert';
const RegexGeneralTab = ({
name,
@@ -18,6 +20,7 @@ const RegexGeneralTab = ({
patternError
}) => {
const [newTag, setNewTag] = useState('');
const [validating, setValidating] = useState(false);
const handleAddTag = () => {
if (newTag.trim() && !tags.includes(newTag.trim())) {
@@ -33,6 +36,30 @@ const RegexGeneralTab = ({
}
};
const handleValidatePattern = async () => {
if (!pattern?.trim()) {
Alert.warning('Please enter a pattern to validate');
return;
}
setValidating(true);
try {
const result = await RegexPatterns.verify(pattern);
if (result.valid) {
Alert.success('Pattern is valid .NET regex');
} else {
Alert.error(result.error || 'Invalid pattern');
}
} catch (error) {
console.error('Validation error:', error);
Alert.error('Failed to validate pattern');
} finally {
setValidating(false);
}
};
return (
<div className='w-full'>
{error && (
@@ -89,17 +116,28 @@ const RegexGeneralTab = ({
<div className='space-y-2'>
<div className='space-y-1'>
<div className='flex items-center justify-between'>
<label className='text-sm font-medium text-gray-700 dark:text-gray-300'>
Pattern
</label>
<div className='flex items-center gap-2 text-xs text-blue-600 dark:text-blue-400'>
<InfoIcon className='h-4 w-4' />
<span>Case insensitive PCRE2</span>
<div>
<label className='text-sm font-medium text-gray-700 dark:text-gray-300'>
Pattern
</label>
<p className='text-xs text-gray-500 dark:text-gray-400'>
Enter your regular expression pattern (case-insensitive .NET)
</p>
</div>
<button
onClick={handleValidatePattern}
disabled={validating || !pattern?.trim()}
className='inline-flex items-center px-3 py-1.5 text-sm font-medium rounded-md
bg-blue-600 hover:bg-blue-700 disabled:bg-blue-600/50 text-white
transition-colors duration-200'>
{validating ? (
<Loader className='w-4 h-4 mr-2 animate-spin' />
) : (
<Regex className='w-4 h-4 mr-2' />
)}
Validate
</button>
</div>
<p className='text-xs text-gray-500 dark:text-gray-400'>
Enter your regular expression pattern
</p>
</div>
{patternError && (
<p className='text-sm text-red-600 dark:text-red-400'>

View File

@@ -6,7 +6,7 @@ import RegexTestingTab from './RegexTestingTab';
import {useRegexModal} from '@hooks/useRegexModal';
import {RegexPatterns} from '@api/data';
import Alert from '@ui/Alert';
import {Loader, Play} from 'lucide-react';
import {Loader, Play, Save, Trash2, Check} from 'lucide-react';
const RegexModal = ({
pattern: initialPattern,
@@ -84,12 +84,13 @@ const RegexModal = ({
{initialPattern && !isCloning && (
<button
onClick={handleDelete}
className={`px-4 py-2 text-white rounded transition-colors ${
isDeleting
? 'bg-red-600 hover:bg-red-700'
: 'bg-red-500 hover:bg-red-600'
}`}>
{isDeleting ? 'Confirm Delete' : 'Delete'}
className='inline-flex items-center gap-2 px-4 py-2 rounded bg-gray-800 border border-gray-700 text-gray-200 hover:bg-gray-700 transition-colors'>
{isDeleting ? (
<Check className="w-4 h-4 text-green-500" />
) : (
<Trash2 className="w-4 h-4 text-red-500" />
)}
<span>Delete</span>
</button>
)}
<div className='flex gap-2'>
@@ -97,20 +98,20 @@ const RegexModal = ({
<button
onClick={() => handleRunTests(patternValue, tests)}
disabled={isRunningTests}
className='inline-flex items-center px-4 py-2 bg-green-600 hover:bg-green-700
disabled:bg-green-600/50 text-white rounded transition-colors'>
className='inline-flex items-center gap-2 px-4 py-2 rounded bg-gray-800 border border-gray-700 text-gray-200 hover:bg-gray-700 disabled:opacity-50 transition-colors'>
{isRunningTests ? (
<Loader className='w-4 h-4 mr-2 animate-spin' />
<Loader className="w-4 h-4 text-yellow-500 animate-spin" />
) : (
<Play className='w-4 h-4 mr-2' />
<Play className="w-4 h-4 text-green-500" />
)}
Run Tests
<span>Run Tests</span>
</button>
)}
<button
onClick={handleSave}
className='bg-blue-500 hover:bg-blue-600 text-white px-4 py-2 rounded transition-colors'>
Save
className='inline-flex items-center gap-2 px-4 py-2 rounded bg-gray-800 border border-gray-700 text-gray-200 hover:bg-gray-700 transition-colors'>
<Save className="w-4 h-4 text-blue-500" />
<span>Save</span>
</button>
</div>
</div>

View File

@@ -13,52 +13,47 @@ const RegexTestingTab = ({
}) => {
const [isModalOpen, setIsModalOpen] = useState(false);
const [editingTest, setEditingTest] = useState(null);
const [testResults, setTestResults] = useState({});
// Wrapped run tests function that stores results
const handleRunTests = useCallback(async (testPattern, testData) => {
const results = await onRunTests(testPattern, testData);
if (results && Array.isArray(results)) {
// Store results by test ID
const resultsMap = {};
results.forEach(result => {
resultsMap[result.id] = result;
});
setTestResults(resultsMap);
}
return results;
}, [onRunTests]);
useEffect(() => {
const needsAutoRun =
tests?.length > 0 &&
pattern &&
tests.some(test => test.passes !== undefined && !test.matchSpan);
if (needsAutoRun && !isRunningTests) {
onRunTests(pattern, tests);
// Run tests when pattern or tests change
if (tests?.length > 0 && pattern && !isRunningTests) {
handleRunTests(pattern, tests);
}
}, []);
}, [pattern]); // Only re-run when pattern changes
const handleAddOrUpdateTest = useCallback(
testData => {
let updatedTests;
if (editingTest) {
updatedTests = tests.map(test =>
test.id === testData.id
? {
...testData,
passes: false,
lastRun: null,
matchedContent: null,
matchSpan: null,
matchedGroups: []
}
: test
test.id === testData.id ? testData : test
);
} else {
updatedTests = [
...tests,
{
...testData,
passes: false,
lastRun: null,
matchedContent: null,
matchSpan: null,
matchedGroups: []
}
];
updatedTests = [...tests, testData];
}
onTestsChange(updatedTests);
onRunTests(pattern, updatedTests);
// Run tests automatically after adding/updating
if (pattern) {
handleRunTests(pattern, updatedTests);
}
setEditingTest(null);
},
[tests, onTestsChange, onRunTests, pattern, editingTest]
[tests, onTestsChange, handleRunTests, pattern, editingTest]
);
const handleEditTest = useCallback(test => {
@@ -80,72 +75,81 @@ const RegexTestingTab = ({
}, []);
const totalTests = tests?.length || 0;
const passedTests = tests?.filter(test => test.passes)?.length || 0;
const passedTests = tests?.filter(test => {
const result = testResults[test.id];
return result?.passes;
})?.length || 0;
return (
<div className='flex flex-col h-full'>
{/* Header with Progress Bar */}
<div className='flex items-center justify-between pb-4 pr-2'>
{/* Header */}
<div className='flex items-center justify-between pb-4'>
<div>
<h2 className='text-xl font-semibold text-gray-900 dark:text-white mb-3'>
<h2 className='text-xl font-semibold text-gray-900 dark:text-white mb-1'>
Unit Tests
</h2>
<div className='flex items-center gap-3'>
<div className='h-1.5 w-32 bg-gray-200 dark:bg-gray-700 rounded-full overflow-hidden'>
<div
className='h-full bg-emerald-500 rounded-full transition-all duration-300'
style={{
width: `${
totalTests
? (passedTests / totalTests) * 100
: 0
}%`
}}
/>
</div>
<span className='text-sm text-gray-600 dark:text-gray-300'>
{totalTests > 0
? `${passedTests}/${totalTests} tests passing`
: 'No tests added yet'}
</span>
</div>
{totalTests > 0 && (
<p className='text-sm text-gray-600 dark:text-gray-400'>
{passedTests} of {totalTests} tests passing
{totalTests > 0 && ` (${Math.round((passedTests / totalTests) * 100)}%)`}
</p>
)}
</div>
<div className='flex items-center gap-2'>
{tests?.length > 0 && (
<button
onClick={() => onRunTests(pattern, tests)}
onClick={() => handleRunTests(pattern, tests)}
disabled={isRunningTests}
className='inline-flex items-center px-3 py-2 text-sm font-medium rounded-md bg-green-600 hover:bg-green-700 disabled:bg-green-600/50 text-white'>
className='inline-flex items-center gap-2 px-3 py-1.5 text-sm rounded bg-gray-800 border border-gray-700 text-gray-200 hover:bg-gray-700 disabled:opacity-50 transition-colors'>
{isRunningTests ? (
<Loader className='w-4 h-4 mr-2 animate-spin' />
<Loader className='w-3.5 h-3.5 text-yellow-500 animate-spin' />
) : (
<Play className='w-4 h-4 mr-2' />
<Play className='w-3.5 h-3.5 text-green-500' />
)}
Run Tests
<span>Run Tests</span>
</button>
)}
<button
onClick={() => setIsModalOpen(true)}
className='inline-flex items-center px-3 py-2 text-sm font-medium rounded-md bg-blue-600 hover:bg-blue-700 text-white'>
<Plus className='w-4 h-4 mr-2' />
Add Test
className='inline-flex items-center gap-2 px-3 py-1.5 text-sm rounded bg-gray-800 border border-gray-700 text-gray-200 hover:bg-gray-700 transition-colors'>
<Plus className='w-3.5 h-3.5 text-blue-500' />
<span>Add Test</span>
</button>
</div>
</div>
{/* Progress Bar */}
{totalTests > 0 && (
<div className='mb-4'>
<div className='h-2 bg-gray-200 dark:bg-gray-700 rounded-full overflow-hidden'>
<div
className='h-full bg-emerald-500 transition-all duration-500 ease-out'
style={{width: `${(passedTests / totalTests) * 100}%`}}
/>
</div>
</div>
)}
{/* Test List */}
<div className='flex-1 overflow-y-auto pr-2'>
{tests?.length > 0 ? (
<div className='space-y-3'>
{tests.map(test => (
<UnitTest
key={test.id}
test={test}
pattern={pattern}
onDelete={() => handleDeleteTest(test.id)}
onEdit={() => handleEditTest(test)}
/>
))}
{tests.map(test => {
// Merge saved test with runtime results
const testWithResults = {
...test,
...testResults[test.id]
};
return (
<UnitTest
key={test.id}
test={testWithResults}
pattern={pattern}
onDelete={() => handleDeleteTest(test.id)}
onEdit={() => handleEditTest(test)}
/>
);
})}
</div>
) : (
<div className='text-center py-12 rounded-lg'>
@@ -173,15 +177,7 @@ RegexTestingTab.propTypes = {
PropTypes.shape({
id: PropTypes.number.isRequired,
input: PropTypes.string.isRequired,
expected: PropTypes.bool.isRequired,
passes: PropTypes.bool.isRequired,
lastRun: PropTypes.string,
matchedContent: PropTypes.string,
matchedGroups: PropTypes.arrayOf(PropTypes.string),
matchSpan: PropTypes.shape({
start: PropTypes.number,
end: PropTypes.number
})
expected: PropTypes.bool.isRequired
})
),
onTestsChange: PropTypes.func.isRequired,

View File

@@ -68,11 +68,7 @@ const UnitTest = ({test, pattern, onDelete, onEdit}) => {
: 'Should Not Match'}
</span>
</div>
<div className='flex items-center gap-2'>
<span className='text-xs text-gray-500 dark:text-gray-400'>
Last run: {test.lastRun}
</span>
<div className='flex gap-2'>
<div className='flex gap-2'>
<button
onClick={onEdit}
className='p-1 rounded shrink-0 transition-transform transform hover:scale-110'>
@@ -83,7 +79,6 @@ const UnitTest = ({test, pattern, onDelete, onEdit}) => {
className='p-1 rounded shrink-0 transition-transform transform hover:scale-110'>
<Trash2 className='w-4 h-4 text-gray-500 dark:text-gray-400' />
</button>
</div>
</div>
</div>
@@ -112,7 +107,6 @@ UnitTest.propTypes = {
input: PropTypes.string.isRequired,
expected: PropTypes.bool.isRequired,
passes: PropTypes.bool.isRequired,
lastRun: PropTypes.string,
matchedContent: PropTypes.string,
matchedGroups: PropTypes.arrayOf(PropTypes.string),
matchSpan: PropTypes.shape({

View File

@@ -65,13 +65,33 @@ export const useRegexModal = (initialPattern, onSave) => {
return;
}
// Validate pattern with .NET regex engine
try {
const validationResult = await RegexPatterns.verify(patternValue);
if (!validationResult.valid) {
Alert.error(`Invalid regex pattern: ${validationResult.error || 'Pattern validation failed'}`);
return;
}
} catch (error) {
console.error('Pattern validation error:', error);
Alert.error('Failed to validate pattern. Please check the pattern and try again.');
return;
}
try {
// Clean tests to only include saved data
const cleanTests = tests.map((test, index) => ({
id: test.id || index + 1,
input: test.input,
expected: test.expected
}));
const data = {
name,
pattern: patternValue,
description,
tags,
tests
tests: cleanTests
};
if (initialPattern && !isCloning) {
@@ -98,15 +118,16 @@ export const useRegexModal = (initialPattern, onSave) => {
const handleRunTests = useCallback(
async (pattern, tests) => {
try {
const updatedTests = await runTests(pattern, tests);
if (updatedTests) {
setTests(updatedTests);
}
const testResults = await runTests(pattern, tests);
// We don't update the tests state with results
// Results are only used for display, not saved
return testResults;
} catch (error) {
console.error('Error running tests:', error);
Alert.error(
error.message || 'Failed to run tests. Please try again.'
);
return null;
}
},
[runTests]

View File

@@ -34,14 +34,12 @@ export const useRegexTesting = onUpdateTests => {
}
);
// Update tests through the callback
if (onUpdateTests) {
onUpdateTests(result.tests);
}
// Return the test results (with match information)
// Don't save these results, just return them for display
return result.tests;
} else {
Alert.error(result.message || 'Failed to run tests');
return tests;
return null;
}
} catch (error) {
console.error('Error running tests:', error);