mirror of
https://github.com/Dictionarry-Hub/profilarr.git
synced 2026-01-30 14:20:59 +01:00
feat(pcd): add database linking functionality
This commit is contained in:
@@ -9,6 +9,9 @@ import { migration as migration004 } from './migrations/004_create_jobs_tables.t
|
||||
import { migration as migration005 } from './migrations/005_create_backup_settings.ts';
|
||||
import { migration as migration006 } from './migrations/006_simplify_log_settings.ts';
|
||||
import { migration as migration007 } from './migrations/007_create_notification_tables.ts';
|
||||
import { migration as migration008 } from './migrations/008_create_database_instances.ts';
|
||||
import { migration as migration009 } from './migrations/009_add_personal_access_token.ts';
|
||||
import { migration as migration010 } from './migrations/010_add_is_private.ts';
|
||||
|
||||
export interface Migration {
|
||||
version: number;
|
||||
@@ -233,7 +236,10 @@ export function loadMigrations(): Migration[] {
|
||||
migration004,
|
||||
migration005,
|
||||
migration006,
|
||||
migration007
|
||||
migration007,
|
||||
migration008,
|
||||
migration009,
|
||||
migration010
|
||||
];
|
||||
|
||||
// Sort by version number
|
||||
|
||||
@@ -0,0 +1,62 @@
|
||||
import type { Migration } from '../migrations.ts';
|
||||
|
||||
/**
|
||||
* Migration 008: Create database_instances table
|
||||
*
|
||||
* Creates the table for storing linked Profilarr Compliant Database (PCD) repositories.
|
||||
* These databases contain configuration profiles that can be synced to arr instances.
|
||||
*
|
||||
* Fields:
|
||||
* - id: Auto-incrementing primary key
|
||||
* - uuid: Unique identifier used for filesystem storage path
|
||||
* - name: User-friendly name (unique)
|
||||
* - repository_url: Git repository URL
|
||||
* - local_path: Path where the repository is cloned (data/databases/{uuid})
|
||||
* - sync_strategy: 0 = manual check, >0 = auto-check every X minutes
|
||||
* - auto_pull: 0 = notify only, 1 = auto-pull updates
|
||||
* - enabled: Boolean flag (1=enabled, 0=disabled)
|
||||
* - last_synced_at: Timestamp of last successful sync
|
||||
* - created_at: Timestamp of creation
|
||||
* - updated_at: Timestamp of last update
|
||||
*/
|
||||
|
||||
export const migration: Migration = {
|
||||
version: 8,
|
||||
name: 'Create database_instances table',
|
||||
|
||||
up: `
|
||||
CREATE TABLE database_instances (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
|
||||
-- Instance identification
|
||||
uuid TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
|
||||
-- Repository connection
|
||||
repository_url TEXT NOT NULL,
|
||||
|
||||
-- Local storage
|
||||
local_path TEXT NOT NULL,
|
||||
|
||||
-- Sync settings
|
||||
sync_strategy INTEGER NOT NULL DEFAULT 0,
|
||||
auto_pull INTEGER NOT NULL DEFAULT 0,
|
||||
|
||||
-- Status
|
||||
enabled INTEGER NOT NULL DEFAULT 1,
|
||||
last_synced_at DATETIME,
|
||||
|
||||
-- Metadata
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- Index for looking up by UUID
|
||||
CREATE INDEX idx_database_instances_uuid ON database_instances(uuid);
|
||||
`,
|
||||
|
||||
down: `
|
||||
DROP INDEX IF EXISTS idx_database_instances_uuid;
|
||||
DROP TABLE IF EXISTS database_instances;
|
||||
`
|
||||
};
|
||||
@@ -0,0 +1,47 @@
|
||||
import type { Migration } from '../migrations.ts';
|
||||
|
||||
/**
|
||||
* Migration 009: Add personal_access_token to database_instances
|
||||
*
|
||||
* Adds support for Personal Access Tokens (PAT) to enable:
|
||||
* - Cloning private repositories
|
||||
* - Push access for developers working on database content
|
||||
*/
|
||||
|
||||
export const migration: Migration = {
|
||||
version: 9,
|
||||
name: 'Add personal_access_token to database_instances',
|
||||
|
||||
up: `
|
||||
ALTER TABLE database_instances
|
||||
ADD COLUMN personal_access_token TEXT;
|
||||
`,
|
||||
|
||||
down: `
|
||||
-- SQLite doesn't support DROP COLUMN easily, so we recreate the table
|
||||
CREATE TABLE database_instances_backup (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
uuid TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
repository_url TEXT NOT NULL,
|
||||
local_path TEXT NOT NULL,
|
||||
sync_strategy INTEGER NOT NULL DEFAULT 0,
|
||||
auto_pull INTEGER NOT NULL DEFAULT 0,
|
||||
enabled INTEGER NOT NULL DEFAULT 1,
|
||||
last_synced_at DATETIME,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
INSERT INTO database_instances_backup
|
||||
SELECT id, uuid, name, repository_url, local_path, sync_strategy,
|
||||
auto_pull, enabled, last_synced_at, created_at, updated_at
|
||||
FROM database_instances;
|
||||
|
||||
DROP TABLE database_instances;
|
||||
|
||||
ALTER TABLE database_instances_backup RENAME TO database_instances;
|
||||
|
||||
CREATE INDEX idx_database_instances_uuid ON database_instances(uuid);
|
||||
`
|
||||
};
|
||||
48
src/lib/server/db/migrations/010_add_is_private.ts
Normal file
48
src/lib/server/db/migrations/010_add_is_private.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import type { Migration } from '../migrations.ts';
|
||||
|
||||
/**
|
||||
* Migration 010: Add is_private to database_instances
|
||||
*
|
||||
* Adds auto-detected flag to indicate if a repository is private.
|
||||
* This is determined during the initial clone by attempting to access
|
||||
* the repository with and without authentication.
|
||||
*/
|
||||
|
||||
export const migration: Migration = {
|
||||
version: 10,
|
||||
name: 'Add is_private to database_instances',
|
||||
|
||||
up: `
|
||||
ALTER TABLE database_instances
|
||||
ADD COLUMN is_private INTEGER NOT NULL DEFAULT 0;
|
||||
`,
|
||||
|
||||
down: `
|
||||
-- SQLite doesn't support DROP COLUMN easily, so we recreate the table
|
||||
CREATE TABLE database_instances_backup (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
uuid TEXT NOT NULL UNIQUE,
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
repository_url TEXT NOT NULL,
|
||||
local_path TEXT NOT NULL,
|
||||
sync_strategy INTEGER NOT NULL DEFAULT 0,
|
||||
auto_pull INTEGER NOT NULL DEFAULT 0,
|
||||
enabled INTEGER NOT NULL DEFAULT 1,
|
||||
personal_access_token TEXT,
|
||||
last_synced_at DATETIME,
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
INSERT INTO database_instances_backup
|
||||
SELECT id, uuid, name, repository_url, local_path, sync_strategy,
|
||||
auto_pull, enabled, personal_access_token, last_synced_at, created_at, updated_at
|
||||
FROM database_instances;
|
||||
|
||||
DROP TABLE database_instances;
|
||||
|
||||
ALTER TABLE database_instances_backup RENAME TO database_instances;
|
||||
|
||||
CREATE INDEX idx_database_instances_uuid ON database_instances(uuid);
|
||||
`
|
||||
};
|
||||
211
src/lib/server/db/queries/databaseInstances.ts
Normal file
211
src/lib/server/db/queries/databaseInstances.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { db } from '../db.ts';
|
||||
|
||||
/**
|
||||
* Types for database_instances table
|
||||
*/
|
||||
export interface DatabaseInstance {
|
||||
id: number;
|
||||
uuid: string;
|
||||
name: string;
|
||||
repository_url: string;
|
||||
local_path: string;
|
||||
sync_strategy: number;
|
||||
auto_pull: number;
|
||||
enabled: number;
|
||||
personal_access_token: string | null;
|
||||
is_private: number;
|
||||
last_synced_at: string | null;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
}
|
||||
|
||||
export interface CreateDatabaseInstanceInput {
|
||||
uuid: string;
|
||||
name: string;
|
||||
repositoryUrl: string;
|
||||
localPath: string;
|
||||
syncStrategy?: number;
|
||||
autoPull?: boolean;
|
||||
enabled?: boolean;
|
||||
personalAccessToken?: string;
|
||||
isPrivate?: boolean;
|
||||
}
|
||||
|
||||
export interface UpdateDatabaseInstanceInput {
|
||||
name?: string;
|
||||
repositoryUrl?: string;
|
||||
syncStrategy?: number;
|
||||
autoPull?: boolean;
|
||||
enabled?: boolean;
|
||||
personalAccessToken?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* All queries for database_instances table
|
||||
*/
|
||||
export const databaseInstancesQueries = {
|
||||
/**
|
||||
* Create a new database instance
|
||||
*/
|
||||
create(input: CreateDatabaseInstanceInput): number {
|
||||
const syncStrategy = input.syncStrategy ?? 0;
|
||||
const autoPull = input.autoPull !== false ? 1 : 0;
|
||||
const enabled = input.enabled !== false ? 1 : 0;
|
||||
const personalAccessToken = input.personalAccessToken || null;
|
||||
const isPrivate = input.isPrivate ? 1 : 0;
|
||||
|
||||
db.execute(
|
||||
`INSERT INTO database_instances (uuid, name, repository_url, local_path, sync_strategy, auto_pull, enabled, personal_access_token, is_private)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
||||
input.uuid,
|
||||
input.name,
|
||||
input.repositoryUrl,
|
||||
input.localPath,
|
||||
syncStrategy,
|
||||
autoPull,
|
||||
enabled,
|
||||
personalAccessToken,
|
||||
isPrivate
|
||||
);
|
||||
|
||||
// Get the last inserted ID
|
||||
const result = db.queryFirst<{ id: number }>('SELECT last_insert_rowid() as id');
|
||||
return result?.id ?? 0;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a database instance by ID
|
||||
*/
|
||||
getById(id: number): DatabaseInstance | undefined {
|
||||
return db.queryFirst<DatabaseInstance>('SELECT * FROM database_instances WHERE id = ?', id);
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a database instance by UUID
|
||||
*/
|
||||
getByUuid(uuid: string): DatabaseInstance | undefined {
|
||||
return db.queryFirst<DatabaseInstance>(
|
||||
'SELECT * FROM database_instances WHERE uuid = ?',
|
||||
uuid
|
||||
);
|
||||
},
|
||||
|
||||
/**
|
||||
* Get all database instances
|
||||
*/
|
||||
getAll(): DatabaseInstance[] {
|
||||
return db.query<DatabaseInstance>('SELECT * FROM database_instances ORDER BY name');
|
||||
},
|
||||
|
||||
/**
|
||||
* Get enabled database instances
|
||||
*/
|
||||
getEnabled(): DatabaseInstance[] {
|
||||
return db.query<DatabaseInstance>(
|
||||
'SELECT * FROM database_instances WHERE enabled = 1 ORDER BY name'
|
||||
);
|
||||
},
|
||||
|
||||
/**
|
||||
* Get databases that need auto-sync check
|
||||
*/
|
||||
getDueForSync(): DatabaseInstance[] {
|
||||
return db.query<DatabaseInstance>(
|
||||
`SELECT * FROM database_instances
|
||||
WHERE enabled = 1
|
||||
AND sync_strategy > 0
|
||||
AND (
|
||||
last_synced_at IS NULL
|
||||
OR datetime(last_synced_at, '+' || sync_strategy || ' minutes') <= datetime('now')
|
||||
)
|
||||
ORDER BY last_synced_at ASC NULLS FIRST`
|
||||
);
|
||||
},
|
||||
|
||||
/**
|
||||
* Update a database instance
|
||||
*/
|
||||
update(id: number, input: UpdateDatabaseInstanceInput): boolean {
|
||||
const updates: string[] = [];
|
||||
const params: (string | number | null)[] = [];
|
||||
|
||||
if (input.name !== undefined) {
|
||||
updates.push('name = ?');
|
||||
params.push(input.name);
|
||||
}
|
||||
if (input.repositoryUrl !== undefined) {
|
||||
updates.push('repository_url = ?');
|
||||
params.push(input.repositoryUrl);
|
||||
}
|
||||
if (input.syncStrategy !== undefined) {
|
||||
updates.push('sync_strategy = ?');
|
||||
params.push(input.syncStrategy);
|
||||
}
|
||||
if (input.autoPull !== undefined) {
|
||||
updates.push('auto_pull = ?');
|
||||
params.push(input.autoPull ? 1 : 0);
|
||||
}
|
||||
if (input.enabled !== undefined) {
|
||||
updates.push('enabled = ?');
|
||||
params.push(input.enabled ? 1 : 0);
|
||||
}
|
||||
if (input.personalAccessToken !== undefined) {
|
||||
updates.push('personal_access_token = ?');
|
||||
params.push(input.personalAccessToken || null);
|
||||
}
|
||||
|
||||
if (updates.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Add updated_at
|
||||
updates.push('updated_at = CURRENT_TIMESTAMP');
|
||||
params.push(id);
|
||||
|
||||
const affected = db.execute(
|
||||
`UPDATE database_instances SET ${updates.join(', ')} WHERE id = ?`,
|
||||
...params
|
||||
);
|
||||
|
||||
return affected > 0;
|
||||
},
|
||||
|
||||
/**
|
||||
* Update last_synced_at timestamp
|
||||
*/
|
||||
updateSyncedAt(id: number): boolean {
|
||||
const affected = db.execute(
|
||||
'UPDATE database_instances SET last_synced_at = CURRENT_TIMESTAMP, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
|
||||
id
|
||||
);
|
||||
return affected > 0;
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a database instance
|
||||
*/
|
||||
delete(id: number): boolean {
|
||||
const affected = db.execute('DELETE FROM database_instances WHERE id = ?', id);
|
||||
return affected > 0;
|
||||
},
|
||||
|
||||
/**
|
||||
* Check if a database name already exists
|
||||
*/
|
||||
nameExists(name: string, excludeId?: number): boolean {
|
||||
if (excludeId !== undefined) {
|
||||
const result = db.queryFirst<{ count: number }>(
|
||||
'SELECT COUNT(*) as count FROM database_instances WHERE name = ? AND id != ?',
|
||||
name,
|
||||
excludeId
|
||||
);
|
||||
return (result?.count ?? 0) > 0;
|
||||
}
|
||||
|
||||
const result = db.queryFirst<{ count: number }>(
|
||||
'SELECT COUNT(*) as count FROM database_instances WHERE name = ?',
|
||||
name
|
||||
);
|
||||
return (result?.count ?? 0) > 0;
|
||||
}
|
||||
};
|
||||
@@ -1,7 +1,7 @@
|
||||
-- Profilarr Database Schema
|
||||
-- This file documents the current database schema after all migrations
|
||||
-- DO NOT execute this file directly - use migrations instead
|
||||
-- Last updated: 2025-10-22
|
||||
-- Last updated: 2025-11-04
|
||||
|
||||
-- ==============================================================================
|
||||
-- TABLE: migrations
|
||||
@@ -191,6 +191,40 @@ CREATE TABLE notification_history (
|
||||
FOREIGN KEY (service_id) REFERENCES notification_services(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
-- ==============================================================================
|
||||
-- TABLE: database_instances
|
||||
-- Purpose: Store linked Profilarr Compliant Database (PCD) repositories
|
||||
-- Migration: 008_create_database_instances.ts, 009_add_personal_access_token.ts, 010_add_is_private.ts
|
||||
-- ==============================================================================
|
||||
|
||||
CREATE TABLE database_instances (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
|
||||
-- Instance identification
|
||||
uuid TEXT NOT NULL UNIQUE, -- UUID for filesystem storage path
|
||||
name TEXT NOT NULL UNIQUE, -- User-friendly name (e.g., "Dictionarry DB")
|
||||
|
||||
-- Repository connection
|
||||
repository_url TEXT NOT NULL, -- Git repository URL
|
||||
personal_access_token TEXT, -- PAT for private repos and push access (Migration 009)
|
||||
is_private INTEGER NOT NULL DEFAULT 0, -- 1=private repo, 0=public (auto-detected, Migration 010)
|
||||
|
||||
-- Local storage
|
||||
local_path TEXT NOT NULL, -- Path where repo is cloned (data/databases/{uuid})
|
||||
|
||||
-- Sync settings
|
||||
sync_strategy INTEGER NOT NULL DEFAULT 0, -- 0=manual check, >0=auto-check every X minutes
|
||||
auto_pull INTEGER NOT NULL DEFAULT 0, -- 0=notify only, 1=auto-pull updates
|
||||
|
||||
-- Status
|
||||
enabled INTEGER NOT NULL DEFAULT 1, -- 1=enabled, 0=disabled
|
||||
last_synced_at DATETIME, -- Timestamp of last successful sync
|
||||
|
||||
-- Metadata
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- ==============================================================================
|
||||
-- INDEXES
|
||||
-- Purpose: Improve query performance
|
||||
@@ -212,3 +246,6 @@ CREATE INDEX idx_notification_services_type ON notification_services(service_typ
|
||||
CREATE INDEX idx_notification_history_service_id ON notification_history(service_id);
|
||||
CREATE INDEX idx_notification_history_sent_at ON notification_history(sent_at);
|
||||
CREATE INDEX idx_notification_history_status ON notification_history(status);
|
||||
|
||||
-- Database instances indexes (Migration: 008_create_database_instances.ts)
|
||||
CREATE INDEX idx_database_instances_uuid ON database_instances(uuid);
|
||||
|
||||
60
src/lib/server/jobs/definitions/syncDatabases.ts
Normal file
60
src/lib/server/jobs/definitions/syncDatabases.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import { logger } from '$logger/logger.ts';
|
||||
import { syncDatabases } from '../logic/syncDatabases.ts';
|
||||
import type { JobDefinition, JobResult } from '../types.ts';
|
||||
|
||||
/**
|
||||
* Sync PCD databases job
|
||||
* Checks for databases that need syncing and pulls updates if auto_pull is enabled
|
||||
*/
|
||||
export const syncDatabasesJob: JobDefinition = {
|
||||
name: 'sync_databases',
|
||||
description: 'Auto-sync PCD databases with remote repositories',
|
||||
schedule: '*/5 minutes',
|
||||
|
||||
handler: async (): Promise<JobResult> => {
|
||||
try {
|
||||
await logger.info('Starting database sync job', {
|
||||
source: 'SyncDatabasesJob'
|
||||
});
|
||||
|
||||
// Run sync
|
||||
const result = await syncDatabases();
|
||||
|
||||
// Log results for each database
|
||||
for (const db of result.databases) {
|
||||
if (db.success) {
|
||||
if (db.updatesPulled > 0) {
|
||||
await logger.info(`Synced database: ${db.name}`, {
|
||||
source: 'SyncDatabasesJob',
|
||||
meta: { databaseId: db.id, updatesPulled: db.updatesPulled }
|
||||
});
|
||||
}
|
||||
} else {
|
||||
await logger.error(`Failed to sync database: ${db.name}`, {
|
||||
source: 'SyncDatabasesJob',
|
||||
meta: { databaseId: db.id, error: db.error }
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const message = `Sync completed: ${result.successCount} successful, ${result.failureCount} failed (${result.totalChecked} total)`;
|
||||
|
||||
if (result.failureCount > 0 && result.successCount === 0) {
|
||||
return {
|
||||
success: false,
|
||||
error: message
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: message
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -6,6 +6,7 @@ import { logger } from '$logger/logger.ts';
|
||||
import { cleanupLogsJob } from './definitions/cleanupLogs.ts';
|
||||
import { createBackupJob } from './definitions/createBackup.ts';
|
||||
import { cleanupBackupsJob } from './definitions/cleanupBackups.ts';
|
||||
import { syncDatabasesJob } from './definitions/syncDatabases.ts';
|
||||
|
||||
/**
|
||||
* Register all job definitions
|
||||
@@ -15,6 +16,7 @@ function registerAllJobs(): void {
|
||||
jobRegistry.register(cleanupLogsJob);
|
||||
jobRegistry.register(createBackupJob);
|
||||
jobRegistry.register(cleanupBackupsJob);
|
||||
jobRegistry.register(syncDatabasesJob);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -54,7 +54,7 @@ export async function createBackup(
|
||||
error: `Source path is not a directory: ${sourceDir}`,
|
||||
};
|
||||
}
|
||||
} catch (error) {
|
||||
} catch (_error) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Source directory does not exist: ${sourceDir}`,
|
||||
|
||||
139
src/lib/server/jobs/logic/syncDatabases.ts
Normal file
139
src/lib/server/jobs/logic/syncDatabases.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Core sync logic for PCD auto-sync
|
||||
* Checks for databases that need syncing and pulls updates if auto_pull is enabled
|
||||
*/
|
||||
|
||||
import { pcdManager } from '$pcd/pcd.ts';
|
||||
import { notificationManager } from '$notifications/NotificationManager.ts';
|
||||
|
||||
export interface DatabaseSyncStatus {
|
||||
id: number;
|
||||
name: string;
|
||||
success: boolean;
|
||||
updatesPulled: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface SyncDatabasesResult {
|
||||
totalChecked: number;
|
||||
successCount: number;
|
||||
failureCount: number;
|
||||
databases: DatabaseSyncStatus[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync all databases that are due for auto-sync
|
||||
* Checks databases with sync_strategy > 0 that haven't been synced within their interval
|
||||
*/
|
||||
export async function syncDatabases(): Promise<SyncDatabasesResult> {
|
||||
const databases = pcdManager.getDueForSync();
|
||||
|
||||
const totalChecked = databases.length;
|
||||
let successCount = 0;
|
||||
let failureCount = 0;
|
||||
const statuses: DatabaseSyncStatus[] = [];
|
||||
|
||||
for (const db of databases) {
|
||||
try {
|
||||
// Check for updates
|
||||
const updateInfo = await pcdManager.checkForUpdates(db.id);
|
||||
|
||||
if (!updateInfo.hasUpdates) {
|
||||
// No updates available, just mark as checked
|
||||
statuses.push({
|
||||
id: db.id,
|
||||
name: db.name,
|
||||
success: true,
|
||||
updatesPulled: 0
|
||||
});
|
||||
successCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Updates are available
|
||||
if (db.auto_pull === 1) {
|
||||
// Auto-pull is enabled, sync the database
|
||||
const syncResult = await pcdManager.sync(db.id);
|
||||
|
||||
if (syncResult.success) {
|
||||
// Send success notification
|
||||
await notificationManager.notify({
|
||||
type: 'pcd.sync_success',
|
||||
title: 'Database Synced Successfully',
|
||||
message: `Database "${db.name}" has been updated (${syncResult.commitsBehind} commit${syncResult.commitsBehind === 1 ? '' : 's'} pulled)`,
|
||||
metadata: {
|
||||
databaseId: db.id,
|
||||
databaseName: db.name,
|
||||
commitsPulled: syncResult.commitsBehind
|
||||
}
|
||||
});
|
||||
|
||||
statuses.push({
|
||||
id: db.id,
|
||||
name: db.name,
|
||||
success: true,
|
||||
updatesPulled: syncResult.commitsBehind
|
||||
});
|
||||
successCount++;
|
||||
} else {
|
||||
// Send failure notification
|
||||
await notificationManager.notify({
|
||||
type: 'pcd.sync_failed',
|
||||
title: 'Database Sync Failed',
|
||||
message: `Failed to sync database "${db.name}": ${syncResult.error}`,
|
||||
metadata: {
|
||||
databaseId: db.id,
|
||||
databaseName: db.name,
|
||||
error: syncResult.error
|
||||
}
|
||||
});
|
||||
|
||||
statuses.push({
|
||||
id: db.id,
|
||||
name: db.name,
|
||||
success: false,
|
||||
updatesPulled: 0,
|
||||
error: syncResult.error
|
||||
});
|
||||
failureCount++;
|
||||
}
|
||||
} else {
|
||||
// Auto-pull is disabled, send notification
|
||||
await notificationManager.notify({
|
||||
type: 'pcd.updates_available',
|
||||
title: 'Database Updates Available',
|
||||
message: `Updates are available for database "${db.name}" (${updateInfo.commitsBehind} commit${updateInfo.commitsBehind === 1 ? '' : 's'} behind)`,
|
||||
metadata: {
|
||||
databaseId: db.id,
|
||||
databaseName: db.name,
|
||||
commitsBehind: updateInfo.commitsBehind
|
||||
}
|
||||
});
|
||||
|
||||
statuses.push({
|
||||
id: db.id,
|
||||
name: db.name,
|
||||
success: true,
|
||||
updatesPulled: 0
|
||||
});
|
||||
successCount++;
|
||||
}
|
||||
} catch (error) {
|
||||
statuses.push({
|
||||
id: db.id,
|
||||
name: db.name,
|
||||
success: false,
|
||||
updatesPulled: 0,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
});
|
||||
failureCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalChecked,
|
||||
successCount,
|
||||
failureCount,
|
||||
databases: statuses
|
||||
};
|
||||
}
|
||||
106
src/lib/server/pcd/deps.ts
Normal file
106
src/lib/server/pcd/deps.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
/**
|
||||
* PCD Dependency Resolution
|
||||
* Handles cloning and managing PCD dependencies
|
||||
*/
|
||||
|
||||
import * as git from '$utils/git/git.ts';
|
||||
import { loadManifest } from './manifest.ts';
|
||||
|
||||
/**
|
||||
* Extract repository name from GitHub URL
|
||||
* https://github.com/Dictionarry-Hub/schema -> schema
|
||||
*/
|
||||
function getRepoName(repoUrl: string): string {
|
||||
const parts = repoUrl.split('/');
|
||||
return parts[parts.length - 1];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get dependency path
|
||||
*/
|
||||
function getDependencyPath(pcdPath: string, repoName: string): string {
|
||||
return `${pcdPath}/deps/${repoName}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone and checkout a single dependency
|
||||
*/
|
||||
async function cloneDependency(
|
||||
pcdPath: string,
|
||||
repoUrl: string,
|
||||
version: string
|
||||
): Promise<void> {
|
||||
const repoName = getRepoName(repoUrl);
|
||||
const depPath = getDependencyPath(pcdPath, repoName);
|
||||
|
||||
// Clone the dependency repository
|
||||
await git.clone(repoUrl, depPath);
|
||||
|
||||
// Checkout the specific version tag
|
||||
await git.checkout(depPath, version);
|
||||
}
|
||||
|
||||
/**
|
||||
* Process all dependencies for a PCD
|
||||
* Clones dependencies and validates their manifests
|
||||
*/
|
||||
export async function processDependencies(pcdPath: string): Promise<void> {
|
||||
// Load the PCD's manifest
|
||||
const manifest = await loadManifest(pcdPath);
|
||||
|
||||
// Skip if no dependencies
|
||||
if (!manifest.dependencies || Object.keys(manifest.dependencies).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Create deps directory
|
||||
const depsDir = `${pcdPath}/deps`;
|
||||
await Deno.mkdir(depsDir, { recursive: true });
|
||||
|
||||
// Process each dependency
|
||||
for (const [repoUrl, version] of Object.entries(manifest.dependencies)) {
|
||||
// Clone and checkout the dependency
|
||||
await cloneDependency(pcdPath, repoUrl, version);
|
||||
|
||||
// Validate the dependency's manifest
|
||||
const repoName = getRepoName(repoUrl);
|
||||
const depPath = getDependencyPath(pcdPath, repoName);
|
||||
await loadManifest(depPath);
|
||||
|
||||
// TODO (post-2.0): Recursively process nested dependencies
|
||||
// For now, we only support one level of dependencies
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if all dependencies are present and valid
|
||||
*/
|
||||
export async function validateDependencies(pcdPath: string): Promise<boolean> {
|
||||
try {
|
||||
const manifest = await loadManifest(pcdPath);
|
||||
|
||||
// If no dependencies, validation passes
|
||||
if (!manifest.dependencies || Object.keys(manifest.dependencies).length === 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (const [repoUrl] of Object.entries(manifest.dependencies)) {
|
||||
const repoName = getRepoName(repoUrl);
|
||||
const depPath = getDependencyPath(pcdPath, repoName);
|
||||
|
||||
// Check if dependency directory exists
|
||||
try {
|
||||
await Deno.stat(depPath);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate dependency manifest
|
||||
await loadManifest(depPath);
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
152
src/lib/server/pcd/manifest.ts
Normal file
152
src/lib/server/pcd/manifest.ts
Normal file
@@ -0,0 +1,152 @@
|
||||
/**
|
||||
* PCD Manifest Parser and Validator
|
||||
* Handles reading and validating pcd.json files
|
||||
*/
|
||||
|
||||
export interface Manifest {
|
||||
name: string;
|
||||
version: string;
|
||||
description: string;
|
||||
dependencies?: Record<string, string>;
|
||||
arr_types?: string[];
|
||||
authors?: Array<{ name: string; email?: string }>;
|
||||
license?: string;
|
||||
repository?: string;
|
||||
tags?: string[];
|
||||
links?: {
|
||||
homepage?: string;
|
||||
documentation?: string;
|
||||
issues?: string;
|
||||
};
|
||||
profilarr: {
|
||||
minimum_version: string;
|
||||
};
|
||||
}
|
||||
|
||||
export class ManifestValidationError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'ManifestValidationError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read manifest from a PCD repository
|
||||
*/
|
||||
export async function readManifest(pcdPath: string): Promise<Manifest> {
|
||||
const manifestPath = `${pcdPath}/pcd.json`;
|
||||
|
||||
try {
|
||||
const manifestContent = await Deno.readTextFile(manifestPath);
|
||||
const manifest = JSON.parse(manifestContent);
|
||||
return manifest;
|
||||
} catch (error) {
|
||||
if (error instanceof Deno.errors.NotFound) {
|
||||
throw new ManifestValidationError('pcd.json not found in repository');
|
||||
}
|
||||
if (error instanceof SyntaxError) {
|
||||
throw new ManifestValidationError('pcd.json contains invalid JSON');
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a manifest object
|
||||
*/
|
||||
export function validateManifest(manifest: unknown): asserts manifest is Manifest {
|
||||
if (!manifest || typeof manifest !== 'object') {
|
||||
throw new ManifestValidationError('Manifest must be an object');
|
||||
}
|
||||
|
||||
const m = manifest as Record<string, unknown>;
|
||||
|
||||
// Required fields
|
||||
if (typeof m.name !== 'string' || !m.name) {
|
||||
throw new ManifestValidationError('Manifest missing required field: name');
|
||||
}
|
||||
|
||||
if (typeof m.version !== 'string' || !m.version) {
|
||||
throw new ManifestValidationError('Manifest missing required field: version');
|
||||
}
|
||||
|
||||
if (typeof m.description !== 'string' || !m.description) {
|
||||
throw new ManifestValidationError('Manifest missing required field: description');
|
||||
}
|
||||
|
||||
// Validate dependencies if present
|
||||
if (m.dependencies !== undefined) {
|
||||
if (typeof m.dependencies !== 'object' || m.dependencies === null) {
|
||||
throw new ManifestValidationError('Manifest field dependencies must be an object');
|
||||
}
|
||||
|
||||
// Validate dependencies includes schema (only check for non-empty dependencies)
|
||||
const deps = m.dependencies as Record<string, unknown>;
|
||||
if (Object.keys(deps).length > 0) {
|
||||
const hasSchema = Object.keys(deps).some((url) => url.includes('/schema'));
|
||||
if (!hasSchema) {
|
||||
throw new ManifestValidationError('Manifest dependencies must include schema repository');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate profilarr section
|
||||
if (!m.profilarr || typeof m.profilarr !== 'object') {
|
||||
throw new ManifestValidationError('Manifest missing required field: profilarr');
|
||||
}
|
||||
|
||||
const profilarr = m.profilarr as Record<string, unknown>;
|
||||
if (typeof profilarr.minimum_version !== 'string' || !profilarr.minimum_version) {
|
||||
throw new ManifestValidationError('Manifest missing required field: profilarr.minimum_version');
|
||||
}
|
||||
|
||||
// Optional fields validation
|
||||
if (m.arr_types !== undefined) {
|
||||
if (!Array.isArray(m.arr_types)) {
|
||||
throw new ManifestValidationError('Manifest field arr_types must be an array');
|
||||
}
|
||||
const validTypes = ['radarr', 'sonarr', 'readarr', 'lidarr', 'prowlarr', 'whisparr'];
|
||||
for (const type of m.arr_types) {
|
||||
if (typeof type !== 'string' || !validTypes.includes(type)) {
|
||||
throw new ManifestValidationError(
|
||||
`Invalid arr_type: ${type}. Must be one of: ${validTypes.join(', ')}`
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (m.authors !== undefined) {
|
||||
if (!Array.isArray(m.authors)) {
|
||||
throw new ManifestValidationError('Manifest field authors must be an array');
|
||||
}
|
||||
for (const author of m.authors) {
|
||||
if (!author || typeof author !== 'object') {
|
||||
throw new ManifestValidationError('Each author must be an object');
|
||||
}
|
||||
const a = author as Record<string, unknown>;
|
||||
if (typeof a.name !== 'string' || !a.name) {
|
||||
throw new ManifestValidationError('Each author must have a name');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (m.tags !== undefined) {
|
||||
if (!Array.isArray(m.tags)) {
|
||||
throw new ManifestValidationError('Manifest field tags must be an array');
|
||||
}
|
||||
for (const tag of m.tags) {
|
||||
if (typeof tag !== 'string') {
|
||||
throw new ManifestValidationError('Each tag must be a string');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read and validate manifest from a PCD repository
|
||||
*/
|
||||
export async function loadManifest(pcdPath: string): Promise<Manifest> {
|
||||
const manifest = await readManifest(pcdPath);
|
||||
validateManifest(manifest);
|
||||
return manifest;
|
||||
}
|
||||
19
src/lib/server/pcd/paths.ts
Normal file
19
src/lib/server/pcd/paths.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
|
||||
* Helper functions for PCD paths
|
||||
*/
|
||||
|
||||
import { config } from '$config';
|
||||
|
||||
/**
|
||||
* Get the filesystem path for a PCD repository
|
||||
*/
|
||||
export function getPCDPath(uuid: string): string {
|
||||
return `${config.paths.databases}/${uuid}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the manifest file path for a PCD repository
|
||||
*/
|
||||
export function getManifestPath(uuid: string): string {
|
||||
return `${getPCDPath(uuid)}/pcd.json`;
|
||||
}
|
||||
242
src/lib/server/pcd/pcd.ts
Normal file
242
src/lib/server/pcd/pcd.ts
Normal file
@@ -0,0 +1,242 @@
|
||||
/**
|
||||
* PCD Manager - High-level orchestration for PCD lifecycle
|
||||
*/
|
||||
|
||||
import * as git from '$utils/git/git.ts';
|
||||
import { databaseInstancesQueries } from '$db/queries/databaseInstances.ts';
|
||||
import type { DatabaseInstance } from '$db/queries/databaseInstances.ts';
|
||||
import { loadManifest, type Manifest } from './manifest.ts';
|
||||
import { getPCDPath } from './paths.ts';
|
||||
import { processDependencies } from './deps.ts';
|
||||
import { notificationManager } from '$notifications/NotificationManager.ts';
|
||||
|
||||
/**
 * Options accepted by {@link PCDManager.link} when linking a new PCD repository.
 */
export interface LinkOptions {
  /** Git repository URL (GitHub HTTPS URL). */
  repositoryUrl: string;
  /** User-friendly display name for the database (unique). */
  name: string;
  /** Branch to clone; the repository's default branch is used when omitted. */
  branch?: string;
  /** 0 = manual check only, >0 = auto-check interval in minutes. */
  syncStrategy?: number;
  /** false = notify only, true = auto-pull updates. */
  autoPull?: boolean;
  /** GitHub Personal Access Token for private repositories. */
  personalAccessToken?: string;
}
|
||||
|
||||
/**
 * Result of {@link PCDManager.sync}.
 */
export interface SyncResult {
  /** Whether the sync (or up-to-date check) completed without error. */
  success: boolean;
  /** Commits the local clone was behind before pulling; 0 when already up to date or on error. */
  commitsBehind: number;
  /** Error message, present only when success is false. */
  error?: string;
}
|
||||
|
||||
/**
|
||||
* PCD Manager - Manages the lifecycle of Profilarr Compliant Databases
|
||||
*/
|
||||
class PCDManager {
|
||||
/**
|
||||
* Link a new PCD repository
|
||||
*/
|
||||
async link(options: LinkOptions): Promise<DatabaseInstance> {
|
||||
// Generate UUID for storage
|
||||
const uuid = crypto.randomUUID();
|
||||
const localPath = getPCDPath(uuid);
|
||||
|
||||
try {
|
||||
// Clone the repository and detect if it's private
|
||||
const isPrivate = await git.clone(options.repositoryUrl, localPath, options.branch, options.personalAccessToken);
|
||||
|
||||
// Validate manifest (loadManifest throws if invalid)
|
||||
await loadManifest(localPath);
|
||||
|
||||
// Process dependencies (clone and validate)
|
||||
await processDependencies(localPath);
|
||||
|
||||
// Insert into database
|
||||
const id = databaseInstancesQueries.create({
|
||||
uuid,
|
||||
name: options.name,
|
||||
repositoryUrl: options.repositoryUrl,
|
||||
localPath,
|
||||
syncStrategy: options.syncStrategy,
|
||||
autoPull: options.autoPull,
|
||||
personalAccessToken: options.personalAccessToken,
|
||||
isPrivate
|
||||
});
|
||||
|
||||
// Get and return the created instance
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error('Failed to retrieve created database instance');
|
||||
}
|
||||
|
||||
// Send notification
|
||||
await notificationManager.notify({
|
||||
type: 'pcd.linked',
|
||||
title: 'Database Linked',
|
||||
message: `Database "${options.name}" has been linked successfully`,
|
||||
metadata: {
|
||||
databaseId: id,
|
||||
databaseName: options.name,
|
||||
repositoryUrl: options.repositoryUrl
|
||||
}
|
||||
});
|
||||
|
||||
return instance;
|
||||
} catch (error) {
|
||||
// Cleanup on failure - remove cloned directory
|
||||
try {
|
||||
await Deno.remove(localPath, { recursive: true });
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Unlink a PCD repository
|
||||
*/
|
||||
async unlink(id: number): Promise<void> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
// Store name and URL for notification
|
||||
const { name, repository_url } = instance;
|
||||
|
||||
// Delete from database first
|
||||
databaseInstancesQueries.delete(id);
|
||||
|
||||
// Then cleanup filesystem
|
||||
try {
|
||||
await Deno.remove(instance.local_path, { recursive: true });
|
||||
} catch (error) {
|
||||
// Log but don't throw - database entry is already deleted
|
||||
console.error(`Failed to remove PCD directory ${instance.local_path}:`, error);
|
||||
}
|
||||
|
||||
// Send notification
|
||||
await notificationManager.notify({
|
||||
type: 'pcd.unlinked',
|
||||
title: 'Database Unlinked',
|
||||
message: `Database "${name}" has been removed`,
|
||||
metadata: {
|
||||
databaseId: id,
|
||||
databaseName: name,
|
||||
repositoryUrl: repository_url
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync a PCD repository (pull updates)
|
||||
*/
|
||||
async sync(id: number): Promise<SyncResult> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
try {
|
||||
// Check for updates first
|
||||
const updateInfo = await git.checkForUpdates(instance.local_path);
|
||||
|
||||
if (!updateInfo.hasUpdates) {
|
||||
// Already up to date
|
||||
databaseInstancesQueries.updateSyncedAt(id);
|
||||
return {
|
||||
success: true,
|
||||
commitsBehind: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Pull updates
|
||||
await git.pull(instance.local_path);
|
||||
|
||||
// Update last_synced_at
|
||||
databaseInstancesQueries.updateSyncedAt(id);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
commitsBehind: updateInfo.commitsBehind
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
commitsBehind: 0,
|
||||
error: error instanceof Error ? error.message : 'Unknown error'
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for available updates without pulling
|
||||
*/
|
||||
async checkForUpdates(id: number): Promise<git.UpdateInfo> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
return await git.checkForUpdates(instance.local_path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get parsed manifest for a PCD
|
||||
*/
|
||||
async getManifest(id: number): Promise<Manifest> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
return await loadManifest(instance.local_path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch branch for a PCD
|
||||
*/
|
||||
async switchBranch(id: number, branch: string): Promise<void> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
await git.checkout(instance.local_path, branch);
|
||||
await git.pull(instance.local_path);
|
||||
databaseInstancesQueries.updateSyncedAt(id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get git status for a PCD
|
||||
*/
|
||||
async getStatus(id: number): Promise<git.GitStatus> {
|
||||
const instance = databaseInstancesQueries.getById(id);
|
||||
if (!instance) {
|
||||
throw new Error(`Database instance ${id} not found`);
|
||||
}
|
||||
|
||||
return await git.getStatus(instance.local_path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all PCDs
|
||||
*/
|
||||
getAll(): DatabaseInstance[] {
|
||||
return databaseInstancesQueries.getAll();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get PCD by ID
|
||||
*/
|
||||
getById(id: number): DatabaseInstance | undefined {
|
||||
return databaseInstancesQueries.getById(id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get PCDs that need auto-sync
|
||||
*/
|
||||
getDueForSync(): DatabaseInstance[] {
|
||||
return databaseInstancesQueries.getDueForSync();
|
||||
}
|
||||
}
|
||||
|
||||
// Export singleton instance
|
||||
export const pcdManager = new PCDManager();
|
||||
@@ -26,6 +26,7 @@ class Config {
|
||||
await Deno.mkdir(this.paths.logs, { recursive: true });
|
||||
await Deno.mkdir(this.paths.data, { recursive: true });
|
||||
await Deno.mkdir(this.paths.backups, { recursive: true });
|
||||
await Deno.mkdir(this.paths.databases, { recursive: true });
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -54,6 +55,9 @@ class Config {
|
||||
get database(): string {
|
||||
return `${config.basePath}/data/profilarr.db`;
|
||||
},
|
||||
get databases(): string {
|
||||
return `${config.basePath}/data/databases`;
|
||||
},
|
||||
get backups(): string {
|
||||
return `${config.basePath}/backups`;
|
||||
}
|
||||
|
||||
299
src/lib/server/utils/git/git.ts
Normal file
299
src/lib/server/utils/git/git.ts
Normal file
@@ -0,0 +1,299 @@
|
||||
/**
|
||||
* Git utility functions for managing database repositories
|
||||
*/
|
||||
|
||||
/**
 * Snapshot of a repository's working-tree state, as parsed from
 * `git status --short` by getStatus().
 */
export interface GitStatus {
  /** Name of the currently checked-out branch. */
  currentBranch: string;
  /** True when any untracked, modified, or staged files exist. */
  isDirty: boolean;
  /** Untracked file paths ('??' entries). */
  untracked: string[];
  /** Files modified or deleted in the working tree (unstaged). */
  modified: string[];
  /** Files modified/added/deleted in the index (staged). */
  staged: string[];
}
|
||||
|
||||
/**
 * Result of comparing local HEAD against its remote tracking branch,
 * produced by checkForUpdates().
 */
export interface UpdateInfo {
  /** True when the local branch is behind its remote counterpart. */
  hasUpdates: boolean;
  /** Commits on origin/<branch> not reachable from local HEAD. */
  commitsBehind: number;
  /** Local commits not reachable from origin/<branch>. */
  commitsAhead: number;
  /** Hash of the remote branch tip (falls back to local HEAD when the remote branch cannot be resolved). */
  latestRemoteCommit: string;
  /** Hash of local HEAD. */
  currentLocalCommit: string;
}
|
||||
|
||||
/**
|
||||
* Execute a git command with sandboxed environment (no system credentials)
|
||||
*/
|
||||
async function execGit(args: string[], cwd?: string): Promise<string> {
|
||||
const command = new Deno.Command('git', {
|
||||
args,
|
||||
cwd,
|
||||
stdout: 'piped',
|
||||
stderr: 'piped',
|
||||
env: {
|
||||
// Disable all credential helpers and interactive prompts
|
||||
GIT_TERMINAL_PROMPT: '0', // Fail instead of prompting (git 2.3+)
|
||||
GIT_ASKPASS: 'echo', // Return empty on credential requests
|
||||
GIT_SSH_COMMAND: 'ssh -o BatchMode=yes', // Disable SSH password prompts
|
||||
// Clear credential helpers via environment config
|
||||
GIT_CONFIG_COUNT: '1',
|
||||
GIT_CONFIG_KEY_0: 'credential.helper',
|
||||
GIT_CONFIG_VALUE_0: ''
|
||||
}
|
||||
});
|
||||
|
||||
const { code, stdout, stderr } = await command.output();
|
||||
|
||||
if (code !== 0) {
|
||||
const errorMessage = new TextDecoder().decode(stderr);
|
||||
throw new Error(`Git command failed: ${errorMessage}`);
|
||||
}
|
||||
|
||||
return new TextDecoder().decode(stdout).trim();
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate that a repository URL is accessible and detect if it's private using GitHub API
|
||||
* Returns true if the repository is private, false if public
|
||||
*/
|
||||
async function validateRepository(repositoryUrl: string, personalAccessToken?: string): Promise<boolean> {
|
||||
// Validate GitHub URL format and extract owner/repo
|
||||
const githubPattern = /^https:\/\/github\.com\/([\w-]+)\/([\w-]+)\/?$/;
|
||||
const normalizedUrl = repositoryUrl.replace(/\.git$/, '');
|
||||
const match = normalizedUrl.match(githubPattern);
|
||||
|
||||
if (!match) {
|
||||
throw new Error('Repository URL must be a valid GitHub repository (https://github.com/username/repo)');
|
||||
}
|
||||
|
||||
const [, owner, repo] = match;
|
||||
const apiUrl = `https://api.github.com/repos/${owner}/${repo}`;
|
||||
|
||||
// First try without authentication to check if it's public
|
||||
try {
|
||||
const response = await globalThis.fetch(apiUrl, {
|
||||
headers: {
|
||||
'Accept': 'application/vnd.github+json',
|
||||
'X-GitHub-Api-Version': '2022-11-28',
|
||||
'User-Agent': 'Profilarr'
|
||||
}
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
// Repository is accessible without auth
|
||||
return data.private === true;
|
||||
}
|
||||
|
||||
// 404 or 403 means repo doesn't exist or is private
|
||||
if (response.status === 404 || response.status === 403) {
|
||||
// If we have a PAT, try with authentication
|
||||
if (personalAccessToken) {
|
||||
const authResponse = await globalThis.fetch(apiUrl, {
|
||||
headers: {
|
||||
'Accept': 'application/vnd.github+json',
|
||||
'Authorization': `Bearer ${personalAccessToken}`,
|
||||
'X-GitHub-Api-Version': '2022-11-28',
|
||||
'User-Agent': 'Profilarr'
|
||||
}
|
||||
});
|
||||
|
||||
if (authResponse.ok) {
|
||||
const data = await authResponse.json();
|
||||
return data.private === true;
|
||||
}
|
||||
|
||||
if (authResponse.status === 404) {
|
||||
throw new Error('Repository not found. Please check the URL.');
|
||||
}
|
||||
|
||||
if (authResponse.status === 401 || authResponse.status === 403) {
|
||||
throw new Error('Unable to access repository. Please check your Personal Access Token has the correct permissions (repo scope required).');
|
||||
}
|
||||
|
||||
throw new Error(`GitHub API error: ${authResponse.status} ${authResponse.statusText}`);
|
||||
}
|
||||
|
||||
throw new Error('Repository not found or is private. Please provide a Personal Access Token if this is a private repository.');
|
||||
}
|
||||
|
||||
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && (
|
||||
error.message.includes('Repository not found') ||
|
||||
error.message.includes('Unable to access') ||
|
||||
error.message.includes('GitHub API error')
|
||||
)) {
|
||||
throw error;
|
||||
}
|
||||
throw new Error(`Failed to validate repository: ${error instanceof Error ? error.message : String(error)}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone a git repository
|
||||
* Returns true if the repository is private, false if public
|
||||
*/
|
||||
export async function clone(
|
||||
repositoryUrl: string,
|
||||
targetPath: string,
|
||||
branch?: string,
|
||||
personalAccessToken?: string
|
||||
): Promise<boolean> {
|
||||
// Validate repository exists and detect if it's private
|
||||
const isPrivate = await validateRepository(repositoryUrl, personalAccessToken);
|
||||
|
||||
const args = ['clone'];
|
||||
|
||||
if (branch) {
|
||||
args.push('--branch', branch);
|
||||
}
|
||||
|
||||
// Inject personal access token into URL if provided (for private repos or push access)
|
||||
let authUrl = repositoryUrl;
|
||||
if (personalAccessToken) {
|
||||
// Format: https://TOKEN@github.com/username/repo
|
||||
authUrl = repositoryUrl.replace('https://github.com', `https://${personalAccessToken}@github.com`);
|
||||
}
|
||||
|
||||
args.push(authUrl, targetPath);
|
||||
|
||||
await execGit(args);
|
||||
|
||||
return isPrivate;
|
||||
}
|
||||
|
||||
/**
 * Pull latest changes from remote
 *
 * Runs `git pull` in the given repository. Fails (rather than hangs) if the
 * pull would require credentials, since execGit disables all prompts.
 *
 * @param repoPath - Local path of the cloned repository.
 * @throws Error when the git command exits with a non-zero status.
 */
export async function pull(repoPath: string): Promise<void> {
  await execGit(['pull'], repoPath);
}
|
||||
|
||||
/**
 * Fetch from remote without merging
 *
 * Updates remote-tracking refs so subsequent comparisons (e.g. in
 * checkForUpdates) see the latest remote state.
 *
 * @param repoPath - Local path of the cloned repository.
 * @throws Error when the git command exits with a non-zero status.
 */
export async function fetchRemote(repoPath: string): Promise<void> {
  await execGit(['fetch'], repoPath);
}
|
||||
|
||||
/**
 * Get current branch name
 *
 * Uses `git branch --show-current`, which prints an empty string in a
 * detached-HEAD state — NOTE(review): callers appear to assume a named
 * branch; confirm detached HEAD cannot occur here.
 *
 * @param repoPath - Local path of the cloned repository.
 * @returns The current branch name.
 */
export async function getCurrentBranch(repoPath: string): Promise<string> {
  return await execGit(['branch', '--show-current'], repoPath);
}
|
||||
|
||||
/**
 * Checkout a branch
 *
 * @param repoPath - Local path of the cloned repository.
 * @param branch - Branch name to check out.
 * @throws Error when the branch does not exist or checkout fails.
 */
export async function checkout(repoPath: string, branch: string): Promise<void> {
  await execGit(['checkout', branch], repoPath);
}
|
||||
|
||||
/**
|
||||
* Get repository status
|
||||
*/
|
||||
export async function getStatus(repoPath: string): Promise<GitStatus> {
|
||||
const currentBranch = await getCurrentBranch(repoPath);
|
||||
|
||||
// Get short status
|
||||
const statusOutput = await execGit(['status', '--short'], repoPath);
|
||||
|
||||
const untracked: string[] = [];
|
||||
const modified: string[] = [];
|
||||
const staged: string[] = [];
|
||||
|
||||
for (const line of statusOutput.split('\n')) {
|
||||
if (!line.trim()) continue;
|
||||
|
||||
const status = line.substring(0, 2);
|
||||
const file = line.substring(3);
|
||||
|
||||
if (status.startsWith('??')) {
|
||||
untracked.push(file);
|
||||
} else if (status[1] === 'M' || status[1] === 'D') {
|
||||
modified.push(file);
|
||||
} else if (status[0] === 'M' || status[0] === 'A' || status[0] === 'D') {
|
||||
staged.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
const isDirty = untracked.length > 0 || modified.length > 0 || staged.length > 0;
|
||||
|
||||
return {
|
||||
currentBranch,
|
||||
isDirty,
|
||||
untracked,
|
||||
modified,
|
||||
staged
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for updates from remote
|
||||
*/
|
||||
export async function checkForUpdates(repoPath: string): Promise<UpdateInfo> {
|
||||
// Fetch latest from remote
|
||||
await fetchRemote(repoPath);
|
||||
|
||||
const currentBranch = await getCurrentBranch(repoPath);
|
||||
const remoteBranch = `origin/${currentBranch}`;
|
||||
|
||||
// Get current commit
|
||||
const currentLocalCommit = await execGit(['rev-parse', 'HEAD'], repoPath);
|
||||
|
||||
// Get remote commit
|
||||
let latestRemoteCommit: string;
|
||||
try {
|
||||
latestRemoteCommit = await execGit(['rev-parse', remoteBranch], repoPath);
|
||||
} catch {
|
||||
// Remote branch doesn't exist or hasn't been fetched
|
||||
return {
|
||||
hasUpdates: false,
|
||||
commitsBehind: 0,
|
||||
commitsAhead: 0,
|
||||
latestRemoteCommit: currentLocalCommit,
|
||||
currentLocalCommit
|
||||
};
|
||||
}
|
||||
|
||||
// Count commits behind
|
||||
let commitsBehind = 0;
|
||||
try {
|
||||
const behindOutput = await execGit(
|
||||
['rev-list', '--count', `HEAD..${remoteBranch}`],
|
||||
repoPath
|
||||
);
|
||||
commitsBehind = parseInt(behindOutput) || 0;
|
||||
} catch {
|
||||
commitsBehind = 0;
|
||||
}
|
||||
|
||||
// Count commits ahead
|
||||
let commitsAhead = 0;
|
||||
try {
|
||||
const aheadOutput = await execGit(
|
||||
['rev-list', '--count', `${remoteBranch}..HEAD`],
|
||||
repoPath
|
||||
);
|
||||
commitsAhead = parseInt(aheadOutput) || 0;
|
||||
} catch {
|
||||
commitsAhead = 0;
|
||||
}
|
||||
|
||||
return {
|
||||
hasUpdates: commitsBehind > 0,
|
||||
commitsBehind,
|
||||
commitsAhead,
|
||||
latestRemoteCommit,
|
||||
currentLocalCommit
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Reset repository to match remote (discards local changes)
 *
 * Hard-resets the current branch to origin/<current branch>. All local
 * commits and working-tree changes are lost — callers must only use this
 * when that is the intent.
 *
 * @param repoPath - Local path of the cloned repository.
 * @throws Error when the remote branch cannot be resolved.
 */
export async function resetToRemote(repoPath: string): Promise<void> {
  const currentBranch = await getCurrentBranch(repoPath);
  const remoteBranch = `origin/${currentBranch}`;

  await execGit(['reset', '--hard', remoteBranch], repoPath);
}
|
||||
@@ -54,21 +54,24 @@ class Logger {
|
||||
* Check if logging is enabled
|
||||
*/
|
||||
private isEnabled(): boolean {
|
||||
return this.config.enabled;
|
||||
const currentSettings = logSettings.get();
|
||||
return currentSettings.enabled === 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file logging is enabled
|
||||
*/
|
||||
private isFileLoggingEnabled(): boolean {
|
||||
return this.config.fileLogging;
|
||||
const currentSettings = logSettings.get();
|
||||
return currentSettings.file_logging === 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if console logging is enabled
|
||||
*/
|
||||
private isConsoleLoggingEnabled(): boolean {
|
||||
return this.config.consoleLogging;
|
||||
const currentSettings = logSettings.get();
|
||||
return currentSettings.console_logging === 1;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -79,8 +82,12 @@ class Logger {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get fresh settings from database instead of using cached config
|
||||
const currentSettings = logSettings.get();
|
||||
const currentMinLevel = currentSettings.min_level;
|
||||
|
||||
const levels: LogLevel[] = ["DEBUG", "INFO", "WARN", "ERROR"];
|
||||
const minIndex = levels.indexOf(this.config.minLevel);
|
||||
const minIndex = levels.indexOf(currentMinLevel);
|
||||
const levelIndex = levels.indexOf(level);
|
||||
|
||||
return levelIndex >= minIndex;
|
||||
|
||||
Reference in New Issue
Block a user