feat(migrations): add app_info table and change default log level to DEBUG

refactor(logging): update logging levels and improve log messages across jobs
feat(jobs): enhance job initialization and sync with database
Sam Chau
2026-01-02 22:13:04 +10:30
parent 8c12de7f89
commit bc55d0c7bb
22 changed files with 209 additions and 153 deletions

View File

@@ -24,10 +24,10 @@
"highlight.js": "npm:highlight.js@^11.11.1"
},
"tasks": {
"dev": "APP_BASE_PATH=./dist/dev PARSER_HOST=localhost PARSER_PORT=5000 deno run -A npm:vite dev",
"dev": "DENO_ENV=development PORT=6969 HOST=0.0.0.0 APP_BASE_PATH=./dist/dev PARSER_HOST=localhost PARSER_PORT=5000 deno run -A npm:vite dev",
"build": "APP_BASE_PATH=./dist/build deno run -A npm:vite build && deno compile --no-check --allow-net --allow-read --allow-write --allow-env --allow-ffi --allow-run --target x86_64-unknown-linux-gnu --output dist/build/profilarr dist/build/mod.ts",
"build:windows": "APP_BASE_PATH=./dist/build deno run -A npm:vite build && deno compile --no-check --allow-net --allow-read --allow-write --allow-env --allow-ffi --allow-run --target x86_64-pc-windows-msvc --output dist/windows/profilarr.exe dist/build/mod.ts",
"preview": "PORT=6868 APP_BASE_PATH=./dist/dev PARSER_HOST=localhost PARSER_PORT=5000 ./dist/build/profilarr",
"preview": "PORT=6868 HOST=0.0.0.0 APP_BASE_PATH=./dist/dev PARSER_HOST=localhost PARSER_PORT=5000 ./dist/build/profilarr",
"format": "prettier --write .",
"lint": "prettier --check . && eslint .",
"test": "APP_BASE_PATH=./dist/test deno test src/tests --allow-read --allow-write --allow-env",

View File

@@ -1,6 +1,7 @@
import { config } from '$config';
import { logStartup } from '$logger/startup.ts';
import { printBanner, getServerInfo } from '$logger/startup.ts';
import { logSettings } from '$logger/settings.ts';
import { logger } from '$logger/logger.ts';
import { db } from '$db/db.ts';
import { runMigrations } from '$db/migrations.ts';
import { initializeJobs } from '$jobs/init.ts';
@@ -10,9 +11,6 @@ import { pcdManager } from '$pcd/pcd.ts';
// Initialize configuration on server startup
await config.init();
// Log startup banner
await logStartup();
// Initialize database
await db.initialize();
@@ -28,3 +26,12 @@ await pcdManager.initialize();
// Initialize and start job system
await initializeJobs();
await jobScheduler.start();
// Log server ready
await logger.info('Server ready', {
source: 'Startup',
meta: getServerInfo()
});
// Print startup banner with URL
printBanner();

View File

@@ -56,7 +56,7 @@ class DatabaseManager {
.catch(() => false);
if (!dbExists) {
await logger.warn('Database file does not exist, creating new database', {
await logger.debug('Creating new database', {
source: 'DatabaseManager',
meta: { path: config.paths.database }
});
@@ -76,9 +76,9 @@ class DatabaseManager {
this.initialized = true;
await logger.info('Database initialized successfully', {
await logger.debug('Database initialized', {
source: 'DatabaseManager',
meta: { path: config.paths.database, dbExists }
meta: { path: config.paths.database }
});
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
@@ -196,14 +196,6 @@ class DatabaseManager {
*/
close(): void {
if (this.db) {
// Log with stack trace to see what's calling close()
logger.info('Closing database connection', {
source: 'DatabaseManager',
meta: {
path: config.paths.database,
stack: new Error().stack
}
});
this.db.close();
this.db = null;
this.initialized = false;

View File

@@ -19,6 +19,8 @@ import { migration as migration014 } from './migrations/014_create_ai_settings.t
import { migration as migration015 } from './migrations/015_create_arr_sync_tables.ts';
import { migration as migration016 } from './migrations/016_add_should_sync_flags.ts';
import { migration as migration017 } from './migrations/017_create_regex101_cache.ts';
import { migration as migration018 } from './migrations/018_create_app_info.ts';
import { migration as migration019 } from './migrations/019_default_log_level_debug.ts';
export interface Migration {
version: number;
@@ -83,14 +85,10 @@ class MigrationRunner {
migration.version,
migration.name
);
await logger.info(`✓ Applied migration ${migration.version}: ${migration.name}`, {
source: 'MigrationRunner'
});
});
} catch (error) {
await logger.error(`Failed to apply migration ${migration.version}: ${migration.name}`, {
source: 'MigrationRunner',
await logger.error(`Failed to apply migration ${migration.version}: ${migration.name}`, {
source: 'DatabaseMigrations',
meta: error
});
throw error;
@@ -112,14 +110,10 @@ class MigrationRunner {
// Remove the migration record
db.execute(`DELETE FROM ${this.migrationsTable} WHERE version = ?`, migration.version);
await logger.info(`✓ Rolled back migration ${migration.version}: ${migration.name}`, {
source: 'MigrationRunner'
});
});
} catch (error) {
await logger.error(`Failed to rollback migration ${migration.version}: ${migration.name}`, {
source: 'MigrationRunner',
await logger.error(`Failed to rollback migration ${migration.version}: ${migration.name}`, {
source: 'DatabaseMigrations',
meta: error
});
throw error;
@@ -135,19 +129,24 @@ class MigrationRunner {
// Sort migrations by version
const sortedMigrations = [...migrations].sort((a, b) => a.version - b.version);
let applied = 0;
const applied: Array<{ version: number; name: string }> = [];
for (const migration of sortedMigrations) {
if (this.isApplied(migration.version)) {
continue;
}
await this.applyMigration(migration);
applied++;
applied.push({ version: migration.version, name: migration.name });
}
if (applied === 0) {
await logger.info('Database is up to date', {
source: 'MigrationRunner'
if (applied.length === 0) {
await logger.debug('Database up to date', {
source: 'DatabaseMigrations'
});
} else {
await logger.info(`Applied ${applied.length} migration(s)`, {
source: 'DatabaseMigrations',
meta: { migrations: applied }
});
}
}
@@ -160,8 +159,8 @@ class MigrationRunner {
const currentVersion = this.getCurrentVersion();
if (currentVersion <= targetVersion) {
await logger.info('Already at target version or below', {
source: 'MigrationRunner'
await logger.debug('Already at target version or below', {
source: 'DatabaseMigrations'
});
return;
}
@@ -171,19 +170,22 @@ class MigrationRunner {
.filter((m) => m.version > targetVersion && m.version <= currentVersion)
.sort((a, b) => b.version - a.version);
let rolledBack = 0;
const rolledBack: Array<{ version: number; name: string }> = [];
for (const migration of sortedMigrations) {
if (!this.isApplied(migration.version)) {
continue;
}
await this.rollbackMigration(migration);
rolledBack++;
rolledBack.push({ version: migration.version, name: migration.name });
}
await logger.info(`✓ Rolled back ${rolledBack} migration(s)`, {
source: 'MigrationRunner'
});
if (rolledBack.length > 0) {
await logger.info(`Rolled back ${rolledBack.length} migration(s)`, {
source: 'DatabaseMigrations',
meta: { migrations: rolledBack }
});
}
}
/**
@@ -219,11 +221,8 @@ class MigrationRunner {
* Fresh migration (reset and reapply all)
*/
async fresh(migrations: Migration[]): Promise<void> {
await logger.warn('Resetting database...', { source: 'MigrationRunner' });
await logger.warn('Resetting database', { source: 'DatabaseMigrations' });
await this.reset(migrations);
await logger.info('↻ Reapplying all migrations...', {
source: 'MigrationRunner'
});
await this.up(migrations);
}
}
@@ -253,7 +252,9 @@ export function loadMigrations(): Migration[] {
migration014,
migration015,
migration016,
migration017
migration017,
migration018,
migration019
];
// Sort by version number

View File

@@ -0,0 +1,29 @@
import type { Migration } from '../migrations.ts';
/**
* Migration 018: Create app_info table
*
* Creates a singleton table to store application metadata.
* Version is stored here and bumped via migrations on each release.
*/
export const migration: Migration = {
version: 18,
name: 'Create app_info table',
up: `
CREATE TABLE app_info (
id INTEGER PRIMARY KEY CHECK (id = 1),
version TEXT NOT NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
-- Insert initial version
INSERT INTO app_info (id, version) VALUES (1, '2.0.0');
`,
down: `
DROP TABLE IF EXISTS app_info;
`
};
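
Since app_info is meant to be bumped via migrations on each release, a future release would presumably ship a companion migration along these lines. This is a sketch only: migration 020 and the version strings are hypothetical, and the new entry would also need to be appended to loadMigrations().

import type { Migration } from '../migrations.ts';

// Hypothetical future migration; version strings are illustrative only.
export const migration: Migration = {
  version: 20,
  name: 'Bump app version to 2.1.0',
  up: `
    UPDATE app_info SET version = '2.1.0', updated_at = CURRENT_TIMESTAMP WHERE id = 1;
  `,
  down: `
    UPDATE app_info SET version = '2.0.0', updated_at = CURRENT_TIMESTAMP WHERE id = 1;
  `
};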

View File

@@ -0,0 +1,18 @@
import type { Migration } from '../migrations.ts';
/**
* Migration 019: Change default log level to DEBUG
*/
export const migration: Migration = {
version: 19,
name: 'Change default log level to DEBUG',
up: `
UPDATE log_settings SET min_level = 'DEBUG' WHERE id = 1 AND min_level = 'INFO';
`,
down: `
UPDATE log_settings SET min_level = 'INFO' WHERE id = 1 AND min_level = 'DEBUG';
`
};

View File

@@ -0,0 +1,32 @@
import { db } from '../db.ts';
/**
* Types for app_info table
*/
export interface AppInfo {
id: number;
version: string;
created_at: string;
updated_at: string;
}
/**
* All queries for app_info table
* Singleton pattern - only one record exists
*/
export const appInfoQueries = {
/**
* Get the app info (singleton)
*/
get(): AppInfo | undefined {
return db.queryFirst<AppInfo>('SELECT * FROM app_info WHERE id = 1');
},
/**
* Get just the version string
*/
getVersion(): string {
const info = db.queryFirst<{ version: string }>('SELECT version FROM app_info WHERE id = 1');
return info?.version ?? 'unknown';
}
};
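
The queries above are read-only. If application code (rather than a migration) ever needs to update the record, a write helper could sit alongside them; a sketch only, assuming db.execute binds positional parameters the same way the migration runner's DELETE statement does.

/**
 * Hypothetical addition to appInfoQueries; not part of this commit.
 * Bumps the stored version and touches updated_at.
 */
setVersion(version: string): void {
  db.execute(
    'UPDATE app_info SET version = ?, updated_at = CURRENT_TIMESTAMP WHERE id = 1',
    version
  );
}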

View File

@@ -30,11 +30,6 @@ export const cleanupBackupsJob: JobDefinition = {
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
await logger.info(`Cleaning up backups older than ${retentionDays} days`, {
source: 'CleanupBackupsJob',
meta: { cutoffDate: cutoffDate.toISOString() }
});
// Read all files in backups directory
let deletedCount = 0;
let errorCount = 0;
@@ -58,11 +53,6 @@ export const cleanupBackupsJob: JobDefinition = {
if (stat.mtime && stat.mtime < cutoffDate) {
await Deno.remove(filePath);
deletedCount++;
await logger.info(`Deleted old backup: ${entry.name}`, {
source: 'CleanupBackupsJob',
meta: { file: entry.name, modifiedAt: stat.mtime.toISOString() }
});
}
} catch (error) {
errorCount++;

View File

@@ -27,16 +27,6 @@ export const cleanupLogsJob: JobDefinition = {
const retentionDays = settings.retention_days;
const logsDir = config.paths.logs;
// Calculate cutoff date for logging
const cutoffDate = new Date();
cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
const cutoffDateStr = cutoffDate.toISOString().split('T')[0];
await logger.info(`Cleaning up logs older than ${retentionDays} days`, {
source: 'CleanupLogsJob',
meta: { cutoffDate: cutoffDateStr }
});
// Run cleanup
const result = await cleanupLogs(logsDir, retentionDays);

View File

@@ -27,11 +27,6 @@ export const createBackupJob: JobDefinition = {
const sourceDir = config.paths.data;
const backupsDir = config.paths.backups;
await logger.info('Creating backup', {
source: 'CreateBackupJob',
meta: { sourceDir, backupsDir }
});
// Run backup creation
const result = await createBackup(sourceDir, backupsDir);
@@ -49,11 +44,6 @@ export const createBackupJob: JobDefinition = {
// Calculate size in MB for display
const sizeInMB = ((result.sizeBytes ?? 0) / (1024 * 1024)).toFixed(2);
await logger.info(`Backup created successfully: ${result.filename} (${sizeInMB} MB)`, {
source: 'CreateBackupJob',
meta: { filename: result.filename, sizeBytes: result.sizeBytes }
});
return {
success: true,
output: `Backup created: ${result.filename} (${sizeInMB} MB)`

View File

@@ -23,14 +23,9 @@ export const syncArrJob: JobDefinition = {
};
}
// Log individual results
// Log errors only
for (const sync of result.syncs) {
if (sync.success) {
await logger.info(`Synced ${sync.section} to ${sync.instanceName}`, {
source: 'SyncArrJob',
meta: { instanceId: sync.instanceId, section: sync.section }
});
} else {
if (!sync.success) {
await logger.error(`Failed to sync ${sync.section} to ${sync.instanceName}`, {
source: 'SyncArrJob',
meta: { instanceId: sync.instanceId, section: sync.section, error: sync.error }

View File

@@ -13,23 +13,12 @@ export const syncDatabasesJob: JobDefinition = {
handler: async (): Promise<JobResult> => {
try {
await logger.info('Starting database sync job', {
source: 'SyncDatabasesJob'
});
// Run sync
const result = await syncDatabases();
// Log results for each database
// Log errors only
for (const db of result.databases) {
if (db.success) {
if (db.updatesPulled > 0) {
await logger.info(`Synced database: ${db.name}`, {
source: 'SyncDatabasesJob',
meta: { databaseId: db.id, updatesPulled: db.updatesPulled }
});
}
} else {
if (!db.success) {
await logger.error(`Failed to sync database: ${db.name}`, {
source: 'SyncDatabasesJob',
meta: { databaseId: db.id, error: db.error }

View File

@@ -16,10 +16,6 @@ export const upgradeManagerJob: JobDefinition = {
handler: async (): Promise<JobResult> => {
try {
await logger.info('Starting upgrade manager job', {
source: 'UpgradeManagerJob'
});
const result = await runUpgradeManager();
// Build output message
@@ -32,18 +28,9 @@ export const upgradeManagerJob: JobDefinition = {
const message = `Processed ${result.totalProcessed} config(s): ${result.successCount} successful, ${result.failureCount} failed, ${result.skippedCount} skipped`;
// Log individual results
// Log failures only
for (const instance of result.instances) {
if (instance.success) {
await logger.info(`Upgrade completed for "${instance.instanceName}"`, {
source: 'UpgradeManagerJob',
meta: {
instanceId: instance.instanceId,
filterName: instance.filterName,
itemsSearched: instance.itemsSearched
}
});
} else {
if (!instance.success && instance.error) {
await logger.warn(`Upgrade skipped/failed for "${instance.instanceName}": ${instance.error}`, {
source: 'UpgradeManagerJob',
meta: {

View File

@@ -26,29 +26,27 @@ function registerAllJobs(): void {
/**
* Sync registered jobs with database
* Creates DB records for any new jobs
* Returns list of newly created job names
*/
async function syncJobsWithDatabase(): Promise<void> {
function syncJobsWithDatabase(): string[] {
const registeredJobs = jobRegistry.getAll();
const created: string[] = [];
for (const jobDef of registeredJobs) {
// Check if job exists in database
const existing = jobsQueries.getByName(jobDef.name);
if (!existing) {
// Create new job record
const id = jobsQueries.create({
jobsQueries.create({
name: jobDef.name,
description: jobDef.description,
schedule: jobDef.schedule,
enabled: true
});
await logger.info(`Created job in database: ${jobDef.name}`, {
source: 'JobSystem',
meta: { jobId: id }
});
created.push(jobDef.name);
}
}
return created;
}
/**
@@ -57,16 +55,23 @@ async function syncJobsWithDatabase(): Promise<void> {
* 2. Sync with database
*/
export async function initializeJobs(): Promise<void> {
await logger.info('Initializing job system', { source: 'JobSystem' });
await logger.debug('Initializing job system', { source: 'JobSystem' });
// Register all jobs
registerAllJobs();
// Sync with database
await syncJobsWithDatabase();
const created = syncJobsWithDatabase();
await logger.info('Job system initialized', {
const meta: { jobCount: number; created?: string[] } = {
jobCount: jobRegistry.getAll().length
};
if (created.length > 0) {
meta.created = created;
}
await logger.info('Job system ready', {
source: 'JobSystem',
meta: { jobCount: jobRegistry.getAll().length }
meta
});
}
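
With this change, adding a job is a matter of defining it and registering it; syncJobsWithDatabase() then creates the DB row on the next startup and reports it under meta.created. A minimal sketch of such a definition, following the shape the job files in this commit use (name, description, schedule, handler); the import path, schedule string, and job name are assumptions.

import type { JobDefinition, JobResult } from './types.ts'; // path assumed

// Hypothetical job; field names mirror the existing job definitions above.
export const pruneTempFilesJob: JobDefinition = {
  name: 'prune-temp-files',
  description: 'Remove stale temporary files',
  schedule: '0 3 * * *', // schedule format not shown in this diff
  handler: async (): Promise<JobResult> => {
    // Do the work here, logging errors only, per the pattern in the other jobs.
    return { success: true, output: 'Nothing to prune' };
  }
};

Once registered in registerAllJobs(), the next initializeJobs() run would create its record and list it in the created array.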

View File

@@ -136,7 +136,7 @@ export async function runUpgradeManager(): Promise<UpgradeManagerResult> {
};
}
await logger.info(`Found ${dueConfigs.length} upgrade config(s) to process`, {
await logger.debug(`Found ${dueConfigs.length} upgrade config(s) to process`, {
source: 'UpgradeManager',
meta: {
configIds: dueConfigs.map((c) => c.arrInstanceId)

View File

@@ -58,7 +58,7 @@ export async function runJob(job: Job): Promise<boolean> {
return false;
}
await logger.info(`Starting job: ${job.name}`, {
await logger.debug(`Starting job: ${job.name}`, {
source: 'JobRunner',
meta: { jobId: job.id }
});

View File

@@ -19,7 +19,7 @@ class JobScheduler {
return;
}
await logger.info('Starting job scheduler', { source: 'JobScheduler' });
await logger.info('Job scheduler ready', { source: 'JobScheduler' });
// Check immediately on start
await this.checkAndRunJobs();
@@ -33,13 +33,11 @@ class JobScheduler {
/**
* Stop the job scheduler
*/
async stop(): Promise<void> {
stop(): void {
if (this.intervalId === null) {
return;
}
await logger.info('Stopping job scheduler', { source: 'JobScheduler' });
clearInterval(this.intervalId);
this.intervalId = null;
}
@@ -63,7 +61,7 @@ class JobScheduler {
return;
}
await logger.info(`Found ${dueJobs.length} job(s) to run`, {
await logger.debug(`Found ${dueJobs.length} job(s) to run`, {
source: 'JobScheduler',
meta: { jobNames: dueJobs.map((j) => j.name) }
});
@@ -105,7 +103,7 @@ class JobScheduler {
return false;
}
await logger.info(`Manually triggering job: ${jobName}`, { source: 'JobScheduler' });
await logger.debug(`Manually triggering job: ${jobName}`, { source: 'JobScheduler' });
try {
return await runJob(job);
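
Because stop() is now synchronous, any shutdown path that previously awaited it can call it directly. A hypothetical graceful-shutdown hook, not present in this commit (the signal wiring and scheduler import path are assumptions):

import { jobScheduler } from '$jobs/scheduler.ts'; // path assumed
import { db } from '$db/db.ts';

// Hypothetical shutdown wiring; not part of this diff.
Deno.addSignalListener('SIGTERM', () => {
  jobScheduler.stop(); // no await needed anymore
  db.close(); // DatabaseManager.close() is synchronous as well
  Deno.exit(0);
});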

View File

@@ -20,10 +20,6 @@ export class NotificationManager {
const services = notificationServicesQueries.getAllEnabled();
if (services.length === 0) {
await logger.debug('No enabled notification services found', {
source: 'NotificationManager',
meta: { type: notification.type }
});
return;
}
@@ -38,10 +34,6 @@ export class NotificationManager {
});
if (relevantServices.length === 0) {
await logger.debug('No services configured for this notification type', {
source: 'NotificationManager',
meta: { type: notification.type }
});
return;
}

View File

@@ -68,9 +68,9 @@ export abstract class BaseHttpNotifier implements Notifier {
throw new Error(`HTTP ${response.status}: ${errorText}`);
}
await logger.info(`Notification sent successfully`, {
await logger.debug(`Notification sent`, {
source: this.getName(),
meta: { type: notification.type, title: notification.title }
meta: { type: notification.type }
});
this.lastSentAt = new Date();

View File

@@ -80,8 +80,19 @@ class PCDManager {
// Compile cache and start watching (only if enabled)
if (instance.enabled) {
try {
await compile(localPath, id);
const stats = await compile(localPath, id);
await startWatch(localPath, id);
await logger.debug(`Cache compiled for "${options.name}"`, {
source: 'PCDManager',
meta: {
databaseId: id,
schema: stats.schema,
base: stats.base,
tweaks: stats.tweaks,
user: stats.user
}
});
} catch (error) {
// Log error but don't fail the link operation
await logger.error('Failed to compile PCD cache after linking', {

View File

@@ -6,6 +6,8 @@ class Config {
private basePath: string;
public readonly timezone: string;
public readonly parserUrl: string;
public readonly port: number;
public readonly host: string;
constructor() {
// Default base path logic:
@@ -30,6 +32,18 @@ class Config {
const parserHost = Deno.env.get('PARSER_HOST') || 'localhost';
const parserPort = Deno.env.get('PARSER_PORT') || '5000';
this.parserUrl = `http://${parserHost}:${parserPort}`;
// Server bind configuration
this.port = parseInt(Deno.env.get('PORT') || '6868', 10);
this.host = Deno.env.get('HOST') || '0.0.0.0';
}
/**
* Get the server URL for display
*/
get serverUrl(): string {
const displayHost = this.host === '0.0.0.0' ? 'localhost' : this.host;
return `http://${displayHost}:${this.port}`;
}
/**
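
The new PORT/HOST values line up with the env vars added to the deno.json tasks above. The actual bind call is not part of this diff, but a consumer of config.port and config.host would presumably look something like this sketch (the handler and call site are assumptions):

import { config } from '$config';

// Sketch only; the real entry point is not shown in this commit.
Deno.serve(
  { port: config.port, hostname: config.host },
  () => new Response('ok')
);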

View File

@@ -2,7 +2,8 @@
* Startup banner and logging
*/
import { logger } from './logger.ts';
import { config } from '$config';
import { appInfoQueries } from '$db/queries/appInfo.ts';
const BANNER = String.raw`
_____.__.__
@@ -13,14 +14,29 @@ _____________ _____/ ____\__| | _____ ______________
|__| \/
`;
export async function logStartup(): Promise<void> {
// Print banner (not logged to file, just console)
export function printBanner(): void {
const version = appInfoQueries.getVersion();
const url = config.serverUrl;
console.log(BANNER);
// Log startup info
await logger.info('Server started', { source: 'Startup' });
// Log environment
const env = Deno.env.get('NODE_ENV') || Deno.env.get('DENO_ENV') || 'development';
await logger.info(`Environment: ${env}`, { source: 'Startup' });
console.log(` v${version} | ${url}`);
console.log();
}
export interface ServerInfo {
version: string;
env: string;
timezone: string;
basePath: string;
hostname: string;
}
export function getServerInfo(): ServerInfo {
return {
version: appInfoQueries.getVersion(),
env: Deno.env.get('DENO_ENV') || 'production',
timezone: config.timezone,
basePath: config.paths.base,
hostname: typeof Deno !== 'undefined' ? Deno.hostname() : 'unknown'
};
}
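
Taken with the mod.ts changes above, startup now ends with one structured 'Server ready' log entry (carrying getServerInfo() as meta) followed by the console-only banner. With default config, the tail of the console output would look roughly like this (illustrative; banner art elided, version from migration 018, URL from config.serverUrl):

  <banner>
   v2.0.0 | http://localhost:6868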