Initial commit

This commit is contained in:
Jarek Krochmalski
2025-12-28 21:16:03 +01:00
commit 62e3c6439e
552 changed files with 104858 additions and 0 deletions

View File

@@ -0,0 +1,632 @@
/**
* Unified Scheduler Service
*
* Manages all scheduled tasks using croner with automatic job lifecycle:
* - System cleanup jobs (static cron schedules)
* - Container auto-updates (dynamic schedules from database)
* - Git stack auto-sync (dynamic schedules from database)
*
* All execution logic is in separate task files for clean architecture.
*/
import { Cron } from 'croner';
import {
getEnabledAutoUpdateSettings,
getEnabledAutoUpdateGitStacks,
getAutoUpdateSettingById,
getGitStack,
getScheduleCleanupCron,
getEventCleanupCron,
getScheduleRetentionDays,
getEventRetentionDays,
getScheduleCleanupEnabled,
getEventCleanupEnabled,
getEnvironments,
getEnvUpdateCheckSettings,
getAllEnvUpdateCheckSettings,
getEnvironment,
getEnvironmentTimezone,
getDefaultTimezone
} from '../db';
import {
cleanupStaleVolumeHelpers,
cleanupExpiredVolumeHelpers
} from '../docker';
// Import task execution functions
import { runContainerUpdate } from './tasks/container-update';
import { runGitStackSync } from './tasks/git-stack-sync';
import { runEnvUpdateCheckJob } from './tasks/env-update-check';
import {
runScheduleCleanupJob,
runEventCleanupJob,
runVolumeHelperCleanupJob,
SYSTEM_SCHEDULE_CLEANUP_ID,
SYSTEM_EVENT_CLEANUP_ID,
SYSTEM_VOLUME_HELPER_CLEANUP_ID
} from './tasks/system-cleanup';
// Registry of all dynamic cron jobs, keyed by `${type}-${scheduleId}`
// (see registerSchedule/unregisterSchedule). Cleared wholesale on stop/refresh.
const activeJobs: Map<string, Cron> = new Map();
// System cleanup jobs — static schedules created in startScheduler and
// re-created in refreshSystemJobs; null while the scheduler is stopped.
let cleanupJob: Cron | null = null;
let eventCleanupJob: Cron | null = null;
let volumeHelperCleanupJob: Cron | null = null;
// Scheduler state — guards against double start/stop
let isRunning = false;
/**
* Start the unified scheduler service.
* Registers all schedules with croner for automatic execution.
*/
/**
 * Start the unified scheduler service.
 * Registers all schedules with croner for automatic execution.
 * Idempotent: returns immediately if the scheduler is already running.
 */
export async function startScheduler(): Promise<void> {
  if (isRunning) {
    console.log('[Scheduler] Already running');
    return;
  }
  console.log('[Scheduler] Starting scheduler service...');
  isRunning = true;
  // Cron expressions and default timezone are independent reads — fetch in parallel
  const [scheduleCleanupCron, eventCleanupCron, defaultTimezone] = await Promise.all([
    getScheduleCleanupCron(),
    getEventCleanupCron(),
    getDefaultTimezone()
  ]);
  // Start system cleanup jobs (static schedules, run in the default timezone)
  cleanupJob = new Cron(scheduleCleanupCron, { timezone: defaultTimezone }, async () => {
    await runScheduleCleanupJob();
  });
  eventCleanupJob = new Cron(eventCleanupCron, { timezone: defaultTimezone }, async () => {
    await runEventCleanupJob();
  });
  // Cleanup functions to pass to the job (avoids dynamic import issues in production).
  // cleanupStaleVolumeHelpers needs the environment list, so pre-fetch it in a wrapper.
  const wrappedCleanupStale = async () => {
    const envs = await getEnvironments();
    await cleanupStaleVolumeHelpers(envs);
  };
  const volumeCleanupFns = {
    cleanupStaleVolumeHelpers: wrappedCleanupStale,
    cleanupExpiredVolumeHelpers
  };
  // Volume helper cleanup runs every 30 minutes to clean up expired browse containers
  volumeHelperCleanupJob = new Cron('*/30 * * * *', { timezone: defaultTimezone }, async () => {
    await runVolumeHelperCleanupJob('cron', volumeCleanupFns);
  });
  // Fire-and-forget: immediately clean up stale helper containers from a previous run
  runVolumeHelperCleanupJob('startup', volumeCleanupFns).catch(err => {
    console.error('[Scheduler] Error during startup volume helper cleanup:', err);
  });
  console.log(`[Scheduler] System schedule cleanup: ${scheduleCleanupCron} [${defaultTimezone}]`);
  console.log(`[Scheduler] System event cleanup: ${eventCleanupCron} [${defaultTimezone}]`);
  console.log(`[Scheduler] Volume helper cleanup: every 30 minutes [${defaultTimezone}]`);
  // Register all dynamic schedules from database
  await refreshAllSchedules();
  console.log('[Scheduler] Service started');
}
/**
* Stop the scheduler service and cleanup all jobs.
*/
/**
 * Stop the scheduler service and clean up all jobs.
 * Idempotent: no-op when the scheduler is not running.
 */
export function stopScheduler(): void {
  if (!isRunning) return;
  console.log('[Scheduler] Stopping scheduler...');
  isRunning = false;
  // Stop the static system jobs and drop the references
  if (cleanupJob) {
    cleanupJob.stop();
    cleanupJob = null;
  }
  if (eventCleanupJob) {
    eventCleanupJob.stop();
    eventCleanupJob = null;
  }
  if (volumeHelperCleanupJob) {
    volumeHelperCleanupJob.stop();
    volumeHelperCleanupJob = null;
  }
  // Stop all dynamic jobs (keys are not needed, iterate values directly)
  for (const job of activeJobs.values()) {
    job.stop();
  }
  activeJobs.clear();
  console.log('[Scheduler] Service stopped');
}
/**
* Refresh all dynamic schedules from database.
* Called on startup and optionally for recovery.
*/
/**
 * Refresh all dynamic schedules from the database.
 * Stops every currently registered dynamic job, then re-registers
 * container auto-update, git stack auto-sync and environment update
 * check schedules. Called on startup and optionally for recovery.
 */
export async function refreshAllSchedules(): Promise<void> {
  console.log('[Scheduler] Refreshing all schedules...');
  // Clear existing dynamic jobs (keys are not needed, iterate values directly)
  for (const job of activeJobs.values()) {
    job.stop();
  }
  activeJobs.clear();
  // Declare all counters together for the summary log below
  let containerCount = 0;
  let gitStackCount = 0;
  let envUpdateCheckCount = 0;
  // Register container auto-update schedules
  try {
    const containerSettings = await getEnabledAutoUpdateSettings();
    for (const setting of containerSettings) {
      if (setting.cronExpression) {
        const registered = await registerSchedule(
          setting.id,
          'container_update',
          setting.environmentId
        );
        if (registered) containerCount++;
      }
    }
  } catch (error) {
    console.error('[Scheduler] Error loading container schedules:', error);
  }
  // Register git stack auto-sync schedules
  try {
    const gitStacks = await getEnabledAutoUpdateGitStacks();
    for (const stack of gitStacks) {
      if (stack.autoUpdateCron) {
        const registered = await registerSchedule(
          stack.id,
          'git_stack_sync',
          stack.environmentId
        );
        if (registered) gitStackCount++;
      }
    }
  } catch (error) {
    console.error('[Scheduler] Error loading git stack schedules:', error);
  }
  // Register environment update check schedules (schedule id doubles as environment id)
  try {
    const envConfigs = await getAllEnvUpdateCheckSettings();
    for (const { envId, settings } of envConfigs) {
      if (settings.enabled && settings.cron) {
        const registered = await registerSchedule(
          envId,
          'env_update_check',
          envId
        );
        if (registered) envUpdateCheckCount++;
      }
    }
  } catch (error) {
    console.error('[Scheduler] Error loading env update check schedules:', error);
  }
  console.log(`[Scheduler] Registered ${containerCount} container schedules, ${gitStackCount} git stack schedules, ${envUpdateCheckCount} env update check schedules`);
}
/**
* Register or update a schedule with automatic croner execution.
* Idempotent - can be called multiple times safely.
*/
/**
 * Register or update a schedule with automatic croner execution.
 * Fetches the schedule's current configuration from the database and,
 * if it is enabled and has a cron expression, creates a Cron job in the
 * environment's timezone. Idempotent — any existing job for the same
 * key is unregistered first.
 *
 * @param scheduleId - id of the setting/stack/environment the schedule belongs to
 * @param type - which kind of dynamic schedule to register
 * @param environmentId - environment used for timezone lookup (null → UTC)
 * @returns true when a job was created, false when skipped or on error
 */
export async function registerSchedule(
  scheduleId: number,
  type: 'container_update' | 'git_stack_sync' | 'env_update_check',
  environmentId: number | null
): Promise<boolean> {
  const key = `${type}-${scheduleId}`;
  try {
    // Unregister existing job if present (makes this call idempotent)
    unregisterSchedule(scheduleId, type);
    // Fetch schedule data from database
    let cronExpression: string | null = null;
    let entityName: string | null = null;
    let enabled = false;
    if (type === 'container_update') {
      const setting = await getAutoUpdateSettingById(scheduleId);
      if (!setting) return false;
      cronExpression = setting.cronExpression;
      entityName = setting.containerName;
      enabled = setting.enabled;
    } else if (type === 'git_stack_sync') {
      const stack = await getGitStack(scheduleId);
      if (!stack) return false;
      cronExpression = stack.autoUpdateCron;
      entityName = stack.stackName;
      enabled = stack.autoUpdate;
    } else if (type === 'env_update_check') {
      // For env update checks the schedule id IS the environment id
      const config = await getEnvUpdateCheckSettings(scheduleId);
      if (!config) return false;
      const env = await getEnvironment(scheduleId);
      if (!env) return false;
      cronExpression = config.cron;
      entityName = `Update: ${env.name}`;
      enabled = config.enabled;
    }
    // Don't create job if disabled or no cron expression
    if (!enabled || !cronExpression) {
      return false;
    }
    // Get timezone for this environment (schedules without one run in UTC)
    const timezone = environmentId ? await getEnvironmentTimezone(environmentId) : 'UTC';
    // Create new Cron instance with timezone
    const job = new Cron(cronExpression, { timezone }, async () => {
      // Defensive check: verify schedule still exists and is enabled
      // (it may have been disabled or deleted after registration)
      if (type === 'container_update') {
        const setting = await getAutoUpdateSettingById(scheduleId);
        if (!setting || !setting.enabled) return;
        await runContainerUpdate(scheduleId, setting.containerName, environmentId, 'cron');
      } else if (type === 'git_stack_sync') {
        const stack = await getGitStack(scheduleId);
        if (!stack || !stack.autoUpdate) return;
        await runGitStackSync(scheduleId, stack.stackName, environmentId, 'cron');
      } else if (type === 'env_update_check') {
        const config = await getEnvUpdateCheckSettings(scheduleId);
        if (!config || !config.enabled) return;
        await runEnvUpdateCheckJob(scheduleId, 'cron');
      }
    });
    // Store in active jobs map
    activeJobs.set(key, job);
    console.log(`[Scheduler] Registered ${type} schedule ${scheduleId} (${entityName}): ${cronExpression} [${timezone}]`);
    return true;
  } catch (error: unknown) {
    // Narrow the unknown error rather than assuming `any`
    const message = error instanceof Error ? error.message : String(error);
    console.error(`[Scheduler] Failed to register ${type} schedule ${scheduleId}:`, message);
    return false;
  }
}
/**
* Unregister a schedule and stop its croner job.
* Idempotent - safe to call even if not registered.
*/
/**
 * Unregister a schedule and stop its croner job.
 * Idempotent — safe to call even when nothing is registered under this key.
 */
export function unregisterSchedule(
  scheduleId: number,
  type: 'container_update' | 'git_stack_sync' | 'env_update_check'
): void {
  const jobKey = `${type}-${scheduleId}`;
  const existing = activeJobs.get(jobKey);
  if (!existing) return;
  existing.stop();
  activeJobs.delete(jobKey);
  console.log(`[Scheduler] Unregistered ${type} schedule ${scheduleId}`);
}
/**
* Refresh all schedules for a specific environment.
* Called when an environment's timezone changes to re-register jobs with the new timezone.
*/
/**
 * Refresh all schedules for a specific environment.
 * Called when an environment's timezone changes so its jobs are
 * re-registered with the new timezone.
 */
export async function refreshSchedulesForEnvironment(environmentId: number): Promise<void> {
  console.log(`[Scheduler] Refreshing schedules for environment ${environmentId} (timezone changed)`);
  let refreshedCount = 0;
  // Re-register container auto-update schedules belonging to this environment
  try {
    const allSettings = await getEnabledAutoUpdateSettings();
    const relevantSettings = allSettings.filter(
      s => s.environmentId === environmentId && s.cronExpression
    );
    for (const setting of relevantSettings) {
      const ok = await registerSchedule(setting.id, 'container_update', setting.environmentId);
      if (ok) refreshedCount++;
    }
  } catch (error) {
    console.error('[Scheduler] Error refreshing container schedules:', error);
  }
  // Re-register git stack auto-sync schedules belonging to this environment
  try {
    const allStacks = await getEnabledAutoUpdateGitStacks();
    const relevantStacks = allStacks.filter(
      s => s.environmentId === environmentId && s.autoUpdateCron
    );
    for (const stack of relevantStacks) {
      const ok = await registerSchedule(stack.id, 'git_stack_sync', stack.environmentId);
      if (ok) refreshedCount++;
    }
  } catch (error) {
    console.error('[Scheduler] Error refreshing git stack schedules:', error);
  }
  // Re-register this environment's update check schedule, if enabled
  try {
    const config = await getEnvUpdateCheckSettings(environmentId);
    if (config?.enabled && config.cron) {
      const ok = await registerSchedule(environmentId, 'env_update_check', environmentId);
      if (ok) refreshedCount++;
    }
  } catch (error) {
    console.error('[Scheduler] Error refreshing env update check schedule:', error);
  }
  console.log(`[Scheduler] Refreshed ${refreshedCount} schedules for environment ${environmentId}`);
}
/**
* Refresh system cleanup jobs with the new default timezone.
* Called when the default timezone setting changes.
*/
/**
 * Refresh system cleanup jobs with the new default timezone.
 * Called when the default timezone setting changes.
 */
export async function refreshSystemJobs(): Promise<void> {
  console.log('[Scheduler] Refreshing system jobs (default timezone changed)');
  // Current settings are independent reads — fetch them in parallel
  const [scheduleCleanupCron, eventCleanupCron, defaultTimezone] = await Promise.all([
    getScheduleCleanupCron(),
    getEventCleanupCron(),
    getDefaultTimezone()
  ]);
  // Cleanup functions to pass to the job; pre-fetch environments in a wrapper
  const wrappedCleanupStale = async () => {
    const envs = await getEnvironments();
    await cleanupStaleVolumeHelpers(envs);
  };
  const volumeCleanupFns = {
    cleanupStaleVolumeHelpers: wrappedCleanupStale,
    cleanupExpiredVolumeHelpers
  };
  // Stop existing system jobs before replacing them
  cleanupJob?.stop();
  eventCleanupJob?.stop();
  volumeHelperCleanupJob?.stop();
  // Re-create with new timezone
  cleanupJob = new Cron(scheduleCleanupCron, { timezone: defaultTimezone }, async () => {
    await runScheduleCleanupJob();
  });
  eventCleanupJob = new Cron(eventCleanupCron, { timezone: defaultTimezone }, async () => {
    await runEventCleanupJob();
  });
  volumeHelperCleanupJob = new Cron('*/30 * * * *', { timezone: defaultTimezone }, async () => {
    await runVolumeHelperCleanupJob('cron', volumeCleanupFns);
  });
  console.log(`[Scheduler] System schedule cleanup: ${scheduleCleanupCron} [${defaultTimezone}]`);
  console.log(`[Scheduler] System event cleanup: ${eventCleanupCron} [${defaultTimezone}]`);
  console.log(`[Scheduler] Volume helper cleanup: every 30 minutes [${defaultTimezone}]`);
}
// =============================================================================
// MANUAL TRIGGER FUNCTIONS (for API endpoints)
// =============================================================================
/**
* Manually trigger a container update.
*/
/**
 * Manually trigger a container update.
 * The update itself runs in the background; only lookup errors are returned.
 */
export async function triggerContainerUpdate(settingId: number): Promise<{ success: boolean; executionId?: number; error?: string }> {
  try {
    const setting = await getAutoUpdateSettingById(settingId);
    if (!setting) {
      return { success: false, error: 'Auto-update setting not found' };
    }
    // Run in background — attach a catch so a failure cannot surface
    // as an unhandled promise rejection
    void runContainerUpdate(settingId, setting.containerName, setting.environmentId, 'manual').catch(err => {
      console.error('[Scheduler] Manual container update failed:', err);
    });
    return { success: true };
  } catch (error: unknown) {
    return { success: false, error: error instanceof Error ? error.message : String(error) };
  }
}
/**
* Manually trigger a git stack sync.
*/
/**
 * Manually trigger a git stack sync.
 * The sync itself runs in the background; only lookup errors are returned.
 */
export async function triggerGitStackSync(stackId: number): Promise<{ success: boolean; executionId?: number; error?: string }> {
  try {
    const stack = await getGitStack(stackId);
    if (!stack) {
      return { success: false, error: 'Git stack not found' };
    }
    // Run in background — attach a catch so a failure cannot surface
    // as an unhandled promise rejection
    void runGitStackSync(stackId, stack.stackName, stack.environmentId, 'manual').catch(err => {
      console.error('[Scheduler] Manual git stack sync failed:', err);
    });
    return { success: true };
  } catch (error: unknown) {
    return { success: false, error: error instanceof Error ? error.message : String(error) };
  }
}
/**
* Trigger git stack sync from webhook (called from webhook endpoint).
*/
/**
 * Trigger git stack sync from webhook (called from webhook endpoint).
 * The sync itself runs in the background; only lookup errors are returned.
 */
export async function triggerGitStackSyncFromWebhook(stackId: number): Promise<{ success: boolean; executionId?: number; error?: string }> {
  try {
    const stack = await getGitStack(stackId);
    if (!stack) {
      return { success: false, error: 'Git stack not found' };
    }
    // Run in background — attach a catch so a failure cannot surface
    // as an unhandled promise rejection
    void runGitStackSync(stackId, stack.stackName, stack.environmentId, 'webhook').catch(err => {
      console.error('[Scheduler] Webhook git stack sync failed:', err);
    });
    return { success: true };
  } catch (error: unknown) {
    return { success: false, error: error instanceof Error ? error.message : String(error) };
  }
}
/**
* Manually trigger an environment update check.
*/
/**
 * Manually trigger an environment update check.
 * The check itself runs in the background; only lookup errors are returned.
 */
export async function triggerEnvUpdateCheck(environmentId: number): Promise<{ success: boolean; executionId?: number; error?: string }> {
  try {
    const config = await getEnvUpdateCheckSettings(environmentId);
    if (!config) {
      return { success: false, error: 'Update check settings not found for this environment' };
    }
    const env = await getEnvironment(environmentId);
    if (!env) {
      return { success: false, error: 'Environment not found' };
    }
    // Run in background — attach a catch so a failure cannot surface
    // as an unhandled promise rejection
    void runEnvUpdateCheckJob(environmentId, 'manual').catch(err => {
      console.error('[Scheduler] Manual env update check failed:', err);
    });
    return { success: true };
  } catch (error: unknown) {
    return { success: false, error: error instanceof Error ? error.message : String(error) };
  }
}
/**
* Manually trigger a system job (schedule cleanup, event cleanup, etc.).
*/
/**
 * Manually trigger a system job (schedule cleanup, event cleanup, etc.).
 * Accepts either the numeric system id (stringified) or a slug alias.
 * The job runs in the background; this returns as soon as it is started.
 */
export async function triggerSystemJob(jobId: string): Promise<{ success: boolean; executionId?: number; error?: string }> {
  try {
    if (jobId === String(SYSTEM_SCHEDULE_CLEANUP_ID) || jobId === 'schedule-cleanup') {
      // Fire-and-forget — catch to avoid an unhandled promise rejection
      void runScheduleCleanupJob('manual').catch(err => {
        console.error('[Scheduler] Manual schedule cleanup failed:', err);
      });
      return { success: true };
    } else if (jobId === String(SYSTEM_EVENT_CLEANUP_ID) || jobId === 'event-cleanup') {
      void runEventCleanupJob('manual').catch(err => {
        console.error('[Scheduler] Manual event cleanup failed:', err);
      });
      return { success: true };
    } else if (jobId === String(SYSTEM_VOLUME_HELPER_CLEANUP_ID) || jobId === 'volume-helper-cleanup') {
      // Wrap to pre-fetch environments (avoids dynamic import in production)
      const wrappedCleanupStale = async () => {
        const envs = await getEnvironments();
        await cleanupStaleVolumeHelpers(envs);
      };
      void runVolumeHelperCleanupJob('manual', {
        cleanupStaleVolumeHelpers: wrappedCleanupStale,
        cleanupExpiredVolumeHelpers
      }).catch(err => {
        console.error('[Scheduler] Manual volume helper cleanup failed:', err);
      });
      return { success: true };
    } else {
      return { success: false, error: 'Unknown system job ID' };
    }
  } catch (error: unknown) {
    return { success: false, error: error instanceof Error ? error.message : String(error) };
  }
}
// =============================================================================
// UTILITY FUNCTIONS
// =============================================================================
/**
* Get the next run time for a cron expression.
* @param cronExpression - The cron expression
* @param timezone - Optional IANA timezone (e.g., 'Europe/Warsaw'). Defaults to local timezone.
*/
/**
 * Get the next run time for a cron expression.
 * @param cronExpression - The cron expression
 * @param timezone - Optional IANA timezone (e.g., 'Europe/Warsaw'). Defaults to local timezone.
 * @returns The next scheduled Date, or null when the expression is invalid.
 */
export function getNextRun(cronExpression: string, timezone?: string): Date | null {
  let probe: Cron | undefined;
  try {
    probe = timezone
      ? new Cron(cronExpression, { timezone })
      : new Cron(cronExpression);
    return probe.nextRun();
  } catch {
    // Invalid expression (or timezone) — signal with null
    return null;
  } finally {
    // Always stop the probe job so it never actually fires
    probe?.stop();
  }
}
/**
* Check if a cron expression is valid.
*/
/**
 * Check if a cron expression is valid.
 * Valid means the Cron constructor accepts it without throwing.
 */
export function isValidCron(cronExpression: string): boolean {
  try {
    // Construct a throwaway job and stop it immediately so it never fires
    new Cron(cronExpression).stop();
    return true;
  } catch {
    return false;
  }
}
/**
* Get system schedules info for the API.
*/
/**
 * Get system schedules info for the API.
 * The six settings are independent reads, so they are fetched in parallel.
 * @returns One entry per built-in system job, with its next run time
 *          (null when the job is disabled).
 */
export async function getSystemSchedules(): Promise<SystemScheduleInfo[]> {
  const [
    scheduleRetention,
    eventRetention,
    scheduleCleanupCron,
    eventCleanupCron,
    scheduleCleanupEnabled,
    eventCleanupEnabled
  ] = await Promise.all([
    getScheduleRetentionDays(),
    getEventRetentionDays(),
    getScheduleCleanupCron(),
    getEventCleanupCron(),
    getScheduleCleanupEnabled(),
    getEventCleanupEnabled()
  ]);
  return [
    {
      id: SYSTEM_SCHEDULE_CLEANUP_ID,
      type: 'system_cleanup' as const,
      name: 'Schedule execution cleanup',
      description: `Removes execution logs older than ${scheduleRetention} days`,
      cronExpression: scheduleCleanupCron,
      nextRun: scheduleCleanupEnabled ? getNextRun(scheduleCleanupCron)?.toISOString() ?? null : null,
      isSystem: true,
      enabled: scheduleCleanupEnabled
    },
    {
      id: SYSTEM_EVENT_CLEANUP_ID,
      type: 'system_cleanup' as const,
      name: 'Container event cleanup',
      description: `Removes container events older than ${eventRetention} days`,
      cronExpression: eventCleanupCron,
      nextRun: eventCleanupEnabled ? getNextRun(eventCleanupCron)?.toISOString() ?? null : null,
      isSystem: true,
      enabled: eventCleanupEnabled
    },
    {
      id: SYSTEM_VOLUME_HELPER_CLEANUP_ID,
      type: 'system_cleanup' as const,
      name: 'Volume helper cleanup',
      description: 'Cleans up temporary volume browser containers',
      // Fixed half-hourly schedule; always enabled (matches the job in startScheduler)
      cronExpression: '*/30 * * * *',
      nextRun: getNextRun('*/30 * * * *')?.toISOString() ?? null,
      isSystem: true,
      enabled: true
    }
  ];
}
/** Shape of a built-in system schedule entry as returned by getSystemSchedules(). */
export interface SystemScheduleInfo {
  /** One of the SYSTEM_*_ID constants from tasks/system-cleanup */
  id: number;
  /** Always 'system_cleanup' for built-in jobs */
  type: 'system_cleanup';
  /** Human-readable job name */
  name: string;
  /** Short description of what the job removes/cleans */
  description: string;
  /** Cron expression the job runs on */
  cronExpression: string;
  /** ISO timestamp of the next run, or null when the job is disabled or the expression is invalid */
  nextRun: string | null;
  /** Distinguishes built-in jobs from user-defined schedules */
  isSystem: true;
  /** Whether the job is currently active */
  enabled: boolean;
}

View File

@@ -0,0 +1,575 @@
/**
* Container Auto-Update Task
*
* Handles automatic container updates with vulnerability scanning.
*/
import type { ScheduleTrigger, VulnerabilityCriteria } from '../../db';
import {
getAutoUpdateSettingById,
updateAutoUpdateLastChecked,
updateAutoUpdateLastUpdated,
createScheduleExecution,
updateScheduleExecution,
appendScheduleExecutionLog,
saveVulnerabilityScan,
getCombinedScanForImage
} from '../../db';
import {
pullImage,
listContainers,
inspectContainer,
createContainer,
stopContainer,
removeContainer,
checkImageUpdateAvailable,
getTempImageTag,
isDigestBasedImage,
getImageIdByTag,
removeTempImage,
tagImage
} from '../../docker';
import { getScannerSettings, scanImage, type ScanResult, type VulnerabilitySeverity } from '../../scanner';
import { sendEventNotification } from '../../notifications';
import { parseImageNameAndTag, shouldBlockUpdate, combineScanSummaries, isDockhandContainer } from './update-utils';
/**
* Execute a container auto-update.
*/
export async function runContainerUpdate(
settingId: number,
containerName: string,
environmentId: number | null | undefined,
triggeredBy: ScheduleTrigger
): Promise<void> {
const envId = environmentId ?? undefined;
const startTime = Date.now();
// Create execution record
const execution = await createScheduleExecution({
scheduleType: 'container_update',
scheduleId: settingId,
environmentId: environmentId ?? null,
entityName: containerName,
triggeredBy,
status: 'running'
});
await updateScheduleExecution(execution.id, {
startedAt: new Date().toISOString()
});
const log = (message: string) => {
console.log(`[Auto-update] ${message}`);
appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
};
try {
log(`Checking container: ${containerName}`);
await updateAutoUpdateLastChecked(containerName, envId);
// Find the container
const containers = await listContainers(true, envId);
const container = containers.find(c => c.name === containerName);
if (!container) {
log(`Container not found: ${containerName}`);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: 'Container not found'
});
return;
}
// Get the full container config to extract the image name (tag)
const inspectData = await inspectContainer(container.id, envId) as any;
const imageNameFromConfig = inspectData.Config?.Image;
if (!imageNameFromConfig) {
log(`Could not determine image name from container config`);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: 'Could not determine image name'
});
return;
}
// Prevent Dockhand from updating itself
if (isDockhandContainer(imageNameFromConfig)) {
log(`Skipping Dockhand container - cannot auto-update self`);
await updateScheduleExecution(execution.id, {
status: 'skipped',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: { reason: 'Cannot auto-update Dockhand itself' }
});
return;
}
// Get the actual image ID from inspect data
const currentImageId = inspectData.Image;
log(`Container is using image: ${imageNameFromConfig}`);
log(`Current image ID: ${currentImageId?.substring(0, 19)}`);
// Get scanner and schedule settings early to determine scan strategy
const [scannerSettings, updateSetting] = await Promise.all([
getScannerSettings(envId),
getAutoUpdateSettingById(settingId)
]);
const vulnerabilityCriteria = (updateSetting?.vulnerabilityCriteria || 'never') as VulnerabilityCriteria;
// Scan if scanning is enabled (scanner !== 'none')
// The vulnerabilityCriteria only controls whether to BLOCK updates, not whether to SCAN
const shouldScan = scannerSettings.scanner !== 'none';
// =============================================================================
// SAFE UPDATE FLOW
// =============================================================================
// 1. Registry check (no pull) - determine if update is available
// 2. If scanning enabled:
// a. Pull new image (overwrites original tag temporarily)
// b. Get new image ID
// c. SAFETY: Restore original tag to point to OLD image
// d. Tag new image with temp suffix for scanning
// e. Scan temp image
// f. If blocked: remove temp image, original tag still safe
// g. If approved: re-tag to original and proceed
// 3. If no scanning: simple pull and update
// =============================================================================
// Step 1: Check for update using registry check (no pull)
log(`Checking registry for updates: ${imageNameFromConfig}`);
const registryCheck = await checkImageUpdateAvailable(imageNameFromConfig, currentImageId, envId);
// Handle local images or registry errors
if (registryCheck.isLocalImage) {
log(`Local image detected - skipping (auto-update requires registry)`);
await updateScheduleExecution(execution.id, {
status: 'skipped',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: { reason: 'Local image - no registry available' }
});
return;
}
if (registryCheck.error) {
log(`Registry check error: ${registryCheck.error}`);
// Don't fail on transient errors, just skip this run
await updateScheduleExecution(execution.id, {
status: 'skipped',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: { reason: `Registry check failed: ${registryCheck.error}` }
});
return;
}
if (!registryCheck.hasUpdate) {
log(`Already up-to-date: ${containerName} is running the latest version`);
await updateScheduleExecution(execution.id, {
status: 'skipped',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: { reason: 'Already up-to-date' }
});
return;
}
log(`Update available! Registry digest: ${registryCheck.registryDigest?.substring(0, 19) || 'unknown'}`);
// Variables for scan results
let scanResults: ScanResult[] | undefined;
let scanSummary: VulnerabilitySeverity | undefined;
let newImageId: string | null = null;
const newDigest = registryCheck.registryDigest;
// Step 2: Safe pull with temp tag protection (if scanning enabled)
if (shouldScan) {
log(`Safe-pull enabled (scanner: ${scannerSettings.scanner}, criteria: ${vulnerabilityCriteria})`);
// Check if this is a digest-based image (can't use temp tags)
if (isDigestBasedImage(imageNameFromConfig)) {
log(`Digest-based image detected - temp tag protection not available`);
// Fall through to simple flow
} else {
const tempTag = getTempImageTag(imageNameFromConfig);
log(`Using temp tag for safe pull: ${tempTag}`);
try {
// Step 2a: Pull new image (overwrites original tag)
log(`Pulling new image: ${imageNameFromConfig}`);
await pullImage(imageNameFromConfig, undefined, envId);
// Step 2b: Get new image ID
newImageId = await getImageIdByTag(imageNameFromConfig, envId);
if (!newImageId) {
throw new Error('Failed to get new image ID after pull');
}
log(`New image pulled: ${newImageId.substring(0, 19)}`);
// Step 2c: SAFETY - Restore original tag to OLD image
log(`Restoring original tag to current safe image...`);
const [oldRepo, oldTag] = parseImageNameAndTag(imageNameFromConfig);
await tagImage(currentImageId, oldRepo, oldTag, envId);
log(`Original tag ${imageNameFromConfig} restored to safe image`);
// Step 2d: Tag new image with temp suffix
const [tempRepo, tempTagName] = parseImageNameAndTag(tempTag);
await tagImage(newImageId, tempRepo, tempTagName, envId);
log(`New image tagged as: ${tempTag}`);
// Step 2e: Scan temp image
log(`Scanning new image for vulnerabilities...`);
try {
scanResults = await scanImage(tempTag, envId, (progress) => {
const scannerTag = progress.scanner ? `[${progress.scanner}]` : '[scan]';
if (progress.message) {
log(`${scannerTag} ${progress.message}`);
}
if (progress.output) {
log(`${scannerTag} ${progress.output}`);
}
});
if (scanResults.length > 0) {
scanSummary = combineScanSummaries(scanResults);
log(`Scan result: ${scanSummary.critical} critical, ${scanSummary.high} high, ${scanSummary.medium} medium, ${scanSummary.low} low`);
// Save scan results
for (const result of scanResults) {
try {
await saveVulnerabilityScan({
environmentId: envId ?? null,
imageId: newImageId,
imageName: result.imageName,
scanner: result.scanner,
scannedAt: result.scannedAt,
scanDuration: result.scanDuration,
criticalCount: result.summary.critical,
highCount: result.summary.high,
mediumCount: result.summary.medium,
lowCount: result.summary.low,
negligibleCount: result.summary.negligible,
unknownCount: result.summary.unknown,
vulnerabilities: result.vulnerabilities,
error: result.error ?? null
});
} catch (saveError: any) {
log(`Warning: Could not save scan results: ${saveError.message}`);
}
}
// Handle 'more_than_current' criteria
let currentScanSummary: VulnerabilitySeverity | undefined;
if (vulnerabilityCriteria === 'more_than_current') {
log(`Looking up cached scan for current image...`);
try {
const cachedScan = await getCombinedScanForImage(currentImageId, envId ?? null);
if (cachedScan) {
currentScanSummary = cachedScan;
log(`Cached scan: ${currentScanSummary.critical} critical, ${currentScanSummary.high} high`);
} else {
log(`No cached scan found, scanning current image...`);
const currentScanResults = await scanImage(currentImageId, envId, (progress) => {
const tag = progress.scanner ? `[${progress.scanner}]` : '[scan]';
if (progress.message) log(`${tag} ${progress.message}`);
});
if (currentScanResults.length > 0) {
currentScanSummary = combineScanSummaries(currentScanResults);
log(`Current image: ${currentScanSummary.critical} critical, ${currentScanSummary.high} high`);
// Save for future use
for (const result of currentScanResults) {
try {
await saveVulnerabilityScan({
environmentId: envId ?? null,
imageId: currentImageId,
imageName: result.imageName,
scanner: result.scanner,
scannedAt: result.scannedAt,
scanDuration: result.scanDuration,
criticalCount: result.summary.critical,
highCount: result.summary.high,
mediumCount: result.summary.medium,
lowCount: result.summary.low,
negligibleCount: result.summary.negligible,
unknownCount: result.summary.unknown,
vulnerabilities: result.vulnerabilities,
error: result.error ?? null
});
} catch { /* ignore */ }
}
}
}
} catch (cacheError: any) {
log(`Warning: Could not get current scan: ${cacheError.message}`);
}
}
// Check if update should be blocked
const { blocked, reason } = shouldBlockUpdate(vulnerabilityCriteria, scanSummary, currentScanSummary);
if (blocked) {
// Step 2f: BLOCKED - Remove temp image, original tag is safe
log(`UPDATE BLOCKED: ${reason}`);
log(`Removing blocked image: ${tempTag}`);
await removeTempImage(newImageId, envId);
log(`Blocked image removed - container will continue using safe image`);
await updateScheduleExecution(execution.id, {
status: 'skipped',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: {
mode: 'auto_update',
reason: 'vulnerabilities_found',
blockReason: reason,
vulnerabilityCriteria,
summary: { checked: 1, updated: 0, blocked: 1, failed: 0 },
containers: [{
name: containerName,
status: 'blocked',
blockReason: reason,
scannerResults: scanResults.map(r => ({
scanner: r.scanner,
critical: r.summary.critical,
high: r.summary.high,
medium: r.summary.medium,
low: r.summary.low,
negligible: r.summary.negligible,
unknown: r.summary.unknown
}))
}],
scanResult: {
summary: scanSummary,
scanners: scanResults.map(r => r.scanner),
scannedAt: scanResults[0]?.scannedAt,
scannerResults: scanResults.map(r => ({
scanner: r.scanner,
critical: r.summary.critical,
high: r.summary.high,
medium: r.summary.medium,
low: r.summary.low,
negligible: r.summary.negligible,
unknown: r.summary.unknown
}))
}
}
});
await sendEventNotification('auto_update_blocked', {
title: 'Auto-update blocked',
message: `Container "${containerName}" update blocked: ${reason}`,
type: 'warning'
}, envId);
return;
}
log(`Scan passed vulnerability criteria`);
}
} catch (scanError: any) {
// Scan failure - cleanup temp image and fail
log(`Scan failed: ${scanError.message}`);
log(`Removing temp image due to scan failure...`);
await removeTempImage(newImageId, envId);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: `Vulnerability scan failed: ${scanError.message}`
});
return;
}
// Step 2g: APPROVED - Re-tag to original for update
log(`Re-tagging approved image to: ${imageNameFromConfig}`);
await tagImage(newImageId, oldRepo, oldTag, envId);
log(`Image ready for update`);
// Clean up temp tag (optional, image will be removed when container is recreated)
try {
await removeTempImage(tempTag, envId);
} catch { /* ignore cleanup errors */ }
} catch (pullError: any) {
log(`Safe-pull failed: ${pullError.message}`);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: `Failed to pull image: ${pullError.message}`
});
return;
}
}
} else {
// No scanning - simple pull
log(`Pulling update (no vulnerability scan)...`);
try {
await pullImage(imageNameFromConfig, undefined, envId);
log(`Image pulled successfully`);
} catch (pullError: any) {
log(`Pull failed: ${pullError.message}`);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: `Failed to pull image: ${pullError.message}`
});
return;
}
}
log(`Proceeding with container recreation...`);
const success = await recreateContainer(containerName, envId, log);
if (success) {
await updateAutoUpdateLastUpdated(containerName, envId);
log(`Successfully updated container: ${containerName}`);
await updateScheduleExecution(execution.id, {
status: 'success',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
details: {
mode: 'auto_update',
newDigest,
vulnerabilityCriteria,
summary: { checked: 1, updated: 1, blocked: 0, failed: 0 },
containers: [{
name: containerName,
status: 'updated',
scannerResults: scanResults?.map(r => ({
scanner: r.scanner,
critical: r.summary.critical,
high: r.summary.high,
medium: r.summary.medium,
low: r.summary.low,
negligible: r.summary.negligible,
unknown: r.summary.unknown
}))
}],
scanResult: scanSummary ? {
summary: scanSummary,
scanners: scanResults?.map(r => r.scanner) || [],
scannedAt: scanResults?.[0]?.scannedAt,
scannerResults: scanResults?.map(r => ({
scanner: r.scanner,
critical: r.summary.critical,
high: r.summary.high,
medium: r.summary.medium,
low: r.summary.low,
negligible: r.summary.negligible,
unknown: r.summary.unknown
})) || []
} : undefined
}
});
// Send notification for successful update
await sendEventNotification('auto_update_success', {
title: 'Container auto-updated',
message: `Container "${containerName}" was updated to a new image version`,
type: 'success'
}, envId);
} else {
throw new Error('Failed to recreate container');
}
} catch (error: any) {
log(`Error: ${error.message}`);
await updateScheduleExecution(execution.id, {
status: 'failed',
completedAt: new Date().toISOString(),
duration: Date.now() - startTime,
errorMessage: error.message
});
// Send notification for failed update
await sendEventNotification('auto_update_failed', {
title: 'Auto-update failed',
message: `Container "${containerName}" auto-update failed: ${error.message}`,
type: 'error'
}, envId);
}
}
// =============================================================================
// HELPER FUNCTIONS
// =============================================================================
async function recreateContainer(
containerName: string,
envId?: number,
log?: (msg: string) => void
): Promise<boolean> {
try {
// Find the container by name
const containers = await listContainers(true, envId);
const container = containers.find(c => c.name === containerName);
if (!container) {
log?.(`Container not found: ${containerName}`);
return false;
}
// Get full container config
const inspectData = await inspectContainer(container.id, envId) as any;
const wasRunning = inspectData.State.Running;
const config = inspectData.Config;
const hostConfig = inspectData.HostConfig;
log?.(`Recreating container: ${containerName} (was running: ${wasRunning})`);
// Stop container if running
if (wasRunning) {
log?.('Stopping container...');
await stopContainer(container.id, envId);
}
// Remove old container
log?.('Removing old container...');
await removeContainer(container.id, true, envId);
// Prepare port bindings
const ports: { [key: string]: { HostPort: string } } = {};
if (hostConfig.PortBindings) {
for (const [containerPort, bindings] of Object.entries(hostConfig.PortBindings)) {
if (bindings && (bindings as any[]).length > 0) {
ports[containerPort] = { HostPort: (bindings as any[])[0].HostPort || '' };
}
}
}
// Create new container
log?.('Creating new container...');
const newContainer = await createContainer({
name: containerName,
image: config.Image,
ports,
volumeBinds: hostConfig.Binds || [],
env: config.Env || [],
labels: config.Labels || {},
cmd: config.Cmd || undefined,
restartPolicy: hostConfig.RestartPolicy?.Name || 'no',
networkMode: hostConfig.NetworkMode || undefined
}, envId);
// Start if was running
if (wasRunning) {
log?.('Starting new container...');
await newContainer.start();
}
log?.('Container recreated successfully');
return true;
} catch (error: any) {
log?.(`Failed to recreate container: ${error.message}`);
return false;
}
}

View File

@@ -0,0 +1,509 @@
/**
* Environment Update Check Task
*
* Checks all containers in an environment for available image updates.
* Can optionally auto-update containers when updates are found.
*/
import type { ScheduleTrigger, VulnerabilityCriteria } from '../../db';
import {
getEnvUpdateCheckSettings,
getEnvironment,
createScheduleExecution,
updateScheduleExecution,
appendScheduleExecutionLog,
saveVulnerabilityScan,
clearPendingContainerUpdates,
addPendingContainerUpdate,
removePendingContainerUpdate
} from '../../db';
import {
listContainers,
inspectContainer,
checkImageUpdateAvailable,
pullImage,
stopContainer,
removeContainer,
createContainer,
getTempImageTag,
isDigestBasedImage,
getImageIdByTag,
removeTempImage,
tagImage
} from '../../docker';
import { sendEventNotification } from '../../notifications';
import { getScannerSettings, scanImage, type VulnerabilitySeverity } from '../../scanner';
import { parseImageNameAndTag, shouldBlockUpdate, combineScanSummaries, isDockhandContainer } from './update-utils';
/** Snapshot of one container for which a newer image was found in its registry. */
interface UpdateInfo {
  containerId: string;     // Docker container ID
  containerName: string;   // container name, used in logs and notifications
  imageName: string;       // image reference from the container config (repo[:tag])
  currentImageId: string;  // image ID the container is currently running
  currentDigest?: string;  // digest of the local image, when resolvable
  newDigest?: string;      // digest reported by the registry
}
// Track running update checks to prevent concurrent execution
const runningUpdateChecks = new Set<number>();
/**
 * Execute environment update check job.
 *
 * Walks every container in the environment and compares its local image
 * against the registry. Depending on the environment's settings it then
 * either auto-updates each out-of-date container (safe-pull, optional
 * vulnerability scan, recreate) or only records the pending updates and
 * sends a notification. All progress is appended to a schedule-execution
 * record as the job runs.
 *
 * @param environmentId - The environment ID to check
 * @param triggeredBy - What triggered this execution
 */
export async function runEnvUpdateCheckJob(
  environmentId: number,
  triggeredBy: ScheduleTrigger = 'cron'
): Promise<void> {
  // Prevent concurrent execution for the same environment
  if (runningUpdateChecks.has(environmentId)) {
    console.log(`[EnvUpdateCheck] Environment ${environmentId} update check already running, skipping`);
    return;
  }
  runningUpdateChecks.add(environmentId);
  const startTime = Date.now();
  try {
    // Get environment info
    const env = await getEnvironment(environmentId);
    if (!env) {
      console.error(`[EnvUpdateCheck] Environment ${environmentId} not found`);
      return;
    }
    // Get settings
    const config = await getEnvUpdateCheckSettings(environmentId);
    if (!config) {
      console.error(`[EnvUpdateCheck] No settings found for environment ${environmentId}`);
      return;
    }
    // Create execution record
    const execution = await createScheduleExecution({
      scheduleType: 'env_update_check',
      scheduleId: environmentId,
      environmentId,
      entityName: `Update: ${env.name}`,
      triggeredBy,
      status: 'running'
    });
    await updateScheduleExecution(execution.id, {
      startedAt: new Date().toISOString()
    });
    // Mirror every message to the console and the persisted execution log
    const log = async (message: string) => {
      console.log(`[EnvUpdateCheck] ${message}`);
      await appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
    };
    try {
      await log(`Starting update check for environment: ${env.name}`);
      await log(`Auto-update mode: ${config.autoUpdate ? 'ON' : 'OFF'}`);
      // Clear pending updates at the start - we'll re-add as we discover updates
      await clearPendingContainerUpdates(environmentId);
      // Get all containers in this environment
      const containers = await listContainers(true, environmentId);
      await log(`Found ${containers.length} containers`);
      const updatesAvailable: UpdateInfo[] = [];
      let checkedCount = 0;
      let errorCount = 0;
      // Check each container for updates
      for (const container of containers) {
        try {
          const inspectData = await inspectContainer(container.id, environmentId) as any;
          const imageName = inspectData.Config?.Image;
          const currentImageId = inspectData.Image;
          if (!imageName) {
            await log(` [${container.name}] Skipping - no image name found`);
            continue;
          }
          checkedCount++;
          await log(` Checking: ${container.name} (${imageName})`);
          const result = await checkImageUpdateAvailable(imageName, currentImageId, environmentId);
          // Locally built images have no registry counterpart to compare with
          if (result.isLocalImage) {
            await log(` Local image - skipping update check`);
            continue;
          }
          if (result.error) {
            await log(` Error: ${result.error}`);
            errorCount++;
            continue;
          }
          if (result.hasUpdate) {
            updatesAvailable.push({
              containerId: container.id,
              containerName: container.name,
              imageName,
              currentImageId,
              currentDigest: result.currentDigest,
              newDigest: result.registryDigest
            });
            // Add to pending table immediately - will be removed on successful update
            await addPendingContainerUpdate(environmentId, container.id, container.name, imageName);
            await log(` UPDATE AVAILABLE`);
            await log(` Current: ${result.currentDigest?.substring(0, 24) || 'unknown'}...`);
            await log(` New: ${result.registryDigest?.substring(0, 24) || 'unknown'}...`);
          } else {
            await log(` Up to date`);
          }
        } catch (err: any) {
          // Per-container failure: count it and keep checking the rest
          await log(` [${container.name}] Error: ${err.message}`);
          errorCount++;
        }
      }
      // Summary
      await log('');
      await log('=== SUMMARY ===');
      await log(`Total containers: ${containers.length}`);
      await log(`Checked: ${checkedCount}`);
      await log(`Updates available: ${updatesAvailable.length}`);
      await log(`Errors: ${errorCount}`);
      if (updatesAvailable.length === 0) {
        await log('All containers are up to date');
        // Pending updates already cleared at start, nothing to add
        await updateScheduleExecution(execution.id, {
          status: 'success',
          completedAt: new Date().toISOString(),
          duration: Date.now() - startTime,
          details: {
            updatesFound: 0,
            containersChecked: checkedCount,
            errors: errorCount
          }
        });
        return;
      }
      // Build notification message with details
      const updateList = updatesAvailable
        .map(u => {
          const currentShort = u.currentDigest?.substring(0, 12) || 'unknown';
          const newShort = u.newDigest?.substring(0, 12) || 'unknown';
          return `- ${u.containerName} (${u.imageName})\n ${currentShort}... -> ${newShort}...`;
        })
        .join('\n');
      if (config.autoUpdate) {
        // Auto-update mode: actually update the containers with safe-pull flow
        await log('');
        await log('=== AUTO-UPDATE MODE ===');
        // Get scanner settings and vulnerability criteria
        const scannerSettings = await getScannerSettings(environmentId);
        const vulnerabilityCriteria = (config.vulnerabilityCriteria || 'never') as VulnerabilityCriteria;
        // Scan if scanning is enabled (scanner !== 'none')
        // The vulnerabilityCriteria only controls whether to BLOCK updates, not whether to SCAN
        const shouldScan = scannerSettings.scanner !== 'none';
        await log(`Vulnerability criteria: ${vulnerabilityCriteria}`);
        if (shouldScan) {
          await log(`Scanner: ${scannerSettings.scanner} (scan enabled)`);
        }
        await log(`Updating ${updatesAvailable.length} containers...`);
        let successCount = 0;
        let failCount = 0;
        let blockedCount = 0;
        const updatedContainers: string[] = [];
        const failedContainers: string[] = [];
        const blockedContainers: { name: string; reason: string; scannerResults?: { scanner: string; critical: number; high: number; medium: number; low: number }[] }[] = [];
        for (const update of updatesAvailable) {
          // Skip Dockhand container - cannot update itself
          if (isDockhandContainer(update.imageName)) {
            await log(`\n[${update.containerName}] Skipping - cannot auto-update Dockhand itself`);
            continue;
          }
          try {
            await log(`\nUpdating: ${update.containerName}`);
            // Get full container config
            const inspectData = await inspectContainer(update.containerId, environmentId) as any;
            const wasRunning = inspectData.State.Running;
            const containerConfig = inspectData.Config;
            const hostConfig = inspectData.HostConfig;
            // SAFE-PULL FLOW: pull the new image, immediately re-point the
            // original tag back at the currently running image, scan the new
            // image under a temp tag, and only re-tag it to the original name
            // once the scan passes. The running container's tag therefore
            // always refers to a known-good image.
            // Digest-pinned images are excluded (their tag cannot move).
            if (shouldScan && !isDigestBasedImage(update.imageName)) {
              const tempTag = getTempImageTag(update.imageName);
              await log(` Safe-pull with temp tag: ${tempTag}`);
              // Step 1: Pull new image
              await log(` Pulling ${update.imageName}...`);
              await pullImage(update.imageName, () => {}, environmentId);
              // Step 2: Get new image ID
              const newImageId = await getImageIdByTag(update.imageName, environmentId);
              if (!newImageId) {
                throw new Error('Failed to get new image ID after pull');
              }
              await log(` New image: ${newImageId.substring(0, 19)}`);
              // Step 3: SAFETY - Restore original tag to old image
              const [oldRepo, oldTag] = parseImageNameAndTag(update.imageName);
              await tagImage(update.currentImageId, oldRepo, oldTag, environmentId);
              await log(` Restored original tag to safe image`);
              // Step 4: Tag new image with temp suffix
              const [tempRepo, tempTagName] = parseImageNameAndTag(tempTag);
              await tagImage(newImageId, tempRepo, tempTagName, environmentId);
              // Step 5: Scan temp image
              await log(` Scanning for vulnerabilities...`);
              let scanBlocked = false;
              let blockReason = '';
              let currentScannerResults: { scanner: string; critical: number; high: number; medium: number; low: number }[] = [];
              // Collect scan logs to log after scan completes
              const scanLogs: string[] = [];
              try {
                const scanResults = await scanImage(tempTag, environmentId, (progress) => {
                  if (progress.message) {
                    scanLogs.push(` [${progress.scanner || 'scan'}] ${progress.message}`);
                  }
                });
                // Log collected scan messages
                for (const scanLog of scanLogs) {
                  await log(scanLog);
                }
                if (scanResults.length > 0) {
                  const scanSummary = combineScanSummaries(scanResults);
                  await log(` Scan: ${scanSummary.critical} critical, ${scanSummary.high} high, ${scanSummary.medium} medium, ${scanSummary.low} low`);
                  // Capture per-scanner results for blocking info
                  currentScannerResults = scanResults.map(r => ({
                    scanner: r.scanner,
                    critical: r.summary.critical,
                    high: r.summary.high,
                    medium: r.summary.medium,
                    low: r.summary.low
                  }));
                  // Save scan results
                  for (const result of scanResults) {
                    try {
                      await saveVulnerabilityScan({
                        environmentId,
                        imageId: newImageId,
                        imageName: result.imageName,
                        scanner: result.scanner,
                        scannedAt: result.scannedAt,
                        scanDuration: result.scanDuration,
                        criticalCount: result.summary.critical,
                        highCount: result.summary.high,
                        mediumCount: result.summary.medium,
                        lowCount: result.summary.low,
                        negligibleCount: result.summary.negligible,
                        unknownCount: result.summary.unknown,
                        vulnerabilities: result.vulnerabilities,
                        error: result.error ?? null
                      });
                    } catch { /* ignore save errors */ }
                  }
                  // Check if blocked.
                  // NOTE(review): currentScanSummary is passed as undefined, so
                  // the 'more_than_current' criteria can never block here —
                  // confirm this is intentional.
                  const { blocked, reason } = shouldBlockUpdate(vulnerabilityCriteria, scanSummary, undefined);
                  if (blocked) {
                    scanBlocked = true;
                    blockReason = reason;
                  }
                }
              } catch (scanErr: any) {
                // A failed scan is treated as a block: never deploy unscanned images
                await log(` Scan failed: ${scanErr.message}`);
                scanBlocked = true;
                blockReason = `Scan failed: ${scanErr.message}`;
              }
              if (scanBlocked) {
                // BLOCKED - Remove temp image
                await log(` UPDATE BLOCKED: ${blockReason}`);
                await removeTempImage(newImageId, environmentId);
                await log(` Removed blocked image - container stays safe`);
                blockedCount++;
                blockedContainers.push({
                  name: update.containerName,
                  reason: blockReason,
                  scannerResults: currentScannerResults.length > 0 ? currentScannerResults : undefined
                });
                continue;
              }
              // APPROVED - Re-tag to original
              await log(` Scan passed, re-tagging...`);
              await tagImage(newImageId, oldRepo, oldTag, environmentId);
              try {
                await removeTempImage(tempTag, environmentId);
              } catch { /* ignore cleanup errors */ }
            } else {
              // Simple pull (no scanning or digest-based image)
              await log(` Pulling ${update.imageName}...`);
              await pullImage(update.imageName, () => {}, environmentId);
            }
            // Stop container if running
            if (wasRunning) {
              await log(` Stopping...`);
              await stopContainer(update.containerId, environmentId);
            }
            // Remove old container
            await log(` Removing old container...`);
            await removeContainer(update.containerId, true, environmentId);
            // Prepare port bindings (only the first host binding per port is kept)
            const ports: { [key: string]: { HostPort: string } } = {};
            if (hostConfig.PortBindings) {
              for (const [containerPort, bindings] of Object.entries(hostConfig.PortBindings)) {
                if (bindings && (bindings as any[]).length > 0) {
                  ports[containerPort] = { HostPort: (bindings as any[])[0].HostPort || '' };
                }
              }
            }
            // Create new container
            await log(` Creating new container...`);
            const newContainer = await createContainer({
              name: update.containerName,
              image: update.imageName,
              ports,
              volumeBinds: hostConfig.Binds || [],
              env: containerConfig.Env || [],
              labels: containerConfig.Labels || {},
              cmd: containerConfig.Cmd || undefined,
              restartPolicy: hostConfig.RestartPolicy?.Name || 'no',
              networkMode: hostConfig.NetworkMode || undefined
            }, environmentId);
            // Start if was running
            if (wasRunning) {
              await log(` Starting...`);
              await newContainer.start();
            }
            await log(` Updated successfully`);
            successCount++;
            updatedContainers.push(update.containerName);
            // Remove from pending table - successfully updated
            await removePendingContainerUpdate(environmentId, update.containerId);
          } catch (err: any) {
            await log(` FAILED: ${err.message}`);
            failCount++;
            failedContainers.push(update.containerName);
          }
        }
        await log('');
        await log(`=== UPDATE COMPLETE ===`);
        await log(`Updated: ${successCount}`);
        await log(`Blocked: ${blockedCount}`);
        await log(`Failed: ${failCount}`);
        // Send notifications
        if (blockedCount > 0) {
          await sendEventNotification('auto_update_blocked', {
            title: `${blockedCount} update(s) blocked in ${env.name}`,
            message: blockedContainers.map(c => `- ${c.name}: ${c.reason}`).join('\n'),
            type: 'warning'
          }, environmentId);
        }
        const notificationMessage = successCount > 0
          ? `Updated ${successCount} container(s) in ${env.name}:\n${updatedContainers.map(c => `- ${c}`).join('\n')}${blockedCount > 0 ? `\n\nBlocked (${blockedCount}):\n${blockedContainers.map(c => `- ${c.name}`).join('\n')}` : ''}${failCount > 0 ? `\n\nFailed (${failCount}):\n${failedContainers.map(c => `- ${c}`).join('\n')}` : ''}`
          : blockedCount > 0 ? `All updates blocked in ${env.name}` : `Update failed for all containers in ${env.name}`;
        await sendEventNotification('batch_update_success', {
          title: successCount > 0 ? `Containers updated in ${env.name}` : blockedCount > 0 ? `Updates blocked in ${env.name}` : `Container updates failed in ${env.name}`,
          message: notificationMessage,
          type: successCount > 0 && failCount === 0 && blockedCount === 0 ? 'success' : successCount > 0 ? 'warning' : 'error'
        }, environmentId);
        // Blocked/failed containers stay in pending table (successfully updated ones were removed)
        await updateScheduleExecution(execution.id, {
          status: failCount > 0 && successCount === 0 && blockedCount === 0 ? 'failed' : 'success',
          completedAt: new Date().toISOString(),
          duration: Date.now() - startTime,
          details: {
            mode: 'auto_update',
            updatesFound: updatesAvailable.length,
            containersChecked: checkedCount,
            errors: errorCount,
            autoUpdate: true,
            vulnerabilityCriteria,
            summary: { checked: checkedCount, updated: successCount, blocked: blockedCount, failed: failCount },
            containers: [
              ...updatedContainers.map(name => ({ name, status: 'updated' as const })),
              ...blockedContainers.map(c => ({ name: c.name, status: 'blocked' as const, blockReason: c.reason, scannerResults: c.scannerResults })),
              ...failedContainers.map(name => ({ name, status: 'failed' as const }))
            ],
            updated: successCount,
            blocked: blockedCount,
            failed: failCount,
            blockedContainers
          }
        });
      } else {
        // Check-only mode: just send notification
        await log('');
        await log('Check-only mode - sending notification about available updates');
        // Pending updates already added as we discovered them
        await sendEventNotification('updates_detected', {
          title: `Container updates available in ${env.name}`,
          message: `${updatesAvailable.length} update(s) available:\n${updateList}`,
          type: 'info'
        }, environmentId);
        await updateScheduleExecution(execution.id, {
          status: 'success',
          completedAt: new Date().toISOString(),
          duration: Date.now() - startTime,
          details: {
            mode: 'notify_only',
            updatesFound: updatesAvailable.length,
            containersChecked: checkedCount,
            errors: errorCount,
            autoUpdate: false,
            summary: { checked: checkedCount, updated: 0, blocked: 0, failed: 0 },
            containers: updatesAvailable.map(u => ({
              name: u.containerName,
              status: 'checked' as const,
              imageName: u.imageName,
              currentDigest: u.currentDigest,
              newDigest: u.newDigest
            }))
          }
        });
      }
    } catch (error: any) {
      await log(`Error: ${error.message}`);
      await updateScheduleExecution(execution.id, {
        status: 'failed',
        completedAt: new Date().toISOString(),
        duration: Date.now() - startTime,
        errorMessage: error.message
      });
    }
  } finally {
    // Always release the per-environment concurrency guard
    runningUpdateChecks.delete(environmentId);
  }
}

View File

@@ -0,0 +1,102 @@
/**
* Git Stack Auto-Sync Task
*
* Handles automatic syncing and deploying of git-based compose stacks.
*/
import type { ScheduleTrigger } from '../../db';
import {
createScheduleExecution,
updateScheduleExecution,
appendScheduleExecutionLog
} from '../../db';
import { deployGitStack } from '../../git';
import { sendEventNotification } from '../../notifications';
/**
 * Execute a git stack sync.
 *
 * Records a schedule execution, deploys the stack via deployGitStack
 * (which skips the redeploy when no changes are detected), appends a
 * timestamped log to the execution record, and emits a
 * success/skipped/failed notification.
 *
 * @param stackId - database ID of the git stack
 * @param stackName - display name used in logs and notifications
 * @param environmentId - environment the stack belongs to, if any
 * @param triggeredBy - what triggered this execution
 */
export async function runGitStackSync(
  stackId: number,
  stackName: string,
  environmentId: number | null | undefined,
  triggeredBy: ScheduleTrigger
): Promise<void> {
  const startTime = Date.now();
  // Create execution record
  const execution = await createScheduleExecution({
    scheduleType: 'git_stack_sync',
    scheduleId: stackId,
    environmentId: environmentId ?? null,
    entityName: stackName,
    triggeredBy,
    status: 'running'
  });
  await updateScheduleExecution(execution.id, {
    startedAt: new Date().toISOString()
  });
  // Await the DB append (as the other task files do) so log lines are
  // persisted in order and a rejected write is not silently dropped.
  const log = async (message: string) => {
    console.log(`[Git-sync] ${message}`);
    await appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
  };
  try {
    await log(`Starting sync for stack: ${stackName}`);
    // Deploy the git stack (only if there are changes)
    const result = await deployGitStack(stackId, { force: false });
    const envId = environmentId ?? undefined;
    if (result.success) {
      if (result.skipped) {
        await log(`No changes detected for stack: ${stackName}, skipping redeploy`);
        // Send notification for skipped sync
        await sendEventNotification('git_sync_skipped', {
          title: 'Git sync skipped',
          message: `Stack "${stackName}" sync skipped: no changes detected`,
          type: 'info'
        }, envId);
      } else {
        await log(`Successfully deployed stack: ${stackName}`);
        // Send notification for successful sync
        await sendEventNotification('git_sync_success', {
          title: 'Git stack deployed',
          message: `Stack "${stackName}" was synced and deployed successfully`,
          type: 'success'
        }, envId);
      }
      if (result.output) await log(result.output);
      await updateScheduleExecution(execution.id, {
        status: result.skipped ? 'skipped' : 'success',
        completedAt: new Date().toISOString(),
        duration: Date.now() - startTime,
        details: { output: result.output }
      });
    } else {
      throw new Error(result.error || 'Deployment failed');
    }
  } catch (error: any) {
    await log(`Error: ${error.message}`);
    await updateScheduleExecution(execution.id, {
      status: 'failed',
      completedAt: new Date().toISOString(),
      duration: Date.now() - startTime,
      errorMessage: error.message
    });
    // Send notification for failed sync
    const envId = environmentId ?? undefined;
    await sendEventNotification('git_sync_failed', {
      title: 'Git sync failed',
      message: `Stack "${stackName}" sync failed: ${error.message}`,
      type: 'error'
    }, envId);
  }
}

View File

@@ -0,0 +1,202 @@
/**
* System Cleanup Tasks
*
* Handles system cleanup jobs (schedule executions, container events).
*/
import type { ScheduleTrigger } from '../../db';
import {
getScheduleRetentionDays,
cleanupOldExecutions,
getEventRetentionDays,
getScheduleCleanupEnabled,
getEventCleanupEnabled,
createScheduleExecution,
updateScheduleExecution,
appendScheduleExecutionLog
} from '../../db';
// System job IDs
export const SYSTEM_SCHEDULE_CLEANUP_ID = 1;
export const SYSTEM_EVENT_CLEANUP_ID = 2;
export const SYSTEM_VOLUME_HELPER_CLEANUP_ID = 3;
/**
 * Execute schedule execution cleanup job.
 * Prunes schedule-execution rows older than the configured retention window.
 * Cron-triggered runs honour the "schedule cleanup enabled" setting; manual
 * triggers always run.
 */
export async function runScheduleCleanupJob(triggeredBy: ScheduleTrigger = 'cron'): Promise<void> {
  // Check if cleanup is enabled (skip check if manually triggered)
  if (triggeredBy === 'cron' && !(await getScheduleCleanupEnabled())) {
    return; // Skip execution if disabled
  }
  const begunAt = Date.now();
  // Record this run so it shows up in the execution history
  const execution = await createScheduleExecution({
    scheduleType: 'system_cleanup',
    scheduleId: SYSTEM_SCHEDULE_CLEANUP_ID,
    environmentId: null,
    entityName: 'Schedule execution cleanup',
    triggeredBy,
    status: 'running'
  });
  await updateScheduleExecution(execution.id, {
    startedAt: new Date().toISOString()
  });
  // Mirror every message to the console and the persisted execution log
  const log = async (message: string) => {
    console.log(`[Schedule Cleanup] ${message}`);
    await appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
  };
  // Stamp completion time/duration plus the caller-supplied fields
  const finish = async (patch: Record<string, unknown>) => {
    await updateScheduleExecution(execution.id, {
      completedAt: new Date().toISOString(),
      duration: Date.now() - begunAt,
      ...patch
    });
  };
  try {
    const retentionDays = await getScheduleRetentionDays();
    await log(`Starting cleanup with ${retentionDays} day retention`);
    await cleanupOldExecutions(retentionDays);
    await log('Cleanup completed successfully');
    await finish({ status: 'success', details: { retentionDays } });
  } catch (error: any) {
    await log(`Error: ${error.message}`);
    await finish({ status: 'failed', errorMessage: error.message });
  }
}
/**
 * Execute event cleanup job.
 * Deletes container events older than the configured retention window.
 * Cron-triggered runs honour the "event cleanup enabled" setting; manual
 * triggers always run.
 */
export async function runEventCleanupJob(triggeredBy: ScheduleTrigger = 'cron'): Promise<void> {
  // Check if cleanup is enabled (skip check if manually triggered)
  if (triggeredBy === 'cron' && !(await getEventCleanupEnabled())) {
    return; // Skip execution if disabled
  }
  const begunAt = Date.now();
  // Record this run so it shows up in the execution history
  const execution = await createScheduleExecution({
    scheduleType: 'system_cleanup',
    scheduleId: SYSTEM_EVENT_CLEANUP_ID,
    environmentId: null,
    entityName: 'Container event cleanup',
    triggeredBy,
    status: 'running'
  });
  await updateScheduleExecution(execution.id, {
    startedAt: new Date().toISOString()
  });
  // Mirror every message to the console and the persisted execution log
  const log = async (message: string) => {
    console.log(`[Event Cleanup] ${message}`);
    await appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
  };
  // Stamp completion time/duration plus the caller-supplied fields
  const finish = async (patch: Record<string, unknown>) => {
    await updateScheduleExecution(execution.id, {
      completedAt: new Date().toISOString(),
      duration: Date.now() - begunAt,
      ...patch
    });
  };
  try {
    // Loaded lazily, preserving the original module-load behavior
    const { deleteOldContainerEvents } = await import('../../db');
    const retentionDays = await getEventRetentionDays();
    await log(`Starting cleanup of events older than ${retentionDays} days`);
    const deleted = await deleteOldContainerEvents(retentionDays);
    await log(`Removed ${deleted} old container events`);
    await finish({ status: 'success', details: { deletedCount: deleted, retentionDays } });
  } catch (error: any) {
    await log(`Error: ${error.message}`);
    await finish({ status: 'failed', errorMessage: error.message });
  }
}
/**
 * Execute volume helper cleanup job.
 * Cleans up stale dockhand-browse-* containers used for volume browsing.
 * @param triggeredBy - What triggered this execution
 * @param cleanupFns - Optional cleanup functions (passed from scheduler to avoid dynamic import issues)
 */
export async function runVolumeHelperCleanupJob(
  triggeredBy: ScheduleTrigger = 'cron',
  cleanupFns?: {
    cleanupStaleVolumeHelpers: () => Promise<void>;
    cleanupExpiredVolumeHelpers: () => Promise<void>;
  }
): Promise<void> {
  const begunAt = Date.now();
  // Record this run so it shows up in the execution history
  const execution = await createScheduleExecution({
    scheduleType: 'system_cleanup',
    scheduleId: SYSTEM_VOLUME_HELPER_CLEANUP_ID,
    environmentId: null,
    entityName: 'Volume helper cleanup',
    triggeredBy,
    status: 'running'
  });
  await updateScheduleExecution(execution.id, {
    startedAt: new Date().toISOString()
  });
  // Mirror every message to the console and the persisted execution log
  const log = async (message: string) => {
    console.log(`[Volume Helper Cleanup] ${message}`);
    await appendScheduleExecutionLog(execution.id, `[${new Date().toISOString()}] ${message}`);
  };
  // Stamp completion time/duration plus the caller-supplied fields
  const finish = async (patch: Record<string, unknown>) => {
    await updateScheduleExecution(execution.id, {
      completedAt: new Date().toISOString(),
      duration: Date.now() - begunAt,
      ...patch
    });
  };
  try {
    await log('Starting cleanup of stale and expired volume helper containers');
    if (cleanupFns) {
      // Use provided functions (from scheduler static imports)
      await cleanupFns.cleanupStaleVolumeHelpers();
      await cleanupFns.cleanupExpiredVolumeHelpers();
    } else {
      // Fallback to dynamic import (may not work in production).
      // NOTE(review): this pulls runVolumeHelperCleanup from '../../db' while
      // the scheduler imports the helpers from the docker module — confirm
      // the db module really exposes this fallback.
      const { runVolumeHelperCleanup } = await import('../../db');
      await runVolumeHelperCleanup();
    }
    await log('Cleanup completed successfully');
    await finish({ status: 'success' });
  } catch (error: any) {
    await log(`Error: ${error.message}`);
    await finish({ status: 'failed', errorMessage: error.message });
  }
}

View File

@@ -0,0 +1,114 @@
/**
* Shared utilities for container and environment auto-update tasks.
*/
import type { VulnerabilityCriteria } from '../../db';
import type { VulnerabilitySeverity } from '../../scanner';
/**
 * Parse image name and tag from a full image reference.
 * Handles various formats:
 * - nginx → ["nginx", "latest"]
 * - nginx:1.25 → ["nginx", "1.25"]
 * - registry.example.com:5000/myimage:v1 → ["registry.example.com:5000/myimage", "v1"]
 * - nginx:latest-dockhand-pending → ["nginx", "latest-dockhand-pending"]
 * Digest references (name@sha256:...) are returned unchanged with an empty tag.
 */
export function parseImageNameAndTag(imageName: string): [string, string] {
  // Digest-pinned references carry no movable tag; return the full reference.
  if (imageName.includes('@sha256:')) {
    return [imageName, ''];
  }
  const colonIdx = imageName.lastIndexOf(':');
  // No colon at all → implicit "latest". A slash after the last colon means
  // that colon belongs to a registry host:port (registry:5000/image), not a
  // tag, so the tag is also the implicit "latest".
  if (colonIdx === -1 || imageName.slice(colonIdx + 1).includes('/')) {
    return [imageName, 'latest'];
  }
  // Otherwise the last colon separates repo from tag.
  return [imageName.slice(0, colonIdx), imageName.slice(colonIdx + 1)];
}
/**
 * Determine if an update should be blocked based on vulnerability criteria.
 *
 * @param criteria - which severity policy to apply ('never' allows everything)
 * @param newScanSummary - severity counts for the freshly scanned image
 * @param currentScanSummary - severity counts for the running image; only
 *   consulted by 'more_than_current' (which never blocks when it is absent)
 * @returns blocked flag plus a human-readable reason (empty when allowed)
 */
export function shouldBlockUpdate(
  criteria: VulnerabilityCriteria,
  newScanSummary: VulnerabilitySeverity,
  currentScanSummary?: VulnerabilitySeverity
): { blocked: boolean; reason: string } {
  // Negligible/unknown findings are deliberately excluded from the total.
  const countVulns = (s: { critical: number; high: number; medium: number; low: number }) =>
    s.critical + s.high + s.medium + s.low;
  const totalVulns = countVulns(newScanSummary);
  if (criteria === 'any' && totalVulns > 0) {
    return {
      blocked: true,
      reason: `Found ${totalVulns} vulnerabilities (${newScanSummary.critical} critical, ${newScanSummary.high} high, ${newScanSummary.medium} medium, ${newScanSummary.low} low)`
    };
  }
  if (criteria === 'critical_high' && (newScanSummary.critical > 0 || newScanSummary.high > 0)) {
    return {
      blocked: true,
      reason: `Found ${newScanSummary.critical} critical and ${newScanSummary.high} high severity vulnerabilities`
    };
  }
  if (criteria === 'critical' && newScanSummary.critical > 0) {
    return {
      blocked: true,
      reason: `Found ${newScanSummary.critical} critical vulnerabilities`
    };
  }
  if (criteria === 'more_than_current' && currentScanSummary) {
    const currentTotal = countVulns(currentScanSummary);
    if (totalVulns > currentTotal) {
      return {
        blocked: true,
        reason: `New image has ${totalVulns} vulnerabilities vs ${currentTotal} in current image`
      };
    }
  }
  // 'never', unmatched criteria, or no threshold exceeded → allow the update.
  return { blocked: false, reason: '' };
}
/**
 * Check if a container is the Dockhand application itself.
 * Used to prevent Dockhand from updating its own container.
 */
export function isDockhandContainer(imageName: string): boolean {
  // Case-insensitive substring match on the published repository path.
  return /fnsys\/dockhand/i.test(imageName);
}
/**
 * Combine multiple scan summaries by taking the maximum of each severity level.
 * An empty input yields an all-zero summary.
 */
export function combineScanSummaries(results: { summary: VulnerabilitySeverity }[]): VulnerabilitySeverity {
  const combined = { critical: 0, high: 0, medium: 0, low: 0, negligible: 0, unknown: 0 };
  for (const { summary } of results) {
    // Keep the worst (highest) count reported by any scanner per level.
    if (summary.critical > combined.critical) combined.critical = summary.critical;
    if (summary.high > combined.high) combined.high = summary.high;
    if (summary.medium > combined.medium) combined.medium = summary.medium;
    if (summary.low > combined.low) combined.low = summary.low;
    if (summary.negligible > combined.negligible) combined.negligible = summary.negligible;
    if (summary.unknown > combined.unknown) combined.unknown = summary.unknown;
  }
  return combined;
}