---
# Recurring task examples:
#
# periodic_cleanup:
#   class: CleanSoftDeletedRecordsJob
#   queue: background
#   args: [ 1000, { batch_size: 500 } ]
#   schedule: every hour
#
# periodic_cleanup_with_command:
#   command: "SoftDeletedRecord.due.delete_all"
#   priority: 2
#   schedule: at 5am every day

# NOTE: clear_solid_queue_finished_jobs was removed; preserve_finished_jobs: false
# in queue.yml makes it unnecessary.

# Clean up failed jobs older than 1 day
cleanup_failed_jobs:
  command: "SolidQueue::FailedExecution.where('created_at < ?', 1.day.ago).delete_all"
  queue: background
  schedule: every 6 hours

# Disable expired rules automatically
expired_rules_cleanup:
  class: ExpiredRulesCleanupJob
  queue: default
  schedule: every hour

# Clean up old events based on retention setting
cleanup_old_events:
  class: CleanupOldEventsJob
  queue: background
  schedule: every hour

# Export events from PostgreSQL to DuckLake for fast analytics
export_events_to_ducklake:
  class: ExportEventsToDucklakeJob
  queue: default
  schedule: every 1 minutes

# Merge DuckLake files and clean up immediately after
merge_ducklake_files:
  class: MergeDucklakeFilesJob
  queue: background
  schedule: every 15 minutes

# OLD PARQUET SYSTEM (DISABLED - using DuckLake now)
# export_events_to_parquet:
#   class: ExportEventsToParquetJob
#   queue: default
#   schedule: every 1 minutes
#
# consolidate_parquet_hourly:
#   class: ConsolidateParquetHourlyJob
#   queue: default
#   schedule: "5 * * * *" # At 5 minutes past every hour
#
# consolidate_parquet_weekly:
#   class: ConsolidateParquetWeeklyJob
#   queue: default
#   schedule: "5 0 * * 1" # Monday at 00:05