# backupninja/examples/example.restic
#
# restic handler example file
#
# Mandatory options are un-commented with suggested values
# Other options are commented out with their default values
#
# when = everyday at 01
[general]
# Create a new backup of files and/or directories [yes/no]
run_backup = yes
# Remove snapshots from the repository [yes/no]
#run_forget = no
# Check the repository for errors [yes/no]
#run_check = no
# Remove unneeded data from the repository [yes/no]
#run_prune = no
# Retry to run the command [integer]
#retry_run = 1
# Seconds to wait between retry attempts [integer]
#retry_wait = 5
# Repository to backup to or restore from [path]
repository = /mnt/backup
# Repository password [string]
# NOTE: stored here in plain text; prefer password_file below for real deployments
password = secret
# File to load root certificates from (default: use system certificates) [path]
#cacert =
# Set the cache directory [path]
#cache_dir =
# Auto remove old cache directories [yes/no]
#cleanup_cache =
# Set output mode to JSON for commands that support it [yes/no]
#json =
# Limits downloads to a maximum rate in KiB/s. (default: unlimited) [integer]
#limit_download =
# Limits uploads to a maximum rate in KiB/s. (default: unlimited) [integer]
#limit_upload =
# Do not use a local cache [yes/no]
#no_cache =
# Do not lock the repo, this allows some operations on read-only repos [yes/no]
#no_lock =
# Set extended option (can be specified multiple times) [key=value]
#option =
# Read the repository password from a file [path]
#password_file =
# Do not output comprehensive progress report [yes/no]
#quiet =
# Path to a file containing PEM encoded TLS client certificate and private key [path]
#tls_client_cert =
[s3]
#aws_access_key_id =
#aws_secret_access_key =
#aws_session_token =
[swift]
#os_auth_url =
#os_tenant_id =
#os_tenant_name =
#os_username =
#os_password =
#os_region_name =
[b2]
#b2_account_id =
#b2_account_key =
[azure]
#azure_account_name =
#azure_account_key =
[gs]
#google_project_id =
#google_application_credentials =
[backup]
# Files and directories to backup (can be specified multiple times) [path]
#include = /
# Exclude a pattern (can be specified multiple times) [pattern]
#exclude = /dev
#exclude = /lost+found
#exclude = /media
#exclude = /mnt
#exclude = /proc
#exclude = /run
#exclude = /sys
#exclude = /tmp
#exclude = /var/cache
#exclude = /var/lock
#exclude = /var/spool
#exclude = /var/run
#exclude = /var/tmp
# Excludes cache directories that are marked with a CACHEDIR.TAG file [yes/no]
#exclude_caches =
# Read exclude patterns from a file (can be specified multiple times) [path]
#exclude_file =
# Takes filename[:header], exclude contents of directories containing filename (except filename itself) if header of that file is as provided (can be specified multiple times) [stringArray]
#exclude_if_present =
# Read the files to backup from file (can be combined with file args) [path]
#files_from =
# Force re-reading the target files/directories (overrides the "parent" flag) [yes/no]
#force =
# Set the hostname for the snapshot manually. To prevent an expensive rescan use the "parent" flag [string]
#hostname =
# Exclude other file systems [yes/no]
#one_file_system =
# Use this parent snapshot (default: last snapshot in the repo that has the same target files/directories) [string]
#parent =
# Add a tag for the new snapshot (can be specified multiple times) [string]
#tag =
# Time of the backup (ex. '2012-11-01 22:08:41') (default: now) [string]
#time =
# Store the atime for all files and directories [yes/no]
#with_atime =
[forget]
# Keep the last n snapshots [integer]
#keep_last = 7
# Keep the last n hourly snapshots [integer]
#keep_hourly =
# Keep the last n daily snapshots [integer]
#keep_daily =
# Keep the last n weekly snapshots [integer]
#keep_weekly =
# Keep the last n monthly snapshots [integer]
#keep_monthly =
# Keep the last n yearly snapshots [integer]
#keep_yearly =
# Keep snapshots that were created within duration before the newest (e.g. 1y5m7d) [string]
#keep_within =
# Keep snapshots with this tag (can be specified multiple times) [string]
#keep_tag =
# Only consider snapshots with the given host [string]
#host =
# Only consider snapshots which include this tag (can be specified multiple times) [string]
#tag =
# Only consider snapshots which include this (absolute) path (can be specified multiple times) [string]
#path =
# Use compact format [yes/no]
#compact =
# String for grouping snapshots by host,paths,tags (default "host,paths") [string]
#group_by =
# Do not delete anything, just print what would be done [yes/no]
#dry_run =
# Automatically run the 'prune' command if snapshots have been removed [yes/no]
#prune =
[check]
# Find unused blobs [yes/no]
#check_unused =
# Read all data blobs [yes/no]
#read_data =
# Read subset of data packs [string]
#read_data_subset =
# Use the cache [yes/no]
#with_cache =