From bc0c105dbfe3fa0740f99bbe4656113290a0f3f2 Mon Sep 17 00:00:00 2001 From: vlw Date: Thu, 1 Jan 2026 00:45:12 +0100 Subject: [PATCH] doc: update README --- README.md | 82 +++++++++++++++++++------------------------------------ 1 file changed, 28 insertions(+), 54 deletions(-) diff --git a/README.md b/README.md index 224c247..e0b3de8 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,14 @@ # 3rd -A script to automate the "off-site copy" in the 3-2-1 Backup strategy with encryption, uploading to AWS S3, and independent definition of compression method and [temporary] archive storage locations for uploading large archives to S3, with support for independent configurations for subdirectories as well. +A script to automate the 3rd "off-site copy" step in the 3-2-1 Backup strategy. Each directory has an independent configuration of compression level, encryption password, AWS S3 destination, and temporary storage location while being uploaded to S3. This script is a wrapper for the AWS CLI `aws` and the 7zip CLI `7z` and is meant to be run on Linux. Other operating systems are untested. -## Key features -- Archive encryption before uploading to AWS S3. -- Independent compression level, archive location, S3 storage location, for directories and subdirectories. - # Installation Make sure you have the following prerequisites before starting: -- Python 3 installed. -- The [7zip CLI](https://www.7-zip.org/download.html) installed. -- The [AWS CLI](https://aws.amazon.com/cli/) installed and configured with write access to your target bucket. +- Python 3 +- The [7zip CLI](https://www.7-zip.org/download.html) +- The [AWS CLI](https://aws.amazon.com/cli/) +- Write permission to an AWS S3 bucket 1. **Clone this repository** cp -p .example.config.json .config.json [See the config file documentation for more information](#config). -4. **Run `run.py` in autorun mode** +4. 
**Run `run.py` with your config file** ``` -python3 run.py -a +python3 run.py -i .config.json ``` [See the CLI section for a list of all available arguments](#cli). @@ -41,7 +38,7 @@ python3 run.py -a ## Optional cron Schedule this backup script to run with a crontab entry, for example: ``` -30 2 * * 3 cd /opt/3rd && /usr/bin/python3 run.py -a +30 2 * * 3 cd /opt/3rd && /usr/bin/python3 run.py -i .config.json ``` Which will run at 2:30 each Wednesday. @@ -49,41 +46,16 @@ ## Config -The config file `.config.json` is used to define parameters and which directories to archive (in autorun mode). +The config file `.config.json` is used to define parameters and which directories to archive. ```json -{ - "config": { - "cloud": { - // Name of the target AWS S3 bucket - "bucket": "vlw-test" - // .. More options to come (probably) - }, - // Default settings for each archive item - "archive": { - // The password used to encrypt all archives - "password": "mypassword", - // The compression level to use when "compress" is true for an item - "compression_level": 10, - // Default archive location when "path_temp" is null for an item - "default_path_temp": "/tmp/output" - } +[ + { + "password": "mypassword", // AES-256 encryption password. Set to false to disable encryption + "compression": 10, // Compression level between 0-10, where 0 is STORE and 10 is max compression. Set to 0 or false/null to disable compression + "abspath_temp": "/tmp", // Directory to store the created archive while it's being uploaded to S3. Set to false/null to use the system temp-directory + "abspath_target": "", // An ABSOLUTE path to the directory or file to archive + "abspath_destination": "s3://<bucket>/<path>" // A fully qualified AWS S3 URL }, - // Array of archive items, see next section - "archive": [] -} - -``` - -Each archive item uses the following structure: -```json -{ - // Enables or disables compression for this directory. STORE will be used if disabled. - "compress": true, - // Store the encrypted archive in this directory temporarily while its being uploaded to S3. 
- "path_temp": "/tmp/", - // The relative path from the bucket root directory to store the uploaded object - "path_target_to": "/myarchive.7z", - // An absolute path (very important) to the target folder to upload - "path_target_from": "/my/archive" -} + // etc.. +] ``` ## Common parent directories @@ -92,19 +64,21 @@ One of the key features of this script is that it can perform different archivin If you have the directory `/my/archive` with the following config: ```json { - "compress": true, - "path_temp": null, - "path_target_to": "/myarchive.7z", - "path_target_from": "/my/archive" + "password": "mypassword", + "compression": 10, + "abspath_temp": null, + "abspath_target": "/my/archive", + "abspath_destination": "s3://my-bucket/archive.7z" } ``` And a subdirectory `/my/archive/subdirectory` with the following config: ```json { - "compress": true, - "path_temp": null, - "path_target_to": "/my-subdirectory.7z", - "path_target_from": "/my/archive/subdirectory" + "password": "mypassword", + "compression": 10, + "abspath_temp": null, + "abspath_target": "/my/archive/subdirectory", + "abspath_destination": "s3://my-bucket/subdirectory.7z" } ``` The `/my/archive/subdirectory` will be **excluded** from the `/my/archive` archive since it has an overriding archive configuration. @@ -115,6 +89,6 @@ Available command line argument with `run.py`: arg|Name|Default|Description --|--|--|-- `-s`|`--sleep`|2|Set a global sleep duration between commands -`-a`|`--autorun`|False|Archive each item in the .config.json archive array +`-i`|`--input`|*None*|Path to a config file to load `-d`|`--dryrun`|False|Perform a dry run. Archives will not be uploaded to S3. `-l`|`--log-level`|`StdoutLevel.STANDARD`|Set a custom log level when printing to the console. See `/src/Enums.py#StdoutLevel` \ No newline at end of file