mirror of https://codeberg.org/vlw/3rd.git
synced 2026-01-12 06:46:00 +01:00

Compare commits (4 commits)

| Author | SHA1 | Date |
|---|---|---|
| | cd1c8c4e91 | |
| | f317c41a67 | |
| | 234e125d84 | |
| | 310fa9b66a | |

9 changed files with 264 additions and 137 deletions
.example.config.json

```diff
@@ -1,26 +1,16 @@
-{
-    "config": {
-        "cloud": {
-            "bucket": "my-bucket"
-        },
-        "archive": {
-            "password": "mypassword",
-            "compression_level": 10,
-            "default_path_temp": "/tmp/"
-        }
-    },
-    "archive": [
-        {
-            "compress": true,
-            "path_temp": null,
-            "path_target_to": "<replace with bucket object path>",
-            "path_target_from": "<replace with ABSOLUTE path to a target directory>"
-        },
-        {
-            "compress": true,
-            "path_temp": null,
-            "path_target_to": "<replace with bucket object path>",
-            "path_target_from": "<replace with ABSOLUTE path to a target directory>"
-        }
-    ]
-}
+[
+    {
+        "password": "mypassword",
+        "compression": 10,
+        "abspath_temp": null,
+        "abspath_target": "<replace with ABSOLUTE path to a target directory>",
+        "abspath_destination": "s3://<replace with bucket>/<replace with destination>"
+    },
+    {
+        "password": "mypassword",
+        "compression": 10,
+        "abspath_temp": null,
+        "abspath_target": "<replace with ABSOLUTE path to a target directory>",
+        "abspath_destination": "s3://<replace with bucket>/<replace with destination>"
+    }
+]
```
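The new format drops the nested `config`/`cloud` sections in favour of a flat JSON array of archive items. As a minimal sketch (not part of the change itself), reading and iterating the new format takes only a few lines, which is essentially what the reworked `Config.from_json_file` further down does:

```python
import json

# Minimal sketch: load the flat config format shown above and list each
# target with its S3 destination. Assumes a .config.json copied from the
# example file, with the placeholders filled in.
with open(".config.json", "r") as f:
    items = json.load(f)

for item in items:
    print(item["abspath_target"], "->", item["abspath_destination"])
```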
README.md (86 changed lines)
```diff
@@ -1,17 +1,14 @@
 # 3rd
-A script to automate the "off-site copy" in the 3-2-1 Backup strategy with encryption, uploading to AWS S3, and independent definition of compression method and [temporary] archive storage locations for uploading large archives to S3, with support for independent configurations for subdirectories as well.
+A script to automate the 3rd "off-site copy" step in the 3-2-1 Backup strategy. Each directory has an independent configuration for compression level, encryption password, AWS S3 destination, and temporary storage location used while the archive is uploaded to S3.
 
 This script is a wrapper for the AWS CLI `aws` and the 7zip CLI `7z` and is meant to be run on Linux. Other operating systems are untested.
 
-## Key features
-- Archive encryption before uploading to AWS S3.
-- Independent compression level, archive location, S3 storage location, for directories and subdirectories.
-
 # Installation
 Make sure you have the following prerequisites before starting:
-- Python 3 installed.
-- The [7zip CLI](https://www.7-zip.org/download.html) installed.
-- The [AWS CLI](https://aws.amazon.com/cli/) installed and configured with write access to your target bucket.
+- Python 3
+- The [7zip CLI](https://www.7-zip.org/download.html)
+- The [AWS CLI](https://aws.amazon.com/cli/)
+- Write permission to an AWS S3 bucket
 
 
 1. **Clone this repository**
```
````diff
@@ -30,10 +27,10 @@ cp -p .example.config.json .config.json
 
 [See the config file documentation for more information](#config).
 
-4. **Run `run.py` in autorun mode**
+4. **Run `run.py` with your config file**
 
 ```
-python3 run.py -a
+python3 run.py -i .config.json
 ```
 [See the CLI section for a list of all available arguments](#cli).
 
````
````diff
@@ -41,49 +38,26 @@ python3 run.py -a
 ## Optional cron
 Schedule this backup script to run with a crontab entry, for example:
 ```
-30 2 * * 3 cd /opt/3rd && /usr/bin/python3 run.py -a
+30 2 * * 3 cd /opt/3rd && /usr/bin/python3 run.py -i .config.json
 ```
 Which will run at 2:30 AM each Wednesday.
 
 # Config
-The config file `.config.json` is used to define parameters and which directories to archive (in autorun mode).
+The config file (`.config.json` by default) is used to define which directories to archive, along with their parameters.
 
+Directories are specified as an array of objects; each object has key-value properties in the following format:
 
 ```json
-{
-    "config": {
-        "cloud": {
-            // Name of the target AWS S3 bucket
-            "bucket": "vlw-test"
-            // .. More options to come (probably)
-        },
-        // Default settings for each archive item
-        "archive": {
-            // The password used to encrypt all archives
-            "password": "mypassword",
-            // The compression level to use when "compress" is true for an item
-            "compression_level": 10,
-            // Default archive location when "path_temp" is null for an item
-            "default_path_temp": "/tmp/output"
-        }
-    },
-    // Array of archive items, see next section
-    "archive": []
-}
-```
-
-Each archive item uses the following structure:
-```json
-{
-    // Enables or disables compression for this directory. STORE will be used if disabled.
-    "compress": true,
-    // Store the encrypted archive in this directory temporarily while it's being uploaded to S3.
-    "path_temp": "/tmp/",
-    // The relative path from the bucket root directory to store the uploaded object
-    "path_target_to": "/myarchive.7z",
-    // An absolute path (very important) to the target folder to upload
-    "path_target_from": "/my/archive"
-}
+[
+    {
+        "password": "mypassword", // AES-256 encryption password. Set to false to disable encryption
+        "compression": 10, // Compression level between 0-10, where 0 is STORE and 10 is max compression. Set to 0 or false/null to disable compression
+        "abspath_temp": "/tmp", // Directory to store the created archive while it's being uploaded to S3. Set to false/null to use the system temp-directory
+        "abspath_target": "<replace with ABSOLUTE path to a target directory>", // An ABSOLUTE path to the directory or file to archive
+        "abspath_destination": "s3://<replace with bucket>/<replace with destination>" // A fully qualified AWS S3 URL
+    },
+    // etc..
+]
 ```
 
 ## Common parent directories
 
````
````diff
@@ -92,19 +66,21 @@ One of the key features of this script is that it can perform different archiving
 If you have the directory `/my/archive` with the following config:
 ```json
 {
-    "compress": true,
-    "path_temp": null,
-    "path_target_to": "/myarchive.7z",
-    "path_target_from": "/my/archive"
+    "password": "mypassword",
+    "compression": 10,
+    "abspath_temp": null,
+    "abspath_target": "/my/archive",
+    "abspath_destination": "s3://my-bucket/archive.7z"
 }
 ```
 And a subdirectory `/my/archive/subdirectory` with the following config:
 ```json
 {
-    "compress": true,
-    "path_temp": null,
-    "path_target_to": "/my-subdirectory.7z",
-    "path_target_from": "/my/archive/subdirectory"
+    "password": "mypassword",
+    "compression": 10,
+    "abspath_temp": null,
+    "abspath_target": "/my/archive/subdirectory",
+    "abspath_destination": "s3://my-bucket/subdirectory.7z"
 }
 ```
 The `/my/archive/subdirectory` will be **excluded** from the `/my/archive` archive since it has an overriding archive configuration.
 
````
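For illustration only (this is a sketch, not the project's exact implementation), the exclusion rule can be expressed with `os.path.commonpath`:

```python
import os

def is_nested(parent: str, child: str) -> bool:
    """Sketch of the exclusion rule: a target that sits inside another
    configured target is excluded from the parent's archive."""
    parent = os.path.normpath(parent)
    child = os.path.normpath(child)
    return child != parent and os.path.commonpath([parent, child]) == parent

print(is_nested("/my/archive", "/my/archive/subdirectory"))  # True
print(is_nested("/my/archive", "/my/other"))                 # False
```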
```diff
@@ -115,6 +91,6 @@ Available command line arguments for `run.py`:
 arg|Name|Default|Description
 --|--|--|--
 `-s`|`--sleep`|2|Set a global sleep duration between commands
-`-a`|`--autorun`|False|Archive each item in the .config.json archive array
+`-i`|`--input`|*None*|Path to a config file to load
 `-d`|`--dryrun`|False|Perform a dry run. Archives will not be uploaded to S3.
 `-l`|`--log-level`|`StdoutLevel.STANDARD`|Set a custom log level when printing to the console. See `/src/Enums.py#StdoutLevel`
```
run.py (29 changed lines)
```diff
@@ -1,3 +1,4 @@
+import os
 import typing
 import argparse
 
```
```diff
@@ -9,21 +10,18 @@ from src.Enums import StdoutLevel, Namespace
 
 stdout = Stdout(Namespace.CLI)
 
-def autorun() -> None:
+def main() -> None:
     """
-    Autorun
+    Autorun from a config file
 
+    Args:
+        file (str): Path to the config file to load
     """
 
-    for item in Config().config["archive"]:
-        stdout.info(f"Autorun: {item}")
-
-        Aws(Archive(item)).upload()
-
-if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Testing")
 
     parser.add_argument("-s", "--sleep", type=int, help="Global log sleep level")
-    parser.add_argument("-a", "--autorun", action="store_true", help="Autorun")
+    parser.add_argument("-i", "--input", help="Load config file from path", default=".config.json")
     parser.add_argument("-d", "--dryrun", action="store_true", help="Dry run")
     parser.add_argument("-l", "--log-level", type=str, help="Global log level")
 
```
```diff
@@ -45,12 +43,19 @@ if __name__ == "__main__":
     # Enable dry run if requested
     if args.dryrun:
         Aws.dry_run = True
+        Archive.preserve_archives = True
         stdout.ok("Dry run enabled")
 
     stdout.log("Starting...")
 
-    # Autorun archives from config
-    if args.autorun:
-        autorun()
+    if not os.path.isfile(args.input):
+        stdout.error(f"No config file found at path: '{args.input}'")
+        exit(1)
+
+    for item in Config.from_json_file(args.input):
+        Aws(Archive(item)).upload()
 
     stdout.log("Finished!")
 
+if __name__ == "__main__":
+    main()
```
src/Archive/Archive.py

```diff
@@ -6,21 +6,23 @@ import subprocess
 from ..Cli import Cli
 from ..Stdout import Stdout
 from ..Config import Config
-from .Filesystem import PATH_MANIFEST, Filesystem
+from .Filesystem import Filesystem
 from ..Enums import Namespace, Format, StdoutLevel
 
 class Archive():
-    def __init__(self, item: dict):
+    preserve_archives = False
+
+    def __init__(self, item: Config):
         """
         Create a new Archive instance for a target item
 
         Args:
-            item (dict): A dictionary of archive instructions
+            item (Config): Target item to archive
         """
 
         self.item = item
-        self.__fs = Filesystem(self.item["path_target_from"])
-        self.__config = Config().config["config"]
+        self.__fs = Filesystem(self.item.abspath_target)
         self.__stdout = Stdout(Namespace.ARCHIVE)
 
         if self.__fs.valid:
```
```diff
@@ -37,19 +39,18 @@
             str: Absolute pathname to target zip file
         """
 
-        output_path = self.__config["archive"]["default_path_temp"]
-
-        # Override temporary file location if specified
-        if self.item["path_temp"]:
-            output_path = self.item["path_temp"]
-
-        return f"{output_path.rstrip('/')}/{hashlib.md5(self.item['path_target_from'].encode()).hexdigest()}.7z"
+        filename = hashlib.md5(self.item.abspath_target.encode()).hexdigest()
+
+        return f"{self.item.abspath_temp.rstrip('/')}/{filename}.7z"
 
     def cleanup(self) -> None:
         """
         Remove archive file
         """
 
+        if Archive.preserve_archives:
+            return
+
         os.remove(self.output_path)
         self.__stdout.info(f"Archive removed: {self.output_path}")
 
```
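The temporary archive name is now derived from the target path rather than a configured output directory. A small sketch of the naming scheme (example values, not from the repository):

```python
import hashlib

# The archive file name is the MD5 hex digest of the target's absolute
# path, so the same target always maps to the same temporary file.
target = "/my/archive"  # example target path
filename = hashlib.md5(target.encode()).hexdigest()
print(f"/tmp/{filename}.7z")  # -> /tmp/<32 hex chars>.7z
```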
```diff
@@ -58,31 +59,33 @@
         Skip archiving of target item
         """
 
-        self.__stdout.warn(f"Archiving skipped for: {self.item['path_target_from']}")
+        self.__stdout.warn(f"Archiving skipped for: {self.item.abspath_target}")
 
+        self.cleanup()
+
     def __compress(self) -> None:
         """
         Compress the target path
         """
 
-        self.__stdout.log(f"Starting compression for: {self.item['path_target_from']}").sleep()
+        self.__stdout.log(f"Starting compression for: {self.item.abspath_target}").sleep()
 
         # Prepare command line arguments
         args = [
             "7z",
             "a",
             "-t7z",
-            f"-mx={self.__config['archive']['compression_level']}"
+            f"-mx={self.item.compression}"
         ]
 
         # Enable encryption if archive password is set
-        if self.__config["archive"]["password"]:
+        if self.item.password:
             args.append("-mhe=on")
-            args.append(f"-p{self.__config['archive']['password']}")
+            args.append(f"-p{self.item.password}")
 
         # Append output path and file list manifest arguments for 7zip
         args.append(self.output_path)
-        args.append(self.item["path_target_from"])
+        args.append(self.item.abspath_target)
 
         # Exclude directories that have their own archive configuration
         for exclude in self.__fs.common_relative_paths():
@@ -96,6 +99,7 @@
             return self.__die()
 
         self.__stdout.info(f"Temporary archive placed at: {self.__fs.path}").sleep()
-        self.__stdout.ok(f"Compression completed for: {self.item['path_target_from']}")
+        self.__stdout.ok(f"Compression completed for: {self.item.abspath_target}")
 
         cmd.cleanup()
+        self.cleanup()
```
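Put together, the per-item settings produce a 7z invocation like the following sketch (example values taken from the README; the argument order mirrors the code above):

```python
# Sketch of the assembled 7z command for an item with the README's
# example password and compression level.
password = "mypassword"
compression = 10
args = ["7z", "a", "-t7z", f"-mx={compression}"]
if password:
    args += ["-mhe=on", f"-p{password}"]
args += ["/tmp/archive.7z", "/my/archive"]  # output path, then target
print(" ".join(args))
```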
src/Archive/Filesystem.py

```diff
@@ -1,13 +1,10 @@
 import os
-import tempfile
 from typing import Union
 
 from ..Config import Config
 from ..Stdout import Stdout
 from ..Enums import Namespace
 
-PATH_MANIFEST = f"{tempfile.gettempdir().rstrip('/')}/archive_manifest.txt"
-
 class Filesystem():
     def __init__(self, path: str):
         """
@@ -19,7 +16,6 @@ class Filesystem():
 
         self.valid = True
         self.path = path
-        self.__config = Config().config
         self.__stdout = Stdout(Namespace.FILESYSTEM)
 
         if not os.path.exists(self.path):
@@ -37,8 +33,8 @@ class Filesystem():
 
         paths = []
 
-        for item in self.__config["archive"]:
-            paths.append(item["path_target_from"])
+        for path in Config.pathnames:
+            paths.append(path)
 
         return paths
 
@@ -75,6 +71,7 @@ class Filesystem():
         Returns:
             str | None: Common pathname with base path or None if no common path (or is base path)
         """
 
         base_path = os.path.normpath(self.path)
         target_path = os.path.normpath(path)
 
```
src/Config.py (169 changed lines)

```diff
@@ -1,25 +1,174 @@
 import json
-import typing
+import tempfile
 from pathlib import Path
+from typing import Self, Union
 
-CONFIG_FILEPATH = Path.cwd() / ".config.json"
+from .Enums import ConfigKeys
 
 class Config():
-    def __init__(self):
+    pathnames = set()
+
+    @staticmethod
+    def for_each(items: list) -> Self:
+        """
+        Returns a generator which iterates over each item in a list of item configs
+
+        Args:
+            items (list): The list to iterate over
+
+        Returns:
+            Self: Instance of the Config class
+
+        Yields:
+            Iterator[Self]: Config class for the current item
+        """
+
+        for item in items:
+            yield Config(item)
+
+    @staticmethod
+    def from_json_file(pathname: str) -> Self:
+        """
+        Load item configs from a JSON file
+
+        Args:
+            pathname (str): Path to the JSON config file to load
+
+        Returns:
+            Self: A generator of Config instances, one per item
+        """
+
+        with open(pathname, "r") as f:
+            config = json.load(f)
+
+        for item in config:
+            Config.pathnames.add(item[ConfigKeys.ABSPATH_TARGET.value])
+
+        return Config.for_each(config)
+
+    @staticmethod
+    def __throw_missing_key(key: ConfigKeys) -> None:
+        """
+        Raises a KeyError for an item config key if it does not exist
+
+        Args:
+            key (ConfigKeys): The key to raise an error for
+
+        Raises:
+            KeyError: Raised from an item config key
+        """
+
+        raise KeyError(f"Expected required item config key '{key.value}' but it was not found")
+
+    @staticmethod
+    def __throw_value_error(key: ConfigKeys, expected_type: str) -> None:
+        """
+        Raise a ValueError for a key with an expected type
+
+        Args:
+            key (ConfigKeys): The item config key to raise an error for
+            expected_type (str): The expected type
+
+        Raises:
+            ValueError: Raised from an item config key and expected value type
+        """
+
+        raise ValueError(f"Item config key '{key.value}' expects a value of type {expected_type}")
+
+    def __init__(self, item: dict):
         """
         Create a new Config instance
         """
 
-        with open(CONFIG_FILEPATH, "r") as f:
-            self.__config = json.load(f)
+        self.__item = item
 
     @property
-    def config(self) -> dict:
+    def password(self) -> str|bool:
         """
-        Returns config variables as a dictionary
+        Returns the password for this item, or False if unset
 
         Returns:
-            dict: Config values
+            str|False: The password, or False if no password is set
         """
 
-        return dict(self.__config)
+        if not self.__key_exists(ConfigKeys.PASSWORD.value):
+            return False
+
+        return self.__item[ConfigKeys.PASSWORD.value] if isinstance(self.__item[ConfigKeys.PASSWORD.value], str) else None
+
+    @property
+    def compression(self) -> int:
+        """
+        Returns the compression level for this item, or 0 if STORE mode should be used
+
+        Returns:
+            int: Compression level between 1-10, or 0 if compression is disabled
+        """
+
+        if not self.__key_exists(ConfigKeys.COMPRESSION.value):
+            return 0
+
+        if not isinstance(self.__item[ConfigKeys.COMPRESSION.value], int) or self.__item[ConfigKeys.COMPRESSION.value] == 0:
+            return 0
+
+        return max(1, min(self.__item[ConfigKeys.COMPRESSION.value], 10))
+
+    @property
+    def abspath_temp(self) -> str:
+        """
+        Returns the path to the directory where the created archive will be stored until it's uploaded
+
+        Returns:
+            str: Absolute path to the destination directory
+        """
+
+        if not self.__key_exists(ConfigKeys.ABSPATH_TEMP.value):
+            return tempfile.gettempdir()
+
+        return self.__item[ConfigKeys.ABSPATH_TEMP.value] if isinstance(self.__item[ConfigKeys.ABSPATH_TEMP.value], str) else tempfile.gettempdir()
+
+    @property
+    def abspath_target(self) -> str:
+        """
+        Returns an absolute path to the target to be archived
+
+        Returns:
+            str: Absolute path to the target
+        """
+
+        if not self.__key_exists(ConfigKeys.ABSPATH_TARGET.value):
+            return Config.__throw_missing_key(ConfigKeys.ABSPATH_TARGET)
+
+        if not isinstance(self.__item[ConfigKeys.ABSPATH_TARGET.value], str):
+            return Config.__throw_value_error(ConfigKeys.ABSPATH_TARGET, str)
+
+        return self.__item[ConfigKeys.ABSPATH_TARGET.value]
+
+    @property
+    def abspath_destination(self) -> str:
+        """
+        Returns the fully qualified AWS S3 destination URL for this item
+
+        Returns:
+            str: S3 URL to upload the archive to
+        """
+
+        if not self.__key_exists(ConfigKeys.ASBPATH_DESTINATION.value):
+            return Config.__throw_missing_key(ConfigKeys.ASBPATH_DESTINATION)
+
+        if not isinstance(self.__item[ConfigKeys.ASBPATH_DESTINATION.value], str):
+            return Config.__throw_value_error(ConfigKeys.ASBPATH_DESTINATION, str)
+
+        return self.__item[ConfigKeys.ASBPATH_DESTINATION.value]
+
+    def __key_exists(self, key: str) -> bool:
+        """
+        Returns true if a property key is defined for the current item
+
+        Args:
+            key (str): The key to test
+
+        Returns:
+            bool: True if key exists
+        """
+
+        return key in self.__item
```
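A standalone mirror of the compression fallback introduced above (a sketch, not the project's code): missing or non-integer values disable compression, and integers are clamped to the 1-10 range:

```python
def effective_compression(value) -> int:
    # Mirrors Config.compression: anything that is not a non-zero int
    # means STORE (0); valid levels are clamped to the 1-10 range.
    if not isinstance(value, int) or value == 0:
        return 0
    return max(1, min(value, 10))

assert effective_compression(None) == 0  # missing / null disables compression
assert effective_compression(0) == 0     # explicit STORE
assert effective_compression(15) == 10   # clamped down
assert effective_compression(7) == 7
```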
src/Enums.py

```diff
@@ -1,5 +1,12 @@
 from enum import Enum
 
+class ConfigKeys(Enum):
+    PASSWORD = "password"
+    COMPRESSION = "compression"
+    ABSPATH_TEMP = "abspath_temp"
+    ABSPATH_TARGET = "abspath_target"
+    ASBPATH_DESTINATION = "abspath_destination"
+
 class Namespace(Enum):
     AWS = "AWS"
     CLI = "Command"
```
src/Stdout.py

```diff
@@ -103,7 +103,7 @@ class Stdout():
         """
 
         # Bail out if stdout is disabled
-        if Stdout.global_level.value == StdoutLevel.NONE.value:
+        if self.global_level.value == StdoutLevel.NONE.value:
             return self.die()
 
         print(f"{Format.HEADER.value}> {self.namespace.value}:{Format.ENDC.value}{msg}{Format.ENDC.value}")
@@ -136,7 +136,7 @@ class Stdout():
         """
 
         # Bail out if log level is less than standard
-        if not Stdout.global_level.value >= StdoutLevel.STANDARD.value:
+        if not self.global_level.value >= StdoutLevel.STANDARD.value:
             return self.die()
 
         return self.print(f"  {msg}")
@@ -153,7 +153,7 @@ class Stdout():
         """
 
         # Bail out if log level is less than verbose
-        if not Stdout.global_level.value >= StdoutLevel.VERBOSE.value:
+        if not self.global_level.value >= StdoutLevel.VERBOSE.value:
             return self.die()
 
         return self.print(f"  {msg}")
@@ -170,7 +170,7 @@ class Stdout():
         """
 
         # Bail out if log level is less than debug
-        if not Stdout.global_level.value >= StdoutLevel.DEBUG.value:
+        if not self.global_level.value >= StdoutLevel.DEBUG.value:
             return self.die()
 
         return self.print(f"  {msg}")
@@ -187,7 +187,7 @@ class Stdout():
         """
 
         # Bail out if log level is less than default
-        if not Stdout.global_level.value >= StdoutLevel.STANDARD.value:
+        if not self.global_level.value >= StdoutLevel.STANDARD.value:
             return self.die()
 
         return self.print(f"  {Format.WARNING.value}! WARN: {msg}")
```
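The `Stdout.global_level` to `self.global_level` change relies on standard Python attribute lookup: reading an attribute on an instance falls back to the class attribute when the instance defines none. A small illustration (the `Logger` class here is hypothetical, not from this repository):

```python
class Logger:
    level = 2  # class attribute shared by all instances

log = Logger()
print(log.level)     # 2: instance lookup falls back to the class
log.level = 5        # creates an instance attribute that shadows it
print(log.level)     # 5
print(Logger.level)  # 2: the class attribute is unchanged
```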
src/Aws/Aws.py

```diff
@@ -1,7 +1,6 @@
 import typing
 
 from ..Cli import Cli
-from ..Config import Config
 from ..Stdout import Stdout
 from ..Enums import Namespace, StdoutLevel
 from ..Archive.Archive import Archive
@@ -18,7 +17,6 @@ class Aws():
         """
 
         self.archive = archive
-        self.__config = Config().config
         self.__stdout = Stdout(Namespace.AWS)
 
     def upload(self) -> None:
@@ -26,15 +24,16 @@ class Aws():
         Create a backup of an Archive instance to AWS
         """
 
-        self.__stdout.log(f"Starting upload of archive for: {self.archive.item['path_target_from']}")
+        self.__stdout.log(f"Starting upload of archive for: {self.archive.item.abspath_target}")
         self.__stdout.debug(f"Archive object: {self.archive}")
+        self.__stdout.info(f"Uploading to: {self.archive.item.abspath_destination}")
 
         args = [
             "aws",
             "s3",
             "cp",
             self.archive.output_path,
-            f"s3://{self.__config['config']['cloud']['bucket']}/{self.archive.item['path_target_to'].strip('/')}"
+            self.archive.item.abspath_destination
         ]
 
         if Aws.dry_run:
@@ -47,7 +46,7 @@ class Aws():
         cmd.run(args)
 
         if cmd.stderr:
-            self.__stdout.error(f"Failed to upload archive for: {self.archive.item['path_target_from']}")
+            self.__stdout.error(f"Failed to upload archive for: {self.archive.item.abspath_target}")
             return
 
         self.__stdout.info("Cleaning up temporary files")
@@ -55,4 +54,4 @@ class Aws():
         cmd.cleanup()
         self.archive.cleanup()
 
-        self.__stdout.ok(f"Archive uploaded: {self.archive.item['path_target_from']}")
+        self.__stdout.ok(f"Archive uploaded to: {self.archive.item.abspath_destination}")
```
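The upload now targets the item's own S3 URL instead of a globally configured bucket. As a sketch (values are the README's examples), the assembled command looks like:

```python
# Sketch of the aws s3 cp invocation built above, with example values.
archive_path = "/tmp/archive.7z"
destination = "s3://my-bucket/archive.7z"
args = ["aws", "s3", "cp", archive_path, destination]
print(" ".join(args))  # aws s3 cp /tmp/archive.7z s3://my-bucket/archive.7z
```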