Mirror of https://codeberg.org/vlw/3rd.git, synced 2026-01-11 22:36:01 +01:00
refactor: better config structure and config value accessing
parent 503a88b52a
commit adb3fcdcf2
8 changed files with 211 additions and 76 deletions
@@ -1,26 +1,16 @@
-{
-    "config": {
-        "cloud": {
-            "bucket": "my-bucket"
-        },
-        "archive": {
-            "password": "mypassword",
-            "compression_level": 10,
-            "default_path_temp": "/tmp/"
-        }
+[
+    {
+        "password": "mypassword",
+        "compression": 10,
+        "abspath_temp": null,
+        "abspath_target": "<replace with ABSOLUTE path to a target directory>",
+        "abspath_destination": "s3://<replace with bucket>/<replace with destination>"
     },
-    "archive": [
-        {
-            "compress": true,
-            "path_temp": null,
-            "path_target_to": "<replace with bucket object path>",
-            "path_target_from": "<replace with ABSOLUTE path to a target directory>"
-        },
-        {
-            "compress": true,
-            "path_temp": null,
-            "path_target_to": "<replace with bucket object path>",
-            "path_target_from": "<replace with ABSOLUTE path to a target directory>"
-        }
-    ]
-}
+    {
+        "password": "mypassword",
+        "compression": 10,
+        "abspath_temp": null,
+        "abspath_target": "<replace with ABSOLUTE path to a target directory>",
+        "abspath_destination": "s3://<replace with bucket>/<replace with destination>"
+    }
+]
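The example config is now a flat JSON list of per-item settings instead of a nested object with a shared "config" block. A minimal sketch of loading and iterating such a list with only the standard library; the EXAMPLE string and its values are illustrative, not taken from the repository:

import json

# Illustrative item list in the new flat format (values are placeholders).
EXAMPLE = """
[
    {
        "password": "mypassword",
        "compression": 10,
        "abspath_temp": null,
        "abspath_target": "/home/user/documents",
        "abspath_destination": "s3://my-bucket/backups/documents"
    }
]
"""

items = json.loads(EXAMPLE)
for item in items:
    # Each item is self-contained: there is no shared "config" block to merge in.
    print(item["abspath_target"], "->", item["abspath_destination"])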
.gitignore (vendored, 2 changed lines)

@@ -1,3 +1,3 @@
-.config.json
+.example.config.json
 
 __pycache__
run.py (23 changed lines)

@@ -9,21 +9,18 @@ from src.Enums import StdoutLevel, Namespace
 
 stdout = Stdout(Namespace.CLI)
 
-def autorun() -> None:
+def main(file: str) -> None:
     """
-    Autorun
+    Autorun from a config file
+
+    Args:
+        file (str): Path to the config file to load
     """
 
-    for item in Config().config["archive"]:
-        stdout.info(f"Autorun: {item}")
-
-        Aws(Archive(item)).upload()
-
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Testing")
 
     parser.add_argument("-s", "--sleep", type=int, help="Global log sleep level")
     parser.add_argument("-a", "--autorun", action="store_true", help="Autorun")
+    parser.add_argument("-i", "--input", action="store_true", help="Load config file from path")
     parser.add_argument("-d", "--dryrun", action="store_true", help="Dry run")
     parser.add_argument("-l", "--log-level", type=str, help="Global log level")

@@ -49,8 +46,10 @@ if __name__ == "__main__":
 
     stdout.log("Starting...")
 
     # Autorun archives from config
     if args.autorun:
-        autorun()
+        for item in Config.from_json_file(args.input):
+            Archive(item)
 
     stdout.log("Finished!")
+
+if __name__ == "__main__":
+    main()
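run.py now reads archive items from a config file passed on the command line instead of a fixed .config.json. A minimal sketch of that wiring, assuming --input takes a path string (the committed flag uses action="store_true") and leaving out the repository's Config/Archive/Aws wrappers:

import argparse
import json


def main(file: str) -> None:
    """Load archive items from a JSON config file and report each target."""
    with open(file, "r") as f:
        items = json.load(f)

    for item in items:
        # The repository would wrap each item in Config and hand it to Archive;
        # here we only demonstrate the per-item iteration.
        print(f"Would archive: {item['abspath_target']}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Testing")
    parser.add_argument("-i", "--input", type=str, help="Load config file from path")
    args = parser.parse_args()

    if args.input:
        main(args.input)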
@@ -10,17 +10,17 @@ from .Filesystem import PATH_MANIFEST, Filesystem
 from ..Enums import Namespace, Format, StdoutLevel
 
 class Archive():
-    def __init__(self, item: dict):
+    def __init__(self, item: Config):
         """
         Create a new Archive instance for a target item
 
         Args:
-            item (dict): A dictionary of archive instructions
+            item (Config): Target item to archive
         """
 
         self.item = item
-        self.__fs = Filesystem(self.item["path_target_from"])
-        self.__config = Config().config["config"]
-
+        self.__fs = Filesystem(self.item.abspath_target)
         self.__stdout = Stdout(Namespace.ARCHIVE)
 
         if self.__fs.valid:

@@ -37,13 +37,9 @@ class Archive():
             str: Absolute pathname to target zip file
         """
 
-        output_path = self.__config["archive"]["default_path_temp"]
+        filename = hashlib.md5(self.item.abspath_target.encode()).hexdigest()
 
-        # Override temporary file location if specified
-        if self.item["path_temp"]:
-            output_path = self.item["path_temp"]
-
-        return f"{output_path.rstrip('/')}/{hashlib.md5(self.item['path_target_from'].encode()).hexdigest()}.7z"
+        return f"{self.item.abspath_temp.rstrip('/')}/{filename}.7z"
 
     def cleanup(self) -> None:
         """

@@ -58,31 +54,31 @@
         Skip archiving of target item
         """
 
-        self.__stdout.warn(f"Archiving skipped for: {self.item['path_target_from']}")
+        self.__stdout.warn(f"Archiving skipped for: {self.item.abspath_target}")
 
     def __compress(self) -> None:
         """
         Compress the target path
         """
 
-        self.__stdout.log(f"Starting compression for: {self.item['path_target_from']}").sleep()
+        self.__stdout.log(f"Starting compression for: {self.item.abspath_target}").sleep()
 
         # Prepare command line arguments
         args = [
             "7z",
             "a",
             "-t7z",
-            f"-mx={self.__config['archive']['compression_level']}"
+            f"-mx={self.item.compression}"
         ]
 
         # Enable encryption if archive password is set
-        if self.__config["archive"]["password"]:
+        if self.item.password:
             args.append("-mhe=on")
-            args.append(f"-p{self.__config['archive']['password']}")
+            args.append(f"-p{self.item.password}")
 
         # Append output path and file list manifest arguments for 7zip
         args.append(self.output_path)
-        args.append(self.item["path_target_from"])
+        args.append(self.item.abspath_target)
 
         # Exclude directories thats
         for exclude in self.__fs.common_relative_paths():

@@ -96,6 +92,6 @@ class Archive():
             return self.__die()
 
         self.__stdout.info(f"Temporary archive placed at: {self.__fs.path}").sleep()
-        self.__stdout.ok(f"Compression completed for: {self.item['path_target_from']}")
+        self.__stdout.ok(f"Compression completed for: {self.item.abspath_target}")
 
         cmd.cleanup()
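The temporary archive path is now derived from the item itself: the MD5 digest of the target path becomes the file name, placed under the item's abspath_temp directory. A standalone sketch of that naming scheme; the function name and sample paths are illustrative:

import hashlib


def temp_archive_path(abspath_temp: str, abspath_target: str) -> str:
    """Build the temporary .7z path for a target directory."""
    # Hashing the absolute target path gives the same archive name on every
    # run for the same target, without exposing the directory name itself.
    filename = hashlib.md5(abspath_target.encode()).hexdigest()
    return f"{abspath_temp.rstrip('/')}/{filename}.7z"


print(temp_archive_path("/tmp", "/home/user/documents"))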
@@ -1,13 +1,10 @@
 import os
 import tempfile
-from typing import Union
 
 from ..Config import Config
 from ..Stdout import Stdout
 from ..Enums import Namespace
 
 PATH_MANIFEST = f"{tempfile.gettempdir().rstrip('/')}/archive_manifest.txt"
 
 class Filesystem():
     def __init__(self, path: str):
         """

@@ -19,7 +16,6 @@ class Filesystem():
 
         self.valid = True
         self.path = path
-        self.__config = Config().config
         self.__stdout = Stdout(Namespace.FILESYSTEM)
 
         if not os.path.exists(self.path):

@@ -37,7 +33,7 @@
 
         paths = []
 
-        for item in self.__config["archive"]:
+        for item in Config.pathnames:
             paths.append(item["path_target_from"])
 
         return paths
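Config.pathnames is a class-level set that is filled while the config file is loaded and later read by Filesystem to enumerate every configured target. A minimal sketch of that shared-set pattern; registering the path in __init__ is a simplification (the diff populates the set in from_json_file), and the sample paths are made up:

class Config:
    # Class-level set shared by every Config instance; populated while the
    # config file is loaded so any consumer can see all target paths.
    pathnames = set()

    def __init__(self, item: dict):
        self.item = item
        Config.pathnames.add(item["abspath_target"])


Config({"abspath_target": "/home/user/documents"})
Config({"abspath_target": "/home/user/photos"})

for path in sorted(Config.pathnames):
    print(path)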
src/Config.py (169 changed lines)

@@ -1,25 +1,174 @@
 import json
 import typing
+import tempfile
 from pathlib import Path
 from typing import Self, Union
 
-CONFIG_FILEPATH = Path.cwd() / ".config.json"
+from .Enums import ConfigKeys
 
 class Config():
-    def __init__(self):
+    pathnames = set()
+
+    @staticmethod
+    def for_each(items: list) -> Self:
+        """
+        Returns a generator which iterates over each item in a list of item configs
+
+        Args:
+            items (list): The list to iterate over
+
+        Returns:
+            Self: Instance of the Config class
+
+        Yields:
+            Iterator[Self]: Config class for the current item
+        """
+
+        for item in items:
+            yield Config(item)
+
+    @staticmethod
+    def from_json_file(pathname: str) -> Self:
+        """
+        Load item configs from a JSON file
+
+        Args:
+            pathname (str): _description_
+
+        Returns:
+            Self: _description_
+        """
+
+        with open(pathname, "r") as f:
+            json = json.load(f)
+
+        for item in json:
+            Config.pathnames.add(item[ConfigKeys.ABSPATH_TARGET.value])
+
+        Config.for_each(json)
+
+    @staticmethod
+    def __throw_missing_key(key: ConfigKeys) -> None:
+        """
+        Raises a KeyError for an item config key if it does not exist
+
+        Args:
+            key (ConfigKeys): The key to raise an error for
+
+        Raises:
+            KeyError: Raised from an item config key
+        """
+
+        raise KeyError(f"Expected required item config key '{key.value}' but it was not found")
+
+    @staticmethod
+    def __throw_value_error(key: ConfigKeys, expected_type: str) -> None:
+        """
+        Raise a ValueError for a key with an expected type
+
+        Args:
+            key (ConfigKeys): The item config key to raise an error for
+            expected_type (str): The expected type
+
+        Raises:
+            ValueError: Raised from an item config key and expected value type
+        """
+
+        raise ValueError(f"Item config key '{key.value}' expects a value of type {expected_type}")
+
+    def __init__(self, item: dict):
         """
         Create a new Config instance
         """
 
-        with open(CONFIG_FILEPATH, "r") as f:
-            self.__config = json.load(f)
+        self.__item = item
 
     @property
-    def config(self) -> dict:
+    def password(self) -> str|False:
         """
-        Returns config variables as a dictonary
+        Returns the password for this item, or None if unset
 
         Returns:
-            dict: Confg values
+            str|False: Password or None if no password is set
         """
 
-        return dict(self.__config)
+        if not self.__key_exists(ConfigKeys.PASSWORD.value):
+            return False
+
+        return self.__item[ConfigKeys.PASSWORD.value] if isinstance(self.__item[ConfigKeys.PASSWORD.value], str) else None
+
+    @property
+    def compression(self) -> int:
+        """
+        Returns the compression level for this item, or false if STORE mode should be used
+
+        Returns:
+            str|False: Compression level for this item, false if compression is disabled
+        """
+
+        if not self.__key_exists(ConfigKeys.COMPRESSION.value):
+            return 0
+
+        if not isinstance(self.__item[ConfigKeys.COMPRESSION.value], int) or self.__item[ConfigKeys.COMPRESSION.value] == 0:
+            return 0
+
+        return max(1, min(self.__item[ConfigKeys.COMPRESSION.value], 10))
+
+    @property
+    def abspath_temp(self) -> str:
+        """
+        Returns the path to the directory where the created archive will be stored until it's uploaded
+
+        Returns:
+            str: Absolute path to the destination directory
+        """
+
+        if not self.__key_exists(ConfigKeys.ABSPATH_TEMP.value):
+            return tempfile.gettempdir()
+
+        return self.__item[ConfigKeys.ABSPATH_TEMP.value] if isinstance(self.__item[ConfigKeys.ABSPATH_TEMP.value], str) else tempfile.gettempdir()
+
+    @property
+    def abspath_target(self) -> str:
+        """
+        Returns an absolute path to the target to be archived
+
+        Returns:
+            str: Absolute path to the target
+        """
+
+        if not self.__key_exists(ConfigKeys.ABSPATH_TARGET.value):
+            return Config.__throw_missing_key(ConfigKeys.ABSPATH_TARGET)
+
+        if not isinstance(self.__item[ConfigKeys.ABSPATH_TARGET.value], str):
+            return Config.__throw_value_error(ConfigKeys.ABSPATH_TARGET, str)
+
+        return self.__item[ConfigKeys.ABSPATH_TARGET.value]
+
+    @property
+    def abspath_destination(self) -> str:
+        """
+        Returns an absolute path to the target to be archived
+
+        Returns:
+            str: Absolute path to the target
+        """
+
+        if not self.__key_exists(ConfigKeys.ASBPATH_DESTINATION.value):
+            return Config.__throw_missing_key(ConfigKeys.ASBPATH_DESTINATION)
+
+        if not isinstance(self.__item[ConfigKeys.ASBPATH_DESTINATION.value], str):
+            return Config.__throw_value_error(ConfigKeys.ASBPATH_DESTINATION, str)
+
+        return self.__item[ConfigKeys.ASBPATH_DESTINATION.value]
+
+    def __key_exists(self, key: str) -> bool:
+        """
+        Returns true if a property key is defined for the current item
+
+        Args:
+            key (str): The key to test
+
+        Returns:
+            bool: True if key exists
+        """
+
+        return key in self.__item
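Item values are now read through validating properties on Config instead of raw dictionary lookups: optional keys fall back to defaults, required keys raise. A simplified sketch of that accessor pattern using plain string keys rather than the ConfigKeys enum; the class name ItemConfig and the sample item are illustrative:

import tempfile


class ItemConfig:
    """Illustrative per-item accessors in the spirit of the new Config class."""

    def __init__(self, item: dict):
        self.__item = item

    @property
    def password(self):
        # Optional: absent or non-string means "no encryption".
        value = self.__item.get("password")
        return value if isinstance(value, str) else None

    @property
    def compression(self) -> int:
        # Optional: clamp to 1..10; 0 or missing means store-only.
        value = self.__item.get("compression", 0)
        if not isinstance(value, int) or value == 0:
            return 0
        return max(1, min(value, 10))

    @property
    def abspath_temp(self) -> str:
        # Optional: fall back to the system temp directory.
        value = self.__item.get("abspath_temp")
        return value if isinstance(value, str) else tempfile.gettempdir()

    @property
    def abspath_target(self) -> str:
        # Required: fail loudly if missing or of the wrong type.
        value = self.__item.get("abspath_target")
        if not isinstance(value, str):
            raise KeyError("item config requires a string 'abspath_target'")
        return value


item = ItemConfig({"abspath_target": "/home/user/documents", "compression": 15})
print(item.abspath_target, item.compression, item.abspath_temp, item.password)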
@@ -1,5 +1,12 @@
 from enum import Enum
 
+class ConfigKeys(Enum):
+    PASSWORD = "password"
+    COMPRESSION = "compression"
+    ABSPATH_TEMP = "abspath_temp"
+    ABSPATH_TARGET = "abspath_target"
+    ASBPATH_DESTINATION = "abspath_destination"
+
 class Namespace(Enum):
     AWS = "AWS"
     CLI = "Command"
@@ -1,7 +1,6 @@
 import typing
 
 from ..Cli import Cli
-from ..Config import Config
 from ..Stdout import Stdout
 from ..Enums import Namespace, StdoutLevel
 from ..Archive.Archive import Archive

@@ -18,7 +17,6 @@ class Aws():
         """
 
         self.archive = archive
-        self.__config = Config().config
         self.__stdout = Stdout(Namespace.AWS)
 
     def upload(self) -> None:

@@ -26,7 +24,7 @@
         Create a backup of an Archive instance to AWS
         """
 
-        self.__stdout.log(f"Starting upload of archive for: {self.archive.item['path_target_from']}")
+        self.__stdout.log(f"Starting upload of archive for: {self.archive.item.abspath_target}")
         self.__stdout.debug(f"Archive object: {self.archive}")
 
         args = [

@@ -34,7 +32,7 @@
             "s3",
             "cp",
             self.archive.output_path,
-            f"s3://{self.__config['config']['cloud']['bucket']}/{self.archive.item['path_target_to'].strip('/')}"
+            self.archive.item.abspath_destination
         ]
 
         if Aws.dry_run:

@@ -47,7 +45,7 @@
         cmd.run(args)
 
         if cmd.stderr:
-            self.__stdout.error(f"Failed to upload archive for: {self.archive.item['path_target_from']}")
+            self.__stdout.error(f"Failed to upload archive for: {self.archive.item.abspath_target}")
             return
 
         self.__stdout.info("Cleaning up temporary files")

@@ -55,4 +53,4 @@
         cmd.cleanup()
         self.archive.cleanup()
 
-        self.__stdout.ok(f"Archive uploaded: {self.archive.item['path_target_from']}")
+        self.__stdout.ok(f"Archive uploaded: {self.archive.item.abspath_target}")
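The upload command now takes the full s3:// destination straight from the item's abspath_destination instead of assembling it from a global bucket name and a per-item object path. A minimal sketch of building that aws s3 cp invocation with subprocess; the repository wraps command execution in its own Cli class, and the example paths are placeholders:

import subprocess


def upload(output_path: str, abspath_destination: str, dry_run: bool = True) -> None:
    """Copy a local archive to its configured s3:// destination."""
    args = ["aws", "s3", "cp", output_path, abspath_destination]

    if dry_run:
        # `aws s3 cp --dryrun` reports what would be transferred without uploading.
        args.append("--dryrun")

    subprocess.run(args, check=True)


# upload("/tmp/example.7z", "s3://my-bucket/backups/example.7z")  # requires the aws CLI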