Compare commits

No commits in common. "master" and "2.0.0" have entirely different histories.

8 changed files with 18 additions and 36 deletions

.gitignore (vendored, 2 changed lines)

@@ -1,3 +1,3 @@
-.config.json
+.example.config.json
 __pycache__

@@ -43,9 +43,7 @@ Schedule this backup script to run with a crontab entry, for example:
 Which will run at 2:30 each Wednesday.
 # Config
-The config file (`.config.json` by default) is used to define which directories to archive with parameters.
-Directories are specified as an array of objects, each object has key value properties with the following format:
+The config file `.config.json` is used to define parameters and which directories to archive (in autorun mode).
 ```json
 [
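
The removed README line describes the config as an array of objects with key/value properties. As a concrete illustration, a config of that shape could be written and read back as in the sketch below; the JSON key names mirror identifiers that appear elsewhere in this diff (`abspath_target`, `path_target_from`, `abspath_destination`, `password`), but their exact spelling in the real config file is an assumption.

```python
import json

# A hypothetical .example.config.json: an array of objects, one per directory
# to archive. Key names mirror identifiers seen in this diff; the real keys
# and the extra fields are assumptions.
example_items = [
    {
        "abspath_target": "/home/user/documents",      # directory to archive
        "abspath_destination": "s3://my-backups/docs", # upload destination (assumed key)
        "path_target_from": "/home/user",              # base path (assumed meaning)
        "password": "optional-archive-password",       # optional (assumed key)
    },
]

# Write the example file, then read it back the way Config.from_json_file
# appears to: one json.load over the whole array.
with open(".example.config.json", "w") as f:
    json.dump(example_items, f, indent=2)

with open(".example.config.json", "r") as f:
    for item in json.load(f):
        print(item["abspath_target"])
```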

run.py (12 changed lines)

@@ -1,4 +1,3 @@
-import os
 import typing
 import argparse
@@ -10,7 +9,7 @@ from src.Enums import StdoutLevel, Namespace
 stdout = Stdout(Namespace.CLI)
-def main() -> None:
+def main(file: str) -> None:
     """
     Autorun from a config file
@@ -21,7 +20,7 @@ def main() -> None:
     parser = argparse.ArgumentParser(description="Testing")
     parser.add_argument("-s", "--sleep", type=int, help="Global log sleep level")
-    parser.add_argument("-i", "--input", help="Load config file from path",default=".config.json")
+    parser.add_argument("-i", "--input", action="store_true", help="Load config file from path")
     parser.add_argument("-d", "--dryrun", action="store_true", help="Dry run")
     parser.add_argument("-l", "--log-level", type=str, help="Global log level")
@@ -43,17 +42,12 @@ def main() -> None:
     # Set enable dry run
     if args.dryrun:
         Aws.dry_run = True
-        Archive.preserve_archives = True
         stdout.ok("Dry run enabled")
     stdout.log("Starting...")
-    if not os.path.isfile(args.input):
-        stdout.error(f"No config file found at path: '{args.input}'")
-        exit(1)
     for item in Config.from_json_file(args.input):
-        Aws(Archive(item)).upload()
+        Archive(item)
     stdout.log("Finished!")

@@ -6,12 +6,10 @@ import subprocess
 from ..Cli import Cli
 from ..Stdout import Stdout
 from ..Config import Config
-from .Filesystem import Filesystem
+from .Filesystem import PATH_MANIFEST, Filesystem
 from ..Enums import Namespace, Format, StdoutLevel
 class Archive():
-    preserve_archives = False
     def __init__(self, item: Config):
         """
         Create a new Archive instance for a target item
@@ -48,9 +46,6 @@ class Archive():
         Remove archive file
         """
-        if Archive.preserve_archives:
-            return
         os.remove(self.output_path)
         self.__stdout.info(f"Archive removed: {self.output_path}")
@@ -61,8 +56,6 @@ class Archive():
         self.__stdout.warn(f"Archiving skipped for: {self.item.abspath_target}")
-        self.cleanup()
     def __compress(self) -> None:
         """
         Compress the target path
@@ -102,4 +95,3 @@ class Archive():
         self.__stdout.ok(f"Compression completed for: {self.item.abspath_target}")
         cmd.cleanup()
-        self.cleanup()
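
The lines removed from this class implement a dry-run guard: a class-level `preserve_archives` switch that makes `cleanup()` return before deleting the archive file. A minimal standalone sketch of that pattern, using a temporary file in place of the real archive, is:

```python
import os
import tempfile

class Archive:
    # Class-level switch mirroring the removed preserve_archives attribute:
    # when True, cleanup() returns early and the archive file stays on disk.
    preserve_archives = False

    def __init__(self, output_path: str):
        self.output_path = output_path

    def cleanup(self) -> None:
        if Archive.preserve_archives:
            return
        os.remove(self.output_path)
        print(f"Archive removed: {self.output_path}")

# Usage: the flag is flipped once (e.g. when --dryrun is passed) and every
# instance honours it.
fd, path = tempfile.mkstemp(suffix=".tar.gz")
os.close(fd)
Archive.preserve_archives = True
Archive(path).cleanup()
print(os.path.exists(path))  # True: the archive was preserved
os.remove(path)              # remove the temporary file ourselves
```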

@@ -33,8 +33,8 @@ class Filesystem():
         paths = []
-        for path in Config.pathnames:
-            paths.append(path)
+        for item in Config.pathnames:
+            paths.append(item["path_target_from"])
         return paths
@@ -71,7 +71,6 @@ class Filesystem():
         Returns:
             str | None: Common pathname with base path or None if no common path (or is base path)
         """
         base_path = os.path.normpath(self.path)
         target_path = os.path.normpath(path)
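
Only the two `normpath` calls are visible in the second hunk, so the actual comparison is not shown. One common way to implement the documented behaviour (common pathname with the base path, or None if there is no common path or the common path is the base path itself) is `os.path.commonpath`; the sketch below is an assumption, not the project's code.

```python
import os

def common_with_base(base: str, path: str) -> str | None:
    """Common pathname of base and path, or None if there is no common path
    (or the common path is the base path itself). Sketch only."""
    base_path = os.path.normpath(base)
    target_path = os.path.normpath(path)
    try:
        common = os.path.commonpath([base_path, target_path])
    except ValueError:
        # e.g. one absolute and one relative path: nothing in common.
        return None
    if common in (base_path, os.sep, ""):
        return None
    return common

print(common_with_base("/data/backups/home", "/data/backups/etc"))  # /data/backups
print(common_with_base("/data/backups", "/data/backups/home"))      # None (common path is the base)
print(common_with_base("/data/backups", "relative/path"))           # None (no common path)
```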

@@ -39,12 +39,12 @@ class Config():
         """
         with open(pathname, "r") as f:
-            config = json.load(f)
-        for item in config:
+            json = json.load(f)
+        for item in json:
             Config.pathnames.add(item[ConfigKeys.ABSPATH_TARGET.value])
-        return Config.for_each(config)
+        Config.for_each(json)
     @staticmethod
     def __throw_missing_key(key: ConfigKeys) -> None:
@@ -83,7 +83,7 @@ class Config():
         self.__item = item
     @property
-    def password(self) -> str|bool:
+    def password(self) -> str|False:
         """
         Returns the password for this item, or None if unset
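
The hunks show only fragments of `from_json_file` and the `password` property. A self-contained sketch of the left-hand shape (load the JSON array, record each item's target path, then build one `Config` per item) might look like the following; `ConfigKeys` is reduced to the single member that appears here, and its string value is an assumption.

```python
import json
from enum import Enum

class ConfigKeys(Enum):
    # Only ABSPATH_TARGET appears in the hunks; its string value is assumed.
    ABSPATH_TARGET = "abspath_target"

class Config:
    pathnames: set[str] = set()

    def __init__(self, item: dict):
        self.__item = item

    @property
    def password(self) -> str | bool:
        # Left-hand annotation str|bool: the password string, or False when unset.
        return self.__item.get("password", False)

    @classmethod
    def for_each(cls, items: list[dict]) -> list["Config"]:
        return [cls(item) for item in items]

    @classmethod
    def from_json_file(cls, pathname: str) -> list["Config"]:
        with open(pathname, "r") as f:
            config = json.load(f)
        for item in config:
            cls.pathnames.add(item[ConfigKeys.ABSPATH_TARGET.value])
        return cls.for_each(config)

# Usage sketch:
# for cfg in Config.from_json_file(".config.json"):
#     print(cfg.password)
```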

@@ -103,7 +103,7 @@ class Stdout():
         """
         # Bail out if stdout is disabled
-        if self.global_level.value == StdoutLevel.NONE.value:
+        if Stdout.global_level.value == StdoutLevel.NONE.value:
             return self.die()
         print(f"{Format.HEADER.value}> {self.namespace.value}:{Format.ENDC.value}{msg}{Format.ENDC.value}")
@@ -136,7 +136,7 @@ class Stdout():
         """
         # Bail out if log level is less than verbose
-        if not self.global_level.value >= StdoutLevel.STANDARD.value:
+        if not Stdout.global_level.value >= StdoutLevel.STANDARD.value:
             return self.die()
         return self.print(f" {msg}")
@@ -153,7 +153,7 @@ class Stdout():
         """
         # Bail out if log level is less than verbose
-        if not self.global_level.value >= StdoutLevel.VERBOSE.value:
+        if not Stdout.global_level.value >= StdoutLevel.VERBOSE.value:
             return self.die()
         return self.print(f" {msg}")
@@ -170,7 +170,7 @@ class Stdout():
         """
         # Bail out if log level is less than verbose
-        if not self.global_level.value >= StdoutLevel.DEBUG.value:
+        if not Stdout.global_level.value >= StdoutLevel.DEBUG.value:
             return self.die()
         return self.print(f" {msg}")
@@ -187,7 +187,7 @@ class Stdout():
         """
         # Bail out if log level is less than default
-        if not self.global_level.value >= StdoutLevel.STANDARD.value:
+        if not Stdout.global_level.value >= StdoutLevel.STANDARD.value:
             return self.die()
         return self.print(f" {Format.WARNING.value}! WARN: {msg}")

@@ -26,7 +26,6 @@ class Aws():
         self.__stdout.log(f"Starting upload of archive for: {self.archive.item.abspath_target}")
         self.__stdout.debug(f"Archive object: {self.archive}")
-        self.__stdout.info(f"Uploading to: {self.archive.item.abspath_destination}")
         args = [
             "aws",
@@ -54,4 +53,4 @@ class Aws():
         cmd.cleanup()
         self.archive.cleanup()
-        self.__stdout.ok(f"Archive uploaded to: {self.archive.item.abspath_destination}")
+        self.__stdout.ok(f"Archive uploaded: {self.archive.item.abspath_target}")