Mirror of https://codeberg.org/vlw/3rd.git
Synced 2026-01-11 22:36:01 +01:00

Commit e32d8ccf66 (parent cd1c8c4e91)
wip: 2026-01-01T15:37:20+0100 (1767278240)
4 changed files with 158 additions and 33 deletions

run.py (30 changes)
@@ -1,12 +1,13 @@
 import os
-import typing
 import argparse
+from typing import Union
 
 from src.Config import Config
 from src.Stdout import Stdout
 from src.Upload.Aws import Aws
 from src.Archive.Archive import Archive
 from src.Enums import StdoutLevel, Namespace
+from src.Archive.Filesystem import Filesystem
 
 stdout = Stdout(Namespace.CLI)
 
@@ -21,17 +22,13 @@ def main() -> None:
     parser = argparse.ArgumentParser(description="Testing")
 
     parser.add_argument("-s", "--sleep", type=int, help="Global log sleep level")
-    parser.add_argument("-i", "--input", help="Load config file from path",default=".config.json")
+    parser.add_argument("-c", "--cache", type=Union[str, bool], help="Path to a cache file", default=True)
+    parser.add_argument("-i", "--input", help="Load config file from path", default=".config.json")
     parser.add_argument("-d", "--dryrun", action="store_true", help="Dry run")
     parser.add_argument("-l", "--log-level", type=str, help="Global log level")
 
     args = parser.parse_args()
 
-    # Set custom global sleep level
-    if args.sleep:
-        Stdout.global_sleep = args.sleep
-        stdout.ok(f"Setting global log sleep level to: {Stdout.global_sleep} second(s)")
-
     # Set custom global log level
     if args.log_level:
         try:
@@ -40,6 +37,16 @@ def main() -> None:
         except KeyError:
             raise ValueError(f"{args.log_level} is not a valid StdoutLevel")
 
+    # Set custom cache file
+    if args.cache != Filesystem.cache_file:
+        Filesystem.cache_file = args.cache
+        stdout.ok(f"Using cache file: {Filesystem.cache_file}")
+
+    # Set custom global sleep level
+    if args.sleep:
+        Stdout.global_sleep = args.sleep
+        stdout.ok(f"Setting global log sleep level to: {Stdout.global_sleep} second(s)")
+
     # Set enable dry run
     if args.dryrun:
         Aws.dry_run = True
@@ -53,7 +60,14 @@ def main() -> None:
         exit(1)
 
     for item in Config.from_json_file(args.input):
-        Aws(Archive(item)).upload()
+        archive = Archive(item)
+
+        # Skip paths that have not been modified since last upload
+        if not archive.fs.is_modified:
+            stdout.log(f"'{archive.fs.path}' has not changed since last upload, moving on")
+            continue
+
+        Aws(archive.compress()).upload()
 
     stdout.log("Finished!")
 
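Side note on the new flag: argparse's type= expects a callable that converts a single command-line string, so a value that may be either a pathname or a boolean is usually handled with a small converter. A minimal sketch of that pattern, with a hypothetical cache_arg helper and made-up "disable" literals (none of this is part of the commit itself):

import argparse

def cache_arg(value: str):
    # Hypothetical converter: a few literals disable the cache,
    # anything else is treated as a pathname to the cache file.
    if value.lower() in ("false", "0", "off", "no"):
        return False
    return value

parser = argparse.ArgumentParser(description="Testing")
parser.add_argument("-c", "--cache", type=cache_arg, default=True,
                    help="Path to a cache file")

print(parser.parse_args(["-c", "/tmp/3rd_cache.json"]).cache)  # /tmp/3rd_cache.json
print(parser.parse_args([]).cache)                             # True (default: caching enabled)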
src/Archive/Archive.py

@@ -1,6 +1,5 @@
 import os
 import typing
-import hashlib
 import subprocess
 
 from ..Cli import Cli
@@ -21,15 +20,10 @@ class Archive():
         """
 
         self.item = item
+        self.fs = Filesystem(self.item.abspath_target)
 
-        self.__fs = Filesystem(self.item.abspath_target)
         self.__stdout = Stdout(Namespace.ARCHIVE)
 
-        if self.__fs.valid:
-            self.__compress()
-        else:
-            self.__die()
-
     @property
     def output_path(self) -> str:
         """
@@ -39,9 +33,7 @@ class Archive():
             str: Absolute pathname to target zip file
         """
 
-        filename = hashlib.md5(self.item.abspath_target.encode()).hexdigest()
-
-        return f"{self.item.abspath_temp.rstrip('/')}/{filename}.7z"
+        return f"{self.item.abspath_temp.rstrip('/')}/{self.fs.hash}.7z"
 
     def cleanup(self) -> None:
         """
@@ -54,16 +46,7 @@ class Archive():
         os.remove(self.output_path)
         self.__stdout.info(f"Archive removed: {self.output_path}")
 
-    def __die(self) -> None:
-        """
-        Skip archiving of target item
-        """
-
-        self.__stdout.warn(f"Archiving skipped for: {self.item.abspath_target}")
-
-        self.cleanup()
-
-    def __compress(self) -> None:
+    def compress(self) -> None:
         """
         Compress the target path
         """
@@ -88,7 +71,7 @@ class Archive():
         args.append(self.item.abspath_target)
 
         # Exclude directories thats
-        for exclude in self.__fs.common_relative_paths():
+        for exclude in self.fs.common_relative_paths():
            args.append(f"-xr!{exclude}")
 
        cmd = Cli()
@@ -98,8 +81,17 @@ class Archive():
             cmd.cleanup()
             return self.__die()
 
-        self.__stdout.info(f"Temporary archive placed at: {self.__fs.path}").sleep()
+        self.__stdout.info(f"Temporary archive placed at: {self.fs.path}").sleep()
         self.__stdout.ok(f"Compression completed for: {self.item.abspath_target}")
 
         cmd.cleanup()
         self.cleanup()
+
+    def __die(self) -> None:
+        """
+        Skip archiving of target item
+        """
+
+        self.__stdout.warn(f"Archiving skipped for: {self.item.abspath_target}")
+
+        self.cleanup()
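After this change the temporary archive name and the cache key come from the same place: Filesystem.hash, the MD5 digest of the target path. A rough illustration of the resulting pathname, using a made-up target path and tempfile.gettempdir() as a stand-in for the config item's abspath_temp:

import hashlib
import tempfile

target = "/home/user/projects"        # stand-in for item.abspath_target
temp = tempfile.gettempdir()          # stand-in for item.abspath_temp

digest = hashlib.md5(target.encode()).hexdigest()   # what Filesystem.hash returns
print(f"{temp.rstrip('/')}/{digest}.7z")             # mirrors Archive.output_path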
src/Archive/Filesystem.py

@@ -1,11 +1,19 @@
 import os
+import json
+import hashlib
+import tempfile
 from typing import Union
+from datetime import datetime
 
 from ..Config import Config
 from ..Stdout import Stdout
 from ..Enums import Namespace
 
+DEFAULT_CACHE_FILE = f"{tempfile.gettempdir().rstrip('/')}/3rd_cache.json"
+
 class Filesystem():
+    __cache_file: str|bool = True
+
     def __init__(self, path: str):
         """
         Create a new filesystem instance for a target file or directory
@@ -14,14 +22,106 @@ class Filesystem():
             path (str): Target file or directory
         """
 
-        self.valid = True
         self.path = path
         self.__stdout = Stdout(Namespace.FILESYSTEM)
 
         if not os.path.exists(self.path):
-            self.valid = False
             self.__stdout.error(f"No such file or directory: {self.path}")
 
+    @property
+    def hash(self) -> str:
+        """
+        Returns a hash of the current path
+
+        Returns:
+            str: MD5 hash
+        """
+
+        return hashlib.md5(self.path.encode()).hexdigest()
+
+    @property
+    def is_modified(self) -> bool:
+        # Target will always be treated as modified if caching is disabled
+        if Filesystem.cache_file == False:
+            return True
+
+        return self.last_modified > self.last_archived
+
+    @property
+    def last_modified(self) -> datetime:
+        """
+        Get last modified datetime for target path
+
+        Returns:
+            datetime: Last modified datetime
+        """
+
+        return datetime.fromtimestamp(os.path.getmtime(self.path))
+
+    @property
+    def last_archived(self) -> datetime|bool:
+        """
+        Returns the datetime the target path was last uploaded to the remote archive
+
+        Returns:
+            datetime|bool: Datetime last uploaded, or False if never uploaded or cache is disabled
+        """
+
+        # Bail out if caching is disabled
+        if Filesystem.cache_file == False:
+            return False
+
+        self.__init_cache_file()
+
+        with open(Filesystem.cache_file, "r") as f:
+            cache = json.load(f)
+
+        if not self.hash in cache:
+            return False
+
+        return datetime.fromtimestamp(cache[self.hash])
+
+    @last_archived.setter
+    def last_archived(self, last_archived: datetime = None) -> None:
+        """
+        Set the last datetime this path was uploaded to the remote archive
+
+        Args:
+            last_archived (datetime, optional): Set last uploaded datetime. Defaults to current datetime.
+        """
+
+        # Bail out if caching is disabled
+        if Filesystem.cache_file == False:
+            return
+
+        self.__init_cache_file()
+
+        # Coerce datetime from current time
+        if not last_archived:
+            last_modified = datetime.now()
+
+        with open(Filesystem.cache_file, "r") as f:
+            cache = json.load(f)
+
+        cache[self.hash] = datetime.timestamp()
+
+        with open(Filesystem.cache_file, "w") as f:
+            json.dump(cache, f)
+
+        self.__stdout.info(f"Updated last archive date for: {self.path}")
+
+    @property
+    def cache_file(self) -> str|bool:
+        """
+        Returns the pathname to the cache file, or False if caching is disabled
+
+        Returns:
+            str|bool: Pathname to cache file, False if disabled
+        """
+
+        return self.__cache_file if self.__cache_file != True else DEFAULT_CACHE_FILE
+
     @property
     def __paths(self) -> list:
         """
@@ -61,6 +161,24 @@ class Filesystem():
 
         return common_paths
 
+    def __init_cache_file(self) -> None:
+        """
+        Create and init cache file if it does not exist
+        """
+
+        # Bail out if file already exists
+        if os.path.isfile:
+            return
+
+        # Init cache file with empty JSON object
+        with open(Filesystem.cache_file, "w") as f:
+            json.dump({}, f)
+
+        self.__stdout.ok(f"New cache file created and initialized: {Filesystem.cache_file}")
+
+        if Filesystem.cache_file == DEFAULT_CACHE_FILE:
+            self.__stdout.warn(f"It's recommended to set a custom cache file path, the current cache file at '{Filesystem.cache_file}' will be gone after system reboot")
+
     def __get_common_subpath(self, path: str) -> str | None:
         """
         Returns the pathname in common with the base path from a target path
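For orientation, the cache introduced here is a flat JSON object mapping the MD5 hash of a target path to the POSIX timestamp of its last upload; is_modified then compares the path's mtime against that timestamp. A minimal sketch of the round trip, using the current working directory as a stand-in target (paths and values are illustrative only, not taken from the commit):

import os
import json
import hashlib
import tempfile
from datetime import datetime

cache_file = f"{tempfile.gettempdir().rstrip('/')}/3rd_cache.json"  # same shape as DEFAULT_CACHE_FILE
path = os.getcwd()                                    # stand-in target path
key = hashlib.md5(path.encode()).hexdigest()          # Filesystem.hash

# Record an upload: store "now" under the path's hash,
# which is what the last_archived setter is meant to persist.
with open(cache_file, "w") as f:
    json.dump({key: datetime.now().timestamp()}, f)

# Later: decide whether the path has to be archived and uploaded again.
with open(cache_file, "r") as f:
    cache = json.load(f)

last_archived = datetime.fromtimestamp(cache[key])
last_modified = datetime.fromtimestamp(os.path.getmtime(path))
print(last_modified > last_archived)                  # Filesystem.is_modified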
src/Enums.py

@@ -1,6 +1,7 @@
 from enum import Enum
 
 class ConfigKeys(Enum):
+    META = "meta"
     PASSWORD = "password"
     COMPRESSION = "compression"
     ABSPATH_TEMP = "abspath_temp"
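The new META member follows the existing pattern: each ConfigKeys entry maps an attribute name to the literal key expected in the JSON config, so parsing code can reference the enum value instead of a bare string. A tiny illustration with a made-up config fragment (how Config actually consumes these keys is not shown in this commit):

from enum import Enum

class ConfigKeys(Enum):
    META = "meta"
    PASSWORD = "password"

raw = {"meta": {}, "password": "example"}   # hypothetical config fragment
print(raw[ConfigKeys.META.value])           # prints {}, looked up via the enum, not the bare string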