From 3d5ff490a69887f1438956e5ec95b4363364dc15 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sun, 3 Oct 2021 19:07:37 +0400 Subject: [PATCH 01/17] feature: add new option to generate the default config file in the default path (#448) As discussed in #446, it can be valuable to add a command-line option to regenerate the default config file if it was somehow eliminated after setup. The --generate-config-file flag was added for this. A sample run will look something like this: autoxtrabackup --generate-config-file --verbose 2021-10-03 19:03:07 INFO [autoxtrabackup:278] Default config file is generated in /home/shako/.autoxtrabackup/autoxtrabackup.cnf 2021-10-03 19:03:07 INFO [autoxtrabackup:313] Xtrabackup command history: 2021-10-03 19:03:07 INFO [autoxtrabackup:315] ['command', 'xtrabackup_function', 'start time', 'end time', 'duration', 'exit code'] 2021-10-03 19:03:07 INFO [autoxtrabackup:316] Autoxtrabackup completed successfully! --- mysql_autoxtrabackup/autoxtrabackup.py | 22 ++++++++++++++----- .../general_conf/generalops.py | 18 +++++++-------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index 3da19e0..2ab19b7 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -17,6 +17,7 @@ from mysql_autoxtrabackup.backup_prepare.prepare import Prepare from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.general_conf.generate_default_conf import GenerateDefaultConfig from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import version @@ -103,12 +104,11 @@ def validate_file(file: str) -> Optional[bool]: # filename extension should be .cnf pattern = re.compile(r".*\.cnf") - if pattern.match(file): - # Lastly the file should have all 5 required headers - if check_file_content(file): - return None - else: + if not pattern.match(file): raise ValueError("Invalid file extension. 
Expecting .cnf") + # Lastly the file should have all 5 required headers + if check_file_content(file): + return None return None @@ -133,6 +133,12 @@ def validate_file(file: str) -> Optional[bool]: show_default=True, help="Read options from the given file", ) +@click.option( + "--generate-config-file", + is_flag=True, + is_eager=True, + help="Create a config file template in default directory" +) @click.option("--tag", help="Pass the tag string for each backup") @click.option("--show-tags", is_flag=True, help="Show backup tags and exit") @click.option("-v", "--verbose", is_flag=True, help="Be verbose (print to console)") @@ -188,6 +194,7 @@ def all_procedure( log_file, log, defaults_file, + generate_config_file, dry_run, log_file_max_bytes, log_file_backup_count, @@ -256,6 +263,7 @@ def all_procedure( and dry_run is False and show_tags is False and run_server is False + and generate_config_file is False ): print_help(ctx, None, value=True) @@ -264,6 +272,10 @@ def all_procedure( elif show_tags and defaults_file: backup_ = Backup(config=defaults_file) backup_.show_tags(backup_dir=str(backup_options.get("backup_dir"))) + elif generate_config_file: + gen_ = GenerateDefaultConfig() + gen_.generate_config_file() + logger.info(f"Default config file is generated in {defaults_file}") elif prepare: prepare_ = Prepare(config=defaults_file, dry_run=dry_run_, tag=tag) prepare_.prepare_backup_and_copy_back() diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index e446348..66f063e 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -95,11 +95,10 @@ def backup_archive_options(self) -> Dict[str, Union[str, float]]: archive_max_size = self.con.get(section, "max_archive_size", fallback=None) if archive_max_size: archive_max_size = humanfriendly.parse_size(archive_max_size) - else: - if self.con.get(section, "archive_max_size", fallback=None): - archive_max_size = humanfriendly.parse_size( - self.con.get(section, "archive_max_size", fallback=None) - ) + elif self.con.get(section, "archive_max_size", fallback=None): + archive_max_size = humanfriendly.parse_size( + self.con.get(section, "archive_max_size", fallback=None) + ) # backward compatible with old config 'max_archive_duration' and newer 'archive_max_duration' archive_max_duration = self.con.get( @@ -107,11 +106,10 @@ def backup_archive_options(self) -> Dict[str, Union[str, float]]: ) if archive_max_duration: archive_max_duration = humanfriendly.parse_timespan(archive_max_duration) - else: - if self.con.get(section, "archive_max_size", fallback=None): - archive_max_duration = humanfriendly.parse_timespan( - self.con.get(section, "archive_max_size", fallback=None) - ) + elif self.con.get(section, "archive_max_size", fallback=None): + archive_max_duration = humanfriendly.parse_timespan( + self.con.get(section, "archive_max_size", fallback=None) + ) return { "archive_dir": self.con.get(section, "archive_dir", fallback=None), # type: ignore From 5737bb1524d231547dfda88d5e57d35cb501be28 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sun, 3 Oct 2021 19:53:53 +0400 Subject: [PATCH 02/17] bugfix: ValueError: could not convert string to float: 'None' (#449) The wrong config option was used and it fails when the tool tries to use backup archive feature. 
Now it will raise BackupArchiveNotConfigured error if options were not set and also it will check for archive_max_duration --- mysql_autoxtrabackup/autoxtrabackup.py | 6 ++++-- mysql_autoxtrabackup/backup_backup/backup_archive.py | 9 +++++++++ mysql_autoxtrabackup/general_conf/generalops.py | 4 ++-- mysql_autoxtrabackup/process_runner/errors.py | 11 +++++++++++ 4 files changed, 26 insertions(+), 4 deletions(-) diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index 2ab19b7..0cc8365 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -17,7 +17,9 @@ from mysql_autoxtrabackup.backup_prepare.prepare import Prepare from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.general_conf.generate_default_conf import GenerateDefaultConfig +from mysql_autoxtrabackup.general_conf.generate_default_conf import ( + GenerateDefaultConfig, +) from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import version @@ -137,7 +139,7 @@ def validate_file(file: str) -> Optional[bool]: "--generate-config-file", is_flag=True, is_eager=True, - help="Create a config file template in default directory" + help="Create a config file template in default directory", ) @click.option("--tag", help="Pass the tag string for each backup") @click.option("--show-tags", is_flag=True, help="Show backup tags and exit") diff --git a/mysql_autoxtrabackup/backup_backup/backup_archive.py b/mysql_autoxtrabackup/backup_backup/backup_archive.py index 51ff78e..09fa9bf 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_archive.py +++ b/mysql_autoxtrabackup/backup_backup/backup_archive.py @@ -7,6 +7,7 @@ from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.process_runner.errors import BackupArchiveNotConfigured from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import helpers @@ -126,6 +127,14 @@ def clean_old_archives(self) -> None: archive_dir = str(self.backup_archive_options.get("archive_dir")) # Finding if last full backup older than the interval or more from now! 
cleanup_msg = "Removing archive {}/{} due to {}" + if not self.backup_archive_options.get( + "archive_max_duration", None + ) and not self.backup_archive_options.get("archive_max_size", None): + raise BackupArchiveNotConfigured( + expression="BackupArchiveNotConfigured", + message="You need to both set archive_max_size and archive_max_duration in config file.", + ) + for archive in helpers.sorted_ls(archive_dir): if "_archive" in archive: archive_date = datetime.strptime(archive, "%Y-%m-%d_%H-%M-%S_archive") diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index 66f063e..f670ad2 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -106,9 +106,9 @@ def backup_archive_options(self) -> Dict[str, Union[str, float]]: ) if archive_max_duration: archive_max_duration = humanfriendly.parse_timespan(archive_max_duration) - elif self.con.get(section, "archive_max_size", fallback=None): + elif self.con.get(section, "archive_max_duration", fallback=None): archive_max_duration = humanfriendly.parse_timespan( - self.con.get(section, "archive_max_size", fallback=None) + self.con.get(section, "archive_max_duration", fallback=None) ) return { diff --git a/mysql_autoxtrabackup/process_runner/errors.py b/mysql_autoxtrabackup/process_runner/errors.py index c472318..e32d72d 100644 --- a/mysql_autoxtrabackup/process_runner/errors.py +++ b/mysql_autoxtrabackup/process_runner/errors.py @@ -55,3 +55,14 @@ def __init__(self, expression: str, message: str) -> None: self.expression = expression self.message = message log_error(self.expression, self.message) + + +class BackupArchiveNotConfigured(Error): + """ + Exception raised when archive_max_size and archive_max_duration configs are not set + """ + + def __init__(self, expression: str, message: str) -> None: + self.expression = expression + self.message = message + log_error(self.expression, self.message) From c6d6dee5685f4b803c3ccbae5213f2f7b598be15 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sun, 3 Oct 2021 19:59:47 +0400 Subject: [PATCH 03/17] Version bump --- mysql_autoxtrabackup/utils/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mysql_autoxtrabackup/utils/version.py b/mysql_autoxtrabackup/utils/version.py index 2fd781c..bd41eab 100644 --- a/mysql_autoxtrabackup/utils/version.py +++ b/mysql_autoxtrabackup/utils/version.py @@ -1,3 +1,3 @@ __all__ = "VERSION" -VERSION = "2.0.2" +VERSION = "2.0.3" From e433f4816d4073e6ab1545d6ec4960e92479b66a Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sun, 3 Oct 2021 20:12:34 +0400 Subject: [PATCH 04/17] Updated the History.md --- HISTORY.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/HISTORY.md b/HISTORY.md index 53f8c22..cc21289 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,3 +1,8 @@ +## v2.0.3 (2021-10-03) + +* , #449 by @ShahriyarR +* , #448 by @ShahriyarR + ## v2.0.2 (2021-05-06) * Increased code coverage and did code base refactoring, #444 by @shahriyarr From 6a2406c4b649f251167ea83531cc648fd880ccbd Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sat, 9 Apr 2022 18:32:13 +0400 Subject: [PATCH 05/17] Major refactorings - iteration 1 --- .deepsource.toml | 8 - docs/conf.py | 18 +- mysql_autoxtrabackup/autoxtrabackup.py | 316 +++++++++++------- .../backup_backup/__init__.py | 1 - .../backup_backup/backup_archive.py | 170 ---------- .../backup_backup/backup_builder.py | 295 ++-------------- .../backup_backup/backup_tags.py | 95 ++++++ 
.../backup_backup/backuper.py | 299 ++++++----------- .../general_conf/generalops.py | 82 +---- .../general_conf/generate_default_conf.py | 93 +----- pyproject.toml | 92 ++--- scripts/format-imports.sh | 2 +- scripts/format.sh | 2 +- 13 files changed, 465 insertions(+), 1008 deletions(-) delete mode 100644 .deepsource.toml delete mode 100644 mysql_autoxtrabackup/backup_backup/backup_archive.py create mode 100644 mysql_autoxtrabackup/backup_backup/backup_tags.py mode change 100644 => 100755 scripts/format-imports.sh diff --git a/.deepsource.toml b/.deepsource.toml deleted file mode 100644 index aebeb0f..0000000 --- a/.deepsource.toml +++ /dev/null @@ -1,8 +0,0 @@ -version = 1 - -[[analyzers]] -name = "python" -enabled = true - - [analyzers.meta] - runtime_version = "3.x.x" \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 62d576d..7f7141f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -53,18 +53,18 @@ master_doc = "index" # General information about the project. -project = u"MySQL-AutoXtrabackup" -copyright = u"2020, Shahriyar Rzayev" -author = u"Shahriyar Rzayev" +project = "MySQL-AutoXtrabackup" +copyright = "2020, Shahriyar Rzayev" +author = "Shahriyar Rzayev" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = u"{}".format(VERSION) +version = "{}".format(VERSION) # The full version, including alpha/beta/rc tags. -release = u"{}".format(VERSION) +release = "{}".format(VERSION) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -139,8 +139,8 @@ ( master_doc, "MySQLAutoXtrabackup.tex", - u"MySQL AutoXtrabackup Documentation", - u"Shahriyar Rzayev", + "MySQL AutoXtrabackup Documentation", + "Shahriyar Rzayev", "manual", ), ] @@ -154,7 +154,7 @@ ( master_doc, "mysqlautoxtrabackup", - u"MySQL AutoXtrabackup Documentation", + "MySQL AutoXtrabackup Documentation", [author], 1, ) @@ -170,7 +170,7 @@ ( master_doc, "MySQLAutoXtrabackup", - u"MySQL AutoXtrabackup Documentation", + "MySQL AutoXtrabackup Documentation", author, "MySQLAutoXtrabackup", "One line description of project.", diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index 0cc8365..cf9797a 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -6,13 +6,15 @@ from logging.handlers import RotatingFileHandler from sys import exit from sys import platform as _platform -from typing import Optional +from typing import Dict import click import humanfriendly # type: ignore import pid # type: ignore from mysql_autoxtrabackup.api import main +from mysql_autoxtrabackup.backup_backup import BackupBuilderChecker +from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags from mysql_autoxtrabackup.backup_backup.backuper import Backup from mysql_autoxtrabackup.backup_prepare.prepare import Prepare from mysql_autoxtrabackup.general_conf import path_config @@ -22,6 +24,7 @@ ) from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import version +from mysql_autoxtrabackup.utils.mysql_cli import MySQLClientHelper logger = logging.getLogger("") destinations_hash = { @@ -31,14 +34,19 @@ } -def address_matcher(plt: str) -> str: +def _address_matcher(plt: str) -> str: return destinations_hash.get(plt, ("localhost", 514)) # type: ignore -handler = 
logging.handlers.SysLogHandler(address=address_matcher(_platform)) +def _handle_logging() -> logging: + global logger, destinations_hash -# Set syslog for the root logger -logger.addHandler(handler) + handler = logging.handlers.SysLogHandler(address=_address_matcher(_platform)) + # Set syslog for the root logger + logger.addHandler(handler) + + +_handle_logging() def print_help(ctx: click.Context, param: None, value: bool) -> None: @@ -48,25 +56,29 @@ def print_help(ctx: click.Context, param: None, value: bool) -> None: ctx.exit() -def print_version(ctx: click.Context, param: None, value: bool) -> None: +def _get_version_str() -> str: + return f""" +Developed by Shahriyar Rzayev from Azerbaijan PUG(http://azepug.az) +Link : https://github.com/ShahriyarR/MySQL-AutoXtraBackup +Email: rzayev.sehriyar@gmail.com +Based on Percona XtraBackup: https://github.com/percona/percona-xtrabackup/ +MySQL-AutoXtraBackup Version: {version.VERSION} + """ + + +def _print_version(ctx: click.Context, param: None, value: bool) -> None: if not value or ctx.resilient_parsing: return - click.echo("Developed by Shahriyar Rzayev from Azerbaijan PUG(http://azepug.az)") - click.echo("Link : https://github.com/ShahriyarR/MySQL-AutoXtraBackup") - click.echo("Email: rzayev.sehriyar@gmail.com") - click.echo( - "Based on Percona XtraBackup: https://github.com/percona/percona-xtrabackup/" - ) - click.echo(f"MySQL-AutoXtraBackup Version: {version.VERSION}") + click.echo(_get_version_str()) ctx.exit() -def check_file_content(file: str) -> Optional[bool]: +def _check_file_content(file: str) -> bool: """Check if all mandatory headers and keys exist in file""" with open(file, "r") as config_file: file_content = config_file.read() - config_headers = ["MySQL", "Backup", "Encrypt", "Compress", "Commands"] + config_headers = ["MySQL", "Backup"] config_keys = [ "mysql", "mycnf", @@ -78,10 +90,6 @@ def check_file_content(file: str) -> Optional[bool]: "tmp_dir", "backup_dir", "backup_tool", - "xtra_prepare", - "start_mysql_command", - "stop_mysql_command", - "chown_command", ] for header in config_headers: @@ -95,23 +103,22 @@ def check_file_content(file: str) -> Optional[bool]: return True -def validate_file(file: str) -> Optional[bool]: +def validate_file(file: str) -> None: """ Check for validity of the file given in file path. If file doesn't exist or invalid configuration file, throw error. """ - if not os.path.isfile(file): - raise FileNotFoundError("Specified file does not exist.") - # filename extension should be .cnf pattern = re.compile(r".*\.cnf") + if not os.path.isfile(file): + raise FileNotFoundError("Specified file does not exist.") + if not pattern.match(file): raise ValueError("Invalid file extension. 
Expecting .cnf") - # Lastly the file should have all 5 required headers - if check_file_content(file): - return None - return None + # Lastly the file should have all 2 required headers + if not _check_file_content(file): + raise RuntimeError("Config file content validation failed.") @click.command() @@ -124,14 +131,14 @@ def validate_file(file: str) -> Optional[bool]: @click.option( "--version", is_flag=True, - callback=print_version, # type: ignore + callback=_print_version, expose_value=False, is_eager=True, help="Version information.", ) @click.option( "--defaults-file", - default=path_config.config_path_file, # type: ignore + default=path_config.config_path_file, show_default=True, help="Read options from the given file", ) @@ -200,124 +207,187 @@ def all_procedure( dry_run, log_file_max_bytes, log_file_backup_count, -): +) -> bool: options = GeneralClass(defaults_file) logging_options = options.logging_options backup_options = options.backup_options - - formatter = logging.Formatter( - fmt="%(asctime)s %(levelname)s [%(module)s:%(lineno)d] %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - ) + formatter = _get_formatter() if verbose: - ch = logging.StreamHandler() - # control console output log level - ch.setLevel(logging.INFO) - ch.setFormatter(formatter) - logger.addHandler(ch) + _set_log_level_format(formatter) if log_file: try: - if logging_options.get("log_file_max_bytes") and logging_options.get( - "log_file_backup_count" - ): - file_handler = RotatingFileHandler( - log_file, - mode="a", - maxBytes=int(str(logging_options.get("log_file_max_bytes"))), - backupCount=int(str(logging_options.get("log_file_backup_count"))), - ) - else: - file_handler = RotatingFileHandler( - log_file, - mode="a", - maxBytes=log_file_max_bytes, - backupCount=log_file_backup_count, - ) - file_handler.setFormatter(formatter) - logger.addHandler(file_handler) + file_handler = _get_log_rotate_handler( + log_file, + logging_options, + max_bytes=log_file_max_bytes, + backup_count=log_file_backup_count, + ) + _add_log_rotate_handler(file_handler, formatter) except PermissionError as err: - exit("{} Please consider to run as root or sudo".format(err)) + exit(f"{err} Please consider to run as root or sudo") # set log level in order: 1. user argument 2. config file 3. @click default - if log is not None: - logger.setLevel(log) - elif logging_options.get("log_level"): - logger.setLevel(str(logging_options.get("log_level"))) - else: - # this is the fallback default log-level. 
- logger.setLevel("INFO") + _set_log_level(log, logging_options) validate_file(defaults_file) pid_file = pid.PidFile(piddir=backup_options.get("pid_dir")) try: - with pid_file: # User PidFile for locking to single instance - dry_run_ = dry_run - if dry_run_: - dry_run_ = 1 - logger.warning("Dry run enabled!") - if ( - prepare is False - and backup is False - and verbose is False - and dry_run is False - and show_tags is False - and run_server is False - and generate_config_file is False - ): - print_help(ctx, None, value=True) - - elif run_server: - main.run_server() - elif show_tags and defaults_file: - backup_ = Backup(config=defaults_file) - backup_.show_tags(backup_dir=str(backup_options.get("backup_dir"))) - elif generate_config_file: - gen_ = GenerateDefaultConfig() - gen_.generate_config_file() - logger.info(f"Default config file is generated in {defaults_file}") - elif prepare: - prepare_ = Prepare(config=defaults_file, dry_run=dry_run_, tag=tag) - prepare_.prepare_backup_and_copy_back() - elif backup: - backup_ = Backup(config=defaults_file, dry_run=dry_run_, tag=tag) - backup_.all_backup() + _run_commands( + backup, + backup_options, + ctx, + defaults_file, + dry_run, + generate_config_file, + pid_file, + prepare, + run_server, + show_tags, + tag, + verbose, + ) except (pid.PidFileAlreadyLockedError, pid.PidFileAlreadyRunningError) as error: - if float( - str(backup_options.get("pid_runtime_warning")) - ) and time.time() - os.stat(pid_file.filename).st_ctime > float( - str(backup_options.get("pid_runtime_warning")) - ): - pid.fh.seek(0) - pid_str = pid.fh.read(16).split("\n", 1)[0].strip() - logger.warning( - "Pid file already exists or Pid already running! : ", str(error) - ) - logger.critical( - "Backup (pid: " - + pid_str - + ") has been running for logger than: " - + str( - humanfriendly.format_timespan( - backup_options.get("pid_runtime_warning") - ) - ) - ) - + _handle_backup_pid_exception(backup_options, error, pid_file) except pid.PidFileUnreadableError as error: - logger.warning("Pid file can not be read: " + str(error)) + logger.warning(f"Pid file can not be read: {str(error)}") except pid.PidFileError as error: - logger.warning("Generic error with pid file: " + str(error)) + logger.warning(f"Generic error with pid file: {str(error)}") - logger.info("Xtrabackup command history:") - for i in ProcessRunner.xtrabackup_history_log: - logger.info(str(i)) + _log_command_history() logger.info("Autoxtrabackup completed successfully!") return True +def _run_commands( + backup, + backup_options, + ctx, + defaults_file, + dry_run, + generate_config_file, + pid_file, + prepare, + run_server, + show_tags, + tag, + verbose, +): + with pid_file: # User PidFile for locking to single instance + dry_run_ = dry_run + if dry_run_: + dry_run_ = 1 + logger.warning("Dry run enabled!") + + builder_obj = BackupBuilderChecker(config=defaults_file, dry_run=dry_run_) + tagger = BackupTags(tag, builder_obj) + mysql_cli = MySQLClientHelper(config=defaults_file) + + if ( + prepare is False + and backup is False + and verbose is False + and dry_run is False + and show_tags is False + and run_server is False + and generate_config_file is False + ): + print_help(ctx, None, value=True) + + elif run_server: + main.run_server() + elif show_tags and defaults_file: + + Backup( + config=defaults_file, + builder_obj=builder_obj, + tagger=tagger, + mysql_cli=mysql_cli, + ).tagger.show_tags(backup_dir=str(backup_options.get("backup_dir"))) + elif generate_config_file: + 
GenerateDefaultConfig().generate_config_file() + logger.info(f"Default config file is generated in {defaults_file}") + elif prepare: + Prepare( + config=defaults_file, dry_run=dry_run_, tag=tag + ).prepare_backup_and_copy_back() + elif backup: + Backup( + config=defaults_file, + builder_obj=builder_obj, + tagger=tagger, + mysql_cli=mysql_cli, + dry_run=dry_run_, + tag=tag, + ).all_backup() + + +def _log_command_history(): + logger.info("Xtrabackup command history:") + for history in ProcessRunner.xtrabackup_history_log: + logger.info(str(history)) + + +def _add_log_rotate_handler(file_handler, formatter): + file_handler.setFormatter(formatter) + logger.addHandler(file_handler) + + +def _handle_backup_pid_exception(backup_options, error, pid_file): + pid_warning = str(backup_options.get("pid_runtime_warning")) + if float(pid_warning) and time.time() - os.stat(pid_file.filename).st_ctime > float( + pid_warning + ): + pid.fh.seek(0) + pid_str = pid.fh.read(16).split("\n", 1)[0].strip() + pid_warning = str(humanfriendly.format_timespan(pid_warning)) + logger.warning( + f"Pid file already exists or Pid already running! : {str(error)}", + ) + logger.critical( + f"Backup (pid: {pid_str}) has been running for logger than: {pid_warning}" + ) + + +def _set_log_level(log, logging_options): + if log is not None: + logger.setLevel(log) + elif logging_options.get("log_level"): + logger.setLevel(str(logging_options.get("log_level"))) + else: + # this is the fallback default log-level. + logger.setLevel("INFO") + + +def _get_log_rotate_handler( + log_file: str, logging_options: Dict, max_bytes: int, backup_count: int +): + return RotatingFileHandler( + log_file, + mode="a", + maxBytes=max_bytes or int(str(logging_options.get("log_file_max_bytes"))), + backupCount=backup_count + or int(str(logging_options.get("log_file_backup_count"))), + ) + + +def _get_formatter() -> logging: + return logging.Formatter( + fmt="%(asctime)s %(levelname)s [%(module)s:%(lineno)d] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def _set_log_level_format(formatter: logging) -> None: + ch = logging.StreamHandler() + # control console output log level + ch.setLevel(logging.INFO) + ch.setFormatter(formatter) + logger.addHandler(ch) + + if __name__ == "__main__": all_procedure() diff --git a/mysql_autoxtrabackup/backup_backup/__init__.py b/mysql_autoxtrabackup/backup_backup/__init__.py index 3e57bf0..debc22e 100644 --- a/mysql_autoxtrabackup/backup_backup/__init__.py +++ b/mysql_autoxtrabackup/backup_backup/__init__.py @@ -1,3 +1,2 @@ -from .backup_archive import BackupArchive as BackupArchive from .backup_builder import BackupBuilderChecker as BackupBuilderChecker from .backuper import Backup as Backup diff --git a/mysql_autoxtrabackup/backup_backup/backup_archive.py b/mysql_autoxtrabackup/backup_backup/backup_archive.py deleted file mode 100644 index 09fa9bf..0000000 --- a/mysql_autoxtrabackup/backup_backup/backup_archive.py +++ /dev/null @@ -1,170 +0,0 @@ -import logging -import os -import shutil -from datetime import datetime -from typing import Union - -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.general_conf import path_config -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.process_runner.errors import BackupArchiveNotConfigured -from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils import helpers - -logger = logging.getLogger(__name__) - - -class 
BackupArchive: - def __init__( - self, - config: str = path_config.config_path_file, - dry_run: Union[bool, None] = None, - tag: Union[str, None] = None, - ) -> None: - self.conf = config - self.dry = dry_run - self.tag = tag - options_obj = GeneralClass(config=self.conf) - self.backup_options = BackupBuilderChecker( - config=self.conf, dry_run=self.dry - ).backup_options - self.backup_archive_options = options_obj.backup_archive_options - - def create_backup_archives(self) -> bool: - from mysql_autoxtrabackup.backup_prepare.prepare import Prepare - - # Creating .tar.gz archive files of taken backups - file_list = os.listdir(str(self.backup_options.get("full_dir"))) - for i in file_list: - if len(file_list) == 1 or i != max(file_list): - logger.info("Preparing backups prior archiving them...") - - if self.backup_archive_options.get("prepare_archive"): - logger.info("Started to prepare backups, prior archiving!") - prepare_obj = Prepare( - config=self.conf, dry_run=self.dry, tag=self.tag - ) - status = prepare_obj.prepare_inc_full_backups() - if status: - logger.info( - "Backups Prepared successfully... {}".format(status) - ) - - if self.backup_archive_options.get("move_archive") and ( - int(str(self.backup_archive_options.get("move_archive"))) == 1 - ): - dir_name = ( - str(self.backup_archive_options.get("archive_dir")) - + "/" - + i - + "_archive" - ) - logger.info( - "move_archive enabled. Moving {} to {}".format( - self.backup_options.get("backup_dir"), dir_name - ) - ) - try: - shutil.copytree( - str(self.backup_options.get("backup_dir")), dir_name - ) - except Exception as err: - logger.error("FAILED: Move Archive") - logger.error(err) - raise - else: - return True - else: - logger.info( - "move_archive is disabled. archiving / compressing current_backup." - ) - # Multi-core tar utilizing pigz. - - # Pigz default to number of cores available, or 8 if cannot be read. - - # Test if pigz is available. - logger.info("testing for pigz...") - status = ProcessRunner.run_command("pigz --version") - archive_file = ( - str(self.backup_archive_options.get("archive_dir")) - + "/" - + i - + ".tar.gz" - ) - if status: - logger.info("Found pigz...") - # run_tar = "tar cvvf - {} {} | pigz -v > {}" \ - run_tar = ( - "tar --use-compress-program=pigz -cvf {} {} {}".format( - archive_file, - self.backup_options.get("full_dir"), - self.backup_options.get("inc_dir"), - ) - ) - else: - # handle file not found error. - logger.warning( - "pigz executeable not available. Defaulting to singlecore tar" - ) - run_tar = "tar -zcf {} {} {}".format( - archive_file, - self.backup_options.get("full_dir"), - self.backup_options.get("inc_dir"), - ) - status = ProcessRunner.run_command(run_tar) - if status: - logger.info( - "OK: Old full backup and incremental backups archived!" - ) - return True - - logger.error("FAILED: Archiving ") - raise RuntimeError("FAILED: Archiving -> {}".format(run_tar)) - return True - - def clean_old_archives(self) -> None: - logger.info("Starting cleaning of old archives") - archive_dir = str(self.backup_archive_options.get("archive_dir")) - # Finding if last full backup older than the interval or more from now! 
- cleanup_msg = "Removing archive {}/{} due to {}" - if not self.backup_archive_options.get( - "archive_max_duration", None - ) and not self.backup_archive_options.get("archive_max_size", None): - raise BackupArchiveNotConfigured( - expression="BackupArchiveNotConfigured", - message="You need to both set archive_max_size and archive_max_duration in config file.", - ) - - for archive in helpers.sorted_ls(archive_dir): - if "_archive" in archive: - archive_date = datetime.strptime(archive, "%Y-%m-%d_%H-%M-%S_archive") - else: - archive_date = datetime.strptime(archive, "%Y-%m-%d_%H-%M-%S.tar.gz") - - now = datetime.now() - - if ( - self.backup_archive_options.get("archive_max_duration") - or self.backup_archive_options.get("archive_max_size") - ) and ( - float((now - archive_date).total_seconds()) - >= float(str(self.backup_archive_options.get("archive_max_duration"))) - or float(helpers.get_directory_size(archive_dir)) - > float(str(self.backup_archive_options.get("archive_max_size"))) - ): - logger.info( - cleanup_msg.format( - archive_dir, archive, "archive_max_duration exceeded." - ) - ) - logger.info("OR") - logger.info( - cleanup_msg.format( - archive_dir, archive, "archive_max_size exceeded." - ) - ) - full_archive_path = os.path.join(archive_dir, archive) - if os.path.isdir(full_archive_path): - shutil.rmtree(full_archive_path) - else: - os.remove(full_archive_path) diff --git a/mysql_autoxtrabackup/backup_backup/backup_builder.py b/mysql_autoxtrabackup/backup_backup/backup_builder.py index c40422d..b4782a7 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_builder.py +++ b/mysql_autoxtrabackup/backup_backup/backup_builder.py @@ -1,214 +1,53 @@ # Will store necessary checks and command building actions here import logging -from os.path import isfile -from typing import Optional, Union +from typing import Optional from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) class BackupBuilderChecker: - # General pre-backup checking/extracting/untar/streaming etc. should happen here - def __init__( self, config: str = path_config.config_path_file, - dry_run: Union[bool, None] = None, + dry_run: Optional[bool] = None, ) -> None: self.conf = config self.dry = dry_run options_obj = GeneralClass(config=self.conf) self.mysql_options = options_obj.mysql_options - self.compression_options = options_obj.compression_options - self.encryption_options = options_obj.encryption_options self.backup_options = options_obj.backup_options - self.xbstream_options = options_obj.xbstream_options def general_command_builder(self) -> str: """ Method for building general options for backup command. :return: String of constructed options. 
""" - args = "" - - if self.mysql_options.get("mysql_socket"): - args += " --socket={}".format(self.mysql_options.get("mysql_socket")) - else: - args += " --host={} --port={}".format( - self.mysql_options.get("mysql_host"), - self.mysql_options.get("mysql_port"), - ) - # Adding compression support for backup - if ( - self.compression_options.get("compress") - and self.compression_options.get("compress_chunk_size") - and self.compression_options.get("compress_threads") - ): - args += ( - " --compress={}" - " --compress-chunk-size={}" - " --compress-threads={}".format( - self.compression_options.get("compress"), - self.compression_options.get("compress_chunk_size"), - self.compression_options.get("compress_threads"), - ) - ) - - # Adding encryption support for full backup - if self.encryption_options.get("encrypt"): - args += ( - " --encrypt={}" - " --encrypt-threads={}" - " --encrypt-chunk-size={}".format( - self.encryption_options.get("encrypt"), - self.encryption_options.get("encrypt_threads"), - self.encryption_options.get("encrypt_chunk_size"), - ) - ) - - if self.encryption_options.get("encrypt_key"): - if self.encryption_options.get("encrypt_key_file"): - raise AttributeError( - "--encrypt-key and --encrypt-key-file are mutually exclusive" - ) - args += " --encrypt-key={}".format( - self.encryption_options.get("encrypt_key") - ) - elif self.encryption_options.get("encrypt_key_file"): - args += " --encrypt-key-file={}".format( - self.encryption_options.get("encrypt_key_file") - ) - - # Checking if extra options were passed: - if self.backup_options.get("xtra_options"): - args += " {}".format(self.backup_options.get("xtra_options")) - # Checking if extra backup options were passed: - if self.backup_options.get("xtra_backup"): - args += " {}".format(self.backup_options.get("xtra_backup")) - - # Checking if partial recovery list is available - if self.backup_options.get("partial_list"): - logger.warning("Partial Backup is enabled!") - args += ' --databases="{}"'.format(self.backup_options.get("partial_list")) - - return args - - def extract_decrypt_from_stream_backup( - self, - recent_full_bck: Optional[str] = None, - recent_inc_bck: Optional[str] = None, - flag: Optional[bool] = None, - ) -> None: - """ - Method for extracting and if necessary decrypting from streamed backup. - If the recent_full_bck passed then it means you want to extract the full backup. - If the recent_int_bck passed then it means you want to extract the inc backup. 
- """ - # Extract and decrypt streamed full backup prior to executing incremental backup - file_name = "{}/{}/inc_backup.stream".format( - self.backup_options.get("inc_dir"), recent_inc_bck + args = ( + f" --socket={self.mysql_options.get('mysql_socket')}" + if self.mysql_options.get("mysql_socket") + else f" --host={self.mysql_options.get('mysql_host')} --port={self.mysql_options.get('mysql_port')}" ) - file_place_holder = "< {} -C {}/{}".format( - file_name, self.backup_options.get("inc_dir"), recent_inc_bck - ) - - if not recent_inc_bck: - file_name = "{}/{}/full_backup.stream".format( - self.backup_options.get("full_dir"), recent_full_bck - ) - file_place_holder = "< {} -C {}/{}".format( - file_name, self.backup_options.get("full_dir"), recent_full_bck - ) - - xbstream_command = None - - if self.xbstream_options.get("stream") == "xbstream": - xbstream_command = "{} {}".format( - self.xbstream_options.get("xbstream"), - self.xbstream_options.get("xbstream_options"), - ) - if ( - self.encryption_options.get("encrypt") - and self.xbstream_options.get("xbs_decrypt") - and not flag - ): - logger.info( - "Using xbstream to extract and decrypt from {}".format(file_name) - ) - xbstream_command += ( - " --decrypt={} --encrypt-key={} --encrypt-threads={} ".format( - self.encryption_options.get("decrypt"), - self.encryption_options.get("encrypt_key"), - self.encryption_options.get("encrypt_threads"), - ) - ) - if xbstream_command: - xbstream_command += file_place_holder - logger.info( - "The following xbstream command will be executed {}".format( - xbstream_command - ) - ) - if self.dry == 0 and isfile(file_name): - ProcessRunner.run_command(xbstream_command) - - def stream_encrypt_compress_tar_checker(self) -> None: - if self.xbstream_options.get("stream") == "tar" and ( - self.encryption_options.get("encrypt") - or self.compression_options.get("compress") - ): - logger.error( - "xtrabackup: error: compressed and encrypted backups are " - "incompatible with the 'tar' streaming format. Use --stream=xbstream instead." - ) - raise RuntimeError( - "xtrabackup: error: compressed and encrypted backups are " - "incompatible with the 'tar' streaming format. Use --stream=xbstream instead." - ) - - def stream_tar_incremental_checker(self) -> None: - if self.xbstream_options.get("stream") == "tar": - logger.error( - "xtrabackup: error: streaming incremental backups are incompatible with the " - "'tar' streaming format. Use --stream=xbstream instead." - ) - raise RuntimeError( - "xtrabackup: error: streaming incremental backups are incompatible with the " - "'tar' streaming format. Use --stream=xbstream instead." - ) + return ( + f"{args} {self.backup_options.get('xtra_options')}" + if self.backup_options.get("xtra_options") + else "" + ) def full_backup_command_builder(self, full_backup_dir: str) -> str: """ Method for creating Full Backup command. 
- :param full_backup_dir the path of backup directory + :param: full_backup_dir the path of backup directory + :return: generated command string """ - xtrabackup_cmd = ( - "{} --defaults-file={} --user={} --password={} " - " --target-dir={} --backup".format( - self.backup_options.get("backup_tool"), - self.mysql_options.get("mycnf"), - self.mysql_options.get("mysql_user"), - self.mysql_options.get("mysql_password"), - full_backup_dir, - ) - ) - # Calling general options/command builder to add extra options - xtrabackup_cmd += self.general_command_builder() - - stream = self.backup_options.get("stream") - if stream: - logger.warning("Streaming is enabled!") - xtrabackup_cmd += ' --stream="{}"'.format(stream) - if stream == "xbstream": - xtrabackup_cmd += " > {}/full_backup.stream".format(full_backup_dir) - elif stream == "tar": - xtrabackup_cmd += " > {}/full_backup.tar".format(full_backup_dir) - - return xtrabackup_cmd + return ( + f"{self.backup_options.get('backup_tool')} --defaults-file={self.mysql_options.get('mycnf')} " + f"--user={self.mysql_options.get('mysql_user')} --password={self.mysql_options.get('mysql_password')} " + f"--target-dir={full_backup_dir} --backup" + ) + self.general_command_builder() def inc_backup_command_builder( self, @@ -217,95 +56,17 @@ def inc_backup_command_builder( recent_inc_bck: Optional[str] = None, ) -> str: xtrabackup_inc_cmd_base = ( - "{} --defaults-file={} --user={} --password={}".format( - self.backup_options.get("backup_tool"), - self.mysql_options.get("mycnf"), - self.mysql_options.get("mysql_user"), - self.mysql_options.get("mysql_password"), - ) + f'{self.backup_options.get("backup_tool")} ' + f'--defaults-file={self.mysql_options.get("mycnf")} ' + f'--user={self.mysql_options.get("mysql_user")} ' + f'--password={self.mysql_options.get("mysql_password")} ' + f"--target-dir={inc_backup_dir}" ) - if not recent_inc_bck: - xtrabackup_inc_cmd_base += ( - " --target-dir={} --incremental-basedir={}/{} --backup".format( - inc_backup_dir, self.backup_options.get("full_dir"), recent_full_bck - ) - ) - else: - xtrabackup_inc_cmd_base += ( - " --target-dir={} --incremental-basedir={}/{} --backup".format( - inc_backup_dir, self.backup_options.get("inc_dir"), recent_inc_bck - ) - ) - - # Calling general options/command builder to add extra options - xtrabackup_inc_cmd_base += self.general_command_builder() - - # Checking if streaming enabled for backups - # There is no need to check for 'tar' streaming type -> see the method: stream_tar_incremental_checker() - if ( - hasattr(self, "stream") - and self.xbstream_options.get("stream") == "xbstream" - ): - xtrabackup_inc_cmd_base += ' --stream="{}"'.format( - self.xbstream_options.get("stream") - ) - xtrabackup_inc_cmd_base += " > {}/inc_backup.stream".format(inc_backup_dir) - logger.warning("Streaming xbstream is enabled!") - return xtrabackup_inc_cmd_base - - def decrypter( - self, - recent_full_bck: Optional[str] = None, - xtrabackup_inc_cmd: Optional[str] = None, - recent_inc_bck: Optional[str] = None, - ) -> None: - logger.info("Applying workaround for LP #1444255") - logger.info("See more -> https://jira.percona.com/browse/PXB-934") - # With recent PXB 8 it seems to be there is no need for this workaround. - # Due to this moving this feature to this method and keeping just in case. - # Deprecated as hell. 
- if "encrypt" not in xtrabackup_inc_cmd: # type: ignore - return - if not isfile( - "{}/{}/xtrabackup_checkpoints.xbcrypt".format( - self.backup_options.get("full_dir"), recent_full_bck - ) - ): - logger.info("Skipping...") - return - - xbcrypt_command = "{} -d -k {} -a {}".format( - self.encryption_options.get("xbcrypt"), - self.encryption_options.get("encrypt_key"), - self.encryption_options.get("encrypt"), - ) - xbcrypt_command_extra = ( - " -i {}/{}/xtrabackup_checkpoints.xbcrypt -o {}/{}/xtrabackup_checkpoints" - ) - xbcrypt_command += xbcrypt_command_extra.format( - self.backup_options.get("full_dir"), - recent_full_bck, - self.backup_options.get("full_dir"), - recent_full_bck, + xtrabackup_inc_cmd_base += ( + f' --incremental-basedir={self.backup_options.get("inc_dir")}/{recent_inc_bck}' + if recent_inc_bck + else f' --incremental-basedir={self.backup_options.get("full_dir")}/{recent_full_bck}' ) - if recent_inc_bck: - if not isfile( - "{}/{}/xtrabackup_checkpoints.xbcrypt".format( - self.backup_options.get("inc_dir"), recent_inc_bck - ) - ): - logger.info("Skipping...") - return - xbcrypt_command += xbcrypt_command_extra.format( - self.backup_options.get("inc_dir"), - recent_inc_bck, - self.backup_options.get("inc_dir"), - recent_inc_bck, - ) - logger.info( - "The following xbcrypt command will be executed {}".format(xbcrypt_command) - ) - if self.dry == 0: - ProcessRunner.run_command(xbcrypt_command) + return f"{xtrabackup_inc_cmd_base} --backup {self.general_command_builder()}" diff --git a/mysql_autoxtrabackup/backup_backup/backup_tags.py b/mysql_autoxtrabackup/backup_backup/backup_tags.py new file mode 100644 index 0000000..ec163a0 --- /dev/null +++ b/mysql_autoxtrabackup/backup_backup/backup_tags.py @@ -0,0 +1,95 @@ +import logging +import os +from dataclasses import dataclass +from datetime import datetime +from typing import Optional + +from mysql_autoxtrabackup.backup_backup import BackupBuilderChecker +from mysql_autoxtrabackup.utils import helpers + +logger = logging.getLogger(__name__) + + +@dataclass +class BackupTags: + tag: Optional[str] + builder_obj: BackupBuilderChecker + + def add_tag( + self, backup_type: str, backup_size: Optional[str], backup_status: Optional[str] + ) -> bool: + """ + Method for adding backup tags + :param backup_type: The backup type - Full/Inc + :param backup_size: The size of the backup in human-readable format + :param backup_status: Status: OK or Status: Failed + :return: True if no exception + """ + # skip tagging unless self.tag + if not self.tag: + logger.info("TAGGING SKIPPED") + return True + + # Currently, only support Inc and Full types, calculate name based on this + assert backup_type in { + "Full", + "Inc", + }, f"add_tag(): backup_type {backup_type}: must be 'Full' or 'Inc'" + + backup_name = ( + helpers.get_latest_dir_name( + str(self.builder_obj.backup_options.get("full_dir")) + ) + if backup_type == "Full" + else helpers.get_latest_dir_name( + str(self.builder_obj.backup_options.get("inc_dir")) + ) + ) + + # Calculate more tag fields, create string + backup_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + backup_tag_str = ( + "{bk_name}\t{bk_type}\t{bk_status}\t{bk_timestamp}\t{bk_size}\t'{bk_tag}'\n" + ) + + # Apply tag + with open( + f'{self.builder_obj.backup_options.get("backup_dir")}/backup_tags.txt', "a" + ) as backup_tags_file: + backup_tag_final = backup_tag_str.format( + bk_name=backup_name, + bk_type=backup_type, + bk_status=backup_status, + bk_timestamp=backup_timestamp, + bk_size=backup_size, + 
bk_tag=self.tag, + ) + + backup_tags_file.write(backup_tag_final) + return True + + @staticmethod + def show_tags(backup_dir: str, tag_file: Optional[str] = None) -> Optional[bool]: + tag_file = tag_file or f"{backup_dir}/backup_tags.txt" + if os.path.isfile(tag_file): + with open(f"{backup_dir}/backup_tags.txt", "r") as backup_tags: + from_file = backup_tags.read() + column_names = "{0}\t{1}\t{2}\t{3}\t{4}\tTAG\n".format( + "Backup".ljust(19), + "Type".ljust(4), + "Status".ljust(2), + "Completion_time".ljust(19), + "Size", + ) + extra_str = "{}\n".format("-" * (len(column_names) + 21)) + print(column_names + extra_str + from_file) + logger.info(column_names + extra_str + from_file) + return True + else: + logger.warning( + "Could not find backup_tags.txt inside given backup directory. Can't print tags." + ) + print( + "WARNING: Could not find backup_tags.txt inside given backup directory. Can't print tags." + ) + return None diff --git a/mysql_autoxtrabackup/backup_backup/backuper.py b/mysql_autoxtrabackup/backup_backup/backuper.py index 67f0e74..ba3f537 100755 --- a/mysql_autoxtrabackup/backup_backup/backuper.py +++ b/mysql_autoxtrabackup/backup_backup/backuper.py @@ -3,17 +3,17 @@ # Shahriyar Rzayev (Shako)-> https://mysql.az/ https://azepug.az/ # / rzayev.sehriyar@gmail.com / rzayev.shahriyar@yandex.com # This comment is from 2014 - keeping it here - - import logging import os import shutil import time +from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, Union +from functools import wraps +from typing import Optional -from mysql_autoxtrabackup.backup_backup.backup_archive import BackupArchive from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker +from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.check_env import CheckEnv from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner @@ -22,102 +22,68 @@ logger = logging.getLogger(__name__) -class Backup: - def __init__( - self, - config: str = path_config.config_path_file, - dry_run: Union[bool, None] = None, - tag: Union[str, None] = None, - ) -> None: - self.conf = config - self.dry = dry_run - self.tag = tag - self.mysql_cli = mysql_cli.MySQLClientHelper(config=self.conf) - self.builder_obj = BackupBuilderChecker(config=self.conf, dry_run=self.dry) - self.archive_obj = BackupArchive( - config=self.conf, dry_run=self.dry, tag=self.tag - ) +def _is_dry_run(method): + @wraps(method) + def wrapped(_self, *args, **kwargs): + return True if _self.dry_run else method(_self, *args, **kwargs) - def add_tag( - self, backup_type: str, backup_size: Optional[str], backup_status: Optional[str] - ) -> bool: - """ - Method for adding backup tags - :param backup_type: The backup type - Full/Inc - :param backup_size: The size of the backup in human readable format - :param backup_status: Status: OK or Status: Failed - :return: True if no exception - """ - # skip tagging unless self.tag - if not self.tag: - logger.info("TAGGING SKIPPED") - return True - - # Currently only support Inc and Full types, calculate name based on this - assert backup_type in ( - "Full", - "Inc", - ), "add_tag(): backup_type {}: must be 'Full' or 'Inc'".format(backup_type) - backup_name = ( - helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("full_dir")) - ) - if backup_type == "Full" - else helpers.get_latest_dir_name( - 
str(self.builder_obj.backup_options.get("inc_dir")) - ) - ) + return wrapped - # Calculate more tag fields, create string - backup_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") - backup_tag_str = ( - "{bk_name}\t{bk_type}\t{bk_status}\t{bk_timestamp}\t{bk_size}\t'{bk_tag}'\n" - ) - # Apply tag - with open( - "{}/backup_tags.txt".format( - self.builder_obj.backup_options.get("backup_dir") - ), - "a", - ) as backup_tags_file: - backup_tag_final = backup_tag_str.format( - bk_name=backup_name, - bk_type=backup_type, - bk_status=backup_status, - bk_timestamp=backup_timestamp, - bk_size=backup_size, - bk_tag=self.tag, - ) +def _is_full_path_exists(method): + @wraps(method) + def wrapped( + _self, full_dir: Optional[str] = None, remove_all: Optional[str] = None + ): + full_dir = full_dir or _self._full_dir + if not os.path.isdir(full_dir): + return + return method(_self, full_dir, remove_all) - backup_tags_file.write(backup_tag_final) - return True + return wrapped - @staticmethod - def show_tags(backup_dir: str, tag_file: Optional[str] = None) -> Optional[bool]: - tag_file = tag_file or "{}/backup_tags.txt".format(backup_dir) - if os.path.isfile(tag_file): - with open("{}/backup_tags.txt".format(backup_dir), "r") as backup_tags: - from_file = backup_tags.read() - column_names = "{0}\t{1}\t{2}\t{3}\t{4}\tTAG\n".format( - "Backup".ljust(19), - "Type".ljust(4), - "Status".ljust(2), - "Completion_time".ljust(19), - "Size", - ) - extra_str = "{}\n".format("-" * (len(column_names) + 21)) - print(column_names + extra_str + from_file) - logger.info(column_names + extra_str + from_file) - return True - else: - logger.warning( - "Could not find backup_tags.txt inside given backup directory. Can't print tags." - ) - print( - "WARNING: Could not find backup_tags.txt inside given backup directory. Can't print tags." - ) - return None + +def _is_inc_path_exists(method): + @wraps(method) + def wrapped(_self, inc_dir: Optional[str] = None): + inc_dir = inc_dir or _self._inc_dir + if not os.path.isdir(inc_dir): + return + return method(_self, inc_dir) + + return wrapped + + +def _get_inc_dir(builder_obj: BackupBuilderChecker): + return str(builder_obj.backup_options.get("inc_dir")) + + +def _get_full_dir(builder_obj: BackupBuilderChecker): + return str(builder_obj.backup_options.get("full_dir")) + + +def _create_bck_dir(path: str): + return helpers.create_backup_directory(path) + + +def _get_recent_bck(path: str): + return helpers.get_latest_dir_name(path) + + +@dataclass +class Backup: + builder_obj: BackupBuilderChecker + tagger: BackupTags + mysql_cli: mysql_cli.MySQLClientHelper + config: str = path_config.config_path_file + dry_run: Optional[bool] = None + tag: Optional[str] = None + _full_dir: str = field(init=False) + _inc_dir: str = field(init=False) + + def __post_init__(self): + self._full_dir = _get_full_dir(self.builder_obj) + self._inc_dir = _get_inc_dir(self.builder_obj) def last_full_backup_date( self, path: Optional[str] = None, full_backup_interval: Optional[float] = None @@ -127,16 +93,17 @@ def last_full_backup_date( :return: True if last full backup date older than given interval, False if it is newer. 
""" # Finding last full backup date from dir/folder name - full_dir = path or str(self.builder_obj.backup_options.get("full_dir")) + full_dir = path or self._full_dir backup_interval = full_backup_interval or str( self.builder_obj.backup_options.get("full_backup_interval") ) - max_dir = helpers.get_latest_dir_name(full_dir) + max_dir = _get_recent_bck(full_dir) dir_date = datetime.strptime(str(max_dir), "%Y-%m-%d_%H-%M-%S") now = datetime.now() return float((now - dir_date).total_seconds()) >= float(backup_interval) + @_is_full_path_exists def clean_full_backup_dir( self, full_dir: Optional[str] = None, @@ -145,34 +112,27 @@ def clean_full_backup_dir( # Deleting old full backup after taking new full backup. # Keeping the latest in order not to lose everything. logger.info("starting clean_full_backup_dir") - full_dir = full_dir or str(self.builder_obj.backup_options.get("full_dir")) - if not os.path.isdir(full_dir): - return True - if remove_all: - for i in os.listdir(full_dir): - rm_dir = full_dir + "/" + i - shutil.rmtree(rm_dir) - return True for i in os.listdir(full_dir): - rm_dir = full_dir + "/" + i - if i != max(os.listdir(full_dir)): + rm_dir = f"{full_dir}/{i}" + if (i != max(os.listdir(full_dir)) and not remove_all) or remove_all: + logger.info(f"DELETING {rm_dir}") shutil.rmtree(rm_dir) - logger.info("DELETING {}".format(rm_dir)) else: - logger.info("KEEPING {}".format(rm_dir)) + logger.info(f"KEEPING {rm_dir}") return True + @_is_inc_path_exists def clean_inc_backup_dir(self, inc_dir: Optional[str] = None) -> Optional[bool]: # Deleting incremental backups after taking new fresh full backup. - inc_dir = inc_dir or str(self.builder_obj.backup_options.get("inc_dir")) - if not os.path.isdir(inc_dir): - return True + inc_dir = inc_dir or self._inc_dir + for i in os.listdir(inc_dir): - rm_dir = inc_dir + "/" + i + rm_dir = f"{inc_dir}/{i}" shutil.rmtree(str(rm_dir)) return True + @_is_dry_run def full_backup(self) -> bool: """ Method for taking full backups. It will construct the backup command based on config file. @@ -180,38 +140,19 @@ def full_backup(self) -> bool: :raise: RuntimeError on error. """ logger.info( - "starting full backup to {}".format( - self.builder_obj.backup_options.get("full_dir") - ) - ) - full_backup_dir = helpers.create_backup_directory( - str(self.builder_obj.backup_options.get("full_dir")) + f'starting full backup to {self.builder_obj.backup_options.get("full_dir")}' ) + full_backup_dir = _create_bck_dir(self._full_dir) + # Creating Full Backup command. xtrabackup_cmd = self.builder_obj.full_backup_command_builder( full_backup_dir=full_backup_dir ) - # Extra checks. - self.builder_obj.stream_encrypt_compress_tar_checker() - - if self.dry: - # If it's a dry run, skip running & tagging - return True - - logger.debug( - "Starting {}".format(self.builder_obj.backup_options.get("backup_tool")) - ) - status = ProcessRunner.run_command(xtrabackup_cmd) - status_str = "OK" if status is True else "FAILED" - self.add_tag( - backup_type="Full", - backup_size=helpers.get_folder_size(full_backup_dir), - backup_status=status_str, - ) - return status + return self._get_status("Full", full_backup_dir, xtrabackup_cmd) + @_is_dry_run def inc_backup(self) -> bool: """ Method for taking incremental backups. @@ -219,28 +160,17 @@ def inc_backup(self) -> bool: :raise: RuntimeError on error. 
""" # Get the recent full backup path - recent_full_bck = helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("full_dir")) - ) + recent_full_bck = _get_recent_bck(self._full_dir) if not recent_full_bck: raise RuntimeError( "Failed to get Full backup path. Are you sure you have one?" ) # Get the recent incremental backup path - recent_inc_bck = helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("inc_dir")) - ) + recent_inc_bck = _get_recent_bck(self._inc_dir) # Creating time-stamped incremental backup directory - inc_backup_dir = helpers.create_backup_directory( - str(self.builder_obj.backup_options.get("inc_dir")) - ) - - # Check here if stream=tar enabled. - # Because it is impossible to take incremental backup with streaming tar. - # raise RuntimeError. - self.builder_obj.stream_tar_incremental_checker() + inc_backup_dir = _create_bck_dir(self._inc_dir) xtrabackup_inc_cmd = self.builder_obj.inc_backup_command_builder( recent_full_bck=recent_full_bck, @@ -248,32 +178,7 @@ def inc_backup(self) -> bool: recent_inc_bck=recent_inc_bck, ) - self.builder_obj.extract_decrypt_from_stream_backup( - recent_full_bck=recent_full_bck, recent_inc_bck=recent_inc_bck - ) - - # Deprecated workaround for LP #1444255 - self.builder_obj.decrypter( - recent_full_bck=recent_full_bck, - xtrabackup_inc_cmd=xtrabackup_inc_cmd, - recent_inc_bck=recent_inc_bck, - ) - - if self.dry: - # If it's a dry run, skip running & tagging - return True - - logger.debug( - "Starting {}".format(self.builder_obj.backup_options.get("backup_tool")) - ) - status = ProcessRunner.run_command(xtrabackup_inc_cmd) - status_str = "OK" if status is True else "FAILED" - self.add_tag( - backup_type="Inc", - backup_size=helpers.get_folder_size(inc_backup_dir), - backup_status=status_str, - ) - return status + return self._get_status("Inc", inc_backup_dir, xtrabackup_inc_cmd) def all_backup(self) -> bool: """ @@ -286,15 +191,13 @@ def all_backup(self) -> bool: # Creating object from CheckEnv class check_env_obj = CheckEnv( - self.conf, - full_dir=str(self.builder_obj.backup_options.get("full_dir")), - inc_dir=str(self.builder_obj.backup_options.get("inc_dir")), + self.config, + full_dir=self._full_dir, + inc_dir=self._inc_dir, ) assert check_env_obj.check_all_env() is True, "environment checks failed!" - if not helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("full_dir")) - ): + if not _get_recent_bck(self._full_dir): logger.info( "- - - - You have no backups : Taking very first Full Backup! - - - -" ) @@ -308,19 +211,6 @@ def all_backup(self) -> bool: "- - - - Your full backup is timeout : Taking new Full Backup! - - - -" ) - # Archiving backups - if self.archive_obj.backup_archive_options.get("archive_dir"): - logger.info( - "Archiving enabled; cleaning archive_dir & archiving previous Full Backup" - ) - if self.archive_obj.backup_archive_options.get( - "archive_max_duration" - ) or self.archive_obj.backup_archive_options.get("archive_max_size"): - self.archive_obj.clean_old_archives() - self.archive_obj.create_backup_archives() - else: - logger.info("Archiving disabled. Skipping!") - if self.mysql_cli.mysql_run_command("flush logs") and self.full_backup(): # Removing full backups self.clean_full_backup_dir() @@ -331,10 +221,10 @@ def all_backup(self) -> bool: else: logger.info( - "- - - - You have a full backup that is less than {} seconds old. 
- - - -".format( - self.builder_obj.backup_options.get("full_backup_interval") - ) + f"- - - - You have a full backup that is less than " + f'{self.builder_obj.backup_options.get("full_backup_interval")} seconds old. - - - -' ) + logger.info( "- - - - We will take an incremental one based on recent Full Backup - - - -" ) @@ -345,3 +235,14 @@ def all_backup(self) -> bool: self.inc_backup() return True + + def _get_status(self, backup_type: str, backup_dir: str, xtrabackup_cmd: str): + logger.debug(f'Starting {self.builder_obj.backup_options.get("backup_tool")}') + status = ProcessRunner.run_command(xtrabackup_cmd) + status_str = "OK" if status is True else "FAILED" + self.tagger.add_tag( + backup_type=backup_type, + backup_size=helpers.get_folder_size(backup_dir), + backup_status=status_str, + ) + return status diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index f670ad2..ce38963 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -16,9 +16,7 @@ def __init__(self, config: str = path_config.config_path_file) -> None: self.con = configparser.ConfigParser() self.con.read(config) else: - logger.critical( - "Missing config file : {}".format(path_config.config_path_file) - ) + logger.critical(f"Missing config file : {path_config.config_path_file}") @property def mysql_options(self) -> Dict[str, str]: @@ -44,81 +42,6 @@ def logging_options(self) -> Dict[str, str]: "log_file_backup_count": self.con.get(section, "log_file_backup_count"), } - @property - def compression_options(self) -> Dict[str, str]: - section = "Compress" - return { - "compress": self.con.get(section, "compress", fallback=None), # type: ignore - "compress_chunk_size": self.con.get(section, "compress_chunk_size", fallback=None), # type: ignore - "compress_threads": self.con.get(section, "compress_threads", fallback=None), # type: ignore - "decompress": self.con.get(section, "decompress", fallback=None), # type: ignore - "remove_original": self.con.get(section, "remove_original", fallback=None), # type: ignore - } - - @property - def xbstream_options(self) -> Dict[str, str]: - section = "Xbstream" - return { - "xbstream": self.con.get(section, "xbstream", fallback=None), # type: ignore - "stream": self.con.get(section, "stream", fallback=None), # type: ignore - "xbstream_options": self.con.get(section, "xbstream_options", fallback=None), # type: ignore - "xbs_decrypt": self.con.get(section, "xbs_decrypt", fallback=None), # type: ignore - } - - @property - def command_options(self) -> Dict[str, str]: - section = "Commands" - return { - "start_mysql_command": self.con.get(section, "start_mysql_command"), - "stop_mysql_command": self.con.get(section, "stop_mysql_command"), - "chown_command": self.con.get(section, "chown_command"), - } - - @property - def encryption_options(self) -> Dict[str, str]: - section = "Encrypt" - return { - "xbcrypt": self.con.get(section, "xbcrypt", fallback=None), # type: ignore - "encrypt": self.con.get(section, "encrypt", fallback=None), # type: ignore - "encrypt_key": self.con.get(section, "encrypt_key", fallback=None), # type: ignore - "encrypt_key_file": self.con.get(section, "encrypt_key_file", fallback=None), # type: ignore - "encrypt_threads": self.con.get(section, "encrypt_threads", fallback=None), # type: ignore - "encrypt_chunk_size": self.con.get(section, "encrypt_chunk_size", fallback=None), # type: ignore - "decrypt": self.con.get(section, "decrypt", fallback=None), # 
type: ignore - "remove_original": self.con.get(section, "remove_original", fallback=None), # type: ignore - } - - @property - def backup_archive_options(self) -> Dict[str, Union[str, float]]: - section = "Backup" - # backward compatible with old config 'max_archive_size' and newer 'archive_max_size' - archive_max_size = self.con.get(section, "max_archive_size", fallback=None) - if archive_max_size: - archive_max_size = humanfriendly.parse_size(archive_max_size) - elif self.con.get(section, "archive_max_size", fallback=None): - archive_max_size = humanfriendly.parse_size( - self.con.get(section, "archive_max_size", fallback=None) - ) - - # backward compatible with old config 'max_archive_duration' and newer 'archive_max_duration' - archive_max_duration = self.con.get( - section, "max_archive_duration", fallback=None - ) - if archive_max_duration: - archive_max_duration = humanfriendly.parse_timespan(archive_max_duration) - elif self.con.get(section, "archive_max_duration", fallback=None): - archive_max_duration = humanfriendly.parse_timespan( - self.con.get(section, "archive_max_duration", fallback=None) - ) - - return { - "archive_dir": self.con.get(section, "archive_dir", fallback=None), # type: ignore - "prepare_archive": self.con.get(section, "prepare_archive", fallback=None), # type: ignore - "move_archive": self.con.get(section, "move_archive", fallback=None), # type: ignore - "archive_max_size": str(archive_max_size), - "archive_max_duration": str(archive_max_duration), - } - @property def backup_options(self) -> Dict[str, Union[str, float]]: section = "Backup" @@ -132,12 +55,9 @@ def backup_options(self) -> Dict[str, Union[str, float]]: "full_dir": self.con.get(section, "backup_dir") + "/full", "inc_dir": self.con.get(section, "backup_dir") + "/inc", "backup_tool": self.con.get(section, "backup_tool"), - "prepare_tool": self.con.get(section, "prepare_tool", fallback=None), # type: ignore "xtra_backup": self.con.get(section, "xtra_backup", fallback=None), # type: ignore - "xtra_prepare_options": self.con.get(section, "xtra_prepare_options", fallback=None), # type: ignore "xtra_options": self.con.get(section, "xtra_options", fallback=None), # type: ignore "full_backup_interval": humanfriendly.parse_timespan( self.con.get(section, "full_backup_interval", fallback="86400.0") ), - "partial_list": self.con.get(section, "partial_list", fallback=None), # type: ignore } diff --git a/mysql_autoxtrabackup/general_conf/generate_default_conf.py b/mysql_autoxtrabackup/general_conf/generate_default_conf.py index 75e3ef7..0a21861 100644 --- a/mysql_autoxtrabackup/general_conf/generate_default_conf.py +++ b/mysql_autoxtrabackup/general_conf/generate_default_conf.py @@ -2,6 +2,7 @@ # As part of - https://github.com/ShahriyarR/MySQL-AutoXtraBackup/issues/331 import configparser +import contextlib from os import makedirs from os.path import exists, join @@ -12,14 +13,12 @@ class GenerateDefaultConfig: def __init__(self, config: str = path_config.config_path_file) -> None: self.conf = config self.home = path_config.home - try: + with contextlib.suppress(FileExistsError, OSError): if not exists(path_config.config_path): makedirs(path_config.config_path) - except: - pass def generate_config_file(self) -> None: - with open(self.conf, "w+") as cfgfile: + with open(self.conf, "w+") as cfg_file: config = configparser.ConfigParser(allow_no_value=True) section1 = "MySQL" config.add_section(section1) @@ -56,91 +55,7 @@ def generate_config_file(self) -> None: config.set(section3, "pid_runtime_warning", "2 Hours") 
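Illustrative aside (not part of the patch): the contextlib.suppress block introduced in generate_default_conf.py above is the structured equivalent of the bare try/except/pass it replaces, swallowing only the named exceptions. A minimal, runnable sketch with a placeholder path:

    import contextlib
    from os import makedirs
    from os.path import exists

    # Placeholder directory for illustration; the real code uses path_config.config_path.
    config_path = "/tmp/autoxtrabackup-example"

    # Same intent as the removed try/except/pass: create the directory if needed
    # and ignore FileExistsError/OSError instead of catching everything.
    with contextlib.suppress(FileExistsError, OSError):
        if not exists(config_path):
            makedirs(config_path)
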
config.set(section3, "backup_dir", join(self.home, "XB_TEST/backup_dir")) config.set(section3, "backup_tool", "/usr/bin/xtrabackup") - config.set( - section3, - "#Optional: specify different path/version of xtrabackup here for prepare", - ) - config.set(section3, "#prepare_tool", "") - config.set(section3, "#Optional: pass additional options for backup stage") - config.set(section3, "#xtra_backup", "--compact") - config.set(section3, "#Optional: pass additional options for prepare stage") - config.set(section3, "#xtra_prepare_options", "--rebuild-indexes") - config.set( - section3, - "#Optional: pass general additional options; it will go to both for backup and prepare", - ) config.set(section3, "#xtra_options", "--binlog-info=ON --galera-info") - config.set(section3, "#Optional: set archive and rotation") - config.set( - section3, "#archive_dir", join(self.home, "XB_TEST/backup_archives") - ) - config.set(section3, "#prepare_archive", "1") - config.set(section3, "#move_archive", "0") config.set(section3, "#full_backup_interval", "1 day") - config.set(section3, "#archive_max_size", "100GiB") - config.set(section3, "#archive_max_duration", "4 Days") - config.set( - section3, - "#Optional: WARNING(Enable this if you want to take partial backups). " - "Specify database names or table names.", - ) - config.set(section3, "#partial_list", "tests.t1 tests.t2 dbtest") - - section4 = "Compress" - config.add_section(section4) - config.set(section4, "#optional") - config.set(section4, "#Enable only if you want to use compression.") - config.set(section4, "#compress", "quicklz") - config.set(section4, "#compress_chunk_size", "65536") - config.set(section4, "#compress_threads", "4") - config.set(section4, "#decompress", "TRUE") - config.set( - section4, - "#Enable if you want to remove .qp files after decompression." - "(Available from PXB 2.3.7 and 2.4.6)", - ) - config.set(section4, "#remove_original", "FALSE") - - section5 = "Encrypt" - config.add_section(section5) - config.set(section5, "#Optional") - config.set(section5, "#Enable only if you want to create encrypted backups") - config.set(section5, "#xbcrypt", "/usr/bin/xbcrypt") - config.set(section5, "#encrypt", "AES256") - config.set( - section5, - "#Please note that --encrypt-key and --encrypt-key-file are mutually exclusive", - ) - config.set(section5, "#encrypt_key", "VVTBwgM4UhwkTTV98fhuj+D1zyWoA89K") - config.set(section5, "#encrypt_key_file", "/path/to/file/with_encrypt_key") - config.set(section5, "#encrypt_threads", "4") - config.set(section5, "#encrypt_chunk_size", "65536") - config.set(section5, "#decrypt", "AES256") - config.set( - section5, - "#Enable if you want to remove .qp files after decompression." 
- "(Available from PXB 2.3.7 and 2.4.6)", - ) - config.set(section5, "#remove_original", "FALSE") - - section6 = "Xbstream" - config.add_section(section6) - config.set(section6, "#EXPERIMENTAL/OPTIONAL") - config.set(section6, "#Enable this, if you want to stream your backups") - config.set(section6, "#xbstream", "/usr/bin/xbstream") - config.set(section6, "#stream", "xbstream") - config.set(section6, "#xbstream_options", "-x --parallel=100") - config.set(section6, "#xbs_decrypt", "1") - config.set( - section6, - "# WARN, enable this, if you want to stream your backups to remote host", - ) - config.set(section6, "#remote_stream", "ssh xxx.xxx.xxx.xxx") - - section8 = "Commands" - config.add_section(section8) - config.set(section8, "start_mysql_command", "service mysql start") - config.set(section8, "stop_mysql_command", "service mysql stop") - config.set(section8, "chown_command", "chown -R mysql:mysql") - config.write(cfgfile) + config.write(cfg_file) diff --git a/pyproject.toml b/pyproject.toml index 4a7a95c..01e2479 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,11 @@ [build-system] -requires = ["flit"] -build-backend = "flit.buildapi" +requires = ["flit_core >=3.2,<4"] +build-backend = "flit_core.buildapi" -[tool.flit.metadata] -module = "mysql_autoxtrabackup" -author = "Shahriyar(Shako) Rzayev" -author-email = "rzayev.sehriyar@gmail.com" -home-page = "https://github.com/ShahriyarR/MySQL-AutoXtraBackup" +[project] +name = "mysql_autoxtrabackup" +authors = [{name = "Shako Rzayev", email = "rzayev.sehriyar@gmail.com"}] +readme = "README.md" classifiers = [ "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", @@ -24,71 +23,46 @@ classifiers = [ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", ] - +dynamic = ["version", "description"] +requires-python = ">=3.8" requires = [ - "click >=3.3", + "click >=8.0", "pid >=2.0", "humanfriendly >=2.0", "fastapi >= 0.63.0", "uvicorn >= 0.13.4", ] -description-file = "README.md" -requires-python = ">=3.6" - -[tool.flit.metadata.urls] -Documentation = "https://autoxtrabackup.azepug.az/" - -[tool.flit.metadata.requires-extra] -test = [ - "pytest ==5.4.3", - "pytest-cov ==2.10.0", - "mypy ==0.812", - "isort >=5.0.6,<6.0.0", - "flake8 >=3.8.3,<4.0.0", - "black ==20.8b1", -] - -doc = [ - "sphinx", - "sphinx_rtd_theme", - "sphinx-autobuild", - "jinja2 >=2.11.3" -] +[project.optional-dependencies] +doc = ["mkdocs-material >=8.1.2"] dev = [ - "click >=3.3", - "pid >=2.0", - "humanfriendly >=2.0", - "pytest ==5.4.3", - "pytest-cov ==2.10.0", - "mypy ==0.812", - "isort >=5.0.6,<6.0.0", - "flake8 >=3.8.3,<4.0.0", - "black ==20.8b1", - "fastapi >= 0.63.0", - "uvicorn >= 0.13.4", -] + "black >=22.3.0", + "pylint >=2.12.2", + "isort >=5.9.3", + "autoflake >=1.4", + "flake8 >=4.0.1", + "pre-commit >=2.17.0" + ] -all = [ - "click >=3.3", - "pid >=2.0", - "humanfriendly >=2.0", - "sphinx", - "sphinx_rtd_theme", - "sphinx-autobuild", - "jinja2 >=2.11.3", - "fastapi >= 0.63.0", - "uvicorn >= 0.13.4", -] +[project.urls] +Documentation = "https://autoxtrabackup.azepug.az/" +Source = "https://github.com/ShahriyarR/MySQL-AutoXtraBackup" +Home = "https://github.com/ShahriyarR/MySQL-AutoXtraBackup" + +[project.scripts] +autoxtrabackup = "mysql_autoxtrabackup.autoxtrabackup:all_procedure" [tool.isort] profile = "black" 
-known_third_party = ["click", "pid", "humanfriendly"] +py_version = 38 +skip = [".gitignore", ".dockerignore"] +extend_skip = [".md", ".json"] +skip_glob = ["docs/*"] -[tool.flit.scripts] -autoxtrabackup = "mysql_autoxtrabackup.autoxtrabackup:all_procedure" \ No newline at end of file +[tool.black] +line-length = 88 +target-version = ['py38'] +include = '\.pyi?$' \ No newline at end of file diff --git a/scripts/format-imports.sh b/scripts/format-imports.sh old mode 100644 new mode 100755 index 2ade8f1..bf1c2a6 --- a/scripts/format-imports.sh +++ b/scripts/format-imports.sh @@ -2,5 +2,5 @@ set -x # Sort imports one per line, so autoflake can remove unused imports -isort --recursive mysql_autoxtrabackup tests docs scripts --force-single-line-imports +isort mysql_autoxtrabackup tests docs scripts --force-single-line-imports sh ./scripts/format.sh diff --git a/scripts/format.sh b/scripts/format.sh index 1353009..94f3e36 100644 --- a/scripts/format.sh +++ b/scripts/format.sh @@ -3,4 +3,4 @@ set -x autoflake --remove-all-unused-imports --recursive --remove-unused-variables --in-place mysql_autoxtrabackup docs scripts tests --exclude=__init__.py black mysql_autoxtrabackup docs scripts tests -isort --recursive mysql_autoxtrabackup docs scripts tests +isort mysql_autoxtrabackup docs scripts tests From d462a06c9757375296894ceeba8fe308b85eb619 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sat, 9 Apr 2022 20:57:20 +0400 Subject: [PATCH 06/17] Trying to setup test environment --- .../backup_backup/backuper.py | 7 +++-- tests/Dockerfile | 29 ++++++++++--------- tests/requirements.txt | 1 - 3 files changed, 20 insertions(+), 17 deletions(-) diff --git a/mysql_autoxtrabackup/backup_backup/backuper.py b/mysql_autoxtrabackup/backup_backup/backuper.py index ba3f537..cd9f9dc 100755 --- a/mysql_autoxtrabackup/backup_backup/backuper.py +++ b/mysql_autoxtrabackup/backup_backup/backuper.py @@ -202,7 +202,7 @@ def all_backup(self) -> bool: "- - - - You have no backups : Taking very first Full Backup! - - - -" ) - if self.mysql_cli.mysql_run_command("flush logs") and self.full_backup(): + if self._flush_logs_and_backup(): # Removing old inc backups self.clean_inc_backup_dir() @@ -211,7 +211,7 @@ def all_backup(self) -> bool: "- - - - Your full backup is timeout : Taking new Full Backup! 
- - - -" ) - if self.mysql_cli.mysql_run_command("flush logs") and self.full_backup(): + if self._flush_logs_and_backup(): # Removing full backups self.clean_full_backup_dir() @@ -236,6 +236,9 @@ def all_backup(self) -> bool: return True + def _flush_logs_and_backup(self) -> bool: + return self.mysql_cli.mysql_run_command("flush logs") and self.full_backup() + def _get_status(self, backup_type: str, backup_dir: str, xtrabackup_cmd: str): logger.debug(f'Starting {self.builder_obj.backup_options.get("backup_tool")}') status = ProcessRunner.run_command(xtrabackup_cmd) diff --git a/tests/Dockerfile b/tests/Dockerfile index 46bc3e4..b2aa31f 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,17 +1,18 @@ -FROM mysql/mysql-server:8.0 +FROM mysql:8.0-oracle USER root WORKDIR /opt +RUN microdnf install yum RUN yum install -y git -RUN yum install -y python3 +RUN yum install -y python38 RUN yum install -y vim RUN yum install -y perl -RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm +RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm RUN yum install -y https://repo.percona.com/yum/percona-release-latest.noarch.rpm RUN yum install -y libev RUN percona-release enable-only tools RUN yum install -y --exclude=Percona-Server\* percona-xtrabackup-80 RUN yum install -y qpress -RUN yum install -y python3-pip +RUN yum install -y python38-pip RUN cd /opt && \ git clone https://github.com/sstephenson/bats.git && \ cd bats && \ @@ -20,23 +21,23 @@ ARG GIT_BRANCH_NAME RUN cd /opt && \ git clone -b $GIT_BRANCH_NAME https://github.com/ShahriyarR/MySQL-AutoXtraBackup.git && \ cd /opt/MySQL-AutoXtraBackup && \ - python3 setup.py install + pip3.8 install -U pip && \ + pip3.8 install flit && \ + FLIT_ROOT_INSTALL=1 flit install RUN yum groupinstall -y "Development Tools" -RUN yum -y install python3-devel.x86_64 --enablerepo=rhel-7-server-optional-rpms -RUN cd /opt/MySQL-AutoXtraBackup/test && \ - pip3 install -r requirements.txt +RUN yum -y install python38-devel.x86_64 +RUN yum -y install libffi +RUN yum -y install libffi-devel +RUN cd /opt/MySQL-AutoXtraBackup/tests && \ + pip3.8 install -r requirements.txt EXPOSE 8080 -RUN cd /opt/MySQL-AutoXtraBackup && \ - git pull && \ - pipenv --python `which python3` install - WORKDIR /opt/MySQL-AutoXtraBackup RUN cd /opt/MySQL-AutoXtraBackup && git pull -RUN pip3 install uvicorn -RUN pip3 install fastapi +RUN pip3.8 install uvicorn +RUN pip3.8 install fastapi COPY entrypoint.sh / RUN chmod +x /entrypoint.sh diff --git a/tests/requirements.txt b/tests/requirements.txt index 6473819..b8d4c9f 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,5 +1,4 @@ docker -pygit2==0.28.2 pytest pipenv fastapi From a9de5441c1d8f42ff0ef37ba13ac2181b1f64991 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Sat, 9 Apr 2022 21:05:27 +0400 Subject: [PATCH 07/17] failed to start fastapi --- tests/Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/Dockerfile b/tests/Dockerfile index b2aa31f..fb3650c 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -29,7 +29,7 @@ RUN yum groupinstall -y "Development Tools" RUN yum -y install python38-devel.x86_64 RUN yum -y install libffi RUN yum -y install libffi-devel -RUN cd /opt/MySQL-AutoXtraBackup/tests && \ +RUN cd /opt/MySQL-AutoXtraBackup/tests && git pull && \ pip3.8 install -r requirements.txt EXPOSE 8080 @@ -39,7 +39,7 @@ RUN cd /opt/MySQL-AutoXtraBackup && git pull RUN pip3.8 install uvicorn RUN 
pip3.8 install fastapi -COPY entrypoint.sh / -RUN chmod +x /entrypoint.sh -ENTRYPOINT ["./entrypoint.sh"] -#CMD ["uvicorn", "api.main:app", "--port", "8080"] +RUN cd /opt/MySQL-AutoXtraBackup/tests && chmod +x entrypoint.sh +ENTRYPOINT ["/opt/MySQL-AutoXtraBackup/tests/entrypoint.sh"] +WORKDIR /opt/MySQL-AutoXtraBackup/mysql_autoxtrabackup +CMD ["uvicorn", "api.main:app", "--port", "8080"] From d32ab1ab26d2281158cb6a6d4f89139ad70823c5 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Mon, 11 Apr 2022 14:14:31 +0400 Subject: [PATCH 08/17] changed pyproject.toml --- pyproject.toml | 2 +- tests/Dockerfile | 18 ++++++++++-------- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 01e2479..fde1578 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ classifiers = [ ] dynamic = ["version", "description"] requires-python = ">=3.8" -requires = [ +dependencies = [ "click >=8.0", "pid >=2.0", "humanfriendly >=2.0", diff --git a/tests/Dockerfile b/tests/Dockerfile index fb3650c..9d0b592 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -25,19 +25,21 @@ RUN cd /opt && \ pip3.8 install flit && \ FLIT_ROOT_INSTALL=1 flit install -RUN yum groupinstall -y "Development Tools" -RUN yum -y install python38-devel.x86_64 -RUN yum -y install libffi -RUN yum -y install libffi-devel -RUN cd /opt/MySQL-AutoXtraBackup/tests && git pull && \ - pip3.8 install -r requirements.txt +#RUN yum groupinstall -y "Development Tools" +#RUN yum -y install python38-devel.x86_64 +#RUN yum -y install libffi +#RUN yum -y install libffi-devel +#RUN cd /opt/MySQL-AutoXtraBackup/tests && git pull && \ +# pip3.8 install -r requirements.txt EXPOSE 8080 WORKDIR /opt/MySQL-AutoXtraBackup RUN cd /opt/MySQL-AutoXtraBackup && git pull -RUN pip3.8 install uvicorn -RUN pip3.8 install fastapi +#RUN pip3.8 install uvicorn +#RUN pip3.8 install fastapi + +RUN which uvicorn RUN cd /opt/MySQL-AutoXtraBackup/tests && chmod +x entrypoint.sh ENTRYPOINT ["/opt/MySQL-AutoXtraBackup/tests/entrypoint.sh"] From 6c7b30b943b7db4d627e220ac8aed0c47ae3bc79 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Mon, 11 Apr 2022 14:48:41 +0400 Subject: [PATCH 09/17] Fixing the bug with missing config file --- docker-compose-test.yaml | 2 +- .../general_conf/generalops.py | 19 +++++++++++++------ tests/Dockerfile | 4 +--- 3 files changed, 15 insertions(+), 10 deletions(-) diff --git a/docker-compose-test.yaml b/docker-compose-test.yaml index dcee065..1f8a2df 100644 --- a/docker-compose-test.yaml +++ b/docker-compose-test.yaml @@ -1,7 +1,7 @@ version: "3.7" services: - api_v1: + autoxtrabackup: build: context: tests dockerfile: Dockerfile diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index ce38963..22b883d 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -5,18 +5,25 @@ import humanfriendly # type: ignore -from . import path_config +from . 
import path_config, GenerateDefaultConfig logger = logging.getLogger(__name__) +def _create_default_config(config: str, missing: str) ->None: + logger.critical(f"Missing config file : {missing}") + logger.warning("Creating default config file...") + GenerateDefaultConfig(config=config).generate_config_file() + logger.info(f"Default config file is generated in {config}") + + class GeneralClass: def __init__(self, config: str = path_config.config_path_file) -> None: - if isfile(config): - self.con = configparser.ConfigParser() - self.con.read(config) - else: - logger.critical(f"Missing config file : {path_config.config_path_file}") + if not isfile(config): + _create_default_config(config, missing=path_config.config_path_file) + + self.con = configparser.ConfigParser() + self.con.read(config) @property def mysql_options(self) -> Dict[str, str]: diff --git a/tests/Dockerfile b/tests/Dockerfile index 9d0b592..c5b80ba 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -18,6 +18,7 @@ RUN cd /opt && \ cd bats && \ ./install.sh /usr/local ARG GIT_BRANCH_NAME +EXPOSE 8080 RUN cd /opt && \ git clone -b $GIT_BRANCH_NAME https://github.com/ShahriyarR/MySQL-AutoXtraBackup.git && \ cd /opt/MySQL-AutoXtraBackup && \ @@ -32,15 +33,12 @@ RUN cd /opt && \ #RUN cd /opt/MySQL-AutoXtraBackup/tests && git pull && \ # pip3.8 install -r requirements.txt -EXPOSE 8080 WORKDIR /opt/MySQL-AutoXtraBackup RUN cd /opt/MySQL-AutoXtraBackup && git pull #RUN pip3.8 install uvicorn #RUN pip3.8 install fastapi -RUN which uvicorn - RUN cd /opt/MySQL-AutoXtraBackup/tests && chmod +x entrypoint.sh ENTRYPOINT ["/opt/MySQL-AutoXtraBackup/tests/entrypoint.sh"] WORKDIR /opt/MySQL-AutoXtraBackup/mysql_autoxtrabackup From 9c1842c3b254eae3e6d9dc39bcbd83dcb85d13ee Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Mon, 11 Apr 2022 18:01:09 +0400 Subject: [PATCH 10/17] Major refactoring of the Prepare stage --- docker-compose-test.yaml | 2 +- mysql_autoxtrabackup/autoxtrabackup.py | 10 +- .../backup_backup/__init__.py | 2 - .../backup_backup/backup_builder.py | 18 +- .../backup_backup/backup_tags.py | 2 +- .../backup_prepare/__init__.py | 1 - .../backup_prepare/copy_back.py | 160 --------- .../backup_prepare/prepare.py | 319 ++++++------------ .../backup_prepare/prepare_builder.py | 148 ++------ mysql_autoxtrabackup/general_conf/__init__.py | 4 - .../general_conf/check_env.py | 36 +- .../general_conf/generalops.py | 14 +- .../general_conf/generate_default_conf.py | 3 +- .../process_runner/process_runner.py | 27 +- mysql_autoxtrabackup/utils/mysql_cli.py | 44 ++- pyproject.toml | 3 + tests/Dockerfile | 29 +- tests/conftest.py | 10 +- tests/entrypoint.sh | 210 ------------ tests/test_backup.py | 9 +- tests/test_mysql_cli.py | 5 +- 21 files changed, 235 insertions(+), 821 deletions(-) delete mode 100644 mysql_autoxtrabackup/backup_prepare/copy_back.py delete mode 100755 tests/entrypoint.sh diff --git a/docker-compose-test.yaml b/docker-compose-test.yaml index 1f8a2df..b961d67 100644 --- a/docker-compose-test.yaml +++ b/docker-compose-test.yaml @@ -9,8 +9,8 @@ services: GIT_BRANCH_NAME: ${GIT_BRANCH_NAME} container_name: autoxtrabackup_apiv1_test volumes: - - ./tests/entrypoint.sh:/entrypoint.sh - /dev/log:/dev/log + - .:/opt/MySQL-AutoXtraBackup ports: - "8080:8080" network_mode: host diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index cf9797a..a258c79 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -13,7 +13,7 @@ import pid # type: 
ignore from mysql_autoxtrabackup.api import main -from mysql_autoxtrabackup.backup_backup import BackupBuilderChecker +from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags from mysql_autoxtrabackup.backup_backup.backuper import Backup from mysql_autoxtrabackup.backup_prepare.prepare import Prepare @@ -248,6 +248,7 @@ def all_procedure( show_tags, tag, verbose, + options=options, ) except (pid.PidFileAlreadyLockedError, pid.PidFileAlreadyRunningError) as error: @@ -275,6 +276,7 @@ def _run_commands( show_tags, tag, verbose, + options, ): with pid_file: # User PidFile for locking to single instance dry_run_ = dry_run @@ -282,9 +284,9 @@ def _run_commands( dry_run_ = 1 logger.warning("Dry run enabled!") - builder_obj = BackupBuilderChecker(config=defaults_file, dry_run=dry_run_) + builder_obj = BackupBuilderChecker(options=options) tagger = BackupTags(tag, builder_obj) - mysql_cli = MySQLClientHelper(config=defaults_file) + mysql_cli = MySQLClientHelper(options=options) if ( prepare is False @@ -312,7 +314,7 @@ def _run_commands( logger.info(f"Default config file is generated in {defaults_file}") elif prepare: Prepare( - config=defaults_file, dry_run=dry_run_, tag=tag + dry_run=dry_run_, tag=tag, options=options ).prepare_backup_and_copy_back() elif backup: Backup( diff --git a/mysql_autoxtrabackup/backup_backup/__init__.py b/mysql_autoxtrabackup/backup_backup/__init__.py index debc22e..e69de29 100644 --- a/mysql_autoxtrabackup/backup_backup/__init__.py +++ b/mysql_autoxtrabackup/backup_backup/__init__.py @@ -1,2 +0,0 @@ -from .backup_builder import BackupBuilderChecker as BackupBuilderChecker -from .backuper import Backup as Backup diff --git a/mysql_autoxtrabackup/backup_backup/backup_builder.py b/mysql_autoxtrabackup/backup_backup/backup_builder.py index b4782a7..01f527b 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_builder.py +++ b/mysql_autoxtrabackup/backup_backup/backup_builder.py @@ -1,24 +1,20 @@ # Will store necessary checks and command building actions here import logging +from dataclasses import dataclass from typing import Optional -from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass logger = logging.getLogger(__name__) +@dataclass class BackupBuilderChecker: - def __init__( - self, - config: str = path_config.config_path_file, - dry_run: Optional[bool] = None, - ) -> None: - self.conf = config - self.dry = dry_run - options_obj = GeneralClass(config=self.conf) - self.mysql_options = options_obj.mysql_options - self.backup_options = options_obj.backup_options + options: GeneralClass + + def __post_init__(self): + self.mysql_options = self.options.mysql_options + self.backup_options = self.options.backup_options def general_command_builder(self) -> str: """ diff --git a/mysql_autoxtrabackup/backup_backup/backup_tags.py b/mysql_autoxtrabackup/backup_backup/backup_tags.py index ec163a0..481a72e 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_tags.py +++ b/mysql_autoxtrabackup/backup_backup/backup_tags.py @@ -4,7 +4,7 @@ from datetime import datetime from typing import Optional -from mysql_autoxtrabackup.backup_backup import BackupBuilderChecker +from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker from mysql_autoxtrabackup.utils import helpers logger = logging.getLogger(__name__) diff --git a/mysql_autoxtrabackup/backup_prepare/__init__.py 
b/mysql_autoxtrabackup/backup_prepare/__init__.py index 876d357..8621a3e 100644 --- a/mysql_autoxtrabackup/backup_prepare/__init__.py +++ b/mysql_autoxtrabackup/backup_prepare/__init__.py @@ -1,3 +1,2 @@ -from .copy_back import CopyBack as CopyBack from .prepare import Prepare as Prepare from .prepare_builder import BackupPrepareBuilderChecker as BackupPrepareBuilderChecker diff --git a/mysql_autoxtrabackup/backup_prepare/copy_back.py b/mysql_autoxtrabackup/backup_prepare/copy_back.py deleted file mode 100644 index 8e6ca20..0000000 --- a/mysql_autoxtrabackup/backup_prepare/copy_back.py +++ /dev/null @@ -1,160 +0,0 @@ -import logging -import os -import shutil -from typing import Optional, Union - -from mysql_autoxtrabackup.general_conf import path_config -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils import helpers - -logger = logging.getLogger(__name__) - - -class CopyBack: - def __init__(self, config: str = path_config.config_path_file) -> None: - self.conf = config - options_obj = GeneralClass(config=self.conf) - self.command_options = options_obj.command_options - self.mysql_options = options_obj.backup_options - self.backup_options = options_obj.backup_options - - def shutdown_mysql(self) -> Union[None, bool, Exception]: - # Shut Down MySQL - logger.info("Shutting Down MySQL server:") - args = self.command_options.get("stop_mysql_command") - return ProcessRunner.run_command(args) - - def move_to_tmp_dir(self) -> None: - try: - shutil.move( - str(self.mysql_options.get("data_dir")), - str(self.backup_options.get("tmp_dir")), - ) - logger.info( - "Moved data_dir to {} ...".format(self.backup_options.get("tmp_dir")) - ) - except shutil.Error as err: - logger.error("Error occurred while moving data_dir") - logger.error(err) - raise RuntimeError(err) - - def create_empty_data_dir(self) -> Union[None, bool, Exception]: - logger.info("Creating an empty data directory ...") - makedir = "mkdir {}".format(self.mysql_options.get("data_dir")) - return ProcessRunner.run_command(makedir) - - def move_data_dir(self) -> bool: - # Move data_dir to new directory - tmp_dir = self.backup_options.get("tmp_dir") - logger.info("Moving MySQL data_dir to {}".format(tmp_dir)) - if os.path.isdir(str(self.backup_options.get("tmp_dir"))): - rmdir_ = "rm -rf {}".format(tmp_dir) - ProcessRunner.run_command(rmdir_) - self.move_to_tmp_dir() - self.create_empty_data_dir() - return True - - def run_xtra_copyback(self, data_dir: Optional[str] = None) -> Optional[bool]: - # Running Xtrabackup with --copy-back option - copy_back = "{} --copy-back {} --target-dir={}/{} --data_dir={}".format( - self.backup_options.get("backup_tool"), - self.backup_options.get("xtra_options"), - self.backup_options.get("full_dir"), - helpers.get_latest_dir_name(str(self.backup_options.get("full_dir"))), - self.mysql_options.get("data_dir") if data_dir is None else data_dir, - ) - return ProcessRunner.run_command(copy_back) - - def giving_chown(self, data_dir: Optional[str] = None) -> Optional[bool]: - # Changing owner of data_dir to given user:group - give_chown = "{} {}".format( - self.command_options.get("chown_command"), - self.mysql_options.get("data_dir") if data_dir is None else data_dir, - ) - return ProcessRunner.run_command(give_chown) - - def start_mysql_func( - self, start_tool: Optional[str] = None, options: Optional[str] = None - ) -> Union[None, bool, Exception]: - # Starting MySQL - 
logger.info("Starting MySQL server: ") - args = ( - self.command_options.get("start_mysql_command") - if start_tool is None - else start_tool - ) - start_command = "{} {}".format(args, options) if options is not None else args - return ProcessRunner.run_command(start_command) - - @staticmethod - def check_if_backup_prepared( - full_dir: Optional[str], full_backup_file: Optional[str] - ) -> Optional[bool]: - """ - This method is for checking if the backup can be copied-back. - It is going to check xtrabackup_checkpoints file inside backup directory for backup_type column. - backup_type column must be equal to 'full-prepared' - :return: True if backup is already prepared; RuntimeError if it is not. - """ - with open( - "{}/{}/xtrabackup_checkpoints".format(full_dir, full_backup_file), "r" - ) as xchk_file: - # This thing seems to be complicated bu it is not: - # Trying to get 'full-prepared' from ['backup_type ', ' full-prepared\n'] - if ( - xchk_file.readline().split("=")[1].strip("\n").lstrip() - == "full-prepared" - ): - return True - raise RuntimeError( - "This full backup is not fully prepared, not doing copy-back!" - ) - - def copy( - self, options: Optional[str] = None, data_dir: Optional[str] = None - ) -> bool: - """ - Function for running: - xtrabackup --copy-back - giving chown to data_dir - starting mysql - :return: True if succeeded. Error if failed - """ - logger.info("Copying Back Already Prepared Final Backup:") - if ( - len( - os.listdir( - str(self.mysql_options.get("data_dir")) - if data_dir is None - else data_dir - ) - ) - > 0 - ): - logger.info("MySQL data_dir is not empty!") - return False - else: - self.run_xtra_copyback(data_dir=data_dir) - self.giving_chown(data_dir=data_dir) - self.start_mysql_func(options=options) - return True - - def copy_back_action(self, options: Optional[str] = None) -> Optional[bool]: - """ - Function for complete recover/copy-back actions - :return: True if succeeded. Error if failed. - """ - try: - self.check_if_backup_prepared( - str(self.backup_options.get("full_dir")), - helpers.get_latest_dir_name(str(self.backup_options.get("full_dir"))), - ) - self.shutdown_mysql() - if self.move_data_dir() and self.copy(options=options): - logger.info("All data copied back successfully. 
") - logger.info("Your MySQL server is UP again") - return True - except Exception as err: - logger.error("{}: {}".format(type(err).__name__, err)) - return None diff --git a/mysql_autoxtrabackup/backup_prepare/prepare.py b/mysql_autoxtrabackup/backup_prepare/prepare.py index 8d64efb..63ae021 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare.py +++ b/mysql_autoxtrabackup/backup_prepare/prepare.py @@ -1,62 +1,46 @@ import logging import os import time -from typing import Optional, Union +from dataclasses import dataclass +from typing import List, Optional, Tuple -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.backup_prepare.copy_back import CopyBack from mysql_autoxtrabackup.backup_prepare.prepare_builder import ( BackupPrepareBuilderChecker, ) -from mysql_autoxtrabackup.general_conf import path_config +from mysql_autoxtrabackup.general_conf.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import helpers logger = logging.getLogger(__name__) +@dataclass class Prepare: - def __init__( - self, - config: str = path_config.config_path_file, - dry_run: Optional[bool] = None, - tag: Optional[str] = None, - ) -> None: - self.conf = config - self.dry = dry_run - self.tag = tag - self.prepare_options = BackupPrepareBuilderChecker( - config=self.conf, dry_run=self.dry - ) - # If prepare_tool option enabled in config, make backup_tool to use this. - # The reason is maybe you have backup taken with 2.4 version but your are going to prepare - # with newer version. It is somehow unlike to do this but still. - if self.prepare_options.backup_options.get("prepare_tool"): - self.prepare_options.backup_options["backup_tool"] = str( - self.prepare_options.backup_options.get("prepare_tool") - ) + options: GeneralClass + dry_run: Optional[bool] = None + tag: Optional[str] = None + + def __post_init__(self): + + self.prepare_options = BackupPrepareBuilderChecker(options=self.options) if self.tag and not os.path.isfile( - "{}/backup_tags.txt".format( - self.prepare_options.backup_options.get("backup_dir") - ) + f'{self.prepare_options.backup_options.get("backup_dir")}/backup_tags.txt' ): raise RuntimeError( "Could not find backup_tags.txt inside backup directory. " "Please run without --tag option" ) - def run_prepare_command( - self, base_dir: Optional[str], actual_dir: Optional[str], cmd: Optional[str] - ) -> Optional[bool]: - # Decrypt backup - self.prepare_options.decrypt_backup(base_dir, actual_dir) + self.recent_bck = helpers.get_latest_dir_name( + str(self.prepare_options.backup_options.get("full_dir")) + ) + self.inc_dir = str(self.prepare_options.backup_options.get("inc_dir")) - # Decompress backup - self.prepare_options.decompress_backup(base_dir, actual_dir) + def run_prepare_command(self, cmd: Optional[str]) -> Optional[bool]: - logger.info("Running prepare command -> {}".format(cmd)) - if self.dry: + logger.info(f"Running prepare command -> {cmd}") + if self.dry_run: return True return ProcessRunner.run_command(cmd) @@ -66,104 +50,22 @@ def prepare_with_tags(self) -> Optional[bool]: backup_dir=str(self.prepare_options.backup_options.get("backup_dir")), tag_name=self.tag, ) - recent_bck = helpers.get_latest_dir_name( - str(self.prepare_options.backup_options.get("full_dir")) - ) - # I am not going to initialize this object in Prepare class constructor as I thin there is no need. 
- backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry) - if found_backups[1] == "Full": # type: ignore - if recent_bck: - logger.info("- - - - Preparing Full Backup - - - -") - - # Extracting/decrypting from streamed backup and extra checks goes here. - backup_builder.extract_decrypt_from_stream_backup( - recent_full_bck=recent_bck - ) - - # Prepare command - backup_prepare_cmd = self.prepare_options.prepare_command_builder( - full_backup=recent_bck - ) - - self.run_prepare_command( - str(self.prepare_options.backup_options.get("full_dir")), - recent_bck, - backup_prepare_cmd, - ) - - elif found_backups[1] == "Inc": # type: ignore - if not os.listdir(str(self.prepare_options.backup_options.get("inc_dir"))): - logger.info( - "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -" - ) - self.prepare_only_full_backup() - else: - logger.info("- - - - You have Incremental backups. - - - -") - if self.prepare_only_full_backup(): - logger.info("Preparing Incs: ") - list_of_dir = helpers.sorted_ls( - str(self.prepare_options.backup_options.get("inc_dir")) - ) - # Find the index number inside all list for backup(which was found via tag) - index_num = list_of_dir.index(found_backups[0]) # type: ignore - # Limit the iteration until this found backup - for dir_ in list_of_dir[: index_num + 1]: - apply_log_only = None - if dir_ != found_backups[0]: # type: ignore - logger.info( - "Preparing inc backups in sequence. inc backup dir/name is {}".format( - dir_ - ) - ) - apply_log_only = True - - else: - logger.info( - "Preparing last incremental backup, inc backup dir/name is {}".format( - dir_ - ) - ) - - # Extracting/decrypting from streamed backup and extra checks goes here - backup_builder.extract_decrypt_from_stream_backup( - recent_inc_bck=dir_, flag=True - ) - - # Prepare command - backup_prepare_cmd = ( - self.prepare_options.prepare_command_builder( - full_backup=recent_bck, - incremental=dir_, - apply_log_only=apply_log_only, - ) - ) - - self.run_prepare_command( - str(self.prepare_options.backup_options.get("inc_dir")), - dir_, - backup_prepare_cmd, - ) + self._prepare_and_run_using_tags(found_backups) logger.info("- - - - The end of the Prepare Stage. 
- - - -") return True - def prepare_only_full_backup(self) -> Union[None, bool, Exception]: - recent_bck = helpers.get_latest_dir_name( - str(self.prepare_options.backup_options.get("full_dir")) + def prepare_run_incremental_backups(self, found_backups: Optional[Tuple[str, str]]) -> None: + logger.info("Preparing Incs: ") + self._iterate_and_run_found_backups( + found_backups, helpers.sorted_ls(self.inc_dir) ) - backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry) - if recent_bck: - apply_log_only = None - if not os.listdir(str(self.prepare_options.backup_options.get("inc_dir"))): - logger.info("- - - - Preparing Full Backup - - - -") - self.prepare_options.untar_backup(recent_bck=recent_bck) - # Extracting/decrypting from streamed backup and extra checks goes here - backup_builder.extract_decrypt_from_stream_backup( - recent_full_bck=recent_bck - ) - else: + def prepare_only_full_backup(self) -> Optional[bool]: + if self.recent_bck: + apply_log_only = None + if os.listdir(self.inc_dir): logger.info("- - - - Preparing Full backup for incrementals - - - -") logger.info( "- - - - Final prepare,will occur after preparing all inc backups - - - -" @@ -171,121 +73,120 @@ def prepare_only_full_backup(self) -> Union[None, bool, Exception]: time.sleep(3) apply_log_only = True - # Prepare command - backup_prepare_cmd = self.prepare_options.prepare_command_builder( - full_backup=recent_bck, apply_log_only=apply_log_only + self._prepare_and_run( + recent_bck=self.recent_bck, apply_log_only=apply_log_only ) - self.run_prepare_command( - str(self.prepare_options.backup_options.get("full_dir")), - recent_bck, - backup_prepare_cmd, - ) return True - def prepare_inc_full_backups(self) -> Union[None, bool, Exception]: - backup_builder = BackupBuilderChecker(self.conf, dry_run=self.dry) - if not os.listdir(str(self.prepare_options.backup_options.get("inc_dir"))): + def prepare_inc_full_backups(self) -> Optional[bool]: + if not os.listdir(self.inc_dir): logger.info( "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -" ) return self.prepare_only_full_backup() else: logger.info("- - - - You have Incremental backups. - - - -") - recent_bck = helpers.get_latest_dir_name( - str(self.prepare_options.backup_options.get("full_dir")) - ) if self.prepare_only_full_backup(): logger.info("Preparing Incs: ") - list_of_dir = sorted( - os.listdir(str(self.prepare_options.backup_options.get("inc_dir"))) - ) - for inc_backup_dir in list_of_dir: - apply_log_only = None - if inc_backup_dir != max( - os.listdir( - str(self.prepare_options.backup_options.get("inc_dir")) - ) - ): - logger.info( - "Preparing Incremental backups in sequence. 
Incremental backup dir/name is {}".format( - inc_backup_dir - ) - ) - apply_log_only = True - else: - logger.info( - "Preparing last Incremental backup, inc backup dir/name is {}".format( - inc_backup_dir - ) - ) - - # Extracting/decrypting from streamed backup and extra checks goes here - backup_builder.extract_decrypt_from_stream_backup( - recent_inc_bck=inc_backup_dir, flag=True - ) - # Prepare command - backup_prepare_cmd = self.prepare_options.prepare_command_builder( - full_backup=recent_bck, - incremental=inc_backup_dir, - apply_log_only=apply_log_only, - ) - - self.run_prepare_command( - str(self.prepare_options.backup_options.get("inc_dir")), - inc_backup_dir, - backup_prepare_cmd, - ) + list_of_dir = sorted(os.listdir(self.inc_dir)) + self._iterate_and_run_sequential_increment_backups(list_of_dir) logger.info("- - - - The end of the Prepare Stage. - - - -") return True + def _prepare_and_run_using_tags( + self, found_backups: Optional[Tuple[str, str]] + ) -> None: + if found_backups[1] == "Full": + if self.recent_bck: + logger.info("- - - - Preparing Full Backup - - - -") + self._prepare_and_run(recent_bck=self.recent_bck) + + elif found_backups[1] == "Inc": + if not os.listdir(self.inc_dir): + logger.info( + "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -" + ) + self.prepare_only_full_backup() + else: + logger.info("- - - - You have Incremental backups. - - - -") + if self.prepare_only_full_backup(): + self.prepare_run_incremental_backups(found_backups) + + def _iterate_and_run_sequential_increment_backups(self, dir_: List[str]) -> None: + for inc_backup_dir in dir_: + apply_log_only = None + if inc_backup_dir != max(os.listdir(self.inc_dir)): + logger.info( + f"Preparing Incremental backups in sequence. Incremental backup dir/name is {inc_backup_dir}" + ) + + apply_log_only = True + else: + logger.info( + f"Preparing last Incremental backup, inc backup dir/name is {inc_backup_dir}" + ) + + self._prepare_and_run( + recent_bck=self.recent_bck, + dir_=inc_backup_dir, + apply_log_only=apply_log_only, + ) + + def _prepare_and_run( + self, + recent_bck: str, + apply_log_only: Optional[bool] = None, + dir_: Optional[str] = None, + ) -> None: + # Prepare command + backup_prepare_cmd = self.prepare_options.prepare_command_builder( + full_backup=recent_bck, + incremental=dir_, + apply_log_only=apply_log_only, + ) + self.run_prepare_command(backup_prepare_cmd) + + def _iterate_and_run_found_backups( + self, found_backups: Optional[Tuple[str, str]], list_of_dir: List[str] + ) -> None: + # Limit the iteration until this found backup + for dir_ in list_of_dir[: list_of_dir.index(found_backups[0]) + 1]: + apply_log_only = None + if dir_ != found_backups[0]: + logger.info( + f"Preparing inc backups in sequence. inc backup dir/name is {dir_}" + ) + apply_log_only = True + else: + logger.info( + f"Preparing last incremental backup, inc backup dir/name is {dir_}" + ) + + self._prepare_and_run( + recent_bck=self.recent_bck, apply_log_only=apply_log_only, dir_=dir_ + ) + def prepare_backup_and_copy_back(self) -> None: - copy_back_obj = CopyBack(config=self.conf) - # Recovering/Copying Back Prepared Backup x = "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" print(x) print("") print("Preparing full/inc backups!") - print("What do you want to do?") - print( - "1. Prepare Backups and keep for future usage. NOTE('Once Prepared Backups Can not be prepared Again')" - ) - print("2. 
Prepare Backups and restore/recover/copy-back immediately") - print("3. Just copy-back previously prepared backups") - - prepare = int(input("Please Choose one of options and type 1 or 2 or 3: ")) + answer = input("Are you sure? [Y/n]: ") print("") print(x) time.sleep(3) - if prepare == 1: + if answer.lower() == "y": if not self.tag: self.prepare_inc_full_backups() else: logger.info("Backup tag will be used to prepare backups") self.prepare_with_tags() - elif prepare == 2: - if not self.tag: - self.prepare_inc_full_backups() - else: - self.prepare_with_tags() - if not self.dry: - copy_back_obj.copy_back_action() - else: - logger.critical( - "Dry run is not implemented for copy-back/recovery actions!" - ) - elif prepare == 3: - if not self.dry: - copy_back_obj.copy_back_action() - else: - logger.critical( - "Dry run is not implemented for copy-back/recovery actions!" - ) else: - print("Please type 1 or 2 or 3 and nothing more!") + print("Please type Y or n!") diff --git a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py b/mysql_autoxtrabackup/backup_prepare/prepare_builder.py index 1b59479..4971b9a 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py +++ b/mysql_autoxtrabackup/backup_prepare/prepare_builder.py @@ -1,26 +1,20 @@ import logging import os +from dataclasses import dataclass from typing import Optional, Tuple -from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) +@dataclass class BackupPrepareBuilderChecker: - def __init__( - self, config: str = path_config.config_path_file, dry_run: Optional[bool] = None - ) -> None: - self.conf = config - self.dry = dry_run - options_obj = GeneralClass(config=self.conf) - self.backup_options = options_obj.backup_options - self.compression_options = options_obj.compression_options - self.encryption_options = options_obj.encryption_options - self.xbstream_options = options_obj.xbstream_options + options: GeneralClass + + def __post_init__(self): + self.backup_options = self.options.backup_options @staticmethod def parse_backup_tags( @@ -28,78 +22,20 @@ def parse_backup_tags( ) -> Optional[Tuple[str, str]]: """ Static Method for returning the backup directory name and backup type - :param backup_dir: The backup directory path - :param tag_name: The tag name to search + :param: backup_dir: The backup directory path + :param: tag_name: The tag name to search :return: Tuple of (backup directory, backup type) (2017-11-09_19-37-16, Full). :raises: RuntimeError if there is no such tag inside backup_tags.txt """ - if os.path.isfile("{}/backup_tags.txt".format(backup_dir)): - with open("{}/backup_tags.txt".format(backup_dir), "r") as backup_tags: + if os.path.isfile(f"{backup_dir}/backup_tags.txt"): + with open(f"{backup_dir}/backup_tags.txt", "r") as backup_tags: f = backup_tags.readlines() for i in f: split_ = i.split("\t") if tag_name == split_[-1].rstrip("'\n\r").lstrip("'"): return split_[0], split_[1] - else: - raise RuntimeError("There is no such tag for backups") - return None - - def decompress_backup( - self, path: Optional[str], dir_name: Optional[str] - ) -> Optional[bool]: - """ - Method for backup decompression. - Check if decompression enabled, if it is, decompress - backup prior prepare. - :param path: the basedir path i.e full backup dir or incremental dir. 
- :param dir_name: the exact name backup folder(likely timestamped folder name). - :return: None or RuntimeError - """ - if self.compression_options.get("decompress"): - # The base decompression command - dec_cmd = "{} --decompress={} --target-dir={}/{}".format( - self.backup_options.get("backup_tool"), - self.compression_options.get("decompress"), - path, - dir_name, - ) - if self.compression_options.get("remove_original_comp"): - dec_cmd += " --remove-original" - - logger.info("Trying to decompress backup") - logger.info("Running decompress command -> {}".format(dec_cmd)) - if self.dry: - return None - return ProcessRunner.run_command(dec_cmd) - return None - - def decrypt_backup( - self, path: Optional[str], dir_name: Optional[str] - ) -> Optional[bool]: - """ - Method for decrypting backups. - If you use crypted backups it should be decrypted prior preparing. - :param path: the basedir path i.e full backup dir or incremental dir. - :param dir_name: the exact name backup folder(likely timestamped folder name). - :return: None or RuntimeError - """ - if self.encryption_options.get("decrypt"): - # The base decryption command - decr_cmd = "{} --decrypt={} --encrypt-key={} --target-dir={}/{}".format( - self.backup_options.get("backup_tool"), - self.encryption_options.get("decrypt"), - self.encryption_options.get("encrypt_key"), - path, - dir_name, - ) - if self.encryption_options.get("remove_original_comp"): - decr_cmd += " --remove-original" - logger.info("Trying to decrypt backup") - logger.info("Running decrypt command -> {}".format(decr_cmd)) - if self.dry: - return None - return ProcessRunner.run_command(decr_cmd) + raise RuntimeError("There is no such tag for backups") return None def prepare_command_builder( @@ -110,50 +46,32 @@ def prepare_command_builder( ) -> str: """ Method for building prepare command as it is repeated several times. 
- :param full_backup: The full backup directory name - :param incremental: The incremental backup directory name - :param apply_log_only: The flag to add --apply-log-only + :param: full_backup: The full backup directory name + :param: incremental: The incremental backup directory name + :param: apply_log_only: The flag to add --apply-log-only :return: The prepare command string """ # Base prepare command - xtrabackup_prepare_cmd = "{} --prepare --target-dir={}/{}".format( - self.backup_options.get("backup_tool"), - self.backup_options.get("full_dir"), - full_backup, + xtrabackup_prepare_cmd = ( + f'{self.backup_options.get("backup_tool")} --prepare ' + f'--target-dir={self.backup_options.get("full_dir")}/{full_backup}' ) - if incremental: - xtrabackup_prepare_cmd += " --incremental-dir={}/{}".format( - self.backup_options.get("inc_dir"), incremental - ) - if apply_log_only: - xtrabackup_prepare_cmd += " --apply-log-only" - - # Checking if extra options were passed: - if self.backup_options.get("xtra_options"): - xtrabackup_prepare_cmd += " {}".format( - self.backup_options.get("xtra_options") - ) - - # Checking of extra prepare options were passed: - if self.backup_options.get("xtra_prepare_options"): - xtrabackup_prepare_cmd += " {}".format( - self.backup_options.get("xtra_prepare_options") - ) + xtrabackup_prepare_cmd += ( + f" --incremental-dir={self.backup_options.get('inc_dir')}/{incremental}" + if incremental + else "" + ) - return xtrabackup_prepare_cmd + xtrabackup_prepare_cmd += ( + f" {self.backup_options.get('xtra_options')}" + if self.backup_options.get("xtra_options") + else "" + ) + xtrabackup_prepare_cmd += ( + f" {self.backup_options.get('xtra_prepare_options')}" + if self.backup_options.get("xtra_prepare_options") + else "" + ) - def untar_backup(self, recent_bck: str) -> Optional[bool]: - if self.xbstream_options.get("stream") == "tar": - full_dir = self.backup_options.get("full_dir") - untar_cmd = "tar -xf {}/{}/full_backup.tar -C {}/{}".format( - full_dir, recent_bck, full_dir, recent_bck - ) - logger.info( - "The following tar command will be executed -> {}".format(untar_cmd) - ) - if self.dry == 0 and os.path.isfile( - "{}/{}/full_backup.tar".format(full_dir, recent_bck) - ): - return ProcessRunner.run_command(untar_cmd) - return None + return f"{xtrabackup_prepare_cmd} --apply-log-only" if apply_log_only else "" diff --git a/mysql_autoxtrabackup/general_conf/__init__.py b/mysql_autoxtrabackup/general_conf/__init__.py index 6547a2a..e69de29 100644 --- a/mysql_autoxtrabackup/general_conf/__init__.py +++ b/mysql_autoxtrabackup/general_conf/__init__.py @@ -1,4 +0,0 @@ -from . import path_config as path_config -from .check_env import CheckEnv as CheckEnv -from .generalops import GeneralClass as GeneralClass -from .generate_default_conf import GenerateDefaultConfig as GenerateDefaultConfig diff --git a/mysql_autoxtrabackup/general_conf/check_env.py b/mysql_autoxtrabackup/general_conf/check_env.py index 3b632c4..6b6a73e 100644 --- a/mysql_autoxtrabackup/general_conf/check_env.py +++ b/mysql_autoxtrabackup/general_conf/check_env.py @@ -23,7 +23,6 @@ def __init__( options = GeneralClass(config=self.conf) self.backup_options = options.backup_options self.mysql_options = options.mysql_options - self.archive_options = options.backup_archive_options if full_dir: self.backup_options["full_dir"] = full_dir if inc_dir: @@ -32,7 +31,7 @@ def __init__( def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: """ Method for checking if MySQL server is up or not. 
- :param options: Passed options to connect to MySQL server if None, then going to get it from conf file + :param: options: Passed options to connect to MySQL server if None, then going to get it from conf file :return: True on success, raise RuntimeError on error. """ if not options: @@ -71,7 +70,7 @@ def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: # filter out password from argument list filtered_args = re.sub("--password='?\w+'?", "--password='*'", status_args) - logger.info("Running mysqladmin command -> {}".format(filtered_args)) + logger.info(f"Running mysqladmin command -> {filtered_args}") return ProcessRunner.run_command(status_args) @@ -98,11 +97,11 @@ def check_mysql_mysql(self) -> Union[bool, Exception]: """ mysql = self.mysql_options.get("mysql") if os.path.exists(str(mysql)): - logger.info("OK: {} exists".format(mysql)) + logger.info(f"OK: {mysql} exists") return True - logger.error("FAILED: {} doest NOT exist".format(mysql)) - raise RuntimeError("FAILED: {} doest NOT exist".format(mysql)) + logger.error(f"FAILED: {mysql} doest NOT exist") + raise RuntimeError(f"FAILED: {mysql} doest NOT exist") def check_mysql_mysqladmin(self) -> Union[bool, Exception]: """ @@ -111,11 +110,11 @@ def check_mysql_mysqladmin(self) -> Union[bool, Exception]: """ mysqladmin = self.mysql_options.get("mysqladmin") if os.path.exists(str(mysqladmin)): - logger.info("OK: {} exists".format(mysqladmin)) + logger.info(f"OK: {mysqladmin} exists") return True - logger.error("FAILED: {} does NOT exist".format(mysqladmin)) - raise RuntimeError("FAILED: {} does NOT exist".format(mysqladmin)) + logger.error(f"FAILED: {mysqladmin} does NOT exist") + raise RuntimeError(f"FAILED: {mysqladmin} does NOT exist") def check_mysql_backuptool(self) -> Union[bool, Exception]: """ @@ -141,22 +140,6 @@ def check_mysql_backup_dir(self) -> Optional[bool]: return create_directory(str(self.backup_options.get("backup_dir"))) - def check_mysql_archive_dir(self) -> Optional[bool]: - """ - Check for archive directory. - If archive_dir is given in config file and if it does not exist, try to create. - :return: True on success. RuntimeError on failure. - """ - if not self.archive_options.get("archive_dir"): - logger.info("Skipping check as this option not specified in config file...") - return True - - if os.path.exists(str(self.archive_options.get("archive_dir"))): - logger.info("OK: Archive folder directory exists") - return True - - return create_directory(str(self.archive_options.get("archive_dir"))) - def check_mysql_full_backup_dir(self) -> Optional[bool]: """ Check full backup directory path. @@ -195,11 +178,10 @@ def check_all_env(self) -> Union[bool, Exception]: self.check_mysql_backup_dir() self.check_mysql_full_backup_dir() self.check_mysql_inc_backup_dir() - self.check_mysql_archive_dir() except Exception as err: logger.critical("FAILED: Check status") logger.error(err) - raise RuntimeError("FAILED: Check status") + raise RuntimeError("FAILED: Check status") from err else: logger.info("OK: Check status") return True diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index 22b883d..559837c 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -5,12 +5,15 @@ import humanfriendly # type: ignore -from . 
import path_config, GenerateDefaultConfig +from mysql_autoxtrabackup.general_conf import path_config +from mysql_autoxtrabackup.general_conf.generate_default_conf import ( + GenerateDefaultConfig, +) logger = logging.getLogger(__name__) -def _create_default_config(config: str, missing: str) ->None: +def _create_default_config(config: str, missing: str) -> None: logger.critical(f"Missing config file : {missing}") logger.warning("Creating default config file...") GenerateDefaultConfig(config=config).generate_config_file() @@ -62,8 +65,11 @@ def backup_options(self) -> Dict[str, Union[str, float]]: "full_dir": self.con.get(section, "backup_dir") + "/full", "inc_dir": self.con.get(section, "backup_dir") + "/inc", "backup_tool": self.con.get(section, "backup_tool"), - "xtra_backup": self.con.get(section, "xtra_backup", fallback=None), # type: ignore - "xtra_options": self.con.get(section, "xtra_options", fallback=None), # type: ignore + "xtra_backup": self.con.get(section, "xtra_backup", fallback=None), + "xtra_options": self.con.get(section, "xtra_options", fallback=None), + "xtra_prepare_options": self.con.get( + section, "xtra_prepare_options", fallback=None + ), "full_backup_interval": humanfriendly.parse_timespan( self.con.get(section, "full_backup_interval", fallback="86400.0") ), diff --git a/mysql_autoxtrabackup/general_conf/generate_default_conf.py b/mysql_autoxtrabackup/general_conf/generate_default_conf.py index 0a21861..3ddb9c0 100644 --- a/mysql_autoxtrabackup/general_conf/generate_default_conf.py +++ b/mysql_autoxtrabackup/general_conf/generate_default_conf.py @@ -55,7 +55,8 @@ def generate_config_file(self) -> None: config.set(section3, "pid_runtime_warning", "2 Hours") config.set(section3, "backup_dir", join(self.home, "XB_TEST/backup_dir")) config.set(section3, "backup_tool", "/usr/bin/xtrabackup") - config.set(section3, "#xtra_options", "--binlog-info=ON --galera-info") + config.set(section3, "xtra_options", "--no-server-version-check") + config.set(section3, "#xtra_prepare_options", "") config.set(section3, "#full_backup_interval", "1 day") config.write(cfg_file) diff --git a/mysql_autoxtrabackup/process_runner/process_runner.py b/mysql_autoxtrabackup/process_runner/process_runner.py index 66edea4..7eb64c5 100644 --- a/mysql_autoxtrabackup/process_runner/process_runner.py +++ b/mysql_autoxtrabackup/process_runner/process_runner.py @@ -3,8 +3,8 @@ import re import shlex import subprocess -import typing from subprocess import PIPE, STDOUT +from typing import List, Optional from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass @@ -34,10 +34,10 @@ def __init__(self, config: str = path_config.config_path_file) -> None: ] @property - def xtrabackup_history_log(self) -> typing.List[typing.List[str]]: + def xtrabackup_history_log(self) -> List[List[str]]: return self._xtrabackup_history_log - def run_command(self, command: typing.Optional[str]) -> bool: + def run_command(self, command: Optional[str]) -> bool: """ executes a prepared command, enables real-time console & log output. 
@@ -51,7 +51,7 @@ def run_command(self, command: typing.Optional[str]) -> bool: # filter out password from argument list, print command to execute filtered_command = re.sub("--password='?\w+'?", "--password='*'", command) # type: ignore - logger.info("SUBPROCESS STARTING: {}".format(str(filtered_command))) + logger.info(f"SUBPROCESS STARTING: {str(filtered_command)}") subprocess_args = self.command_to_args(command_str=command) # start the command subprocess cmd_start = datetime.datetime.now() @@ -65,20 +65,19 @@ def run_command(self, command: typing.Optional[str]) -> bool: ) ) logger.info( - "SUBPROCESS {} COMPLETED with exit code: {}".format( - subprocess_args[0], process.returncode - ) + f"SUBPROCESS {subprocess_args[0]} COMPLETED with exit code: {process.returncode}" ) + cmd_end = datetime.datetime.now() self.summarize_process(subprocess_args, cmd_start, cmd_end, process.returncode) # return True or False. if process.returncode == 0: return True else: - raise ChildProcessError("SUBPROCESS FAILED! >> {}".format(filtered_command)) + raise ChildProcessError(f"SUBPROCESS FAILED! >> {filtered_command}") @staticmethod - def command_to_args(command_str: typing.Optional[str]) -> typing.List[str]: + def command_to_args(command_str: Optional[str]) -> List[str]: """ convert a string bash command to an arguments list, to use with subprocess @@ -100,7 +99,7 @@ def command_to_args(command_str: typing.Optional[str]) -> typing.List[str]: args = shlex.split(command_str) else: raise TypeError - logger.debug("subprocess args are: {}".format(args)) + logger.debug(f"subprocess args are: {args}") return args @staticmethod @@ -124,7 +123,7 @@ def represent_duration( def summarize_process( self, - args: typing.List[str], + args: List[str], cmd_start: datetime.datetime, cmd_end: datetime.datetime, return_code: int, @@ -138,12 +137,6 @@ def summarize_process( xtrabackup_function = "prepare" elif "--prepare" in args: xtrabackup_function = "prepare/apply-log-only" - if not xtrabackup_function: - for arg in args: - if re.search(r"(--decrypt)=?[\w]*", arg): - xtrabackup_function = "decrypt" - elif re.search(r"(--decompress)=?[\w]*", arg): - xtrabackup_function = "decompress" if cmd_root != "pigz": # this will be just the pigz --version call diff --git a/mysql_autoxtrabackup/utils/mysql_cli.py b/mysql_autoxtrabackup/utils/mysql_cli.py index 791006d..4b9bf4d 100644 --- a/mysql_autoxtrabackup/utils/mysql_cli.py +++ b/mysql_autoxtrabackup/utils/mysql_cli.py @@ -1,42 +1,36 @@ # This file will consist of some wrapper for using MySQL # It is mainly used for preparing and calling mysql cli import logging +from dataclasses import dataclass -from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) +@dataclass class MySQLClientHelper: - def __init__(self, config: str = path_config.config_path_file): - self.conf = config - # Using Composition instead of Inheritance here - options_obj = GeneralClass(config=self.conf) - self.mysql_options = options_obj.mysql_options + options: GeneralClass + + def __post_init__(self): + self.mysql_options = self.options.mysql_options def create_mysql_client_command(self, statement: str) -> str: - command_connection = "{} --defaults-file={} -u{} --password={}".format( - self.mysql_options.get("mysql"), - self.mysql_options.get("mycnf"), - self.mysql_options.get("mysql_user"), - 
self.mysql_options.get("mysql_password"), + command_connection = ( + f'{self.mysql_options.get("mysql")} --defaults-file={self.mysql_options.get("mycnf")} ' + f'-u{self.mysql_options.get("mysql_user")} ' + f'--password={self.mysql_options.get("mysql_password")}' + ) + + command_connection += ( + f" --socket={self.mysql_options.get('mysql_socket')}" + if self.mysql_options.get("mysql_socket") + else f" --host={self.mysql_options.get('mysql_host')} " + f" --port={self.mysql_options.get('mysql_port')}" ) - command_execute = ' -e "{}"' - if self.mysql_options.get("mysql_socket"): - command_connection += " --socket={}" - new_command = command_connection.format( - self.mysql_options.get("mysql_socket") - ) - else: - command_connection += " --host={} --port={}" - new_command = command_connection.format( - self.mysql_options.get("mysql_host"), - self.mysql_options.get("mysql_port"), - ) - new_command += command_execute - return new_command.format(statement) + + return f"{command_connection} -e '{statement}'" def mysql_run_command(self, statement: str) -> bool: command = self.create_mysql_client_command(statement=statement) diff --git a/pyproject.toml b/pyproject.toml index fde1578..8d52c15 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,9 @@ dev = [ "flake8 >=4.0.1", "pre-commit >=2.17.0" ] +test = [ + "pytest >= 7.1.1" +] [project.urls] Documentation = "https://autoxtrabackup.azepug.az/" diff --git a/tests/Dockerfile b/tests/Dockerfile index c5b80ba..56e396d 100644 --- a/tests/Dockerfile +++ b/tests/Dockerfile @@ -1,6 +1,10 @@ FROM mysql:8.0-oracle USER root WORKDIR /opt +COPY . /opt/MySQL-AutoXtraBackup + +ENV MYSQL_ROOT_PASSWORD=12345 + RUN microdnf install yum RUN yum install -y git RUN yum install -y python38 @@ -17,29 +21,12 @@ RUN cd /opt && \ git clone https://github.com/sstephenson/bats.git && \ cd bats && \ ./install.sh /usr/local -ARG GIT_BRANCH_NAME EXPOSE 8080 -RUN cd /opt && \ - git clone -b $GIT_BRANCH_NAME https://github.com/ShahriyarR/MySQL-AutoXtraBackup.git && \ - cd /opt/MySQL-AutoXtraBackup && \ +RUN cd /opt/MySQL-AutoXtraBackup && \ pip3.8 install -U pip && \ pip3.8 install flit && \ - FLIT_ROOT_INSTALL=1 flit install - -#RUN yum groupinstall -y "Development Tools" -#RUN yum -y install python38-devel.x86_64 -#RUN yum -y install libffi -#RUN yum -y install libffi-devel -#RUN cd /opt/MySQL-AutoXtraBackup/tests && git pull && \ -# pip3.8 install -r requirements.txt - - -WORKDIR /opt/MySQL-AutoXtraBackup -RUN cd /opt/MySQL-AutoXtraBackup && git pull -#RUN pip3.8 install uvicorn -#RUN pip3.8 install fastapi + FLIT_ROOT_INSTALL=1 flit install --symlink -RUN cd /opt/MySQL-AutoXtraBackup/tests && chmod +x entrypoint.sh -ENTRYPOINT ["/opt/MySQL-AutoXtraBackup/tests/entrypoint.sh"] WORKDIR /opt/MySQL-AutoXtraBackup/mysql_autoxtrabackup -CMD ["uvicorn", "api.main:app", "--port", "8080"] +CMD ["mysqld", "--character-set-server=utf8mb4", "--collation-server=utf8mb4_unicode_ci"] +#CMD ["uvicorn", "api.main:app", "--port", "8080"] diff --git a/tests/conftest.py b/tests/conftest.py index 54b0c88..8990742 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,9 +2,17 @@ from fastapi.testclient import TestClient from mysql_autoxtrabackup.api.main import app +from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker +from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags from mysql_autoxtrabackup.backup_backup.backuper import Backup +from mysql_autoxtrabackup.general_conf.path_config import config_path_file +from 
mysql_autoxtrabackup.utils.mysql_cli import MySQLClientHelper -bck_obj = Backup() +builder_obj = BackupBuilderChecker(config=config_path_file, dry_run=None) +tagger = BackupTags(None, builder_obj) +mysql_cli = MySQLClientHelper(config=config_path_file) + +bck_obj = Backup(builder_obj=builder_obj, mysql_cli=mysql_cli, tagger=tagger) client = TestClient(app) diff --git a/tests/entrypoint.sh b/tests/entrypoint.sh deleted file mode 100755 index 18a34ec..0000000 --- a/tests/entrypoint.sh +++ /dev/null @@ -1,210 +0,0 @@ -#!/bin/bash -# Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; version 2 of the License. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -set -e - -echo "[Entrypoint] MySQL Docker Image 8.0.20-1.1.16" -echo "[Entrypoint] Modified by Shako(mysql.az)" -# Fetch value from server config -# We use mysqld --verbose --help instead of my_print_defaults because the -# latter only show values present in config files, and not server defaults -_get_config() { - local conf="$1"; shift - "$@" --verbose --help 2>/dev/null | grep "^$conf" | awk '$1 == "'"$conf"'" { print $2; exit }' -} - -# If command starts with an option, prepend mysqld -# This allows users to add command-line options without -# needing to specify the "mysqld" command -if [ "${1:0:1}" = '-' ]; then - set -- mysqld "$@" -fi - -if [ "$1" = 'mysqld' ]; then - # Test that the server can start. We redirect stdout to /dev/null so - # only the error messages are left. - result=0 - output=$("$@" --validate-config) || result=$? - if [ ! "$result" = "0" ]; then - echo >&2 '[Entrypoint] ERROR: Unable to start MySQL. Please check your configuration.' - echo >&2 "[Entrypoint] $output" - exit 1 - fi - - # Get config - DATADIR="$(_get_config 'datadir' "$@")" - SOCKET="$(_get_config 'socket' "$@")" - - if [ -n "$MYSQL_LOG_CONSOLE" ] || [ -n "console" ]; then - # Don't touch bind-mounted config files - if ! cat /proc/1/mounts | grep "etc/my.cnf"; then - sed -i 's/^log-error=/#&/' /etc/my.cnf - fi - fi - - if [ ! -d "$DATADIR/mysql" ]; then - # If the password variable is a filename we use the contents of the file. We - # read this first to make sure that a proper error is generated for empty files. - if [ -f "$MYSQL_ROOT_PASSWORD" ]; then - MYSQL_ROOT_PASSWORD="$(cat $MYSQL_ROOT_PASSWORD)" - if [ -z "$MYSQL_ROOT_PASSWORD" ]; then - echo >&2 '[Entrypoint] Empty MYSQL_ROOT_PASSWORD file specified.' - exit 1 - fi - fi - if [ -z "$MYSQL_ROOT_PASSWORD" -a -z "$MYSQL_ALLOW_EMPTY_PASSWORD" -a -z "$MYSQL_RANDOM_ROOT_PASSWORD" ]; then - echo >&2 '[Entrypoint] No password option specified for new database.' - echo >&2 '[Entrypoint] A random onetime password will be generated.' 
- MYSQL_RANDOM_ROOT_PASSWORD=true - MYSQL_ONETIME_PASSWORD=true - fi - mkdir -p "$DATADIR" - chown -R mysql:mysql "$DATADIR" - - echo '[Entrypoint] Initializing database' - "$@" --initialize-insecure - echo '[Entrypoint] Database initialized' - - "$@" --daemonize --skip-networking --socket="$SOCKET" - - # To avoid using password on commandline, put it in a temporary file. - # The file is only populated when and if the root password is set. - PASSFILE=$(mktemp -u /var/lib/mysql-files/XXXXXXXXXX) - install /dev/null -m0600 -omysql -gmysql "$PASSFILE" - # Define the client command used throughout the script - # "SET @@SESSION.SQL_LOG_BIN=0;" is required for products like group replication to work properly - mysql=( mysql --defaults-extra-file="$PASSFILE" --protocol=socket -uroot -hlocalhost --socket="$SOCKET" --init-command="SET @@SESSION.SQL_LOG_BIN=0;") - - if [ ! -z "" ]; - then - for i in {30..0}; do - if mysqladmin --socket="$SOCKET" ping &>/dev/null; then - break - fi - echo '[Entrypoint] Waiting for server...' - sleep 1 - done - if [ "$i" = 0 ]; then - echo >&2 '[Entrypoint] Timeout during MySQL init.' - exit 1 - fi - fi - - mysql_tzinfo_to_sql /usr/share/zoneinfo | "${mysql[@]}" mysql - - if [ ! -z "$MYSQL_RANDOM_ROOT_PASSWORD" ]; then - MYSQL_ROOT_PASSWORD="$(pwmake 128)" - echo "[Entrypoint] GENERATED ROOT PASSWORD: $MYSQL_ROOT_PASSWORD" - fi - if [ -z "$MYSQL_ROOT_HOST" ]; then - ROOTCREATE="ALTER USER 'root'@'localhost' IDENTIFIED BY '${MYSQL_ROOT_PASSWORD}';" - else - ROOTCREATE="ALTER USER 'root'@'localhost' IDENTIFIED BY '${MYSQL_ROOT_PASSWORD}'; \ - CREATE USER 'root'@'${MYSQL_ROOT_HOST}' IDENTIFIED BY '${MYSQL_ROOT_PASSWORD}'; \ - GRANT ALL ON *.* TO 'root'@'${MYSQL_ROOT_HOST}' WITH GRANT OPTION ; \ - GRANT PROXY ON ''@'' TO 'root'@'${MYSQL_ROOT_HOST}' WITH GRANT OPTION ;" - fi - "${mysql[@]}" <<-EOSQL - DELETE FROM mysql.user WHERE user NOT IN ('mysql.infoschema', 'mysql.session', 'mysql.sys', 'root') OR host NOT IN ('localhost'); - CREATE USER 'healthchecker'@'localhost' IDENTIFIED BY 'healthcheckpass'; - ${ROOTCREATE} - FLUSH PRIVILEGES ; - EOSQL - if [ ! -z "$MYSQL_ROOT_PASSWORD" ]; then - # Put the password into the temporary config file - cat >"$PASSFILE" < "$SQL" -ALTER USER 'root'@'localhost' IDENTIFIED BY '12345'; -#ALTER USER 'root'@'${MYSQL_ROOT_HOST}' PASSWORD EXPIRE; -#ALTER USER 'root'@'localhost' PASSWORD EXPIRE; -EOF - else - cat << EOF > "$SQL" -#ALTER USER 'root'@'localhost' PASSWORD EXPIRE; -ALTER USER 'root'@'localhost' IDENTIFIED BY '12345'; -EOF - fi - set -- "$@" --init-file="$SQL" - unset SQL - fi - fi - - echo - echo '[Entrypoint] MySQL init process done. Ready for start up.' 
- echo - fi - - # Used by healthcheck to make sure it doesn't mistakenly report container - # healthy during startup - # Put the password into the temporary config file - touch /healthcheck.cnf - cat >"/healthcheck.cnf" < Date: Mon, 11 Apr 2022 18:40:34 +0400 Subject: [PATCH 11/17] Big part of the prepare code refactoring is over; now need to proceed with tests --- .../backup_backup/backup_builder.py | 10 ++--- .../backup_prepare/prepare.py | 5 ++- .../backup_prepare/prepare_builder.py | 19 +++++---- .../general_conf/check_env.py | 42 ++++++++----------- .../general_conf/generalops.py | 18 ++++---- 5 files changed, 46 insertions(+), 48 deletions(-) diff --git a/mysql_autoxtrabackup/backup_backup/backup_builder.py b/mysql_autoxtrabackup/backup_backup/backup_builder.py index 01f527b..398b76b 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_builder.py +++ b/mysql_autoxtrabackup/backup_backup/backup_builder.py @@ -27,11 +27,7 @@ def general_command_builder(self) -> str: else f" --host={self.mysql_options.get('mysql_host')} --port={self.mysql_options.get('mysql_port')}" ) - return ( - f"{args} {self.backup_options.get('xtra_options')}" - if self.backup_options.get("xtra_options") - else "" - ) + return f"{args} {self._get_extra_options('xtra_options')}" def full_backup_command_builder(self, full_backup_dir: str) -> str: """ @@ -66,3 +62,7 @@ def inc_backup_command_builder( ) return f"{xtrabackup_inc_cmd_base} --backup {self.general_command_builder()}" + + def _get_extra_options(self, option: str): + _option = self.backup_options.get(option) + return f" {_option}" if _option else "" diff --git a/mysql_autoxtrabackup/backup_prepare/prepare.py b/mysql_autoxtrabackup/backup_prepare/prepare.py index 63ae021..56a366d 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare.py +++ b/mysql_autoxtrabackup/backup_prepare/prepare.py @@ -56,7 +56,9 @@ def prepare_with_tags(self) -> Optional[bool]: logger.info("- - - - The end of the Prepare Stage. 
- - - -") return True - def prepare_run_incremental_backups(self, found_backups: Optional[Tuple[str, str]]) -> None: + def prepare_run_incremental_backups( + self, found_backups: Optional[Tuple[str, str]] + ) -> None: logger.info("Preparing Incs: ") self._iterate_and_run_found_backups( found_backups, helpers.sorted_ls(self.inc_dir) @@ -142,7 +144,6 @@ def _prepare_and_run( apply_log_only: Optional[bool] = None, dir_: Optional[str] = None, ) -> None: - # Prepare command backup_prepare_cmd = self.prepare_options.prepare_command_builder( full_backup=recent_bck, incremental=dir_, diff --git a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py b/mysql_autoxtrabackup/backup_prepare/prepare_builder.py index 4971b9a..77f3c69 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py +++ b/mysql_autoxtrabackup/backup_prepare/prepare_builder.py @@ -10,7 +10,6 @@ @dataclass class BackupPrepareBuilderChecker: - options: GeneralClass def __post_init__(self): @@ -64,14 +63,16 @@ def prepare_command_builder( ) xtrabackup_prepare_cmd += ( - f" {self.backup_options.get('xtra_options')}" - if self.backup_options.get("xtra_options") - else "" + f" {self._get_extra_options('xtra_options')}" + f" {self._get_extra_options('xtra_prepare_options')}" ) - xtrabackup_prepare_cmd += ( - f" {self.backup_options.get('xtra_prepare_options')}" - if self.backup_options.get("xtra_prepare_options") - else "" + + return ( + f"{xtrabackup_prepare_cmd} --apply-log-only" + if apply_log_only + else xtrabackup_prepare_cmd ) - return f"{xtrabackup_prepare_cmd} --apply-log-only" if apply_log_only else "" + def _get_extra_options(self, option: str): + _option = self.backup_options.get(option) + return f" {_option}" if _option else "" diff --git a/mysql_autoxtrabackup/general_conf/check_env.py b/mysql_autoxtrabackup/general_conf/check_env.py index 6b6a73e..ea680d2 100644 --- a/mysql_autoxtrabackup/general_conf/check_env.py +++ b/mysql_autoxtrabackup/general_conf/check_env.py @@ -14,10 +14,10 @@ class CheckEnv: def __init__( - self, - config: str = path_config.config_path_file, - full_dir: Union[str, None] = None, - inc_dir: Union[str, None] = None, + self, + config: str = path_config.config_path_file, + full_dir: Union[str, None] = None, + inc_dir: Union[str, None] = None, ) -> None: self.conf = config options = GeneralClass(config=self.conf) @@ -36,25 +36,19 @@ def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: """ if not options: - status_args = ( - "{} --defaults-file={} " - "--user={} --password='{}' status".format( - self.mysql_options.get("mysqladmin"), - self.mysql_options.get("mycnf"), - self.mysql_options.get("mysql_user"), - self.mysql_options.get("mysql_password"), - ) - ) + status_args = f"""{self.mysql_options.get("mysqladmin")} + --defaults-file={self.mysql_options.get("mycnf")} + --user={self.mysql_options.get("mysql_user")} + --password='{self.mysql_options.get("mysql_password")}' + status""" if self.mysql_options.get("mysql_socket"): - status_args += " --socket={}".format( - self.mysql_options.get("mysql_socket") - ) + status_args += f' --socket={self.mysql_options.get("mysql_socket")}' elif self.mysql_options.get("mysql_host") and self.mysql_options.get( - "mysql_port" + "mysql_port" ): - status_args += " --host={}".format(self.mysql_options.get("mysql_host")) - status_args += " --port={}".format(self.mysql_options.get("mysql_port")) + status_args += f' --host={self.mysql_options.get("mysql_host")}' + status_args += f' --port={self.mysql_options.get("mysql_port")}' else: 
logger.critical( "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" @@ -63,9 +57,7 @@ def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" ) else: - status_args = "{} {} status".format( - self.mysql_options.get("mysqladmin"), options - ) + status_args = f'{self.mysql_options.get("mysqladmin")} {options} status' # filter out password from argument list filtered_args = re.sub("--password='?\w+'?", "--password='*'", status_args) @@ -74,7 +66,7 @@ def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: return ProcessRunner.run_command(status_args) - def check_mysql_conf(self) -> Union[bool, Exception]: + def check_mysql_conf(self) -> Optional[bool]: """ Method for checking passed MySQL my.cnf defaults file. If it is not passed then skip this check :return: True on success, raise RuntimeError on error. @@ -116,7 +108,7 @@ def check_mysql_mysqladmin(self) -> Union[bool, Exception]: logger.error(f"FAILED: {mysqladmin} does NOT exist") raise RuntimeError(f"FAILED: {mysqladmin} does NOT exist") - def check_mysql_backuptool(self) -> Union[bool, Exception]: + def check_mysql_backup_tool(self) -> Union[bool, Exception]: """ Method for checking if given backup tool path is there or not. :return: RuntimeError on failure, True on success @@ -174,7 +166,7 @@ def check_all_env(self) -> Union[bool, Exception]: self.check_mysql_mysql() self.check_mysql_mysqladmin() self.check_mysql_conf() - self.check_mysql_backuptool() + self.check_mysql_backup_tool() self.check_mysql_backup_dir() self.check_mysql_full_backup_dir() self.check_mysql_inc_backup_dir() diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/general_conf/generalops.py index 559837c..d2ad279 100644 --- a/mysql_autoxtrabackup/general_conf/generalops.py +++ b/mysql_autoxtrabackup/general_conf/generalops.py @@ -1,5 +1,6 @@ import configparser import logging +from dataclasses import dataclass from os.path import isfile from typing import Dict, Union @@ -20,13 +21,16 @@ def _create_default_config(config: str, missing: str) -> None: logger.info(f"Default config file is generated in {config}") +@dataclass class GeneralClass: - def __init__(self, config: str = path_config.config_path_file) -> None: - if not isfile(config): - _create_default_config(config, missing=path_config.config_path_file) + config: str = path_config.config_path_file + + def __post_init__(self): + if not isfile(self.config): + _create_default_config(self.config, missing=path_config.config_path_file) self.con = configparser.ConfigParser() - self.con.read(config) + self.con.read(self.config) @property def mysql_options(self) -> Dict[str, str]: @@ -37,9 +41,9 @@ def mysql_options(self) -> Dict[str, str]: "mysqladmin": self.con.get(section, "mysqladmin"), "mysql_user": self.con.get(section, "mysql_user"), "mysql_password": self.con.get(section, "mysql_password"), - "mysql_socket": self.con.get(section, "mysql_socket", fallback=None), # type: ignore - "mysql_host": self.con.get(section, "mysql_host", fallback=None), # type: ignore - "mysql_port": self.con.get(section, "mysql_port", fallback=None), # type: ignore + "mysql_socket": self.con.get(section, "mysql_socket", fallback=None), + "mysql_host": self.con.get(section, "mysql_host", fallback=None), + "mysql_port": self.con.get(section, "mysql_port", fallback=None), "data_dir": self.con.get(section, "datadir"), } From 0af37c72b229aaf844cb6ef8b080ecf7e6a9d8d1 Mon 
Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Mon, 11 Apr 2022 19:00:46 +0400 Subject: [PATCH 12/17] pushing helper refactoring --- .../general_conf/check_env.py | 10 ++--- mysql_autoxtrabackup/utils/helpers.py | 39 +++++++++---------- 2 files changed, 23 insertions(+), 26 deletions(-) diff --git a/mysql_autoxtrabackup/general_conf/check_env.py b/mysql_autoxtrabackup/general_conf/check_env.py index ea680d2..428428c 100644 --- a/mysql_autoxtrabackup/general_conf/check_env.py +++ b/mysql_autoxtrabackup/general_conf/check_env.py @@ -14,10 +14,10 @@ class CheckEnv: def __init__( - self, - config: str = path_config.config_path_file, - full_dir: Union[str, None] = None, - inc_dir: Union[str, None] = None, + self, + config: str = path_config.config_path_file, + full_dir: Union[str, None] = None, + inc_dir: Union[str, None] = None, ) -> None: self.conf = config options = GeneralClass(config=self.conf) @@ -45,7 +45,7 @@ def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: if self.mysql_options.get("mysql_socket"): status_args += f' --socket={self.mysql_options.get("mysql_socket")}' elif self.mysql_options.get("mysql_host") and self.mysql_options.get( - "mysql_port" + "mysql_port" ): status_args += f' --host={self.mysql_options.get("mysql_host")}' status_args += f' --port={self.mysql_options.get("mysql_port")}' diff --git a/mysql_autoxtrabackup/utils/helpers.py b/mysql_autoxtrabackup/utils/helpers.py index b615963..8865168 100644 --- a/mysql_autoxtrabackup/utils/helpers.py +++ b/mysql_autoxtrabackup/utils/helpers.py @@ -5,18 +5,18 @@ import os import subprocess from datetime import datetime -from typing import Dict, List, Optional, Union +from typing import Dict, List, Optional logger = logging.getLogger(__name__) -def get_folder_size(path: str) -> Union[str, None]: +def get_folder_size(path: str) -> Optional[str]: """ Function to calculate given folder size. Using 'du' command here. 
:param path: The full path to be calculated - :return: String with human readable size info, for eg, 5.3M + :return: String with human-readable size info, for eg, 5.3M """ - du_cmd = "du -hs {}".format(path) + du_cmd = f"du -hs {path}" status, output = subprocess.getstatusoutput(du_cmd) if status == 0: return output.split()[0] @@ -31,8 +31,9 @@ def sorted_ls(path: Optional[str]) -> List[str]: :param path: Directory path :return: The list of sorted directories """ - mtime = lambda f: os.stat(os.path.join(path, f)).st_mtime # type: ignore - return list(sorted(os.listdir(path), key=mtime)) + return list( + sorted(os.listdir(path), key=lambda f: os.stat(os.path.join(path, f)).st_mtime) + ) def get_directory_size(path: str) -> int: @@ -66,19 +67,15 @@ def create_backup_directory(directory: str, forced_dir: Optional[str] = None) -> os.makedirs(new_dir) return new_dir except Exception as err: - logger.error( - "Something went wrong in create_backup_directory(): {}".format(err) - ) + logger.error(f"Something went wrong in create_backup_directory(): {err}") raise RuntimeError( - "Something went wrong in create_backup_directory(): {}".format(err) - ) + f"Something went wrong in create_backup_directory(): {err}" + ) from err def get_latest_dir_name(path: Optional[str]) -> Optional[str]: # Return last backup dir name either incremental or full backup dir - if len(os.listdir(path)) > 0: - return max(os.listdir(path)) - return None + return max(os.listdir(path)) if len(os.listdir(path)) > 0 else None def create_directory(path: str) -> Optional[bool]: @@ -89,7 +86,7 @@ def create_directory(path: str) -> Optional[bool]: return True except Exception as err: logger.error("FAILED: Could not create directory, ", err) - raise RuntimeError("FAILED: Could not create directory") + raise RuntimeError("FAILED: Could not create directory") from err def check_if_backup_prepared(type_: str, path: str) -> str: @@ -99,8 +96,8 @@ def check_if_backup_prepared(type_: str, path: str) -> str: :param path: path string of the backup folder :return: True if given backup is prepared, False otherwise """ - if type_ == "full" and os.path.isfile(path + "/xtrabackup_checkpoints"): - with open(path + "/xtrabackup_checkpoints", "r") as f: + if type_ == "full" and os.path.isfile(f"{path}/xtrabackup_checkpoints"): + with open(f"{path}/xtrabackup_checkpoints", "r") as f: if f.readline().split()[-1] == "full-prepared": return "Full-Prepared" # TODO: add the possible way of checking for incremental backups as well. @@ -113,11 +110,11 @@ def list_available_backups(path: str) -> Dict[str, List[Dict[str, str]]]: Dict of backups; and the statuses - if they are already prepared or not :param path: General backup directory path - :return: dictionary of backups full and incremental + :return: dictionary of full and incremental backups """ backups = {} - full_backup_dir = path + "/full" - inc_backup_dir = path + "/inc" + full_backup_dir = f"{path}/full" + inc_backup_dir = f"{path}/inc" if os.path.isdir(full_backup_dir): backups = { "full": [ @@ -126,7 +123,7 @@ def list_available_backups(path: str) -> Dict[str, List[Dict[str, str]]]: for dir_ in os.listdir(full_backup_dir) } if os.path.isdir(inc_backup_dir): - backups["inc"] = sorted_ls(inc_backup_dir) # type: ignore + backups["inc"] = sorted_ls(inc_backup_dir) logger.info( "Listing all available backups from full and incremental backup directories..." 
) From f0aa9c43fdc81c38c76049c1c2b23fbb3f07b603 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Tue, 12 Apr 2022 12:16:46 +0400 Subject: [PATCH 13/17] Added checkenv refactoring --- mysql_autoxtrabackup/autoxtrabackup.py | 4 +- .../backup_backup/backuper.py | 98 ++++++++------- .../general_conf/check_env.py | 115 +++++++++--------- 3 files changed, 115 insertions(+), 102 deletions(-) diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index a258c79..c9e1cfb 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -304,10 +304,10 @@ def _run_commands( elif show_tags and defaults_file: Backup( - config=defaults_file, builder_obj=builder_obj, tagger=tagger, mysql_cli=mysql_cli, + options=options, ).tagger.show_tags(backup_dir=str(backup_options.get("backup_dir"))) elif generate_config_file: GenerateDefaultConfig().generate_config_file() @@ -318,10 +318,10 @@ def _run_commands( ).prepare_backup_and_copy_back() elif backup: Backup( - config=defaults_file, builder_obj=builder_obj, tagger=tagger, mysql_cli=mysql_cli, + options=options, dry_run=dry_run_, tag=tag, ).all_backup() diff --git a/mysql_autoxtrabackup/backup_backup/backuper.py b/mysql_autoxtrabackup/backup_backup/backuper.py index cd9f9dc..1bf76c8 100755 --- a/mysql_autoxtrabackup/backup_backup/backuper.py +++ b/mysql_autoxtrabackup/backup_backup/backuper.py @@ -14,8 +14,8 @@ from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags -from mysql_autoxtrabackup.general_conf import path_config from mysql_autoxtrabackup.general_conf.check_env import CheckEnv +from mysql_autoxtrabackup.general_conf.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils import helpers, mysql_cli @@ -54,19 +54,19 @@ def wrapped(_self, inc_dir: Optional[str] = None): return wrapped -def _get_inc_dir(builder_obj: BackupBuilderChecker): +def _get_inc_dir(builder_obj: BackupBuilderChecker) -> str: return str(builder_obj.backup_options.get("inc_dir")) -def _get_full_dir(builder_obj: BackupBuilderChecker): +def _get_full_dir(builder_obj: BackupBuilderChecker) -> str: return str(builder_obj.backup_options.get("full_dir")) -def _create_bck_dir(path: str): +def _create_bck_dir(path: str) -> str: return helpers.create_backup_directory(path) -def _get_recent_bck(path: str): +def _get_recent_bck(path: str) -> str: return helpers.get_latest_dir_name(path) @@ -75,7 +75,7 @@ class Backup: builder_obj: BackupBuilderChecker tagger: BackupTags mysql_cli: mysql_cli.MySQLClientHelper - config: str = path_config.config_path_file + options: GeneralClass dry_run: Optional[bool] = None tag: Optional[str] = None _full_dir: str = field(init=False) @@ -85,7 +85,28 @@ def __post_init__(self): self._full_dir = _get_full_dir(self.builder_obj) self._inc_dir = _get_inc_dir(self.builder_obj) - def last_full_backup_date( + def all_backup(self) -> bool: + """ + This method at first checks full backup directory, if it is empty takes full backup. + If it is not empty then checks for full backup time. + If the recent full backup is taken 1 day ago, it takes full backup. + In any other conditions it takes incremental backup. 
+ """ + # Workaround for circular import dependency error in Python + + # Creating object from CheckEnv class + check_env_obj = CheckEnv( + options=self.options, + full_dir=self._full_dir, + inc_dir=self._inc_dir, + ) + + assert check_env_obj.check_all_env() is True, "environment checks failed!" + self._run_backup() + + return True + + def _last_full_backup_date( self, path: Optional[str] = None, full_backup_interval: Optional[float] = None ) -> bool: """ @@ -104,14 +125,14 @@ def last_full_backup_date( return float((now - dir_date).total_seconds()) >= float(backup_interval) @_is_full_path_exists - def clean_full_backup_dir( + def _clean_full_backup_dir( self, full_dir: Optional[str] = None, remove_all: Optional[bool] = None, ) -> Optional[bool]: # Deleting old full backup after taking new full backup. # Keeping the latest in order not to lose everything. - logger.info("starting clean_full_backup_dir") + logger.info("Starting _clean_full_backup_dir") for i in os.listdir(full_dir): rm_dir = f"{full_dir}/{i}" @@ -123,7 +144,7 @@ def clean_full_backup_dir( return True @_is_inc_path_exists - def clean_inc_backup_dir(self, inc_dir: Optional[str] = None) -> Optional[bool]: + def _clean_inc_backup_dir(self, inc_dir: Optional[str] = None) -> Optional[bool]: # Deleting incremental backups after taking new fresh full backup. inc_dir = inc_dir or self._inc_dir @@ -133,7 +154,7 @@ def clean_inc_backup_dir(self, inc_dir: Optional[str] = None) -> Optional[bool]: return True @_is_dry_run - def full_backup(self) -> bool: + def _take_full_backup(self) -> bool: """ Method for taking full backups. It will construct the backup command based on config file. :return: True on success. @@ -153,7 +174,7 @@ def full_backup(self) -> bool: return self._get_status("Full", full_backup_dir, xtrabackup_cmd) @_is_dry_run - def inc_backup(self) -> bool: + def _take_inc_backup(self) -> bool: """ Method for taking incremental backups. :return: True on success. @@ -180,43 +201,20 @@ def inc_backup(self) -> bool: return self._get_status("Inc", inc_backup_dir, xtrabackup_inc_cmd) - def all_backup(self) -> bool: - """ - This method at first checks full backup directory, if it is empty takes full backup. - If it is not empty then checks for full backup time. - If the recent full backup is taken 1 day ago, it takes full backup. - In any other conditions it takes incremental backup. - """ - # Workaround for circular import dependency error in Python - - # Creating object from CheckEnv class - check_env_obj = CheckEnv( - self.config, - full_dir=self._full_dir, - inc_dir=self._inc_dir, - ) - - assert check_env_obj.check_all_env() is True, "environment checks failed!" + def _run_backup(self) -> None: if not _get_recent_bck(self._full_dir): logger.info( "- - - - You have no backups : Taking very first Full Backup! - - - -" ) - if self._flush_logs_and_backup(): - # Removing old inc backups - self.clean_inc_backup_dir() + self._flush_logs_backup_and_clean() - elif self.last_full_backup_date(): + elif self._last_full_backup_date(): logger.info( "- - - - Your full backup is timeout : Taking new Full Backup! 
- - - -" ) - if self._flush_logs_and_backup(): - # Removing full backups - self.clean_full_backup_dir() - - # Removing inc backups - self.clean_inc_backup_dir() + self._flush_logs_backup_and_clean(clean_full=True) else: @@ -232,14 +230,28 @@ def all_backup(self) -> bool: time.sleep(3) # Taking incremental backup - self.inc_backup() + self._take_inc_backup() - return True + def _flush_logs_backup_and_clean(self, clean_full: bool = False) -> None: + if self._flush_logs_and_backup(): + self._clean_backup_dirs(clean_full=clean_full) + + def _clean_backup_dirs(self, clean_full: bool = False) -> None: + # Removing full backups + if clean_full: + self._clean_full_backup_dir() + + # Removing inc backups + self._clean_inc_backup_dir() def _flush_logs_and_backup(self) -> bool: - return self.mysql_cli.mysql_run_command("flush logs") and self.full_backup() + return ( + self.mysql_cli.mysql_run_command("flush logs") and self._take_full_backup() + ) - def _get_status(self, backup_type: str, backup_dir: str, xtrabackup_cmd: str): + def _get_status( + self, backup_type: str, backup_dir: str, xtrabackup_cmd: str + ) -> bool: logger.debug(f'Starting {self.builder_obj.backup_options.get("backup_tool")}') status = ProcessRunner.run_command(xtrabackup_cmd) status_str = "OK" if status is True else "FAILED" diff --git a/mysql_autoxtrabackup/general_conf/check_env.py b/mysql_autoxtrabackup/general_conf/check_env.py index 428428c..c67ad0a 100644 --- a/mysql_autoxtrabackup/general_conf/check_env.py +++ b/mysql_autoxtrabackup/general_conf/check_env.py @@ -1,72 +1,73 @@ import logging import os import re -from typing import Optional, Union +from dataclasses import dataclass +from typing import Optional, Union from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner from mysql_autoxtrabackup.utils.helpers import create_directory -from . import path_config from .generalops import GeneralClass logger = logging.getLogger(__name__) +def _mask_password(status_args: str) -> str: + # filter out password from argument list + return re.sub("--password='?\w+'?", "--password='*'", status_args) + + +@dataclass class CheckEnv: - def __init__( - self, - config: str = path_config.config_path_file, - full_dir: Union[str, None] = None, - inc_dir: Union[str, None] = None, - ) -> None: - self.conf = config - options = GeneralClass(config=self.conf) - self.backup_options = options.backup_options - self.mysql_options = options.mysql_options - if full_dir: - self.backup_options["full_dir"] = full_dir - if inc_dir: - self.backup_options["ind_dir"] = inc_dir - - def check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: + options: GeneralClass + full_dir: Optional[str] = None + inc_dir: Optional[str] = None + + def __post_init__(self): + self.backup_options = self.options.backup_options + self.mysql_options = self.options.mysql_options + + if self.full_dir: + self.backup_options["full_dir"] = self.full_dir + if self.inc_dir: + self.backup_options["ind_dir"] = self.inc_dir + + def _check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: """ Method for checking if MySQL server is up or not. :param: options: Passed options to connect to MySQL server if None, then going to get it from conf file :return: True on success, raise RuntimeError on error. 
""" - if not options: + status_args = f'{self.mysql_options.get("mysqladmin")} {options} status' \ + if options else self._build_status_check_command() + + logger.info(f"Running mysqladmin command -> {_mask_password(status_args)}") - status_args = f"""{self.mysql_options.get("mysqladmin")} + return ProcessRunner.run_command(status_args) + + def _build_status_check_command(self) -> str: + status_args = f"""{self.mysql_options.get("mysqladmin")} --defaults-file={self.mysql_options.get("mycnf")} --user={self.mysql_options.get("mysql_user")} --password='{self.mysql_options.get("mysql_password")}' status""" - - if self.mysql_options.get("mysql_socket"): - status_args += f' --socket={self.mysql_options.get("mysql_socket")}' - elif self.mysql_options.get("mysql_host") and self.mysql_options.get( + if self.mysql_options.get("mysql_socket"): + status_args += f' --socket={self.mysql_options.get("mysql_socket")}' + elif self.mysql_options.get("mysql_host") and self.mysql_options.get( "mysql_port" - ): - status_args += f' --host={self.mysql_options.get("mysql_host")}' - status_args += f' --port={self.mysql_options.get("mysql_port")}' - else: - logger.critical( - "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" - ) - raise RuntimeError( - "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" - ) + ): + status_args += f' --host={self.mysql_options.get("mysql_host")}' + status_args += f' --port={self.mysql_options.get("mysql_port")}' else: - status_args = f'{self.mysql_options.get("mysqladmin")} {options} status' - - # filter out password from argument list - filtered_args = re.sub("--password='?\w+'?", "--password='*'", status_args) - - logger.info(f"Running mysqladmin command -> {filtered_args}") - - return ProcessRunner.run_command(status_args) + logger.critical( + "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" + ) + raise RuntimeError( + "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" + ) + return status_args - def check_mysql_conf(self) -> Optional[bool]: + def _check_mysql_conf(self) -> Optional[bool]: """ Method for checking passed MySQL my.cnf defaults file. If it is not passed then skip this check :return: True on success, raise RuntimeError on error. @@ -82,7 +83,7 @@ def check_mysql_conf(self) -> Optional[bool]: logger.info("OK: MySQL configuration file exists") return True - def check_mysql_mysql(self) -> Union[bool, Exception]: + def _check_mysql_mysql(self) -> Union[bool, Exception]: """ Method for checking mysql client path :return: True on success, raise RuntimeError on error. @@ -95,7 +96,7 @@ def check_mysql_mysql(self) -> Union[bool, Exception]: logger.error(f"FAILED: {mysql} doest NOT exist") raise RuntimeError(f"FAILED: {mysql} doest NOT exist") - def check_mysql_mysqladmin(self) -> Union[bool, Exception]: + def _check_mysql_mysqladmin(self) -> Union[bool, Exception]: """ Method for checking mysqladmin path :return: True on success, raise RuntimeError on error. @@ -108,7 +109,7 @@ def check_mysql_mysqladmin(self) -> Union[bool, Exception]: logger.error(f"FAILED: {mysqladmin} does NOT exist") raise RuntimeError(f"FAILED: {mysqladmin} does NOT exist") - def check_mysql_backup_tool(self) -> Union[bool, Exception]: + def _check_backup_tool(self) -> Union[bool, Exception]: """ Method for checking if given backup tool path is there or not. 
:return: RuntimeError on failure, True on success @@ -120,7 +121,7 @@ def check_mysql_backup_tool(self) -> Union[bool, Exception]: logger.error("FAILED: XtraBackup does NOT exist") raise RuntimeError("FAILED: XtraBackup does NOT exist") - def check_mysql_backup_dir(self) -> Optional[bool]: + def _check_backup_dir(self) -> Optional[bool]: """ Check for MySQL backup directory. If directory exists already then, return True. If not, try to create it. @@ -132,7 +133,7 @@ def check_mysql_backup_dir(self) -> Optional[bool]: return create_directory(str(self.backup_options.get("backup_dir"))) - def check_mysql_full_backup_dir(self) -> Optional[bool]: + def _check_full_backup_dir(self) -> Optional[bool]: """ Check full backup directory path. If this path exists return True if not try to create. @@ -144,7 +145,7 @@ def check_mysql_full_backup_dir(self) -> Optional[bool]: return create_directory(str(self.backup_options.get("full_dir"))) - def check_mysql_inc_backup_dir(self) -> Optional[bool]: + def _check_inc_backup_dir(self) -> Optional[bool]: """ Check incremental backup directory path. If this path exists return True if not try to create. @@ -162,14 +163,14 @@ def check_all_env(self) -> Union[bool, Exception]: :return: True on success, raise RuntimeError on error. """ try: - self.check_mysql_uptime() - self.check_mysql_mysql() - self.check_mysql_mysqladmin() - self.check_mysql_conf() - self.check_mysql_backup_tool() - self.check_mysql_backup_dir() - self.check_mysql_full_backup_dir() - self.check_mysql_inc_backup_dir() + self._check_mysql_uptime() + self._check_mysql_mysql() + self._check_mysql_mysqladmin() + self._check_mysql_conf() + self._check_backup_tool() + self._check_backup_dir() + self._check_full_backup_dir() + self._check_inc_backup_dir() except Exception as err: logger.critical("FAILED: Check status") logger.error(err) From 7849bf4916b19c9a23d509f269a194a2c67963d3 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Tue, 12 Apr 2022 19:00:17 +0400 Subject: [PATCH 14/17] The next big chunk of code refactoring --- .../{backup_backup => backup}/__init__.py | 0 .../backup_builder.py | 0 .../{backup_backup => backup}/backup_tags.py | 2 +- .../{backup_backup => backup}/backuper.py | 58 +++--- .../{general_conf => cli}/__init__.py | 0 .../{utils/__init__.py => cli/backup.py} | 0 mysql_autoxtrabackup/common/__init__.py | 0 .../{utils => common}/helpers.py | 6 +- .../{utils => common}/mysql_cli.py | 0 .../{utils => common}/version.py | 0 mysql_autoxtrabackup/configs/__init__.py | 0 mysql_autoxtrabackup/configs/check_env.py | 157 +++++++++++++++ .../{general_conf => configs}/generalops.py | 0 .../generate_default_conf.py | 0 .../{general_conf => configs}/path_config.py | 0 .../general_conf/check_env.py | 180 ------------------ .../{backup_prepare => prepare}/__init__.py | 0 .../{backup_prepare => prepare}/prepare.py | 2 +- .../prepare_builder.py | 27 ++- 19 files changed, 207 insertions(+), 225 deletions(-) rename mysql_autoxtrabackup/{backup_backup => backup}/__init__.py (100%) rename mysql_autoxtrabackup/{backup_backup => backup}/backup_builder.py (100%) rename mysql_autoxtrabackup/{backup_backup => backup}/backup_tags.py (98%) rename mysql_autoxtrabackup/{backup_backup => backup}/backuper.py (85%) rename mysql_autoxtrabackup/{general_conf => cli}/__init__.py (100%) rename mysql_autoxtrabackup/{utils/__init__.py => cli/backup.py} (100%) create mode 100644 mysql_autoxtrabackup/common/__init__.py rename mysql_autoxtrabackup/{utils => common}/helpers.py (96%) rename mysql_autoxtrabackup/{utils 
=> common}/mysql_cli.py (100%) rename mysql_autoxtrabackup/{utils => common}/version.py (100%) create mode 100644 mysql_autoxtrabackup/configs/__init__.py create mode 100644 mysql_autoxtrabackup/configs/check_env.py rename mysql_autoxtrabackup/{general_conf => configs}/generalops.py (100%) rename mysql_autoxtrabackup/{general_conf => configs}/generate_default_conf.py (100%) rename mysql_autoxtrabackup/{general_conf => configs}/path_config.py (100%) delete mode 100644 mysql_autoxtrabackup/general_conf/check_env.py rename mysql_autoxtrabackup/{backup_prepare => prepare}/__init__.py (100%) rename mysql_autoxtrabackup/{backup_prepare => prepare}/prepare.py (99%) rename mysql_autoxtrabackup/{backup_prepare => prepare}/prepare_builder.py (82%) diff --git a/mysql_autoxtrabackup/backup_backup/__init__.py b/mysql_autoxtrabackup/backup/__init__.py similarity index 100% rename from mysql_autoxtrabackup/backup_backup/__init__.py rename to mysql_autoxtrabackup/backup/__init__.py diff --git a/mysql_autoxtrabackup/backup_backup/backup_builder.py b/mysql_autoxtrabackup/backup/backup_builder.py similarity index 100% rename from mysql_autoxtrabackup/backup_backup/backup_builder.py rename to mysql_autoxtrabackup/backup/backup_builder.py diff --git a/mysql_autoxtrabackup/backup_backup/backup_tags.py b/mysql_autoxtrabackup/backup/backup_tags.py similarity index 98% rename from mysql_autoxtrabackup/backup_backup/backup_tags.py rename to mysql_autoxtrabackup/backup/backup_tags.py index 481a72e..f37a004 100644 --- a/mysql_autoxtrabackup/backup_backup/backup_tags.py +++ b/mysql_autoxtrabackup/backup/backup_tags.py @@ -5,7 +5,7 @@ from typing import Optional from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.utils import helpers +from mysql_autoxtrabackup.common import helpers logger = logging.getLogger(__name__) diff --git a/mysql_autoxtrabackup/backup_backup/backuper.py b/mysql_autoxtrabackup/backup/backuper.py similarity index 85% rename from mysql_autoxtrabackup/backup_backup/backuper.py rename to mysql_autoxtrabackup/backup/backuper.py index 1bf76c8..19c24e1 100755 --- a/mysql_autoxtrabackup/backup_backup/backuper.py +++ b/mysql_autoxtrabackup/backup/backuper.py @@ -14,10 +14,10 @@ from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags +from mysql_autoxtrabackup.common import helpers, mysql_cli from mysql_autoxtrabackup.general_conf.check_env import CheckEnv from mysql_autoxtrabackup.general_conf.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils import helpers, mysql_cli logger = logging.getLogger(__name__) @@ -86,15 +86,6 @@ def __post_init__(self): self._inc_dir = _get_inc_dir(self.builder_obj) def all_backup(self) -> bool: - """ - This method at first checks full backup directory, if it is empty takes full backup. - If it is not empty then checks for full backup time. - If the recent full backup is taken 1 day ago, it takes full backup. - In any other conditions it takes incremental backup. 
- """ - # Workaround for circular import dependency error in Python - - # Creating object from CheckEnv class check_env_obj = CheckEnv( options=self.options, full_dir=self._full_dir, @@ -203,34 +194,35 @@ def _take_inc_backup(self) -> bool: def _run_backup(self) -> None: if not _get_recent_bck(self._full_dir): - logger.info( - "- - - - You have no backups : Taking very first Full Backup! - - - -" - ) - - self._flush_logs_backup_and_clean() - + self._take_fresh_full_backup() elif self._last_full_backup_date(): - logger.info( - "- - - - Your full backup is timeout : Taking new Full Backup! - - - -" - ) - - self._flush_logs_backup_and_clean(clean_full=True) - + self._take_new_full_backup_after_old_expired() else: + self._take_incremental_backup() - logger.info( - f"- - - - You have a full backup that is less than " - f'{self.builder_obj.backup_options.get("full_backup_interval")} seconds old. - - - -' - ) - - logger.info( - "- - - - We will take an incremental one based on recent Full Backup - - - -" - ) + def _take_incremental_backup(self): + logger.info( + f"- - - - You have a full backup that is less than " + f'{self.builder_obj.backup_options.get("full_backup_interval")} seconds old. - - - -' + ) + logger.info( + "- - - - We will take an incremental one based on recent Full Backup - - - -" + ) + time.sleep(3) + # Taking incremental backup + self._take_inc_backup() - time.sleep(3) + def _take_new_full_backup_after_old_expired(self): + logger.info( + "- - - - Your full backup is timeout : Taking new Full Backup! - - - -" + ) + self._flush_logs_backup_and_clean(clean_full=True) - # Taking incremental backup - self._take_inc_backup() + def _take_fresh_full_backup(self): + logger.info( + "- - - - You have no backups : Taking very first Full Backup! - - - -" + ) + self._flush_logs_backup_and_clean() def _flush_logs_backup_and_clean(self, clean_full: bool = False) -> None: if self._flush_logs_and_backup(): diff --git a/mysql_autoxtrabackup/general_conf/__init__.py b/mysql_autoxtrabackup/cli/__init__.py similarity index 100% rename from mysql_autoxtrabackup/general_conf/__init__.py rename to mysql_autoxtrabackup/cli/__init__.py diff --git a/mysql_autoxtrabackup/utils/__init__.py b/mysql_autoxtrabackup/cli/backup.py similarity index 100% rename from mysql_autoxtrabackup/utils/__init__.py rename to mysql_autoxtrabackup/cli/backup.py diff --git a/mysql_autoxtrabackup/common/__init__.py b/mysql_autoxtrabackup/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mysql_autoxtrabackup/utils/helpers.py b/mysql_autoxtrabackup/common/helpers.py similarity index 96% rename from mysql_autoxtrabackup/utils/helpers.py rename to mysql_autoxtrabackup/common/helpers.py index 8865168..9efdf26 100644 --- a/mysql_autoxtrabackup/utils/helpers.py +++ b/mysql_autoxtrabackup/common/helpers.py @@ -92,8 +92,8 @@ def create_directory(path: str) -> Optional[bool]: def check_if_backup_prepared(type_: str, path: str) -> str: """ Helper function for checking if given backup already prepared or not. 
- :param type_: Type of backup full or inc - :param path: path string of the backup folder + :param: type_: Type of backup full or inc + :param: path: path string of the backup folder :return: True if given backup is prepared, False otherwise """ if type_ == "full" and os.path.isfile(f"{path}/xtrabackup_checkpoints"): @@ -109,7 +109,7 @@ def list_available_backups(path: str) -> Dict[str, List[Dict[str, str]]]: Helper function for returning Dict of backups; and the statuses - if they are already prepared or not - :param path: General backup directory path + :param: path: General backup directory path :return: dictionary of full and incremental backups """ backups = {} diff --git a/mysql_autoxtrabackup/utils/mysql_cli.py b/mysql_autoxtrabackup/common/mysql_cli.py similarity index 100% rename from mysql_autoxtrabackup/utils/mysql_cli.py rename to mysql_autoxtrabackup/common/mysql_cli.py diff --git a/mysql_autoxtrabackup/utils/version.py b/mysql_autoxtrabackup/common/version.py similarity index 100% rename from mysql_autoxtrabackup/utils/version.py rename to mysql_autoxtrabackup/common/version.py diff --git a/mysql_autoxtrabackup/configs/__init__.py b/mysql_autoxtrabackup/configs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mysql_autoxtrabackup/configs/check_env.py b/mysql_autoxtrabackup/configs/check_env.py new file mode 100644 index 0000000..1fef9ea --- /dev/null +++ b/mysql_autoxtrabackup/configs/check_env.py @@ -0,0 +1,157 @@ +import logging +import os +import re +from dataclasses import dataclass +from typing import Optional, Union + +from mysql_autoxtrabackup.common.helpers import create_directory +from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner + +from .generalops import GeneralClass + +DOES_NOT_EXIST = "FAILED: MySQL configuration file path does NOT exist" + +MYSQL_CONN_MSG = ( + "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" +) + +logger = logging.getLogger(__name__) + + +def _mask_password(status_args: str) -> str: + # filter out password from argument list + return re.sub("--password='?\w+'?", "--password='*'", status_args) + + +def _is_binary_exists(_binary_path: str): + if os.path.exists(_binary_path): + logger.info(f"OK: {_binary_path} exists") + return True + + logger.error(f"FAILED: {_binary_path} does NOT exist") + raise RuntimeError(f"FAILED: {_binary_path} does NOT exist") + + +def _is_path_exists(_path: str) -> Optional[bool]: + if os.path.exists(_path): + logger.info(f"OK: {_path} exists") + return True + return create_directory(_path) + + +@dataclass +class CheckEnv: + options: GeneralClass + full_dir: Optional[str] = None + inc_dir: Optional[str] = None + + def __post_init__(self): + self.backup_options = self.options.backup_options + self.mysql_options = self.options.mysql_options + + if self.full_dir: + self.backup_options["full_dir"] = self.full_dir + if self.inc_dir: + self.backup_options["ind_dir"] = self.inc_dir + + self._required_dirs = { + "backup_dir": self.backup_options.get("backup_dir"), + "full_dir": self.backup_options.get("full_dir"), + "inc_dir": self.backup_options.get("inc_dir"), + } + self._required_binaries = { + "mysql": self.mysql_options.get("mysql"), + "mysqladmin": self.mysql_options.get("mysqladmin"), + "backup_tool": self.backup_options.get("backup_tool"), + } + + def check_all_env(self) -> Union[bool, Exception]: + """ + Method for running all checks + :return: True on success, raise RuntimeError on error. 
+ """ + try: + self._check_mysql_uptime() + self._check_mysql_conf() + self._is_all_binaries_exist() + self._is_all_paths_exist() + except Exception as err: + logger.critical("FAILED: Check status") + logger.error(err) + raise RuntimeError("FAILED: Check status") from err + else: + logger.info("OK: Check status") + return True + + def _check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: + """ + Method for checking if MySQL server is up or not. + :param: options: Passed options to connect to MySQL server if None, then going to get it from conf file + :return: True on success, raise RuntimeError on error. + """ + status_args = ( + f'{self.mysql_options.get("mysqladmin")} {options} status' + if options + else self._build_status_check_command() + ) + + logger.info(f"Running mysqladmin command -> {_mask_password(status_args)}") + + return ProcessRunner.run_command(status_args) + + def _check_mysql_conf(self) -> Optional[bool]: + """ + Method for checking passed MySQL my.cnf defaults file. If it is not passed then skip this check + :return: True on success, raise RuntimeError on error. + """ + my_cnf = self.mysql_options.get("mycnf") + if not my_cnf or my_cnf == "": + logger.info("Skipping my.cnf check, because it is not specified") + return True + elif not os.path.exists(my_cnf): + logger.error(DOES_NOT_EXIST) + raise RuntimeError(DOES_NOT_EXIST) + + logger.info("OK: MySQL configuration file exists") + return True + + def _is_all_paths_exist(self): + return all(_is_path_exists(_path) for _path in self._required_dirs.values()) + + def _is_all_binaries_exist(self): + return all( + _is_binary_exists(_binary_path) + for _binary_path in self._required_binaries.values() + ) + + def _build_status_check_command(self) -> str: + self._is_mysql_conn_options_provided() + + status_args = f"""{self.mysql_options.get("mysqladmin")} + --defaults-file={self.mysql_options.get("mycnf")} + --user={self.mysql_options.get("mysql_user")} + --password='{self.mysql_options.get("mysql_password")}' + status""" + + return self._append_conn_string(status_args) + + def _is_mysql_conn_options_provided(self): + if not self.mysql_options.get("mysql_socket") and not ( + self.mysql_options.get("mysql_host") + and self.mysql_options.get("mysql_port") + ): + logger.critical(MYSQL_CONN_MSG) + raise RuntimeError(MYSQL_CONN_MSG) + + def _append_conn_string(self, status_args): + status_args += ( + f' --socket={self.mysql_options.get("mysql_socket")}' + if self.mysql_options.get("mysql_socket") + else "" + ) + if self.mysql_options.get("mysql_host") and self.mysql_options.get( + "mysql_port" + ): + status_args += f' --host={self.mysql_options.get("mysql_host")}' + status_args += f' --port={self.mysql_options.get("mysql_port")}' + return status_args diff --git a/mysql_autoxtrabackup/general_conf/generalops.py b/mysql_autoxtrabackup/configs/generalops.py similarity index 100% rename from mysql_autoxtrabackup/general_conf/generalops.py rename to mysql_autoxtrabackup/configs/generalops.py diff --git a/mysql_autoxtrabackup/general_conf/generate_default_conf.py b/mysql_autoxtrabackup/configs/generate_default_conf.py similarity index 100% rename from mysql_autoxtrabackup/general_conf/generate_default_conf.py rename to mysql_autoxtrabackup/configs/generate_default_conf.py diff --git a/mysql_autoxtrabackup/general_conf/path_config.py b/mysql_autoxtrabackup/configs/path_config.py similarity index 100% rename from mysql_autoxtrabackup/general_conf/path_config.py rename to mysql_autoxtrabackup/configs/path_config.py diff 
--git a/mysql_autoxtrabackup/general_conf/check_env.py b/mysql_autoxtrabackup/general_conf/check_env.py deleted file mode 100644 index c67ad0a..0000000 --- a/mysql_autoxtrabackup/general_conf/check_env.py +++ /dev/null @@ -1,180 +0,0 @@ -import logging -import os -import re - -from dataclasses import dataclass -from typing import Optional, Union -from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils.helpers import create_directory - -from .generalops import GeneralClass - -logger = logging.getLogger(__name__) - - -def _mask_password(status_args: str) -> str: - # filter out password from argument list - return re.sub("--password='?\w+'?", "--password='*'", status_args) - - -@dataclass -class CheckEnv: - options: GeneralClass - full_dir: Optional[str] = None - inc_dir: Optional[str] = None - - def __post_init__(self): - self.backup_options = self.options.backup_options - self.mysql_options = self.options.mysql_options - - if self.full_dir: - self.backup_options["full_dir"] = self.full_dir - if self.inc_dir: - self.backup_options["ind_dir"] = self.inc_dir - - def _check_mysql_uptime(self, options: Optional[str] = None) -> Optional[bool]: - """ - Method for checking if MySQL server is up or not. - :param: options: Passed options to connect to MySQL server if None, then going to get it from conf file - :return: True on success, raise RuntimeError on error. - """ - status_args = f'{self.mysql_options.get("mysqladmin")} {options} status' \ - if options else self._build_status_check_command() - - logger.info(f"Running mysqladmin command -> {_mask_password(status_args)}") - - return ProcessRunner.run_command(status_args) - - def _build_status_check_command(self) -> str: - status_args = f"""{self.mysql_options.get("mysqladmin")} - --defaults-file={self.mysql_options.get("mycnf")} - --user={self.mysql_options.get("mysql_user")} - --password='{self.mysql_options.get("mysql_password")}' - status""" - if self.mysql_options.get("mysql_socket"): - status_args += f' --socket={self.mysql_options.get("mysql_socket")}' - elif self.mysql_options.get("mysql_host") and self.mysql_options.get( - "mysql_port" - ): - status_args += f' --host={self.mysql_options.get("mysql_host")}' - status_args += f' --port={self.mysql_options.get("mysql_port")}' - else: - logger.critical( - "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" - ) - raise RuntimeError( - "Neither mysql_socket nor mysql_host and mysql_port are defined in config!" - ) - return status_args - - def _check_mysql_conf(self) -> Optional[bool]: - """ - Method for checking passed MySQL my.cnf defaults file. If it is not passed then skip this check - :return: True on success, raise RuntimeError on error. - """ - my_cnf = self.mysql_options.get("mycnf") - if not my_cnf or my_cnf == "": - logger.info("Skipping my.cnf check, because it is not specified") - return True - elif not os.path.exists(my_cnf): - logger.error("FAILED: MySQL configuration file path does NOT exist") - raise RuntimeError("FAILED: MySQL configuration file path does NOT exist") - - logger.info("OK: MySQL configuration file exists") - return True - - def _check_mysql_mysql(self) -> Union[bool, Exception]: - """ - Method for checking mysql client path - :return: True on success, raise RuntimeError on error. 
- """ - mysql = self.mysql_options.get("mysql") - if os.path.exists(str(mysql)): - logger.info(f"OK: {mysql} exists") - return True - - logger.error(f"FAILED: {mysql} doest NOT exist") - raise RuntimeError(f"FAILED: {mysql} doest NOT exist") - - def _check_mysql_mysqladmin(self) -> Union[bool, Exception]: - """ - Method for checking mysqladmin path - :return: True on success, raise RuntimeError on error. - """ - mysqladmin = self.mysql_options.get("mysqladmin") - if os.path.exists(str(mysqladmin)): - logger.info(f"OK: {mysqladmin} exists") - return True - - logger.error(f"FAILED: {mysqladmin} does NOT exist") - raise RuntimeError(f"FAILED: {mysqladmin} does NOT exist") - - def _check_backup_tool(self) -> Union[bool, Exception]: - """ - Method for checking if given backup tool path is there or not. - :return: RuntimeError on failure, True on success - """ - if os.path.exists(str(self.backup_options.get("backup_tool"))): - logger.info("OK: XtraBackup exists") - return True - - logger.error("FAILED: XtraBackup does NOT exist") - raise RuntimeError("FAILED: XtraBackup does NOT exist") - - def _check_backup_dir(self) -> Optional[bool]: - """ - Check for MySQL backup directory. - If directory exists already then, return True. If not, try to create it. - :return: True on success. RuntimeError on failure. - """ - if os.path.exists(str(self.backup_options.get("backup_dir"))): - logger.info("OK: Main backup directory exists") - return True - - return create_directory(str(self.backup_options.get("backup_dir"))) - - def _check_full_backup_dir(self) -> Optional[bool]: - """ - Check full backup directory path. - If this path exists return True if not try to create. - :return: True on success. - """ - if os.path.exists(str(self.backup_options.get("full_dir"))): - logger.info("OK: Full Backup directory exists") - return True - - return create_directory(str(self.backup_options.get("full_dir"))) - - def _check_inc_backup_dir(self) -> Optional[bool]: - """ - Check incremental backup directory path. - If this path exists return True if not try to create. - :return: True on success. - """ - if os.path.exists(str(self.backup_options.get("inc_dir"))): - logger.info("OK: Increment directory exists") - return True - - return create_directory(str(self.backup_options.get("inc_dir"))) - - def check_all_env(self) -> Union[bool, Exception]: - """ - Method for running all checks - :return: True on success, raise RuntimeError on error. 
- """ - try: - self._check_mysql_uptime() - self._check_mysql_mysql() - self._check_mysql_mysqladmin() - self._check_mysql_conf() - self._check_backup_tool() - self._check_backup_dir() - self._check_full_backup_dir() - self._check_inc_backup_dir() - except Exception as err: - logger.critical("FAILED: Check status") - logger.error(err) - raise RuntimeError("FAILED: Check status") from err - else: - logger.info("OK: Check status") - return True diff --git a/mysql_autoxtrabackup/backup_prepare/__init__.py b/mysql_autoxtrabackup/prepare/__init__.py similarity index 100% rename from mysql_autoxtrabackup/backup_prepare/__init__.py rename to mysql_autoxtrabackup/prepare/__init__.py diff --git a/mysql_autoxtrabackup/backup_prepare/prepare.py b/mysql_autoxtrabackup/prepare/prepare.py similarity index 99% rename from mysql_autoxtrabackup/backup_prepare/prepare.py rename to mysql_autoxtrabackup/prepare/prepare.py index 56a366d..df6856d 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare.py +++ b/mysql_autoxtrabackup/prepare/prepare.py @@ -7,9 +7,9 @@ from mysql_autoxtrabackup.backup_prepare.prepare_builder import ( BackupPrepareBuilderChecker, ) +from mysql_autoxtrabackup.common import helpers from mysql_autoxtrabackup.general_conf.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils import helpers logger = logging.getLogger(__name__) diff --git a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py b/mysql_autoxtrabackup/prepare/prepare_builder.py similarity index 82% rename from mysql_autoxtrabackup/backup_prepare/prepare_builder.py rename to mysql_autoxtrabackup/prepare/prepare_builder.py index 77f3c69..79d00f1 100644 --- a/mysql_autoxtrabackup/backup_prepare/prepare_builder.py +++ b/mysql_autoxtrabackup/prepare/prepare_builder.py @@ -50,28 +50,41 @@ def prepare_command_builder( :param: apply_log_only: The flag to add --apply-log-only :return: The prepare command string """ - # Base prepare command + xtrabackup_prepare_cmd = self._base_prepare_command(full_backup) + + xtrabackup_prepare_cmd = self._append_incremental_option( + incremental, xtrabackup_prepare_cmd + ) + + xtrabackup_prepare_cmd = self._append_extra_options(xtrabackup_prepare_cmd) + + return ( + f"{xtrabackup_prepare_cmd} --apply-log-only" + if apply_log_only + else xtrabackup_prepare_cmd + ) + + def _base_prepare_command(self, full_backup): xtrabackup_prepare_cmd = ( f'{self.backup_options.get("backup_tool")} --prepare ' f'--target-dir={self.backup_options.get("full_dir")}/{full_backup}' ) + return xtrabackup_prepare_cmd + def _append_incremental_option(self, incremental, xtrabackup_prepare_cmd): xtrabackup_prepare_cmd += ( f" --incremental-dir={self.backup_options.get('inc_dir')}/{incremental}" if incremental else "" ) + return xtrabackup_prepare_cmd + def _append_extra_options(self, xtrabackup_prepare_cmd): xtrabackup_prepare_cmd += ( f" {self._get_extra_options('xtra_options')}" f" {self._get_extra_options('xtra_prepare_options')}" ) - - return ( - f"{xtrabackup_prepare_cmd} --apply-log-only" - if apply_log_only - else xtrabackup_prepare_cmd - ) + return xtrabackup_prepare_cmd def _get_extra_options(self, option: str): _option = self.backup_options.get(option) From d85f3333827274a418c92ecc7b7969ed697bc445 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Tue, 12 Apr 2022 19:56:11 +0400 Subject: [PATCH 15/17] Next generation of the code refactoring --- changes/make_history.py | 2 +- docs/conf.py | 2 +- 
mysql_autoxtrabackup/__init__.py | 2 +- .../api/controller/controller.py | 6 +- mysql_autoxtrabackup/api/main.py | 2 +- mysql_autoxtrabackup/autoxtrabackup.py | 254 ++++++++++-------- mysql_autoxtrabackup/backup/backup_builder.py | 64 +++-- mysql_autoxtrabackup/backup/backup_tags.py | 95 ------- mysql_autoxtrabackup/backup/backuper.py | 34 +-- mysql_autoxtrabackup/cli/__init__.py | 0 mysql_autoxtrabackup/cli/backup.py | 0 mysql_autoxtrabackup/common/mysql_cli.py | 2 +- mysql_autoxtrabackup/configs/check_env.py | 25 +- mysql_autoxtrabackup/configs/generalops.py | 8 +- .../configs/generate_default_conf.py | 111 ++++---- mysql_autoxtrabackup/prepare/prepare.py | 177 +++++------- .../prepare/prepare_builder.py | 24 +- mysql_autoxtrabackup/process_runner/errors.py | 15 +- .../process_runner/process_runner.py | 4 +- pyproject.toml | 1 + tests/conftest.py | 12 +- tests/test_helpers.py | 2 +- 22 files changed, 354 insertions(+), 488 deletions(-) delete mode 100644 mysql_autoxtrabackup/backup/backup_tags.py delete mode 100644 mysql_autoxtrabackup/cli/__init__.py delete mode 100644 mysql_autoxtrabackup/cli/backup.py diff --git a/changes/make_history.py b/changes/make_history.py index 79344ad..4971c02 100644 --- a/changes/make_history.py +++ b/changes/make_history.py @@ -31,7 +31,7 @@ print('no changes found') sys.exit(0) -version = SourceFileLoader('version', 'mysql_autoxtrabackup/utils/version.py').load_module() +version = SourceFileLoader('version', 'mysql_autoxtrabackup/common/version.py').load_module() chunk_title = f'v{version.VERSION} ({date.today():%Y-%m-%d})' new_chunk = '## {}\n\n{}\n\n'.format(chunk_title, '\n'.join(c for *_, c in sorted(bullet_list, reverse=True))) diff --git a/docs/conf.py b/docs/conf.py index 7f7141f..1b7f62d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,7 +21,7 @@ # import sphinx_rtd_theme -from mysql_autoxtrabackup.utils.version import VERSION +from mysql_autoxtrabackup.common.version import VERSION sys.path.insert(0, os.path.abspath("../mysql_autoxtrabackup")) diff --git a/mysql_autoxtrabackup/__init__.py b/mysql_autoxtrabackup/__init__.py index 98b44ff..f942fba 100644 --- a/mysql_autoxtrabackup/__init__.py +++ b/mysql_autoxtrabackup/__init__.py @@ -1,5 +1,5 @@ """MySQL-AutoXtrabackup command-line tool, for automating tedious MySQL physical backups management using Percona Xtrabackup""" -from .utils import version +from .common import version __version__ = version.VERSION diff --git a/mysql_autoxtrabackup/api/controller/controller.py b/mysql_autoxtrabackup/api/controller/controller.py index a5fe4c3..f21a0c6 100644 --- a/mysql_autoxtrabackup/api/controller/controller.py +++ b/mysql_autoxtrabackup/api/controller/controller.py @@ -2,9 +2,9 @@ from fastapi.responses import JSONResponse from starlette.responses import RedirectResponse -from mysql_autoxtrabackup.backup_backup.backuper import Backup -from mysql_autoxtrabackup.backup_prepare.prepare import Prepare -from mysql_autoxtrabackup.utils.helpers import list_available_backups +from mysql_autoxtrabackup.backup.backuper import Backup +from mysql_autoxtrabackup.common.helpers import list_available_backups +from mysql_autoxtrabackup.prepare.prepare import Prepare router = APIRouter() diff --git a/mysql_autoxtrabackup/api/main.py b/mysql_autoxtrabackup/api/main.py index 6bab93b..e3320a9 100644 --- a/mysql_autoxtrabackup/api/main.py +++ b/mysql_autoxtrabackup/api/main.py @@ -5,7 +5,7 @@ from fastapi.openapi.utils import get_openapi from mysql_autoxtrabackup.api.controller.controller import router -from 
mysql_autoxtrabackup.utils.version import VERSION +from mysql_autoxtrabackup.common.version import VERSION app = FastAPI() diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index c9e1cfb..e53ba19 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -13,18 +13,15 @@ import pid # type: ignore from mysql_autoxtrabackup.api import main -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags -from mysql_autoxtrabackup.backup_backup.backuper import Backup -from mysql_autoxtrabackup.backup_prepare.prepare import Prepare -from mysql_autoxtrabackup.general_conf import path_config -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass -from mysql_autoxtrabackup.general_conf.generate_default_conf import ( - GenerateDefaultConfig, -) +from mysql_autoxtrabackup.backup.backup_builder import BackupCommandBuilder +from mysql_autoxtrabackup.backup.backuper import Backup +from mysql_autoxtrabackup.common import version +from mysql_autoxtrabackup.common.mysql_cli import MySQLClientHelper +from mysql_autoxtrabackup.configs import path_config +from mysql_autoxtrabackup.configs.generalops import GeneralClass +from mysql_autoxtrabackup.configs.generate_default_conf import generate_config_file as generate_config +from mysql_autoxtrabackup.prepare.prepare import Prepare from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner -from mysql_autoxtrabackup.utils import version -from mysql_autoxtrabackup.utils.mysql_cli import MySQLClientHelper logger = logging.getLogger("") destinations_hash = { @@ -148,8 +145,6 @@ def validate_file(file: str) -> None: is_eager=True, help="Create a config file template in default directory", ) -@click.option("--tag", help="Pass the tag string for each backup") -@click.option("--show-tags", is_flag=True, help="Show backup tags and exit") @click.option("-v", "--verbose", is_flag=True, help="Be verbose (print to console)") @click.option( "-lf", @@ -193,51 +188,47 @@ def validate_file(file: str) -> None: ) @click.pass_context def all_procedure( - ctx, - prepare, - backup, - run_server, - tag, - show_tags, - verbose, - log_file, - log, - defaults_file, - generate_config_file, - dry_run, - log_file_max_bytes, - log_file_backup_count, + ctx, + prepare, + backup, + run_server, + verbose, + log_file, + log, + defaults_file, + generate_config_file, + dry_run, + log_file_max_bytes, + log_file_backup_count, ) -> bool: - options = GeneralClass(defaults_file) - logging_options = options.logging_options - backup_options = options.backup_options - formatter = _get_formatter() + backup_options, logging_options, options = _get_options(defaults_file) - if verbose: - _set_log_level_format(formatter) + _set_outputs(_get_formatter(), log, log_file, log_file_backup_count, log_file_max_bytes, logging_options, verbose) + + pid_file = pid.PidFile(piddir=backup_options.get("pid_dir")) + + _factory(backup, backup_options, ctx, defaults_file, dry_run, generate_config_file, options, pid_file, prepare, + run_server, verbose) + + _log_command_history() + logger.info("Autoxtrabackup completed successfully!") + return True - if log_file: - try: - file_handler = _get_log_rotate_handler( - log_file, - logging_options, - max_bytes=log_file_max_bytes, - backup_count=log_file_backup_count, - ) - _add_log_rotate_handler(file_handler, formatter) - except PermissionError as err: - exit(f"{err} Please 
consider to run as root or sudo") +def _set_outputs(formatter, log, log_file, log_file_backup_count, log_file_max_bytes, logging_options, verbose): + _set_verbose_mode(formatter, verbose) + _set_log_file( + formatter, log_file, log_file_backup_count, log_file_max_bytes, logging_options + ) # set log level in order: 1. user argument 2. config file 3. @click default _set_log_level(log, logging_options) - validate_file(defaults_file) - pid_file = pid.PidFile(piddir=backup_options.get("pid_dir")) +def _factory(backup, backup_options, ctx, defaults_file, dry_run, generate_config_file, options, pid_file, prepare, + run_server, verbose): try: _run_commands( backup, - backup_options, ctx, defaults_file, dry_run, @@ -245,8 +236,6 @@ def all_procedure( pid_file, prepare, run_server, - show_tags, - tag, verbose, options=options, ) @@ -258,73 +247,74 @@ def all_procedure( except pid.PidFileError as error: logger.warning(f"Generic error with pid file: {str(error)}") - _log_command_history() - logger.info("Autoxtrabackup completed successfully!") - return True - def _run_commands( - backup, - backup_options, - ctx, - defaults_file, - dry_run, - generate_config_file, - pid_file, - prepare, - run_server, - show_tags, - tag, - verbose, - options, + backup, + ctx, + defaults_file, + dry_run, + generate_config_file, + pid_file, + prepare, + run_server, + verbose, + options, ): with pid_file: # User PidFile for locking to single instance - dry_run_ = dry_run - if dry_run_: - dry_run_ = 1 - logger.warning("Dry run enabled!") + dry_run_ = _set_dry_run(dry_run) - builder_obj = BackupBuilderChecker(options=options) - tagger = BackupTags(tag, builder_obj) - mysql_cli = MySQLClientHelper(options=options) + builder_obj, mysql_cli = _instantiate_objects(options) if ( - prepare is False - and backup is False - and verbose is False - and dry_run is False - and show_tags is False - and run_server is False - and generate_config_file is False + prepare is False + and backup is False + and verbose is False + and dry_run is False + and run_server is False + and generate_config_file is False ): print_help(ctx, None, value=True) elif run_server: main.run_server() - elif show_tags and defaults_file: - - Backup( - builder_obj=builder_obj, - tagger=tagger, - mysql_cli=mysql_cli, - options=options, - ).tagger.show_tags(backup_dir=str(backup_options.get("backup_dir"))) elif generate_config_file: - GenerateDefaultConfig().generate_config_file() - logger.info(f"Default config file is generated in {defaults_file}") + _generate_config_file(defaults_file) elif prepare: - Prepare( - dry_run=dry_run_, tag=tag, options=options - ).prepare_backup_and_copy_back() + _prepare_backup(dry_run_, options) elif backup: - Backup( - builder_obj=builder_obj, - tagger=tagger, - mysql_cli=mysql_cli, - options=options, - dry_run=dry_run_, - tag=tag, - ).all_backup() + _take_backup(builder_obj, dry_run_, mysql_cli, options) + + +def _set_dry_run(dry_run): + dry_run_ = dry_run + if dry_run_: + dry_run_ = 1 + logger.warning("Dry run enabled!") + return dry_run_ + + +def _generate_config_file(defaults_file): + generate_config(config=defaults_file) + logger.info(f"Default config file is generated in {defaults_file}") + + +def _prepare_backup(dry_run_, options): + Prepare(dry_run=dry_run_, options=options).prepare_backup() + + +def _take_backup(builder_obj, dry_run_, mysql_cli, options): + Backup( + builder_obj=builder_obj, + mysql_cli=mysql_cli, + options=options, + dry_run=dry_run_, + ).all_backup() + + +def _instantiate_objects(options): + builder_obj = 
BackupCommandBuilder(options=options) + mysql_cli = MySQLClientHelper(options=options) + return builder_obj, mysql_cli def _log_command_history(): @@ -333,15 +323,10 @@ def _log_command_history(): logger.info(str(history)) -def _add_log_rotate_handler(file_handler, formatter): - file_handler.setFormatter(formatter) - logger.addHandler(file_handler) - - def _handle_backup_pid_exception(backup_options, error, pid_file): pid_warning = str(backup_options.get("pid_runtime_warning")) if float(pid_warning) and time.time() - os.stat(pid_file.filename).st_ctime > float( - pid_warning + pid_warning ): pid.fh.seek(0) pid_str = pid.fh.read(16).split("\n", 1)[0].strip() @@ -354,25 +339,19 @@ def _handle_backup_pid_exception(backup_options, error, pid_file): ) -def _set_log_level(log, logging_options): - if log is not None: - logger.setLevel(log) - elif logging_options.get("log_level"): - logger.setLevel(str(logging_options.get("log_level"))) - else: - # this is the fallback default log-level. - logger.setLevel("INFO") +def _add_log_rotate_handler(file_handler, formatter): + file_handler.setFormatter(formatter) + logger.addHandler(file_handler) def _get_log_rotate_handler( - log_file: str, logging_options: Dict, max_bytes: int, backup_count: int + log_file: str, logging_options: Dict[str, str], max_bytes: int, backup_count: int ): return RotatingFileHandler( log_file, mode="a", maxBytes=max_bytes or int(str(logging_options.get("log_file_max_bytes"))), - backupCount=backup_count - or int(str(logging_options.get("log_file_backup_count"))), + backupCount=backup_count or int(str(logging_options.get("log_file_backup_count"))), ) @@ -383,6 +362,14 @@ def _get_formatter() -> logging: ) +def _get_options(defaults_file): + validate_file(defaults_file) + options = GeneralClass(defaults_file) + logging_options = options.logging_options + backup_options = options.backup_options + return backup_options, logging_options, options + + def _set_log_level_format(formatter: logging) -> None: ch = logging.StreamHandler() # control console output log level @@ -391,5 +378,36 @@ def _set_log_level_format(formatter: logging) -> None: logger.addHandler(ch) +def _set_log_file( + formatter, log_file, log_file_backup_count, log_file_max_bytes, logging_options +): + if log_file: + try: + file_handler = _get_log_rotate_handler( + log_file, + logging_options, + max_bytes=log_file_max_bytes, + backup_count=log_file_backup_count, + ) + _add_log_rotate_handler(file_handler, formatter) + except PermissionError as err: + exit(f"{err} Please consider to run as root or sudo") + + +def _set_log_level(log, logging_options): + if log is not None: + logger.setLevel(log) + elif logging_options.get("log_level"): + logger.setLevel(str(logging_options.get("log_level"))) + else: + # this is the fallback default log-level. 
+ logger.setLevel("INFO") + + +def _set_verbose_mode(formatter, verbose): + if verbose: + _set_log_level_format(formatter) + + if __name__ == "__main__": all_procedure() diff --git a/mysql_autoxtrabackup/backup/backup_builder.py b/mysql_autoxtrabackup/backup/backup_builder.py index 398b76b..7860f23 100644 --- a/mysql_autoxtrabackup/backup/backup_builder.py +++ b/mysql_autoxtrabackup/backup/backup_builder.py @@ -3,43 +3,24 @@ from dataclasses import dataclass from typing import Optional -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs.generalops import GeneralClass logger = logging.getLogger(__name__) @dataclass -class BackupBuilderChecker: +class BackupCommandBuilder: options: GeneralClass def __post_init__(self): self.mysql_options = self.options.mysql_options self.backup_options = self.options.backup_options - def general_command_builder(self) -> str: - """ - Method for building general options for backup command. - :return: String of constructed options. - """ - args = ( - f" --socket={self.mysql_options.get('mysql_socket')}" - if self.mysql_options.get("mysql_socket") - else f" --host={self.mysql_options.get('mysql_host')} --port={self.mysql_options.get('mysql_port')}" - ) - - return f"{args} {self._get_extra_options('xtra_options')}" - def full_backup_command_builder(self, full_backup_dir: str) -> str: - """ - Method for creating Full Backup command. - :param: full_backup_dir the path of backup directory - :return: generated command string - """ return ( - f"{self.backup_options.get('backup_tool')} --defaults-file={self.mysql_options.get('mycnf')} " - f"--user={self.mysql_options.get('mysql_user')} --password={self.mysql_options.get('mysql_password')} " - f"--target-dir={full_backup_dir} --backup" - ) + self.general_command_builder() + self._get_full_backup_command(full_backup_dir) + + self._get_common_command_string() + ) def inc_backup_command_builder( self, @@ -47,7 +28,25 @@ def inc_backup_command_builder( inc_backup_dir: Optional[str], recent_inc_bck: Optional[str] = None, ) -> str: - xtrabackup_inc_cmd_base = ( + xtrabackup_inc_cmd_base = self._get_inc_backup_base_command( + inc_backup_dir=inc_backup_dir + ) + + xtrabackup_inc_cmd_base += self._add_incremental_basedir( + recent_inc_bck=recent_inc_bck, recent_full_bck=recent_full_bck + ) + + return f"{xtrabackup_inc_cmd_base} --backup {self._get_common_command_string()}" + + def _get_common_command(self) -> str: + return ( + f" --socket={self.mysql_options.get('mysql_socket')}" + if self.mysql_options.get("mysql_socket") + else f" --host={self.mysql_options.get('mysql_host')} --port={self.mysql_options.get('mysql_port')}" + ) + + def _get_inc_backup_base_command(self, inc_backup_dir: str) -> str: + return ( f'{self.backup_options.get("backup_tool")} ' f'--defaults-file={self.mysql_options.get("mycnf")} ' f'--user={self.mysql_options.get("mysql_user")} ' @@ -55,14 +54,23 @@ def inc_backup_command_builder( f"--target-dir={inc_backup_dir}" ) - xtrabackup_inc_cmd_base += ( + def _add_incremental_basedir(self, recent_inc_bck: str, recent_full_bck: str): + return ( f' --incremental-basedir={self.backup_options.get("inc_dir")}/{recent_inc_bck}' if recent_inc_bck else f' --incremental-basedir={self.backup_options.get("full_dir")}/{recent_full_bck}' ) - return f"{xtrabackup_inc_cmd_base} --backup {self.general_command_builder()}" + def _get_full_backup_command(self, full_backup_dir: str) -> str: + return ( + f"{self.backup_options.get('backup_tool')} 
--defaults-file={self.mysql_options.get('mycnf')} " + f"--user={self.mysql_options.get('mysql_user')} --password={self.mysql_options.get('mysql_password')} " + f"--target-dir={full_backup_dir} --backup" + ) - def _get_extra_options(self, option: str): + def _get_extra_options(self, option: str) -> str: _option = self.backup_options.get(option) return f" {_option}" if _option else "" + + def _get_common_command_string(self) -> str: + return f"{self._get_common_command()} {self._get_extra_options('xtra_options')}" diff --git a/mysql_autoxtrabackup/backup/backup_tags.py b/mysql_autoxtrabackup/backup/backup_tags.py deleted file mode 100644 index f37a004..0000000 --- a/mysql_autoxtrabackup/backup/backup_tags.py +++ /dev/null @@ -1,95 +0,0 @@ -import logging -import os -from dataclasses import dataclass -from datetime import datetime -from typing import Optional - -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.common import helpers - -logger = logging.getLogger(__name__) - - -@dataclass -class BackupTags: - tag: Optional[str] - builder_obj: BackupBuilderChecker - - def add_tag( - self, backup_type: str, backup_size: Optional[str], backup_status: Optional[str] - ) -> bool: - """ - Method for adding backup tags - :param backup_type: The backup type - Full/Inc - :param backup_size: The size of the backup in human-readable format - :param backup_status: Status: OK or Status: Failed - :return: True if no exception - """ - # skip tagging unless self.tag - if not self.tag: - logger.info("TAGGING SKIPPED") - return True - - # Currently, only support Inc and Full types, calculate name based on this - assert backup_type in { - "Full", - "Inc", - }, f"add_tag(): backup_type {backup_type}: must be 'Full' or 'Inc'" - - backup_name = ( - helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("full_dir")) - ) - if backup_type == "Full" - else helpers.get_latest_dir_name( - str(self.builder_obj.backup_options.get("inc_dir")) - ) - ) - - # Calculate more tag fields, create string - backup_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") - backup_tag_str = ( - "{bk_name}\t{bk_type}\t{bk_status}\t{bk_timestamp}\t{bk_size}\t'{bk_tag}'\n" - ) - - # Apply tag - with open( - f'{self.builder_obj.backup_options.get("backup_dir")}/backup_tags.txt', "a" - ) as backup_tags_file: - backup_tag_final = backup_tag_str.format( - bk_name=backup_name, - bk_type=backup_type, - bk_status=backup_status, - bk_timestamp=backup_timestamp, - bk_size=backup_size, - bk_tag=self.tag, - ) - - backup_tags_file.write(backup_tag_final) - return True - - @staticmethod - def show_tags(backup_dir: str, tag_file: Optional[str] = None) -> Optional[bool]: - tag_file = tag_file or f"{backup_dir}/backup_tags.txt" - if os.path.isfile(tag_file): - with open(f"{backup_dir}/backup_tags.txt", "r") as backup_tags: - from_file = backup_tags.read() - column_names = "{0}\t{1}\t{2}\t{3}\t{4}\tTAG\n".format( - "Backup".ljust(19), - "Type".ljust(4), - "Status".ljust(2), - "Completion_time".ljust(19), - "Size", - ) - extra_str = "{}\n".format("-" * (len(column_names) + 21)) - print(column_names + extra_str + from_file) - logger.info(column_names + extra_str + from_file) - return True - else: - logger.warning( - "Could not find backup_tags.txt inside given backup directory. Can't print tags." - ) - print( - "WARNING: Could not find backup_tags.txt inside given backup directory. Can't print tags." 
- ) - return None diff --git a/mysql_autoxtrabackup/backup/backuper.py b/mysql_autoxtrabackup/backup/backuper.py index 19c24e1..10b2c38 100755 --- a/mysql_autoxtrabackup/backup/backuper.py +++ b/mysql_autoxtrabackup/backup/backuper.py @@ -12,11 +12,10 @@ from functools import wraps from typing import Optional -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags +from mysql_autoxtrabackup.backup.backup_builder import BackupCommandBuilder from mysql_autoxtrabackup.common import helpers, mysql_cli -from mysql_autoxtrabackup.general_conf.check_env import CheckEnv -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs.check_env import CheckEnv +from mysql_autoxtrabackup.configs.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) @@ -54,11 +53,11 @@ def wrapped(_self, inc_dir: Optional[str] = None): return wrapped -def _get_inc_dir(builder_obj: BackupBuilderChecker) -> str: +def _get_inc_dir(builder_obj: BackupCommandBuilder) -> str: return str(builder_obj.backup_options.get("inc_dir")) -def _get_full_dir(builder_obj: BackupBuilderChecker) -> str: +def _get_full_dir(builder_obj: BackupCommandBuilder) -> str: return str(builder_obj.backup_options.get("full_dir")) @@ -72,12 +71,10 @@ def _get_recent_bck(path: str) -> str: @dataclass class Backup: - builder_obj: BackupBuilderChecker - tagger: BackupTags + builder_obj: BackupCommandBuilder mysql_cli: mysql_cli.MySQLClientHelper options: GeneralClass dry_run: Optional[bool] = None - tag: Optional[str] = None _full_dir: str = field(init=False) _inc_dir: str = field(init=False) @@ -162,7 +159,7 @@ def _take_full_backup(self) -> bool: full_backup_dir=full_backup_dir ) - return self._get_status("Full", full_backup_dir, xtrabackup_cmd) + return self._get_status(xtrabackup_cmd) @_is_dry_run def _take_inc_backup(self) -> bool: @@ -190,7 +187,7 @@ def _take_inc_backup(self) -> bool: recent_inc_bck=recent_inc_bck, ) - return self._get_status("Inc", inc_backup_dir, xtrabackup_inc_cmd) + return self._get_status(xtrabackup_inc_cmd) def _run_backup(self) -> None: if not _get_recent_bck(self._full_dir): @@ -225,7 +222,7 @@ def _take_fresh_full_backup(self): self._flush_logs_backup_and_clean() def _flush_logs_backup_and_clean(self, clean_full: bool = False) -> None: - if self._flush_logs_and_backup(): + if self._flush_logs_and_take_backup(): self._clean_backup_dirs(clean_full=clean_full) def _clean_backup_dirs(self, clean_full: bool = False) -> None: @@ -236,20 +233,13 @@ def _clean_backup_dirs(self, clean_full: bool = False) -> None: # Removing inc backups self._clean_inc_backup_dir() - def _flush_logs_and_backup(self) -> bool: + def _flush_logs_and_take_backup(self) -> bool: return ( self.mysql_cli.mysql_run_command("flush logs") and self._take_full_backup() ) def _get_status( - self, backup_type: str, backup_dir: str, xtrabackup_cmd: str + self, xtrabackup_cmd: str ) -> bool: logger.debug(f'Starting {self.builder_obj.backup_options.get("backup_tool")}') - status = ProcessRunner.run_command(xtrabackup_cmd) - status_str = "OK" if status is True else "FAILED" - self.tagger.add_tag( - backup_type=backup_type, - backup_size=helpers.get_folder_size(backup_dir), - backup_status=status_str, - ) - return status + return ProcessRunner.run_command(xtrabackup_cmd) diff --git a/mysql_autoxtrabackup/cli/__init__.py 
b/mysql_autoxtrabackup/cli/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/mysql_autoxtrabackup/cli/backup.py b/mysql_autoxtrabackup/cli/backup.py deleted file mode 100644 index e69de29..0000000 diff --git a/mysql_autoxtrabackup/common/mysql_cli.py b/mysql_autoxtrabackup/common/mysql_cli.py index 4b9bf4d..1f3a505 100644 --- a/mysql_autoxtrabackup/common/mysql_cli.py +++ b/mysql_autoxtrabackup/common/mysql_cli.py @@ -3,7 +3,7 @@ import logging from dataclasses import dataclass -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs.generalops import GeneralClass from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) diff --git a/mysql_autoxtrabackup/configs/check_env.py b/mysql_autoxtrabackup/configs/check_env.py index 1fef9ea..69d30e9 100644 --- a/mysql_autoxtrabackup/configs/check_env.py +++ b/mysql_autoxtrabackup/configs/check_env.py @@ -105,6 +105,7 @@ def _check_mysql_conf(self) -> Optional[bool]: :return: True on success, raise RuntimeError on error. """ my_cnf = self.mysql_options.get("mycnf") + if not my_cnf or my_cnf == "": logger.info("Skipping my.cnf check, because it is not specified") return True @@ -115,15 +116,23 @@ def _check_mysql_conf(self) -> Optional[bool]: logger.info("OK: MySQL configuration file exists") return True - def _is_all_paths_exist(self): + def _is_all_paths_exist(self) -> bool: return all(_is_path_exists(_path) for _path in self._required_dirs.values()) - def _is_all_binaries_exist(self): + def _is_all_binaries_exist(self) -> bool: return all( _is_binary_exists(_binary_path) for _binary_path in self._required_binaries.values() ) + def _is_mysql_conn_options_provided(self) -> None: + if not self.mysql_options.get("mysql_socket") and not ( + self.mysql_options.get("mysql_host") + and self.mysql_options.get("mysql_port") + ): + logger.critical(MYSQL_CONN_MSG) + raise RuntimeError(MYSQL_CONN_MSG) + def _build_status_check_command(self) -> str: self._is_mysql_conn_options_provided() @@ -135,22 +144,14 @@ def _build_status_check_command(self) -> str: return self._append_conn_string(status_args) - def _is_mysql_conn_options_provided(self): - if not self.mysql_options.get("mysql_socket") and not ( - self.mysql_options.get("mysql_host") - and self.mysql_options.get("mysql_port") - ): - logger.critical(MYSQL_CONN_MSG) - raise RuntimeError(MYSQL_CONN_MSG) - - def _append_conn_string(self, status_args): + def _append_conn_string(self, status_args) -> str: status_args += ( f' --socket={self.mysql_options.get("mysql_socket")}' if self.mysql_options.get("mysql_socket") else "" ) if self.mysql_options.get("mysql_host") and self.mysql_options.get( - "mysql_port" + "mysql_port" ): status_args += f' --host={self.mysql_options.get("mysql_host")}' status_args += f' --port={self.mysql_options.get("mysql_port")}' diff --git a/mysql_autoxtrabackup/configs/generalops.py b/mysql_autoxtrabackup/configs/generalops.py index d2ad279..543e5e0 100644 --- a/mysql_autoxtrabackup/configs/generalops.py +++ b/mysql_autoxtrabackup/configs/generalops.py @@ -6,10 +6,8 @@ import humanfriendly # type: ignore -from mysql_autoxtrabackup.general_conf import path_config -from mysql_autoxtrabackup.general_conf.generate_default_conf import ( - GenerateDefaultConfig, -) +from mysql_autoxtrabackup.configs import path_config +from mysql_autoxtrabackup.configs.generate_default_conf import generate_config_file logger = logging.getLogger(__name__) @@ -17,7 +15,7 @@ def 
_create_default_config(config: str, missing: str) -> None: logger.critical(f"Missing config file : {missing}") logger.warning("Creating default config file...") - GenerateDefaultConfig(config=config).generate_config_file() + generate_config_file(config=config) logger.info(f"Default config file is generated in {config}") diff --git a/mysql_autoxtrabackup/configs/generate_default_conf.py b/mysql_autoxtrabackup/configs/generate_default_conf.py index 3ddb9c0..634b464 100644 --- a/mysql_autoxtrabackup/configs/generate_default_conf.py +++ b/mysql_autoxtrabackup/configs/generate_default_conf.py @@ -9,54 +9,63 @@ from . import path_config -class GenerateDefaultConfig: - def __init__(self, config: str = path_config.config_path_file) -> None: - self.conf = config - self.home = path_config.home - with contextlib.suppress(FileExistsError, OSError): - if not exists(path_config.config_path): - makedirs(path_config.config_path) - - def generate_config_file(self) -> None: - with open(self.conf, "w+") as cfg_file: - config = configparser.ConfigParser(allow_no_value=True) - section1 = "MySQL" - config.add_section(section1) - config.set(section1, "mysql", "/usr/bin/mysql") - config.set(section1, "mycnf", "") - config.set(section1, "mysqladmin", "/usr/bin/mysqladmin") - config.set(section1, "mysql_user", "root") - config.set(section1, "mysql_password", "12345") - config.set( - section1, - "## Set either mysql_socket only, OR host + port. If both are set mysql_socket is used", - ) - config.set(section1, "mysql_socket", "/var/lib/mysql/mysql.sock") - config.set(section1, "#mysql_host", "127.0.0.1") - config.set(section1, "#mysql_port", "3306") - config.set(section1, "datadir", "/var/lib/mysql") - - section2 = "Logging" - config.add_section(section2) - config.set(section2, "#[DEBUG,INFO,WARNING,ERROR,CRITICAL]") - config.set(section2, "log", "DEBUG") - config.set(section2, "log_file_max_bytes", "1073741824") - config.set(section2, "log_file_backup_count", "7") - - section3 = "Backup" - config.add_section(section3) - config.set(section3, "#Optional: set pid directory") - config.set(section3, "pid_dir", "/tmp/MySQL-AutoXtraBackup") - config.set(section3, "tmp_dir", join(self.home, "XB_TEST/mysql_datadirs")) - config.set( - section3, - "#Optional: set warning if pid of backup us running for longer than X", - ) - config.set(section3, "pid_runtime_warning", "2 Hours") - config.set(section3, "backup_dir", join(self.home, "XB_TEST/backup_dir")) - config.set(section3, "backup_tool", "/usr/bin/xtrabackup") - config.set(section3, "xtra_options", "--no-server-version-check") - config.set(section3, "#xtra_prepare_options", "") - config.set(section3, "#full_backup_interval", "1 day") - - config.write(cfg_file) +def _set_logging_options(config: configparser.ConfigParser) -> None: + section2 = "Logging" + config.add_section(section2) + config.set(section2, "#[DEBUG,INFO,WARNING,ERROR,CRITICAL]") + config.set(section2, "log", "DEBUG") + config.set(section2, "log_file_max_bytes", "1073741824") + config.set(section2, "log_file_backup_count", "7") + + +def _set_mysql_options(config: configparser.ConfigParser) -> None: + section1 = "MySQL" + config.add_section(section1) + config.set(section1, "mysql", "/usr/bin/mysql") + config.set(section1, "mycnf", "") + config.set(section1, "mysqladmin", "/usr/bin/mysqladmin") + config.set(section1, "mysql_user", "root") + config.set(section1, "mysql_password", "12345") + config.set( + section1, + "## Set either mysql_socket only, OR host + port. 
If both are set mysql_socket is used", + ) + config.set(section1, "mysql_socket", "/var/lib/mysql/mysql.sock") + config.set(section1, "#mysql_host", "127.0.0.1") + config.set(section1, "#mysql_port", "3306") + config.set(section1, "datadir", "/var/lib/mysql") + + +def _set_backup_options(config: configparser.ConfigParser, home: str) -> None: + section3 = "Backup" + config.add_section(section3) + config.set(section3, "#Optional: set pid directory") + config.set(section3, "pid_dir", "/tmp/MySQL-AutoXtraBackup") + config.set(section3, "tmp_dir", join(home, "XB_TEST/mysql_datadirs")) + config.set( + section3, + "#Optional: set warning if pid of backup us running for longer than X", + ) + config.set(section3, "pid_runtime_warning", "2 Hours") + config.set(section3, "backup_dir", join(home, "XB_TEST/backup_dir")) + config.set(section3, "backup_tool", "/usr/bin/xtrabackup") + config.set(section3, "xtra_options", "--no-server-version-check") + config.set(section3, "#xtra_prepare_options", "") + config.set(section3, "#full_backup_interval", "1 day") + + +def generate_config_file(config: str = path_config.config_path_file, home: str = path_config.home) -> None: + with contextlib.suppress(FileExistsError, OSError): + if not exists(path_config.config_path): + makedirs(path_config.config_path) + + with open(config, "w+") as cfg_file: + config = configparser.ConfigParser(allow_no_value=True) + _set_mysql_options(config) + + _set_logging_options(config) + + _set_backup_options(config, home) + + config.write(cfg_file) + diff --git a/mysql_autoxtrabackup/prepare/prepare.py b/mysql_autoxtrabackup/prepare/prepare.py index df6856d..da513a1 100644 --- a/mysql_autoxtrabackup/prepare/prepare.py +++ b/mysql_autoxtrabackup/prepare/prepare.py @@ -4,77 +4,81 @@ from dataclasses import dataclass from typing import List, Optional, Tuple -from mysql_autoxtrabackup.backup_prepare.prepare_builder import ( - BackupPrepareBuilderChecker, -) from mysql_autoxtrabackup.common import helpers -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs.generalops import GeneralClass +from mysql_autoxtrabackup.prepare.prepare_builder import BackupPrepareBuilderChecker from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner logger = logging.getLogger(__name__) +def _set_apply_log_only_found_backups( + dir_: str, found_backups: Optional[Tuple[str, str]] +): + apply_log_only = None + if dir_ != found_backups[0]: + logger.info(f"Preparing inc backups in sequence. inc backup dir/name is {dir_}") + apply_log_only = True + else: + logger.info(f"Preparing last incremental backup, inc backup dir/name is {dir_}") + return apply_log_only + + +def _ask_input() -> str: + x = "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" + print(x) + print("") + print("Preparing full/inc backups!") + answer = input("Are you sure? [Y/n]: ") + print("") + print(x) + return answer + + @dataclass class Prepare: options: GeneralClass dry_run: Optional[bool] = None - tag: Optional[str] = None def __post_init__(self): self.prepare_options = BackupPrepareBuilderChecker(options=self.options) - if self.tag and not os.path.isfile( - f'{self.prepare_options.backup_options.get("backup_dir")}/backup_tags.txt' - ): - raise RuntimeError( - "Could not find backup_tags.txt inside backup directory. 
" - "Please run without --tag option" - ) - self.recent_bck = helpers.get_latest_dir_name( str(self.prepare_options.backup_options.get("full_dir")) ) self.inc_dir = str(self.prepare_options.backup_options.get("inc_dir")) - def run_prepare_command(self, cmd: Optional[str]) -> Optional[bool]: + def prepare_backup(self) -> None: + answer = _ask_input() + + time.sleep(3) + self._handle_prompt(answer) + + def _handle_prompt(self, answer) -> None: + if answer.lower() == "y": + self._prepare_inc_and_full_backups() + else: + print("Please type Y or n!") + + def _run_prepare_command(self, cmd: Optional[str]) -> Optional[bool]: logger.info(f"Running prepare command -> {cmd}") if self.dry_run: return True return ProcessRunner.run_command(cmd) - def prepare_with_tags(self) -> Optional[bool]: - # Method for preparing backups based on passed backup tags - found_backups = BackupPrepareBuilderChecker.parse_backup_tags( - backup_dir=str(self.prepare_options.backup_options.get("backup_dir")), - tag_name=self.tag, - ) - - self._prepare_and_run_using_tags(found_backups) - - logger.info("- - - - The end of the Prepare Stage. - - - -") - return True - - def prepare_run_incremental_backups( - self, found_backups: Optional[Tuple[str, str]] + def _prepare_run_incremental_backups( + self, found_backups: Optional[Tuple[str, str]] ) -> None: logger.info("Preparing Incs: ") self._iterate_and_run_found_backups( found_backups, helpers.sorted_ls(self.inc_dir) ) - def prepare_only_full_backup(self) -> Optional[bool]: + def _prepare_only_full_backup(self) -> Optional[bool]: if self.recent_bck: - apply_log_only = None - if os.listdir(self.inc_dir): - logger.info("- - - - Preparing Full backup for incrementals - - - -") - logger.info( - "- - - - Final prepare,will occur after preparing all inc backups - - - -" - ) - time.sleep(3) - - apply_log_only = True + apply_log_only = self._set_apply_log_only() self._prepare_and_run( recent_bck=self.recent_bck, apply_log_only=apply_log_only @@ -82,16 +86,16 @@ def prepare_only_full_backup(self) -> Optional[bool]: return True - def prepare_inc_full_backups(self) -> Optional[bool]: + def _prepare_inc_and_full_backups(self) -> Optional[bool]: if not os.listdir(self.inc_dir): logger.info( "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -" ) - return self.prepare_only_full_backup() + return self._prepare_only_full_backup() else: logger.info("- - - - You have Incremental backups. - - - -") - if self.prepare_only_full_backup(): + if self._prepare_only_full_backup(): logger.info("Preparing Incs: ") list_of_dir = sorted(os.listdir(self.inc_dir)) self._iterate_and_run_sequential_increment_backups(list_of_dir) @@ -99,38 +103,9 @@ def prepare_inc_full_backups(self) -> Optional[bool]: logger.info("- - - - The end of the Prepare Stage. - - - -") return True - def _prepare_and_run_using_tags( - self, found_backups: Optional[Tuple[str, str]] - ) -> None: - if found_backups[1] == "Full": - if self.recent_bck: - logger.info("- - - - Preparing Full Backup - - - -") - self._prepare_and_run(recent_bck=self.recent_bck) - - elif found_backups[1] == "Inc": - if not os.listdir(self.inc_dir): - logger.info( - "- - - - You have no Incremental backups. So will prepare only latest Full backup - - - -" - ) - self.prepare_only_full_backup() - else: - logger.info("- - - - You have Incremental backups. 
- - - -") - if self.prepare_only_full_backup(): - self.prepare_run_incremental_backups(found_backups) - def _iterate_and_run_sequential_increment_backups(self, dir_: List[str]) -> None: for inc_backup_dir in dir_: - apply_log_only = None - if inc_backup_dir != max(os.listdir(self.inc_dir)): - logger.info( - f"Preparing Incremental backups in sequence. Incremental backup dir/name is {inc_backup_dir}" - ) - - apply_log_only = True - else: - logger.info( - f"Preparing last Incremental backup, inc backup dir/name is {inc_backup_dir}" - ) + apply_log_only = self._set_apply_log_only_exclude_recent(inc_backup_dir) self._prepare_and_run( recent_bck=self.recent_bck, @@ -139,55 +114,51 @@ def _iterate_and_run_sequential_increment_backups(self, dir_: List[str]) -> None ) def _prepare_and_run( - self, - recent_bck: str, - apply_log_only: Optional[bool] = None, - dir_: Optional[str] = None, + self, + recent_bck: str, + apply_log_only: Optional[bool] = None, + dir_: Optional[str] = None, ) -> None: backup_prepare_cmd = self.prepare_options.prepare_command_builder( full_backup=recent_bck, incremental=dir_, apply_log_only=apply_log_only, ) - self.run_prepare_command(backup_prepare_cmd) + self._run_prepare_command(backup_prepare_cmd) def _iterate_and_run_found_backups( - self, found_backups: Optional[Tuple[str, str]], list_of_dir: List[str] + self, found_backups: Optional[Tuple[str, str]], list_of_dir: List[str] ) -> None: # Limit the iteration until this found backup for dir_ in list_of_dir[: list_of_dir.index(found_backups[0]) + 1]: - apply_log_only = None - if dir_ != found_backups[0]: - logger.info( - f"Preparing inc backups in sequence. inc backup dir/name is {dir_}" - ) - apply_log_only = True - else: - logger.info( - f"Preparing last incremental backup, inc backup dir/name is {dir_}" - ) + apply_log_only = _set_apply_log_only_found_backups(dir_, found_backups) self._prepare_and_run( recent_bck=self.recent_bck, apply_log_only=apply_log_only, dir_=dir_ ) - def prepare_backup_and_copy_back(self) -> None: - x = "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" + def _set_apply_log_only(self) -> bool: + apply_log_only = None + if os.listdir(self.inc_dir): + logger.info("- - - - Preparing Full backup for incrementals - - - -") + logger.info( + "- - - - Final prepare,will occur after preparing all inc backups - - - -" + ) + time.sleep(3) - print(x) - print("") - print("Preparing full/inc backups!") - answer = input("Are you sure? [Y/n]: ") - print("") - print(x) + apply_log_only = True + return apply_log_only - time.sleep(3) + def _set_apply_log_only_exclude_recent(self, inc_backup_dir: str) -> bool: + apply_log_only = None + if inc_backup_dir != max(os.listdir(self.inc_dir)): + logger.info( + f"Preparing Incremental backups in sequence. 
Incremental backup dir/name is {inc_backup_dir}" + ) - if answer.lower() == "y": - if not self.tag: - self.prepare_inc_full_backups() - else: - logger.info("Backup tag will be used to prepare backups") - self.prepare_with_tags() + apply_log_only = True else: - print("Please type Y or n!") + logger.info( + f"Preparing last Incremental backup, inc backup dir/name is {inc_backup_dir}" + ) + return apply_log_only diff --git a/mysql_autoxtrabackup/prepare/prepare_builder.py b/mysql_autoxtrabackup/prepare/prepare_builder.py index 79d00f1..0e403f7 100644 --- a/mysql_autoxtrabackup/prepare/prepare_builder.py +++ b/mysql_autoxtrabackup/prepare/prepare_builder.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Optional, Tuple -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs.generalops import GeneralClass logger = logging.getLogger(__name__) @@ -15,28 +15,6 @@ class BackupPrepareBuilderChecker: def __post_init__(self): self.backup_options = self.options.backup_options - @staticmethod - def parse_backup_tags( - backup_dir: Optional[str], tag_name: Optional[str] - ) -> Optional[Tuple[str, str]]: - """ - Static Method for returning the backup directory name and backup type - :param: backup_dir: The backup directory path - :param: tag_name: The tag name to search - :return: Tuple of (backup directory, backup type) (2017-11-09_19-37-16, Full). - :raises: RuntimeError if there is no such tag inside backup_tags.txt - """ - if os.path.isfile(f"{backup_dir}/backup_tags.txt"): - with open(f"{backup_dir}/backup_tags.txt", "r") as backup_tags: - f = backup_tags.readlines() - - for i in f: - split_ = i.split("\t") - if tag_name == split_[-1].rstrip("'\n\r").lstrip("'"): - return split_[0], split_[1] - raise RuntimeError("There is no such tag for backups") - return None - def prepare_command_builder( self, full_backup: Optional[str], diff --git a/mysql_autoxtrabackup/process_runner/errors.py b/mysql_autoxtrabackup/process_runner/errors.py index e32d72d..3de6140 100644 --- a/mysql_autoxtrabackup/process_runner/errors.py +++ b/mysql_autoxtrabackup/process_runner/errors.py @@ -2,11 +2,9 @@ logger = logging.getLogger(__name__) -# TODO: use these errors in the future - keeping it for future - def log_error(expression: str, message: str) -> None: - logger.error("FAILED: " + expression + " " + message) + logger.error(f"FAILED: {expression} {message}") class Error(Exception): @@ -55,14 +53,3 @@ def __init__(self, expression: str, message: str) -> None: self.expression = expression self.message = message log_error(self.expression, self.message) - - -class BackupArchiveNotConfigured(Error): - """ - Exception raised when archive_max_size and archive_max_duration configs are not set - """ - - def __init__(self, expression: str, message: str) -> None: - self.expression = expression - self.message = message - log_error(self.expression, self.message) diff --git a/mysql_autoxtrabackup/process_runner/process_runner.py b/mysql_autoxtrabackup/process_runner/process_runner.py index 7eb64c5..f1263df 100644 --- a/mysql_autoxtrabackup/process_runner/process_runner.py +++ b/mysql_autoxtrabackup/process_runner/process_runner.py @@ -6,8 +6,8 @@ from subprocess import PIPE, STDOUT from typing import List, Optional -from mysql_autoxtrabackup.general_conf import path_config -from mysql_autoxtrabackup.general_conf.generalops import GeneralClass +from mysql_autoxtrabackup.configs import path_config +from mysql_autoxtrabackup.configs.generalops import GeneralClass 
logger = logging.getLogger(__name__) diff --git a/pyproject.toml b/pyproject.toml index 8d52c15..b8ee94c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ dependencies = [ "humanfriendly >=2.0", "fastapi >= 0.63.0", "uvicorn >= 0.13.4", + "dynacli >= 1.0.7" ] diff --git a/tests/conftest.py b/tests/conftest.py index 8990742..6f4cb0f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,13 +2,13 @@ from fastapi.testclient import TestClient from mysql_autoxtrabackup.api.main import app -from mysql_autoxtrabackup.backup_backup.backup_builder import BackupBuilderChecker -from mysql_autoxtrabackup.backup_backup.backup_tags import BackupTags -from mysql_autoxtrabackup.backup_backup.backuper import Backup -from mysql_autoxtrabackup.general_conf.path_config import config_path_file -from mysql_autoxtrabackup.utils.mysql_cli import MySQLClientHelper +from mysql_autoxtrabackup.backup.backup_builder import BackupCommandBuilder +from mysql_autoxtrabackup.backup.backup_tags import BackupTags +from mysql_autoxtrabackup.backup.backuper import Backup +from mysql_autoxtrabackup.common.mysql_cli import MySQLClientHelper +from mysql_autoxtrabackup.configs.path_config import config_path_file -builder_obj = BackupBuilderChecker(config=config_path_file, dry_run=None) +builder_obj = BackupCommandBuilder(config=config_path_file, dry_run=None) tagger = BackupTags(None, builder_obj) mysql_cli = MySQLClientHelper(config=config_path_file) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index e42cbee..ce4d0b6 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,7 +1,7 @@ import os import shutil -from mysql_autoxtrabackup.utils import helpers +from mysql_autoxtrabackup.common import helpers class TestHelpers: From 753a8603051ae441486d8bda1af0289700cfc10e Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Tue, 12 Apr 2022 20:12:02 +0400 Subject: [PATCH 16/17] For today it is enough :D --- mysql_autoxtrabackup/autoxtrabackup.py | 114 +++++++++++------- mysql_autoxtrabackup/backup/backuper.py | 4 +- mysql_autoxtrabackup/configs/check_env.py | 6 +- mysql_autoxtrabackup/configs/generalops.py | 4 +- .../configs/generate_default_conf.py | 5 +- mysql_autoxtrabackup/prepare/prepare.py | 14 +-- .../prepare/prepare_builder.py | 3 +- 7 files changed, 85 insertions(+), 65 deletions(-) diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index e53ba19..d370800 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -19,7 +19,9 @@ from mysql_autoxtrabackup.common.mysql_cli import MySQLClientHelper from mysql_autoxtrabackup.configs import path_config from mysql_autoxtrabackup.configs.generalops import GeneralClass -from mysql_autoxtrabackup.configs.generate_default_conf import generate_config_file as generate_config +from mysql_autoxtrabackup.configs.generate_default_conf import ( + generate_default_config_file as generate_config, +) from mysql_autoxtrabackup.prepare.prepare import Prepare from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner @@ -188,34 +190,58 @@ def validate_file(file: str) -> None: ) @click.pass_context def all_procedure( - ctx, - prepare, - backup, - run_server, - verbose, - log_file, - log, - defaults_file, - generate_config_file, - dry_run, - log_file_max_bytes, - log_file_backup_count, + prepare, + backup, + run_server, + verbose, + log_file, + log, + defaults_file, + generate_config_file, + dry_run, + log_file_max_bytes, + log_file_backup_count, ) -> 
bool: backup_options, logging_options, options = _get_options(defaults_file) - _set_outputs(_get_formatter(), log, log_file, log_file_backup_count, log_file_max_bytes, logging_options, verbose) + _set_outputs( + _get_formatter(), + log, + log_file, + log_file_backup_count, + log_file_max_bytes, + logging_options, + verbose, + ) pid_file = pid.PidFile(piddir=backup_options.get("pid_dir")) - _factory(backup, backup_options, ctx, defaults_file, dry_run, generate_config_file, options, pid_file, prepare, - run_server, verbose) + _factory( + backup, + backup_options, + defaults_file, + dry_run, + generate_config_file, + options, + pid_file, + prepare, + run_server, + ) _log_command_history() logger.info("Autoxtrabackup completed successfully!") return True -def _set_outputs(formatter, log, log_file, log_file_backup_count, log_file_max_bytes, logging_options, verbose): +def _set_outputs( + formatter, + log, + log_file, + log_file_backup_count, + log_file_max_bytes, + logging_options, + verbose, +): _set_verbose_mode(formatter, verbose) _set_log_file( formatter, log_file, log_file_backup_count, log_file_max_bytes, logging_options @@ -224,19 +250,26 @@ def _set_outputs(formatter, log, log_file, log_file_backup_count, log_file_max_b _set_log_level(log, logging_options) -def _factory(backup, backup_options, ctx, defaults_file, dry_run, generate_config_file, options, pid_file, prepare, - run_server, verbose): +def _factory( + backup, + backup_options, + defaults_file, + dry_run, + generate_config_file, + options, + pid_file, + prepare, + run_server, +): try: _run_commands( backup, - ctx, defaults_file, dry_run, generate_config_file, pid_file, prepare, run_server, - verbose, options=options, ) @@ -249,33 +282,21 @@ def _factory(backup, backup_options, ctx, defaults_file, dry_run, generate_confi def _run_commands( - backup, - ctx, - defaults_file, - dry_run, - generate_config_file, - pid_file, - prepare, - run_server, - verbose, - options, + backup, + defaults_file, + dry_run, + generate_config_file, + pid_file, + prepare, + run_server, + options, ): with pid_file: # User PidFile for locking to single instance dry_run_ = _set_dry_run(dry_run) builder_obj, mysql_cli = _instantiate_objects(options) - if ( - prepare is False - and backup is False - and verbose is False - and dry_run is False - and run_server is False - and generate_config_file is False - ): - print_help(ctx, None, value=True) - - elif run_server: + if run_server: main.run_server() elif generate_config_file: _generate_config_file(defaults_file) @@ -326,7 +347,7 @@ def _log_command_history(): def _handle_backup_pid_exception(backup_options, error, pid_file): pid_warning = str(backup_options.get("pid_runtime_warning")) if float(pid_warning) and time.time() - os.stat(pid_file.filename).st_ctime > float( - pid_warning + pid_warning ): pid.fh.seek(0) pid_str = pid.fh.read(16).split("\n", 1)[0].strip() @@ -345,13 +366,14 @@ def _add_log_rotate_handler(file_handler, formatter): def _get_log_rotate_handler( - log_file: str, logging_options: Dict[str, str], max_bytes: int, backup_count: int + log_file: str, logging_options: Dict[str, str], max_bytes: int, backup_count: int ): return RotatingFileHandler( log_file, mode="a", maxBytes=max_bytes or int(str(logging_options.get("log_file_max_bytes"))), - backupCount=backup_count or int(str(logging_options.get("log_file_backup_count"))), + backupCount=backup_count + or int(str(logging_options.get("log_file_backup_count"))), ) @@ -379,7 +401,7 @@ def _set_log_level_format(formatter: logging) -> None: def 
_set_log_file( - formatter, log_file, log_file_backup_count, log_file_max_bytes, logging_options + formatter, log_file, log_file_backup_count, log_file_max_bytes, logging_options ): if log_file: try: diff --git a/mysql_autoxtrabackup/backup/backuper.py b/mysql_autoxtrabackup/backup/backuper.py index 10b2c38..71f58f7 100755 --- a/mysql_autoxtrabackup/backup/backuper.py +++ b/mysql_autoxtrabackup/backup/backuper.py @@ -238,8 +238,6 @@ def _flush_logs_and_take_backup(self) -> bool: self.mysql_cli.mysql_run_command("flush logs") and self._take_full_backup() ) - def _get_status( - self, xtrabackup_cmd: str - ) -> bool: + def _get_status(self, xtrabackup_cmd: str) -> bool: logger.debug(f'Starting {self.builder_obj.backup_options.get("backup_tool")}') return ProcessRunner.run_command(xtrabackup_cmd) diff --git a/mysql_autoxtrabackup/configs/check_env.py b/mysql_autoxtrabackup/configs/check_env.py index 69d30e9..71d9f0c 100644 --- a/mysql_autoxtrabackup/configs/check_env.py +++ b/mysql_autoxtrabackup/configs/check_env.py @@ -127,8 +127,8 @@ def _is_all_binaries_exist(self) -> bool: def _is_mysql_conn_options_provided(self) -> None: if not self.mysql_options.get("mysql_socket") and not ( - self.mysql_options.get("mysql_host") - and self.mysql_options.get("mysql_port") + self.mysql_options.get("mysql_host") + and self.mysql_options.get("mysql_port") ): logger.critical(MYSQL_CONN_MSG) raise RuntimeError(MYSQL_CONN_MSG) @@ -151,7 +151,7 @@ def _append_conn_string(self, status_args) -> str: else "" ) if self.mysql_options.get("mysql_host") and self.mysql_options.get( - "mysql_port" + "mysql_port" ): status_args += f' --host={self.mysql_options.get("mysql_host")}' status_args += f' --port={self.mysql_options.get("mysql_port")}' diff --git a/mysql_autoxtrabackup/configs/generalops.py b/mysql_autoxtrabackup/configs/generalops.py index 543e5e0..090d32b 100644 --- a/mysql_autoxtrabackup/configs/generalops.py +++ b/mysql_autoxtrabackup/configs/generalops.py @@ -7,7 +7,7 @@ import humanfriendly # type: ignore from mysql_autoxtrabackup.configs import path_config -from mysql_autoxtrabackup.configs.generate_default_conf import generate_config_file +from mysql_autoxtrabackup.configs.generate_default_conf import generate_default_config_file logger = logging.getLogger(__name__) @@ -15,7 +15,7 @@ def _create_default_config(config: str, missing: str) -> None: logger.critical(f"Missing config file : {missing}") logger.warning("Creating default config file...") - generate_config_file(config=config) + generate_default_config_file(config=config) logger.info(f"Default config file is generated in {config}") diff --git a/mysql_autoxtrabackup/configs/generate_default_conf.py b/mysql_autoxtrabackup/configs/generate_default_conf.py index 634b464..2b3847a 100644 --- a/mysql_autoxtrabackup/configs/generate_default_conf.py +++ b/mysql_autoxtrabackup/configs/generate_default_conf.py @@ -54,7 +54,9 @@ def _set_backup_options(config: configparser.ConfigParser, home: str) -> None: config.set(section3, "#full_backup_interval", "1 day") -def generate_config_file(config: str = path_config.config_path_file, home: str = path_config.home) -> None: +def generate_default_config_file( + config: str = path_config.config_path_file, home: str = path_config.home +) -> None: with contextlib.suppress(FileExistsError, OSError): if not exists(path_config.config_path): makedirs(path_config.config_path) @@ -68,4 +70,3 @@ def generate_config_file(config: str = path_config.config_path_file, home: str = _set_backup_options(config, home) 
config.write(cfg_file) - diff --git a/mysql_autoxtrabackup/prepare/prepare.py b/mysql_autoxtrabackup/prepare/prepare.py index da513a1..7ba3f40 100644 --- a/mysql_autoxtrabackup/prepare/prepare.py +++ b/mysql_autoxtrabackup/prepare/prepare.py @@ -13,7 +13,7 @@ def _set_apply_log_only_found_backups( - dir_: str, found_backups: Optional[Tuple[str, str]] + dir_: str, found_backups: Optional[Tuple[str, str]] ): apply_log_only = None if dir_ != found_backups[0]: @@ -69,7 +69,7 @@ def _run_prepare_command(self, cmd: Optional[str]) -> Optional[bool]: return ProcessRunner.run_command(cmd) def _prepare_run_incremental_backups( - self, found_backups: Optional[Tuple[str, str]] + self, found_backups: Optional[Tuple[str, str]] ) -> None: logger.info("Preparing Incs: ") self._iterate_and_run_found_backups( @@ -114,10 +114,10 @@ def _iterate_and_run_sequential_increment_backups(self, dir_: List[str]) -> None ) def _prepare_and_run( - self, - recent_bck: str, - apply_log_only: Optional[bool] = None, - dir_: Optional[str] = None, + self, + recent_bck: str, + apply_log_only: Optional[bool] = None, + dir_: Optional[str] = None, ) -> None: backup_prepare_cmd = self.prepare_options.prepare_command_builder( full_backup=recent_bck, @@ -127,7 +127,7 @@ def _prepare_and_run( self._run_prepare_command(backup_prepare_cmd) def _iterate_and_run_found_backups( - self, found_backups: Optional[Tuple[str, str]], list_of_dir: List[str] + self, found_backups: Optional[Tuple[str, str]], list_of_dir: List[str] ) -> None: # Limit the iteration until this found backup for dir_ in list_of_dir[: list_of_dir.index(found_backups[0]) + 1]: diff --git a/mysql_autoxtrabackup/prepare/prepare_builder.py b/mysql_autoxtrabackup/prepare/prepare_builder.py index 0e403f7..99643eb 100644 --- a/mysql_autoxtrabackup/prepare/prepare_builder.py +++ b/mysql_autoxtrabackup/prepare/prepare_builder.py @@ -1,7 +1,6 @@ import logging -import os from dataclasses import dataclass -from typing import Optional, Tuple +from typing import Optional from mysql_autoxtrabackup.configs.generalops import GeneralClass From f7da6ebd306a618265f62c2828cfc25ce2827fd9 Mon Sep 17 00:00:00 2001 From: Shahriyar Rzayev Date: Wed, 13 Apr 2022 13:51:17 +0400 Subject: [PATCH 17/17] Now ready for the next stage; need to construct proper CLI structure --- mysql_autoxtrabackup/autoxtrabackup.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/mysql_autoxtrabackup/autoxtrabackup.py b/mysql_autoxtrabackup/autoxtrabackup.py index d370800..6538b23 100644 --- a/mysql_autoxtrabackup/autoxtrabackup.py +++ b/mysql_autoxtrabackup/autoxtrabackup.py @@ -1,3 +1,4 @@ +import functools import logging import logging.handlers import os @@ -190,6 +191,7 @@ def validate_file(file: str) -> None: ) @click.pass_context def all_procedure( + ctx, prepare, backup, run_server, @@ -296,14 +298,22 @@ def _run_commands( builder_obj, mysql_cli = _instantiate_objects(options) - if run_server: - main.run_server() - elif generate_config_file: - _generate_config_file(defaults_file) - elif prepare: - _prepare_backup(dry_run_, options) - elif backup: - _take_backup(builder_obj, dry_run_, mysql_cli, options) + _map = _build_callables_map(backup, builder_obj, defaults_file, dry_run_, generate_config_file, mysql_cli, + options, prepare, run_server) + for _callable in _map.values(): + if _callable: + _callable() + + +def _build_callables_map(backup, builder_obj, defaults_file, dry_run_, generate_config_file, mysql_cli, options, + prepare, run_server): + return { + 
"run_server": functools.partial(main.run_server) if run_server else None, + "generate_config_file": functools.partial(_generate_config_file, defaults_file) + if generate_config_file else None, + "prepare": functools.partial(_prepare_backup, dry_run_, options) if prepare else None, + "backup": functools.partial(_take_backup, builder_obj, dry_run_, mysql_cli, options) if backup else None + } def _set_dry_run(dry_run):