Add a MySQL backup helper
continuous-integration/drone: the build failed
parent fe4a041573
commit 71e561ae8d

@@ -14,3 +14,4 @@
 __pycache__
 /datman-helper-postgres/datman_helper_postgres.egg-info
+/datman-helper-mysql/datman_helper_mysql.egg-info

@@ -20,16 +20,17 @@ pipeline:
   testSuite:
     image: rust:1.54.0-slim-bullseye
     commands:
-      - apt-get -qq update && apt-get -yqq install pkg-config libssl-dev build-essential libsqlite3-dev python3.9 python3.9-venv postgresql postgresql-client
+      - apt-get -qq update && apt-get -yqq install pkg-config libssl-dev build-essential libsqlite3-dev python3.9 python3.9-venv postgresql postgresql-client mariadb-server mariadb-client
       - pg_ctlcluster 13 main start
       - su postgres -c 'createuser root'
       - su postgres -c 'createdb -O root testsuitedb'
       - psql testsuitedb -c 'CREATE TABLE testsuitetable ();'
+      - mysql -e 'CREATE DATABASE testsuitemydb; USE testsuitemydb; CREATE TABLE sometable (id INT PRIMARY KEY); INSERT INTO sometable VALUES (42);'
       - cargo install -q --path yama
       - cargo install -q --path datman
       - python3.9 -m venv testsuite/.venv
-      - ./testsuite/.venv/bin/pip install -e testsuite -e datman-helper-postgres
-      - cd testsuite && TEST_POSTGRES=$(hostname),testsuitedb,root ./.venv/bin/green
+      - ./testsuite/.venv/bin/pip install -e testsuite -e datman-helper-postgres -e datman-helper-mysql
+      - cd testsuite && . .venv/bin/activate && TEST_POSTGRES=$(hostname),testsuitedb,root TEST_MYSQL=$(hostname),testsuitemydb,root green

   deployManual:
     image: rust:1.54.0-slim
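
The mysql -e seeding step above assumes the MariaDB server is already running: the apt-get line installs mariadb-server, but nothing in this hunk starts it. A minimal sketch of the extra command that would have to run before the seeding step (assumption: the init script shipped by Debian bullseye's mariadb-server package; the exact service name is not part of this diff):

    # start the MariaDB server before seeding the test database
    service mariadb start   # or: service mysql start, depending on the init script name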

@@ -0,0 +1,86 @@
import json
import os
import pwd
import subprocess
import sys


def cli():
    """
    Performs a backup of a MySQL database.

    Parameters:
        database: str. The name of the database to back up.

        user: optional str. The name of the Linux user to use to connect to MySQL.
            Sudo or SSH will be used to make this happen, if it's specified,
            unless it's a local user that is already the current user.

        host: optional str. If specified, the backup will be made using SSH,
            unless this host is the same as the local machine's hostname.
    """
    request_info = json.load(sys.stdin)
    assert isinstance(request_info, dict)

    database_to_use = request_info["database"]
    user_to_use = request_info.get("user")
    host_to_use = request_info.get("host")
    use_lz4 = request_info.get("use_lz4_for_ssh", True)

    if host_to_use is not None:
        hostname = subprocess.check_output("hostname").decode().strip()
        if hostname == host_to_use:
            host_to_use = None

    command = []

    if host_to_use is not None:
        command.append("ssh")
        if user_to_use is not None:
            command.append(f"{user_to_use}@{host_to_use}")
        else:
            command.append(f"{host_to_use}")
    elif user_to_use is not None:
        current_username = pwd.getpwuid(os.getuid()).pw_name
        if current_username != user_to_use:
            command.append("sudo")
            command.append("-u")
            command.append(user_to_use)

    command.append("mysqldump")
    command.append(database_to_use)

    # Where the output of the dump command should go.
    output_of_dump = sys.stdout
    # The process (if any) that is our LZ4 decompressor.
    lz4_process = None

    if use_lz4 and host_to_use is not None:
        # Add an LZ4 compressor on the remote side.
        command += ["|", "lz4", "--compress", "--stdout"]

        # Then open an LZ4 decompressor on our side.
        lz4_process = subprocess.Popen(
            ["lz4", "--decompress", "--stdout"],
            stdin=subprocess.PIPE,
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        output_of_dump = lz4_process.stdin

    # We MUST disable shell here, otherwise the local side would do both
    # the compression and the decompression, which would be silly!
    subprocess.check_call(
        command,
        stdin=subprocess.DEVNULL,
        stdout=output_of_dump,
        stderr=sys.stderr,
        shell=False,
    )

    if lz4_process is not None:
        # Must close here, otherwise the decompressor never ends.
        lz4_process.stdin.close()
        exit_code = lz4_process.wait()
        if exit_code != 0:
            raise ChildProcessError(f"lz4 not happy: {exit_code}")
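
The helper reads one JSON object on stdin and writes the SQL dump to stdout, so it can be exercised by hand. A minimal sketch (assumptions: the console script is installed as datman-helper-mysql-backup, as the test suite below expects, and the current user can read the testsuitemydb database seeded in the CI pipeline):

    # Local dump: no "user" or "host", so neither sudo nor ssh is involved.
    echo '{"database": "testsuitemydb"}' | datman-helper-mysql-backup > testsuitemydb.sql

    # Remote dump: with "host" set (and different from the local hostname), mysqldump runs
    # over ssh and, by default, is piped through lz4 on the remote side and decompressed
    # locally; set "use_lz4_for_ssh" to false to skip the lz4 pipe. The host db.example.org
    # and the user "backup" are placeholders for this sketch.
    echo '{"database": "testsuitemydb", "host": "db.example.org", "user": "backup"}' \
        | datman-helper-mysql-backup > testsuitemydb.sql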

@@ -0,0 +1,119 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import io
import os
import sys
from shutil import rmtree

from setuptools import Command, find_packages, setup

# Package meta-data.
NAME = "datman_helper_mysql"
DESCRIPTION = "MySQL integration for Datman"
URL = "https://bics.ga/reivilibre/yama"
EMAIL = "reivi@librepush.net"
AUTHOR = "Olivier 'reivilibre'"
REQUIRES_PYTHON = ">=3.7.0"
VERSION = "0.1.0"

# What packages are required for this module to be executed?
REQUIRED = []

# What packages are optional?
EXTRAS = {}

# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps, the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!

here = os.path.abspath(os.path.dirname(__file__))

# Import the README and use it as the long description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try:
    with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f:
        long_description = "\n" + f.read()
except FileNotFoundError:
    long_description = DESCRIPTION

# Load the package's __version__.py module as a dictionary.
about = {}
if not VERSION:
    project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
    with open(os.path.join(here, project_slug, "__version__.py")) as f:
        exec(f.read(), about)
else:
    about["__version__"] = VERSION


class UploadCommand(Command):
    """Support setup.py upload."""

    description = "Build and publish the package."
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        print("\033[1m{0}\033[0m".format(s))

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        try:
            self.status("Removing previous builds…")
            rmtree(os.path.join(here, "dist"))
        except OSError:
            pass

        self.status("Building Source and Wheel (universal) distribution…")
        os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable))

        self.status("Uploading the package to PyPI via Twine…")
        os.system("twine upload dist/*")

        self.status("Pushing git tags…")
        os.system("git tag v{0}".format(about["__version__"]))
        os.system("git push --tags")

        sys.exit()


# Where the magic happens:
setup(
    name=NAME,
    version=about["__version__"],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type="text/markdown",
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
    # If your package is a single module, use this instead of 'packages':
    # py_modules=['mypackage'],
    entry_points={
        "console_scripts": [
            "datman-helper-mysql-backup=datman_helper_mysql.backup:cli",
            "datman-helper-mysql-restore=datman_helper_mysql.restore:cli",
        ],
    },
    install_requires=REQUIRED,
    extras_require=EXTRAS,
    include_package_data=True,
    # TODO license='GPL3',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
    ],
)
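
A quick way to check that the packaging exposes the console scripts declared above (a sketch, reusing the testsuite virtualenv from the CI pipeline):

    . testsuite/.venv/bin/activate
    pip install -e datman-helper-mysql
    command -v datman-helper-mysql-backup   # should print the venv's bin path for the script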

@@ -4,7 +4,7 @@ if [ $# -ge 1 ]
 then
     files=$*
 else
-    files="testsuite/setup.py testsuite/datmantests testsuite/helpers testsuite/yamatests datman-helper-postgres/datman_helper_postgres datman-helper-postgres/setup.py"
+    files="testsuite/setup.py testsuite/datmantests testsuite/helpers testsuite/yamatests datman-helper-postgres/datman_helper_postgres datman-helper-postgres/setup.py datman-helper-mysql/datman_helper_mysql datman-helper-mysql/setup.py"
 fi

 echo "Linting these locations: $files"

@@ -0,0 +1,108 @@
import json
import os
import subprocess
from pathlib import Path
from tempfile import TemporaryDirectory
from unittest import TestCase

from helpers import DirectoryDescriptor, scan_dir
from helpers.datman_helpers import set_up_simple_datman
from helpers.yama_helpers import set_up_simple_yama


class TestMysqlHelper(TestCase):
    def setUp(self):
        if "TEST_MYSQL" not in os.environ:
            self.skipTest(
                "TEST_MYSQL environment variable not set. "
                "Should be set to a MySQL host, database,"
                " user combination, comma-separated."
            )

    def test_helper_fails_on_bad_connection(self):
        proc = subprocess.Popen("datman-helper-mysql-backup", stdin=subprocess.PIPE)
        proc.stdin.write(
            json.dumps(
                {"database": "mydatabase", "host": "notmyhost", "user": "bobjones"}
            ).encode()
        )
        proc.stdin.close()
        self.assertNotEqual(proc.wait(), 0)

    def test_helper_succeeds_on_correct_config(self):
        my_host, my_database, my_user = os.environ["TEST_MYSQL"].split(",")

        proc = subprocess.Popen(
            "datman-helper-mysql-backup",
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        proc.stdin.write(
            json.dumps(
                {
                    "database": my_database or None,
                    "host": my_host or None,
                    "user": my_user or None,
                }
            ).encode()
        )
        proc.stdin.close()
        stdout = proc.stdout.read()
        self.assertEqual(proc.wait(), 0)
        self.assertIn(b"CREATE TABLE", stdout)
        self.assertIn(b"INSERT INTO", stdout)
        print(stdout)  # TODO

    def test_backup_and_extraction(self):
        td = TemporaryDirectory("test_my_bae")
        tdpath = Path(td.name)

        my_host, my_database, my_user = os.environ["TEST_MYSQL"].split(",")

        datman_path = tdpath.joinpath("datman")
        yama_path = datman_path.joinpath("main")

        set_up_simple_datman(
            datman_path,
            custom_extra_test=f"""
[source.mysql123]
helper = "mysql"
label = "precious"
kind = {{ stdout = "mysql123.sql" }}
database = "{my_database}"
host = "{my_host}"
user = "{my_user}"
""",
        )
        set_up_simple_yama(yama_path)

        print("storing")
        subprocess.check_call(
            ("datman", "backup-one", "mysql123", "main"), cwd=datman_path
        )

        print("extracting")
        dest_path = tdpath.joinpath("desta")
        subprocess.check_call(
            (
                "datman",
                "extract",
                "--skip-metadata",
                "--accept-partial",
                "main",
                "../desta",
            ),
            cwd=datman_path,
        )

        # This will be wrapped in a directory whose name starts with mysql123+.
        extracted_dir_descriptor_wrapper = scan_dir(dest_path)

        contents = extracted_dir_descriptor_wrapper.contents
        self.assertEqual(len(contents), 1)
        key, value = next(iter(contents.items()))
        self.assertTrue(key.startswith("mysql123+"))

        self.assertIsInstance(value, DirectoryDescriptor)
        key, value = next(iter(value.contents.items()))
        self.assertEqual(key, "mysql123.sql")
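
To run just this module's tests outside CI, a sketch mirroring the pipeline above (assumptions: yama, datman and the helper packages are already installed into the venv as in the CI steps, and a local MariaDB server holds the seeded testsuitemydb database):

    # Leaving TEST_POSTGRES unset presumably makes the Postgres tests skip themselves,
    # just as setUp above skips when TEST_MYSQL is missing.
    cd testsuite && . .venv/bin/activate && TEST_MYSQL=$(hostname),testsuitemydb,root green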