Commit

Add s3 tests

vshepard committed May 3, 2024
1 parent 2b9c9b1 commit 25856d5
Showing 59 changed files with 14,614 additions and 20,420 deletions.
Empty file added s3/__init__.py
Empty file added s3/test_utils/__init__.py
8 changes: 8 additions & 0 deletions s3/test_utils/config_provider.py
@@ -0,0 +1,8 @@
import configparser


def read_config(s3_config_file):
    config = configparser.ConfigParser()
    # The config file has no section headers, so prepend a fake one
    # to satisfy configparser.
    with open(s3_config_file) as f:
        config.read_string('[fake-section]\n' + f.read())

    return config['fake-section']
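
For context, the fake-section prefix is what lets configparser parse a plain key=value file that has no section headers. A minimal usage sketch (the s3.conf contents here are hypothetical; the key names mirror those read by setup_s3_env in s3/test_utils/s3_backup.py below):

# Assuming s3/tests/s3.conf contains sectionless lines such as:
#   access-key = minioadmin
#   secret-key = minioadmin
#   s3-host = 127.0.0.1
#   s3-port = 9000
conf = read_config('s3/tests/s3.conf')
print(conf['s3-host'], conf['s3-port'])  # -> 127.0.0.1 9000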
208 changes: 208 additions & 0 deletions s3/test_utils/s3_backup.py
@@ -0,0 +1,208 @@
import os
import io
import sys

import minio
from minio import Minio
from minio.deleteobjects import DeleteObject
import urllib3
from pg_probackup2.storage.fs_backup import TestBackupDir
from pg_probackup2.init_helpers import init_params
from s3.test_utils import config_provider

root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
if root not in sys.path:
    sys.path.append(root)

status_forcelist = [413,  # RequestBodyTooLarge
                    429,  # TooManyRequests
                    500,  # InternalError
                    503,  # ServerBusy
                    ]

DEFAULT_CONF_FILE = 's3/tests/s3.conf'


class S3TestBackupDir(TestBackupDir):
    is_file_based = False

    def __init__(self, *, rel_path, backup):
        self.access_key = None
        self.secret_key = None
        self.s3_type = None
        self.tmp_path = None
        self.host = None
        self.port = None
        self.bucket_name = None
        self.region = None
        self.bucket = None
        self.path_suffix = None
        self.https = None
        self.s3_config_file = None
        self.ca_certificate = None

        self.set_s3_config_file()
        self.setup_s3_env()

        path = "pg_probackup"
        if self.path_suffix:
            path += "_" + self.path_suffix
        if self.tmp_path == '' or os.path.isabs(self.tmp_path):
            self.path = f"{path}{self.tmp_path}/{rel_path}/{backup}"
        else:
            self.path = f"{path}/{self.tmp_path}/{rel_path}/{backup}"

        secure: bool = False
        self.versioning: bool = False
        if self.https in ['ON', 'HTTPS']:
            secure = True
        if self.https and self.ca_certificate:
            http_client = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                                              ca_certs=self.ca_certificate,
                                              retries=urllib3.Retry(total=5,
                                                                    backoff_factor=1,
                                                                    status_forcelist=status_forcelist))
        else:
            http_client = urllib3.PoolManager(retries=urllib3.Retry(total=5,
                                                                    backoff_factor=1,
                                                                    status_forcelist=status_forcelist))

        self.conn = Minio(self.host + ":" + self.port, secure=secure, access_key=self.access_key,
                          secret_key=self.secret_key, http_client=http_client)
        if not self.conn.bucket_exists(self.bucket):
            raise Exception(f"Test bucket {self.bucket} does not exist.")

        try:
            config = self.conn.get_bucket_versioning(self.bucket)
            self.versioning = config.status.lower() in ("enabled", "suspended")
        except Exception as e:
            if "NotImplemented" in repr(e):
                self.versioning = False
            else:
                raise
        self.pb_args = ('-B', '/' + self.path, f'--s3={init_params.s3_type}')
        if self.s3_config_file:
            self.pb_args += (f'--s3-config-file={self.s3_config_file}',)

    def setup_s3_env(self, s3_config=None):
        self.tmp_path = os.environ.get('PGPROBACKUP_TMP_DIR', default='')
        self.host = os.environ.get('PG_PROBACKUP_S3_HOST', default='')

        # If the environment variables are not set, read the settings from the config file
        if self.s3_config_file or s3_config:
            minio_config = config_provider.read_config(self.s3_config_file or s3_config)
            self.access_key = minio_config['access-key']
            self.secret_key = minio_config['secret-key']
            self.host = minio_config['s3-host']
            self.port = minio_config['s3-port']
            self.bucket = minio_config['s3-bucket']
            self.region = minio_config['s3-region']
            self.https = minio_config['s3-secure']
            init_params.s3_type = 'minio'
        else:
            self.access_key = os.environ.get('PG_PROBACKUP_S3_ACCESS_KEY')
            self.secret_key = os.environ.get('PG_PROBACKUP_S3_SECRET_ACCESS_KEY')
            self.host = os.environ.get('PG_PROBACKUP_S3_HOST')
            self.port = os.environ.get('PG_PROBACKUP_S3_PORT')
            self.bucket = os.environ.get('PG_PROBACKUP_S3_BUCKET_NAME')
            self.region = os.environ.get('PG_PROBACKUP_S3_REGION')
            self.https = os.environ.get('PG_PROBACKUP_S3_HTTPS')
            self.ca_certificate = os.environ.get('PG_PROBACKUP_S3_CA_CERTIFICATE')
            init_params.s3_type = os.environ.get('PG_PROBACKUP_S3_TEST')

        # Multi-URL case: keep only the first URL from the string
        if ';' in self.host:
            self.host = self.host[:self.host.find(';')]
            if ':' in self.host:  # also pick up the port if it was overridden in the multi-host string
                self.port = self.host[self.host.find(':') + 1:]
                self.host = self.host[:self.host.find(':')]

    def set_s3_config_file(self):
        s3_config = os.environ.get('PG_PROBACKUP_S3_CONFIG_FILE')
        if s3_config is not None and s3_config.strip().lower() == "true":
            self.s3_config_file = DEFAULT_CONF_FILE
        else:
            self.s3_config_file = s3_config

    def list_instance_backups(self, instance):
        full_path = os.path.join(self.path, 'backups', instance)
        candidates = self.conn.list_objects(self.bucket, prefix=full_path, recursive=True)
        return [os.path.basename(os.path.dirname(x.object_name))
                for x in candidates if x.object_name.endswith('backup.control')]

    def list_files(self, sub_dir, recursive=False):
        full_path = os.path.join(self.path, sub_dir)
        # Need a trailing '/' to list inside the folder
        full_path_dir = full_path if full_path[-1] == '/' else full_path + '/'
        object_list = self.conn.list_objects(self.bucket, prefix=full_path_dir, recursive=recursive)
        return [obj.object_name.replace(full_path_dir, '', 1)
                for obj in object_list
                if not obj.is_dir]

    def list_dirs(self, sub_dir):
        full_path = os.path.join(self.path, sub_dir)
        # Need a trailing '/' to list inside the folder
        full_path_dir = full_path if full_path[-1] == '/' else full_path + '/'
        object_list = self.conn.list_objects(self.bucket, prefix=full_path_dir, recursive=False)
        return [obj.object_name.replace(full_path_dir, '', 1).rstrip('\\/')
                for obj in object_list
                if obj.is_dir]

    def read_file(self, sub_path, *, text=True):
        full_path = os.path.join(self.path, sub_path)
        data = self.conn.get_object(self.bucket, full_path).read()
        if not text:
            return data
        return data.decode('utf-8')

    def write_file(self, sub_path, data, *, text=True):
        full_path = os.path.join(self.path, sub_path)
        if text:
            data = data.encode('utf-8')
        self.conn.put_object(self.bucket, full_path, io.BytesIO(data), length=len(data))

    def cleanup(self, dir=''):
        self.remove_dir(dir)

    def remove_file(self, sub_path):
        full_path = os.path.join(self.path, sub_path)
        self.conn.remove_object(self.bucket, full_path)

    def remove_dir(self, sub_path):
        if sub_path:
            full_path = os.path.join(self.path, sub_path)
        else:
            full_path = self.path
        objs = self.conn.list_objects(self.bucket, prefix=full_path, recursive=True,
                                      include_version=self.versioning)
        delobjs = (DeleteObject(o.object_name, o.version_id) for o in objs)
        errs = list(self.conn.remove_objects(self.bucket, delobjs))
        if errs:
            strerrs = "; ".join(str(err) for err in errs)
            raise Exception("There were errors: {0}".format(strerrs))

    def exists(self, sub_path):
        full_path = os.path.join(self.path, sub_path)
        try:
            self.conn.stat_object(self.bucket, full_path)
            return True
        except minio.error.S3Error as s3err:
            if s3err.code == 'NoSuchKey':
                return False
            raise

    def __str__(self):
        return '/' + self.path

    def __repr__(self):
        return "S3TestBackupDir" + str(self.path)

    def __fspath__(self):
        return self.path
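
A minimal sketch of how S3TestBackupDir might be driven in a test run, assuming the environment variables read by setup_s3_env above; all values are illustrative, and the target bucket must already exist since __init__ raises otherwise:

import os

# Hypothetical MinIO-style endpoint and credentials; any reachable
# S3-compatible storage with these settings would do.
os.environ.update({
    'PG_PROBACKUP_S3_ACCESS_KEY': 'minioadmin',
    'PG_PROBACKUP_S3_SECRET_ACCESS_KEY': 'minioadmin',
    'PG_PROBACKUP_S3_HOST': '127.0.0.1',
    'PG_PROBACKUP_S3_PORT': '9000',
    'PG_PROBACKUP_S3_BUCKET_NAME': 'pg-probackup-tests',
    'PG_PROBACKUP_S3_REGION': 'us-east-1',
    'PG_PROBACKUP_S3_TEST': 'minio',
})

backup_dir = S3TestBackupDir(rel_path='node', backup='backup')
backup_dir.write_file('backup.control', 'status = OK')
print(backup_dir.read_file('backup.control'))   # -> status = OK
print(backup_dir.exists('backup.control'))      # -> True
backup_dir.cleanup()                            # removes everything under backup_dir.path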
17 changes: 17 additions & 0 deletions s3/tests/__init__.py
@@ -0,0 +1,17 @@
import unittest
import os

from . import auth_test, param_test


def load_tests(loader, tests, pattern):
    suite = unittest.TestSuite()

    if 'PG_PROBACKUP_TEST_BASIC' in os.environ:
        if os.environ['PG_PROBACKUP_TEST_BASIC'] == 'ON':
            loader.testMethodPrefix = 'test_basic'

    suite.addTests(loader.loadTestsFromModule(auth_test))
    suite.addTests(loader.loadTestsFromModule(param_test))

    return suite
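
For reference, load_tests is the standard unittest hook for customizing discovery; a sketch of a run that goes through it (assuming it is launched from the repository root with the S3 environment configured):

import unittest

# Loading the package as a module triggers its load_tests hook, which
# restricts discovery to test_basic* methods when PG_PROBACKUP_TEST_BASIC=ON.
import s3.tests
unittest.TextTestRunner(verbosity=2).run(
    unittest.defaultTestLoader.loadTestsFromModule(s3.tests))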
36 changes: 36 additions & 0 deletions s3/tests/auth_test.py
@@ -0,0 +1,36 @@
import os
import sys

root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
if root not in sys.path:
    sys.path.append(root)

from tests.helpers.ptrack_helpers import ProbackupTest


class AuthorizationTest(ProbackupTest):
    """
    Check the S3 connection made by the pre_start_checks() function
    when calling pg_probackup init --s3, and verify that the S3 keys
    allow connecting to all supported storage types.
    """

    def s3_auth_test(self):
        console_output = self.pb.init(options=["--log-level-console=VERBOSE"])

        self.assertNotIn(': 403', console_output)  # a bare '403' substring may appear in a timestamp
        self.assertMessage(console_output, contains='S3_pre_start_check successful')
        self.assertMessage(console_output, contains='HTTP response: 200')
        self.assertIn(
            f"INFO: Backup catalog '{self.backup_dir}' successfully initialized",
            console_output)

    def test_log_level_file_requires_log_directory(self):
        console_output = self.pb.init(options=["--log-level-file=VERBOSE"],
                                      skip_log_directory=True,
                                      expect_error=True)

        self.assertMessage(console_output,
                           contains='ERROR: Cannot save S3 logs to a file. You must specify --log-directory option when'
                                    ' running backup with --log-level-file option enabled')
6 changes: 6 additions & 0 deletions s3/tests/pytest.ini
@@ -0,0 +1,6 @@
[pytest]
log_cli = true
log_cli_level = INFO
log_format = %(asctime)s %(levelname)s %(message)s
log_date_format = %Y-%m-%d %H:%M:%S
testpaths = tests
53 changes: 18 additions & 35 deletions tests/CVE_2018_1058_test.py
@@ -1,19 +1,16 @@
 import os
 import unittest
-from .helpers.ptrack_helpers import ProbackupTest, ProbackupException
+from .helpers.ptrack_helpers import ProbackupTest

-class CVE_2018_1058(ProbackupTest, unittest.TestCase):
+class CVE_2018_1058(ProbackupTest):

     # @unittest.skip("skip")
     def test_basic_default_search_path(self):
         """"""
-        backup_dir = os.path.join(self.tmp_path, self.module_name, self.fname, 'backup')
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            set_replication=True)
+        node = self.pg_node.make_simple('node', checksum=False, set_replication=True)

-        self.init_pb(backup_dir)
-        self.add_instance(backup_dir, 'node', node)
+        self.pb.init()
+        self.pb.add_instance('node', node)
         node.slow_start()

         node.safe_psql(
@@ -26,19 +23,16 @@ def test_basic_default_search_path(self):
             "END "
             "$$ LANGUAGE plpgsql")

-        self.backup_node(backup_dir, 'node', node, backup_type='full', options=['--stream'])
+        self.pb.backup_node('node', node, backup_type='full', options=['--stream'])

     # @unittest.skip("skip")
     def test_basic_backup_modified_search_path(self):
         """"""
-        backup_dir = os.path.join(self.tmp_path, self.module_name, self.fname, 'backup')
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            set_replication=True)
-        self.set_auto_conf(node, options={'search_path': 'public,pg_catalog'})
+        node = self.pg_node.make_simple('node', checksum=False, set_replication=True)
+        node.set_auto_conf(options={'search_path': 'public,pg_catalog'})

-        self.init_pb(backup_dir)
-        self.add_instance(backup_dir, 'node', node)
+        self.pb.init()
+        self.pb.add_instance('node', node)
         node.slow_start()

         node.safe_psql(
@@ -62,7 +56,7 @@ def test_basic_backup_modified_search_path(self):
             "$$ LANGUAGE plpgsql; "
             "CREATE VIEW public.pg_proc AS SELECT proname FROM public.pg_proc()")

-        self.backup_node(backup_dir, 'node', node, backup_type='full', options=['--stream'])
+        self.pb.backup_node('node', node, backup_type='full', options=['--stream'])

         log_file = os.path.join(node.logs_dir, 'postgresql.log')
         with open(log_file, 'r') as f:
@@ -73,10 +67,8 @@ def test_basic_backup_modified_search_path(self):
     # @unittest.skip("skip")
     def test_basic_checkdb_modified_search_path(self):
         """"""
-        node = self.make_simple_node(
-            base_dir=os.path.join(self.module_name, self.fname, 'node'),
-            initdb_params=['--data-checksums'])
-        self.set_auto_conf(node, options={'search_path': 'public,pg_catalog'})
+        node = self.pg_node.make_simple('node')
+        node.set_auto_conf(options={'search_path': 'public,pg_catalog'})
         node.slow_start()

         node.safe_psql(
@@ -110,20 +102,11 @@ def test_basic_checkdb_modified_search_path(self):
             "CREATE VIEW public.pg_namespace AS SELECT * FROM public.pg_namespace();"
             )

-        try:
-            self.checkdb_node(
-                options=[
-                    '--amcheck',
-                    '--skip-block-validation',
-                    '-d', 'postgres', '-p', str(node.port)])
-            self.assertEqual(
-                1, 0,
-                "Expecting Error because amcheck{,_next} not installed\n"
-                " Output: {0} \n CMD: {1}".format(
-                    repr(self.output), self.cmd))
-        except ProbackupException as e:
-            self.assertIn(
-                "WARNING: Extension 'amcheck' or 'amcheck_next' are not installed in database postgres",
-                e.message,
-                "\n Unexpected Error Message: {0}\n CMD: {1}".format(
-                    repr(e.message), self.cmd))
+        self.pb.checkdb_node(
+            options=[
+                '--amcheck',
+                '--skip-block-validation',
+                '-d', 'postgres', '-p', str(node.port)],
+            expect_error="because amcheck{,_next} not installed")
+        self.assertMessage(contains=
+            "WARNING: Extension 'amcheck' or 'amcheck_next' are not installed in database postgres")