Commit

Add s3 auth test
vshepard committed Dec 15, 2023
1 parent 61d3ff6 commit 7cc3d7f
Showing 6 changed files with 185 additions and 1 deletion.
2 changes: 1 addition & 1 deletion tests/helpers/ptrack_helpers.py
@@ -97,7 +97,7 @@ def load_backup_class(fs_type):
     root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
     if root not in sys.path:
         sys.path.append(root)
-    from s3.test_utils.s3_backup import S3TestBackupDir
+    from ..test_utils.s3_backup import S3TestBackupDir
     fs_backup_class = S3TestBackupDir


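
The changed import now resolves test_utils relative to the tests package (i.e. tests.test_utils), matching the new directory added in this commit, instead of relying on a top-level s3 package. As a quick illustration, assuming ptrack_helpers.py is imported as tests.helpers.ptrack_helpers (as the tests below do), the two forms are equivalent; the relative one is what this hunk introduces:

# Equivalent imports inside tests/helpers/ptrack_helpers.py (illustration):
from ..test_utils.s3_backup import S3TestBackupDir      # relative to 'tests'
from tests.test_utils.s3_backup import S3TestBackupDir  # absolute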
29 changes: 29 additions & 0 deletions tests/s3_auth_test.py
@@ -0,0 +1,29 @@
from tests.test_utils.base_test import S3BaseTest


class AuthorizationTest(S3BaseTest):
    """
    Check the connection to S3 via the pre_start_checks() function
    by calling pg_probackup init --s3.
    Verify that the S3 keys allow connecting to all supported storage types.
    """

    def test_s3_authorisation(self):
        console_output = self.pb.init(options=["--log-level-console=VERBOSE"])

        # Match ': 403' rather than a bare '403', which can also occur
        # inside a timestamp.
        self.assertNotIn(': 403', console_output)
        self.assertMessage(console_output, contains='S3_pre_start_check successful')
        self.assertMessage(console_output, contains='HTTP response: 200')
        self.assertIn(
            f"INFO: Backup catalog '{self.backup_dir}' successfully initialized",
            console_output)

    def test_log_level_file_requires_log_directory(self):
        console_output = self.pb.init(options=["--log-level-file=VERBOSE"],
                                      skip_log_directory=True,
                                      expect_error=True)

        self.assertMessage(console_output,
                           contains='ERROR: Cannot save S3 logs to a file. You must specify --log-directory option when'
                                    ' running backup with --log-level-file option enabled')
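
Why match ': 403' instead of a bare '403'? Three digits alone can collide with digits inside the log timestamp, as the comment in the test notes. A toy illustration (the log line below is made up, not real pg_probackup output):

# False positive with a bare substring check (illustration only):
line = 'VERBOSE: 2023-12-15 11:34:03.403 UTC: HTTP response: 200'
assert '403' in line        # matches the '.403' milliseconds in the timestamp
assert ': 403' not in line  # status codes appear after ': ', so this is safe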
Empty file added tests/test_utils/__init__.py
29 changes: 29 additions & 0 deletions tests/test_utils/base_test.py
@@ -0,0 +1,29 @@
import os
import sys
import unittest

root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
if root not in sys.path:
    sys.path.append(root)

from tests.helpers.ptrack_helpers import ProbackupTest
from . import config_provider


class S3BaseTest(ProbackupTest, unittest.TestCase):

    additional_options = []

    def setUp(self) -> None:
        # If the env vars aren't set, assume a local run and fill them in
        # from properties.ini.
        if not os.environ.get('PG_PROBACKUP_S3_HOST'):
            minio_config = config_provider.read_config()['MINIO']
            os.environ.setdefault('PG_PROBACKUP_S3_ACCESS_KEY', minio_config['access_key'])
            os.environ.setdefault('PG_PROBACKUP_S3_SECRET_ACCESS_KEY', minio_config['secret_key'])
            os.environ.setdefault('PG_PROBACKUP_S3_HOST', minio_config['local_host'])
            os.environ.setdefault('PG_PROBACKUP_S3_PORT', minio_config['api_port'])
            os.environ.setdefault('PG_PROBACKUP_S3_BUCKET_NAME', minio_config['bucket_name'])
            os.environ.setdefault('PG_PROBACKUP_S3_REGION', minio_config['region'])
        # A dedicated connector setting will come later; default to minio for now.
        if not os.environ.get("PROBACKUP_S3_TYPE_FULL_TEST"):
            os.environ.setdefault('PROBACKUP_S3_TYPE_FULL_TEST', 'minio')
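
Since the block above uses os.environ.setdefault(), variables that are already exported (for example by CI) always take precedence, and properties.ini only fills the gaps on local runs. A toy illustration of that precedence:

# setdefault() never overwrites an existing value (illustration only):
import os
os.environ['PG_PROBACKUP_S3_PORT'] = '9000'        # pretend CI exported this
os.environ.setdefault('PG_PROBACKUP_S3_PORT', '9999')
assert os.environ['PG_PROBACKUP_S3_PORT'] == '9000'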
8 changes: 8 additions & 0 deletions tests/test_utils/config_provider.py
@@ -0,0 +1,8 @@
import configparser
import os.path


def read_config():
    configs = configparser.ConfigParser()
    configs.read(os.path.join(os.path.dirname(__file__), '../properties.ini'))
    return configs
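
read_config() expects tests/properties.ini, one directory above this module. For a local MinIO run it needs a [MINIO] section with the keys read in base_test.py; the values below are placeholders (minioadmin is MinIO's out-of-the-box credential), not part of this commit:

; tests/properties.ini -- sample for local runs, values are placeholders
[MINIO]
access_key = minioadmin
secret_key = minioadmin
local_host = 127.0.0.1
api_port = 9000
bucket_name = test-bucket
region = us-east-1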
118 changes: 118 additions & 0 deletions tests/test_utils/s3_backup.py
@@ -0,0 +1,118 @@
import os
import io
import sys

import minio
from minio import Minio
from minio.deleteobjects import DeleteObject
import urllib3

root = os.path.realpath(os.path.join(os.path.dirname(__file__), '../..'))
if root not in sys.path:
    sys.path.append(root)

# Fail fast: a KeyError here means a required env var is missing.
host = os.environ['PG_PROBACKUP_S3_HOST']
port = os.environ['PG_PROBACKUP_S3_PORT']
access = os.environ['PG_PROBACKUP_S3_ACCESS_KEY']
secret = os.environ['PG_PROBACKUP_S3_SECRET_ACCESS_KEY']
bucket = os.environ['PG_PROBACKUP_S3_BUCKET_NAME']
path_suffix = os.environ.get("PG_PROBACKUP_TEST_TMP_SUFFIX")
https = os.environ.get("PG_PROBACKUP_S3_HTTPS")

s3_type = os.environ.get('PROBACKUP_S3_TYPE_FULL_TEST')

status_forcelist = [413,  # RequestBodyTooLarge
                    429,  # TooManyRequests
                    500,  # InternalError
                    503,  # ServerBusy
                    ]


class S3TestBackupDir:
    is_file_based = False

    def __init__(self, *, rel_path, backup):
        path = "pg_probackup"
        if path_suffix:
            path += "_" + path_suffix
        self.path = f"{path}/{rel_path}/{backup}"
        secure: bool = https in ['ON', 'HTTPS']
        retry = urllib3.Retry(total=5, backoff_factor=1,
                              status_forcelist=status_forcelist)
        self.conn = Minio(host + ":" + port, secure=secure,
                          access_key=access, secret_key=secret,
                          http_client=urllib3.PoolManager(retries=retry))
        if not self.conn.bucket_exists(bucket):
            raise Exception(f"Test bucket {bucket} does not exist.")
        self.pb_args = ('-B', '/' + self.path, f'--s3={s3_type}')

    def list_instance_backups(self, instance):
        full_path = os.path.join(self.path, 'backups', instance)
        candidates = self.conn.list_objects(bucket, prefix=full_path, recursive=True)
        return [os.path.basename(os.path.dirname(x.object_name))
                for x in candidates if x.object_name.endswith('backup.control')]

    def list_files(self, sub_dir, recursive=False):
        full_path = os.path.join(self.path, sub_dir)
        # The prefix must end with '/' so list_objects looks inside the folder.
        full_path_dir = full_path if full_path[-1] == '/' else full_path + '/'
        object_list = self.conn.list_objects(bucket, prefix=full_path_dir, recursive=recursive)
        return [obj.object_name.replace(full_path_dir, '', 1)
                for obj in object_list
                if not obj.is_dir]

    def read_file(self, sub_path, *, text=True):
        full_path = os.path.join(self.path, sub_path)
        data = self.conn.get_object(bucket, full_path).read()
        if not text:
            return data
        return data.decode('utf-8')

    def write_file(self, sub_path, data, *, text=True):
        full_path = os.path.join(self.path, sub_path)
        if text:
            data = data.encode('utf-8')
        self.conn.put_object(bucket, full_path, io.BytesIO(data), length=len(data))

    def cleanup(self):
        self.remove_dir('')

    def remove_file(self, sub_path):
        full_path = os.path.join(self.path, sub_path)
        self.conn.remove_object(bucket, full_path)

    def remove_dir(self, sub_path):
        if sub_path:
            full_path = os.path.join(self.path, sub_path)
        else:
            full_path = self.path
        objs = self.conn.list_objects(bucket, prefix=full_path, recursive=True)
        delobjs = (DeleteObject(o.object_name) for o in objs)
        errs = list(self.conn.remove_objects(bucket, delobjs))
        if errs:
            strerrs = "; ".join(str(err) for err in errs)
            raise Exception("There were errors: {0}".format(strerrs))

    def exists(self, sub_path):
        full_path = os.path.join(self.path, sub_path)
        try:
            self.conn.stat_object(bucket, full_path)
            return True
        except minio.error.S3Error as s3err:
            if s3err.code == 'NoSuchKey':
                return False
            raise

    def __str__(self):
        return '/' + self.path

    def __repr__(self):
        return "S3TestBackupDir" + str(self.path)

    def __fspath__(self):
        return self.path
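
A minimal usage sketch of S3TestBackupDir (the rel_path, backup and instance names are made up, and the required PG_PROBACKUP_S3_* variables must be exported before this module is imported):

# Round-trip a file and list backups (illustration only):
backup_dir = S3TestBackupDir(rel_path='auth_test', backup='backup')
backup_dir.write_file('backups/node/B1/backup.control', 'status = OK')
assert backup_dir.exists('backups/node/B1/backup.control')
print(backup_dir.read_file('backups/node/B1/backup.control'))  # 'status = OK'
print(backup_dir.list_instance_backups('node'))                # ['B1']
backup_dir.cleanup()  # deletes every object under this directory's prefix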
