Merge pull request #642 from HumairAK/update_params_gen
chore: fix params generator for v2
HumairAK authored May 10, 2024
2 parents 83fda4d + b5ffa0a commit 13c516e
Showing 1 changed file with 76 additions and 89 deletions.
scripts/release/params.py (76 additions, 89 deletions)
@@ -2,44 +2,73 @@
 
 import requests
 
-QUAY_REPOS = {
+V1_TAG = "v1.6.4"
+
+ODH_QUAY_ORG = "opendatahub"
+
+QUAY_REPOS_V1 = {
     "IMAGES_APISERVER": "ds-pipelines-api-server",
     "IMAGES_ARTIFACT": "ds-pipelines-artifact-manager",
     "IMAGES_PERSISTENTAGENT": "ds-pipelines-persistenceagent",
     "IMAGES_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow",
     "IMAGES_MLMDENVOY": "ds-pipelines-metadata-envoy",
     "IMAGES_MLMDGRPC": "ds-pipelines-metadata-grpc",
     "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer",
-    "IMAGES_DSPO": "data-science-pipelines-operator",
 }
 
-ARCH = "amd64"
+QUAY_REPOS_V2 = {
+    "IMAGES_DSPO": "data-science-pipelines-operator",
+    "V2_LAUNCHER_IMAGE": "ds-pipelines-launcher",
+    "V2_DRIVER_IMAGE": "ds-pipelines-driver",
+    "IMAGESV2_ARGO_APISERVER": "ds-pipelines-api-server",
+    "IMAGESV2_ARGO_PERSISTENCEAGENT": "ds-pipelines-persistenceagent",
+    "IMAGESV2_ARGO_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow",
+}
 
-# RH Registry Env vars
-IMAGES_CACHE = "IMAGES_CACHE"
-IMAGES_MOVERESULTSIMAGE = "IMAGES_MOVERESULTSIMAGE"
-IMAGES_MARIADB = "IMAGES_MARIADB"
-IMAGES_OAUTHPROXY = "IMAGES_OAUTHPROXY"
+TAGGED_REPOS = {
+    "IMAGESV2_ARGO_WORKFLOWCONTROLLER" : {
+        "TAG": "3.3.10-upstream",
+        "REPO": "ds-pipelines-argo-workflowcontroller"
+    },
+    "IMAGESV2_ARGO_ARGOEXEC" : {
+        "TAG": "3.3.10-upstream",
+        "REPO": "ds-pipelines-argo-argoexec"
+    },
+    "IMAGESV2_ARGO_MLMDGRPC": {
+        "TAG": "main-94ae1e9",
+        "REPO": "mlmd-grpc-server"
+    },
+}
 
-# RH Registry repos
-REPO_UBI_MINIMAL = "ubi8/ubi-minimal"
-REPO_UBI_MICRO = "ubi8/ubi-micro"
-REPO_MARIADB = "rhel8/mariadb-103"
-REPO_OAUTH_PROXY = "openshift4/ose-oauth-proxy"
+STATIC_REPOS = {
+    "IMAGESV2_ARGO_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb",
+    "IMAGES_MARIADB": "registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1",
+    "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33",
+    "IMAGES_CACHE": "registry.redhat.io/ubi8/ubi-minimal@sha256:5d2d4d4dbec470f8ffb679915e2a8ae25ad754cd9193fa966deee1ecb7b3ee00",
+    "IMAGES_MOVERESULTSIMAGE": "registry.redhat.io/ubi8/ubi-micro@sha256:396baed3d689157d96aa7d8988fdfea7eb36684c8335eb391cf1952573e689c1",
+}
 
-# RH Registry servers
-RH_REGISTRY_ACCESS = "registry.access.redhat.com"
-RH_REGISTRY_IO = "registry.redhat.io"
+OTHER_OPTIONS = {
+    "ZAP_LOG_LEVEL": "info",
+    "MAX_CONCURRENT_RECONCILES": "10",
+    "DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT": "15s",
+    "DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT": "15s",
+    "DSPO_REQUEUE_TIME": "20s",
+    "DSPO_APISERVER_INCLUDE_OWNERREFERENCE": "true"
+}
 
 
 def fetch_quay_repo_tag_digest(quay_repo, quay_org, tag):
     api_url = f"https://quay.io/api/v1/repository/{quay_org}/{quay_repo}/tag/?specificTag={tag}"
 
     response = requests.get(api_url).json()
-    tags = response['tags']
+    if 'tags' not in response:
+        print(f"Could not fetch tag: {tag} for repo {quay_org}/{quay_repo}. Response: {response}")
+        exit(1)
+
+    tags = response['tags']
     if len(tags) == 0 or 'end_ts' in tags[0]:
-        print("Tag does not exist or was deleted.", file=sys.stderr)
+        print(f"Tag: {tag} for repo {quay_org}/{quay_repo} does not exist or was deleted.", file=sys.stderr)
         exit(1)
     digest = tags[0].get('manifest_digest')
     if not digest:
@@ -48,37 +77,30 @@ def fetch_quay_repo_tag_digest(quay_repo, quay_org, tag):
     return digest
 
 
-def fetch_rh_repo_tag_digest(repo, tag):
-    api_url = f"https://catalog.redhat.com/api/containers/v1/repositories/registry/{RH_REGISTRY_ACCESS}/repository/{repo}/tag/{tag}"
-
-    response = requests.get(api_url).json()
-
-    amd_img = {}
-    for img in response['data']:
-        arch = img.get('architecture')
-        if not arch:
-            print(f"No 'architecture' field found when fetching image from RH registry.", file=sys.stderr)
-            exit(1)
-        if img['architecture'] == 'amd64':
-            amd_img = img
-
-    if not amd_img:
-        print(f"AMD64 arch image not found for repo {repo} and tag {tag}", file=sys.stderr)
-        exit(1)
+def fetch_images(repos, overrides, lines, org, tag):
+    for image_env_var in repos:
+        if image_env_var in overrides:
+            lines.append(f"{image_env_var}={overrides[image_env_var]}")
+        else:
+            image_repo = repos[image_env_var]
+            digest = fetch_quay_repo_tag_digest(image_repo, org, tag)
+            image_repo_with_digest = f"{image_repo}@{digest}"
+            lines.append(f"{image_env_var}=quay.io/{org}/{image_repo_with_digest}")
 
-    sha_digest = amd_img['image_id']
-
-    return sha_digest
+def static_vars(values, overrides, lines):
+    for var in values:
+        if var in overrides:
+            lines.append(f"{var}={overrides[var]}")
+        else:
+            value = values[var]
+            lines.append(f"{var}={value}")
 
 
 def generate_params(args):
     tag = args.tag
     quay_org = args.quay_org
     file_out = args.out_file
-    ubi_minimal_tag = args.ubi_minimal_tag
-    ubi_micro_tag = args.ubi_micro_tag
-    mariadb_tag = args.mariadb_tag
-    oauth_proxy_tag = args.oauth_proxy_tag
 
     # Structure: { "ENV_VAR": "IMG_DIGEST",...}
     overrides = {}
@@ -91,53 +113,18 @@ def generate_params(args):
                 exit(1)
             overrides[entry[0]] = entry[1]
 
-    images = []
-    # Fetch QUAY Images
-    for image_env_var in QUAY_REPOS:
-        if image_env_var in overrides:
-            images.append(f"{image_env_var}={overrides[image_env_var]}")
-        else:
-            image_repo = QUAY_REPOS[image_env_var]
-            digest = fetch_quay_repo_tag_digest(image_repo, quay_org, tag)
-            image_repo_with_digest = f"{image_repo}@{digest}"
-            images.append(f"{image_env_var}=quay.io/{quay_org}/{image_repo_with_digest}")
-
-    # Fetch RH Registry images
-    rh_registry_images = {
-        RH_REGISTRY_ACCESS: [
-            {
-                "repo": REPO_UBI_MINIMAL,
-                "tag": ubi_minimal_tag,
-                "env": IMAGES_CACHE
-            },
-            {
-                "repo": REPO_UBI_MICRO,
-                "tag": ubi_micro_tag,
-                "env": IMAGES_MOVERESULTSIMAGE
-            },
-        ],
-        RH_REGISTRY_IO: [
-            {
-                "repo": REPO_MARIADB,
-                "tag": mariadb_tag,
-                "env": IMAGES_MARIADB
-            },
-            {
-                "repo": REPO_OAUTH_PROXY,
-                "tag": oauth_proxy_tag,
-                "env": IMAGES_OAUTHPROXY
-            },
-        ]
-    }
-    for registry in rh_registry_images:
-        for img in rh_registry_images[registry]:
-            image_env_var, tag, repo = img['env'], img['tag'], img['repo']
-            if image_env_var in overrides:
-                images.append(f"{image_env_var}={overrides[image_env_var]}")
-            else:
-                digest = fetch_rh_repo_tag_digest(repo, tag)
-                images.append(f"{image_env_var}={registry}/{repo}@{digest}")
+    env_var_lines = []
+
+    fetch_images(QUAY_REPOS_V1, overrides, env_var_lines, quay_org, V1_TAG)
+    fetch_images(QUAY_REPOS_V2, overrides, env_var_lines, quay_org, tag)
+    for image in TAGGED_REPOS:
+        target_repo = {image: TAGGED_REPOS[image]["REPO"]}
+        target_tag = TAGGED_REPOS[image]["TAG"]
+        fetch_images(target_repo, overrides, env_var_lines, quay_org, target_tag)
+
+    static_vars(STATIC_REPOS, overrides, env_var_lines)
+    static_vars(OTHER_OPTIONS, overrides, env_var_lines)
 
     with open(file_out, 'w') as f:
-        for images in images:
-            f.write(f"{images}\n")
+        for env_var_lines in env_var_lines:
+            f.write(f"{env_var_lines}\n")
