diff --git a/.final_builds/packages/golang-1-linux/index.yml b/.final_builds/packages/golang-1-linux/index.yml new file mode 100644 index 00000000..4c62c715 --- /dev/null +++ b/.final_builds/packages/golang-1-linux/index.yml @@ -0,0 +1,10 @@ +builds: + 126fca7399cba5bc8c1b02e392441f1688daf1586d57d1b3614e314b893a6cc4: + version: 126fca7399cba5bc8c1b02e392441f1688daf1586d57d1b3614e314b893a6cc4 + blobstore_id: 3be90446-c0f6-4aed-5b1b-da96e7d1814b + sha1: sha256:70c3918c28cd74a0a9be561b5eac8b85e8e7de4a72bc66cd321288974937e0be + 8c04109541f4d504f5be559da433998bd459b0f45cd3654557cc3642cc4d2f60: + version: 8c04109541f4d504f5be559da433998bd459b0f45cd3654557cc3642cc4d2f60 + blobstore_id: 07b1c4f5-6c91-4ef2-583b-f0ebf2b94b27 + sha1: sha256:efdf65bca81264d9110ec764be7edea1de341781a1ffe59c219db064d71d9fc3 +format-version: "2" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..ef5319e0 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,24 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: /ci/scripts + schedule: + interval: weekly + day: "monday" + time: "09:00" + timezone: "Europe/Berlin" + commit-message: + prefix: "ci" + labels: + - run-ci +- package-ecosystem: gomod + directory: /src/pcap + schedule: + interval: weekly + day: "monday" + time: "09:00" + timezone: "Europe/Berlin" + commit-message: + prefix: "dep" + labels: + - run-ci diff --git a/.gitignore b/.gitignore index b46eab98..74d94387 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,6 @@ # Dependency directories & default folders/files .bundle .dev_builds -.final_builds pcap-release.tgz config/private.yml blobs diff --git a/ci/Dockerfile b/ci/Dockerfile index b569029c..ac48acf5 100644 --- a/ci/Dockerfile +++ b/ci/Dockerfile @@ -1,9 +1,9 @@ -# Dockerfile for image cf-routing.common.repositories.cloud.sap/pcap-release-testflight:dev +# Dockerfile for image cf-routing.common.repositories.cloud.sap/pcap-release-testflight FROM bosh/docker-cpi:main ARG 
GINKGO_VERSION=latest ARG GOLANGCILINT_VERSION=latest -RUN apt-get update && apt-get install -y libpcap-dev && rm -rf /var/lib/apt/lists/* +RUN apt-get update && apt-get install -y libpcap-dev python3-pip && rm -rf /var/lib/apt/lists/* RUN curl -fsSL https://deb.nodesource.com/setup_current.x | sudo -E bash - && \ apt-get install -y nodejs && rm -rf /var/lib/apt/lists/* @@ -14,8 +14,12 @@ RUN echo "source /tmp/local-bosh/director/env" >> /root/.bashrc RUN npm install -g semantic-release && \ npm install -g @semantic-release/exec -# Install go dependencies -ENV GOBIN=/usr/local/bin +# Install Python libraries needed for scripts +COPY scripts/requirements.txt /requirements.txt +RUN /usr/bin/python3 -m pip install -r /requirements.txt + +ENV GOPATH=/go PATH=${PATH}:/go/bin + RUN go install "github.com/onsi/ginkgo/v2/ginkgo@${GINKGO_VERSION}" \ && go install "github.com/golangci/golangci-lint/cmd/golangci-lint@${GOLANGCILINT_VERSION}" \ && go install "github.com/geofffranks/spruce/cmd/spruce@latest" diff --git a/ci/pipeline.yml b/ci/pipeline.yml index 0b8b3018..649c8d2c 100644 --- a/ci/pipeline.yml +++ b/ci/pipeline.yml @@ -8,6 +8,7 @@ groups: - rc - acceptance-tests - acceptance-tests-pr + - autobump-dependencies jobs: - name: unit-tests @@ -286,6 +287,69 @@ jobs: GITHUB_OWNER: cloudfoundry GITHUB_TOKEN: ((github.access_token)) GCP_SERVICE_KEY: ((gcp.service_key)) + - name: autobump-dependencies + public: true + serial: true + plan: + - do: + - get: daily + trigger: true + - get: git + - get: golang-release + - in_parallel: + steps: + - task: autobump-dependencies + config: + inputs: + - name: git + platform: linux + image_resource: + type: docker-image + source: + repository: cf-routing.common.repositories.cloud.sap/pcap-release-testflight + tag: latest + username: ((docker.username)) + password: ((docker.password)) + run: + dir: git + path: /usr/bin/python3 + args: [ "ci/scripts/autobump-dependencies.py" ] + params: + REPO_ROOT: git + PR_BASE: main + PR_ORG: 
cloudfoundry + PR_LABEL: run-ci + + GCP_SERVICE_KEY: ((gcp.service_key)) + GITHUB_COM_TOKEN: ((github.access_token)) + - task: autobump-golang-release + config: + inputs: + - name: git + - name: golang-release + platform: linux + image_resource: + type: docker-image + source: + repository: cf-routing.common.repositories.cloud.sap/pcap-release-testflight + tag: latest + username: ((docker.username)) + password: ((docker.password)) + run: + dir: git + path: /usr/bin/python3 + args: [ "ci/scripts/bump_golang_vendored.py" ] + params: + GO_PACKAGE_REPO_ROOT: ../golang-release + REPO_ROOT: git + PR_BASE: main + PR_ORG: cloudfoundry + PR_LABEL: run-ci + GIT_AUTHOR_NAME: ((github.bot_user)) + GIT_AUTHOR_EMAIL: ((github.bot_email)) + + GCP_SERVICE_KEY: ((gcp.service_key)) + GITHUB_COM_TOKEN: ((github.access_token)) resource_types: - name: slack-notification @@ -310,6 +374,12 @@ resources: uri: https://github.com/cloudfoundry/pcap-release.git branch: main password: ((github.access_token)) + - name: golang-release + type: git + source: + uri: https://github.com/bosh-packages/golang-release.git + branch: master + password: ((github.access_token)) - name: git-pull-requests type: pull-request @@ -336,3 +406,11 @@ resources: type: bosh-io-stemcell source: name: bosh-warden-boshlite-ubuntu-jammy-go_agent + + - name: daily + type: time + source: + start: 7:00 AM + stop: 8:00 AM + location: Europe/Berlin + interval: 24h \ No newline at end of file diff --git a/ci/scripts/autobump-dependencies.py b/ci/scripts/autobump-dependencies.py new file mode 100644 index 00000000..0e32be1f --- /dev/null +++ b/ci/scripts/autobump-dependencies.py @@ -0,0 +1,366 @@ +#!/usr/bin/env python3 +import functools +import os +import re +import shutil +from dataclasses import dataclass +import subprocess +import sys +import textwrap +from typing import List, Optional, Tuple +import yaml + +import github # PyGithub +import requests +from packaging import version +from bs4 import BeautifulSoup +from git 
import Repo + + +# Required Environment Vars +BLOBSTORE_SECRET_ACCESS_KEY = os.environ["GCP_SERVICE_KEY"] +gh = github.Github(login_or_token=os.environ["GITHUB_COM_TOKEN"]) +PR_ORG = os.environ["PR_ORG"] +PR_BASE = os.environ["PR_BASE"] +PR_LABEL = os.environ["PR_LABEL"] +# if DRY_RUN is set, blobs will not be uploaded and no PR created (downloads and local changes are still performed) +DRY_RUN = "DRY_RUN" in os.environ + +# Other Global Variables +BLOBS_PATH = "config/blobs.yml" +PACKAGING_PATH = "packages/{}/packaging" + + +class BoshHelper: + """ + Helper class to interface with the bosh-cli. + """ + + @classmethod + def add_blob(cls, path, blobs_path): + cls._run_bosh_cmd("add-blob", path, blobs_path) + + @classmethod + def remove_blob(cls, path): + cls._run_bosh_cmd("remove-blob", path) + + @classmethod + def upload_blobs(cls): + cls._run_bosh_cmd("upload-blobs") + + @classmethod + def _run_bosh_cmd(cls, cmd, *args): + cmd_params = ["bosh", cmd, *args] + print(f"Running '{' '.join(cmd_params)}' ...") + + # run as subprocess and handle errors + process = subprocess.Popen(cmd_params, stderr=subprocess.PIPE, stdout=subprocess.PIPE) + stdout, stderr = process.communicate() + if stdout: + print(stdout.decode("utf-8"), file=sys.stdout) # we don't expect any stdout under normal behaviour, might be useful for debugging though + if stderr: + print(stderr.decode("utf-8"), file=sys.stdout) + if process.returncode != 0: + raise Exception(f"Command {' '.join(cmd_params)} failed. Aborting.") + + +@dataclass +class Release: + """ + A specific release (i.e. version) of a dependency. + Currently, only the latest release of each dependency is fetched. 
+ """ + + name: str + url: str + file: str + version: version.Version + + def download(self) -> None: + if os.path.isfile(self.file): + print(f"[{self.name}] already exists") + return + + print(f"[{self.name}] download '{self.url}' to '{self.file}'") + wget(self.url, self.file) + + def add_blob(self): + target_path = f"{self.file}" + BoshHelper.add_blob(self.file, target_path) + +@dataclass(repr=False) +class Dependency: + """ + The base class that defines the interface of a dependency. + fetch_latest_release needs to be implemented by subclasses. + """ + + name: str + version_var_name: str + pinned_version: str + root_url: str + packages: List[str] + remote_repo = gh.get_repo(f"{PR_ORG}/pcap-release") + + _latest_release: Optional[Release] = None + _current_version: version.Version = None + _current_package: str = None + + @property + def pr_branch(self): + return f"{self.name}-auto-bump-{PR_BASE}" + + @property + def current_version(self) -> version.Version: + """ + Fetches the current version of the release from the packaging file if not already known. + (Should always be identical to the version in blobs.yml) + """ + with open(PACKAGING_PATH.format(self._current_package), "r") as packaging_file: + for line in packaging_file.readlines(): + if line.startswith(self.version_var_name): + # Regex: expecting e.g. "RELEASE_VERSION=1.2.3 # http://release.org/download". 
extracting Semver Group + rgx = rf"{self.version_var_name}=((?:[0-9]+\.){{1,3}}[0-9]+)\s+#.*$" + match = re.match(rgx, line) + if match: + current_version_str = match.groups()[0] + self._current_version = version.parse(current_version_str) + return self._current_version + raise Exception(f"Could not find current version of {self.name}") + + @property + def latest_release(self) -> Release: + if not self._latest_release: + self._latest_release = self.fetch_latest_release() + return self._latest_release + + def check_current_version(self, package) -> version.Version: + self._current_package = package + return self.current_version + + # fetch_latest_release is implemented by subclasses + def fetch_latest_release(self) -> Release: + """ + Dependency release tarballs/downloads are available from various locations (Github or custom websites), + so fetching the latest release (incl. tarball download URL) has to be handled individually for every dependency. + Therefore, fetch_latest_release is implemented by subclasses. 
+ """ + raise NotImplementedError + + def remove_current_blob(self, package): + blob_current_version = self.check_current_version(package) + current_blob_path = f"{self.name}-{blob_current_version}.tar.gz" + if self._check_blob_exists(current_blob_path): + BoshHelper.remove_blob(current_blob_path) + else: + print(f"Current Blob not found: {current_blob_path}") + + def _check_blob_exists(self, blob_path) -> bool: + """ + Checks config/blobs.yml if blob exists + """ + with open(BLOBS_PATH, "r") as blobs_file: + yml = yaml.safe_load(blobs_file) + return blob_path in yml.keys() + + def update_packaging_file(self, package): + """ + Writes the new dependency version and download-url into packages//packaging + """ + with open(PACKAGING_PATH.format(package), "r") as packaging_file: + replacement = "" + for line in packaging_file.readlines(): + if line.startswith(self.version_var_name): + line = f"{self.version_var_name}={self.latest_release.version} # {self.latest_release.url}\n" + replacement += line + + with open(PACKAGING_PATH.format(package), "w") as packaging_file_write: + packaging_file_write.write(replacement) + + def open_pr_exists(self) -> bool: + prs_exist = False + + for pr in self.remote_repo.get_pulls( + state="open", base=PR_BASE, head=f"{PR_ORG}:{self.pr_branch}" + ): # theoretically there should never be more than one open PR, print them anyways + print(f"Open {self.pr_branch} PR exists: {pr.html_url}") + prs_exist = True + return prs_exist + + def create_pr(self): + print(f"[{self.name}] Creating bump branch {PR_ORG}:{self.pr_branch} and PR...") + pr_body = textwrap.dedent( + f""" + Automatic bump to version {self.latest_release.version}, downloaded from {self.latest_release.url}. + + After merge, consider releasing a new version of pcap-release. 
+ """ + ) + if DRY_RUN: + return + + self._create_branch(self.remote_repo, self.pr_branch) + + for package in self.packages: + self._update_file( + self.remote_repo, + PACKAGING_PATH.format(package), + self.pr_branch, + f"dep: Bump {self.name} version to {self.latest_release.version}", + ) + + self._update_file( + self.remote_repo, + BLOBS_PATH, + self.pr_branch, + f"dep: Update blob reference for {self.name} to version {self.latest_release.version}", + ) + + pr = self.remote_repo.create_pull( + title=f"dep: Bump {self.name} version to {self.latest_release.version}", + body=pr_body, + base=PR_BASE, + head=f"{PR_ORG}:{self.pr_branch}", + ) + pr.add_to_labels(PR_LABEL) + print(f"[{self.name}] Created Pull Request: {pr.html_url}") + + + def _create_branch(self, repo, branch): + """ + Creates the branch with the given name. + If it exists, deletes the existing branch and creates a new one. + """ + try: + ref = repo.get_git_ref(f"heads/{branch}") + ref.delete() + except github.UnknownObjectException: + print(f"Branch {branch} didn't exist. 
We'll create it.") + finally: + base_branch = repo.get_git_ref(f"heads/{PR_BASE}") + repo.create_git_ref(f"refs/heads/{branch}", base_branch.object.sha) + + def _update_file(self, repo, path, branch, message): + with open(path, "rb") as f: + content = f.read() + github_file = repo.get_contents(path, ref=branch) + repo.update_file(path=path, message=message, content=content, sha=github_file.sha, branch=branch) + +@dataclass +class WebLinkDependency(Dependency): + + selector: str = "a" + pattern: str = "({name}-({pinned_version}" + r"(?:\.[0-9])+))\.tar\.gz" + + def fetch_latest_release(self) -> Release: + data = requests.get(self.root_url) + html = BeautifulSoup(data.text, "html.parser") + + versions = [] + links = [link for link in html.select(self.selector) if "href" in link.attrs] + + for link in links: + match = re.search( + self.pattern.format(name=self.name, pinned_version=self.pinned_version), + link.attrs["href"], + ) + if match: + versions.append( + Release( + match.group(1), # full name without extension + requests.compat.urljoin(self.root_url, link.attrs["href"]), # absolute URL based on relative link href + match.group(0), # full file name with extension + version.parse(match.group(2)), # version + ) + ) + + if versions: + # sort found versions with highest first, return first entry, i.e. highest applicable version number. 
+ return sorted(versions, key=lambda r: r.version, reverse=True)[0] + + raise Exception(f"Failed to get latest {self.name} version from {self.root_url}") + +def wget(url: str, path: str, auth: Optional[Tuple[str, str]] = None): + """ + downloads a file, optionally decoding any compression applied on HTTP level + """ + with requests.get(url, stream=True, allow_redirects=True, auth=auth) as resp: + if resp.status_code != 200: + raise Exception(f"request failed {resp.status_code}") + # see https://github.com/psf/requests/issues/2155#issuecomment-50771010 + resp.raw.read = functools.partial(resp.raw.read, decode_content=True) + with open(path, "wb") as file: + shutil.copyfileobj(resp.raw, file) + +def write_private_yaml(): + """ + Writes private.yml to config subdirectory (used for blobstore/s3 authentication) + """ + private_yml = { + "blobstore": { + "options": { + "credentials_source": "static", + "json_key": BLOBSTORE_SECRET_ACCESS_KEY, + } + } + } + with open("config/private.yml", "w") as file: + yaml.dump(private_yml, file, default_flow_style=False) + + +def cleanup_local_changes(): + local_git = Repo(os.curdir).git + local_git.reset("--hard") + local_git.clean("-fx") + +def main() -> None: + dependencies: List[Dependency] = [ + WebLinkDependency( + "libpcap", + "LIBPCAP_VERSION", + "1.10", + "https://www.tcpdump.org/release/", + packages=["pcap-api", "pcap-agent"], + ), + ] + + for dependency in dependencies: + + should_be_bumped = False + + for package in dependency.packages: + current_version = dependency.check_current_version(package) + latest_release = dependency.latest_release + latest_version = latest_release.version + + if latest_version <= current_version: + print(f"[{dependency.name}] in {package} already on the latest version: {latest_version} " f"(pinned: {dependency.pinned_version}.*)") + continue + + if dependency.open_pr_exists(): + print(f"[{dependency.name}] Open bump PR exists (for branch: {dependency.pr_branch})") + continue + + 
print(f"[{dependency.name}] Version-Bump required for package {package}: {current_version} --> {latest_version}") + should_be_bumped = True + latest_release.download() + # update blobs in specific package + dependency.remove_current_blob(package) + latest_release.add_blob() + dependency.update_packaging_file(package) + if not DRY_RUN: + write_private_yaml() + BoshHelper.upload_blobs() + + if not should_be_bumped: + continue + + # create only one PR per dependency + dependency.create_pr() + + # clear the working directory for the next dependency bump. + cleanup_local_changes() + + +if __name__ == "__main__": + main() diff --git a/ci/scripts/bump_golang_vendored.py b/ci/scripts/bump_golang_vendored.py new file mode 100755 index 00000000..2579abed --- /dev/null +++ b/ci/scripts/bump_golang_vendored.py @@ -0,0 +1,284 @@ +#!/usr/bin/env python + +import datetime +import os +import re +import subprocess +import sys +import textwrap +from github import Github, GithubException, UnknownObjectException +from git import Repo, GitCommandError +import yaml +from ruamel.yaml import YAML + +# Required env vars +GH_TOKEN = os.environ["GITHUB_COM_TOKEN"] +PR_BASE = os.environ["PR_BASE"] +PR_ORG = os.environ["PR_ORG"] +PR_LABEL = os.environ["PR_LABEL"] +GO_PACKAGE_REPO_ROOT = os.environ["GO_PACKAGE_REPO_ROOT"] +GIT_AUTHOR_NAME = os.environ["GIT_AUTHOR_NAME"] +GIT_AUTHOR_EMAIL = os.environ["GIT_AUTHOR_EMAIL"] + +GCP_SERVICE_KEY = os.environ["GCP_SERVICE_KEY"] + +# Github.com +GH = Github(GH_TOKEN) + +# Current Git Repository (pcap-release) +ORIGIN_PATH = f"{PR_ORG}/pcap-release" +REMOTE_REPO = GH.get_repo(ORIGIN_PATH) +LOCAL_REPO = Repo(os.curdir) +LOCAL_GIT = LOCAL_REPO.git + +with LOCAL_REPO.config_writer() as config: + config.add_section("user") + config.set("user", "name", GIT_AUTHOR_NAME) + config.set("user", "email", GIT_AUTHOR_EMAIL) + + gh_url = f"url \"https://{GH_TOKEN}@github.com/\"" + config.add_section(gh_url) + config.set(gh_url, "insteadOf", "https://github.com/") + +# 
golang-release repo (github.com & local path) +GOLANG_RELEASE_REPO = GH.get_repo("bosh-packages/golang-release") +GOLANG_RELEASE_REPO_LOCAL = Repo(GO_PACKAGE_REPO_ROOT) + +# Paths in this Repository +PRIVATE_YAML_PATH = "config/private.yml" +PACKAGE_NAME = "golang-1-linux" +PACKAGE_PATH = f"packages/{PACKAGE_NAME}" +VERSION_PATH = PACKAGE_PATH + "/version" +DOCS_VERSION_PATH = "docs/go.version" +VENDORED_COMMIT_PATH = PACKAGE_PATH + "/vendored-commit" +SPEC_LOCK_PATH = PACKAGE_PATH + "/spec.lock" +GOLANG_RELEASE_INDEX_YML_PATH = f".final_builds/{PACKAGE_PATH}/index.yml" + +# Bump Branch Name +PR_BRANCH = "golang-auto-bump" +BRANCH_FULLNAME = f"{PR_ORG}:{PR_BRANCH}" + + +def main(): + # get the latest version via PyGithub (without cloning repository) for performance/traffic reasons + latest_version = get_latest_version() + + # if re.match(r"^\d+\.\d+$", latest_version): + # print(f"Skipping unpatched version {latest_version}.") + # + # sys.exit(0) + + current_version = get_current_version() + + if latest_version != current_version: + print(f"A new version exists: {current_version} --> {latest_version}") + if open_pr_exists(REMOTE_REPO): + print("A PR for a go version bump already exists. Exiting.") + else: + # Setup private.yml with Blobstore Credentials (for 'bosh vendor-package' later) + write_private_yaml() + + try: + check_for_conflicting_bump_branch(REMOTE_REPO) + except Exception: + print("A golang-bump branch exists, skipping. 
Merge or delete the other branch to bump dependencies.") + return + + print("No open golang-bump PR exists, bump required.") + test_local_golang_release_clone(latest_version) + blob_link = run_vendor_package() + + checkout_bump_branch() + update_versioning_files(latest_version) + create_pr(current_version, latest_version, blob_link) + + else: + print(f"Already on latest version {current_version}, exiting.") + + +def get_current_version(): + with open(VERSION_PATH, "r") as version_file: + return version_file.read() + + +def get_latest_version(): + version_file = GOLANG_RELEASE_REPO.get_contents(VERSION_PATH) + return version_file.decoded_content.decode() + + +def test_local_golang_release_clone(latest_version_remote): + # get actual latest version from local repository, compare against remote + golang_release_version_file_path = os.path.join(GO_PACKAGE_REPO_ROOT, VERSION_PATH) + with open(golang_release_version_file_path, "r") as file: + latest_version_local = file.read() + if latest_version_remote != latest_version_local: # these should never differ, unless there was an error in cloning golang-release + raise Exception( + f"golang_release versions differ between github.com remote ({latest_version_remote}) and local clone in {GO_PACKAGE_REPO_ROOT} ({latest_version_local})" + ) + + +def run_vendor_package(): + # Documentation for bosh vendor-package: https://bosh.io/docs/package-vendoring/#vendor + cmd_params = ["bosh", "vendor-package", PACKAGE_NAME, GO_PACKAGE_REPO_ROOT] + print(f"Running '{' '.join(cmd_params)}' ...") + + # run as subprocess and handle errors + process = subprocess.Popen(cmd_params, stderr=subprocess.PIPE, stdout=subprocess.PIPE) + stdout, stderr = process.communicate() + if stdout: + # we don't expect any stdout under normal behaviour, might be useful for debugging though + print(stdout.decode("utf-8"), file=sys.stdout) + response = stderr.decode("utf-8") # bosh vendor-package writes success info to stderr for some reason + print(response, 
file=sys.stdout) + if process.returncode != 0: + raise Exception(f"bosh vendor-package failed. Aborting: {response}") + + if response == "": + print("bosh vendor-package succeeded but provided no output. " + "The golang-package-blob for this version has been uploaded previously. Continuing.") + # extract blobstore URL + rgx = r".*? Successfully uploaded file to (.*?)$" + match = re.match(rgx, response) + if match: + groups = match.groups() + if groups and len(groups) == 1: + blob_link = groups[0] + print(f"bosh vendor-package successful. Link to new blob: {blob_link}") + return blob_link + + print("No new blob was uploaded. See messages above.") + + +def open_pr_exists(repo) -> bool: + prs_exist = False + for pr in repo.get_pulls( + state="open", base="master", head=BRANCH_FULLNAME + ): # theoretically there should never be more than one open PR, print them anyway + print(f"{repo.name}: Open {PR_BRANCH} PR: {pr.html_url}") + prs_exist = True + + # print statement for Jenkins Job build description + pr_version = pr.title.split(" ")[-1] + print(f"PR_URL={pr.html_url} VERSION={pr_version} ACTION=ALREADY_EXISTS") + return prs_exist + + +def check_for_conflicting_bump_branch(repo): + # the bump branch should not exist after a successful previous run/merge. If there is an existing branch, + # we risk including other changes in PR, so we abort instead. + # We're checking on the remote repo since we just cloned it and don't expect any local changes yet. + try: + repo.get_branch(PR_BRANCH) + raise Exception( + f"{repo.name}: The branch {PR_BRANCH} already exists in the remote repository. It might be a stale branch. Aborting.") + except GithubException: + print(f"No conflicting {PR_BRANCH} branch exists on remote repository. 
Continuing.") + + +def update_versioning_files(latest_version): + # write current golang-release commit hash to vendored-commit file + repo = Repo(GO_PACKAGE_REPO_ROOT) + current_sha = repo.head.object.hexsha + with open(VENDORED_COMMIT_PATH, "w") as vendored_commit_file: + vendored_commit_file.write(current_sha) + + # set versions file + with open(VERSION_PATH, "w") as version_file: + version_file.write(latest_version) + + with open(DOCS_VERSION_PATH, "w") as docs_version_file: + modified_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") + docs_version_file.write(f"This file was updated by CI on {modified_time}\n") + docs_version_file.write(f"go{latest_version}") + + +def write_private_yaml(): + private_yml = { + "blobstore": { + "options": { + "credentials_source": "static", + "json_key": GCP_SERVICE_KEY, + } + } + } + + with open(PRIVATE_YAML_PATH, "w") as file: + yaml.dump(private_yml, file, default_flow_style=False) + + +def checkout_bump_branch(): + print(f"Checking out branch {PR_BRANCH}") + try: + LOCAL_GIT.checkout("-b", PR_BRANCH) + except GitCommandError as exception: + print(f"Encountered exception while checking out {PR_BRANCH}: {exception}") + + +def create_pr(current_version, new_version, blob_link): + # create commit + print("Creating git commit...") + # add changed files + LOCAL_GIT.add(GOLANG_RELEASE_INDEX_YML_PATH, "-f") + LOCAL_GIT.add(VERSION_PATH) + LOCAL_GIT.add(DOCS_VERSION_PATH) + LOCAL_GIT.add(VENDORED_COMMIT_PATH) + LOCAL_GIT.add(SPEC_LOCK_PATH) + # create and push commit + LOCAL_GIT.commit("-m", f"dep(go): bump golang to {new_version}") + LOCAL_GIT.push("origin", PR_BRANCH) + + # create PR + print("Creating pull request...") + pr_body = textwrap.dedent( + f""" + Automatic bump of golang-1-release from go version {current_version} to version {new_version}. + + Link to golang blobstore package: {blob_link} + + After merge, consider making a new release. 
+ """ + ) + pr = REMOTE_REPO.create_pull( + title=f"dep(go): Bump golang version to {new_version}", + body=pr_body, + base=PR_BASE, + head=BRANCH_FULLNAME, + ) + + pr.add_to_labels(PR_LABEL) + + +def update_git_content(file_path, file_content, latest_version): + # latest_version may contain a patch version which we need to strip + latest_major_minor = ".".join(latest_version.split(".")[0:2]) + + if file_path == "go.mod": + new_file_content = re.sub(r"^(go [0-9](.[0-9]+)+)$", "go " + latest_major_minor, file_content, 1, re.MULTILINE) + elif file_path == "Jenkinsfile": + # Should consider if we always have sometimes version with only major.minor pattern (no patch). + new_file_content = re.sub(r"go 'Go [0-9](.[0-9]+)+'", "go 'Go " + latest_major_minor + "'", file_content) + elif file_path == ".github/workflows/golint.yml": + # Regex substitution for version number in Yaml File, e.g. + # env: + # GOLANG_VERSION: 1.20 + regex_pattern = r"^(env:\n\s+GOLANG_VERSION: )\d+(.\d+)+$" + new_file_content = re.sub(regex_pattern, rf"\g<1>{latest_version}", file_content, 1, re.MULTILINE) + else: + print("This filename is not in the list of to be processed files.") + return file_content + return new_file_content + + +def cleanup(): + # make sure no credentials remain + try: + os.remove(PRIVATE_YAML_PATH) + except Exception as e: + print(f"Could not clean up: {e}") + + +if __name__ == "__main__": + try: + main() + finally: + cleanup() diff --git a/ci/scripts/notes.sh b/ci/scripts/notes.sh index 8ccbd21b..3bed1450 100755 --- a/ci/scripts/notes.sh +++ b/ci/scripts/notes.sh @@ -25,4 +25,10 @@ releases: \`\`\` EOF +cat >> "${CONCOURSE_ROOT}/${RELEASE_ROOT}/notes.md" <