diff --git a/.github/workflows/latest.yaml b/.github/workflows/latest.yaml index 474b5c3ec..51a758b17 100644 --- a/.github/workflows/latest.yaml +++ b/.github/workflows/latest.yaml @@ -20,32 +20,39 @@ jobs: - run: go env - name: go build run: go build -v ./... - - name: go test + - name: go unit test run: go test -v ./... - # e2e: - # name: e2e tests - # runs-on: ubuntu-latest - # needs: [ build ] - # - # steps: - # - uses: ko-build/setup-ko@v0.6 - # - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - # - uses: chainguard-dev/actions/setup-kind@main - # with: - # k8s-version: v1.23.x - # - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 - # with: - # go-version: "1.21" - # - uses: vdemeester/setup-tektoncd@main - # with: - # pipeline: v0.40.x - # pipeline-feature-flags: '{"enable-api-fields": "alpha"}' - # - name: install manual-approval-gate custom task - # run: | - # # FIXME: remove this once >= 0.41.x - # kubectl apply -f https://storage.googleapis.com/tekton-releases/pipeline/previous/v0.40.2/resolvers.yaml - # ko apply --local -f config/ + e2e: + name: e2e tests + runs-on: ubuntu-latest + needs: [ build, publish ] + + steps: + - uses: ko-build/setup-ko@v0.6 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + - uses: chainguard-dev/actions/setup-kind@main + with: + k8s-version: v1.23.x + - uses: actions/setup-go@0c52d547c9bc32b1aa3301fd7a9cb496313a4491 # v5.0.0 + with: + go-version: "1.21" + - uses: vdemeester/setup-tektoncd@main + with: + pipeline: v0.56.x + pipeline-feature-flags: '{"enable-step-actions": "true"}' + - name: tests + run: | + REGISTRY=registry.registry.svc.cluster.local:32222 + KO_DOCKER_REPO=${REGISTRY}/cache ko publish --base-import-paths --tags=latest ./cmd/cache + sed "s/image:.*/image: ${REGISTRY}\/cache:latest/g" tekton/cache-fetch.yaml | kubectl apply -f - + sed "s/image:.*/image: ${REGISTRY}\/cache:latest/g" tekton/cache-upload.yaml | kubectl apply -f - + 
kubectl apply -f tests/ + tkn task start cache-fetch-go -p gitURL=https://github.com/vdemeester/go-helloworld-app -p gitRevision=main -p cachePatterns="**.go,**go.sum" -p cacheURIBase=oci://${REGISTRY}/cache/go -w name=source,emptyDir= -w name=gocache,emptyDir= -w name=gomodcache,emptyDir= --showlog + tkn task start cache-upload-go -p gitURL=https://github.com/vdemeester/go-helloworld-app -p gitRevision=main -p cachePatterns="**.go,**go.sum" -p cacheURIBase=oci://${REGISTRY}/cache/go -w name=source,emptyDir= -w name=gocache,emptyDir= -w name=gomodcache,emptyDir= --showlog + tkn task start cache-fetch-go -p gitURL=https://github.com/vdemeester/go-helloworld-app -p gitRevision=main -p cachePatterns="**.go,**go.sum" -p cacheURIBase=oci://${REGISTRY}/cache/go -w name=source,emptyDir= -w name=gocache,emptyDir= -w name=gomodcache,emptyDir= --showlog + tkn taskrun list + # FIXME: fail if something failed publish: name: publish latest diff --git a/.ko.yaml b/.ko.yaml new file mode 100644 index 000000000..eaa5b9761 --- /dev/null +++ b/.ko.yaml @@ -0,0 +1 @@ +defaultBaseImage: registry.access.redhat.com/ubi8/ubi-minimal diff --git a/cmd/cache/fetch.go b/cmd/cache/fetch.go index 5ce221786..af3a1b6de 100644 --- a/cmd/cache/fetch.go +++ b/cmd/cache/fetch.go @@ -1,53 +1,86 @@ package main import ( + "fmt" + "io/fs" + "os" "path/filepath" + "github.com/moby/patternmatcher" "github.com/openshift-pipelines/tekton-caches/internal/fetch" "github.com/openshift-pipelines/tekton-caches/internal/hash" "github.com/spf13/cobra" ) const ( - filesFlag = "hashfiles" - targetFlag = "target" - folderFlag = "folder" + workingdirFlag = "workingdir" + filesFlag = "hashfiles" + patternsFlag = "pattern" + sourceFlag = "source" + folderFlag = "folder" ) func fetchCmd() *cobra.Command { cmd := &cobra.Command{ Use: "fetch", RunE: func(cmd *cobra.Command, args []string) error { - files, err := cmd.Flags().GetString(filesFlag) + target, err := cmd.Flags().GetString(sourceFlag) if err != nil { return err 
} - target, err := cmd.Flags().GetString(targetFlag) + folder, err := cmd.Flags().GetString(folderFlag) if err != nil { return err } - folder, err := cmd.Flags().GetString(folderFlag) + workingdir, err := cmd.Flags().GetString(workingdirFlag) if err != nil { return err } - // FIXME error out if empty - - matches, err := filepath.Glob(files) + patterns, err := cmd.Flags().GetStringArray(patternsFlag) if err != nil { return err } + matches := glob(workingdir, func(s string) bool { + m, err := patternmatcher.Matches(s, patterns) + if err != nil { + fmt.Fprintf(os.Stderr, "error trying to match files with '%v': %s", patterns, err) + return false + } + return m + }) + if len(matches) == 0 { + return fmt.Errorf("didn't match any files with %v", patterns) + } else { + fmt.Fprintf(os.Stderr, "Matched the following files: %v\n", matches) + } // TODO: Hash files based of matches hashStr, err := hash.Compute(matches) if err != nil { return err } - return fetch.Try(cmd.Context(), hashStr, target, folder) + + // FIXME: Wrap the error. 
+ // If not, warn and do not fail + // fmt.Fprintf(os.Stderr, "Repository %s doesn't exists or isn't reachable, fetching no cache.\n", cacheImageRef) + return fetch.Fetch(cmd.Context(), hashStr, target, folder) }, } - cmd.Flags().String(filesFlag, "", "Files pattern to compute the hash from") - cmd.Flags().String(targetFlag, "", "Cache oci image target reference") + cmd.Flags().StringArray(patternsFlag, []string{}, "Files pattern to compute the hash from") + cmd.Flags().String(sourceFlag, "", "Cache source reference") cmd.Flags().String(folderFlag, "", "Folder where to extract the content of the cache if it exists") + cmd.Flags().String(workingdirFlag, ".", "Working dir from where the files patterns needs to be taken") return cmd } + +func glob(root string, fn func(string) bool) []string { + var files []string + filepath.WalkDir(root, func(s string, d fs.DirEntry, e error) error { + if fn(s) { + files = append(files, s) + } + return nil + }) + return files +} diff --git a/cmd/cache/upload.go b/cmd/cache/upload.go index 5d8b96acb..5b7911943 100644 --- a/cmd/cache/upload.go +++ b/cmd/cache/upload.go @@ -1,35 +1,53 @@ package main import ( - "path/filepath" + "fmt" + "os" + "github.com/moby/patternmatcher" "github.com/openshift-pipelines/tekton-caches/internal/hash" "github.com/openshift-pipelines/tekton-caches/internal/upload" "github.com/spf13/cobra" ) +const ( + targetFlag = "target" +) + func uploadCmd() *cobra.Command { cmd := &cobra.Command{ Use: "upload", RunE: func(cmd *cobra.Command, args []string) error { - files, err := cmd.Flags().GetString(filesFlag) + target, err := cmd.Flags().GetString(targetFlag) if err != nil { return err } - target, err := cmd.Flags().GetString(targetFlag) + folder, err := cmd.Flags().GetString(folderFlag) if err != nil { return err } - folder, err := cmd.Flags().GetString(folderFlag) + workingdir, err := cmd.Flags().GetString(workingdirFlag) if err != nil { return err } - // FIXME error out if empty - - matches, err := 
filepath.Glob(files) + patterns, err := cmd.Flags().GetStringArray(patternsFlag) if err != nil { return err } + matches := glob(workingdir, func(s string) bool { + m, err := patternmatcher.Matches(s, patterns) + if err != nil { + fmt.Fprintf(os.Stderr, "error trying to match files with '%v': %s", patterns, err) + return false + } + return m + }) + if len(matches) == 0 { + return fmt.Errorf("Didn't match any files with %v", patterns) + } else { + fmt.Fprintf(os.Stderr, "Matched the following files: %v\n", matches) + } + // TODO: Hash files based of matches hashStr, err := hash.Compute(matches) if err != nil { @@ -38,9 +56,10 @@ func uploadCmd() *cobra.Command { return upload.Upload(cmd.Context(), hashStr, target, folder) }, } - cmd.Flags().String(filesFlag, "", "Files pattern to compute the hash from") - cmd.Flags().String(targetFlag, "", "Cache oci image target reference") + cmd.Flags().StringArray(patternsFlag, []string{}, "Files pattern to compute the hash from") + cmd.Flags().String(targetFlag, "", "Cache target reference") cmd.Flags().String(folderFlag, "", "Folder where to extract the content of the cache if it exists") + cmd.Flags().String(workingdirFlag, ".", "Working dir from where the files patterns needs to be taken") return cmd } diff --git a/go.mod b/go.mod index beabe438c..f7514933c 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.21 require ( github.com/codeclysm/extract/v3 v3.1.1 github.com/google/go-containerregistry v0.19.0 + github.com/moby/patternmatcher v0.6.0 github.com/spf13/cobra v1.8.0 ) diff --git a/go.sum b/go.sum index 0c6d14f48..bd9ee9878 100644 --- a/go.sum +++ b/go.sum @@ -45,6 +45,8 @@ github.com/mattn/go-colorable v0.0.6/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaO github.com/mattn/go-isatty v0.0.0-20160806122752-66b8e73f3f5c/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod 
h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk= +github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0-rc3 h1:fzg1mXZFj8YdPeNkRXMg+zb88BFV0Ys52cJydRwBkb8= diff --git a/internal/fetch/fetch.go b/internal/fetch/fetch.go index b808f271c..371a55375 100644 --- a/internal/fetch/fetch.go +++ b/internal/fetch/fetch.go @@ -9,7 +9,7 @@ import ( "github.com/openshift-pipelines/tekton-caches/internal/provider/oci" ) -func Try(ctx context.Context, hash, target, folder string) error { +func Fetch(ctx context.Context, hash, target, folder string) error { u, err := url.Parse(target) if err != nil { return err @@ -17,7 +17,7 @@ func Try(ctx context.Context, hash, target, folder string) error { newTarget := strings.TrimPrefix(target, u.Scheme+"://") switch u.Scheme { case "oci": - return oci.Try(ctx, hash, newTarget, folder) + return oci.Fetch(ctx, hash, newTarget, folder) case "s3": return fmt.Errorf("s3 schema not (yet) supported: %s", target) case "gcs": diff --git a/internal/provider/oci/fetch.go b/internal/provider/oci/fetch.go index 70ca75d01..419f7a2f5 100644 --- a/internal/provider/oci/fetch.go +++ b/internal/provider/oci/fetch.go @@ -11,17 +11,15 @@ import ( "github.com/google/go-containerregistry/pkg/crane" ) -func Try(ctx context.Context, hash, target, folder string) error { +func Fetch(ctx context.Context, hash, target, folder string) error { cacheImageRef := strings.ReplaceAll(target, "{{hash}}", hash) fmt.Fprintf(os.Stderr, "Trying to fetch oci image %s in %s\n", cacheImageRef, folder) // Try to fetch it (if it exists) image, err := crane.Pull(cacheImageRef) if err != nil { - // If not, warn and do not fail 
fmt.Fprintf(os.Stderr, "Warning: %s\n", err) - fmt.Fprintf(os.Stderr, "Repository %s doesn't exists or isn't reachable, fetching no cache.\n", cacheImageRef) - return nil + return err } f, err := os.Create(filepath.Join(folder, "cache.tar")) diff --git a/tekton/cache-fetch.yaml b/tekton/cache-fetch.yaml new file mode 100644 index 000000000..dbe92975d --- /dev/null +++ b/tekton/cache-fetch.yaml @@ -0,0 +1,65 @@ +apiVersion: tekton.dev/v1alpha1 +kind: StepAction +metadata: + name: cache-fetch + annotations: + tekton.dev/pipelines.minVersion: "0.56.0" + tekton.dev/tags: "cache" +spec: + params: + - name: patterns + description: | + Regular expression to select files to include to compute the hash. + For example, in the case of a Go project, you can use `go.mod` for this, so the value would be "**/go.sum" (to work with possible sub go modules as well). + type: array + - name: source + description: | + The source from where the cache should be fetched. It's a URI with the scheme defining the "provider". In addition, one can add a {{hash}} variable to use the computed hash in the reference (oci image tags, path in s3, …) + Currently supported: + - oci:// (e.g. oci://quay.io/vdemeester/go-cache:{{hash}} + - s3:// (e.g. s3:// + type: string + - name: cachePath + description: | + Path where to extract the cache content. + It can refer any folder, backed by a workspace or a volume, or nothing. + type: string + - name: workingdir + description: | + The working dir from where the files patterns needs to be taken + type: string + results: # Any result to "publish" ? + - name: fetched + description: | + Whether a cache was fetched or not (true/false). This step won't fail if it didn't manage to fetch cache. This results allows the next step to act whether something was fetched or not. 
+ env: + - name: PARAM_SOURCE + value: $(params.source) + - name: PARAM_CACHE_PATH + value: $(params.cachePath) + - name: PARAM_WORKINGDIR + value: $(params.workingdir) + # FIXME: use a released version once something is released :) + image: ghcr.io/openshift-pipelines/tekton-caches/cache:latest + args: ["$(params.patterns[*])"] + script: | + #!/bin/sh + + PATTERN_FLAGS="" + echo "Patterns: $*" + for p in $*; do + PATTERN_FLAGS="${PATTERN_FLAGS} --pattern ${p}" + done + + set -x + /ko-app/cache fetch ${PATTERN_FLAGS} \ + --source ${PARAM_SOURCE} \ + --folder ${PARAM_CACHE_PATH} \ + --workingdir ${PARAM_WORKINGDIR} + if [ $? -eq 0 ]; then + echo -n true > $(step.results.fetched.path) + else + echo -n false > $(step.results.fetched.path) + fi + + exit 0 diff --git a/tekton/cache-upload.yaml b/tekton/cache-upload.yaml new file mode 100644 index 000000000..39aac5cd6 --- /dev/null +++ b/tekton/cache-upload.yaml @@ -0,0 +1,58 @@ +apiVersion: tekton.dev/v1alpha1 +kind: StepAction +metadata: + name: cache-upload + annotations: + tekton.dev/pipelines.minVersion: "0.56.0" + tekton.dev/tags: "cache" +spec: + params: + - name: patterns + description: | + Regular expression to select files to include to compute the hash. + For example, in the case of a Go project, you can use `go.mod` for this, so the value would be "**/go.sum" (to work with possible sub go modules as well). + type: array + - name: target + description: | + The target from where the cache should be uploaded. It's a URI with the scheme defining the "provider". In addition, one can add a {{hash}} variable to use the computed hash in the reference (oci image tags, path in s3, …) + Currently supported: + - oci:// (e.g. oci://quay.io/vdemeester/go-cache:{{hash}} + - s3:// (e.g. s3:// + type: string + - name: cachePath + description: | + Path where to extract the cache content. + It can refer any folder, backed by a workspace or a volume, or nothing. 
+ type: string + - name: workingdir + description: | + The working dir from where the files patterns needs to be taken + type: string + results: # Any result to "publish" ? + - name: fetched + description: | + Whether a cache was fetched or not (true/false). This step won't fail if it didn't manage to fetch cache. This results allows the next step to act whether something was fetched or not. + env: + - name: PARAM_TARGET + value: $(params.target) + - name: PARAM_CACHE_PATH + value: $(params.cachePath) + - name: PARAM_WORKINGDIR + value: $(params.workingdir) + # FIXME: use a released version once something is released :) + image: ghcr.io/openshift-pipelines/tekton-caches/cache:latest + args: ["$(params.patterns[*])"] + script: | + #!/bin/sh + + PATTERN_FLAGS="" + echo "Patterns: $*" + for p in $*; do + PATTERN_FLAGS="${PATTERN_FLAGS} --pattern ${p}" + done + + set -ex + /ko-app/cache upload ${PATTERN_FLAGS} \ + --target ${PARAM_TARGET} \ + --folder ${PARAM_CACHE_PATH} \ + --workingdir ${PARAM_WORKINGDIR} diff --git a/tests/git-batch-merge.yaml b/tests/git-batch-merge.yaml new file mode 100644 index 000000000..1c8470c29 --- /dev/null +++ b/tests/git-batch-merge.yaml @@ -0,0 +1,147 @@ +apiVersion: tekton.dev/v1alpha1 +kind: StepAction +metadata: + name: git-batch-merge + annotations: + tekton.dev/pipelines.minVersion: "0.54.0" + tekton.dev/categories: Git + tekton.dev/tags: git + tekton.dev/displayName: "git batch merge" + tekton.dev/platforms: "linux/amd64,linux/s390x,linux/ppc64le,linux/arm64" +spec: + params: + - name: sourcePath + description: The git repo will be cloned onto this path + - name: url + description: git url to clone + type: string + - name: revision + description: base git revision to checkout (branch, tag, sha, ref…) + type: string + default: master + - name: refspec + description: base git refspec to fetch before checking out revision + type: string + default: "refs/heads/main:refs/heads/main" + - name: gitUserName + description: git user name to 
use for creating the batched commit (First Last) + type: string + default: GitBatch Task + - name: gitUserEmail + description: git user email to use for creating the batched commit (First.Last@domain.com) + type: string + default: GitBatch.Task@tekton.dev + - name: mode + description: git operation to perform while batching (choose from merge, cherry-pick) + type: string + default: merge + - name: submodules + description: defines if the resource should initialize and fetch the submodules + type: string + default: "true" + - name: depth + description: performs a shallow clone where only the most recent commit(s) will be fetched + type: string + default: "0" + - name: sslVerify + description: defines if http.sslVerify should be set to true or false in the global git config + type: string + default: "true" + - name: subdirectory + description: subdirectory inside the "output" workspace to clone the git repo into + type: string + default: "" + - name: deleteExisting + description: clean out the contents of the repo's destination directory (if it already exists) before trying to clone the repo there + type: string + default: "false" + - name: gitInitImage + description: The image used where the git-init binary is. + default: "gcr.io/tekton-releases/github.com/tektoncd/pipeline/cmd/git-init:v0.18.1" + type: string + results: + - name: commit + description: The final commit SHA that was obtained after batching all provided refs onto revision + - name: tree + description: The git tree SHA that was obtained after batching all provided refs onto revision. 
+ env: + - name: PARAM_SOURCE_PATH + value: $(params.sourcePath) + - name: PARAM_SUB_DIR + value: $(params.subdirectory) + - name: PARAM_DELETE_EXISTING + value: $(params.deleteExisting) + - name: PARAM_REF_SPEC + value: $(params.refspec) + - name: PARAM_URL + value: $(params.url) + - name: PARAM_REVISION + value: $(params.revision) + - name: PARAM_SSL_VERIFY + value: $(params.sslVerify) + - name: PARAM_SUBMODULES + value: $(params.submodules) + - name: PARAM_DEPTH + value: $(params.depth) + - name: PARAM_GIT_USER_NAME + value: $(params.gitUserName) + - name: PARAM_GIT_USER_EMAIL + value: $(params.gitUserEmail) + - name: PARAM_MODE + value: $(params.mode) + image: $(params.gitInitImage) + script: | + CHECKOUT_DIR="${PARAM_SOURCE_PATH}/${PARAM_SUB_DIR}" + + cleandir() { + # Delete any existing contents of the repo directory if it exists. + # + # We don't just "rm -rf $CHECKOUT_DIR" because $CHECKOUT_DIR might be "/" + # or the root of a mounted volume. + if [[ -d "$CHECKOUT_DIR" ]] ; then + # Delete non-hidden files and directories + rm -rf "$CHECKOUT_DIR"/* + # Delete files and directories starting with . but excluding .. + rm -rf "$CHECKOUT_DIR"/.[!.]* + # Delete files and directories starting with .. 
plus any other character + rm -rf "$CHECKOUT_DIR"/..?* + fi + } + + if [[ "${PARAM_DELETE_EXISTING}" == "true" ]] ; then + cleandir + fi + + refs="${PARAM_REF_SPEC}" + + /ko-app/git-init \ + -url "${PARAM_URL}" \ + -revision "${PARAM_REVISION}" \ + -refspec "$refs" \ + -path "$CHECKOUT_DIR" \ + -sslVerify="${PARAM_SSL_VERIFY}" \ + -submodules="${PARAM_SUBMODULES}" \ + -depth "${PARAM_DEPTH}" + + git -C $CHECKOUT_DIR config user.name "${PARAM_GIT_USER_NAME}" + git -C $CHECKOUT_DIR config user.email "${PARAM_GIT_USER_EMAIL}" + + mode="${PARAM_MODE}" + if [[ $mode == "merge" ]]; then + for ref in $p; do + git -C $CHECKOUT_DIR merge --quiet --allow-unrelated-histories refs/batch/$ref + done + elif [[ $mode == "cherry-pick" ]]; then + for ref in $p; do + git -C $CHECKOUT_DIR cherry-pick --allow-empty --keep-redundant-commits refs/batch/$ref + done + else + echo "unsupported mode $mode" + exit 1 + fi + + RESULT_SHA="$(git -C $CHECKOUT_DIR rev-parse HEAD)" + TREE_SHA="$(git -C $CHECKOUT_DIR rev-parse HEAD^{tree})" + # Make sure we don't add a trailing newline to the result! 
+ echo -n "$(echo $RESULT_SHA | tr -d '\n')" > $(step.results.commit.path) + echo -n "$(echo $TREE_SHA | tr -d '\n')" > $(step.results.tree.path) diff --git a/tests/task-cache-fetch-go.yaml b/tests/task-cache-fetch-go.yaml new file mode 100644 index 000000000..233d19185 --- /dev/null +++ b/tests/task-cache-fetch-go.yaml @@ -0,0 +1,68 @@ +apiVersion: tekton.dev/v1 +kind: Task +metadata: + name: cache-fetch-go +spec: + workspaces: + - name: source + - name: gocache + - name: gomodcache + params: + - name: gitURL + type: string + - name: gitRevision + type: string + default: "main" + - name: cachePatterns + type: array + - name: cacheURIBase + steps: + - name: git-clone + ref: + name: git-batch-merge + params: + - name: url + value: $(params.gitURL) + - name: revision + value: $(params.gitRevision) + - name: sourcePath + value: $(workspaces.source.path) + - name: gocache-fetch + ref: + name: cache-fetch + params: + - name: patterns + value: $(params.cachePatterns) + - name: source + value: $(params.cacheURIBase)-go:{{hash}} + - name: cachePath + value: $(workspaces.gocache.path) + - name: workingdir + value: $(workspaces.source.path) + - name: gomodcache-fetch + ref: + name: cache-fetch + params: + - name: patterns + value: $(params.cachePatterns) + - name: source + value: $(params.cacheURIBase)-gomod:{{hash}} + - name: cachePath + value: $(workspaces.gomodcache.path) + - name: workingdir + value: $(workspaces.source.path) + - name: go-build + image: cgr.dev/chainguard/go + workingDir: $(workspaces.source.path) + script: | + #!/bin/sh + set -ex + + export GOCACHE=$(workspaces.gocache.path) + export GOMODCACHE=$(workspaces.gomodcache.path) + + go env + go build -v . 
+ + du -sk -h $(go env GOCACHE) + du -sk -h $(go env GOMODCACHE) diff --git a/tests/task-cache-upload-go.yaml b/tests/task-cache-upload-go.yaml new file mode 100644 index 000000000..78cba8700 --- /dev/null +++ b/tests/task-cache-upload-go.yaml @@ -0,0 +1,76 @@ +apiVersion: tekton.dev/v1 +kind: Task +metadata: + name: cache-upload-go +spec: + workspaces: + - name: source + - name: gocache + - name: gomodcache + params: + - name: gitURL + type: string + - name: gitRevision + type: string + default: "main" + - name: cachePatterns + type: array + - name: cacheURIBase + steps: + - name: git-clone + ref: + name: git-batch-merge + params: + - name: url + value: $(params.gitURL) + - name: revision + value: $(params.gitRevision) + - name: sourcePath + value: $(workspaces.source.path) + - name: go-build + image: cgr.dev/chainguard/go + workingDir: $(workspaces.source.path) + script: | + #!/bin/sh + set -ex + + export GOCACHE=$(workspaces.gocache.path) + export GOMODCACHE=$(workspaces.gomodcache.path) + + go env + go build -v . 
+ + du -sk -h $(go env GOCACHE) + du -sk -h $(go env GOMODCACHE) + + go env GOCACHE | tr -d '\n' > $(step.results.gocache.path) + go env GOMODCACHE | tr -d '\n' > $(step.results.gomodcache.path) + results: + - name: gocache + type: string + - name: gomodcache + type: string + - name: gomod-cache-upload + ref: + name: cache-upload + params: + - name: patterns + value: $(params.cachePatterns) + - name: target + value: $(params.cacheURIBase)-gomod:{{hash}} + - name: cachePath + value: $(steps.go-build.results.gomodcache) + - name: workingdir + value: $(workspaces.source.path) + - name: go-cache-upload + ref: + name: cache-upload + params: + - name: patterns + value: $(params.cachePatterns) + - name: target + value: $(params.cacheURIBase)-go:{{hash}} + - name: cachePath + value: $(steps.go-build.results.gocache) + - name: workingdir + value: $(workspaces.source.path) diff --git a/vendor/github.com/moby/patternmatcher/LICENSE b/vendor/github.com/moby/patternmatcher/LICENSE new file mode 100644 index 000000000..6d8d58fb6 --- /dev/null +++ b/vendor/github.com/moby/patternmatcher/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2013-2018 Docker, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/moby/patternmatcher/NOTICE b/vendor/github.com/moby/patternmatcher/NOTICE new file mode 100644 index 000000000..e5154640f --- /dev/null +++ b/vendor/github.com/moby/patternmatcher/NOTICE @@ -0,0 +1,16 @@ +Docker +Copyright 2012-2017 Docker, Inc. + +This product includes software developed at Docker, Inc. (https://www.docker.com). + +The following is courtesy of our legal counsel: + + +Use and transfer of Docker may be subject to certain restrictions by the +United States and other governments. +It is your responsibility to ensure that your use and/or transfer does not +violate applicable laws. + +For more information, please see https://www.bis.doc.gov + +See also https://www.apache.org/dev/crypto.html and/or seek legal counsel. diff --git a/vendor/github.com/moby/patternmatcher/patternmatcher.go b/vendor/github.com/moby/patternmatcher/patternmatcher.go new file mode 100644 index 000000000..37a1a59ac --- /dev/null +++ b/vendor/github.com/moby/patternmatcher/patternmatcher.go @@ -0,0 +1,474 @@ +package patternmatcher + +import ( + "errors" + "os" + "path/filepath" + "regexp" + "strings" + "text/scanner" + "unicode/utf8" +) + +// escapeBytes is a bitmap used to check whether a character should be escaped when creating the regex. +var escapeBytes [8]byte + +// shouldEscape reports whether a rune should be escaped as part of the regex. +// +// This only includes characters that require escaping in regex but are also NOT valid filepath pattern characters. +// Additionally, '\' is not excluded because there is specific logic to properly handle this, as it's a path separator +// on Windows. +// +// Adapted from regexp::QuoteMeta in go stdlib. 
+// See https://cs.opensource.google/go/go/+/refs/tags/go1.17.2:src/regexp/regexp.go;l=703-715;drc=refs%2Ftags%2Fgo1.17.2 +func shouldEscape(b rune) bool { + return b < utf8.RuneSelf && escapeBytes[b%8]&(1<<(b/8)) != 0 +} + +func init() { + for _, b := range []byte(`.+()|{}$`) { + escapeBytes[b%8] |= 1 << (b / 8) + } +} + +// PatternMatcher allows checking paths against a list of patterns +type PatternMatcher struct { + patterns []*Pattern + exclusions bool +} + +// New creates a new matcher object for specific patterns that can +// be used later to match against patterns against paths +func New(patterns []string) (*PatternMatcher, error) { + pm := &PatternMatcher{ + patterns: make([]*Pattern, 0, len(patterns)), + } + for _, p := range patterns { + // Eliminate leading and trailing whitespace. + p = strings.TrimSpace(p) + if p == "" { + continue + } + p = filepath.Clean(p) + newp := &Pattern{} + if p[0] == '!' { + if len(p) == 1 { + return nil, errors.New("illegal exclusion pattern: \"!\"") + } + newp.exclusion = true + p = p[1:] + pm.exclusions = true + } + // Do some syntax checking on the pattern. + // filepath's Match() has some really weird rules that are inconsistent + // so instead of trying to dup their logic, just call Match() for its + // error state and if there is an error in the pattern return it. + // If this becomes an issue we can remove this since its really only + // needed in the error (syntax) case - which isn't really critical. + if _, err := filepath.Match(p, "."); err != nil { + return nil, err + } + newp.cleanedPattern = p + newp.dirs = strings.Split(p, string(os.PathSeparator)) + pm.patterns = append(pm.patterns, newp) + } + return pm, nil +} + +// Matches returns true if "file" matches any of the patterns +// and isn't excluded by any of the subsequent patterns. +// +// The "file" argument should be a slash-delimited path. +// +// Matches is not safe to call concurrently. 
+// +// Deprecated: This implementation is buggy (it only checks a single parent dir +// against the pattern) and will be removed soon. Use either +// MatchesOrParentMatches or MatchesUsingParentResults instead. +func (pm *PatternMatcher) Matches(file string) (bool, error) { + matched := false + file = filepath.FromSlash(file) + parentPath := filepath.Dir(file) + parentPathDirs := strings.Split(parentPath, string(os.PathSeparator)) + + for _, pattern := range pm.patterns { + // Skip evaluation if this is an inclusion and the filename + // already matched the pattern, or it's an exclusion and it has + // not matched the pattern yet. + if pattern.exclusion != matched { + continue + } + + match, err := pattern.match(file) + if err != nil { + return false, err + } + + if !match && parentPath != "." { + // Check to see if the pattern matches one of our parent dirs. + if len(pattern.dirs) <= len(parentPathDirs) { + match, _ = pattern.match(strings.Join(parentPathDirs[:len(pattern.dirs)], string(os.PathSeparator))) + } + } + + if match { + matched = !pattern.exclusion + } + } + + return matched, nil +} + +// MatchesOrParentMatches returns true if "file" matches any of the patterns +// and isn't excluded by any of the subsequent patterns. +// +// The "file" argument should be a slash-delimited path. +// +// Matches is not safe to call concurrently. +func (pm *PatternMatcher) MatchesOrParentMatches(file string) (bool, error) { + matched := false + file = filepath.FromSlash(file) + parentPath := filepath.Dir(file) + parentPathDirs := strings.Split(parentPath, string(os.PathSeparator)) + + for _, pattern := range pm.patterns { + // Skip evaluation if this is an inclusion and the filename + // already matched the pattern, or it's an exclusion and it has + // not matched the pattern yet. + if pattern.exclusion != matched { + continue + } + + match, err := pattern.match(file) + if err != nil { + return false, err + } + + if !match && parentPath != "." 
{ + // Check to see if the pattern matches one of our parent dirs. + for i := range parentPathDirs { + match, _ = pattern.match(strings.Join(parentPathDirs[:i+1], string(os.PathSeparator))) + if match { + break + } + } + } + + if match { + matched = !pattern.exclusion + } + } + + return matched, nil +} + +// MatchesUsingParentResult returns true if "file" matches any of the patterns +// and isn't excluded by any of the subsequent patterns. The functionality is +// the same as Matches, but as an optimization, the caller keeps track of +// whether the parent directory matched. +// +// The "file" argument should be a slash-delimited path. +// +// MatchesUsingParentResult is not safe to call concurrently. +// +// Deprecated: this function does behave correctly in some cases (see +// https://github.com/docker/buildx/issues/850). +// +// Use MatchesUsingParentResults instead. +func (pm *PatternMatcher) MatchesUsingParentResult(file string, parentMatched bool) (bool, error) { + matched := parentMatched + file = filepath.FromSlash(file) + + for _, pattern := range pm.patterns { + // Skip evaluation if this is an inclusion and the filename + // already matched the pattern, or it's an exclusion and it has + // not matched the pattern yet. + if pattern.exclusion != matched { + continue + } + + match, err := pattern.match(file) + if err != nil { + return false, err + } + + if match { + matched = !pattern.exclusion + } + } + return matched, nil +} + +// MatchInfo tracks information about parent dir matches while traversing a +// filesystem. +type MatchInfo struct { + parentMatched []bool +} + +// MatchesUsingParentResults returns true if "file" matches any of the patterns +// and isn't excluded by any of the subsequent patterns. The functionality is +// the same as Matches, but as an optimization, the caller passes in +// intermediate results from matching the parent directory. +// +// The "file" argument should be a slash-delimited path. 
+// +// MatchesUsingParentResults is not safe to call concurrently. +func (pm *PatternMatcher) MatchesUsingParentResults(file string, parentMatchInfo MatchInfo) (bool, MatchInfo, error) { + parentMatched := parentMatchInfo.parentMatched + if len(parentMatched) != 0 && len(parentMatched) != len(pm.patterns) { + return false, MatchInfo{}, errors.New("wrong number of values in parentMatched") + } + + file = filepath.FromSlash(file) + matched := false + + matchInfo := MatchInfo{ + parentMatched: make([]bool, len(pm.patterns)), + } + for i, pattern := range pm.patterns { + match := false + // If the parent matched this pattern, we don't need to recheck. + if len(parentMatched) != 0 { + match = parentMatched[i] + } + + if !match { + // Skip evaluation if this is an inclusion and the filename + // already matched the pattern, or it's an exclusion and it has + // not matched the pattern yet. + if pattern.exclusion != matched { + continue + } + + var err error + match, err = pattern.match(file) + if err != nil { + return false, matchInfo, err + } + + // If the zero value of MatchInfo was passed in, we don't have + // any information about the parent dir's match results, and we + // apply the same logic as MatchesOrParentMatches. + if !match && len(parentMatched) == 0 { + if parentPath := filepath.Dir(file); parentPath != "." { + parentPathDirs := strings.Split(parentPath, string(os.PathSeparator)) + // Check to see if the pattern matches one of our parent dirs. 
+ for i := range parentPathDirs { + match, _ = pattern.match(strings.Join(parentPathDirs[:i+1], string(os.PathSeparator))) + if match { + break + } + } + } + } + } + matchInfo.parentMatched[i] = match + + if match { + matched = !pattern.exclusion + } + } + return matched, matchInfo, nil +} + +// Exclusions returns true if any of the patterns define exclusions +func (pm *PatternMatcher) Exclusions() bool { + return pm.exclusions +} + +// Patterns returns array of active patterns +func (pm *PatternMatcher) Patterns() []*Pattern { + return pm.patterns +} + +// Pattern defines a single regexp used to filter file paths. +type Pattern struct { + matchType matchType + cleanedPattern string + dirs []string + regexp *regexp.Regexp + exclusion bool +} + +type matchType int + +const ( + unknownMatch matchType = iota + exactMatch + prefixMatch + suffixMatch + regexpMatch +) + +func (p *Pattern) String() string { + return p.cleanedPattern +} + +// Exclusion returns true if this pattern defines exclusion +func (p *Pattern) Exclusion() bool { + return p.exclusion +} + +func (p *Pattern) match(path string) (bool, error) { + if p.matchType == unknownMatch { + if err := p.compile(string(os.PathSeparator)); err != nil { + return false, filepath.ErrBadPattern + } + } + + switch p.matchType { + case exactMatch: + return path == p.cleanedPattern, nil + case prefixMatch: + // strip trailing ** + return strings.HasPrefix(path, p.cleanedPattern[:len(p.cleanedPattern)-2]), nil + case suffixMatch: + // strip leading ** + suffix := p.cleanedPattern[2:] + if strings.HasSuffix(path, suffix) { + return true, nil + } + // **/foo matches "foo" + return suffix[0] == os.PathSeparator && path == suffix[1:], nil + case regexpMatch: + return p.regexp.MatchString(path), nil + } + + return false, nil +} + +func (p *Pattern) compile(sl string) error { + regStr := "^" + pattern := p.cleanedPattern + // Go through the pattern and convert it to a regexp. + // We use a scanner so we can support utf-8 chars. 
+ var scan scanner.Scanner + scan.Init(strings.NewReader(pattern)) + + escSL := sl + if sl == `\` { + escSL += `\` + } + + p.matchType = exactMatch + for i := 0; scan.Peek() != scanner.EOF; i++ { + ch := scan.Next() + + if ch == '*' { + if scan.Peek() == '*' { + // is some flavor of "**" + scan.Next() + + // Treat **/ as ** so eat the "/" + if string(scan.Peek()) == sl { + scan.Next() + } + + if scan.Peek() == scanner.EOF { + // is "**EOF" - to align with .gitignore just accept all + if p.matchType == exactMatch { + p.matchType = prefixMatch + } else { + regStr += ".*" + p.matchType = regexpMatch + } + } else { + // is "**" + // Note that this allows for any # of /'s (even 0) because + // the .* will eat everything, even /'s + regStr += "(.*" + escSL + ")?" + p.matchType = regexpMatch + } + + if i == 0 { + p.matchType = suffixMatch + } + } else { + // is "*" so map it to anything but "/" + regStr += "[^" + escSL + "]*" + p.matchType = regexpMatch + } + } else if ch == '?' { + // "?" is any char except "/" + regStr += "[^" + escSL + "]" + p.matchType = regexpMatch + } else if shouldEscape(ch) { + // Escape some regexp special chars that have no meaning + // in golang's filepath.Match + regStr += `\` + string(ch) + } else if ch == '\\' { + // escape next char. 
Note that a trailing \ in the pattern + // will be left alone (but need to escape it) + if sl == `\` { + // On windows map "\" to "\\", meaning an escaped backslash, + // and then just continue because filepath.Match on + // Windows doesn't allow escaping at all + regStr += escSL + continue + } + if scan.Peek() != scanner.EOF { + regStr += `\` + string(scan.Next()) + p.matchType = regexpMatch + } else { + regStr += `\` + } + } else if ch == '[' || ch == ']' { + regStr += string(ch) + p.matchType = regexpMatch + } else { + regStr += string(ch) + } + } + + if p.matchType != regexpMatch { + return nil + } + + regStr += "$" + + re, err := regexp.Compile(regStr) + if err != nil { + return err + } + + p.regexp = re + p.matchType = regexpMatch + return nil +} + +// Matches returns true if file matches any of the patterns +// and isn't excluded by any of the subsequent patterns. +// +// This implementation is buggy (it only checks a single parent dir against the +// pattern) and will be removed soon. Use MatchesOrParentMatches instead. +func Matches(file string, patterns []string) (bool, error) { + pm, err := New(patterns) + if err != nil { + return false, err + } + file = filepath.Clean(file) + + if file == "." { + // Don't let them exclude everything, kind of silly. + return false, nil + } + + return pm.Matches(file) +} + +// MatchesOrParentMatches returns true if file matches any of the patterns +// and isn't excluded by any of the subsequent patterns. +func MatchesOrParentMatches(file string, patterns []string) (bool, error) { + pm, err := New(patterns) + if err != nil { + return false, err + } + file = filepath.Clean(file) + + if file == "." { + // Don't let them exclude everything, kind of silly. 
+ return false, nil + } + + return pm.MatchesOrParentMatches(file) +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 77f98786b..9c25d2256 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -79,6 +79,9 @@ github.com/klauspost/compress/zstd/internal/xxhash # github.com/mitchellh/go-homedir v1.1.0 ## explicit github.com/mitchellh/go-homedir +# github.com/moby/patternmatcher v0.6.0 +## explicit; go 1.19 +github.com/moby/patternmatcher # github.com/opencontainers/go-digest v1.0.0 ## explicit; go 1.13 github.com/opencontainers/go-digest