# ci-s3.yml

name: Build and Upload story Binary

on:
  workflow_dispatch:
  push:
    branches:
      - main
  # # TESTING ONLY
  # pull_request:
  #   branches:
  #     - main
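
# The workflow runs on manual dispatch and on every push to main; the
# pull_request trigger above is left commented out and intended for testing only.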

permissions:
  id-token: write
  contents: write
  pull-requests: write
  actions: write

env:
  NUM_INTERNAL_BINARIES_TO_KEEP: 50
  NUM_PUBLIC_BINARIES_TO_KEEP: 400
  S3_BUCKET: story-geth-binaries
  BIN_NAME: story
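
# The two retention counts above feed the cleanup job at the bottom of this
# workflow; the bucket holds internal builds under the "story/" prefix and
# public builds under "story-public/".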

jobs:
  # Add timestamp
  Timestamp:
    uses: storyprotocol/gha-workflows/.github/workflows/reusable-timestamp.yml@main
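
  # The reusable workflow above is assumed to record a build timestamp; within
  # this file, build_and_push depends on it only for ordering.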

  # Build and upload the story binary
  build_and_push:
    needs: Timestamp
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform: [linux-amd64, linux-arm64, darwin-amd64, darwin-arm64]
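
    # Note: the build and upload scripts below include windows branches, but no
    # windows target is currently listed in the matrix.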

    steps:
      - name: Checkout code
        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b # v4.1.5

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions
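
      # Role assumption here uses GitHub's OIDC token, which is why the workflow
      # requests the id-token: write permission above.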

      - name: Extract the version
        run: |
          PARAMS_FILE="./lib/buildinfo/buildinfo.go"
          VERSION_MAJOR=$(awk -F= '/VersionMajor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_MINOR=$(awk -F= '/VersionMinor/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_PATCH=$(awk -F= '/VersionPatch/ {gsub(/[^0-9]/, "", $2); printf "%s", $2}' $PARAMS_FILE)
          VERSION_META=$(awk -F\" '/VersionMeta/ {print $2; exit}' $PARAMS_FILE)
          VERSION="$VERSION_MAJOR.$VERSION_MINOR.$VERSION_PATCH"
          if [ "$VERSION_META" != "stable" ]; then
            VERSION+="-${VERSION_META}"
          fi

          echo "Version extracted: $VERSION"
          echo "VERSION=$VERSION" >> $GITHUB_ENV
          echo "VERSION_META=$VERSION_META" >> $GITHUB_ENV

      - name: Build the story binary
        run: |
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
          if [ "$GOOS" = "windows" ]; then
            export BIN_NAME="${BIN_NAME}.exe"
          fi

          echo "Building for $GOOS/$GOARCH..."
          cd client
          env GOOS=$GOOS GOARCH=$GOARCH go build -o $BIN_NAME > /dev/null 2>&1

          # Apply chmod only for non-windows platforms
          if [ "$GOOS" != "windows" ]; then
            chmod +x $BIN_NAME
          fi
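
      # Cross-compiling the darwin targets on the Linux runner assumes a pure-Go
      # build (no cgo requirement). Build output is discarded, but a non-zero
      # exit status still fails the step because run steps default to bash -e.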

      - name: Upload the story binary to S3
        run: |
          export TZ=America/Los_Angeles
          IFS="-" read -r GOOS GOARCH <<< "${{ matrix.platform }}"
          TIMESTAMP=$(date +%Y%m%d%H%M%S)
          HUMAN_READABLE_VERSION=$(date)
          COMMIT_HASH=$(git rev-parse --short HEAD)
          FOLDER_NAME="story-${{ matrix.platform }}-${VERSION}-${COMMIT_HASH}"
          ARCHIVE_NAME="${FOLDER_NAME}.tar.gz"
          PUBLIC_DOWNLOAD_URL="https://$S3_BUCKET.s3.us-west-1.amazonaws.com/$BIN_NAME-public/$ARCHIVE_NAME"
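          # Illustrative naming (hypothetical values): a linux-amd64 build of
          # version 1.2.3 at commit abc1234 is archived as
          # story-linux-amd64-1.2.3-abc1234.tar.gz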
          BIN_NAME_WITH_PATH=./client/$BIN_NAME
          if [ "$GOOS" = "windows" ]; then
            export BIN_NAME_WITH_PATH="${BIN_NAME_WITH_PATH}.exe"
          fi

          mkdir $FOLDER_NAME
          mv $BIN_NAME_WITH_PATH $FOLDER_NAME/

          echo "Archiving the story binary..."
          tar -czvf $ARCHIVE_NAME $FOLDER_NAME
          if [ $? -ne 0 ]; then
            echo "Failed to create the archive: $ARCHIVE_NAME"
            exit 1
          fi

          aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/$BIN_NAME-public/$ARCHIVE_NAME --quiet

          if [ "${{ matrix.platform }}" = "linux-amd64" ]; then
            echo "Uploading binary for internal use..."
            aws s3 cp $ARCHIVE_NAME s3://$S3_BUCKET/$BIN_NAME/$BIN_NAME-$TIMESTAMP --quiet

            # Update manifest file
            aws s3 cp s3://$S3_BUCKET/$BIN_NAME/manifest.txt manifest.txt --quiet || touch manifest.txt
            echo "$TIMESTAMP" >> manifest.txt
            aws s3 cp manifest.txt s3://$S3_BUCKET/$BIN_NAME/manifest.txt --quiet
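            # The manifest accumulates one timestamp line per internal upload; the
            # cleanup job below prunes the S3 objects but never rewrites this file.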

            # Update version file
            aws s3 cp s3://$S3_BUCKET/$BIN_NAME-public/version.txt version.txt --quiet || printf "File Name\t\t\tVersion\t\t\t\tCommit Hash\t\tTimestamp\n" > version.txt
            if [ "${VERSION_META}" != "stable" ]; then
              printf "$VERSION-$COMMIT_HASH\t\t$VERSION\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
            else
              printf "$VERSION-$COMMIT_HASH\t\t\t$VERSION\t\t\t\t$COMMIT_HASH\t\t\t$TIMESTAMP\n" >> version.txt
            fi
            aws s3 cp version.txt s3://$S3_BUCKET/$BIN_NAME-public/version.txt --quiet
          fi

          echo "COMMIT_HASH=$COMMIT_HASH" >> $GITHUB_ENV
          echo "PUBLIC_DOWNLOAD_URL=$PUBLIC_DOWNLOAD_URL" >> $GITHUB_ENV

      - name: Add binary version back to PR
        if: matrix.platform == 'linux-amd64'
        uses: mshick/add-pr-comment@v2
        with:
          message: |
            ### Binary uploaded successfully 🎉
            📦 **Version Name:** ${{ env.VERSION }}-${{ env.COMMIT_HASH }}
            📦 **Download Source:** [AWS S3](${{ env.PUBLIC_DOWNLOAD_URL }})
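
      # This comment step is presumably most useful with the commented-out
      # pull_request trigger enabled; on a plain push to main there may be no
      # open pull request to receive the comment.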

  cleanup:
    runs-on: ubuntu-latest
    needs: build_and_push
    steps:
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_SERVICE_STAGING }}:role/iac-max-role
          aws-region: us-west-1
          role-session-name: github-actions

      - name: Cleanup internal binaries
        run: |
          cleanup_s3() {
            PREFIX=$1
            KEEP=$2
            echo "Cleaning up in bucket $S3_BUCKET with prefix: $PREFIX, keeping latest $KEEP binaries"
            aws s3api list-objects-v2 --bucket $S3_BUCKET --prefix $PREFIX --query "sort_by(Contents,&LastModified)[*].Key" > all_binaries.json

            # Extract the list of keys, remove the latest $KEEP binaries
            BINARIES_TO_DELETE=$(jq -r ".[0:-${KEEP}][]" all_binaries.json)
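            # sort_by(..., &LastModified) lists keys oldest first, so the jq slice
            # above excludes the newest $KEEP keys and leaves only older objects to delete.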

            if [ -n "$BINARIES_TO_DELETE" ]; then
              # Delete old binaries
              for key in $BINARIES_TO_DELETE; do
                aws s3 rm s3://$S3_BUCKET/$key --quiet
              done
              echo "Deleted old binaries: $BINARIES_TO_DELETE"
            else
              echo "No old binaries to delete."
            fi
          }

          # Cleanup internal binaries
          cleanup_s3 "${BIN_NAME}/" "${NUM_INTERNAL_BINARIES_TO_KEEP}"

          # Cleanup public binaries
          cleanup_s3 "${BIN_NAME}-public/" "${NUM_PUBLIC_BINARIES_TO_KEEP}"