(Commit by workflow script) Update generated documentation. #428
Workflow file for this run

# Flow
# Copyright 2023 Akamai Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in
# compliance with the License. You may obtain a copy
# of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in
# writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing
# permissions and limitations under the License.
name: Flow pipeline
# TODO: Code reuse somehow versus Flow-IPC workflow counterpart file? Could apply to major parts of this file.
# The copy/pasting and "see comment in other file" types of comments are a bit unseemly.
# See comment in Flow-IPC workflow counterpart. We follow the same philosophy here.
on:
pull_request:
branches:
- main
push:
branches:
- main
tags:
- v*
workflow_dispatch:
jobs:
# See comments in Flow-IPC workflow counterpart. We use same techniques.
set-vars:
runs-on: ubuntu-latest
steps:
- name: Set variables/constants for subsequent use
run: |
# Set variables/constants for subsequent use.
outputs:
doc-commit-message: (Commit by workflow script) Update generated documentation.
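# (Downstream jobs consume this via ${{ needs.set-vars.outputs.doc-commit-message }}; see the `setup`
# and `doc-and-release` jobs below.)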
# The impetus behind this is to gate whether the real jobs below actually need to run. It can of course also
# compute other things as needed.
# TODO: Already mentioned in a TODO above, but this stuff here is just copy/paste galore. Should really figure out
# something to avoid it in this `setup` area.
setup:
needs: set-vars
runs-on: ubuntu-latest
steps:
- name: Checkout VERSION file and tag history
uses: actions/checkout@v4
with:
sparse-checkout: VERSION
fetch-depth: 0 # Get all history regarding tags. We'll need that for VERSION checking below.
- name: Grab repository version from `VERSION` file
id: repo_version
run: |
# Grab repository version from `VERSION` file.
if [ ! -e VERSION ]; then
echo '::error ::No VERSION file in repository root. One must exist.'
exit 1
fi
VERSION=`cat VERSION`
echo "Determined version: [$VERSION]."
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: (On release creation only) Check repository version against release tag name
if: success() && (!cancelled()) && startsWith(github.ref, 'refs/tags/v')
run: |
# Check repository version against release tag name.
VERSION=${{ steps.repo_version.outputs.version }}
TAG_VERSION=${GITHUB_REF#refs/tags/v}
if [ "$VERSION" = "$TAG_VERSION" ]; then
echo ":notice ::Tag name and repo version are an exact match. Looks good."
elif [[ "$TAG_VERSION" == "$VERSION-"* ]]; then
echo "::notice ::Tag name [v$TAG_VERSION] starts with repo version (pre-release). Allowed."
else
echo "::error ::VERSION file version v[$VERSION] must = release tag name [v$TAG_VERSION]."
echo "::error ::Suggest deleting release and tag, fixing VERSION if needed, and re-creating release/tag."
exit 1
fi
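# Illustration of the check above (hypothetical values): VERSION=2.0.0 accepts tag v2.0.0 (exact match)
# or v2.0.0-rc.1 (pre-release prefix); anything else, e.g. v2.0.1, fails the step.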
# TODO: Would be nice to also grab release name and ensure it equals tag name v<...>.
# Could be a separate step or just execute here if the above succeeded so far. Requires API fetch, so it's
# a few lines.
- name: (Except on release creation) Check repository version against already-existing release tag names
if: success() && (!cancelled()) && (!startsWith(github.ref, 'refs/tags/v'))
run: |
# Check repository version against already-existing release tag names.
# Do not fail on account of VERSION in this path, however; just warn or gently inform.
VERSION=${{ steps.repo_version.outputs.version }}
if git rev-parse "v$VERSION" >/dev/null 2>&1; then
echo "::warning ::Tag [v$VERSION] already exists. The repo/VERSION file will require "\
"a bump, before the next release. However this should be done *just* ahead of release creation and must "\
"follow semantic versioning conventions, especially regarding when to bump major/minor/patch components. "\
"When ready to do this consult [https://semver.org/] as needed."
elif git tag | grep -q "^v$VERSION-"; then
echo "::notice ::Since pre-release tag(s) [v$VERSION-*] already exist(s), but no tag "\
"[v$VERSION] exists, this repo state is likely intended for a later pre-release or official release. "\
"Cool! However, reminder: "\
"The repo/VERSION version should be decided/finalized *just* ahead of release creation and must "\
"follow semantic versioning conventions, especially regarding when to bump major/minor/patch components. "\
"When ready to do this consult [https://semver.org/] as needed."
else
echo "::notice ::Since no tag [v$VERSION] or [v$VERSION-*] exists, this repo state "\
"is likely intended for a future release. Cool! However, reminder: "\
"The repo/VERSION version should be decided/finalized *just* ahead of release creation and must "\
"follow semantic versioning conventions, especially regarding when to bump major/minor/patch components. "\
"When ready to do this consult [https://semver.org/] as needed."
fi
- id: compute_proceed_else_not
name: Compute whether the main jobs should proceed or not
# See comments in Flow-IPC workflow counterpart. We use same techniques.
run: |
# Compute whether the main jobs should proceed or not.
TMP_MSG=/tmp/flow-pipeline-head-cmt-msg.txt
cat <<'FLOW_PIPELINE_HEAD_CMD_MSG_EOF' > $TMP_MSG
${{ github.event.head_commit.message }}
FLOW_PIPELINE_HEAD_CMD_MSG_EOF
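# In short: proceed unless this is a push to `main` whose head commit message's first line is exactly the
# doc-commit-message set above (i.e., unless the only change is our own docs check-in from `doc-and-release`).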
if [ '${{ github.ref }}' != 'refs/heads/main' ] || \
[ '${{ github.event_name }}' != 'push' ] || \
! { head --lines=1 $TMP_MSG | fgrep -xq '${{ needs.set-vars.outputs.doc-commit-message }}'; }; then
echo 'proceed-else-not=true' >> $GITHUB_OUTPUT
else
echo 'proceed-else-not=false' >> $GITHUB_OUTPUT
echo 'The real jobs will not run: an earlier `doc-and-release` job checked generated docs into `main`.'
echo 'That is not a source change and requires no actual pipeline to execute.'
fi
outputs:
proceed-else-not: ${{ steps.compute_proceed_else_not.outputs.proceed-else-not }}
build-and-test:
needs: [setup, set-vars]
if: |
needs.setup.outputs.proceed-else-not == 'true'
strategy:
fail-fast: false
matrix:
compiler:
- id: gcc-9
name: gcc
version: 9
c-path: /usr/bin/gcc-9
cpp-path: /usr/bin/g++-9
- id: gcc-10
name: gcc
version: 10
c-path: /usr/bin/gcc-10
cpp-path: /usr/bin/g++-10
- id: gcc-11
name: gcc
version: 11
c-path: /usr/bin/gcc-11
cpp-path: /usr/bin/g++-11
- id: gcc-13
name: gcc
version: 13
c-path: /usr/bin/gcc-13
cpp-path: /usr/bin/g++-13
- id: clang-13
name: clang
version: 13
c-path: /usr/bin/clang-13
cpp-path: /usr/bin/clang++-13
- id: clang-15
name: clang
version: 15
c-path: /usr/bin/clang-15
cpp-path: /usr/bin/clang++-15
- id: clang-16
name: clang
version: 16
c-path: /usr/bin/clang-16
cpp-path: /usr/bin/clang++-16
install: True
- id: clang-17
name: clang
version: 17
c-path: /usr/bin/clang-17
cpp-path: /usr/bin/clang++-17
install: True
build-test-cfg:
- id: debug
conan-profile-build-type: Debug
- id: release
conan-profile-build-type: Release
- id: relwithdebinfo
conan-profile-build-type: RelWithDebInfo
- id: minsizerel
conan-profile-build-type: MinSizeRel
# The sanitized (-*san) build/test configs follow. Here we follow Flow-IPC workflow counterpart.
# This can be a non-trivial topic -- though by definition Flow's situation is simpler than Flow-IPC's,
# as the latter includes the former entirely -- so if changing or trying to understand sanitizer-related
# stuff, *please* check the Flow-IPC workflow counterpart file, particularly comments near each thing.
# Keeping comments here light.
#
# Note that, as of this writing, MSAN support is present in Flow-IPC workflow counterpart but is disabled
# for reasons explained therein. We therefore omit it here entirely; no need to have it taking up space here
# too.
- id: relwithdebinfo-asan
conan-profile-build-type: RelWithDebInfo
conan-profile-custom-conf: |
tools.build:cflags = ["-fsanitize=address", "-fno-omit-frame-pointer"]
tools.build:cxxflags = ["-fsanitize=address", "-fno-omit-frame-pointer"]
tools.build:sharedlinkflags = ["-fsanitize=address"]
tools.build:exelinkflags = ["-fsanitize=address"]
conan-profile-custom-settings: |
compiler.sanitizer = address
conan-custom-settings-defs: | # Could we avoid copy/pasting these three times?
data['compiler']['gcc']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
data['compiler']['clang']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
sanitizer-name: asan
no-lto: True
- id: relwithdebinfo-ubsan
conan-profile-build-type: RelWithDebInfo
conan-profile-custom-conf: |
tools.build:cflags = ["-fsanitize=undefined", "-fno-omit-frame-pointer"]
tools.build:cxxflags = ["-fsanitize=undefined", "-fno-omit-frame-pointer"]
tools.build:sharedlinkflags = ["-fsanitize=undefined"]
tools.build:exelinkflags = ["-fsanitize=undefined"]
conan-profile-custom-settings: |
compiler.sanitizer = undefined
conan-custom-settings-defs: |
data['compiler']['gcc']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
data['compiler']['clang']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
sanitizer-name: ubsan
no-lto: True
- id: relwithdebinfo-tsan
conan-profile-build-type: RelWithDebInfo
conan-profile-custom-conf: |
tools.build:cflags = ["-fsanitize=thread"]
tools.build:cxxflags = ["-fsanitize=thread"]
tools.build:sharedlinkflags = ["-fsanitize=thread"]
tools.build:exelinkflags = ["-fsanitize=thread"]
conan-profile-custom-settings: |
compiler.sanitizer = thread
conan-custom-settings-defs: |
data['compiler']['gcc']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
data['compiler']['clang']['sanitizer'] = ['None', 'address', 'thread', 'memory', 'undefined']
sanitizer-name: tsan
no-lto: True
# Again, these exclusions are explained in Flow-IPC workflow counterpart.
exclude:
- compiler: { id: gcc-9 }
build-test-cfg: { id: relwithdebinfo-asan }
- compiler: { id: gcc-10 }
build-test-cfg: { id: relwithdebinfo-asan }
- compiler: { id: gcc-11 }
build-test-cfg: { id: relwithdebinfo-asan }
- compiler: { id: gcc-13 }
build-test-cfg: { id: relwithdebinfo-asan }
- compiler: { id: clang-13 }
build-test-cfg: { id: relwithdebinfo-asan }
- compiler: { id: gcc-9 }
build-test-cfg: { id: relwithdebinfo-ubsan }
- compiler: { id: gcc-10 }
build-test-cfg: { id: relwithdebinfo-ubsan }
- compiler: { id: gcc-11 }
build-test-cfg: { id: relwithdebinfo-ubsan }
- compiler: { id: gcc-13 }
build-test-cfg: { id: relwithdebinfo-ubsan }
- compiler: { id: clang-13 }
build-test-cfg: { id: relwithdebinfo-ubsan }
- compiler: { id: gcc-9 }
build-test-cfg: { id: relwithdebinfo-tsan }
- compiler: { id: gcc-10 }
build-test-cfg: { id: relwithdebinfo-tsan }
- compiler: { id: gcc-11 }
build-test-cfg: { id: relwithdebinfo-tsan }
- compiler: { id: gcc-13 }
build-test-cfg: { id: relwithdebinfo-tsan }
- compiler: { id: clang-13 }
build-test-cfg: { id: relwithdebinfo-tsan }
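# Net effect of the exclusions: the sanitizer (-*san) configs run only with clang-15, clang-16, and clang-17.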
# Not using ubuntu-latest, so as to avoid surprises with OS upgrades and such.
runs-on: ubuntu-22.04
name: ${{ matrix.compiler.id }}-${{ matrix.build-test-cfg.id }}
env:
build-dir: ${{ github.workspace }}/build/${{ matrix.build-test-cfg.conan-profile-build-type }}
install-root-dir: ${{ github.workspace }}/install/${{ matrix.build-test-cfg.conan-profile-build-type }}
# (Unfortunately cannot refer to earlier-assigned `env.` entries within subsequent ones.)
install-dir: ${{ github.workspace }}/install/${{ matrix.build-test-cfg.conan-profile-build-type }}/usr/local
# For the remaining env entries please see comments in Flow-IPC workflow counterpart. We use same techniques.
san-suppress-cfg-file: ${{ github.workspace }}/install/${{ matrix.build-test-cfg.conan-profile-build-type }}/usr/local/bin/san_suppressions.cfg
san-suppress-cfg-in-file1: sanitize/${{ matrix.build-test-cfg.sanitizer-name }}/suppressions_${{ matrix.compiler.name }}.cfg
san-suppress-cfg-in-file2: sanitize/${{ matrix.build-test-cfg.sanitizer-name }}/suppressions_${{ matrix.compiler.name }}_${{ matrix.compiler.version }}.cfg
setup-tests-env: |
if [ '${{ matrix.build-test-cfg.sanitizer-name }}' = asan ]; then
export ASAN_OPTIONS='disable_coredump=0'
echo "ASAN_OPTIONS = [$ASAN_OPTIONS]."
elif [ '${{ matrix.build-test-cfg.sanitizer-name }}' = ubsan ]; then
export SAN_SUPP=1
export SAN_SUPP_CFG=${{ github.workspace }}/install/${{ matrix.build-test-cfg.conan-profile-build-type }}/usr/local/bin/san_suppressions.cfg
export UBSAN_OPTIONS="disable_coredump=0 print_stacktrace=1 suppressions=$SAN_SUPP_CFG"
echo "UBSAN_OPTIONS = [$UBSAN_OPTIONS]."
elif [ '${{ matrix.build-test-cfg.sanitizer-name }}' = tsan ]; then
export SAN_SUPP=1
export SAN_SUPP_CFG=${{ github.workspace }}/install/${{ matrix.build-test-cfg.conan-profile-build-type }}/usr/local/bin/san_suppressions.cfg
export TSAN_OPTIONS="disable_coredump=0 second_deadlock_stack=1 suppressions=$SAN_SUPP_CFG"
echo "TSAN_OPTIONS = [$TSAN_OPTIONS]."
fi
# TODO: Add more *SAN here.
if [ "$SAN_SUPP" != '' ]; then
echo 'Sanitizer [${{ matrix.build-test-cfg.sanitizer-name }}] suppressions cfg contents:'
echo '[[[ file--'
cat $SAN_SUPP_CFG
echo '--file ]]]'
fi
setup-run-env: |
if [ '${{ matrix.build-test-cfg.sanitizer-name }}' = asan ]; then
export BUILT_CMD_PREFIX='setarch -R'
echo 'Shall use `setarch -R` to disable ASLR to avoid ASAN-built binary misbehavior.'
elif [ '${{ matrix.build-test-cfg.sanitizer-name }}' = tsan ]; then
export BUILT_CMD_PREFIX='setarch -R'
echo 'Shall use `setarch -R` to disable ASLR to avoid TSAN-built binary misbehavior.'
fi
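# (setup-tests-env and setup-run-env above are not executed here; later `run:` scripts splice them in
# verbatim via ${{ env.setup-tests-env }} and ${{ env.setup-run-env }} before launching tests/demos.)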
steps:
- name: Update available software list for apt-get
run: sudo apt-get update
- name: Install clang compiler
if: |
matrix.compiler.install && (matrix.compiler.name == 'clang')
run: |
# Install clang compiler.
wget https://apt.llvm.org/llvm.sh
chmod u+x llvm.sh
sudo ./llvm.sh ${{ matrix.compiler.version }}
- name: Install gcc compiler
if: |
matrix.compiler.install && (matrix.compiler.name == 'gcc')
run: |
# Install gcc compiler.
sudo apt-get install -y software-properties-common
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
sudo apt-get update
sudo apt-get install -y gcc-${{ matrix.compiler.version }} g++-${{ matrix.compiler.version }}
# We get highest 1.x, instead of 2+, essentially to stay even with the Flow-IPC pipeline (and its similar workflow
# file). See explanation in that file. The to-do there applies to us as well (both should be done
# at ~the same time).
- name: Install the latest version of Conan which is less than 2
run: pip install 'conan<2'
- name: Checkout `flow` repository
uses: actions/checkout@v4
- name: Add custom settings for Conan packages
if: |
matrix.build-test-cfg.conan-custom-settings-defs
run: |
# Add custom settings for Conan packages.
conan config init
pip install PyYAML
CONAN_SETTINGS_PATH=$(conan config home)/settings.yml
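# Splice the matrix-supplied conan-custom-settings-defs lines into Conan's settings.yml, so that the
# custom `compiler.sanitizer` setting used in the profile below is accepted by Conan.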
python -c "
import yaml
with open('$CONAN_SETTINGS_PATH', 'r') as file:
data = yaml.safe_load(file)
${{ matrix.build-test-cfg.conan-custom-settings-defs }}
with open('$CONAN_SETTINGS_PATH', 'w') as file:
yaml.dump(data, file)
"
- name: Create Conan profile
run: |
# Create Conan profile.
cat <<'EOF' > conan_profile
[settings]
compiler = ${{ matrix.compiler.name }}
compiler.version = ${{ matrix.compiler.version }}
compiler.cppstd = 17
# TODO: Consider testing with LLVM-libc++ also (with clang anyway).
compiler.libcxx = libstdc++11
arch = x86_64
os = Linux
build_type = ${{ matrix.build-test-cfg.conan-profile-build-type }}
${{ matrix.build-test-cfg.conan-profile-custom-settings }}
[conf]
tools.build:compiler_executables = {"c": "${{ matrix.compiler.c-path }}", "cpp": "${{ matrix.compiler.cpp-path }}"}
tools.env.virtualenv:auto_use = True
${{ matrix.build-test-cfg.conan-profile-custom-conf }}
[buildenv]
CC = ${{ matrix.compiler.c-path }}
CXX = ${{ matrix.compiler.cpp-path }}
[options]
flow:build = True
flow:doc = False
EOF
- name: Install Flow dependencies with Conan using the profile
run: |
# Install Flow dependencies with Conan using the profile.
${{ env.setup-run-env }}
# At least `configure` mini-programs are built with our build settings, so $BUILT_CMD_PREFIX is required.
$BUILT_CMD_PREFIX \
conan install . --profile:build conan_profile --profile:host conan_profile --build missing || FAILED=yep
# Dep build failure is easy to diagnose from stdout/err, unless it happens during a `configure`;
# then it'll often print something insane like "checking size of void *... 0" and suggest looking at
# config.log... which will usually reveal the true problem, like a seg-fault or TSAN-triggered problem
# running the configure-generated test program. So let's get all the `config.log`s we can find
# and put them where later the log artifact tarball will be sourced; recreate the same dir structure as
# in the Conan dep build area, but include only the `config.log` files. Note: This is probably only truly
# useful if indeed the above command fails; but can't hurt to include it regardless.
SRC=/home/runner/.conan/data
DST=${{ env.install-dir }}/bin/logs/dep_build/conan-data
find $SRC -name config.log | while read -r config_log; do
# Remove the $SRC part of the path to get a relative path.
REL_PATH="${config_log#$SRC/}"
# Determine the directory path within $DST where the config.log should be copied.
DST_DIR="$DST/${REL_PATH%/*}"
mkdir -p "$DST_DIR"
cp -v "$config_log" "$DST_DIR"
done
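# Now surface the deferred `conan install` result: fail this step if it failed above.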
[ "$FAILED" != yep ]
- name: Build library and demos/tests with Conan
run: |
# Build library and demos/tests with Conan.
${{ env.setup-run-env }}
# At least the capnp compiler binary is built with our build settings, so $BUILT_CMD_PREFIX is required.
$BUILT_CMD_PREFIX conan build .
- name: Install built targets with Makefile
run: |
make install \
--directory ${{ env.build-dir }} DESTDIR=${{ env.install-root-dir }}
# Save runner space: blow away build dir after install.
rm -rf ${{ env.build-dir }}
# From now on use !cancelled() to try to run any test/demo that exists regardless
# of preceding failures if any. Same-ish (always()) with the log-upload at the end.
# Worst-case they'll all fail in super-basic ways and immediately; no extra harm done.
# From now on save logs in install-dir/bin/logs. They will be tarred up and uploaded
# as artifacts at the end. Please see comments in Flow-IPC workflow counterpart.
# We use the same techniques and reasoning. Those may seem overkill given the small number
# of tests/demos below and their small amount of output (as of this writing); but this can
# and will grow. The techniques will still apply.
- name: Run test/demo [NetFlow echo]
run: |
# Run test/demo [NetFlow echo].
cd ${{ env.install-dir }}/bin
OUT_DIR=logs/net_flow_echo
mkdir -p $OUT_DIR
SUPP_DIR_A=${{ github.workspace }}/src
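# Assemble the sanitizer suppressions file from the generic and compiler-version-specific pieces, if any;
# for non-sanitized configs neither input exists, and the resulting file is simply empty.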
{ cat $SUPP_DIR_A/${{ env.san-suppress-cfg-in-file1 }} $SUPP_DIR_A/${{ env.san-suppress-cfg-in-file2 }} \
> ${{ env.san-suppress-cfg-file }} 2> /dev/null; } || true
${{ env.setup-tests-env }}
${{ env.setup-run-env }}
$BUILT_CMD_PREFIX ./net_flow_echo_srv.exec 8888 localhost > $OUT_DIR/srv.console.log 2>&1 &
sleep 1
if $BUILT_CMD_PREFIX ./net_flow_echo_cli.exec 1 127.0.0.1 8888 'Hello world!' > $OUT_DIR/cli.console.log 2>&1; \
then EC=0; else EC=$?; fi
echo "Client finished with code [$EC]."
echo 'Server may print exception due to SIGTERM interruption; that is fine.'
kill `pidof $PWD/net_flow_echo_srv.exec`
# The executables also produce Flow logs with these hard-coded names in CWD.
touch flow_echo_srv.log # Create if needed just in case.
touch flow_echo_cli.log
mv flow_echo_srv.log $OUT_DIR/srv.log
mv flow_echo_cli.log $OUT_DIR/cli.log
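# Fail this step if the echo client exited non-zero.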
[ $EC -eq 0 ]
# (More tests/demos here as needed.)
- name: Check test/demo logs for non-fatal sanitizer error(s)
if: |
(!cancelled()) && (matrix.build-test-cfg.sanitizer-name == 'ubsan')
run: |
# Check test/demo logs for non-fatal sanitizer error(s)
cd ${{ env.install-dir }}/bin/logs
# grep returns 0 if 1+ found, 1 if none found, 2+ on error. So check results explicitly instead of -e.
# Namely, for us, the only OK result is an empty stdout and empty stderr.
# Otherwise either grep failed (unlikely) or found 1+ problems; either way redirection target will
# be not-empty, and we will force failure.
{ grep 'SUMMARY: UndefinedBehaviorSanitizer:' `find . -type f` > san_failure_summaries.txt 2>&1; } || true
if [ -s san_failure_summaries.txt ]; then
echo 'Error(s) found. Pipeline will fail. Failures summarized below.'
echo 'Please peruse uploaded log artifacts, resolve the issues, and run pipeline again.'
echo '[[[ file--'
cat san_failure_summaries.txt
echo '--file ]]]'
false
fi
echo 'No errors found in logs.'
- name: Package test/demo logs tarball
if: |
always()
run: |
# Package test/demo logs tarball.
cd ${{ env.install-dir }}/bin
tar cvzf logs.tgz logs
rm -rf logs # Save runner space.
- name: Upload test/demo logs (please inspect if failure(s) seen above)
if: |
always()
uses: actions/upload-artifact@v4
with:
name: flow-test-logs-${{ matrix.compiler.id }}-${{ matrix.build-test-cfg.id }}
path: ${{ env.install-dir }}/bin/logs.tgz
doc-and-release:
needs: [setup, set-vars]
if: |
needs.setup.outputs.proceed-else-not == 'true'
strategy:
fail-fast: false
matrix:
compiler:
# Pick a reasonably modern but pre-installed compiler for building Doxygen/etc.
- id: clang-15
name: clang
version: 15
c-path: /usr/bin/clang-15
cpp-path: /usr/bin/clang++-15
build-cfg:
- id: release
conan-profile-build-type: Release
conan-preset: release
runs-on: ubuntu-22.04
name: doc-${{ matrix.compiler.id }}-${{ matrix.build-cfg.id }}
steps:
- name: Update available software list for apt-get
run: sudo apt-get update
- name: Install Flow dependencies (like Graphviz) with apt-get
run: sudo apt-get install -y graphviz
- name: Install the latest version of Conan which is less than 2
run: pip install 'conan<2'
- name: Checkout `flow` repository
uses: actions/checkout@v4
# See comments in Flow-IPC workflow counterpart. We use same techniques.
- name: (On release creation only) Signal Pages about release (web site should update)
if: success() && (!cancelled()) && startsWith(github.ref, 'refs/tags/v')
run: |
# Signal Pages about release (web site should update).
curl --fail-with-body -X POST \
-H 'Accept: application/vnd.github.v3+json' \
-H 'Authorization: token ${{ secrets.GIT_BOT_PAT }}' \
'https://api.github.com/repos/Flow-IPC/flow-ipc.github.io/dispatches' \
-d '{"event_type": "flow-sync-doc-event", "client_payload": {"version": "${{ github.ref_name }}"}}'
- name: Create Conan profile
run: |
# Create Conan profile.
cat <<EOF > conan_profile
[settings]
compiler = ${{ matrix.compiler.name }}
compiler.version = ${{ matrix.compiler.version }}
compiler.cppstd = 17
compiler.libcxx = libstdc++11
arch = x86_64
os = Linux
build_type = ${{ matrix.build-cfg.conan-profile-build-type }}
[conf]
tools.build:compiler_executables = { "c": "${{ matrix.compiler.c-path }}", "cpp": "${{ matrix.compiler.cpp-path }}" }
tools.env.virtualenv:auto_use = True
[buildenv]
CC = ${{ matrix.compiler.c-path }}
CXX = ${{ matrix.compiler.cpp-path }}
[options]
flow:build = False
flow:doc = True
EOF
- name: Install Flow dependencies (like Doxygen) with Conan using the profile
run: conan install . --profile:build conan_profile --profile:host conan_profile --build missing
- name: Generate code documentation using Conan and Doxygen
run: conan build .
- name: Create documentation tarball (full docs, API-only docs, landing page)
run: |
# Create documentation tarball (full docs, API-only docs, landing page).
cd ${{ github.workspace }}/doc/flow_doc
${{ github.workspace }}/tools/doc/stage_generated_docs.sh \
${{ github.workspace }}/build/${{ matrix.build-cfg.conan-profile-build-type }}
- name: Upload documentation tarball
uses: actions/upload-artifact@v4
with:
name: flow-doc
path: ${{ github.workspace }}/doc/flow_doc.tgz
# See comments in Flow-IPC workflow counterpart. We use same techniques.
- name: (`main` branch only) Check-in generated documentation directly into source control
id: doc_check_in
if: success() && (!cancelled()) && (github.ref == 'refs/heads/main')
run: |
# Check-in generated documentation directly into source control.
echo 'generated/ docs have been added or replaced locally; mirroring this into checked-in tree.'
git config user.name github-actions
git config user.email github-actions@github.com
git config --local http.https://github.com/.extraheader \
"AUTHORIZATION: basic $(echo -n x-access-token:${{ secrets.GIT_BOT_PAT }} | base64)"
cd ${{ github.workspace }}/doc/flow_doc
git rm -r --cached generated || echo 'No generated/ currently checked in; no problem.'
git add generated
git commit -m '${{ needs.set-vars.outputs.doc-commit-message }}'
git push origin main
# See comments in Flow-IPC workflow counterpart. We use same techniques.
- name: (`main` branch only) Signal Pages that we have updated the generated docs (web site should update)
if: success() && (!cancelled()) && (steps.doc_check_in.outcome != 'skipped')
run: |
# Signal Pages that we have updated the generated docs (web site should update).
curl --fail-with-body -X POST \
-H 'Accept: application/vnd.github.v3+json' \
-H 'Authorization: token ${{ secrets.GIT_BOT_PAT }}' \
'https://api.github.com/repos/Flow-IPC/flow-ipc.github.io/dispatches' \
-d '{"event_type": "flow-sync-doc-event"}'