
Commit
Merge branch 'main' into dependabot/npm_and_yarn/frontend/fast-loops-1.1.4
nayib-jose-gloria authored Nov 15, 2024
2 parents 15b584a + fada613 commit 6e32c5b
Showing 259 changed files with 5,698 additions and 2,959 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-images-and-create-deployment.yml
@@ -45,7 +45,7 @@ jobs:
uses: avakar/create-deployment@v1
# To stop deployment to a specific DEPLOYMENT_STAGE, remove it from the condition below.
# The DEPLOYMENT_STAGE values that may be present are dev, stage, and prod.
if: env.DEPLOYMENT_STAGE == 'prod' || env.DEPLOYMENT_STAGE == 'stage' || env.DEPLOYMENT_STAGE == 'dev'
if: env.DEPLOYMENT_STAGE == 'prod' || env.DEPLOYMENT_STAGE == 'stage'
with:
auto_merge: false
environment: ${{ env.DEPLOYMENT_STAGE }}
2 changes: 1 addition & 1 deletion .github/workflows/deploy-happy-stack.yml
@@ -178,7 +178,7 @@ jobs:
npx playwright install --with-deps
cp src/configs/${DEPLOYMENT_STAGE}.js src/configs/configs.js
DEBUG=pw:api npm run e2e-${DEPLOYMENT_STAGE}-logged-in
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
if: always()
with:
name: logged-in-test-results
9 changes: 9 additions & 0 deletions .github/workflows/push-tests.yml
@@ -108,6 +108,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
@@ -168,6 +169,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
@@ -228,6 +230,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
@@ -288,13 +291,15 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
with:
name: allure-results
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/allure-results
retention-days: 20
include-hidden-files: true

- uses: 8398a7/action-slack@v3.15.0
with:
@@ -347,6 +352,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true
- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
with:
@@ -404,6 +410,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true
- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
with:
@@ -462,6 +469,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
@@ -521,6 +529,7 @@ jobs:
name: coverage
path: /home/runner/work/single-cell-data-portal/single-cell-data-portal/.coverage*
retention-days: 3
include-hidden-files: true

- name: Upload Allure results as an artifact
uses: actions/upload-artifact@v3
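Each upload step above gains `include-hidden-files: true` because the coverage shards are dot-prefixed (`.coverage*`) and therefore "hidden"; recent `actions/upload-artifact` releases exclude hidden files by default, which would silently drop them. A minimal local sketch of what a downstream job presumably does with the shards (the `coverage combine` step is an assumption, not shown in this diff):

```
# Coverage shards are hidden dotfiles, so an artifact upload that
# skips hidden files would miss them entirely.
ls -a .coverage*

# Merge the per-process shards into a single .coverage database,
# then render a report (requires the coverage.py package).
coverage combine .coverage*
coverage report
```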
2 changes: 1 addition & 1 deletion .github/workflows/rdev-tests.yml
@@ -226,7 +226,7 @@ jobs:
- name: Run e2e Logged In tests
run: |
DEBUG=pw:api RDEV_LINK=https://${{ env.STACK_NAME }}-frontend.rdev.single-cell.czi.technology npm run e2e-rdev-logged-in-ci
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v4
if: always()
with:
name: logged-in-test-results
66 changes: 66 additions & 0 deletions .happy/terraform/modules/batch/main.tf
@@ -78,6 +78,10 @@ resource aws_batch_job_definition dataset_metadata_update {
"name": "DATASETS_BUCKET",
"value": "${var.datasets_bucket}"
},
{
"name": "SPATIAL_DEEP_ZOOM_BUCKET",
"value": "${var.spatial_deep_zoom_bucket}"
},
{
"name": "DEPLOYMENT_STAGE",
"value": "${var.deployment_stage}"
@@ -119,6 +123,68 @@ resource aws_batch_job_definition dataset_metadata_update {
})
}

resource aws_batch_job_definition rollback {
type = "container"
name = "dp-${var.deployment_stage}-${var.custom_stack_name}-rollback"
container_properties = jsonencode({
"command": ["python3", "-m", "backend.layers.processing.rollback"],
"jobRoleArn": "${var.batch_role_arn}",
"image": "${var.image}",
"memory": 8000,
"environment": [
{
"name": "ARTIFACT_BUCKET",
"value": "${var.artifact_bucket}"
},
{
"name": "CELLXGENE_BUCKET",
"value": "${var.cellxgene_bucket}"
},
{
"name": "DATASETS_BUCKET",
"value": "${var.datasets_bucket}"
},
{
"name": "DEPLOYMENT_STAGE",
"value": "${var.deployment_stage}"
},
{
"name": "AWS_DEFAULT_REGION",
"value": "${data.aws_region.current.name}"
},
{
"name": "REMOTE_DEV_PREFIX",
"value": "${var.remote_dev_prefix}"
}
],
"vcpus": 1,
"linuxParameters": {
"maxSwap": 0,
"swappiness": 0
},
"retryStrategy": {
"attempts": 3,
"evaluateOnExit": [
{
"action": "RETRY",
"onReason": "Task failed to start"
},
{
"action": "EXIT",
"onReason": "*"
}
]
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-group": "${aws_cloudwatch_log_group.cloud_watch_logs_group.id}",
"awslogs-region": "${data.aws_region.current.name}"
}
}
})
}

resource aws_cloudwatch_log_group cloud_watch_logs_group {
retention_in_days = 365
name = "/dp/${var.deployment_stage}/${var.custom_stack_name}/upload"
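With the `rollback` job definition above in place, a rollback can be kicked off as a one-off Batch job. A hedged sketch using the AWS CLI — the stage, stack, and job queue names are hypothetical placeholders, since the queue does not appear in this diff:

```
# Submit the rollback job defined above. The job-definition name follows
# the "dp-<stage>-<stack>-rollback" pattern in main.tf; adjust the
# placeholders for your environment.
DEPLOYMENT_STAGE=dev
STACK=my-rdev-stack

aws batch submit-job \
  --job-name "rollback-$(date +%s)" \
  --job-definition "dp-${DEPLOYMENT_STAGE}-${STACK}-rollback" \
  --job-queue "dp-${DEPLOYMENT_STAGE}"
```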
5 changes: 5 additions & 0 deletions .happy/terraform/modules/batch/variables.tf
@@ -13,6 +13,11 @@ variable datasets_bucket {
description = "Datasets public-access bucket name"
}

variable spatial_deep_zoom_bucket {
type = string
description = "Bucket for Visium Dataset spatial deep zoom images"
}

variable image {
type = string
description = "Image name"
2 changes: 2 additions & 0 deletions .happy/terraform/modules/ecs-stack/main.tf
@@ -82,6 +82,7 @@ locals {
artifact_bucket = try(local.secret["s3_buckets"]["artifact"]["name"], "")
cellxgene_bucket = try(local.secret["s3_buckets"]["cellxgene"]["name"], "")
datasets_bucket = try(local.secret["s3_buckets"]["datasets"]["name"], "")
spatial_deep_zoom_bucket = try(local.secret["s3_buckets"]["spatial_deep_zoom"]["name"], "")
dataset_submissions_bucket = try(local.secret["s3_buckets"]["dataset_submissions"]["name"], "")
wmg_bucket = try(local.secret["s3_buckets"]["wmg"]["name"], "")

@@ -316,6 +317,7 @@ module upload_batch {
artifact_bucket = local.artifact_bucket
cellxgene_bucket = local.cellxgene_bucket
datasets_bucket = local.datasets_bucket
spatial_deep_zoom_bucket = local.spatial_deep_zoom_bucket
frontend_url = local.frontend_url
batch_container_memory_limit = local.batch_container_memory_limit
}
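`spatial_deep_zoom_bucket` is resolved from the happy environment secret's `s3_buckets` map and threaded into the upload batch module. Before deploying, it may be worth confirming the secret already carries the new key — a sketch, where the `--secret-id` value is a hypothetical placeholder for whatever `local.secret` resolves to in your stack:

```
# Sanity-check the s3_buckets map in the happy config secret; the jq path
# mirrors local.secret["s3_buckets"]["spatial_deep_zoom"]["name"] above.
aws secretsmanager get-secret-value \
  --secret-id "happy/env-dev-config" \
  --region us-west-2 |
  jq -r '.SecretString | fromjson | .s3_buckets.spatial_deep_zoom.name'
```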
4 changes: 4 additions & 0 deletions .happy/terraform/modules/wmg-batch/main.tf
@@ -34,6 +34,10 @@ resource aws_batch_job_definition batch_job_def {
{
"name": "REMOTE_DEV_PREFIX",
"value": "${var.remote_dev_prefix}"
},
{
"name": "CELLXGENE_CENSUS_USERAGENT",
"value": "${var.census_user_agent}"
}
],
"vcpus": ${var.desired_vcpus},
8 changes: 7 additions & 1 deletion .happy/terraform/modules/wmg-batch/variables.tf
@@ -55,4 +55,10 @@ variable desired_vcpus {
variable "api_url" {
type = string
description = "URL for the backend api."
}
}

variable "census_user_agent" {
type = string
description = "User agent for the census API"
default = "CZI-wmg"
}
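Since `census_user_agent` ships with a default, existing stacks keep working unchanged; a caller that wants a distinct user agent can override the variable. A sketch, assuming the module is planned directly with Terraform (happy normally supplies these variables):

```
# Override the default "CZI-wmg" census user agent for a one-off plan.
terraform plan -var 'census_user_agent=CZI-wmg-experiment'
```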
22 changes: 22 additions & 0 deletions DEV_ENV_WITHOUT_DOCKER.md
@@ -51,3 +51,25 @@ Run functional tests for WMG api against the `dev` environment.
**NOTE**: the `dev` environment is remote; these functional tests run locally against the remote `dev` backend.

1. `AWS_PROFILE=single-cell-dev DEPLOYMENT_STAGE=dev pytest -v tests/functional/backend/wmg/test_wmg_api.py`

### Set up vips

You may run into issues finding `_libvips` when running a local Jupyter notebook that calls `pyvips`, such as when running CXG conversion locally. The error may look like this:

```
ModuleNotFoundError Traceback (most recent call last)
File ~/miniconda3/envs/py11/lib/python3.11/site-packages/pyvips/__init__.py:19
18 try:
---> 19 import _libvips
21 logger.debug('Loaded binary module _libvips')
ModuleNotFoundError: No module named '_libvips'
```

To resolve this, install `vips` with `brew install vips`, since `pyvips` depends on it. If you're using conda, you'll also need to tell your conda environment where Homebrew installed `vips`. You can do this with:

```
mkdir -p ~/miniconda3/envs/<CONDA_ENV_NAME>/etc/conda/activate.d
touch ~/miniconda3/envs/<CONDA_ENV_NAME>/etc/conda/activate.d/env_vars.sh
echo 'export DYLD_LIBRARY_PATH=/opt/homebrew/lib:$DYLD_LIBRARY_PATH' >> ~/miniconda3/envs/<CONDA_ENV_NAME>/etc/conda/activate.d/env_vars.sh
```
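After re-activating the conda environment so the `activate.d` hook runs, a quick import check (not part of the original doc) confirms `pyvips` can now find `libvips`:

```
conda deactivate && conda activate <CONDA_ENV_NAME>
python -c "import pyvips; print('pyvips loaded OK')"
```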
2 changes: 1 addition & 1 deletion Makefile
@@ -38,7 +38,7 @@ cellguide-pipeline-unittest:

.PHONY: functional-test
functional-test:
python3 -m pytest tests/functional/ --rootdir=. --verbose
python3 -m pytest tests/functional/ --rootdir=. --verbose -n auto

.PHONY: prod-performance-test
prod-performance-test:
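The new `-n auto` flag runs the functional suite in parallel, one worker per CPU core; it comes from the `pytest-xdist` plugin rather than core pytest, so the plugin must be installed:

```
pip install pytest-xdist  # provides the -n/--numprocesses option
python3 -m pytest tests/functional/ --rootdir=. --verbose -n auto
```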
30 changes: 14 additions & 16 deletions backend/Makefile
@@ -50,6 +50,7 @@ db/check:
# Check if the database needs to be migrated due to changes in the schema.
PYTHONPATH=.. alembic -c=./database/database.ini check

# Ensure that make db/tunnel/up has been run before running this
# interactive mode usage: AWS_PROFILE=single-cell-dev DEPLOYMENT_STAGE=dev make db/connect
# ARGS usage: AWS_PROFILE=single-cell-dev DEPLOYMENT_STAGE=dev make db/connect ARGS="-c \"select * from dataset_artifact where filetype='CXG'\""
db/connect:
@@ -61,18 +62,15 @@ endif

db/connect_internal:
$(eval DB_PW = $(shell aws secretsmanager get-secret-value --secret-id corpora/backend/${DEPLOYMENT_STAGE}/database --region us-west-2 | jq -r '.SecretString | match(":([^:]*)@").captures[0].string'))
$(MAKE) db/tunnel/up
PGOPTIONS='-csearch_path=persistence_schema' PGPASSWORD=${DB_PW} psql --dbname ${DB_NAME} --username ${DB_USER} --host 0.0.0.0 $(ARGS)
$(MAKE) db/tunnel/down

db/console: db/connect # alias

PORT:=5432
db/dump:
# Dump the DEPLOYMENT_STAGE database to OUTFILE
$(eval DB_PW = $(shell aws secretsmanager get-secret-value --secret-id corpora/backend/${DEPLOYMENT_STAGE}/database --region us-west-2 | jq -r '.SecretString | match(":([^:]*)@").captures[0].string'))
$(MAKE) db/tunnel/up
PGPASSWORD=${DB_PW} pg_dump -Fc --dbname=corpora_${DEPLOYMENT_STAGE} --file=$(OUTFILE) --host 0.0.0.0 --username corpora_${DEPLOYMENT_STAGE}
$(MAKE) db/tunnel/down
PGPASSWORD=${DB_PW} pg_dump -Fc --dbname=corpora_${DEPLOYMENT_STAGE} --file=$(OUTFILE) --host 0.0.0.0 --port $(PORT) --username corpora_${DEPLOYMENT_STAGE}

db/local/load-data:
# Loads corpora_dev.sqlc into the local Docker env corpora database
@@ -119,24 +117,24 @@ else
CLUSTER_NAME=corpora-${DEPLOYMENT_STAGE}-corpora-api

endif
# TODO:
# - add db/tunnel as a dependency for all targets so that a tunnel is opened automatically if one is not already open
# If tunneling for a data mirror, run this once for each of the SRC and DEST envs BEFORE running
# mirror_env_data, with different local PORT values.
# Runs in interactive mode, so each run requires a separate terminal tab.
PORT:=5432
db/tunnel/up:
$(eval endpoint=$(shell aws rds describe-db-cluster-endpoints --db-cluster-identifier ${CLUSTER_NAME} | jq -r '.DBClusterEndpoints[] | select(.EndpointType | contains("WRITER")) | .Endpoint'))
ssh -f -T -N -M -S $(SSH_SOCKET)\
-o ServerAliveInterval=${SSH_SERVER_ALIVE_INTERVAL_IN_SECONDS} -o ServerAliveCountMax=${SSH_SERVER_ALIVE_COUNT_MAX} \
-o ExitOnForwardFailure=yes \
-L 5432:${endpoint}:5432 $(SSH_BASTION_HOST)
$(eval instance_id=$(shell aws ec2 describe-instances --filters "Name=tag:Name,Values=dp-${DEPLOYMENT_STAGE}-happy" --query "Reservations[*].Instances[*].InstanceId" --output text))

db/tunnel: db/tunnel/up # alias for backwards compatibility
aws ssm start-session --target ${instance_id} --document-name AWS-StartPortForwardingSessionToRemoteHost --parameters '{"portNumber":["5432"],"localPortNumber":["$(PORT)"],"host":["${endpoint}"]}'

db/tunnel/down:
ssh -S $(SSH_SOCKET) -O exit $(SSH_BASTION_HOST) || true
db/tunnel: db/tunnel/up # alias for backwards compatibility

SRC_ENV := prod
mirror_env_data:
# Mirrors the SRC_ENV env's AWS RDS database and S3 data to
# DEST_ENV. Defaults to prod->dev.
# Must also provide SRC_PORT and DEST_PORT, the local ports where each env's database is reachable.
# Must also run `make db/tunnel/up` before running this command, once for the SRC_ENV and once for the DEST_ENV.
#
# If WMG_CUBE is set to any non-null value, copy the WMG cube from SRC_ENV to DEST_ENV. Works for DEST_ENV=rdev.
#
Expand All @@ -151,5 +149,5 @@ mirror_env_data:
# THIS IS DESTRUCTIVE for the DEST_ENV env! The SRC_ENV env will
# never be modified, but the DEST_ENV env's data will be replaced.
#
# Usage: make mirror_env_data [SRC_ENV={prod|staging|dev}] [DEST_ENV={dev|staging|rdev}] [WMG_CUBE=1] [STACK=<rdev_stack_name> [CELLGUIDE=1] [COLLECTIONS=<uuid1,uuid2,...> [DATA=1]]]
scripts/mirror_env_data.sh $(SRC_ENV) $(DEST_ENV) $(WMG_CUBE) $(STACK) $(CELLGUIDE) $(COLLECTIONS) $(DATA)
# Usage: make mirror_env_data [SRC_ENV={prod|staging|dev}] [DEST_ENV={dev|staging|rdev}] [SRC_PORT={int}] [DEST_PORT={int}] [WMG_CUBE=1] [STACK=<rdev_stack_name> [CELLGUIDE=1] [COLLECTIONS=<uuid1,uuid2,...> [DATA=1]]]
scripts/mirror_env_data.sh $(SRC_ENV) $(DEST_ENV) $(SRC_PORT) $(DEST_PORT) $(WMG_CUBE) $(STACK) $(CELLGUIDE) $(COLLECTIONS) $(DATA)
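Putting the new tunnel and port flags together, a prod→dev mirror now looks roughly like this (a sketch based on the comments above; profile names and ports are illustrative):

```
# Terminal 1: interactive tunnel to the source env.
AWS_PROFILE=single-cell-prod DEPLOYMENT_STAGE=prod make db/tunnel/up PORT=5432

# Terminal 2: interactive tunnel to the destination env.
AWS_PROFILE=single-cell-dev DEPLOYMENT_STAGE=dev make db/tunnel/up PORT=5433

# Terminal 3: run the mirror against both local ports.
make mirror_env_data SRC_ENV=prod DEST_ENV=dev SRC_PORT=5432 DEST_PORT=5433
```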