Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
70 commits
Select commit Hold shift + click to select a range
a94c2fb
refactor(worker): restructure monolithic jobs.py into modular archite…
bencap Jan 7, 2026
1db6b68
feat: Add comprehensive job traceability system database schema
bencap Jan 7, 2026
510614c
fix(logging): simplify context saving logic to overwrite existing map…
bencap Jan 7, 2026
83b34d6
tests: add TransactionSpy class for mocking database transaction meth…
bencap Jan 12, 2026
224bbb3
feat: add BaseManager class with transaction handling and rollback fe…
bencap Jan 12, 2026
05fc52b
feat: Job manager class, supporting utilities, and unit tests
bencap Jan 12, 2026
ae18eeb
feat: Pipeline manager class, supporting utilities, and unit tests
bencap Jan 14, 2026
899ca84
feat: add function to check if job dependencies are reachable
bencap Jan 16, 2026
f34939c
feat: add markers for test categorization in pytest
bencap Jan 16, 2026
3ad046d
fix: mock job manager returning in fixture rather than yielding
bencap Jan 17, 2026
1e447a7
fix: enhance error logging for job and pipeline state transitions
bencap Jan 17, 2026
a551f5d
fix: re-order imports in job manager test file
bencap Jan 17, 2026
8ff985c
fix: use conftest_optional import structure in worker test module
bencap Jan 17, 2026
c2100a2
feat: Add decorators for job and pipeline management
bencap Jan 20, 2026
3799d84
feat: use context for logging in job manager
bencap Jan 20, 2026
155e549
feat: decorator for job run record guarantees
bencap Jan 21, 2026
4a4055d
feat: add test mode support to job and pipeline decorators
bencap Jan 21, 2026
3c4e6b9
fix: simplify exc handling in job management decorator
bencap Jan 21, 2026
9dd71ff
feat: allow pipelines to be started by decorated jobs
bencap Jan 22, 2026
1fe076a
tests: unit tests for worker manager utilities
bencap Jan 22, 2026
16a5a50
feat: add network test marker and control socket access in pytest
bencap Jan 22, 2026
a701d53
Refactor test setup by replacing `setup_worker_db` with `with_populat…
bencap Jan 22, 2026
ce893a4
wip: refactor jobs to use job management system
bencap Jan 22, 2026
8c5e225
refactor: reduce mocking of database across worker tests
bencap Jan 23, 2026
b0397b4
refactor: simplify job definition in job management tests
bencap Jan 23, 2026
a716cc9
refactor: simplify job definition in job management tests
bencap Jan 23, 2026
8a22306
refactor: centralize decorator test mode flag fixture
bencap Jan 23, 2026
92ab081
feat: enhance pipeline start logic with controllable coordination
bencap Jan 24, 2026
a06aa21
feat: logic fixups and comprehensive test cases for variant processin…
bencap Jan 24, 2026
9603334
feat: add start_pipeline job and related tests for pipeline management
bencap Jan 24, 2026
fcfb060
feat: gnomAD managed job tests and enhancements
bencap Jan 25, 2026
a301f2d
feat: uniprot managed job tests and enhancements
bencap Jan 27, 2026
a06f351
feat: clingen managed job enhancements
bencap Jan 28, 2026
3aedead
fixup(variant creation)
bencap Jan 28, 2026
2af66dd
feat: implement job and pipeline factories with definitions and tests
bencap Jan 28, 2026
987b38a
feat: integrate PipelineFactory for variant creation and update proce…
bencap Jan 28, 2026
3ca697a
feat: add context manager for database session management
bencap Jan 28, 2026
c61bd41
feat: use session context manager in worker decorators rather than in…
bencap Jan 28, 2026
010f15c
refactor: streamline context handling in job and pipeline decorators
bencap Jan 28, 2026
b2c5fe7
feat: add new job definitions for score set annotation pipeline
bencap Jan 29, 2026
5ca9d3f
feat: implement AnnotationStatusManager for managing variant annotati…
bencap Jan 29, 2026
806f8ed
feat: add annotation status tracking to jobs
bencap Jan 29, 2026
011522c
feat: streamline job results and exception handling in tests
bencap Jan 29, 2026
7b44346
feat: less prescriptive status messages in complete job functions
bencap Jan 29, 2026
c8b0a02
fix: ensure exception info is always present for failed jobs in job m…
bencap Jan 29, 2026
cedb42d
fix: move Athena engine fixture to optional conftest for core depende…
bencap Jan 29, 2026
60ef67d
feat: add standalone context creation for worker lifecycle management
bencap Jan 29, 2026
a3f36d1
feat: add asyncclick dependency and update environment script to use it
bencap Jan 29, 2026
0416b2d
feat: add standalone job definitions and update lifecycle context for…
bencap Jan 29, 2026
a013cc0
feat: refactor populate_mapped_variant_data to use async and job subm…
bencap Jan 29, 2026
f3a7d6a
chore: test cleanup
bencap Jan 29, 2026
da0e2ce
fix: remove ga4gh packages from server group
bencap Jan 23, 2026
1abe4c6
docs: minimal developer docs via copilot for worker jobs
bencap Jan 29, 2026
797ea39
fix: mypy typing
bencap Jan 29, 2026
a1d3150
fix: test attempting to connect via socket to athena
bencap Jan 29, 2026
fcccb9a
feat: add Slack error notifications to job/pipeline decorators
bencap Jan 29, 2026
e85312a
fix: update TODO comments for clarity and specificity in UniProt and …
bencap Jan 29, 2026
893473f
feat: make Redis client optional in managers and add error handling f…
bencap Jan 29, 2026
5db3561
feat: implement create_job_dependency method in JobFactory with valid…
bencap Jan 29, 2026
85a4268
feat: refactor UniProt ID mapping script to use async commands and jo…
bencap Jan 29, 2026
9a4dcfc
feat: refactor link_gnomad_variants script to use async commands and …
bencap Jan 30, 2026
86b2478
feat: refactor clingen_car_submission script to use async commands an…
bencap Jan 30, 2026
25bc7da
feat: refactor clingen_ldh_submission script to streamline job submis…
bencap Jan 30, 2026
3c7449b
feat: clinvar clinical control refresh job + script
bencap Jan 30, 2026
f2b57a4
feat: update annotation type handling to use enum directly and switch…
bencap Feb 4, 2026
ba70e17
feat: add functions to retrieve associated ClinVar Allele IDs and enh…
bencap Feb 4, 2026
c7bf7f7
refactor: remove redundant fixture for setting up sample variants in …
bencap Feb 4, 2026
547be35
chore: add TODO for caching ClinVar control data to improve performance
bencap Feb 4, 2026
4a878b0
feat: add multiple refresh job definitions for ClinVar controls with …
bencap Feb 4, 2026
f3ea5ce
feat: enhance test workflow to run fast tests on pull requests and fu…
bencap Feb 4, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 26 additions & 5 deletions .github/workflows/run-tests-on-push.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
name: Run Tests (On Push)
name: Run Tests
on:
push:
# Run all tests on main, fast tests on other branches

env:
LOG_CONFIG: test
Expand Down Expand Up @@ -50,7 +51,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev
- run: poetry run pytest tests/
- name: Run fast tests on non-main branches
if: github.event_name == 'push' && github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow"
- name: Run full tests on main
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
run: poetry run pytest tests/

run-tests-3_11:
runs-on: ubuntu-latest
Expand All @@ -66,7 +72,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev --extras server
- run: poetry run pytest tests/ --show-capture=stdout --cov=src
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow" --show-capture=stdout
- name: Run all tests with coverage on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/ --show-capture=stdout --cov=src

run-tests-3_12-core-dependencies:
runs-on: ubuntu-latest
Expand All @@ -80,7 +91,12 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev
- run: poetry run pytest tests/
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow"
- name: Run all tests on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/

run-tests-3_12:
runs-on: ubuntu-latest
Expand All @@ -96,4 +112,9 @@ jobs:
- run: pip install --upgrade pip
- run: pip install poetry
- run: poetry install --with dev --extras server
- run: poetry run pytest tests/ --show-capture=stdout --cov=src
- name: Run fast tests on non-main branches
if: github.ref != 'refs/heads/main'
run: poetry run pytest tests/ -m "not network and not slow" --show-capture=stdout
- name: Run all tests with coverage on main branch
if: github.ref == 'refs/heads/main'
run: poetry run pytest tests/ --show-capture=stdout --cov=src
222 changes: 222 additions & 0 deletions alembic/versions/8de33cc35cd7_add_pipeline_and_job_tracking_tables.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,222 @@
"""add pipeline and job tracking tables

Revision ID: 8de33cc35cd7
Revises: dcf8572d3a17
Create Date: 2026-01-28 10:08:36.906494

"""

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision = "8de33cc35cd7"  # this migration's id (matches the filename prefix)
down_revision = "dcf8572d3a17"  # parent revision this migration builds on
branch_labels = None
depends_on = None


def upgrade():
    """Create the job-traceability schema.

    Tables created (in FK-dependency order):
      * pipelines               — top-level pipeline runs (optionally owned by a user)
      * job_runs                — individual job executions, optionally grouped into a pipeline
      * job_dependencies        — job-to-job dependency edges between job_runs rows
      * variant_annotation_status — per-variant annotation outcome tracking, linked to job_runs
    plus their indexes and check constraints.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "pipelines",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("urn", sa.String(length=255), nullable=True),
        sa.Column("name", sa.String(length=500), nullable=False),
        sa.Column("description", sa.Text(), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("correlation_id", sa.String(length=255), nullable=True),
        sa.Column(
            "metadata",
            postgresql.JSONB(astext_type=sa.Text()),
            server_default="{}",
            nullable=False,
            comment="Flexible metadata storage for pipeline-specific data",
        ),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("finished_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_by_user_id", sa.Integer(), nullable=True),
        sa.Column("mavedb_version", sa.String(length=50), nullable=True),
        sa.CheckConstraint(
            "status IN ('created', 'running', 'succeeded', 'failed', 'cancelled', 'paused', 'partial')",
            name="ck_pipelines_status_valid",
        ),
        # Keep pipelines around even if the creating user is deleted.
        sa.ForeignKeyConstraint(["created_by_user_id"], ["users.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("urn"),
    )
    op.create_index("ix_pipelines_correlation_id", "pipelines", ["correlation_id"], unique=False)
    op.create_index("ix_pipelines_created_at", "pipelines", ["created_at"], unique=False)
    op.create_index("ix_pipelines_created_by_user_id", "pipelines", ["created_by_user_id"], unique=False)
    op.create_index("ix_pipelines_status", "pipelines", ["status"], unique=False)
    op.create_table(
        "job_runs",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("urn", sa.String(length=255), nullable=True),
        sa.Column("job_type", sa.String(length=100), nullable=False),
        sa.Column("job_function", sa.String(length=255), nullable=False),
        sa.Column("job_params", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        sa.Column("status", sa.String(length=50), nullable=False),
        sa.Column("pipeline_id", sa.Integer(), nullable=True),
        sa.Column("priority", sa.Integer(), nullable=False),
        sa.Column("max_retries", sa.Integer(), nullable=False),
        sa.Column("retry_count", sa.Integer(), nullable=False),
        sa.Column("retry_delay_seconds", sa.Integer(), nullable=True),
        sa.Column("scheduled_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("started_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("finished_at", sa.DateTime(timezone=True), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("error_traceback", sa.Text(), nullable=True),
        sa.Column("failure_category", sa.String(length=100), nullable=True),
        sa.Column("progress_current", sa.Integer(), nullable=True),
        sa.Column("progress_total", sa.Integer(), nullable=True),
        sa.Column("progress_message", sa.String(length=500), nullable=True),
        sa.Column("correlation_id", sa.String(length=255), nullable=True),
        sa.Column("metadata", postgresql.JSONB(astext_type=sa.Text()), server_default="{}", nullable=False),
        sa.Column("mavedb_version", sa.String(length=50), nullable=True),
        sa.CheckConstraint(
            "status IN ('pending', 'queued', 'running', 'succeeded', 'failed', 'cancelled', 'skipped')",
            name="ck_job_runs_status_valid",
        ),
        sa.CheckConstraint("max_retries >= 0", name="ck_job_runs_max_retries_positive"),
        sa.CheckConstraint("priority >= 0", name="ck_job_runs_priority_positive"),
        sa.CheckConstraint("retry_count >= 0", name="ck_job_runs_retry_count_positive"),
        # Job records outlive their pipeline: detach rather than cascade-delete.
        sa.ForeignKeyConstraint(["pipeline_id"], ["pipelines.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("urn"),
    )
    op.create_index("ix_job_runs_correlation_id", "job_runs", ["correlation_id"], unique=False)
    op.create_index("ix_job_runs_created_at", "job_runs", ["created_at"], unique=False)
    op.create_index("ix_job_runs_job_type", "job_runs", ["job_type"], unique=False)
    op.create_index("ix_job_runs_pipeline_id", "job_runs", ["pipeline_id"], unique=False)
    op.create_index("ix_job_runs_scheduled_at", "job_runs", ["scheduled_at"], unique=False)
    op.create_index("ix_job_runs_status", "job_runs", ["status"], unique=False)
    # Composite index supporting "next runnable job" style queries.
    op.create_index("ix_job_runs_status_scheduled", "job_runs", ["status", "scheduled_at"], unique=False)
    # Dependency edge table. Note the composite PK: "id" is the dependent job's
    # job_runs.id (see the FK below), not an independent surrogate key.
    op.create_table(
        "job_dependencies",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("depends_on_job_id", sa.Integer(), nullable=False),
        sa.Column("dependency_type", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("metadata", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
        # NOTE(review): dependency_type is declared nullable=False above, so the
        # "IS NULL OR" branch of this constraint is unreachable — confirm whether
        # the column was intended to be nullable.
        sa.CheckConstraint(
            "dependency_type IS NULL OR dependency_type IN ('success_required', 'completion_required')",
            name="ck_job_dependencies_type_valid",
        ),
        sa.ForeignKeyConstraint(["depends_on_job_id"], ["job_runs.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["id"], ["job_runs.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id", "depends_on_job_id"),
    )
    op.create_index("ix_job_dependencies_created_at", "job_dependencies", ["created_at"], unique=False)
    op.create_index("ix_job_dependencies_depends_on_job_id", "job_dependencies", ["depends_on_job_id"], unique=False)
    op.create_table(
        "variant_annotation_status",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("variant_id", sa.Integer(), nullable=False),
        sa.Column(
            "annotation_type",
            sa.String(length=50),
            nullable=False,
            comment="Type of annotation: vrs, clinvar, gnomad, etc.",
        ),
        sa.Column(
            "version",
            sa.String(length=50),
            nullable=True,
            comment="Version of the annotation source used (if applicable)",
        ),
        # NOTE(review): the column comment mentions 'pending', but the check
        # constraint below only allows success/failed/skipped — confirm the
        # intended status set.
        sa.Column("status", sa.String(length=50), nullable=False, comment="success, failed, skipped, pending"),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("failure_category", sa.String(length=100), nullable=True),
        sa.Column(
            "success_data",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
            comment="Annotation results when successful",
        ),
        sa.Column(
            "current",
            sa.Boolean(),
            server_default="true",
            nullable=False,
            comment="Whether this is the current status for the variant and annotation type",
        ),
        sa.Column("job_run_id", sa.Integer(), nullable=True),
        sa.Column("created_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False),
        sa.CheckConstraint(
            "annotation_type IN ('vrs_mapping', 'clingen_allele_id', 'mapped_hgvs', 'variant_translation', 'gnomad_allele_frequency', 'clinvar_control', 'vep_functional_consequence', 'ldh_submission')",
            name="ck_variant_annotation_type_valid",
        ),
        sa.CheckConstraint("status IN ('success', 'failed', 'skipped')", name="ck_variant_annotation_status_valid"),
        # Annotation history survives job-record deletion, but is removed with its variant.
        sa.ForeignKeyConstraint(["job_run_id"], ["job_runs.id"], ondelete="SET NULL"),
        sa.ForeignKeyConstraint(["variant_id"], ["variants.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(
        "ix_variant_annotation_status_annotation_type", "variant_annotation_status", ["annotation_type"], unique=False
    )
    op.create_index(
        "ix_variant_annotation_status_created_at", "variant_annotation_status", ["created_at"], unique=False
    )
    op.create_index("ix_variant_annotation_status_current", "variant_annotation_status", ["current"], unique=False)
    op.create_index(
        "ix_variant_annotation_status_job_run_id", "variant_annotation_status", ["job_run_id"], unique=False
    )
    op.create_index("ix_variant_annotation_status_status", "variant_annotation_status", ["status"], unique=False)
    op.create_index(
        "ix_variant_annotation_status_variant_id", "variant_annotation_status", ["variant_id"], unique=False
    )
    # Composite index for "current annotation of type/version for a variant" lookups.
    op.create_index(
        "ix_variant_annotation_status_variant_type_version_current",
        "variant_annotation_status",
        ["variant_id", "annotation_type", "version", "current"],
        unique=False,
    )
    op.create_index("ix_variant_annotation_status_version", "variant_annotation_status", ["version"], unique=False)
    op.create_index(
        "ix_variant_annotation_type_status", "variant_annotation_status", ["annotation_type", "status"], unique=False
    )
    op.create_index(
        "ix_variant_annotation_variant_type_status",
        "variant_annotation_status",
        ["variant_id", "annotation_type", "status"],
        unique=False,
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the job-traceability schema created in upgrade().

    Objects are dropped in reverse dependency order — indexes before their
    table, and child tables (variant_annotation_status, job_dependencies)
    before job_runs, which is dropped before pipelines — so that no foreign
    key references remain when each table is removed.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index("ix_variant_annotation_variant_type_status", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_type_status", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_version", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_variant_type_version_current", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_variant_id", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_status", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_job_run_id", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_current", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_created_at", table_name="variant_annotation_status")
    op.drop_index("ix_variant_annotation_status_annotation_type", table_name="variant_annotation_status")
    op.drop_table("variant_annotation_status")
    op.drop_index("ix_job_dependencies_depends_on_job_id", table_name="job_dependencies")
    op.drop_index("ix_job_dependencies_created_at", table_name="job_dependencies")
    op.drop_table("job_dependencies")
    op.drop_index("ix_job_runs_status_scheduled", table_name="job_runs")
    op.drop_index("ix_job_runs_status", table_name="job_runs")
    op.drop_index("ix_job_runs_scheduled_at", table_name="job_runs")
    op.drop_index("ix_job_runs_pipeline_id", table_name="job_runs")
    op.drop_index("ix_job_runs_job_type", table_name="job_runs")
    op.drop_index("ix_job_runs_created_at", table_name="job_runs")
    op.drop_index("ix_job_runs_correlation_id", table_name="job_runs")
    op.drop_table("job_runs")
    op.drop_index("ix_pipelines_status", table_name="pipelines")
    op.drop_index("ix_pipelines_created_by_user_id", table_name="pipelines")
    op.drop_index("ix_pipelines_created_at", table_name="pipelines")
    op.drop_index("ix_pipelines_correlation_id", table_name="pipelines")
    op.drop_table("pipelines")
    # ### end Alembic commands ###
4 changes: 4 additions & 0 deletions bin/localstack-init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/sh
# Localstack ready-hook: create the local S3 bucket used for score-set CSV
# uploads in the dev environment (mounted into /etc/localstack/init/ready.d).
set -eu

BUCKET="score-set-csv-uploads-dev"

echo "Initializing local S3 bucket..."
# `s3 mb` fails if the bucket already exists (e.g. when the container restarts
# with a persisted volume); tolerate that so the init hook stays idempotent.
awslocal s3 mb "s3://${BUCKET}" || true
echo "S3 bucket '${BUCKET}' created."
13 changes: 13 additions & 0 deletions docker-compose-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,18 @@ services:
volumes:
- mavedb-redis-dev:/data

localstack:
image: localstack/localstack:latest
ports:
- "4566:4566"
env_file:
- settings/.env.dev
environment:
- SERVICES=s3:4566 # We only need S3 for MaveDB
volumes:
- mavedb-localstack-dev:/var/lib/localstack
- "./bin/localstack-init.sh:/etc/localstack/init/ready.d/localstack-init.sh"

seqrepo:
image: biocommons/seqrepo:2024-12-20
volumes:
Expand All @@ -104,3 +116,4 @@ volumes:
mavedb-data-dev:
mavedb-redis-dev:
mavedb-seqrepo-dev:
mavedb-localstack-dev:
861 changes: 457 additions & 404 deletions poetry.lock

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,13 @@ starlette-context = { version = "^0.3.6", optional = true }
slack-sdk = { version = "~3.21.3", optional = true }
uvicorn = { extras = ["standard"], version = "*", optional = true }
watchtower = { version = "~3.2.0", optional = true }
asyncclick = "^8.3.0.7"

[tool.poetry.group.dev]
optional = true

[tool.poetry.group.dev.dependencies]
boto3-stubs = "~1.34.97"
boto3-stubs = { extras = ["s3"], version = "~1.42.33" }
mypy = "~1.10.0"
pre-commit = "*"
jsonschema = "*"
Expand All @@ -88,7 +89,7 @@ SQLAlchemy = { extras = ["mypy"], version = "~2.0.0" }


[tool.poetry.extras]
server = ["alembic", "alembic-utils", "arq", "authlib", "biocommons", "boto3", "cdot", "cryptography", "fastapi", "hgvs", "ga4gh-va-spec", "orcid", "psycopg2", "python-jose", "python-multipart", "pyathena", "requests", "starlette", "starlette-context", "slack-sdk", "uvicorn", "watchtower"]
server = ["alembic", "alembic-utils", "arq", "authlib", "biocommons", "boto3", "cdot", "cryptography", "fastapi", "hgvs", "orcid", "psycopg2", "python-jose", "python-multipart", "pyathena", "requests", "starlette", "starlette-context", "slack-sdk", "uvicorn", "watchtower"]


[tool.mypy]
Expand All @@ -100,11 +101,17 @@ plugins = [
mypy_path = "mypy_stubs"

[tool.pytest.ini_options]
addopts = "-v -rP --import-mode=importlib --disable-socket --allow-unix-socket --allow-hosts localhost,::1,127.0.0.1"
addopts = "-v -rP --import-mode=importlib"
asyncio_mode = 'strict'
testpaths = "tests/"
pythonpath = "."
norecursedirs = "tests/helpers/"
markers = """
integration: mark a test as an integration test.
unit: mark a test as a unit test.
network: mark a test that requires network access.
slow: mark a test as slow-running.
"""
# Uncomment the following lines to include application log output in Pytest logs.
# log_cli = true
# log_cli_level = "DEBUG"
Expand Down
9 changes: 9 additions & 0 deletions settings/.env.template
Original file line number Diff line number Diff line change
Expand Up @@ -98,3 +98,12 @@ AWS_REGION_NAME=us-west-2
ATHENA_SCHEMA_NAME=default
ATHENA_S3_STAGING_DIR=s3://your-bucket/path/to/staging/
GNOMAD_DATA_VERSION=v4.1

####################################################################################################
# Environment variables for S3 connection
####################################################################################################

AWS_ACCESS_KEY_ID=test
AWS_SECRET_ACCESS_KEY=test
S3_ENDPOINT_URL=http://localstack:4566
UPLOAD_S3_BUCKET_NAME=score-set-csv-uploads-dev
Loading