
Specify Poetry version in Dockerfile #701
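The change pins Poetry to an explicit version in the Dockerfile instead of installing whatever release is current at build time, so image builds stay reproducible. A minimal sketch of the idea follows; the base image, Poetry version, and install commands here are illustrative assumptions, not the actual diff:

    FROM python:3.9-slim

    # Pin Poetry so the image build does not float to the latest release.
    RUN pip install --no-cache-dir poetry==1.1.15

    WORKDIR /src

    # Install locked dependencies before copying the source so this layer caches.
    COPY pyproject.toml poetry.lock ./
    RUN poetry config virtualenvs.create false \
        && poetry install --no-interaction --no-root

    COPY . .
    RUN poetry install --no-interaction

The CI workflow below builds and pushes the image, then runs the test suite against a local compose-etl deployment: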

name: CI
on: push
jobs:
  # first, build the image
  build:
    name: Build and push image
    uses: uc-cdis/.github/.github/workflows/image_build_push.yaml@master
    with:
      BUILD_PLATFORMS: "linux/amd64"
    secrets:
      ECR_AWS_ACCESS_KEY_ID: ${{ secrets.ECR_AWS_ACCESS_KEY_ID }}
      ECR_AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_AWS_SECRET_ACCESS_KEY }}
      QUAY_USERNAME: ${{ secrets.QUAY_USERNAME }}
      QUAY_ROBOT_TOKEN: ${{ secrets.QUAY_ROBOT_TOKEN }}
  # then run the tests
  test:
    name: Run tests
    runs-on: ubuntu-20.04
    needs: build
    services:
      postgres:
        image: postgres:13
        # Provide the password for postgres
        env:
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: metadata_db
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Map TCP port 5432 on the service container to the host
          - 5432:5432
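    # The published port makes the Postgres service reachable from the
    # runner itself at localhost:5432, which the test steps below rely on.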
    steps:
      - name: Check out repository code
        uses: actions/checkout@v3
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install --yes --no-install-recommends jq postgresql-client python3.9 python3-pip
          sudo update-alternatives --install /usr/bin/python python /usr/bin/python3.9 1
          sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 1
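      # virtualenvs.create true keeps the project in a Poetry-managed
      # virtualenv, which is why the test steps below invoke pytest
      # through `poetry run`.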
      - name: Install poetry, python dependencies and tube
        run: |
          python3.9 -m pip install --upgrade pip poetry requests
          python3.9 -m poetry config virtualenvs.create true
          python3.9 -m poetry install -vv --no-interaction --with dev
      - name: Clone compose-etl
        working-directory: ..
        run: git clone https://github.com/uc-cdis/compose-etl.git --branch master --single-branch
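      # compose-etl supplies the docker-compose setup used below to run
      # Spark, Elasticsearch and the tube image.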
      - name: List all files and current directory
        run: |
          ls ../..
          ls ..
          pwd
      - name: Copy configuration files
        run: |
          cp -f tests/integrated_tests/gen3/tube/etlMapping.yaml ../compose-etl/configs/etlMapping.yaml
          cp -f tests/integrated_tests/gen3/tube/user.yaml ../compose-etl/configs/user.yaml
      - name: Create the gen3/tube directory under XDG_DATA_HOME
        run: mkdir -p $XDG_DATA_HOME/gen3/tube
        env:
          XDG_DATA_HOME: /home/runner/work/tube
      - name: Copy files to the XDG_DATA_HOME folder
        run: cp tests/integrated_tests/gen3/tube/{creds.json,etlMapping.yaml,user.yaml} $XDG_DATA_HOME/gen3/tube/
        env:
          XDG_DATA_HOME: /home/runner/work/tube
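      # tube resolves creds.json, etlMapping.yaml and user.yaml from
      # $XDG_DATA_HOME/gen3/tube, so the tests point XDG_DATA_HOME at a
      # scratch directory on the runner.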
      - name: Run standalone tests
        env:
          ES_URL: localhost
          POSTGRES_PORT: 5432
          XDG_DATA_HOME: /home/runner/work/tube
        # Run the tests that do not require Spark and Elasticsearch
        run: python3.9 -m poetry run pytest -v tests/standalone_tests --cov=tube --cov-report term-missing --cov-report xml
      - name: Init postgres database
        run: psql -d postgresql://postgres:postgres@localhost/metadata_db -f tests/integrated_tests/metadata_db.sql
      - name: Create credential file
        working-directory: ../compose-etl
        run: echo '{"db_host":"host.docker.internal","db_username":"postgres","db_password":"postgres","db_database":"metadata_db"}' > configs/creds.json
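      # host.docker.internal lets the containerized ETL reach the Postgres
      # service published on the runner host.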
      - name: Replace tube branch
        working-directory: ../compose-etl
        run: |
          BRANCH=${{ github.head_ref || github.ref_name }}
          BRANCH=$(echo $BRANCH | tr / _)
          sed -i "s/tube:master/tube:$BRANCH/g" docker-compose.yml
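      # Docker image tags cannot contain '/', so slashes in the branch name
      # are replaced with '_', presumably matching the tag the build job pushed.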
      - name: Change max_map_count (needed to run Elasticsearch in a container)
        working-directory: ../compose-etl
        run: sudo sysctl -w vm.max_map_count=262144
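      # 262144 is the minimum vm.max_map_count Elasticsearch requires to
      # boot with its default mmap-based storage.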
      - name: Run Spark and Elasticsearch
        working-directory: ../compose-etl
        run: docker compose up -d spark elasticsearch
      - name: Wait for the containers to be up and running
        run: sleep 60
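      # A fixed sleep is a blunt readiness check; it simply gives Spark and
      # Elasticsearch time to finish starting before the ETL runs.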
      - name: Check logs
        working-directory: ../compose-etl
        run: docker compose logs
      - name: Check all containers
        run: docker ps
      - name: Check all docker networks
        run: docker network ls
      - name: Run ETL process
        working-directory: ../compose-etl
        run: docker compose run tube bash -c "python run_config.py && python run_etl.py"
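      # run_config.py presumably generates the ETL configuration from the
      # mapping files; run_etl.py then executes the ETL itself.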
      - name: Run integrated tests
        # Run the tests that require Spark and Elasticsearch
        env:
          ES_URL: localhost
          POSTGRES_PORT: 5432
          DICTIONARY_URL: https://s3.amazonaws.com/dictionary-artifacts/ndhdictionary/3.3.8/schema.json
          XDG_DATA_HOME: /home/runner/work/tube
          ES_INDEX_NAME: etl
        # Skip SSL tests marked with `@pytest.mark.ssl`
        run: python3.9 -m poetry run pytest -v tests/integrated_tests -m "not ssl" --cov=tube --cov-append --cov-report term-missing --cov-report xml
      - name: Run dataframe tests
        # Run the tests that require Spark but not Elasticsearch
        env:
          ES_URL: localhost
          POSTGRES_PORT: 5432
          DICTIONARY_URL: https://s3.amazonaws.com/dictionary-artifacts/ibdgc-dictionary/1.6.10/schema.json
          XDG_DATA_HOME: /home/runner/work/tube
          RUNNING_MODE: Test
          HADOOP_VERSION: 3.3.2
          ES_INDEX_NAME: etl
          ES_HADOOP_VERSION: 8.3.3
          ES_HADOOP_HOME: /home/runner/work/tube
          ES_SPARK_30_2_12: elasticsearch-spark-30_2.12
          ES_SPARK_20_2_11: elasticsearch-spark-20_2.11
        # Skip SSL tests marked with `@pytest.mark.ssl`
        run: |
          python3.9 -m poetry run pytest -v tests/dataframe_tests -m "schema_ibdgc and not ssl" --cov=tube --cov-append --cov-report term-missing --cov-report xml
          python3.9 -m poetry run pytest -v tests/dataframe_tests -m "schema_midrc and not ssl" --cov=tube --cov-append --cov-report term-missing --cov-report xml
          python3.9 -m poetry run pytest -v tests/dataframe_tests -m "schema_jcoin and not ssl" --cov=tube --cov-append --cov-report term-missing --cov-report xml
          python3.9 -m poetry run pytest -v tests/dataframe_tests -m "schema_parent and not ssl" --cov=tube --cov-append --cov-report term-missing --cov-report xml
          # python3.9 -m poetry run pytest -v tests/dataframe_tests -m "not ssl and not schema_ibdgc and not schema_midrc" --cov=tube --cov-append --cov-report term-missing --cov-report xml
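      # --cov-append accumulates coverage from every pytest invocation above
      # into a single report for the submission step below.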
      - name: Submit coverage report
        env:
          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_TOKEN }}
        run: |
          pip install coveralls
          coveralls