Skip to content
Merged
Show file tree
Hide file tree
Changes from 14 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
208 changes: 208 additions & 0 deletions .github/workflows/pr-gate.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,208 @@
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# ---------------------------------------------------------
name: pr-gate

# NOTE(review): a "pr-gate" workflow would normally trigger on pull_request;
# triggering on push to a personal dev branch looks like temporary scaffolding
# while the pipeline is drafted -- confirm before merging to main.
on:
  push:
    branches: [laserprec/ghaction-ci]

# This file defines the following CI workflow:
#
# ┌──────────┐   ┌─────────┐   ┌────────┐
# │ static   ├─┬─► build*  ├─┬─► test   │
# │ analysis │ │ │ (cpu)   │ │ │ report │
# └──────────┘ │ └─────────┘ │ └────────┘
#              │ ┌─────────┐ │
#              ├─► build*  ├─┤
#              │ │ (spark) │ │
#              │ └─────────┘ │
#              │ ┌─────────┐ │
#              └─► build*  ├─┘
#                │ (gpu)   │ <-- TODO: Coming Soon
#                └─────────┘
#                    ....
# *each build runs, in PARALLEL, different combinations
#  of python version, OS, test subsets, etc.
#
# ASCII chart created via https://asciiflow.com/
jobs:
  ###############################################
  ############### STATIC-ANALYSIS ###############
  ###############################################
  static-analysis:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Use Python 3.6
        uses: actions/setup-python@v2
        with:
          # quoted so YAML does not read the version as the float 3.6
          python-version: "3.6"

      - name: Install dependencies (tox)
        run: |
          python -m pip install --upgrade pip setuptools wheel
          pip install tox

      - name: Run flake8
        # TODO: make flake8 blocking again (turned off to get a draft of the pipeline infrastructure)
        continue-on-error: true
        run: |
          tox -e flake8
###############################################
################## CPU-BUILD ##################
###############################################
build-cpu:
runs-on: ${{ matrix.os }}
needs: static-analysis
strategy:
matrix:
os: [ubuntu-latest]
python: [3.6]
# different kind of tests are located in tests/<unit|integration|smoke> folders
test-kind: ['unit']
# pytest markers configured in tox.ini. See https://docs.pytest.org/en/6.2.x/example/markers.html
test-marker: ['not gpu and not spark and not notebooks', 'not gpu and notebooks and not spark']

steps:
- uses: actions/checkout@v2
################# Run Python tests #################
- name: Use Python ${{ matrix.python }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}

- name: Install build dependencies (tox)
run: |
python -m pip install --upgrade pip setuptools wheel
pip install tox

- name: Run ${{ matrix.test-kind }} tests ('${{ matrix.test-marker }}')
# '-e py' will use the default 'python' executable found in system path
# for why using tox, see: https://tox.readthedocs.io/en/latest/index.html
# tox will do:
# 1. build and install source distribution (sdist)
# 2. run static analysis on the code (not implemented yet)
# 3. run all of the specified test environment (i.e. run tests in different pyversions, etc)
# 4. show test reports
run: |
tox -e py -- tests/${{ matrix.test-kind }} -m '${{ matrix.test-marker }}'

- name: Prepare Code Coverage Report
run: |
mv .coverage '.coverage_${{ matrix.test-marker }}_${{ matrix.test-kind }}_${{ matrix.os }}_${{ matrix.python }}'
ls .coverage*

- name: Upload Code Coverage
uses: actions/upload-artifact@v2
with:
name: code-cov
path: .coverage*

###############################################
################# SPARK-BUILD #################
###############################################
build-spark:
runs-on: ${{ matrix.os }}
needs: static-analysis
strategy:
matrix:
os: [ubuntu-latest]
java: [8]
spark: [2.4.8]
hadoop: [2.6]
python: [3.6]
# different kind of tests are located in tests/<unit|integration|smoke> folders
test-kind: ['unit']
# pytest markers configured in tox.ini. See https://docs.pytest.org/en/6.2.x/example/markers.html
test-marker: ['notebooks and spark and not gpu', 'spark and not notebooks and not gpu']

steps:
- uses: actions/checkout@v2
################# Install spark dependencies (java, spark & hadoop) #################
- name: Setup Java JDK
uses: actions/setup-java@v2.1.0
with:
java-version: ${{ matrix.java }}
distribution: 'adopt'

- name: Setup Apache Spark
uses: vemonet/setup-spark@v1
with:
# Apache Spark version to install, see https://spark.apache.org/downloads.html
spark-version: ${{ matrix.spark }}
# Hadoop version
hadoop-version: ${{ matrix.hadoop }}

################# Run Python tests #################
- name: Use Python ${{ matrix.python }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}

- name: Install build dependencies (tox)
run: |
python -m pip install --upgrade pip setuptools wheel
pip install tox

- name: Run ${{ matrix.test-kind }} tests ('${{ matrix.test-marker }}')
run: |
tox -e py -- tests/${{ matrix.test-kind }} -m '${{ matrix.test-marker }}'

- name: Prepare Code Coverage Report
run: |
mv .coverage '.coverage_${{ matrix.test-marker }}_${{ matrix.test-kind }}_${{ matrix.os }}_${{ matrix.python }}'
ls .coverage*

- name: Upload Code Coverage
uses: actions/upload-artifact@v2
with:
name: code-cov
path: .coverage*

###############################################
############ TEST COVERAGE SUMMARY ############
###############################################
collect-code-cov:
runs-on: ubuntu-latest
needs: [build-cpu, build-spark]
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2.2.2
with:
python-version: '3.6'

- name: Install dev-dependencies
run: |
pip install --upgrade pip setuptools wheel
python -m pip install coverage

- name: Download coverage reports from all previous jobs
uses: actions/download-artifact@v2
with:
name: code-cov

- name: Show downloaded coverage reports
run: ls .coverage*

# Merge code coverge reports so the coverage numbers are accurate across
# different runsof subsets of tests
- name: Merage coverage reports
run: |
python -m coverage combine .coverage*
python -m coverage report
python -m coverage xml

- name: Show merged report
run: |
ls *.xml

- name: Upload code coverage report to CodeCov
uses: codecov/codecov-action@v2.0.2
with:
fail_ci_if_error: true
files: ./coverage.xml
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ wheels/
.installed.cfg
*.egg
MANIFEST
license.txt

# PyInstaller
# Usually these files are written by a python script from a template
Expand All @@ -37,6 +38,7 @@ pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
junit
.tox/
.coverage
.coverage.*
Expand Down
10 changes: 0 additions & 10 deletions pytest.ini

This file was deleted.

2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@
"cmake>=3.18.4.post1",
"xlearn==0.40a1",
],
"dev": ["black>=18.6b4,<21", "pytest>=3.6.4"],
"dev": ["black>=18.6b4,<21", "pytest>=3.6.4", "pytest-cov>=2.12.1"],
}
# for the brave of heart
extras_require["all"] = list(set(sum([*extras_require.values()], [])))
Expand Down
Empty file added tests/__init__.py
Empty file.
1 change: 1 addition & 0 deletions tests/ci/azure_pipeline_test/dsvm_linux_template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ jobs:

conda activate ${{ parameters.conda_env }}
pip install pytest>=3.6.4 || exit -1
pip install pytest-cov

if [[ "${{ parameters.conda_env }}" == *"spark"* ]]; then
export PYSPARK_PYTHON=`which python`
Expand Down
Empty file added tests/integration/__init__.py
Empty file.
Empty file.
Empty file.
Empty file.
Empty file added tests/smoke/__init__.py
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file added tests/unit/__init__.py
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
Empty file.
67 changes: 67 additions & 0 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
[tox]
# py will use whatever the basepython `python` maps to from PATH
# you can use py38, for example, to choose a different version
# See https://tox.readthedocs.io/en/latest/config.html#tox-environments
envlist = py


[testenv]
# Reading additional dependencies to run the test
# https://tox.readthedocs.io/en/latest/example/basic.html#depending-on-requirements-txt-or-defining-constraints
; deps = -rrequirements-dev.txt
# similar to 'pip install recommenders-*.whl[all]'
extras = all
commands =
    # {posargs} will be substituted by arguments after the `--` when running.
    # This will allow running a subset of the test suite via tox.
    #
    # EX: tox -- -m "not spark and not gpu"
    #     will pass {-m "not spark and not gpu"} to `pytest`
    # See https://tox.readthedocs.io/en/latest/example/general.html for more details
    pytest {posargs}


[testenv:flake8]
deps = flake8
skip_install = True
commands = flake8 .


# Configurations for running pytest
[pytest]
log_cli = False
log_format = %(asctime)s %(levelname)s %(message)s
junit_family = xunit2
# This enables custom markers as decorators, e.g. "@pytest.mark.gpu"
markers =
    # markers allow us to run a faster subset of the tests:
    # EX: pytest -m "not spark and not gpu"
    # See https://docs.pytest.org/en/stable/example/markers.html#registering-markers
    deeprec: test deeprec model
    sequential: test sequential model
    notebooks: mark a test as notebooks test
    smoke: mark a test as smoke test
    integration: mark a test as integration test
    gpu: mark a test as gpu test
    spark: mark a test as spark test
    vw: mark a test as vowpal wabbit test
testpaths =
    tests
addopts =
    # reports all (except passed tests). See https://docs.pytest.org/en/latest/usage.html#detailed-summary-report
    -ra
    --durations 0
    --cov-append --cov=recommenders --cov-report=html --cov-report=term-missing --cov-report=xml --junitxml=junit/test-results.xml


[flake8]
; # Configs for flake8-import-order, see https://pypi.org/project/flake8-import-order/ for more info.
; import-order-style=edited
; application-import-names=recommenders, tests
# Native flake8 configs
max-line-length = 140
exclude =
    build, dist, docs, examples,
    tests
    .env*,.venv*  # local virtual environments
    .tox