# .travis.yml
dist: bionic
language: python
python:
  - "3.7"
before_install:
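  # Only Python 3.7 appears in the matrix above, so the 2.7 branch below never
  # runs and is presumably left over from an earlier configuration.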
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
    else
      wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
    fi
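  # -b runs the Miniconda installer in batch (non-interactive) mode; -p sets the install prefix.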
  - bash miniconda.sh -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  # reset the shell's lookup table for program name to path mappings
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  # Useful for debugging any issues with conda
  - conda info -a
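  # Report the worker's available memory (in megabytes).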
  - free -m
addons:
  apt:
    packages:
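      # pandoc is the system-level dependency of pypandoc, which is installed
      # into the conda environment below.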
      - pandoc
install:
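  # The '>' folded scalar below joins its indented lines into a single shell
  # command. TensorFlow is pinned below 2.0, presumably because the code
  # targets the 1.x API.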
  - >
      conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION
      numpy scipy nose pandas matplotlib mkl-service 'tensorflow>=1.1.0,<2.0.0'
      pypandoc sphinx numpydoc
  - source activate test-environment
  - pip install nose-timer
  - pip install -r requirements.txt
  - pip install -r docs/requirements.txt
  - pip install .
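  # Log the exact versions of every installed package for easier debugging of CI failures.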
  - pip freeze
env:
  global:
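    # PYTHONHASHSEED=0 makes Python's string hashing deterministic between runs;
    # KERAS_BACKEND selects the TensorFlow backend for Keras; KMP_SETTINGS=TRUE
    # makes Intel's OpenMP runtime print its configuration, and OMP_NUM_THREADS=1
    # restricts it to a single thread, presumably to avoid oversubscribing the
    # Travis worker.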
    - PYTHONHASHSEED=0
    - KERAS_BACKEND=tensorflow
    - KMP_SETTINGS=TRUE
    - OMP_NUM_THREADS=1

script:
  # download data and models, then run tests
  - mkdir -p /tmp/downloads
  # We download using wget to avoid sporadic SSL error on travis from Python.
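  # Each "$(mhcflurry-downloads url ...)" substitution expands to the release URL
  # for that artifact; wget's -P flag places all of the files in /tmp/downloads.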
  -
    wget
      $(mhcflurry-downloads url data_curated)
      $(mhcflurry-downloads url data_mass_spec_annotated)
      $(mhcflurry-downloads url models_class1)
      $(mhcflurry-downloads url models_class1_presentation)
      $(mhcflurry-downloads url models_class1_processing)
      $(mhcflurry-downloads url models_class1_processing_variants)
      $(mhcflurry-downloads url models_class1_pan)
      $(mhcflurry-downloads url models_class1_pan_variants)
      $(mhcflurry-downloads url allele_sequences)
      -P /tmp/downloads
  - ls -lh /tmp/downloads
  -
    mhcflurry-downloads fetch
      data_curated
      data_mass_spec_annotated
      models_class1
      models_class1_presentation
      models_class1_processing
      models_class1_processing_variants
      models_class1_pan
      models_class1_pan_variants
      allele_sequences
      --already-downloaded-dir /tmp/downloads
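  # --already-downloaded-dir points fetch at the files wget retrieved above, so
  # nothing is downloaded a second time.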
  - mhcflurry-downloads info  # just to test this command works
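  # --with-timer comes from the nose-timer plugin installed earlier and reports
  # per-test runtimes; -s disables output capture and -v enables verbose output.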
  - nosetests --with-timer -sv test
  - cd docs && bash ./doctest.sh