Merge pull request #47 from hammerlab/fix-travis
Fix installation of dependencies on Travis using conda
tavinathanson committed Feb 19, 2016
2 parents 434ffa0 + 0953984 commit a2d0849
Showing 8 changed files with 49 additions and 31 deletions.
42 changes: 30 additions & 12 deletions .travis.yml
@@ -7,25 +7,43 @@ addons:
   apt:
     packages:
       # Needed for NetMHC
-      - tcsh
-env:
-  global:
-    # MHC_BUNDLE_PASS
-    - secure: "TIminZrp9m1kMXhemqz8Zx4BjojIoEYZJnNrDrL6T/pKMpP5FQ6sprj8meGfNse4ApRIPmp5lhqxbPOe7Cg7ooetIcORekjRueHwRkYXqgMbgffgZYuEJTAGLKFsBDEXFD1kWT7igmvXFsP1T0bb1TxRPK93Q5G+e1dEAm6Iqwo="
-# Setup anaconda for easily running scipy on Travis; see https://gist.github.com/dan-blanchard/7045057
+      - tcsh
+      # install pandoc for use with pypandoc for converting the README
+      # from markdown to RST
+      - pandoc
 before_install:
-  - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
-  - chmod +x miniconda.sh
-  - ./miniconda.sh -b
-  - export PATH=/home/travis/miniconda/bin:$PATH
-  - conda update --yes conda
+  # Commands below copied from: http://conda.pydata.org/docs/travis.html
+  # We do this conditionally because it saves us some downloading if the
+  # version is the same.
+  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
+      wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
+    else
+      wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
+    fi
+  - bash miniconda.sh -b -p $HOME/miniconda
+  - export PATH="$HOME/miniconda/bin:$PATH"
+  # reset the shell's lookup table for program name to path mappings
+  - hash -r
+  - conda config --set always_yes yes --set changeps1 no
+  - conda update -q conda
+  # Useful for debugging any issues with conda
+  - conda info -a
   # install netmhcbundle
   - git clone https://mhcbundle:$MHC_BUNDLE_PASS@github.com/hammerlab/netmhc-bundle.git
   - export NETMHC_BUNDLE_HOME=$PWD/netmhc-bundle
   - mkdir tmp
   - export NETMHC_BUNDLE_TMPDIR=$PWD/tmp
   - export PATH=$PATH:$NETMHC_BUNDLE_HOME/bin
+env:
+  global:
+    # MHC_BUNDLE_PASS
+    - secure: "TIminZrp9m1kMXhemqz8Zx4BjojIoEYZJnNrDrL6T/pKMpP5FQ6sprj8meGfNse4ApRIPmp5lhqxbPOe7Cg7ooetIcORekjRueHwRkYXqgMbgffgZYuEJTAGLKFsBDEXFD1kWT7igmvXFsP1T0bb1TxRPK93Q5G+e1dEAm6Iqwo="
 install:
-  - conda install --yes python=$TRAVIS_PYTHON_VERSION numpy scipy nose pandas matplotlib
+  - >
+    conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION
+    numpy scipy nose pandas matplotlib
+  - source activate test-environment
+  - pip install pypandoc
   - pip install -r requirements.txt
   - pip install .
   - pip install coveralls
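The new install step builds everything inside an isolated conda environment named test-environment instead of installing into the root environment. Below is a minimal sanity check, not part of the repository, that assumes it is run after `source activate test-environment`.

```python
# Hypothetical check, not part of the repo: confirm the conda test environment
# created by .travis.yml exposes the scientific stack the test suite needs.
import importlib

for package_name in ["numpy", "scipy", "nose", "pandas", "matplotlib"]:
    module = importlib.import_module(package_name)
    print(package_name, getattr(module, "__version__", "unknown"))
```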
2 changes: 1 addition & 1 deletion README.md
@@ -1,7 +1,7 @@
 [![Build Status](https://travis-ci.org/hammerlab/mhctools.svg?branch=master)](https://travis-ci.org/hammerlab/mhctools) [![Coverage Status](https://coveralls.io/repos/hammerlab/mhctools/badge.svg?branch=master)](https://coveralls.io/r/hammerlab/mhctools?branch=master)
 
 # mhctools
-Python interface to running command-line and web-based MHC binding predictors. 
+Python interface to running command-line and web-based MHC binding predictors.
 
 ## Example
 
3 changes: 1 addition & 2 deletions mhctools/alleles.py
@@ -171,7 +171,7 @@ def parse_allele_name(name, species_prefix=None):
         3) allele family
         4) allele code
 
-    If species_prefix is provided, that is used instead of getting the species prefix from the name. 
+    If species_prefix is provided, that is used instead of getting the species prefix from the name.
     (And in that case, a species prefix in the name will result in an error being raised.)
 
     For example, in all of the following inputs:
@@ -338,7 +338,6 @@ def compact_allele_name(raw_allele):
         HLA-DPA1*01:05-DPB1*100:01 into DPA10105-DPB110001
     """
     parsed_alleles = parse_classi_or_classii_allele_name(raw_allele)
-    species = parsed_alleles[0].species
    normalized_list = []
    for parsed_allele in parsed_alleles:
        if len(parsed_allele.allele_family) > 0:
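The compact_allele_name docstring above already gives the intended mapping (HLA-DPA1*01:05-DPB1*100:01 becomes DPA10105-DPB110001). Below is a short usage sketch of the helpers named in this diff; the import path is assumed from the file name mhctools/alleles.py, and the exact return values depend on the installed version.

```python
# Usage sketch for the allele helpers touched in this diff; the import path is
# assumed from the file name mhctools/alleles.py.
from mhctools.alleles import compact_allele_name, parse_allele_name

# Per the docstring above, a class II alpha/beta pair collapses into one
# compact string, e.g. HLA-DPA1*01:05-DPB1*100:01 -> DPA10105-DPB110001.
print(compact_allele_name("HLA-DPA1*01:05-DPB1*100:01"))

# parse_allele_name splits an allele into species prefix, gene name,
# allele family and allele code, as listed in the docstring.
print(parse_allele_name("HLA-A*02:01"))
```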
22 changes: 11 additions & 11 deletions mhctools/epitope_collection_builder.py
@@ -102,20 +102,20 @@ def add_binding_prediction(
         source_sequence = str(self.fasta_dictionary[source_sequence_key])
 
         binding_prediction = BindingPrediction(
-                source_sequence_key=source_sequence_key,
-                source_sequence=source_sequence,
-                offset=offset,
-                allele=normalize_allele_name(allele),
-                peptide=peptide,
-                length=len(peptide),
-                value=ic50,
-                percentile_rank=rank,
-                prediction_method_name=self.prediction_method_name,
-                measure=self.binding_measure)
+            source_sequence_key=source_sequence_key,
+            source_sequence=source_sequence,
+            offset=offset,
+            allele=normalize_allele_name(allele),
+            peptide=peptide,
+            length=len(peptide),
+            value=ic50,
+            percentile_rank=rank,
+            prediction_method_name=self.prediction_method_name,
+            measure=self.binding_measure)
         self.binding_predictions.append(binding_prediction)
 
     def get_collection(self):
         return EpitopeCollection(self.binding_predictions)
 
     def get_dataframe(self):
-        return self.get_collection.dataframe()
+        return self.get_collection().dataframe()
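The get_dataframe change is the one behavioral fix in this file: self.get_collection without parentheses is a bound method object, so chaining .dataframe() onto it raises AttributeError. The toy class below, not the mhctools source, reproduces the bug and the fix.

```python
# Toy reproduction of the get_dataframe bug fixed above: referencing a method
# without calling it yields the bound method, not its return value.
class Builder(object):
    def get_collection(self):
        return [1, 2, 3]

    def get_dataframe_buggy(self):
        # Raises AttributeError because a bound method has no .count attribute
        return self.get_collection.count(1)

    def get_dataframe_fixed(self):
        # Calling the method first returns the list, which does have .count()
        return self.get_collection().count(1)


builder = Builder()
print(builder.get_dataframe_fixed())  # 1
try:
    builder.get_dataframe_buggy()
except AttributeError as error:
    print(error)
```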
2 changes: 2 additions & 0 deletions mhctools/iedb.py
@@ -16,7 +16,9 @@
 import logging
 import io
 
+# pylint: disable=import-error
 from six.moves.urllib.request import urlopen, Request
+# pylint: disable=import-error
 from six.moves.urllib.parse import urlencode
 import pandas as pd

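six.moves is generated at import time, so pylint's static import checker flags it even though it resolves correctly at runtime on both Python 2 and 3; the added comments silence that false positive. Below is a small standalone sketch of the same pattern; the URL and query parameters are placeholders, not the IEDB request mhctools builds.

```python
# Standalone sketch of the six.moves pattern used in iedb.py; the URL and
# query parameters below are placeholders, not mhctools' real IEDB request.
# pylint: disable=import-error
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import Request

query = urlencode({"allele": "HLA-A*02:01", "length": 9})
request = Request("http://example.org/mhci/", data=query.encode("utf-8"))
# Building the Request does not hit the network; it just bundles URL + body.
print(request.get_full_url())
```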
2 changes: 0 additions & 2 deletions mhctools/netmhc.py
@@ -17,9 +17,7 @@
 from .base_commandline_predictor import BaseCommandlinePredictor
 from .file_formats import parse_netmhc_stdout
 
-
 class NetMHC(BaseCommandlinePredictor):
-
     def __init__(
         self,
         alleles,
1 change: 0 additions & 1 deletion mhctools/netmhcii_pan.py
@@ -20,7 +20,6 @@
 
 
 class NetMHCIIpan(BaseCommandlinePredictor):
-
     def __init__(
         self,
         alleles,
6 changes: 4 additions & 2 deletions mhctools/process_helpers.py
@@ -18,6 +18,7 @@
 from subprocess import Popen, CalledProcessError
 import time
 
+# pylint: disable=import-error
 from six.moves.queue import Queue
 
 class AsyncProcess(object):
@@ -123,7 +124,7 @@ def add_to_queue(process):
         # Are there any done processes?
         to_remove = []
         for possibly_done in processes.queue:
-            if possibly_done.poll() != None:
+            if possibly_done.poll() is not None:
                 possibly_done.wait()
                 to_remove.append(possibly_done)
         # Remove them from the queue and stop checking
@@ -140,6 +141,7 @@ def add_to_queue(process):
             processes.get().wait()
 
     elapsed_time = time.time() - start_time
-    logging.info("Ran %d commands in %0.4f seconds",
+    logging.info(
+        "Ran %d commands in %0.4f seconds",
         len(multiple_args_dict),
         elapsed_time)
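Both changes in this file are stylistic: Popen.poll() returns None while the child is still running and its exit code (possibly 0) once it finishes, so `is not None` is the idiomatic completion check, and the logging call keeps lazy %-style formatting. Below is a standalone sketch of the same polling pattern, not the mhctools implementation; the sleep commands are placeholders and assume a Unix-like environment.

```python
# Standalone sketch of polling child processes the way process_helpers.py does;
# the `sleep` commands are placeholders and assume a Unix-like environment.
import logging
import time
from subprocess import Popen

logging.basicConfig(level=logging.INFO)

commands = [["sleep", "1"], ["sleep", "2"]]
start_time = time.time()
processes = [Popen(args) for args in commands]

while processes:
    still_running = []
    for process in processes:
        if process.poll() is not None:
            # Exit code is available: the child has finished, so reap it.
            process.wait()
        else:
            still_running.append(process)
    processes = still_running
    time.sleep(0.1)

elapsed_time = time.time() - start_time
logging.info(
    "Ran %d commands in %0.4f seconds",
    len(commands),
    elapsed_time)
```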
