Skip to content

Commit

Permalink
Merge pull request fastai#144 from andyli/travis
Browse files Browse the repository at this point in the history
TravisCI for automated testing
  • Loading branch information
jph00 authored Aug 16, 2017
2 parents afb6c4f + 40e38e0 commit 22b07d5
Show file tree
Hide file tree
Showing 4 changed files with 78 additions and 11 deletions.
51 changes: 51 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
language: python
# Quoted so YAML keeps it a string, not a float (1.1/1.2 implicit-typing trap).
python: "2.7"
dist: trusty
sudo: false

env:
  global:
    # Shared flags for the notebook-execution steps in `script:` below.
    - NBCONVERT_ARGS="--to html --execute --ExecutePreprocessor.kernel_name=python --ExecutePreprocessor.timeout=-1"

install:
  # xvfb — headless display for matplotlib/GUI backends used by the notebooks.
  # https://docs.travis-ci.com/user/gui-and-headless-browsers/
  - "export DISPLAY=:99.0"
  - "sh -e /etc/init.d/xvfb start"
  # install conda
  # https://conda.io/docs/travis.html
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
    else
      wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
    fi
  # -f: proceed even if $HOME/miniconda already exists (restored from cache).
  - bash miniconda.sh -b -f -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  - conda config --add channels conda-forge
  # Useful for debugging any issues with conda.
  - conda info -a
  - conda create -q --name fastai --file requirements.txt python=$TRAVIS_PYTHON_VERSION
  - source activate fastai
  - conda list

script:
  # Fetch the dogscats dataset once; the directory is cached below.
  - mkdir -p deeplearning1/nbs/data
  - if [ ! -d deeplearning1/nbs/data/dogscats ]; then
      curl -sSLO "http://files.fast.ai/data/dogscats.zip" &&
      unzip -qq -d deeplearning1/nbs/data dogscats.zip;
    fi
  # travis_wait extends the no-output timeout for the long notebook runs.
  - travis_wait jupyter nbconvert deeplearning1/nbs/lesson1.ipynb $NBCONVERT_ARGS
  - travis_wait jupyter nbconvert deeplearning1/nbs/lesson2.ipynb $NBCONVERT_ARGS

before_cache:
  # We only cache the archives but not the extracted packages,
  # such that the miniconda installer script can extract correctly.
  - rm -rf $HOME/miniconda/pkgs/*/
cache:
  timeout: 1000
  directories:
    - $HOME/miniconda/pkgs
    - $HOME/.keras/models
    - deeplearning1/nbs/data
4 changes: 2 additions & 2 deletions deeplearning1/nbs/lesson1.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,8 @@
},
"outputs": [],
"source": [
"path = \"data/dogscats/\"\n",
"#path = \"data/dogscats/sample/\""
"# path = \"data/dogscats/\"\n",
"path = \"data/dogscats/sample/\""
]
},
{
Expand Down
24 changes: 15 additions & 9 deletions deeplearning1/nbs/lesson2.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -374,8 +374,8 @@
},
"outputs": [],
"source": [
"#path = \"data/dogscats/sample/\"\n",
"path = \"data/dogscats/\"\n",
"path = \"data/dogscats/sample/\"\n",
"# path = \"data/dogscats/\"\n",
"model_path = path + 'models/'\n",
"if not os.path.exists(model_path): os.mkdir(model_path)"
]
Expand All @@ -395,8 +395,8 @@
},
"outputs": [],
"source": [
"batch_size=100\n",
"#batch_size=4"
"# batch_size=100\n",
"batch_size=4"
]
},
{
Expand Down Expand Up @@ -1160,7 +1160,10 @@
"#3. The images we were most confident were cats, but are actually dogs\n",
"incorrect_cats = np.where((preds==0) & (preds!=val_labels[:,1]))[0]\n",
"most_incorrect_cats = np.argsort(probs[incorrect_cats])[::-1][:n_view]\n",
"plots_idx(incorrect_cats[most_incorrect_cats], probs[incorrect_cats][most_incorrect_cats])"
"if len(most_incorrect_cats):\n",
" plots_idx(incorrect_cats[most_incorrect_cats], probs[incorrect_cats][most_incorrect_cats])\n",
"else:\n",
" print('No incorrect cats!')"
]
},
{
Expand All @@ -1187,7 +1190,10 @@
"#3. The images we were most confident were dogs, but are actually cats\n",
"incorrect_dogs = np.where((preds==1) & (preds!=val_labels[:,1]))[0]\n",
"most_incorrect_dogs = np.argsort(probs[incorrect_dogs])[:n_view]\n",
"plots_idx(incorrect_dogs[most_incorrect_dogs], 1-probs[incorrect_dogs][most_incorrect_dogs])"
"if len(most_incorrect_dogs):\n",
" plots_idx(incorrect_dogs[most_incorrect_dogs], 1-probs[incorrect_dogs][most_incorrect_dogs])\n",
"else:\n",
" print('No incorrect dogs!')"
]
},
{
Expand Down Expand Up @@ -1519,8 +1525,8 @@
"outputs": [],
"source": [
"def fit_model(model, batches, val_batches, nb_epoch=1):\n",
" model.fit_generator(batches, samples_per_epoch=batches.N, nb_epoch=nb_epoch, \n",
" validation_data=val_batches, nb_val_samples=val_batches.N)"
" model.fit_generator(batches, samples_per_epoch=batches.n, nb_epoch=nb_epoch, \n",
" validation_data=val_batches, nb_val_samples=val_batches.n)"
]
},
{
Expand Down Expand Up @@ -1958,7 +1964,7 @@
"outputs": [],
"source": [
"model.load_weights(model_path+'finetune2.h5')\n",
"model.evaluate_generator(get_batches(path+'valid', gen, False, batch_size*2), val_batches.N)"
"model.evaluate_generator(get_batches(path+'valid', gen, False, batch_size*2), val_batches.n)"
]
},
{
Expand Down
10 changes: 10 additions & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Package list consumed by `conda create --file requirements.txt` in .travis.yml
# (comment lines are valid in both conda spec files and pip requirements).
# keras is pinned to 1.2.2 — presumably the notebooks use the Keras 1.x API;
# TODO(review): confirm before bumping.
jupyter
matplotlib
pillow
keras==1.2.2
theano
tensorflow
pandas
scikit-learn
bcolz
sympy

0 comments on commit 22b07d5

Please sign in to comment.