From 4781f40eb684c2d6c8dd5d6659eb535b89bb1b04 Mon Sep 17 00:00:00 2001
From: olegsinyavskiy
Date: Wed, 2 Dec 2015 22:09:59 -0800
Subject: [PATCH] These changes speed up Travis testing time 2x using pytest
 and Travis configuration options.

Summary of changes:

- py.test is configured to display test profiling information showing the
  10 slowest tests. This should allow further speed-ups if anyone has ideas
  for a particular test. The slowest tests are usually the CIFAR dataset
  test and the tensorflow convolutions; it seems some other IT tests could
  be sped up as well.
- py.test is configured to run with pytest-xdist using 2 processes in
  parallel, because Travis provides multicore support (1.5 cores) and
  because the slowest CIFAR test spends most of its time on a download,
  which can run in parallel with other tests.
- Travis is configured to split the backend tests into a build matrix, so
  Theano and TensorFlow are tested in parallel instead of rerunning all the
  tests twice for Python 2.7.
- Temporary weight filenames (the 'temp.h5' files) in the tests are renamed
  to avoid clashes when tests run in parallel processes.
---
 .travis.yml                    | 24 +++++++++++++++---------
 keras/backend/__init__.py      |  6 ++++--
 pytest.ini                     |  7 +++++++
 tests/test_graph_model.py      |  4 ++--
 tests/test_sequential_model.py | 24 ++++++++++++------------
 5 files changed, 40 insertions(+), 25 deletions(-)
 create mode 100644 pytest.ini

diff --git a/.travis.yml b/.travis.yml
index 57dc66d9a..f2356fe3b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,9 +1,14 @@
 sudo: required
 dist: trusty
 language: python
-python:
-  - "2.7"
-  - "3.4"
+matrix:
+  include:
+    - python: 3.4
+      env: KERAS_BACKEND=theano
+    - python: 2.7
+      env: KERAS_BACKEND=theano
+    - python: 2.7
+      env: KERAS_BACKEND=tensorflow
 install:
   # code below is taken from http://conda.pydata.org/docs/travis.html
   # We do this conditionally because it saves us some downloading if the
@@ -23,7 +28,7 @@ install:
 
   - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION numpy scipy matplotlib pandas pytest h5py
   - source activate test-environment
-  - pip install pytest-cov python-coveralls
+  - pip install pytest-cov python-coveralls pytest-xdist
   - pip install git+git://github.com/Theano/Theano.git
   - python setup.py install
@@ -33,10 +38,11 @@ install:
     fi
 # command to run tests
 script:
-  - PYTHONPATH=$PWD:$PYTHONPATH py.test -v --cov-report term-missing --cov keras tests/
-  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
-      sed -i -e 's/theano/tensorflow/g' ~/.keras/keras.json;
-      PYTHONPATH=$PWD:$PYTHONPATH py.test -v --cov-report term-missing --cov keras tests/;
-    fi
+  # run keras backend init to initialize backend config
+  - python -c "import keras.backend"
+  # set up keras backend
+  - sed -i -e 's/"backend":[[:space:]]*"[^"]*/"backend":\ "'$KERAS_BACKEND'/g' ~/.keras/keras.json;
+  - echo -e "Running tests with the following config:\n$(cat ~/.keras/keras.json)"
+  - PYTHONPATH=$PWD:$PYTHONPATH py.test tests/
 after_success:
   - coveralls
diff --git a/keras/backend/__init__.py b/keras/backend/__init__.py
index 1416a3bf3..be33bde10 100644
--- a/keras/backend/__init__.py
+++ b/keras/backend/__init__.py
@@ -27,7 +27,9 @@ else:
     _config = {'floatx': floatx(),
                'epsilon': epsilon(),
                'backend': _BACKEND}
-    json.dump(_config, open(_config_path, 'w'))
+    with open(_config_path, 'w') as f:
+        # add a trailing newline so that bash 'cat' displays the content correctly
+        f.write(json.dumps(_config) + '\n')
 
 if _BACKEND == 'theano':
     print('Using Theano backend.')
@@ -36,4 +38,4 @@ elif _BACKEND == 'tensorflow':
     print('Using TensorFlow backend.')
     from .tensorflow_backend import *
 else:
-    raise Exception('Unknown backend: ' + str(backend))
+    raise Exception('Unknown backend: ' + str(_BACKEND))
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 000000000..faf7da6fb
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,7 @@
+# Configuration of py.test
+[pytest]
+addopts=-v
+        -n 2
+        --durations=10
+        --cov-report term-missing
+        --cov=keras
diff --git a/tests/test_graph_model.py b/tests/test_graph_model.py
index fa63c59d8..043518a5c 100644
--- a/tests/test_graph_model.py
+++ b/tests/test_graph_model.py
@@ -139,7 +139,7 @@ class TestGraph(unittest.TestCase):
         assert(loss < 4.)
 
         print('test weight saving')
-        graph.save_weights('temp.h5', overwrite=True)
+        graph.save_weights('test_2o_1i_weights_temp.h5', overwrite=True)
         graph = Graph()
         graph.add_input(name='input1', input_shape=(32,))
         graph.add_node(Dense(16), name='dense1', input='input1')
@@ -148,7 +148,7 @@ class TestGraph(unittest.TestCase):
         graph.add_output(name='output1', input='dense2')
         graph.add_output(name='output2', input='dense3')
         graph.compile('rmsprop', {'output1': 'mse', 'output2': 'mse'})
-        graph.load_weights('temp.h5')
+        graph.load_weights('test_2o_1i_weights_temp.h5')
         nloss = graph.evaluate({'input1': X_test, 'output1': y_test, 'output2': y2_test})
         print(nloss)
         assert(loss == nloss)
diff --git a/tests/test_sequential_model.py b/tests/test_sequential_model.py
index 66855c02e..ae791b94c 100644
--- a/tests/test_sequential_model.py
+++ b/tests/test_sequential_model.py
@@ -61,14 +61,14 @@ class TestSequential(unittest.TestCase):
         model.get_config(verbose=0)
 
         print('test weight saving')
-        model.save_weights('temp.h5', overwrite=True)
+        model.save_weights('test_sequential_temp.h5', overwrite=True)
         model = Sequential()
         model.add(Dense(nb_hidden, input_shape=(input_dim,)))
         model.add(Activation('relu'))
         model.add(Dense(nb_class))
         model.add(Activation('softmax'))
         model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
-        model.load_weights('temp.h5')
+        model.load_weights('test_sequential_temp.h5')
 
         nloss = model.evaluate(X_train, y_train, verbose=0)
         assert(loss == nloss)
@@ -114,7 +114,7 @@ class TestSequential(unittest.TestCase):
         model.get_config(verbose=0)
 
         print('test weight saving')
-        model.save_weights('temp.h5', overwrite=True)
+        model.save_weights('test_merge_sum_temp.h5', overwrite=True)
         left = Sequential()
         left.add(Dense(nb_hidden, input_shape=(input_dim,)))
         left.add(Activation('relu'))
@@ -125,7 +125,7 @@ class TestSequential(unittest.TestCase):
         model.add(Merge([left, right], mode='sum'))
         model.add(Dense(nb_class))
         model.add(Activation('softmax'))
-        model.load_weights('temp.h5')
+        model.load_weights('test_merge_sum_temp.h5')
         model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
 
         nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
@@ -205,7 +205,7 @@ class TestSequential(unittest.TestCase):
         model.get_config(verbose=0)
 
         print('test weight saving')
-        model.save_weights('temp.h5', overwrite=True)
+        model.save_weights('test_merge_concat_temp.h5', overwrite=True)
         left = Sequential()
         left.add(Dense(nb_hidden, input_shape=(input_dim,)))
         left.add(Activation('relu'))
@@ -221,7 +221,7 @@ class TestSequential(unittest.TestCase):
         model.add(Activation('softmax'))
 
         model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
-        model.load_weights('temp.h5')
+        model.load_weights('test_merge_concat_temp.h5')
 
         nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
         assert(loss == nloss)
@@ -268,8 +268,8 @@ class TestSequential(unittest.TestCase):
         model.predict_proba([X_test, X_test, X_test], verbose=0)
         model.get_config(verbose=0)
 
-        model.save_weights('temp.h5', overwrite=True)
-        model.load_weights('temp.h5')
+        model.save_weights('test_merge_recursivity_temp.h5', overwrite=True)
+        model.load_weights('test_merge_recursivity_temp.h5')
 
         nloss = model.evaluate([X_train, X_train, X_train], y_train, verbose=0)
         print(nloss)
@@ -305,8 +305,8 @@ class TestSequential(unittest.TestCase):
         model.predict_proba(X_test, verbose=0)
         model.get_config(verbose=0)
 
-        model.save_weights('temp.h5', overwrite=True)
-        model.load_weights('temp.h5')
+        model.save_weights('test_merge_overlap_temp.h5', overwrite=True)
+        model.load_weights('test_merge_overlap_temp.h5')
 
         nloss = model.evaluate(X_train, y_train, verbose=0)
         print(nloss)
@@ -359,7 +359,7 @@ class TestSequential(unittest.TestCase):
         model.get_config(verbose=0)
 
         print('test weight saving')
-        model.save_weights('temp.h5', overwrite=True)
+        model.save_weights('test_lambda_temp.h5', overwrite=True)
         left = Sequential()
         left.add(Dense(nb_hidden, input_shape=(input_dim,)))
         left.add(Activation('relu'))
@@ -371,7 +371,7 @@ class TestSequential(unittest.TestCase):
                         output_shape=output_shape))
         model.add(Dense(nb_class))
         model.add(Lambda(activation))
-        model.load_weights('temp.h5')
+        model.load_weights('test_lambda_temp.h5')
         model.compile(loss='categorical_crossentropy', optimizer='rmsprop')
 
         nloss = model.evaluate([X_train, X_train], y_train, verbose=0)
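
For reference, a minimal sketch of reproducing one cell of the new Travis matrix
locally, assuming keras is installed, GNU sed is available, and the repository root
is the current directory; the backend value is a local choice (Travis sets it per
matrix entry), and pytest.ini supplies -v, -n 2, --durations=10 and the coverage
flags automatically:

    # choose the backend for this run (hypothetical local value)
    export KERAS_BACKEND=tensorflow
    # importing keras.backend writes a default ~/.keras/keras.json on first run
    python -c "import keras.backend"
    # rewrite the "backend" field in the config, same sed as in .travis.yml
    sed -i -e 's/"backend":[[:space:]]*"[^"]*/"backend":\ "'$KERAS_BACKEND'/g' ~/.keras/keras.json
    cat ~/.keras/keras.json
    # pytest.ini adds verbosity, 2 xdist workers, the 10-slowest report and coverage
    PYTHONPATH=$PWD:$PYTHONPATH py.test tests/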