diff --git a/examples/addition_rnn.py b/examples/addition_rnn.py
index 8703dcafb..2b013e40d 100644
--- a/examples/addition_rnn.py
+++ b/examples/addition_rnn.py
@@ -29,8 +29,7 @@ Five digits inverted:
 from __future__ import print_function
 from keras.models import Sequential
 from keras.engine.training import slice_X
-from keras.layers.core import Activation, TimeDistributedDense, RepeatVector
-from keras.layers import recurrent
+from keras.layers import Activation, TimeDistributedDense, RepeatVector, recurrent
 import numpy as np
 from six.moves import range
 
diff --git a/examples/antirectifier.py b/examples/antirectifier.py
index f818a8624..19d9262fc 100644
--- a/examples/antirectifier.py
+++ b/examples/antirectifier.py
@@ -12,7 +12,7 @@ backend (`K`), our code can run both on TensorFlow and Theano.
 
 from __future__ import print_function
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Layer, Activation
+from keras.layers import Dense, Dropout, Layer, Activation
 from keras.datasets import mnist
 from keras import backend as K
 from keras.utils import np_utils
diff --git a/examples/babi_memnn.py b/examples/babi_memnn.py
index 6eb50c1e6..1db3cb517 100644
--- a/examples/babi_memnn.py
+++ b/examples/babi_memnn.py
@@ -16,8 +16,8 @@ Time per epoch: 3s on CPU (core i7).
 from __future__ import print_function
 from keras.models import Sequential
 from keras.layers.embeddings import Embedding
-from keras.layers.core import Activation, Dense, Merge, Permute, Dropout
-from keras.layers.recurrent import LSTM
+from keras.layers import Activation, Dense, Merge, Permute, Dropout
+from keras.layers import LSTM
 from keras.utils.data_utils import get_file
 from keras.preprocessing.sequence import pad_sequences
 from functools import reduce
diff --git a/examples/babi_rnn.py b/examples/babi_rnn.py
index 36b89831d..b783fe2d9 100644
--- a/examples/babi_rnn.py
+++ b/examples/babi_rnn.py
@@ -66,7 +66,7 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.utils.data_utils import get_file
 from keras.layers.embeddings import Embedding
-from keras.layers.core import Dense, Merge, Dropout, RepeatVector
+from keras.layers import Dense, Merge, Dropout, RepeatVector
 from keras.layers import recurrent
 from keras.models import Sequential
 from keras.preprocessing.sequence import pad_sequences
diff --git a/examples/cifar10_cnn.py b/examples/cifar10_cnn.py
index 8c8582320..fa6304a5d 100644
--- a/examples/cifar10_cnn.py
+++ b/examples/cifar10_cnn.py
@@ -15,8 +15,8 @@ from __future__ import print_function
 from keras.datasets import cifar10
 from keras.preprocessing.image import ImageDataGenerator
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation, Flatten
-from keras.layers.convolutional import Convolution2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Convolution2D, MaxPooling2D
 from keras.optimizers import SGD
 from keras.utils import np_utils
 
diff --git a/examples/deep_dream.py b/examples/deep_dream.py
index 3a379258f..b30923d9a 100644
--- a/examples/deep_dream.py
+++ b/examples/deep_dream.py
@@ -24,7 +24,7 @@
 import h5py
 import os
 from keras.models import Sequential
-from keras.layers.convolutional import Convolution2D, ZeroPadding2D, MaxPooling2D
+from keras.layers import Convolution2D, ZeroPadding2D, MaxPooling2D
 from keras import backend as K
 
 parser = argparse.ArgumentParser(description='Deep Dreams with Keras.')
diff --git a/examples/imdb_cnn.py b/examples/imdb_cnn.py
index e4bc33d5e..fd8c6ec54 100644
--- a/examples/imdb_cnn.py
+++ b/examples/imdb_cnn.py
@@ -12,9 +12,9 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.preprocessing import sequence
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation, Lambda
-from keras.layers.embeddings import Embedding
-from keras.layers.convolutional import Convolution1D
+from keras.layers import Dense, Dropout, Activation, Lambda
+from keras.layers import Embedding
+from keras.layers import Convolution1D
 from keras.datasets import imdb
 from keras import backend as K
 
diff --git a/examples/imdb_cnn_lstm.py b/examples/imdb_cnn_lstm.py
index 8a557b79a..19c2fb86c 100644
--- a/examples/imdb_cnn_lstm.py
+++ b/examples/imdb_cnn_lstm.py
@@ -9,10 +9,10 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.preprocessing import sequence
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation
-from keras.layers.embeddings import Embedding
-from keras.layers.recurrent import LSTM, GRU, SimpleRNN
-from keras.layers.convolutional import Convolution1D, MaxPooling1D
+from keras.layers import Dense, Dropout, Activation
+from keras.layers import Embedding
+from keras.layers import LSTM, GRU, SimpleRNN
+from keras.layers import Convolution1D, MaxPooling1D
 from keras.datasets import imdb
 
diff --git a/examples/imdb_lstm.py b/examples/imdb_lstm.py
index 46faef6db..b1b922e64 100644
--- a/examples/imdb_lstm.py
+++ b/examples/imdb_lstm.py
@@ -19,9 +19,8 @@ np.random.seed(1337)  # for reproducibility
 from keras.preprocessing import sequence
 from keras.utils import np_utils
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation
-from keras.layers.embeddings import Embedding
-from keras.layers.recurrent import LSTM, SimpleRNN, GRU
+from keras.layers import Dense, Dropout, Activation, Embedding
+from keras.layers import LSTM, SimpleRNN, GRU
 from keras.datasets import imdb
 
 max_features = 20000
diff --git a/examples/lstm_text_generation.py b/examples/lstm_text_generation.py
index a5212edbf..5ec7a790e 100644
--- a/examples/lstm_text_generation.py
+++ b/examples/lstm_text_generation.py
@@ -12,8 +12,8 @@ has at least ~100k characters. ~1M is better.
 
 from __future__ import print_function
 from keras.models import Sequential
-from keras.layers.core import Dense, Activation, Dropout
-from keras.layers.recurrent import LSTM
+from keras.layers import Dense, Activation, Dropout
+from keras.layers import LSTM
 from keras.utils.data_utils import get_file
 import numpy as np
 import random
diff --git a/examples/mnist_cnn.py b/examples/mnist_cnn.py
index 930006cfc..656ef7f30 100644
--- a/examples/mnist_cnn.py
+++ b/examples/mnist_cnn.py
@@ -11,8 +11,8 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.datasets import mnist
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation, Flatten
-from keras.layers.convolutional import Convolution2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Convolution2D, MaxPooling2D
 from keras.utils import np_utils
 
 batch_size = 128
diff --git a/examples/mnist_irnn.py b/examples/mnist_irnn.py
index cf4ad8a14..e42dd0de7 100644
--- a/examples/mnist_irnn.py
+++ b/examples/mnist_irnn.py
@@ -17,9 +17,9 @@
 from __future__ import print_function
 from keras.datasets import mnist
 from keras.models import Sequential
-from keras.layers.core import Dense, Activation
+from keras.layers import Dense, Activation
+from keras.layers import SimpleRNN
 from keras.initializations import normal, identity
-from keras.layers.recurrent import SimpleRNN
 from keras.optimizers import RMSprop
 from keras.utils import np_utils
 
diff --git a/examples/mnist_sklearn_wrapper.py b/examples/mnist_sklearn_wrapper.py
index 6e7e0f32e..d58695927 100644
--- a/examples/mnist_sklearn_wrapper.py
+++ b/examples/mnist_sklearn_wrapper.py
@@ -9,8 +9,8 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.datasets import mnist
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation, Flatten
-from keras.layers.convolutional import Convolution2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Convolution2D, MaxPooling2D
 from keras.utils import np_utils
 from keras.wrappers.scikit_learn import KerasClassifier
 from sklearn.grid_search import GridSearchCV
diff --git a/examples/mnist_transfer_cnn.py b/examples/mnist_transfer_cnn.py
index 6e6331706..22d42ca67 100644
--- a/examples/mnist_transfer_cnn.py
+++ b/examples/mnist_transfer_cnn.py
@@ -19,8 +19,8 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.datasets import mnist
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation, Flatten
-from keras.layers.convolutional import Convolution2D, MaxPooling2D
+from keras.layers import Dense, Dropout, Activation, Flatten
+from keras.layers import Convolution2D, MaxPooling2D
 from keras.utils import np_utils
 
diff --git a/examples/neural_style_transfer.py b/examples/neural_style_transfer.py
index 3468f46c4..30c84f083 100644
--- a/examples/neural_style_transfer.py
+++ b/examples/neural_style_transfer.py
@@ -58,7 +58,7 @@
 import argparse
 import h5py
 from keras.models import Sequential
-from keras.layers.convolutional import Convolution2D, ZeroPadding2D, MaxPooling2D
+from keras.layers import Convolution2D, ZeroPadding2D, MaxPooling2D
 from keras import backend as K
 
 parser = argparse.ArgumentParser(description='Neural style transfer with Keras.')
diff --git a/examples/reuters_mlp.py b/examples/reuters_mlp.py
index 8b489a0a3..82b892398 100644
--- a/examples/reuters_mlp.py
+++ b/examples/reuters_mlp.py
@@ -8,8 +8,7 @@ np.random.seed(1337)  # for reproducibility
 
 from keras.datasets import reuters
 from keras.models import Sequential
-from keras.layers.core import Dense, Dropout, Activation
-from keras.layers.normalization import BatchNormalization
+from keras.layers import Dense, Dropout, Activation
 from keras.utils import np_utils
 from keras.preprocessing.text import Tokenizer
 
diff --git a/examples/stateful_lstm.py b/examples/stateful_lstm.py
index 55bfd9e01..c55c2d1d2 100644
--- a/examples/stateful_lstm.py
+++ b/examples/stateful_lstm.py
@@ -5,8 +5,7 @@
 from __future__ import print_function
 import numpy as np
 import matplotlib.pyplot as plt
 from keras.models import Sequential
-from keras.layers.core import Dense
-from keras.layers.recurrent import LSTM
+from keras.layers import Dense, LSTM
 
 # since we are using stateful rnn tsteps can be set to 1
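
Every hunk above makes the same kind of change: layer classes are imported from the top-level keras.layers package instead of its internal submodules (keras.layers.core, keras.layers.recurrent, keras.layers.convolutional, keras.layers.embeddings, keras.layers.normalization). The sketch below is not part of the patch; it is a rough, hypothetical illustration of the resulting import style, assuming a Keras version that re-exports these classes from keras.layers. The model itself (vocabulary size, layer sizes) is made up for the example.

# Illustration only, not part of the patch: the consolidated import style
# used by the updated examples. Assumes a Keras release that exposes these
# layer classes directly under keras.layers.
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding, LSTM

# Hypothetical toy model: sizes are arbitrary, chosen only to show that
# core, embedding and recurrent layers all come from the same import path.
model = Sequential()
model.add(Embedding(20000, 128, input_length=100))  # 20k-word vocabulary, 128-dim vectors
model.add(LSTM(128))                                # recurrent layer, same top-level import
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')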