Merge branch 'master' of https://github.com/fchollet/keras (commit 43bfeb0a66)
@ -15,3 +15,4 @@ model.add(Dense(64, 64, W_regularizer = l2(.01)))
- __l1__(l=0.01): L1 regularization penalty, also known as LASSO
- __l2__(l=0.01): L2 regularization penalty, also known as weight decay, or Ridge
- __l1l2__(l1=0.01, l2=0.01): L1-L2 regularization penalty, also known as ElasticNet
@ -53,7 +53,7 @@ def preprocess_data(X, scaler=None):
|
|||||||
X = scaler.transform(X)
|
X = scaler.transform(X)
|
||||||
return X, scaler
|
return X, scaler
|
||||||
|
|
||||||
def preprocess_labels(y, encoder=None, categorical=True):
|
def preprocess_labels(labels, encoder=None, categorical=True):
|
||||||
if not encoder:
|
if not encoder:
|
||||||
encoder = LabelEncoder()
|
encoder = LabelEncoder()
|
||||||
encoder.fit(labels)
|
encoder.fit(labels)
|
||||||
|
@ -15,5 +15,12 @@ def l2(l=.01):
|
|||||||
return g
|
return g
|
||||||
return l2wrap
|
return l2wrap
|
||||||
|
|
||||||
|
def l1l2(l1=.01, l2=.01):
    """Combined L1-L2 (ElasticNet) regularization penalty.

    Returns a gradient-modifier closure: given a gradient ``g`` and the
    parameter tensor ``p`` it belongs to, the closure adds the L1
    subgradient (``sgn(p) * l1``) and the L2 weight-decay term
    (``p * l2``) to ``g`` and returns the result.
    """
    def regularize(g, p):
        # L1 contribution: sign of the parameter, scaled by l1.
        g += T.sgn(p) * l1
        # L2 contribution: the parameter itself, scaled by l2.
        g += p * l2
        return g
    return regularize
||||||
|
|
||||||
def identity(g, p):
    """No-op regularizer: return the incoming gradient unchanged."""
    return g
Loading…
Reference in New Issue
Block a user