Merge pull request #111 from enizhibitsky/l1l2-regularization

Add L1-L2 (ElasticNet) regularization
François Chollet 2015-05-10 14:06:49 -07:00
commit 8705113239
2 changed files with 8 additions and 0 deletions

@@ -15,3 +15,4 @@ model.add(Dense(64, 64, W_regularizer = l2(.01)))
- __l1__(l=0.01): L1 regularization penalty, also known as LASSO
- __l2__(l=0.01): L2 regularization penalty, also known as weight decay, or Ridge
- __l1l2__(l1=0.01, l2=0.01): L1-L2 regularization penalty, also known as ElasticNet
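
A minimal usage sketch of the new penalty, mirroring the documented l2 example above. The import paths and the Dense(64, 64, ...) signature follow the Keras API of this commit's era; they are assumptions, not part of the diff:

from keras.models import Sequential
from keras.layers.core import Dense
from keras.regularizers import l1l2

model = Sequential()
# Same pattern as the l2 example, with both penalties applied to the weights.
model.add(Dense(64, 64, W_regularizer=l1l2(l1=.01, l2=.01)))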

@@ -15,5 +15,12 @@ def l2(l=.01):
        return g
    return l2wrap

def l1l2(l1=.01, l2=.01):
    def l1l2wrap(g, p):
        g += T.sgn(p) * l1
        g += p * l2
        return g
    return l1l2wrap

def identity(g, p):
    return g
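
A note on the style used throughout this file: each regularizer returns a gradient modifier rather than a penalty term; the modifier adds the penalty's gradient directly to the parameter gradient g. Below is a self-contained Theano check of the new modifier with illustrative values (the zero vector stands in for the loss gradient; the name `penalized` is hypothetical):

import numpy as np
import theano
import theano.tensor as T

# The combined L1-L2 modifier, as added in this commit.
def l1l2(l1=.01, l2=.01):
    def l1l2wrap(g, p):
        g += T.sgn(p) * l1  # gradient of the L1 term l1 * |p|
        g += p * l2         # gradient of the L2 term (l2 / 2) * p ** 2
        return g
    return l1l2wrap

p = T.vector('p')
g = T.zeros_like(p)  # stand-in for the loss gradient
penalized = theano.function([p], l1l2(.01, .01)(g, p))
print(penalized(np.array([1., -2., .5], dtype=theano.config.floatX)))
# -> [ 0.02 -0.03  0.015], i.e. 0.01 * sgn(p) + 0.01 * p per weight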