Export Ftrl optimizer.
PiperOrigin-RevId: 433338123
This commit is contained in:
parent a341b0eb2c
commit 9577fa87b7
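This change exports the experimental FTRL optimizer under `tf.keras.optimizers.experimental.Ftrl` (see the `keras_export` decorator in the last hunk). As a quick orientation before the diff, here is a minimal usage sketch; it assumes a TensorFlow build that already includes this commit, and the model here is a hypothetical example:

import tensorflow as tf

# Once exported, the experimental FTRL optimizer is constructed like any
# other Keras optimizer. The defaults shown match the __init__ argspec in
# the golden API file added below.
opt = tf.keras.optimizers.experimental.Ftrl(
    learning_rate=0.001,
    learning_rate_power=-0.5,
    initial_accumulator_value=0.1,
)

model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
model.compile(optimizer=opt, loss="mse")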
@@ -95,6 +95,7 @@ keras_packages = [
     "keras.optimizers.optimizer_experimental.adagrad",
     "keras.optimizers.optimizer_experimental.adam",
     "keras.optimizers.optimizer_experimental.adamax",
+    "keras.optimizers.optimizer_experimental.ftrl",
     "keras.optimizers.optimizer_experimental.sgd",
     "keras.optimizers.optimizer_experimental.optimizer",
     "keras.optimizers.optimizer_experimental.rmsprop",
@@ -0,0 +1,98 @@
+path: "tensorflow.keras.optimizers.experimental.Ftrl"
+tf_class {
+  is_instance: "<class \'keras.optimizers.optimizer_experimental.ftrl.Ftrl\'>"
+  is_instance: "<class \'keras.optimizers.optimizer_experimental.optimizer.Optimizer\'>"
+  is_instance: "<class \'keras.optimizers.optimizer_experimental.optimizer._BaseOptimizer\'>"
+  is_instance: "<class \'tensorflow.python.module.module.Module\'>"
+  is_instance: "<class \'tensorflow.python.training.tracking.autotrackable.AutoTrackable\'>"
+  is_instance: "<class \'tensorflow.python.training.tracking.base.Trackable\'>"
+  is_instance: "<type \'object\'>"
+  member {
+    name: "iterations"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "learning_rate"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "lr"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "name"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "name_scope"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "non_trainable_variables"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "submodules"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "trainable_variables"
+    mtype: "<type \'property\'>"
+  }
+  member {
+    name: "variables"
+    mtype: "<type \'property\'>"
+  }
+  member_method {
+    name: "__init__"
+    argspec: "args=[\'self\', \'learning_rate\', \'learning_rate_power\', \'initial_accumulator_value\', \'l1_regularization_strength\', \'l2_regularization_strength\', \'l2_shrinkage_regularization_strength\', \'beta\', \'clipnorm\', \'clipvalue\', \'global_clipnorm\', \'use_ema\', \'ema_momentum\', \'ema_overwrite_frequency\', \'jit_compile\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'0.001\', \'-0.5\', \'0.1\', \'0.0\', \'0.0\', \'0.0\', \'0.0\', \'None\', \'None\', \'None\', \'False\', \'0.99\', \'None\', \'False\', \'Ftrl\'], "
+  }
+  member_method {
+    name: "add_variable"
+    argspec: "args=[\'self\', \'shape\', \'dtype\', \'initializer\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'zeros\', \'None\'], "
+  }
+  member_method {
+    name: "add_variable_from_reference"
+    argspec: "args=[\'self\', \'model_variable\', \'variable_name\', \'initial_value\'], varargs=None, keywords=None, defaults=[\'None\'], "
+  }
+  member_method {
+    name: "aggregate_gradients"
+    argspec: "args=[\'self\', \'grads_and_vars\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "apply_gradients"
+    argspec: "args=[\'self\', \'grads_and_vars\', \'skip_gradients_aggregation\'], varargs=None, keywords=None, defaults=[\'False\'], "
+  }
+  member_method {
+    name: "build"
+    argspec: "args=[\'self\', \'var_list\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "compute_gradients"
+    argspec: "args=[\'self\', \'loss\', \'var_list\', \'tape\'], varargs=None, keywords=None, defaults=[\'None\'], "
+  }
+  member_method {
+    name: "finalize_variable_values"
+    argspec: "args=[\'self\', \'var_list\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "from_config"
+    argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "get_config"
+    argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "minimize"
+    argspec: "args=[\'self\', \'loss\', \'var_list\', \'tape\'], varargs=None, keywords=None, defaults=[\'None\'], "
+  }
+  member_method {
+    name: "update_step"
+    argspec: "args=[\'self\', \'gradient\', \'variable\'], varargs=None, keywords=None, defaults=None"
+  }
+  member_method {
+    name: "with_name_scope"
+    argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
+  }
+}
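The golden file above pins down the exported training surface (`apply_gradients`, `compute_gradients`, `minimize`, `update_step`). A minimal sketch of one manual update step with that API, again assuming a build that includes this commit; the variable and loss are toy examples:

import tensorflow as tf

opt = tf.keras.optimizers.experimental.Ftrl()  # all-default construction
var = tf.Variable([1.0, 2.0])

with tf.GradientTape() as tape:
    loss = tf.reduce_sum(tf.square(var))  # toy quadratic loss

# apply_gradients takes (gradient, variable) pairs, matching the
# grads_and_vars argspec in the golden file above.
grads = tape.gradient(loss, [var])
opt.apply_gradients(zip(grads, [var]))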
@@ -20,6 +20,10 @@ tf_module {
     name: "Adamax"
     mtype: "<type \'type\'>"
   }
+  member {
+    name: "Ftrl"
+    mtype: "<type \'type\'>"
+  }
   member {
     name: "Optimizer"
     mtype: "<type \'type\'>"
@@ -27,6 +27,7 @@ from keras.optimizers.optimizer_experimental import adagrad as adagrad_experimental
 from keras.optimizers.optimizer_experimental import adam as adam_experimental
 from keras.optimizers.optimizer_experimental import adamax as adamax_experimental
 from keras.optimizers.optimizer_experimental import adamw as adamw_experimental
+from keras.optimizers.optimizer_experimental import ftrl as ftrl_experimental
 from keras.optimizers.optimizer_experimental import rmsprop as rmsprop_experimental
 from keras.optimizers.optimizer_experimental import sgd as sgd_experimental
 from keras.optimizers.optimizer_v1 import Optimizer
@@ -17,10 +17,13 @@
 from keras.optimizers.optimizer_experimental import optimizer
+from keras.utils import generic_utils
 import tensorflow.compat.v2 as tf
 # pylint: disable=g-direct-tensorflow-import
 from tensorflow.python.util.tf_export import keras_export


 # pylint: disable=g-classes-have-attributes
+@generic_utils.register_keras_serializable()
+@keras_export('keras.optimizers.experimental.Ftrl', v1=[])
 class Ftrl(optimizer.Optimizer):
   r"""Optimizer that implements the FTRL algorithm.

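The `@generic_utils.register_keras_serializable()` decorator added in this hunk registers the class with Keras' serialization machinery, so the optimizer can be looked up by name when a saved model or config is deserialized. A small round-trip sketch using `get_config`/`from_config`, both of which are part of the exported API per the golden file above:

import tensorflow as tf

opt = tf.keras.optimizers.experimental.Ftrl(l1_regularization_strength=0.01)

# Serialize to a plain config dict and rebuild an equivalent optimizer.
config = opt.get_config()
restored = tf.keras.optimizers.experimental.Ftrl.from_config(config)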