Clearing existing files

This commit is contained in:
Francois Chollet 2023-04-07 00:00:00 +00:00
parent cbbe8eb530
commit ac9aadc47a
1690 changed files with 0 additions and 452552 deletions

154
.bazelrc

@@ -1,154 +0,0 @@
# TensorFlow Bazel configuration file.
# This file tries to group and simplify build options for TensorFlow
#
# ----CONFIG OPTIONS----
#
# Other build options:
# short_logs: Only log errors during build, skip warnings.
# verbose_logs: Show all compiler warnings during build.
# monolithic: Build all TF C++ code into a single shared object.
# dynamic_kernels: Try to link all kernels dynamically (experimental).
# libc++: Link against libc++ instead of libstdc++
##
# TF version options:
# v1: Build TF V1 (without contrib)
# v2: Build TF v2
#
# Feature and Third party library support options:
# xla: Build TF with XLA
# tpu: Build TF with TPU support
# using_cuda: CUDA is available to build system.
# cuda: Build with full cuda support.
# rocm: Build with AMD GPU support (rocm).
# mkl: Enable full mkl support.
# tensorrt: Enable TensorRT support.
# numa: Enable numa using hwloc.
# noaws: Disable AWS S3 storage support
# nogcp: Disable GCS support.
# nohdfs: Disable hadoop hdfs support.
# nonccl: Disable nccl support.
# Sets the default Apple platform to macOS.
build --apple_platform_type=macos
# Flags for open source build, always set to be true.
build --define open_source_build=true
test --define open_source_build=true
# To work around the use_fast_cpp_protos issue in protobuf deps.
build --define=use_fast_cpp_protos=false
test --define=use_fast_cpp_protos=false
# This config refers to building with CUDA available. It does not necessarily
# mean that we build CUDA op kernels.
build:using_cuda --define=using_cuda=true
build:using_cuda --action_env TF_NEED_CUDA=1
build:using_cuda --crosstool_top=@local_config_cuda//crosstool:toolchain
# Enable the mlir generated GPU kernels only for cuda builds.
build --define=tensorflow_enable_mlir_generated_gpu_kernels=0
# This is a more specific option, so it takes precedence over the line above for cuda builds.
build:using_cuda --define=tensorflow_enable_mlir_generated_gpu_kernels=1
# This config refers to building CUDA op kernels with nvcc.
build:cuda --config=using_cuda
build:cuda --define=using_cuda_nvcc=true
# dbg config, as a shorthand for '--config=opt -c dbg'
build:dbg --config=opt -c dbg
# for now, disable arm_neon. see: https://github.com/tensorflow/tensorflow/issues/33360
build:dbg --cxxopt -DTF_LITE_DISABLE_X86_NEON
# AWS SDK must be compiled in release mode. see: https://github.com/tensorflow/tensorflow/issues/37498
build:dbg --copt -DDEBUG_BUILD
build:tensorrt --action_env TF_NEED_TENSORRT=1
build:rocm --crosstool_top=@local_config_rocm//crosstool:toolchain
build:rocm --define=using_rocm=true --define=using_rocm_hipcc=true
build:rocm --action_env TF_NEED_ROCM=1
# Options extracted from configure script
build:numa --define=with_numa_support=true
# Options to disable default-on features
build:noaws --define=no_aws_support=true
build:nogcp --define=no_gcp_support=true
build:nohdfs --define=no_hdfs_support=true
build:nonccl --define=no_nccl_support=true
build --define=allow_oversize_protos=true
build --spawn_strategy=standalone
build -c opt
# Make Bazel print out all options from rc files.
build --announce_rc
# Other build flags.
build --define=grpc_no_ares=true
build:linux --copt=-w
build:linux --host_copt=-w
build:macos --copt=-w
build:windows --copt=/W0
# Tensorflow uses M_* math constants that only get defined by MSVC headers if
# _USE_MATH_DEFINES is defined.
build:windows --copt=/D_USE_MATH_DEFINES
build:windows --host_copt=/D_USE_MATH_DEFINES
# Default paths for TF_SYSTEM_LIBS
build:linux --define=PREFIX=/usr
build:linux --define=LIBDIR=$(PREFIX)/lib
build:linux --define=INCLUDEDIR=$(PREFIX)/include
build:linux --define=PROTOBUF_INCLUDE_PATH=$(PREFIX)/include
build:macos --define=PREFIX=/usr
build:macos --define=LIBDIR=$(PREFIX)/lib
build:macos --define=INCLUDEDIR=$(PREFIX)/include
build:macos --define=PROTOBUF_INCLUDE_PATH=$(PREFIX)/include
# TF_SYSTEM_LIBS do not work on windows.
# On windows, we still link everything into a single DLL.
build:windows --config=monolithic
# On linux, we dynamically link a small number of kernels
build:linux --config=dynamic_kernels
# Make sure to include as little of windows.h as possible
build:windows --copt=-DWIN32_LEAN_AND_MEAN
build:windows --host_copt=-DWIN32_LEAN_AND_MEAN
build:windows --copt=-DNOGDI
build:windows --host_copt=-DNOGDI
# MSVC (Windows): Standards-conformant preprocessor mode
# See https://docs.microsoft.com/en-us/cpp/preprocessor/preprocessor-experimental-overview
build:windows --copt=/experimental:preprocessor
build:windows --host_copt=/experimental:preprocessor
# Misc build options we need for windows.
build:windows --linkopt=/DEBUG
build:windows --host_linkopt=/DEBUG
build:windows --linkopt=/OPT:REF
build:windows --host_linkopt=/OPT:REF
build:windows --linkopt=/OPT:ICF
build:windows --host_linkopt=/OPT:ICF
build:windows --experimental_strict_action_env=true
# Verbose failure logs when something goes wrong
build:windows --verbose_failures
# Suppress all warning messages.
build:short_logs --output_filter=DONT_MATCH_ANYTHING
build:verbose_logs --output_filter=
build --config=short_logs
# Options to build TensorFlow 1.x or 2.x.
build:v1 --define=tf_api_version=1
build:v2 --define=tf_api_version=2
build:v1 --action_env=TF2_BEHAVIOR=0
build:v2 --action_env=TF2_BEHAVIOR=1
build --config=v2
test --config=v2
# Enable XLA
build:xla --define=with_xla_support=true
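As a rough sketch of how these named configs might be combined on the command line (the target pattern is illustrative, not prescribed by this file):
```shell
# Hypothetical invocation: build with XLA support and full compiler warnings.
bazel build --config=xla --config=verbose_logs //keras/...
```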

@@ -1 +0,0 @@
5.4.0

@@ -1,8 +0,0 @@
FROM mcr.microsoft.com/vscode/devcontainers/python:3.8
COPY setup.sh /setup.sh
# Install Bazel
RUN sudo apt install wget -y
RUN wget https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/bazelisk-linux-amd64
RUN chmod a+x bazelisk-linux-amd64
RUN mv bazelisk-linux-amd64 /usr/bin/bazel

@@ -1,13 +0,0 @@
{
"dockerFile": "Dockerfile",
"postCreateCommand": "sh /setup.sh",
"extensions": ["ms-python.python"],
"settings": {
"files.watcherExclude": {
"**/bazel-*/**": true
},
"search.exclude": {
"**/bazel-*/**": true
}
}
}

@@ -1,6 +0,0 @@
#!/bin/bash
sudo pip install -r requirements.txt
sudo pip uninstall keras-nightly -y
wget https://github.com/cli/cli/releases/download/v2.17.0/gh_2.17.0_linux_amd64.deb -P /tmp
sudo apt install /tmp/gh_2.17.0_linux_amd64.deb -y

@@ -1,61 +0,0 @@
---
name: Bug Issue
about: Use this template for reporting a bug
labels: 'type:bug'
---
Please go to TF Forum for help and support:
https://discuss.tensorflow.org/tag/keras
If you open a GitHub issue, here is our policy:
It must be a bug, a feature request, or a significant problem with the documentation (for small docs fixes please send a PR instead).
The form below must be filled out.
**Here's why we have that policy:**
Keras developers respond to issues. We want to focus on work that benefits the whole community, e.g., fixing bugs and adding features. Support only helps individuals. GitHub also notifies thousands of people when issues are filed. We want them to see you communicating an interesting problem, rather than being redirected to Stack Overflow.
**System information**
- Have I written custom code (as opposed to using a stock example script provided in Keras):
- OS Platform and Distribution (e.g., Linux Ubuntu 16.04):
- TensorFlow installed from (source or binary):
- TensorFlow version (use command below):
- Python version:
- Bazel version (if compiling from source):
- GPU model and memory:
- Exact command to reproduce:
You can collect some of this information using our environment capture script:
https://github.com/tensorflow/tensorflow/tree/master/tools/tf_env_collect.sh
You can obtain the TensorFlow version with:
python -c "import tensorflow as tf; print(tf.version.GIT_VERSION, tf.version.VERSION)"
**Describe the problem**
Describe the problem clearly here. Be sure to convey here why it's a bug in Keras or why the requested feature is needed.
**Describe the current behavior**
**Describe the expected behavior**
**[Contributing](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md)**
- Do you want to contribute a PR? (yes/no):
- If yes, please read [this page](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md) for instructions
- Briefly describe your candidate solution (if contributing):
**Standalone code to reproduce the issue**
Provide a reproducible test case that is the bare minimum necessary to generate
the problem. If possible, please share a link to Colab/Jupyter/any notebook.
**Source code / logs**
Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached. Try to provide a reproducible test case that is the bare minimum necessary to generate the problem.

@@ -1,37 +0,0 @@
---
name: Feature Request
about: Use this template for raising a feature request
labels: 'type:feature'
---
If you open a GitHub issue, here is our policy:
It must be a bug, a feature request, or a significant problem with the documentation (for small docs fixes please send a PR instead).
The form below must be filled out.
**Here's why we have that policy:**
Keras developers respond to issues. We want to focus on work that benefits the whole community, e.g., fixing bugs and adding features. Support only helps individuals. GitHub also notifies thousands of people when issues are filed. We want them to see you communicating an interesting problem, rather than being redirected to Stack Overflow.
**System information**
TensorFlow version (you are using):
Are you willing to contribute it (Yes/No):
**Describe the feature and the current behavior/state**
Describe the feature clearly here. Be sure to convey here why the requested feature is needed. A brief description of the use case would help.
**Will this change the current API? How?**
**Who will benefit from this feature?**
**[Contributing](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md)**
- Do you want to contribute a PR? (yes/no):
- If yes, please read [this page](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md) for instructions
- Briefly describe your candidate solution (if contributing):

@@ -1,63 +0,0 @@
---
name: Documentation Issue
about: Use this template for documentation related issues
labels: 'type:docs'
---
Please go to TF Forum for help and support:
https://discuss.tensorflow.org/tag/keras
If you open a GitHub issue, here is our policy:
It must be a bug, a feature request, or a significant problem with the documentation (for small docs fixes please send a PR instead).
The form below must be filled out.
**Here's why we have that policy:**
Keras developers respond to issues. We want to focus on work that benefits the whole community, e.g., fixing bugs and adding features. Support only helps individuals. GitHub also notifies thousands of people when issues are filed. We want them to see you communicating an interesting problem, rather than being redirected to Stack Overflow.
**URL(s) with the issue:**
Please provide a link to the documentation entry, for example: https://keras.io/guides/customizing_what_happens_in_fit/
**Description of issue (what needs to be changed):**
**Correct links**
Is the link to the source code correct?
**Parameters defined**
Are all parameters defined and formatted correctly?
**Returns defined**
Are return values defined?
**Raises listed and defined**
Are the errors defined?
**Usage example**
Is there a usage example?
See the API guide: https://www.tensorflow.org/community/contribute/docs_ref on how to write testable usage examples.
**Request visuals, if applicable**
Are there currently visuals? If not, would adding them clarify the content?
**Submit a pull request?**
Are you planning to also submit a pull request to fix the issue? See the [docs contributor guide](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md).

@@ -1 +0,0 @@
blank_issues_enabled: false

@@ -1,19 +0,0 @@
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# A list of assignees
assignees:
- tilakrayal
- sushreebarsa

@@ -1,52 +0,0 @@
name: Format the code
on:
workflow_dispatch:
permissions: {}
jobs:
createPullRequest:
permissions:
contents: write # to create branch (peter-evans/create-pull-request)
pull-requests: write # to create a PR (peter-evans/create-pull-request)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get pip cache dir
id: pip-cache
run: |
python -m pip install --upgrade pip setuptools
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v2
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
pip install black==22.3.0 isort==5.10.1 flake8==4.0.1
- name: Format the code
run: sh shell/format.sh
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v4
with:
commit-message: format the code
committer: A. Unique TensorFlower <gardener@tensorflow.org>
author: A. Unique TensorFlower <gardener@tensorflow.org>
branch: format
delete-branch: true
title: 'Format the code'
body: |
This is a pull request automatically created by a GitHub Action to format the code.
If there is any conflict, click the run workflow button on
[this page](https://github.com/keras-team/keras/actions/workflows/format.yml).
labels: |
ready to pull
draft: false

@@ -1,34 +0,0 @@
name: Lint
on:
push:
pull_request:
workflow_dispatch:
permissions:
contents: read # to fetch code (actions/checkout)
jobs:
lint:
name: Check the code format
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get pip cache dir
id: pip-cache
run: |
python -m pip install --upgrade pip setuptools
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v2
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
pip install black==22.3.0 isort==5.10.1 flake8==4.0.1
- name: Lint the code
run: sh shell/lint.sh

@@ -1,47 +0,0 @@
name: Close inactive issues
on:
schedule:
- cron: "30 1 * * *"
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- name: Awaiting response issues
uses: actions/stale@v5
with:
days-before-issue-stale: 14
days-before-issue-close: 14
stale-issue-label: "stale"
# reason for closing the issue; the default value is not_planned
close-issue-reason: completed
only-labels: "stat:awaiting response from contributor"
stale-issue-message: >
This issue is stale because it has been open for 14 days with no activity.
It will be closed if no further activity occurs. Thank you.
close-issue-message: >
This issue was closed because it has been inactive for 28 days.
Please reopen if you'd like to work on this further.
days-before-pr-stale: 14
days-before-pr-close: 14
stale-pr-message: "This PR is stale because it has been open for 14 days with no activity. It will be closed if no further activity occurs. Thank you."
close-pr-message: "This PR was closed because it has been inactive for 28 days. Please reopen if you'd like to work on this further."
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Contribution issues
uses: actions/stale@v5
with:
days-before-issue-stale: 180
days-before-issue-close: 365
stale-issue-label: "stale"
# reason for closing the issue; the default value is not_planned
close-issue-reason: not_planned
any-of-labels: "stat:contributions welcome,good first issue"
stale-issue-message: >
This issue is stale because it has been open for 180 days with no activity.
It will be closed if no further activity occurs. Thank you.
close-issue-message: >
This issue was closed because it has been inactive for more than 1 year.
repo-token: ${{ secrets.GITHUB_TOKEN }}

21
.gitignore vendored

@@ -1,21 +0,0 @@
# macOS
.DS_Store
# Python temp files
__pycache__/
*.py[cod]
*$py.class
# Vim temp files
*.swp
*.swo
# VS Code configs
.devcontainer
.vscode
# Bazel files
bazel-bin
bazel-keras
bazel-out
bazel-testlogs

27
.vscode/settings.json vendored

@@ -1,27 +0,0 @@
{
"python.linting.flake8Enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.enabled": true,
"editor.rulers": [
80
],
"editor.formatOnSave": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": [
"--line-length",
"80"
],
"python.sortImports.args": [
"--profile",
"black",
"--sl"
],
"[python]": {
"editor.codeActionsOnSave": {
"source.organizeImports": true
}
},
"python.analysis.diagnosticSeverityOverrides": {
"reportMissingImports": "none"
}
}

118
BUILD

@@ -1,118 +0,0 @@
py_library(
name = "expect_absl_installed",
# This is a dummy rule used as an absl dependency in open-source.
# We expect absl to already be installed on the system, e.g. via
# `pip install absl`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_h5py_installed",
# This is a dummy rule used as an h5py dependency in open-source.
# We expect h5py to already be installed on the system, e.g. via
# `pip install h5py`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_numpy_installed",
# This is a dummy rule used as a numpy dependency in open-source.
# We expect numpy to already be installed on the system, e.g. via
# `pip install numpy`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_pandas_installed",
# This is a dummy rule used as a pandas dependency in open-source.
# We expect pandas to already be installed on the system, e.g. via
# `pip install pandas`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_pillow_installed",
# This is a dummy rule used as a pillow dependency in open-source.
# We expect pillow to already be installed on the system, e.g. via
# `pip install Pillow`
visibility = ["//visibility:public"],
deps = [],
)
# Note that this dependency is for testing only.
py_library(
name = "expect_portpicker_installed",
# This is a dummy rule used as a portpicker dependency in open-source.
# We expect portpicker to already be installed on the system, e.g. via
# `pip install portpicker`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_pydot_installed",
# This is a dummy rule used as a pydot dependency in open-source.
# We expect pydot to already be installed on the system, e.g. via
# `pip install pydot`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_scipy_installed",
# This is a dummy rule used as a scipy dependency in open-source.
# We expect scipy to already be installed on the system, e.g. via
# `pip install scipy`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_six_installed",
# This is a dummy rule used as a six dependency in open-source.
# We expect six to already be installed on the system, e.g. via
# `pip install six`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_tensorboard_installed",
# This is a dummy rule used as a tensorboard dependency in open-source.
# We expect tensorboard to already be installed on the system, e.g. via
# `pip install tensorflow`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_tensorflow_installed",
# This is a dummy rule used as a tensorflow dependency in open-source.
# We expect tensorflow to already be installed on the system, e.g. via
# `pip install tensorflow`
visibility = ["//visibility:public"],
deps = [],
)
py_library(
name = "expect_yaml_installed",
# This is a dummy rule used as a yaml dependency in open-source.
# We expect yaml to already be installed on the system, e.g. via
# `pip install yaml`
visibility = ["//visibility:public"],
deps = [],
)
# Note that this dependency is for testing only.
py_library(
name = "expect_tensorflow_io_installed",
# This is a dummy rule used as a tensorflow_io dependency in open-source.
# We expect tensorflow_io to already be installed on the system, e.g. via
# `pip install tensorflow-io`
visibility = ["//visibility:public"],
deps = [],
)

@@ -1,316 +0,0 @@
## How to contribute code
Follow these steps to submit your code contribution.
### Step 1. Open an issue
Before making any changes, we recommend opening an issue (if one doesn't already
exist) and discussing your proposed changes. This way, we can give you feedback
and validate the proposed changes.
If the changes are minor (simple bug fix or documentation fix), then feel free
to open a PR without discussion.
### Step 2. Make code changes
To make code changes, you need to fork the repository. You will need to set up a
development environment and run the unit tests. This is covered in the section
"Setup environment".
### Step 3. Create a pull request
Once the change is ready, open a pull request from your branch in your fork to
the master branch in [keras-team/keras](https://github.com/keras-team/keras).
### Step 4. Sign the Contributor License Agreement
After creating the pull request, the `google-cla` bot will comment on your pull
request with instructions on signing the Contributor License Agreement (CLA) if
you haven't done so. Please follow the instructions to sign the CLA. A `cla:yes`
tag is then added to the pull request.
![Tag added](https://i.imgur.com/LHEdIfL.png)
### Step 5. Code review
A reviewer will review the pull request and provide comments. The reviewer may
add a `kokoro:force-run` label to trigger the continuous integration tests.
![CI tests tag](https://i.imgur.com/58NOCB0.png)
If the tests fail, look into the error messages and try to fix them.
![CI tests](https://i.imgur.com/vVY0dZD.png)
There may be
several rounds of comments and code changes before the pull request gets
approved by the reviewer.
![Approval from reviewer](https://i.imgur.com/Ywl4ets.png)
### Step 6. Merging
Once the pull request is approved, a `ready to pull` tag will be added to the
pull request. A team member will take care of the merging.
![Ready to pull](https://i.imgur.com/yCEqJsA.png)
Here is an [example pull request](https://github.com/keras-team/keras/pull/15015)
for your reference.
## Setup environment
To set up the development environment, we provide two options. One is to use our
Dockerfile, which builds the required dev tools into a container. The other is
to set up a local environment by installing the dev tools needed.
### Option 1: Use a Docker container
We provide a
[Dockerfile](https://github.com/keras-team/keras/blob/master/.devcontainer/Dockerfile)
to build the dev environment. You can build the Dockerfile into a Docker image
named `keras-dev` with the following command at the root directory of your
cloned repo.
```shell
docker build -t keras-dev .devcontainer
```
You can launch a Docker container from the image with the following command. The
`-it` option gives you an interactive shell of the container. The `-v
path/to/repo/:/home/keras/` mounts your cloned repo to the container. Replace
`path/to/repo` with the path to your cloned repo directory.
```shell
docker run -it -v path/to/repo/:/home/keras/ keras-dev
```
In the container shell, you need to install the latest dependencies with the
following command.
```shell
pip install -r /home/keras/requirements.txt && pip uninstall keras-nightly -y
```
Now, the environment setup is complete. You are ready to run the tests.
You may modify the Dockerfile to your specific needs, like installing your own
dev tools. You may also mount more volumes with the `-v` option, like your SSH
credentials.
Many popular editors today support developing in a container. Here is the list of
[supported editors](https://discuss.tensorflow.org/t/setup-your-favorite-editor-to-develop-keras)
with setup instructions.
### Option 2: Setup a local environment
To set up your local dev environment, you will need the following tools.
1. [Bazel](https://bazel.build/) is the tool to build and test Keras. See the
[installation guide](https://docs.bazel.build/versions/4.0.0/install.html)
for how to install and configure Bazel for your local environment.
2. [git](https://github.com/) for code repository management.
3. [python](https://www.python.org/) to build and code in Keras.
The following commands check that the tools above are successfully installed. Note
that Keras requires at least Python 3.7 to run.
```shell
bazel --version
git --version
python --version
```
A [Python virtual environment](https://docs.python.org/3/tutorial/venv.html)
(venv) is a powerful tool to create a self-contained environment that isolates
any changes from the system-level config. Using one is highly recommended to avoid any
unexpected dependency or version issues.
With the following commands, you create a new venv, named `venv_dir`.
```shell
mkdir venv_dir
python3 -m venv venv_dir
```
You can activate the venv with the following command. You should always run the
tests with the venv activated. You need to activate the venv every time you open
a new shell.
```shell
source venv_dir/bin/activate # for Linux or MacOS
venv_dir\Scripts\activate.bat # for Windows
```
Clone your forked repo to your local machine. Go to the cloned directory to
install the dependencies into the venv. Since `tf-nightly` uses `keras-nightly`
as a dependency, we need to uninstall `keras-nightly` so that tests will run
against Keras code in the local workspace.
```shell
git clone https://github.com/YOUR_GITHUB_USERNAME/keras.git
cd keras
pip install -r requirements.txt
pip uninstall keras-nightly
```
The environment setup is now complete. You may need to update the `tf-nightly`
version regularly to keep your environment up-to-date with the following
command.
```shell
pip install --upgrade tf-nightly
```
## Code style
Keras uses [Black](https://black.readthedocs.io/en/stable/) and
[isort](https://pycqa.github.io/isort/) to format the code. Please refer to
[requirements.txt](https://github.com/keras-team/keras/blob/master/requirements.txt)
for the required versions. Run the following command **at the root directory of
the repo** to format your code.
```
sh shell/format.sh
```
It will also display the errors that cannot be resolved by autoformatting. You
need to follow the output of the command to resolve them manually.
If you do not want to auto format the code but only show the lint errors, you
can run `sh shell/lint.sh` **at the root directory of the repo**.
### Docstrings
We do not have an automated way to check docstring style, so if you write
or edit any docstring, please make sure to check them manually.
Keras docstrings follow the conventions below:
A **class docstring** may contain the following items:
* A one-line description of the class.
* Paragraph(s) of more detailed information.
* Optional `Examples` section.
* `Args` section for arguments in `__init__()`.
* If it's a layer:
* `Call arguments` section for arguments in `Layer.call()`.
* `Returns` section for the return values of `Layer.call()`.
* Optional `Raises` section for possible errors.
You can check out `MultiHeadAttention` as an example
[(link)](https://github.com/keras-team/keras/blob/v2.12.0-rc1/keras/layers/attention/multi_head_attention.py#L131).
A **function docstring** may contain the following items:
* One-line description of the function.
* Paragraph(s) of more detailed information.
* Optional `Examples` section.
* `Args` section for the function arguments.
* `Returns` section for the return values.
* Optional `Raises` section for possible errors.
You can check out `text_dataset_from_directory` as an example
[(link)](https://github.com/keras-team/keras/blob/v2.12.0-rc1/keras/utils/text_dataset.py#L31).
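As an illustration only, here is a minimal sketch of a function docstring laid out according to the convention above (the function and its arguments are hypothetical, not part of the Keras API):
```python
def scale_values(x, factor=1.0):
    """Scales a numeric value or tensor by a constant factor.

    This hypothetical function exists only to illustrate the docstring
    sections described in this guide.

    Examples:

    >>> scale_values(3.0, factor=2.0)
    6.0

    Args:
        x: Input value or tensor.
        factor: Float, the constant to multiply `x` by.

    Returns:
        `x` scaled by `factor`, with the same type and shape as `x`.

    Raises:
        ValueError: If `factor` is not a real number.
    """
    if not isinstance(factor, (int, float)):
        raise ValueError("`factor` must be a real number.")
    return x * factor
```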
## Run tests
We use [Bazel](https://bazel.build/) to build and run the tests.
### Run a test file
For example, to run the tests in `keras/engine/base_layer_test.py`,
we can run the following command at the root directory of the repo.
```shell
bazel test keras/engine:base_layer_test
```
`keras/engine` is the relative path to the directory containing the `BUILD` file
defining the test. `base_layer_test` is the test target name defined with
`tf_py_test` in the `BUILD` file.
### Run a single test case
To run a single test, you can use `--test_filter=<your_regex>`
to use the regular expression to match the test you want to run. For example, you
can use the following command to run all the tests in `activations_test.py`,
whose names contain `test_serialization`.
```
bazel test keras:activations_test --test_filter=*test_serialization*
```
### Run all tests
You can run all the tests locally by running the following command in the repo
root directory.
```
bazel test --test_timeout 300,450,1200,3600 --test_output=errors --keep_going --define=use_fast_cpp_protos=false --build_tests_only --build_tag_filters=-no_oss,-oss_excluded --test_tag_filters=-no_oss,-oss_excluded keras/...
```
### Useful configs
Here we provide a list of useful configs you can use with Bazel.
```shell
bazel test [CONFIGS] [YOUR_TEST]
```
To use these configs, just replace `[CONFIGS]` with the actual config in the
command above.
* `-c opt` enables the optimizations during the build.
* `--test_sharding_strategy=disabled` disables the sharding so that all the
test outputs are in one file.
However, it may slow down the tests since they are not run in parallel,
and may cause tests to time out.
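For example, both configs can be combined with one of the test targets shown earlier (purely illustrative):
```shell
bazel test -c opt --test_sharding_strategy=disabled keras:activations_test
```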
## Contributing to Keras applications
Contributions to the
[pre-trained application library](https://keras.io/api/applications/) are
welcome. Code for Keras applications is located in Keras repository in
[keras/applications](https://github.com/keras-team/keras/blob/master/keras/applications).
When contributing to Keras applications, please keep the following checklist in
mind.
- Keras applications must implement an established and widely used model.
Applications should include a link to a paper describing the architecture of
the model with at least 20 citations.
- Applications should be provided with pre-trained weights.
- When submitting a pull request for a Keras application, these weights
can be provided at any publicly available URL (e.g. a personal Cloud
Storage bucket). The weights will be uploaded to a Keras storage bucket
while merging the pull request.
- Weights should be downloaded with the
[get_file()](https://keras.io/api/utils/python_utils/#getfile-function)
utility function. Be sure to include the `file_hash` argument, which
allows cache invalidation on the downloaded weights. The command line
programs `shasum` and `sha256sum` can compute a file hash (see the sketch after this list).
- You should help us verify that the accuracy of the model with pre-trained
weights matches the reported results of the cited paper.
- You should add any new applications to the unit tests defined in
`applications_test.py` and `applications_load_weight_test.py`.
- For backwards compatibility, all applications should provide a
`preprocess_input()` function. For new applications, you should leave the
function empty (pass through inputs unaltered), and write the model so it
can handle raw inputs directly. Adding
[preprocessing layers](https://keras.io/guides/preprocessing_layers/) to the
application model may help with this. For image applications, a
[Rescaling](https://keras.io/api/layers/preprocessing_layers/image_preprocessing/rescaling/)
layer at the beginning of the model is often all that is needed.
- Once the PR is approved, you should create a companion PR to the keras.io
[application page](https://keras.io/api/applications/) updating the
"Available Models" section. The contribution guide for keras.io can be found
[here](https://github.com/keras-team/keras-io/blob/master/contributor_guide.md).
- As every PR requires several CPU/GPU hours of CI testing, we discourage
submitting PRs to fix one typo, one warning, etc. We recommend fixing the
same issue at least at the file level (e.g., fix all typos in a file, fix
all compiler warnings in a file, etc.)
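As a rough sketch of the weight-downloading pattern described in the checklist above (the URL, file name, and hash are placeholders, and `model` is assumed to be an already-built Keras application model):
```python
from tensorflow.keras.utils import get_file

# Placeholder values for illustration only.
WEIGHTS_URL = "https://example.com/my_app_weights.h5"
WEIGHTS_SHA256 = "replace-with-the-real-sha256-hash"

weights_path = get_file(
    "my_app_weights.h5",
    origin=WEIGHTS_URL,
    file_hash=WEIGHTS_SHA256,  # allows cache invalidation; compute with sha256sum
    cache_subdir="models",
)
# `model` is assumed to be an already-built keras.Model instance.
model.load_weights(weights_path)
```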
## Security vulnerability reports
Since Keras is the high-level API of TensorFlow 2, Keras follows the same security practices as TensorFlow.
For guidelines on vulnerabilities and how to report them, refer to [Using TensorFlow Securely](https://github.com/tensorflow/tensorflow/blob/master/SECURITY.md).

@@ -1,41 +0,0 @@
Please go to Stack Overflow for help and support:
https://stackoverflow.com/questions/tagged/keras
If you open a GitHub issue, here is our policy:
1. It must be a bug, a feature request, or a significant problem with the
documentation (for small docs fixes please send a PR instead).
2. The form below must be filled out.
**Here's why we have that policy**: Keras developers respond to issues. We want to focus on work that benefits the whole community, e.g., fixing bugs and adding features. Support only helps individuals. GitHub also notifies thousands of people when issues are filed. We want them to see you communicating an interesting problem, rather than being redirected to Stack Overflow.
------------------------
### System information
- **Have I written custom code (as opposed to using a stock example script
provided in Keras)**:
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**:
- **TensorFlow installed from (source or binary)**:
- **TensorFlow version (use command below)**:
- **Python version**:
- **Bazel version (if compiling from source)**:
- **GPU model and memory**:
- **Exact command to reproduce**:
You can collect some of this information using our environment capture script:
https://github.com/tensorflow/tensorflow/tree/master/tools/tf_env_collect.sh
You can obtain the TensorFlow version with:
```bash
python -c "import tensorflow as tf; print(tf.version.GIT_VERSION, tf.version.VERSION)"
```
### Describe the problem
Describe the problem clearly here. Be sure to convey here why it's a bug in Keras or why the requested feature is needed.
### Source code / logs
Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached. Try to provide a reproducible test case that is the bare minimum necessary to generate the problem.

202
LICENSE

@@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

213
README.md

@@ -1,213 +0,0 @@
# Keras: Deep Learning for humans
![Keras logo](https://s3.amazonaws.com/keras.io/img/keras-logo-2018-large-1200.png)
This repository hosts the development of the Keras library.
Read the documentation at [keras.io](https://keras.io/).
## About Keras
Keras is a deep learning API written in Python,
running on top of the machine learning platform [TensorFlow](https://github.com/tensorflow/tensorflow).
It was developed with a focus on enabling fast experimentation and
providing a delightful developer experience.
**The purpose of Keras is to give an *unfair advantage* to any developer looking to ship ML-powered apps.**
Keras is:
- **Simple** -- but not simplistic. Keras reduces developer *cognitive load*
to free you to focus on the parts of the problem that really matter.
Keras focuses on ease of use, debugging speed, code elegance & conciseness,
maintainability, and deployability (via TFServing, TFLite, TF.js).
- **Flexible** -- Keras adopts the principle of *progressive disclosure of
complexity*: simple workflows should be quick and easy, while arbitrarily
advanced workflows should be *possible* via a clear path that builds upon
what you've already learned.
- **Powerful** -- Keras provides industry-strength performance and
scalability: it is used by organizations and companies including NASA,
YouTube, and Waymo. That's right -- your YouTube recommendations are
powered by Keras, and so is the world's most advanced driverless vehicle.
---
## Keras & TensorFlow 2
[TensorFlow 2](https://www.tensorflow.org/) is an end-to-end, open-source machine learning platform.
You can think of it as an infrastructure layer for
[differentiable programming](https://en.wikipedia.org/wiki/Differentiable_programming).
It combines four key abilities:
- Efficiently executing low-level tensor operations on CPU, GPU, or TPU.
- Computing the gradient of arbitrary differentiable expressions.
- Scaling computation to many devices, such as clusters of hundreds of GPUs.
- Exporting programs ("graphs") to external runtimes such as servers, browsers, mobile and embedded devices.
Keras is the high-level API of TensorFlow 2: an approachable, highly-productive interface
for solving machine learning problems,
with a focus on modern deep learning. It provides essential abstractions and building blocks for developing
and shipping machine learning solutions with high iteration velocity.
Keras empowers engineers and researchers to take full advantage of the scalability
and cross-platform capabilities of TensorFlow 2: you can run Keras on TPU or on large clusters of GPUs,
and you can export your Keras models to run in the browser or on a mobile device.
---
## First contact with Keras
The core data structures of Keras are __layers__ and __models__.
The simplest type of model is the [`Sequential` model](https://keras.io/guides/sequential_model/), a linear stack of layers.
For more complex architectures, you should use the [Keras functional API](https://keras.io/guides/functional_api/),
which allows you to build arbitrary graphs of layers or [write models entirely from scratch via subclassing](https://keras.io/guides/making_new_layers_and_models_via_subclassing/).
Here is the `Sequential` model:
```python
from tensorflow.keras.models import Sequential
model = Sequential()
```
Stacking layers is as easy as `.add()`:
```python
from tensorflow.keras.layers import Dense
model.add(Dense(units=64, activation='relu'))
model.add(Dense(units=10, activation='softmax'))
```
Once your model looks good, configure its learning process with `.compile()`:
```python
model.compile(loss='categorical_crossentropy',
optimizer='sgd',
metrics=['accuracy'])
```
If you need to, you can further configure your optimizer. The Keras philosophy is to keep simple things simple,
while allowing the user to be fully in control when they need to (the ultimate control being the easy extensibility of the source code via subclassing).
```python
model.compile(loss=tf.keras.losses.categorical_crossentropy,
optimizer=tf.keras.optimizers.SGD(
learning_rate=0.01, momentum=0.9, nesterov=True))
```
You can now iterate on your training data in batches:
```python
# x_train and y_train are Numpy arrays.
model.fit(x_train, y_train, epochs=5, batch_size=32)
```
Evaluate your test loss and metrics in one line:
```python
loss_and_metrics = model.evaluate(x_test, y_test, batch_size=128)
```
Or generate predictions on new data:
```python
classes = model.predict(x_test, batch_size=128)
```
What you just saw is the most elementary way to use Keras.
However, Keras is also a highly-flexible framework suitable for iterating on state-of-the-art research ideas.
Keras follows the principle of **progressive disclosure of complexity**: it makes it easy to get started,
yet it makes it possible to handle arbitrarily advanced use cases,
only requiring incremental learning at each step.
In much the same way that you were able to train & evaluate a simple neural network above in a few lines,
you can use Keras to quickly develop new training procedures or exotic model architectures.
Here's a low-level training loop example, combining Keras functionality with the TensorFlow `GradientTape`:
```python
import tensorflow as tf
# Prepare an optimizer.
optimizer = tf.keras.optimizers.Adam()
# Prepare a loss function.
loss_fn = tf.keras.losses.kl_divergence
# Iterate over the batches of a dataset.
for inputs, targets in dataset:
# Open a GradientTape.
with tf.GradientTape() as tape:
# Forward pass.
predictions = model(inputs)
# Compute the loss value for this batch.
loss_value = loss_fn(targets, predictions)
# Get gradients of loss wrt the weights.
gradients = tape.gradient(loss_value, model.trainable_weights)
# Update the weights of the model.
optimizer.apply_gradients(zip(gradients, model.trainable_weights))
```
For more in-depth tutorials about Keras, you can check out:
- [Introduction to Keras for engineers](https://keras.io/getting_started/intro_to_keras_for_engineers/)
- [Introduction to Keras for researchers](https://keras.io/getting_started/intro_to_keras_for_researchers/)
- [Developer guides](https://keras.io/guides/)
- [Other learning resources](https://keras.io/getting_started/learning_resources/)
---
## Installation
Keras comes packaged with TensorFlow 2 as `tensorflow.keras`.
To start using Keras, simply [install TensorFlow 2](https://www.tensorflow.org/install).
You can then import Keras as follows:
```python
from tensorflow import keras
```
---
## Release and compatibility
Keras has **nightly releases** (`keras-nightly` on PyPI)
and **stable releases** (`keras` on PyPI).
The nightly Keras releases are usually compatible with the corresponding version
of the `tf-nightly` releases
(e.g. `keras-nightly==2.7.0.dev2021100607` should be
used with `tf-nightly==2.7.0.dev2021100607`).
We don't maintain backward compatibility for nightly releases.
For stable releases, each Keras
version maps to a specific stable version of TensorFlow.
The table below shows the compatibility version mapping
between TensorFlow versions and Keras versions.
All the release branches can be found on [GitHub](https://github.com/keras-team/keras/releases).
All the release binaries can be found on [PyPI](https://pypi.org/project/keras/#history).
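For instance, a matching pair of nightly packages (using the example versions mentioned above) could be installed together:
```shell
pip install tf-nightly==2.7.0.dev2021100607 keras-nightly==2.7.0.dev2021100607
```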
---
## Support
You can ask questions and join the development discussion:
- In the [TensorFlow forum](https://discuss.tensorflow.org/).
- On the [Keras mailing list](https://groups.google.com/forum/#!forum/keras-users).
---
## Opening an issue
You can also post **bug reports and feature requests** (only)
in [GitHub issues](https://github.com/keras-team/keras/issues).
---
## Opening a PR
We welcome contributions! Before opening a PR, please read
[our contributor guide](https://github.com/keras-team/keras/blob/master/CONTRIBUTING.md),
and the [API design guideline](https://github.com/keras-team/governance/blob/master/keras_api_design_guidelines.md).

@@ -1,53 +0,0 @@
workspace(name = "org_keras")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# Needed by protobuf
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "bazel_skylib",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
],
sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
)
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()
# Needed by protobuf
http_archive(
name = "six_archive",
build_file = "//third_party:six.BUILD",
sha256 = "1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
strip_prefix = "six-1.16.0",
urls = ["https://pypi.python.org/packages/source/s/six/six-1.16.0.tar.gz"],
)
bind(
name = "six",
actual = "@six_archive//:six",
)
http_archive(
name = "com_google_protobuf",
sha256 = "f66073dee0bc159157b0bd7f502d7d1ee0bc76b3c1eac9836927511bdc4b3fc1",
strip_prefix = "protobuf-3.21.9",
urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip"],
)
# ZLIB. Needed by com_google_protobuf.
http_archive(
name = "zlib",
build_file = "@com_google_protobuf//:third_party/zlib.BUILD",
sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30",
strip_prefix = "zlib-1.2.13",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/zlib.net/zlib-1.2.13.tar.gz",
"https://zlib.net/zlib-1.2.13.tar.gz",
],
)
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
protobuf_deps()

@@ -1,385 +0,0 @@
# Description:
# Contains the Keras API (internal TensorFlow version).
load("@org_keras//keras:keras.bzl", "tf_py_test")
package(
default_visibility = [":friends"],
licenses = ["notice"],
)
# Keras code that doesn't live in the core Keras directory, but still
# needs to directly access the Keras code.
# We shouldn't add any client side package to this list.
package_group(
name = "friends",
packages = ["//keras/..."],
)
exports_files(["LICENSE"])
config_setting(
name = "no_keras_py_deps",
define_values = {"no_keras_py_deps": "true"},
visibility = ["//visibility:public"],
)
py_library(
name = "keras",
srcs = [
"__init__.py",
],
srcs_version = "PY3",
deps = [
":backend",
":engine",
"//:expect_h5py_installed",
"//:expect_numpy_installed",
"//:expect_pydot_installed",
"//:expect_scipy_installed",
"//:expect_tensorflow_installed",
"//:expect_yaml_installed",
"//keras/applications",
"//keras/datasets",
"//keras/distribute",
"//keras/estimator",
"//keras/feature_column",
"//keras/layers",
"//keras/layers/rnn:legacy_cell_wrappers",
"//keras/layers/rnn:legacy_cells",
"//keras/legacy_tf_layers:layers",
"//keras/mixed_precision:mixed_precision_experimental",
"//keras/models",
"//keras/optimizers",
"//keras/premade_models",
"//keras/preprocessing",
"//keras/saving",
"//keras/testing_infra:keras_doctest_lib",
"//keras/testing_infra:test_utils", # For keras.__internal__ API
"//keras/utils",
],
)
py_library(
name = "backend",
srcs = ["backend.py"],
srcs_version = "PY3",
deps = [
":backend_config",
"//:expect_numpy_installed",
"//:expect_tensorflow_installed",
"//keras/distribute:distribute_coordinator_utils",
"//keras/engine:keras_tensor",
"//keras/utils:control_flow_util",
"//keras/utils:object_identity",
"//keras/utils:tf_contextlib",
"//keras/utils:tf_inspect",
],
)
py_library(
name = "backend_config",
srcs = ["backend_config.py"],
srcs_version = "PY3",
deps = [
"//:expect_tensorflow_installed",
],
)
# TODO(scottzhu): Cleanup this target and point all the user to keras/engine.
py_library(
name = "engine",
srcs = [
"//keras/metrics",
"//keras/models",
],
srcs_version = "PY3",
deps = [
"//keras/engine",
],
)
py_library(
name = "activations",
srcs = [
"activations.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//keras/layers/activation",
"//keras/utils:engine_utils",
],
)
# TODO(scottzhu): Cleanup this target and point all the user to keras/engine.
py_library(
name = "base_layer",
srcs = [],
srcs_version = "PY3",
deps = [
"//keras/engine:base_layer",
],
)
py_library(
name = "callbacks",
srcs = [
"callbacks.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//:expect_tensorboard_installed",
"//:expect_tensorflow_installed",
"//keras/distribute:distributed_file_utils",
"//keras/distribute:worker_training_state",
"//keras/protobuf:projector_config_proto_py_pb2",
"//keras/utils:engine_utils",
"//keras/utils:mode_keys",
],
)
py_library(
name = "callbacks_v1",
srcs = [
"callbacks_v1.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//:expect_tensorboard_installed",
"//:expect_tensorflow_installed",
"//keras/utils:engine_utils",
],
)
py_library(
name = "constraints",
srcs = [
"constraints.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//keras/utils:engine_utils",
],
)
py_library(
name = "losses",
srcs = [
"losses.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//:expect_tensorflow_installed",
"//keras/saving:saving_lib",
"//keras/utils:engine_utils",
"//keras/utils:generic_utils",
"//keras/utils:tf_utils",
],
)
py_library(
name = "regularizers",
srcs = [
"regularizers.py",
],
srcs_version = "PY3",
deps = [
":backend",
"//keras/utils:engine_utils",
],
)
# Internally, the urllib.request.urlretrieve library requires a Google
# SSL context to be provided in order to work in Python 3. This isn't needed in OSS.
# copybara:uncomment_begin(google-only)
# py_library(
# name = "url_utils",
# srcs = ["google/url_utils.py"],
# srcs_version = "PY3",
# deps = ["//pyglib/contrib/google_ssl"],
# )
# copybara:uncomment_end
# Some tf.distribute-related features require detecting the platform.
# Internally we'd like to recognize Borg, which is not needed in OSS.
# copybara:uncomment_begin(google-only)
# py_library(
# name = "distribute_utils",
# srcs = ["google/distribute_utils.py"],
# deps = [
# "//:expect_six_installed",
# "//:expect_tensorflow_installed",
# "//third_party/py/requests",
# ],
# )
# copybara:uncomment_end
tf_py_test(
name = "activations_test",
size = "small",
srcs = ["activations_test.py"],
python_version = "PY3",
deps = [
":activations",
":backend",
"//:expect_absl_installed",
"//:expect_numpy_installed",
"//:expect_scipy_installed",
"//:expect_tensorflow_installed",
"//keras/layers",
"//keras/layers/activation",
"//keras/layers/core",
"//keras/testing_infra:test_combinations",
],
)
tf_py_test(
name = "constraints_test",
size = "small",
srcs = ["constraints_test.py"],
python_version = "PY3",
deps = [
":backend",
":constraints",
"//:expect_numpy_installed",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
],
)
tf_py_test(
name = "regularizers_test",
size = "medium",
srcs = ["regularizers_test.py"],
python_version = "PY3",
deps = [
":keras",
"//:expect_absl_installed",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
],
)
tf_py_test(
name = "losses_test",
size = "small",
srcs = ["losses_test.py"],
python_version = "PY3",
shard_count = 4,
tags = [
"noasan", # b/186128525
],
deps = [
":backend",
":losses",
"//:expect_numpy_installed",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
"//keras/utils:engine_utils",
],
)
tf_py_test(
name = "callbacks_test",
size = "medium",
srcs = ["callbacks_test.py"],
python_version = "PY3",
shard_count = 6,
tags = [
"no_pip", # TODO(b/276923757)
"no_tfrt", # TODO(b/179690526)
"notsan",
],
deps = [
":keras",
"//:expect_absl_installed",
"//:expect_numpy_installed",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
],
)
tf_py_test(
name = "callbacks_v1_test",
size = "medium",
srcs = ["callbacks_v1_test.py"],
python_version = "PY3",
tags = [
"nomac", # Using profiler causes segfault in MacOS runs.
"notsan",
],
deps = [
":callbacks",
":callbacks_v1",
"//:expect_absl_installed",
"//:expect_numpy_installed",
"//:expect_tensorflow_installed",
"//keras/engine",
"//keras/layers",
"//keras/testing_infra:test_combinations",
"//keras/testing_infra:test_utils",
"//keras/utils:np_utils",
],
)
tf_py_test(
name = "backend_test",
size = "medium",
srcs = ["backend_test.py"],
python_version = "PY3",
shard_count = 4,
deps = [
":backend",
":engine",
"//:expect_absl_installed",
"//:expect_numpy_installed",
"//:expect_scipy_installed",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
],
)
tf_py_test(
name = "backend_config_test",
size = "medium",
srcs = ["backend_config_test.py"],
python_version = "PY3",
deps = [
":backend",
":backend_config",
"//:expect_tensorflow_installed",
"//keras/testing_infra:test_combinations",
],
)
# copybara:uncomment_begin(google-only)
# tf_py_test(
# name = "url_utils_test",
# srcs = ["google/url_utils_test.py"],
# python_version = "PY3",
# deps = [
# ":url_utils",
# "//:expect_tensorflow_installed",
# "//testing/pymocks:matchers",
# ],
# )
# copybara:uncomment_end
# copybara:uncomment_begin(google-only)
# tf_py_test(
# name = "distribute_utils_test",
# srcs = ["google/distribute_utils_test.py"],
# python_version = "PY3",
# deps = [
# ":distribute_utils",
# "//:expect_tensorflow_installed",
# "//keras/distribute",
# "//testing/pymocks:matchers",
# ],
# )
# copybara:uncomment_end

@ -1,33 +0,0 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation of the Keras API, the high-level API of TensorFlow.
Detailed documentation and user guides are available at
[keras.io](https://keras.io).
"""
from keras import distribute
from keras import models
from keras.engine.input_layer import Input
from keras.engine.sequential import Sequential
from keras.engine.training import Model
# isort: off
from tensorflow.python import tf2
from tensorflow.python.util.tf_export import keras_export
__version__ = "2.13.0"
keras_export("keras.__version__").export_constant(__name__, "__version__")

@ -1,709 +0,0 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Built-in activation functions."""
import sys
import types
import tensorflow.compat.v2 as tf
import keras.layers.activation as activation_layers
from keras import backend
from keras.saving import object_registration
from keras.saving import serialization_lib
from keras.saving.legacy import serialization as legacy_serialization
from keras.saving.legacy.saved_model import utils as saved_model_utils
from keras.utils import generic_utils
# isort: off
from tensorflow.python.util.tf_export import keras_export
# b/123041942
# In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras
# layers, it gets serialized as 'softmax_v2' instead of 'softmax' as the
# internal method name is returned in serialization. This results in errors in
# model exporting and loading as Keras can't find any activation function with
# the name of `softmax_v2`.
# This dict maps the activation function name from its v2 version to its
# canonical name.
_TF_ACTIVATIONS_V2 = {
"softmax_v2": "softmax",
}
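# Illustrative example of the effect of this mapping (a sketch, not part of
# the original file; exact behavior may vary across TF/Keras versions):
#
#   >>> serialize(get(tf.nn.softmax))  # the internal __name__ is "softmax_v2"
#   'softmax'
#
# i.e. the canonical Keras name is returned instead of the internal v2 name.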
@keras_export("keras.activations.softmax")
@tf.__internal__.dispatch.add_dispatch_support
def softmax(x, axis=-1):
"""Softmax converts a vector of values to a probability distribution.
The elements of the output vector are in range (0, 1) and sum to 1.
Each vector is handled independently. The `axis` argument sets which axis
of the input the function is applied along.
Softmax is often used as the activation for the last
layer of a classification network because the result can be interpreted as
a probability distribution.
The softmax of each vector x is computed as
`exp(x) / tf.reduce_sum(exp(x))`.
The input values are the log-odds of the resulting probability.
Args:
x : Input tensor.
axis: Integer, axis along which the softmax normalization is applied.
Returns:
Tensor, output of softmax transformation (all values are non-negative
and sum to 1).
Examples:
**Example 1: standalone usage**
>>> inputs = tf.random.normal(shape=(32, 10))
>>> outputs = tf.keras.activations.softmax(inputs)
>>> tf.reduce_sum(outputs[0, :]) # Each sample in the batch now sums to 1
<tf.Tensor: shape=(), dtype=float32, numpy=1.0000001>
**Example 2: usage in a `Dense` layer**
>>> layer = tf.keras.layers.Dense(32,
... activation=tf.keras.activations.softmax)
"""
if x.shape.rank <= 1:
raise ValueError(
f"Cannot apply softmax to a tensor that is 1D. Received input: {x}"
)
if isinstance(axis, int):
output = tf.nn.softmax(x, axis=axis)
else:
# nn.softmax does not support tuple axis.
numerator = tf.exp(x - tf.reduce_max(x, axis=axis, keepdims=True))
denominator = tf.reduce_sum(numerator, axis=axis, keepdims=True)
output = numerator / denominator
# Cache the logits to use for crossentropy loss.
output._keras_logits = x
return output
@keras_export("keras.activations.elu")
@tf.__internal__.dispatch.add_dispatch_support
def elu(x, alpha=1.0):
"""Exponential Linear Unit.
The exponential linear unit (ELU) with `alpha > 0` is:
`x` if `x > 0` and
`alpha * (exp(x) - 1)` if `x < 0`
The ELU hyperparameter `alpha` controls the value to which an
ELU saturates for negative net inputs. ELUs diminish the
vanishing gradient effect.
ELUs have negative values, which push the mean of the activations
closer to zero.
Mean activations that are closer to zero enable faster learning as they
bring the gradient closer to the natural gradient.
ELUs saturate to a negative value when the argument gets smaller.
Saturation means a small derivative which decreases the variation
and the information that is propagated to the next layer.
Example Usage:
>>> import tensorflow as tf
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='elu',
... input_shape=(28, 28, 1)))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
<tensorflow.python.keras.engine.sequential.Sequential object ...>
Args:
x: Input tensor.
alpha: A scalar, slope of negative section. `alpha` controls the value
to which an ELU saturates for negative net inputs.
Returns:
The exponential linear unit (ELU) activation function: `x` if `x > 0`
and `alpha * (exp(x) - 1)` if `x < 0`.
Reference:
- [Fast and Accurate Deep Network Learning by Exponential Linear Units
(ELUs) (Clevert et al, 2016)](https://arxiv.org/abs/1511.07289)
"""
return backend.elu(x, alpha)
@keras_export("keras.activations.selu")
@tf.__internal__.dispatch.add_dispatch_support
def selu(x):
"""Scaled Exponential Linear Unit (SELU).
The Scaled Exponential Linear Unit (SELU) activation function is defined as:
- `if x > 0: return scale * x`
- `if x < 0: return scale * alpha * (exp(x) - 1)`
where `alpha` and `scale` are pre-defined constants
(`alpha=1.67326324` and `scale=1.05070098`).
Basically, the SELU activation function multiplies `scale` (> 1) with the
output of the `tf.keras.activations.elu` function to ensure a slope larger
than one for positive inputs.
The values of `alpha` and `scale` are
chosen so that the mean and variance of the inputs are preserved
between two consecutive layers as long as the weights are initialized
correctly (see `tf.keras.initializers.LecunNormal` initializer)
and the number of input units is "large enough"
(see reference paper for more information).
Example Usage:
>>> num_classes = 10 # 10-class problem
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(64, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(32, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(16, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(num_classes, activation='softmax'))
Args:
x: A tensor or variable to compute the activation function for.
Returns:
The scaled exponential unit activation: `scale * elu(x, alpha)`.
Notes:
- To be used together with the
`tf.keras.initializers.LecunNormal` initializer.
- To be used together with the dropout variant
`tf.keras.layers.AlphaDropout` (not regular dropout).
References:
- [Klambauer et al., 2017](https://arxiv.org/abs/1706.02515)
"""
return tf.nn.selu(x)
@keras_export("keras.activations.softplus")
@tf.__internal__.dispatch.add_dispatch_support
def softplus(x):
"""Softplus activation function, `softplus(x) = log(exp(x) + 1)`.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.softplus(a)
>>> b.numpy()
array([2.0611537e-09, 3.1326166e-01, 6.9314718e-01, 1.3132616e+00,
2.0000000e+01], dtype=float32)
Args:
x: Input tensor.
Returns:
The softplus activation: `log(exp(x) + 1)`.
"""
return tf.math.softplus(x)
@keras_export("keras.activations.softsign")
@tf.__internal__.dispatch.add_dispatch_support
def softsign(x):
"""Softsign activation function, `softsign(x) = x / (abs(x) + 1)`.
Example Usage:
>>> a = tf.constant([-1.0, 0.0, 1.0], dtype = tf.float32)
>>> b = tf.keras.activations.softsign(a)
>>> b.numpy()
array([-0.5, 0. , 0.5], dtype=float32)
Args:
x: Input tensor.
Returns:
The softsign activation: `x / (abs(x) + 1)`.
"""
return tf.math.softsign(x)
@keras_export("keras.activations.swish")
@tf.__internal__.dispatch.add_dispatch_support
def swish(x):
"""Swish activation function, `swish(x) = x * sigmoid(x)`.
The swish function, `x * sigmoid(x)`, is a smooth, non-monotonic function
that consistently matches or outperforms ReLU on deep networks; it is
unbounded above and bounded below.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.swish(a)
>>> b.numpy()
array([-4.1223075e-08, -2.6894143e-01, 0.0000000e+00, 7.3105860e-01,
2.0000000e+01], dtype=float32)
Args:
x: Input tensor.
Returns:
The swish activation applied to `x` (see reference paper for details).
Reference:
- [Ramachandran et al., 2017](https://arxiv.org/abs/1710.05941)
"""
return tf.nn.silu(x)
@keras_export("keras.activations.relu")
@tf.__internal__.dispatch.add_dispatch_support
def relu(x, alpha=0.0, max_value=None, threshold=0.0):
"""Applies the rectified linear unit activation function.
With default values, this returns the standard ReLU activation:
`max(x, 0)`, the element-wise maximum of 0 and the input tensor.
Modifying default parameters allows you to use non-zero thresholds,
change the max value of the activation,
and to use a non-zero multiple of the input for values below the threshold.
Example:
>>> foo = tf.constant([-10, -5, 0.0, 5, 10], dtype = tf.float32)
>>> tf.keras.activations.relu(foo).numpy()
array([ 0., 0., 0., 5., 10.], dtype=float32)
>>> tf.keras.activations.relu(foo, alpha=0.5).numpy()
array([-5. , -2.5, 0. , 5. , 10. ], dtype=float32)
>>> tf.keras.activations.relu(foo, max_value=5.).numpy()
array([0., 0., 0., 5., 5.], dtype=float32)
>>> tf.keras.activations.relu(foo, threshold=5.).numpy()
array([-0., -0., 0., 0., 10.], dtype=float32)
Args:
x: Input `tensor` or `variable`.
alpha: A `float` that governs the slope for values lower than the
threshold.
max_value: A `float` that sets the saturation threshold (the largest
value the function will return).
threshold: A `float` giving the threshold value of the activation
function below which values will be damped or set to zero.
Returns:
A `Tensor` representing the input tensor,
transformed by the relu activation function.
Tensor will be of the same shape and dtype of input `x`.
"""
return backend.relu(
x, alpha=alpha, max_value=max_value, threshold=threshold
)
@keras_export("keras.activations.gelu", v1=[])
@tf.__internal__.dispatch.add_dispatch_support
def gelu(x, approximate=False):
"""Applies the Gaussian error linear unit (GELU) activation function.
Gaussian error linear unit (GELU) computes
`x * P(X <= x)`, where `P(X) ~ N(0, 1)`.
The (GELU) nonlinearity weights inputs by their value, rather than gates
inputs by their sign as in ReLU.
Example:
>>> x = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype=tf.float32)
>>> y = tf.keras.activations.gelu(x)
>>> y.numpy()
array([-0.00404951, -0.15865529, 0. , 0.8413447 , 2.9959507 ],
dtype=float32)
>>> y = tf.keras.activations.gelu(x, approximate=True)
>>> y.numpy()
array([-0.00363752, -0.15880796, 0. , 0.841192 , 2.9963627 ],
dtype=float32)
Args:
x: Input tensor.
approximate: A `bool`, whether to enable approximation.
Returns:
The gaussian error linear activation:
`0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))`
if `approximate` is `True` or
`x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2)))`,
where `P(X) ~ N(0, 1)`,
if `approximate` is `False`.
Reference:
- [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415)
"""
return tf.nn.gelu(x, approximate)
@keras_export("keras.activations.tanh")
@tf.__internal__.dispatch.add_dispatch_support
def tanh(x):
"""Hyperbolic tangent activation function.
Example:
>>> a = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype = tf.float32)
>>> b = tf.keras.activations.tanh(a)
>>> b.numpy()
array([-0.9950547, -0.7615942, 0., 0.7615942, 0.9950547], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor of same shape and dtype of input `x`, with tanh activation:
`tanh(x) = sinh(x)/cosh(x) = ((exp(x) - exp(-x))/(exp(x) + exp(-x)))`.
"""
return tf.tanh(x)
@keras_export("keras.activations.sigmoid")
@tf.__internal__.dispatch.add_dispatch_support
def sigmoid(x):
"""Sigmoid activation function, `sigmoid(x) = 1 / (1 + exp(-x))`.
Applies the sigmoid activation function. For small values (<-5),
`sigmoid` returns a value close to zero, and for large values (>5)
the result of the function gets close to 1.
Sigmoid is equivalent to a 2-element Softmax, where the second element is
assumed to be zero. The sigmoid function always returns a value between
0 and 1.
Example:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.sigmoid(a)
>>> b.numpy()
array([2.0611537e-09, 2.6894143e-01, 5.0000000e-01, 7.3105860e-01,
1.0000000e+00], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor with the sigmoid activation: `1 / (1 + exp(-x))`.
"""
output = tf.sigmoid(x)
# Cache the logits to use for crossentropy loss.
output._keras_logits = x
return output
@keras_export("keras.activations.exponential")
@tf.__internal__.dispatch.add_dispatch_support
def exponential(x):
"""Exponential activation function.
Example:
>>> a = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype = tf.float32)
>>> b = tf.keras.activations.exponential(a)
>>> b.numpy()
array([0.04978707, 0.36787945, 1., 2.7182817 , 20.085537], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor with exponential activation: `exp(x)`.
"""
return tf.exp(x)
@keras_export("keras.activations.hard_sigmoid")
@tf.__internal__.dispatch.add_dispatch_support
def hard_sigmoid(x):
"""Hard sigmoid activation function.
A faster, piecewise linear approximation of the sigmoid activation.
Ref: https://en.wikipedia.org/wiki/Hard_sigmoid
Example:
>>> a = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype = tf.float32)
>>> b = tf.keras.activations.hard_sigmoid(a)
>>> b.numpy()
array([0. , 0.3, 0.5, 0.7, 1. ], dtype=float32)
Args:
x: Input tensor.
Returns:
The hard sigmoid activation, defined as:
- `if x < -2.5: return 0`
- `if x > 2.5: return 1`
- `if -2.5 <= x <= 2.5: return 0.2 * x + 0.5`
"""
return backend.hard_sigmoid(x)
@keras_export("keras.activations.linear")
@tf.__internal__.dispatch.add_dispatch_support
def linear(x):
"""Linear activation function (pass-through).
Example:
>>> a = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype = tf.float32)
>>> b = tf.keras.activations.linear(a)
>>> b.numpy()
array([-3., -1., 0., 1., 3.], dtype=float32)
Args:
x: Input tensor.
Returns:
The input, unmodified.
"""
return x
@keras_export("keras.activations.mish")
@tf.__internal__.dispatch.add_dispatch_support
def mish(x):
"""Mish activation function.
It is defined as:
```python
def mish(x):
return x * tanh(softplus(x))
```
where `softplus` is defined as:
```python
def softplus(x):
return log(exp(x) + 1)
```
Example:
>>> a = tf.constant([-3.0, -1.0, 0.0, 1.0], dtype = tf.float32)
>>> b = tf.keras.activations.mish(a)
>>> b.numpy()
array([-0.14564745, -0.30340144, 0., 0.86509836], dtype=float32)
Args:
x: Input tensor.
Returns:
The mish activation.
Reference:
- [Mish: A Self Regularized Non-Monotonic
Activation Function](https://arxiv.org/abs/1908.08681)
"""
return x * tf.math.tanh(tf.math.softplus(x))
@keras_export("keras.activations.serialize")
@tf.__internal__.dispatch.add_dispatch_support
def serialize(activation, use_legacy_format=False):
"""Returns the string identifier of an activation function.
Args:
activation: Function object.
Returns:
String denoting the name attribute of the input function.
Example:
>>> tf.keras.activations.serialize(tf.keras.activations.tanh)
'tanh'
>>> tf.keras.activations.serialize(tf.keras.activations.sigmoid)
'sigmoid'
>>> tf.keras.activations.serialize('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function 'abcd' cannot be serialized.
Raises:
ValueError: The input function is not a valid one.
"""
if (
hasattr(activation, "__name__")
and activation.__name__ in _TF_ACTIVATIONS_V2
):
return _TF_ACTIVATIONS_V2[activation.__name__]
if use_legacy_format:
return legacy_serialization.serialize_keras_object(activation)
fn_config = serialization_lib.serialize_keras_object(activation)
if (
not tf.__internal__.tf2.enabled()
or saved_model_utils.in_tf_saved_model_scope()
):
return fn_config
if "config" not in fn_config:
raise ValueError(
f"Unknown activation function '{activation}' cannot be "
"serialized due to invalid function name. Make sure to use "
"an activation name that matches the references defined in "
"activations.py or use `@keras.utils.register_keras_serializable` "
"for any custom activations. "
f"config={fn_config}"
)
if not isinstance(activation, types.FunctionType):
# Case for additional custom activations represented by objects
return fn_config
if (
isinstance(fn_config["config"], str)
and fn_config["config"] not in globals()
):
# Case for custom activation functions from external activations modules
fn_config["config"] = object_registration.get_registered_name(
activation
)
return fn_config
# Case for keras.activations builtins (simply return name)
return fn_config["config"]
# Add additional globals so that deserialize() can find these common activation
# functions
leaky_relu = tf.nn.leaky_relu
log_softmax = tf.nn.log_softmax
relu6 = tf.nn.relu6
silu = tf.nn.silu
@keras_export("keras.activations.deserialize")
@tf.__internal__.dispatch.add_dispatch_support
def deserialize(name, custom_objects=None, use_legacy_format=False):
"""Returns activation function given a string identifier.
Args:
name: The name of the activation function.
custom_objects: Optional `{function_name: function_obj}`
dictionary listing user-provided activation functions.
Returns:
Corresponding activation function.
Example:
>>> tf.keras.activations.deserialize('linear')
<function linear at 0x1239596a8>
>>> tf.keras.activations.deserialize('sigmoid')
<function sigmoid at 0x123959510>
>>> tf.keras.activations.deserialize('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function 'abcd' cannot be deserialized.
Raises:
ValueError: `Unknown activation function` if the input string does not
denote any defined TensorFlow activation function.
"""
activation_functions = {}
current_module = sys.modules[__name__]
# we put 'current_module' after 'activation_layers' to prefer the local one
# if there is a collision
generic_utils.populate_dict_with_module_objects(
activation_functions,
(activation_layers, current_module),
obj_filter=callable,
)
if use_legacy_format:
return legacy_serialization.deserialize_keras_object(
name,
module_objects=activation_functions,
custom_objects=custom_objects,
printable_module_name="activation function",
)
returned_fn = serialization_lib.deserialize_keras_object(
name,
module_objects=activation_functions,
custom_objects=custom_objects,
printable_module_name="activation function",
)
if isinstance(returned_fn, str):
raise ValueError(
f"Unknown activation function '{name}' cannot be deserialized."
)
return returned_fn
@keras_export("keras.activations.get")
@tf.__internal__.dispatch.add_dispatch_support
def get(identifier):
"""Returns function.
Args:
identifier: Function or string
Returns:
Function corresponding to the input string or input function.
Example:
>>> tf.keras.activations.get('softmax')
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(tf.keras.activations.softmax)
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(None)
<function linear at 0x1239596a8>
>>> tf.keras.activations.get(abs)
<built-in function abs>
>>> tf.keras.activations.get('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function:abcd
Raises:
ValueError: Input is an unknown function or string, i.e., the input does
not denote any defined function.
"""
if identifier is None:
return linear
if isinstance(identifier, (str, dict)):
use_legacy_format = (
"module" not in identifier
if isinstance(identifier, dict)
else False
)
return deserialize(identifier, use_legacy_format=use_legacy_format)
elif callable(identifier):
return identifier
raise TypeError(
f"Could not interpret activation function identifier: {identifier}"
)

@ -1,299 +0,0 @@
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras activation functions."""
import numpy as np
import tensorflow.compat.v2 as tf
from absl.testing import parameterized
import keras.layers.activation as activation_layers
from keras import activations
from keras import backend
from keras.layers import core
from keras.layers import serialization
from keras.testing_infra import test_combinations
def _ref_softmax(values):
m = np.max(values)
e = np.exp(values - m)
return e / np.sum(e)
def _ref_softplus(x):
return np.log(np.ones_like(x) + np.exp(x))
@test_combinations.generate(test_combinations.combine(mode=["graph", "eager"]))
class KerasActivationsTest(tf.test.TestCase, parameterized.TestCase):
def test_serialization(self):
all_activations = [
"softmax",
"relu",
"elu",
"tanh",
"sigmoid",
"hard_sigmoid",
"linear",
"softplus",
"softsign",
"selu",
"gelu",
"relu6",
"mish",
]
for name in all_activations:
fn = activations.get(name)
ref_fn = getattr(activations, name)
assert fn == ref_fn
config = activations.serialize(fn)
fn = activations.deserialize(config)
assert fn == ref_fn
def test_serialization_v2(self):
activation_map = {tf.math.softmax: "softmax"}
for fn_v2_key in activation_map:
fn_v2 = activations.get(fn_v2_key)
config = activations.serialize(fn_v2)
fn = activations.deserialize(config)
assert fn.__name__ == activation_map[fn_v2_key]
def test_serialization_with_layers(self):
activation = activation_layers.LeakyReLU(alpha=0.1)
layer = core.Dense(3, activation=activation)
config = serialization.serialize(layer)
# with custom objects
deserialized_layer = serialization.deserialize(
config, custom_objects={"LeakyReLU": activation}
)
self.assertEqual(
deserialized_layer.__class__.__name__, layer.__class__.__name__
)
self.assertEqual(
deserialized_layer.activation.__class__.__name__,
activation.__class__.__name__,
)
# without custom objects
deserialized_layer = serialization.deserialize(config)
self.assertEqual(
deserialized_layer.__class__.__name__, layer.__class__.__name__
)
self.assertEqual(
deserialized_layer.activation.__class__.__name__,
activation.__class__.__name__,
)
def test_softmax(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.softmax(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = _ref_softmax(test_values[0])
self.assertAllClose(result[0], expected, rtol=1e-05)
x = backend.placeholder(ndim=1)
with self.assertRaises(ValueError):
activations.softmax(x)
def test_softmax_2d_axis_0(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.softmax(x, axis=0)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = np.zeros((2, 5))
for i in range(5):
expected[:, i] = _ref_softmax(test_values[:, i])
self.assertAllClose(result, expected, rtol=1e-05)
def test_softmax_3d_axis_tuple(self):
x = backend.placeholder(ndim=3)
f = backend.function([x], [activations.softmax(x, axis=(1, 2))])
test_values = np.random.random((2, 3, 5))
result = f([test_values])[0]
expected = np.zeros((2, 3, 5))
for i in range(2):
expected[i, :, :] = _ref_softmax(test_values[i, :, :])
self.assertAllClose(result, expected, rtol=1e-05)
def test_temporal_softmax(self):
x = backend.placeholder(shape=(2, 2, 3))
f = backend.function([x], [activations.softmax(x)])
test_values = np.random.random((2, 2, 3)) * 10
result = f([test_values])[0]
expected = _ref_softmax(test_values[0, 0])
self.assertAllClose(result[0, 0], expected, rtol=1e-05)
def test_selu(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.selu(x)])
alpha = 1.6732632423543772848170429916717
scale = 1.0507009873554804934193349852946
positive_values = np.array([[1, 2]], dtype=backend.floatx())
result = f([positive_values])[0]
self.assertAllClose(result, positive_values * scale, rtol=1e-05)
negative_values = np.array([[-1, -2]], dtype=backend.floatx())
result = f([negative_values])[0]
true_result = (np.exp(negative_values) - 1) * scale * alpha
self.assertAllClose(result, true_result)
def test_softplus(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.softplus(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = _ref_softplus(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_softsign(self):
def softsign(x):
return np.divide(x, np.ones_like(x) + np.absolute(x))
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.softsign(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = softsign(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_sigmoid(self):
def ref_sigmoid(x):
if x >= 0:
return 1 / (1 + np.exp(-x))
else:
z = np.exp(x)
return z / (1 + z)
sigmoid = np.vectorize(ref_sigmoid)
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.sigmoid(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = sigmoid(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_hard_sigmoid(self):
def ref_hard_sigmoid(x):
x = (x * 0.2) + 0.5
z = 0.0 if x <= 0 else (1.0 if x >= 1 else x)
return z
hard_sigmoid = np.vectorize(ref_hard_sigmoid)
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.hard_sigmoid(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = hard_sigmoid(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_relu(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.relu(x)])
positive_values = np.random.random((2, 5))
result = f([positive_values])[0]
self.assertAllClose(result, positive_values, rtol=1e-05)
negative_values = np.random.uniform(-1, 0, (2, 5))
result = f([negative_values])[0]
expected = np.zeros((2, 5))
self.assertAllClose(result, expected, rtol=1e-05)
def test_gelu(self):
def gelu(x, approximate=False):
if approximate:
return (
0.5
* x
* (
1.0
+ np.tanh(
np.sqrt(2.0 / np.pi)
* (x + 0.044715 * np.power(x, 3))
)
)
)
else:
from scipy.stats import norm
return x * norm.cdf(x)
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.gelu(x)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = gelu(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
f = backend.function([x], [activations.gelu(x, True)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
expected = gelu(test_values, True)
self.assertAllClose(result, expected, rtol=1e-05)
def test_elu(self):
x = backend.placeholder(ndim=2)
f = backend.function([x], [activations.elu(x, 0.5)])
test_values = np.random.random((2, 5))
result = f([test_values])[0]
self.assertAllClose(result, test_values, rtol=1e-05)
negative_values = np.array([[-1, -2]], dtype=backend.floatx())
result = f([negative_values])[0]
true_result = (np.exp(negative_values) - 1) / 2
self.assertAllClose(result, true_result)
def test_tanh(self):
test_values = np.random.random((2, 5))
x = backend.placeholder(ndim=2)
exp = activations.tanh(x)
f = backend.function([x], [exp])
result = f([test_values])[0]
expected = np.tanh(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_exponential(self):
test_values = np.random.random((2, 5))
x = backend.placeholder(ndim=2)
exp = activations.exponential(x)
f = backend.function([x], [exp])
result = f([test_values])[0]
expected = np.exp(test_values)
self.assertAllClose(result, expected, rtol=1e-05)
def test_mish(self):
test_values = np.random.random((2, 5))
x = backend.placeholder(ndim=2)
output = activations.mish(x)
f = backend.function([x], [output])
result = f([test_values])[0]
expected = test_values * np.tanh(_ref_softplus(test_values))
self.assertAllClose(result, expected, rtol=1e-05)
def test_linear(self):
x = np.random.random((10, 5))
self.assertAllClose(x, activations.linear(x))
def test_invalid_usage(self):
with self.assertRaises(ValueError):
activations.get("unknown")
# The following should be possible but should raise a warning:
activations.get(activation_layers.LeakyReLU())
if __name__ == "__main__":
tf.test.main()

@ -1,197 +0,0 @@
# Description:
# Package for Keras.
load("//keras/api:api_gen.bzl", "gen_api_init_files")
load("//keras/api:api_init_files.bzl", "KERAS_API_INIT_FILES", "KERAS_API_INIT_FILES_V1")
package(
default_visibility = [
"//keras:friends",
"//third_party/py/tensorflow:__subpackages__",
],
licenses = ["notice"], # Apache 2.0 License
)
exports_files(
[
"create_python_api_wrapper.py",
],
)
keras_packages = [
"keras",
"keras.activations",
"keras.applications.convnext",
"keras.applications.densenet",
"keras.applications.efficientnet",
"keras.applications.efficientnet_v2",
"keras.applications.imagenet_utils",
"keras.applications.inception_resnet_v2",
"keras.applications.inception_v3",
"keras.applications.mobilenet",
"keras.applications.mobilenet_v2",
"keras.applications.mobilenet_v3",
"keras.applications.nasnet",
"keras.applications.regnet",
"keras.applications.resnet",
"keras.applications.resnet_v2",
"keras.applications.resnet_rs",
"keras.applications.vgg16",
"keras.applications.vgg19",
"keras.applications.xception",
"keras.backend",
"keras.backend_config",
"keras.callbacks",
"keras.callbacks_v1",
"keras.constraints",
"keras.datasets.boston_housing",
"keras.datasets.cifar10",
"keras.datasets.cifar100",
"keras.datasets.fashion_mnist",
"keras.datasets.imdb",
"keras.datasets.mnist",
"keras.datasets.reuters",
"keras.dtensor.layout_map",
"keras.engine.base_layer",
"keras.engine.data_adapter",
"keras.engine.input_layer",
"keras.engine.input_spec",
"keras.engine.sequential",
"keras.engine.training",
"keras.estimator",
"keras.export.export_lib",
"keras.feature_column.dense_features",
"keras.feature_column.dense_features_v2",
"keras.feature_column.sequence_feature_column",
# Placeholder for internal API
"keras.initializers",
"keras.initializers.initializers",
"keras.initializers.initializers_v1",
"keras.layers.activation",
"keras.layers.attention",
"keras.layers.convolutional",
"keras.layers.core",
"keras.layers.locally_connected",
"keras.layers.merging",
"keras.layers.normalization",
"keras.layers.preprocessing",
"keras.layers.pooling",
"keras.layers.regularization",
"keras.layers.rnn",
"keras.layers.rnn.legacy_cell_wrappers",
"keras.layers.rnn.legacy_cells",
"keras.layers.serialization",
"keras.legacy_tf_layers.base",
"keras.legacy_tf_layers.convolutional",
"keras.legacy_tf_layers.core",
"keras.legacy_tf_layers.normalization",
"keras.legacy_tf_layers.pooling",
"keras.losses",
"keras.metrics",
"keras.mixed_precision.loss_scale_optimizer",
"keras.mixed_precision.policy",
"keras.models",
"keras.optimizers.adadelta",
"keras.optimizers.adagrad",
"keras.optimizers.adam",
"keras.optimizers.adamax",
"keras.optimizers.ftrl",
"keras.optimizers.nadam",
"keras.optimizers.sgd",
"keras.optimizers.optimizer",
"keras.optimizers.rmsprop",
"keras.optimizers.legacy.adadelta",
"keras.optimizers.legacy.adagrad",
"keras.optimizers.legacy.adam",
"keras.optimizers.legacy.adamax",
"keras.optimizers.legacy.ftrl",
"keras.optimizers.legacy.gradient_descent",
"keras.optimizers.legacy.nadam",
"keras.optimizers.legacy.optimizer_v2",
"keras.optimizers.legacy.rmsprop",
"keras.optimizers.schedules.learning_rate_schedule",
"keras.optimizers",
"keras.premade_models.linear",
"keras.premade_models.wide_deep",
"keras.preprocessing.image",
"keras.preprocessing.sequence",
"keras.preprocessing.text",
"keras.regularizers",
"keras.saving.legacy.model_config",
"keras.saving.legacy.save",
"keras.saving.legacy.serialization",
"keras.testing_infra.test_utils",
"keras.utils.data_utils",
"keras.utils.generic_utils",
"keras.utils.io_utils",
"keras.utils.layer_utils",
"keras.utils.losses_utils",
"keras.utils.np_utils",
"keras.utils.tf_utils",
"keras.utils.vis_utils",
]
# The target used by the PIP package, which needs to generate API init files during the OSS build.
py_library(
name = "keras_api",
srcs = [
":keras_python_api_gen",
":keras_python_api_gen_compat_v1",
":keras_python_api_gen_compat_v2",
],
srcs_version = "PY3",
deps = [
"//:expect_tensorflow_installed",
"//keras",
],
)
gen_api_init_files(
name = "keras_python_api_gen",
api_name = "keras",
api_version = 1,
output_files = KERAS_API_INIT_FILES_V1,
output_package = "keras.api",
package_deps = [
"//keras",
"//:expect_tensorflow_installed",
# "//third_party/tensorflow/lite/python:analyzer",
# "//third_party/tensorflow/lite/python:lite",
# "//third_party/tensorflow/lite/python/authoring",
],
packages = keras_packages,
)
gen_api_init_files(
name = "keras_python_api_gen_compat_v1",
api_name = "keras",
api_version = 1,
output_dir = "_v1/",
output_files = KERAS_API_INIT_FILES_V1,
output_package = "keras.api._v1",
package_deps = [
"//keras",
"//:expect_tensorflow_installed",
# "//third_party/tensorflow/lite/python:analyzer",
# "//third_party/tensorflow/lite/python:lite",
# "//third_party/tensorflow/lite/python/authoring",
],
packages = keras_packages,
)
gen_api_init_files(
name = "keras_python_api_gen_compat_v2",
api_name = "keras",
api_version = 2,
output_dir = "_v2/",
output_files = KERAS_API_INIT_FILES,
output_package = "keras.api._v2",
package_deps = [
"//keras",
"//:expect_tensorflow_installed",
# "//third_party/tensorflow/lite/python:analyzer",
# "//third_party/tensorflow/lite/python:lite",
# "//third_party/tensorflow/lite/python/authoring",
],
packages = keras_packages,
)

@ -1,129 +0,0 @@
"""Targets for generating Keras API __init__.py files.
This bzl file is copied with slight modifications from
tensorflow/python/tools/api/generator/api_gen.bzl
so that we can avoid needing to depend on TF source code in the Bazel build.
Note that because this file is executed during the build and imports
TensorFlow code, the TensorFlow Python package must be installed in order
to build Keras with Bazel.
"""
load("@org_keras//keras:keras.bzl", "if_indexing_source_code")
def gen_api_init_files(
name,
output_files,
root_init_template = None,
srcs = [],
api_name = "keras",
api_version = 2,
compat_api_versions = [],
compat_init_templates = [],
packages = ["keras"],
package_deps = [
"//keras:keras",
],
output_package = "keras.api",
output_dir = "",
root_file_name = "__init__.py"):
"""Creates API directory structure and __init__.py files.
Creates a genrule that generates a directory structure with __init__.py
files that import all exported modules (i.e. modules with tf_export
decorators).
Args:
name: name of genrule to create.
output_files: List of __init__.py files that should be generated.
This list should include a file name for every module exported using
tf_export. For example, if an op is decorated with
@tf_export('module1.module2', 'module3'), then output_files should
include module1/module2/__init__.py and module3/__init__.py.
root_init_template: Python init file that should be used as template for
root __init__.py file. "# API IMPORTS PLACEHOLDER" comment inside this
template will be replaced with root imports collected by this genrule.
srcs: genrule sources. If passing root_init_template, the template file
must be included in sources.
api_name: Name of the project that you want to generate API files for
(e.g. "tensorflow" or "estimator").
api_version: TensorFlow API version to generate. Must be either 1 or 2.
compat_api_versions: Older TensorFlow API versions to generate under
compat/ directory.
compat_init_templates: Python init files that should be used as templates
for top level __init__.py files under compat/vN directories.
The "# API IMPORTS PLACEHOLDER" comment inside each template
will be replaced with root imports collected by this genrule.
packages: Python packages containing the @tf_export decorators you want
to process.
package_deps: Python library target containing your packages.
output_package: Package where generated API will be added to.
output_dir: Subdirectory to output API to.
If non-empty, must end with '/'.
root_file_name: Name of the root file with all the root imports.
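Example:
A minimal invocation, sketched from the keras/api/BUILD targets in this
same package (attribute values here are illustrative, not prescriptive):

gen_api_init_files(
    name = "keras_python_api_gen_compat_v2",
    api_name = "keras",
    api_version = 2,
    output_dir = "_v2/",
    output_files = KERAS_API_INIT_FILES,
    output_package = "keras.api._v2",
    packages = keras_packages,
    package_deps = ["//keras", "//:expect_tensorflow_installed"],
)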
"""
root_init_template_flag = ""
if root_init_template:
root_init_template_flag = "--root_init_template=$(location " + root_init_template + ")"
primary_package = packages[0]
api_gen_binary_target = ("create_" + primary_package + "_api_%d_%s") % (api_version, name)
native.py_binary(
name = api_gen_binary_target,
srcs = ["//keras/api:create_python_api_wrapper.py"],
main = "//keras/api:create_python_api_wrapper.py",
python_version = "PY3",
srcs_version = "PY2AND3",
visibility = ["//visibility:public"],
deps = package_deps,
)
# Replace name of root file with root_file_name.
output_files = [
root_file_name if f == "__init__.py" else f
for f in output_files
]
all_output_files = ["%s%s" % (output_dir, f) for f in output_files]
compat_api_version_flags = ""
for compat_api_version in compat_api_versions:
compat_api_version_flags += " --compat_apiversion=%d" % compat_api_version
compat_init_template_flags = ""
for compat_init_template in compat_init_templates:
compat_init_template_flags += (
" --compat_init_template=$(location %s)" % compat_init_template
)
# The Keras package within tf project is accessible via both paths below
# Disable them for now so that we don't get SymbolExposedTwiceError
# from create_python_api.py
packages_to_ignore = ["tensorflow.python.keras", "tensorflow.keras"]
flags = [
root_init_template_flag,
"--apidir=$(@D)" + output_dir,
"--apiname=" + api_name,
"--apiversion=" + str(api_version),
compat_api_version_flags,
compat_init_template_flags,
"--packages=" + ",".join(packages),
"--packages_to_ignore=" + ",".join(packages_to_ignore),
"--output_package=" + output_package,
]
native.genrule(
name = name,
outs = all_output_files,
cmd = if_indexing_source_code(
_make_cmd(api_gen_binary_target, flags, loading = "static"),
_make_cmd(api_gen_binary_target, flags, loading = "default"),
),
srcs = srcs,
exec_tools = [":" + api_gen_binary_target],
visibility = ["//visibility:public"],
)
def _make_cmd(api_gen_binary_target, flags, loading = "default"):
binary = "$(location :" + api_gen_binary_target + ")"
flags.append("--loading=" + loading)
return " ".join([binary] + flags + ["$(OUTS)"])

@ -1,150 +0,0 @@
"""Keras API __init__.py files."""
# keep sorted
KERAS_API_INIT_FILES = [
"__init__.py",
"keras/__init__.py",
"keras/__internal__/__init__.py",
"keras/__internal__/backend/__init__.py",
"keras/__internal__/layers/__init__.py",
"keras/__internal__/losses/__init__.py",
"keras/__internal__/models/__init__.py",
"keras/__internal__/optimizers/__init__.py",
"keras/__internal__/utils/__init__.py",
"keras/activations/__init__.py",
"keras/applications/__init__.py",
"keras/applications/convnext/__init__.py",
"keras/applications/densenet/__init__.py",
"keras/applications/efficientnet/__init__.py",
"keras/applications/efficientnet_v2/__init__.py",
"keras/applications/imagenet_utils/__init__.py",
"keras/applications/inception_resnet_v2/__init__.py",
"keras/applications/inception_v3/__init__.py",
"keras/applications/mobilenet/__init__.py",
"keras/applications/mobilenet_v2/__init__.py",
"keras/applications/mobilenet_v3/__init__.py",
"keras/applications/nasnet/__init__.py",
"keras/applications/regnet/__init__.py",
"keras/applications/resnet/__init__.py",
"keras/applications/resnet50/__init__.py",
"keras/applications/resnet_rs/__init__.py",
"keras/applications/resnet_v2/__init__.py",
"keras/applications/vgg16/__init__.py",
"keras/applications/vgg19/__init__.py",
"keras/applications/xception/__init__.py",
"keras/backend/__init__.py",
"keras/backend/experimental/__init__.py",
"keras/callbacks/__init__.py",
"keras/callbacks/experimental/__init__.py",
"keras/constraints/__init__.py",
"keras/datasets/__init__.py",
"keras/datasets/boston_housing/__init__.py",
"keras/datasets/cifar10/__init__.py",
"keras/datasets/cifar100/__init__.py",
"keras/datasets/fashion_mnist/__init__.py",
"keras/datasets/imdb/__init__.py",
"keras/datasets/mnist/__init__.py",
"keras/datasets/reuters/__init__.py",
"keras/dtensor/__init__.py",
"keras/dtensor/experimental/__init__.py",
"keras/dtensor/experimental/optimizers/__init__.py",
"keras/estimator/__init__.py",
"keras/experimental/__init__.py",
"keras/export/__init__.py",
# Placeholder for internal API
"keras/initializers/__init__.py",
"keras/layers/__init__.py",
"keras/layers/experimental/__init__.py",
"keras/layers/experimental/preprocessing/__init__.py",
"keras/losses/__init__.py",
"keras/metrics/__init__.py",
"keras/metrics/experimental/__init__.py",
"keras/mixed_precision/__init__.py",
"keras/models/__init__.py",
"keras/models/experimental/__init__.py",
"keras/optimizers/__init__.py",
"keras/optimizers/experimental/__init__.py",
"keras/optimizers/legacy/__init__.py",
"keras/optimizers/schedules/__init__.py",
"keras/premade/__init__.py",
"keras/preprocessing/__init__.py",
"keras/preprocessing/image/__init__.py",
"keras/preprocessing/sequence/__init__.py",
"keras/preprocessing/text/__init__.py",
"keras/regularizers/__init__.py",
"keras/saving/__init__.py",
"keras/utils/__init__.py",
"keras/utils/experimental/__init__.py",
"keras/utils/legacy/__init__.py",
"keras/wrappers/__init__.py",
"keras/wrappers/scikit_learn/__init__.py",
]
KERAS_API_INIT_FILES_V1 = [
"__init__.py",
"keras/__init__.py",
"keras/__internal__/__init__.py",
"keras/__internal__/legacy/__init__.py",
"keras/__internal__/legacy/layers/__init__.py",
"keras/__internal__/layers/__init__.py",
"keras/__internal__/legacy/layers/experimental/__init__.py",
"keras/__internal__/legacy/rnn_cell/__init__.py",
"keras/activations/__init__.py",
"keras/applications/__init__.py",
"keras/applications/convnext/__init__.py",
"keras/applications/densenet/__init__.py",
"keras/applications/efficientnet/__init__.py",
"keras/applications/efficientnet_v2/__init__.py",
"keras/applications/imagenet_utils/__init__.py",
"keras/applications/inception_resnet_v2/__init__.py",
"keras/applications/inception_v3/__init__.py",
"keras/applications/mobilenet/__init__.py",
"keras/applications/mobilenet_v2/__init__.py",
"keras/applications/mobilenet_v3/__init__.py",
"keras/applications/nasnet/__init__.py",
"keras/applications/regnet/__init__.py",
"keras/applications/resnet/__init__.py",
"keras/applications/resnet_v2/__init__.py",
"keras/applications/resnet50/__init__.py",
"keras/applications/resnet_rs/__init__.py",
"keras/applications/vgg16/__init__.py",
"keras/applications/vgg19/__init__.py",
"keras/applications/xception/__init__.py",
"keras/backend/__init__.py",
"keras/callbacks/__init__.py",
"keras/callbacks/experimental/__init__.py",
"keras/constraints/__init__.py",
"keras/datasets/__init__.py",
"keras/datasets/boston_housing/__init__.py",
"keras/datasets/cifar10/__init__.py",
"keras/datasets/cifar100/__init__.py",
"keras/datasets/fashion_mnist/__init__.py",
"keras/datasets/imdb/__init__.py",
"keras/datasets/mnist/__init__.py",
"keras/datasets/reuters/__init__.py",
"keras/estimator/__init__.py",
"keras/experimental/__init__.py",
"keras/export/__init__.py",
"keras/initializers/__init__.py",
"keras/layers/__init__.py",
"keras/layers/experimental/__init__.py",
"keras/layers/experimental/preprocessing/__init__.py",
"keras/losses/__init__.py",
"keras/metrics/__init__.py",
"keras/mixed_precision/__init__.py",
"keras/models/__init__.py",
"keras/optimizers/__init__.py",
"keras/optimizers/schedules/__init__.py",
"keras/optimizers/legacy/__init__.py",
"keras/premade/__init__.py",
"keras/preprocessing/__init__.py",
"keras/preprocessing/image/__init__.py",
"keras/preprocessing/sequence/__init__.py",
"keras/preprocessing/text/__init__.py",
"keras/regularizers/__init__.py",
"keras/saving/__init__.py",
"keras/utils/__init__.py",
"keras/utils/legacy/__init__.py",
"keras/wrappers/__init__.py",
"keras/wrappers/scikit_learn/__init__.py",
]

@ -1,34 +0,0 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Thin wrapper to call TensorFlow's API generation script.
This file exists to provide a main function for the py_binary in the API
generation genrule. It just calls the main function for the actual API
generation script in TensorFlow.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import keras # noqa: F401
# isort: off
from tensorflow.python.tools.api.generator import (
create_python_api,
)
if __name__ == "__main__":
create_python_api.main()

@ -1,16 +0,0 @@
# TensorFlow API backwards compatibility test goldens.
package(
default_visibility = ["//visibility:public"],
licenses = ["notice"], # Apache 2.0
)
filegroup(
name = "api_golden_v1",
srcs = glob(["v1/*.pbtxt"]),
)
filegroup(
name = "api_golden_v2",
srcs = glob(["v2/*.pbtxt"]),
)

@ -1,403 +0,0 @@
path: "tensorflow.keras.Model"
tf_class {
is_instance: "<class \'keras.engine.training.Model\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<class \'keras.utils.version_utils.ModelVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "distribute_reduction_method"
mtype: "<type \'property\'>"
}
member {
name: "distribute_strategy"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "jit_compile"
mtype: "<type \'property\'>"
}
member {
name: "layers"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "metrics_names"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "run_eagerly"
mtype: "<type \'property\'>"
}
member {
name: "state_updates"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregationV2.NONE\'], "
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'training\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "compile"
argspec: "args=[\'self\', \'optimizer\', \'loss\', \'metrics\', \'loss_weights\', \'weighted_metrics\', \'run_eagerly\', \'steps_per_execution\', \'jit_compile\', \'pss_evaluation_shards\'], varargs=None, keywords=kwargs, defaults=[\'rmsprop\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'0\'], "
}
member_method {
name: "compile_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_loss"
argspec: "args=[\'self\', \'x\', \'y\', \'y_pred\', \'sample_weight\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_metrics"
argspec: "args=[\'self\', \'x\', \'y\', \'y_pred\', \'sample_weight\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "evaluate"
argspec: "args=[\'self\', \'x\', \'y\', \'batch_size\', \'verbose\', \'sample_weight\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'return_dict\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'auto\', \'None\', \'None\', \'None\', \'10\', \'1\', \'False\', \'False\'], "
}
member_method {
name: "evaluate_generator"
argspec: "args=[\'self\', \'generator\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'verbose\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'10\', \'1\', \'False\', \'0\'], "
}
member_method {
name: "export"
argspec: "args=[\'self\', \'filepath\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "fit"
argspec: "args=[\'self\', \'x\', \'y\', \'batch_size\', \'epochs\', \'verbose\', \'callbacks\', \'validation_split\', \'validation_data\', \'shuffle\', \'class_weight\', \'sample_weight\', \'initial_epoch\', \'steps_per_epoch\', \'validation_steps\', \'validation_batch_size\', \'validation_freq\', \'max_queue_size\', \'workers\', \'use_multiprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'1\', \'auto\', \'None\', \'0.0\', \'None\', \'True\', \'None\', \'None\', \'0\', \'None\', \'None\', \'None\', \'1\', \'10\', \'1\', \'False\'], "
}
member_method {
name: "fit_generator"
argspec: "args=[\'self\', \'generator\', \'steps_per_epoch\', \'epochs\', \'verbose\', \'callbacks\', \'validation_data\', \'validation_steps\', \'validation_freq\', \'class_weight\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'shuffle\', \'initial_epoch\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'1\', \'None\', \'None\', \'None\', \'1\', \'None\', \'10\', \'1\', \'False\', \'True\', \'0\'], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\', \'custom_objects\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_compile_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_layer"
argspec: "args=[\'self\', \'name\', \'index\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "get_metrics_result"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weight_paths"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_weights"
argspec: "args=[\'self\', \'filepath\', \'skip_mismatch\', \'by_name\', \'options\'], varargs=None, keywords=None, defaults=[\'False\', \'False\', \'None\'], "
}
member_method {
name: "make_predict_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "make_test_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "make_train_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "predict"
argspec: "args=[\'self\', \'x\', \'batch_size\', \'verbose\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'auto\', \'None\', \'None\', \'10\', \'1\', \'False\'], "
}
member_method {
name: "predict_generator"
argspec: "args=[\'self\', \'generator\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'verbose\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'10\', \'1\', \'False\', \'0\'], "
}
member_method {
name: "predict_on_batch"
argspec: "args=[\'self\', \'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "predict_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_metrics"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save"
argspec: "args=[\'self\', \'filepath\', \'overwrite\', \'save_format\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'None\'], "
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_spec"
argspec: "args=[\'self\', \'dynamic_batch\'], varargs=None, keywords=None, defaults=[\'True\'], "
}
member_method {
name: "save_weights"
argspec: "args=[\'self\', \'filepath\', \'overwrite\', \'save_format\', \'options\'], varargs=None, keywords=None, defaults=[\'True\', \'None\', \'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "summary"
argspec: "args=[\'self\', \'line_length\', \'positions\', \'print_fn\', \'expand_nested\', \'show_trainable\', \'layer_range\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'False\', \'False\', \'None\'], "
}
member_method {
name: "test_on_batch"
argspec: "args=[\'self\', \'x\', \'y\', \'sample_weight\', \'reset_metrics\', \'return_dict\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'True\', \'False\'], "
}
member_method {
name: "test_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "to_json"
argspec: "args=[\'self\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "to_yaml"
argspec: "args=[\'self\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "train_on_batch"
argspec: "args=[\'self\', \'x\', \'y\', \'sample_weight\', \'class_weight\', \'reset_metrics\', \'return_dict\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'True\', \'False\'], "
}
member_method {
name: "train_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
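(The `path:` line for this block falls above the hunk shown, but the methods listed — compile, fit, evaluate, predict, get_layer — are the tf.keras Model training API.) A minimal sketch of how those signatures fit together, assuming TensorFlow 2.x and using synthetic data purely for illustration:

```python
import numpy as np
import tensorflow as tf

inputs = tf.keras.Input(shape=(8,))
hidden = tf.keras.layers.Dense(16, activation="relu")(inputs)
outputs = tf.keras.layers.Dense(1, activation="sigmoid")(hidden)
model = tf.keras.Model(inputs, outputs)

# compile() defaults to optimizer='rmsprop', matching the argspec above.
model.compile(optimizer="rmsprop", loss="binary_crossentropy", metrics=["accuracy"])

x = np.random.rand(64, 8).astype("float32")         # toy data
y = np.random.randint(0, 2, size=(64, 1))

model.fit(x, y, batch_size=16, epochs=2, verbose=0)
print(model.evaluate(x, y, return_dict=True, verbose=0))
print(model.predict(x[:4], verbose=0).shape)         # (4, 1)
```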

@@ -1,413 +0,0 @@
path: "tensorflow.keras.Sequential"
tf_class {
is_instance: "<class \'keras.engine.sequential.Sequential\'>"
is_instance: "<class \'keras.engine.functional.Functional\'>"
is_instance: "<class \'keras.engine.training.Model\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<class \'keras.utils.version_utils.ModelVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "distribute_reduction_method"
mtype: "<type \'property\'>"
}
member {
name: "distribute_strategy"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "jit_compile"
mtype: "<type \'property\'>"
}
member {
name: "layers"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "metrics_names"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "run_eagerly"
mtype: "<type \'property\'>"
}
member {
name: "state_updates"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'layers\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add"
argspec: "args=[\'self\', \'layer\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregationV2.NONE\'], "
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'training\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "compile"
argspec: "args=[\'self\', \'optimizer\', \'loss\', \'metrics\', \'loss_weights\', \'weighted_metrics\', \'run_eagerly\', \'steps_per_execution\', \'jit_compile\', \'pss_evaluation_shards\'], varargs=None, keywords=kwargs, defaults=[\'rmsprop\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'0\'], "
}
member_method {
name: "compile_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_loss"
argspec: "args=[\'self\', \'x\', \'y\', \'y_pred\', \'sample_weight\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_metrics"
argspec: "args=[\'self\', \'x\', \'y\', \'y_pred\', \'sample_weight\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "evaluate"
argspec: "args=[\'self\', \'x\', \'y\', \'batch_size\', \'verbose\', \'sample_weight\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'return_dict\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'auto\', \'None\', \'None\', \'None\', \'10\', \'1\', \'False\', \'False\'], "
}
member_method {
name: "evaluate_generator"
argspec: "args=[\'self\', \'generator\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'verbose\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'10\', \'1\', \'False\', \'0\'], "
}
member_method {
name: "export"
argspec: "args=[\'self\', \'filepath\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "fit"
argspec: "args=[\'self\', \'x\', \'y\', \'batch_size\', \'epochs\', \'verbose\', \'callbacks\', \'validation_split\', \'validation_data\', \'shuffle\', \'class_weight\', \'sample_weight\', \'initial_epoch\', \'steps_per_epoch\', \'validation_steps\', \'validation_batch_size\', \'validation_freq\', \'max_queue_size\', \'workers\', \'use_multiprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'1\', \'auto\', \'None\', \'0.0\', \'None\', \'True\', \'None\', \'None\', \'0\', \'None\', \'None\', \'None\', \'1\', \'10\', \'1\', \'False\'], "
}
member_method {
name: "fit_generator"
argspec: "args=[\'self\', \'generator\', \'steps_per_epoch\', \'epochs\', \'verbose\', \'callbacks\', \'validation_data\', \'validation_steps\', \'validation_freq\', \'class_weight\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'shuffle\', \'initial_epoch\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'1\', \'None\', \'None\', \'None\', \'1\', \'None\', \'10\', \'1\', \'False\', \'True\', \'0\'], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\', \'custom_objects\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_compile_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_layer"
argspec: "args=[\'self\', \'name\', \'index\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "get_metrics_result"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weight_paths"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_weights"
argspec: "args=[\'self\', \'filepath\', \'skip_mismatch\', \'by_name\', \'options\'], varargs=None, keywords=None, defaults=[\'False\', \'False\', \'None\'], "
}
member_method {
name: "make_predict_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "make_test_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "make_train_function"
argspec: "args=[\'self\', \'force\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "pop"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "predict"
argspec: "args=[\'self\', \'x\', \'batch_size\', \'verbose\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'auto\', \'None\', \'None\', \'10\', \'1\', \'False\'], "
}
member_method {
name: "predict_generator"
argspec: "args=[\'self\', \'generator\', \'steps\', \'callbacks\', \'max_queue_size\', \'workers\', \'use_multiprocessing\', \'verbose\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'10\', \'1\', \'False\', \'0\'], "
}
member_method {
name: "predict_on_batch"
argspec: "args=[\'self\', \'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "predict_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_metrics"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_states"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save"
argspec: "args=[\'self\', \'filepath\', \'overwrite\', \'save_format\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'None\'], "
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_spec"
argspec: "args=[\'self\', \'dynamic_batch\'], varargs=None, keywords=None, defaults=[\'True\'], "
}
member_method {
name: "save_weights"
argspec: "args=[\'self\', \'filepath\', \'overwrite\', \'save_format\', \'options\'], varargs=None, keywords=None, defaults=[\'True\', \'None\', \'None\'], "
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "summary"
argspec: "args=[\'self\', \'line_length\', \'positions\', \'print_fn\', \'expand_nested\', \'show_trainable\', \'layer_range\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'False\', \'False\', \'None\'], "
}
member_method {
name: "test_on_batch"
argspec: "args=[\'self\', \'x\', \'y\', \'sample_weight\', \'reset_metrics\', \'return_dict\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'True\', \'False\'], "
}
member_method {
name: "test_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "to_json"
argspec: "args=[\'self\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "to_yaml"
argspec: "args=[\'self\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "train_on_batch"
argspec: "args=[\'self\', \'x\', \'y\', \'sample_weight\', \'class_weight\', \'reset_metrics\', \'return_dict\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'True\', \'False\'], "
}
member_method {
name: "train_step"
argspec: "args=[\'self\', \'data\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
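The Sequential-specific surface on top of the shared Model API is small: `__init__(layers=None, name=None)`, `add(layer)`, and `pop()`. A short sketch, assuming TensorFlow 2.x:

```python
import tensorflow as tf

model = tf.keras.Sequential(name="demo")             # __init__(layers=None, name=None)
model.add(tf.keras.layers.Dense(32, activation="relu", input_shape=(8,)))
model.add(tf.keras.layers.Dense(4))

model.pop()                                          # drop the last layer again
model.add(tf.keras.layers.Dense(1, activation="sigmoid"))
model.summary()
```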

@@ -1,242 +0,0 @@
path: "tensorflow.keras.__internal__.layers.BaseRandomLayer"
tf_class {
is_instance: "<class \'keras.engine.base_layer.BaseRandomLayer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'seed\', \'force_generator\', \'rng_type\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'False\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregationV2.NONE\'], "
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
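BaseRandomLayer is an internal (`__internal__`) API whose constructor takes `seed`, `force_generator`, and `rng_type` as listed above. The hypothetical subclass below is only a sketch of how such a layer might be written; it deliberately avoids relying on the base class's internal RNG plumbing:

```python
import tensorflow as tf

class GaussianJitter(tf.keras.__internal__.layers.BaseRandomLayer):
    """Hypothetical example layer; not part of the API dump above."""

    def __init__(self, stddev=0.1, seed=None, **kwargs):
        # seed / force_generator / rng_type are the documented constructor args.
        super().__init__(seed=seed, **kwargs)
        self.stddev = stddev

    def call(self, inputs, training=None):
        if training:
            # Plain tf.random is used here; the base class's internal generator
            # is an implementation detail this sketch does not depend on.
            return inputs + tf.random.normal(tf.shape(inputs), stddev=self.stddev)
        return inputs

layer = GaussianJitter(stddev=0.05, seed=1337)
print(layer(tf.ones((2, 4)), training=True).shape)    # (2, 4)
```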

@@ -1,7 +0,0 @@
path: "tensorflow.keras.__internal__.layers"
tf_module {
member {
name: "BaseRandomLayer"
mtype: "<type \'type\'>"
}
}

@@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.AveragePooling1D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.AveragePooling1D\'>"
is_instance: "<class \'keras.layers.pooling.average_pooling1d.AveragePooling1D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling1d.Pooling1D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
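The legacy wrapper's constructor takes `(pool_size, strides, padding='valid', data_format='channels_last', name=None)`; the public `tf.keras.layers.AveragePooling1D` accepts the same core arguments, so a sketch with the public layer illustrates the behavior (TensorFlow 2.x assumed):

```python
import tensorflow as tf

pool = tf.keras.layers.AveragePooling1D(pool_size=2, strides=2, padding="valid",
                                        data_format="channels_last")
x = tf.ones((1, 8, 3))           # (batch, steps, channels)
print(pool(x).shape)             # (1, 4, 3)
```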

@@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.AveragePooling2D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.AveragePooling2D\'>"
is_instance: "<class \'keras.layers.pooling.average_pooling2d.AveragePooling2D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling2d.Pooling2D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
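Same pattern one rank up; the 2-D variant pools over (batch, height, width, channels) inputs:

```python
import tensorflow as tf

pool = tf.keras.layers.AveragePooling2D(pool_size=(2, 2), strides=(2, 2), padding="valid")
x = tf.ones((1, 8, 8, 3))        # (batch, height, width, channels)
print(pool(x).shape)             # (1, 4, 4, 3)
```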

@@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.AveragePooling3D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.AveragePooling3D\'>"
is_instance: "<class \'keras.layers.pooling.average_pooling3d.AveragePooling3D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling3d.Pooling3D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
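And the 3-D variant, for volumetric or video-like inputs:

```python
import tensorflow as tf

pool = tf.keras.layers.AveragePooling3D(pool_size=2, strides=2, padding="valid")
x = tf.ones((1, 8, 8, 8, 3))     # (batch, depth, height, width, channels)
print(pool(x).shape)             # (1, 4, 4, 4, 3)
```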

@@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.BatchNormalization"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.normalization.BatchNormalization\'>"
is_instance: "<class \'keras.layers.normalization.batch_normalization_v1.BatchNormalization\'>"
is_instance: "<class \'keras.layers.normalization.batch_normalization.BatchNormalizationBase\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'axis\', \'momentum\', \'epsilon\', \'center\', \'scale\', \'beta_initializer\', \'gamma_initializer\', \'moving_mean_initializer\', \'moving_variance_initializer\', \'beta_regularizer\', \'gamma_regularizer\', \'beta_constraint\', \'gamma_constraint\', \'renorm\', \'renorm_clipping\', \'renorm_momentum\', \'fused\', \'trainable\', \'virtual_batch_size\', \'adjustment\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'-1\', \'0.99\', \'0.001\', \'True\', \'True\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'<tensorflow.python.ops.init_ops.Ones object instance>\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'<tensorflow.python.ops.init_ops.Ones object instance>\', \'None\', \'None\', \'None\', \'None\', \'False\', \'None\', \'0.99\', \'None\', \'True\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'training\', \'mask\'], varargs=None, keywords=None, defaults=[\'False\', \'None\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
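The weight-management methods recorded above (add_weight, build, count_params, get_weights, set_weights) are shared by all of the legacy layer classes in this file. A minimal sketch of how they compose, assuming the legacy base Layer (documented further below) is reached through its public alias tf.compat.v1.layers.Layer; the ScaleLayer name and shapes are illustrative only:

import tensorflow as tf

class ScaleLayer(tf.compat.v1.layers.Layer):
    def build(self, input_shape):
        # add_weight(name, shape, ...) as in the argspec above
        self.scale = self.add_weight('scale', shape=[int(input_shape[-1])],
                                     initializer='ones')
        super().build(input_shape)

    def call(self, inputs):
        return inputs * self.scale

layer = ScaleLayer()
y = layer(tf.zeros([2, 3]))      # first call triggers build()
print(layer.count_params())      # 3 trainable scalars
weights = layer.get_weights()    # list of NumPy arrays
layer.set_weights(weights)       # round-trip restore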

@ -1,270 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Conv1D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.Conv1D\'>"
is_instance: "<class \'keras.layers.convolutional.conv1d.Conv1D\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'1\', \'valid\', \'channels_last\', \'1\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
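A minimal construction-and-call sketch for the Conv1D signature above, assuming the class is reachable through its public alias tf.compat.v1.layers.Conv1D; the input shape is illustrative:

import tensorflow as tf

x = tf.random.normal([4, 128, 8])            # channels_last: (batch, length, channels)
conv = tf.compat.v1.layers.Conv1D(filters=16, kernel_size=3,
                                  strides=1, padding='valid',
                                  activation=tf.nn.relu)
y = conv(x)
print(y.shape)  # (4, 126, 16): 'valid' padding shortens the length by kernel_size - 1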

@ -1,271 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Conv2DTranspose"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.Conv2DTranspose\'>"
is_instance: "<class \'keras.layers.convolutional.conv2d_transpose.Conv2DTranspose\'>"
is_instance: "<class \'keras.layers.convolutional.conv2d.Conv2D\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
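A short sketch of the transposed-convolution signature above, assuming the tf.compat.v1.layers.Conv2DTranspose alias; shapes are illustrative:

import tensorflow as tf

x = tf.random.normal([2, 8, 8, 3])           # (batch, height, width, channels)
deconv = tf.compat.v1.layers.Conv2DTranspose(filters=16, kernel_size=3,
                                             strides=(2, 2), padding='same')
y = deconv(x)
print(y.shape)  # (2, 16, 16, 16): 'same' padding with stride 2 doubles height and width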

@ -1,270 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Conv2D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.Conv2D\'>"
is_instance: "<class \'keras.layers.convolutional.conv2d.Conv2D\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'(1, 1)\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
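A minimal sketch for the Conv2D signature above, including the dilation_rate and data_format arguments it records, assuming the tf.compat.v1.layers.Conv2D alias; shapes are illustrative:

import tensorflow as tf

x = tf.random.normal([2, 32, 32, 3])
conv = tf.compat.v1.layers.Conv2D(filters=8, kernel_size=(3, 3),
                                  padding='same', dilation_rate=(2, 2),
                                  data_format='channels_last')
y = conv(x)
print(y.shape)  # (2, 32, 32, 8): 'same' padding preserves the spatial dimensions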

@ -1,271 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Conv3DTranspose"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.Conv3DTranspose\'>"
is_instance: "<class \'keras.layers.convolutional.conv3d_transpose.Conv3DTranspose\'>"
is_instance: "<class \'keras.layers.convolutional.conv3d.Conv3D\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1, 1)\', \'valid\', \'channels_last\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
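A brief sketch for the Conv3DTranspose signature above, assuming the tf.compat.v1.layers.Conv3DTranspose alias; the 5-D input shape is illustrative:

import tensorflow as tf

x = tf.random.normal([1, 4, 4, 4, 2])        # (batch, depth, height, width, channels)
deconv = tf.compat.v1.layers.Conv3DTranspose(filters=8, kernel_size=3,
                                             strides=(2, 2, 2), padding='same')
y = deconv(x)
print(y.shape)  # (1, 8, 8, 8, 8): each spatial dimension is upsampled by the stride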

@ -1,270 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Conv3D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.Conv3D\'>"
is_instance: "<class \'keras.layers.convolutional.conv3d.Conv3D\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1, 1)\', \'valid\', \'channels_last\', \'(1, 1, 1)\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
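A brief sketch for the Conv3D signature above, assuming the tf.compat.v1.layers.Conv3D alias; shapes are illustrative:

import tensorflow as tf

x = tf.random.normal([1, 16, 16, 16, 1])
conv = tf.compat.v1.layers.Conv3D(filters=4, kernel_size=(3, 3, 3), padding='valid')
y = conv(x)
print(y.shape)  # (1, 14, 14, 14, 4): 'valid' padding trims kernel_size - 1 per axis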

@ -1,265 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Dense"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.core.Dense\'>"
is_instance: "<class \'keras.layers.core.dense.Dense\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'units\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
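A minimal sketch for the Dense signature above, assuming the tf.compat.v1.layers.Dense alias; shapes are illustrative:

import tensorflow as tf

x = tf.random.normal([32, 10])
dense = tf.compat.v1.layers.Dense(units=4, activation=tf.nn.tanh, use_bias=True)
y = dense(x)
print(y.shape)                                  # (32, 4)
print([w.shape for w in dense.get_weights()])   # kernel (10, 4) and bias (4,)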

@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Dropout"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.core.Dropout\'>"
is_instance: "<class \'keras.layers.regularization.dropout.Dropout\'>"
is_instance: "<class \'keras.engine.base_layer.BaseRandomLayer\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'rate\', \'noise_shape\', \'seed\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'0.5\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'training\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
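A minimal sketch for the Dropout signature above, showing the training flag recorded in its call argspec, assuming the tf.compat.v1.layers.Dropout alias:

import tensorflow as tf

x = tf.ones([2, 4])
drop = tf.compat.v1.layers.Dropout(rate=0.5, seed=1)
print(drop(x, training=False))  # identity: dropout is a no-op at inference
print(drop(x, training=True))   # ~half the entries zeroed, the rest scaled by 1/(1 - rate)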

@ -1,265 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Flatten"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.core.Flatten\'>"
is_instance: "<class \'keras.layers.reshaping.flatten.Flatten\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'data_format\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,17 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.InputSpec"
tf_class {
is_instance: "<class \'keras.engine.input_spec.InputSpec\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'dtype\', \'shape\', \'ndim\', \'max_ndim\', \'min_ndim\', \'axes\', \'allow_last_axis_squeeze\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'False\', \'None\'], "
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
}
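
The argspec above fixes the InputSpec constructor: dtype, shape, ndim, max_ndim, min_ndim, axes, allow_last_axis_squeeze (default False) and name, all optional. A minimal usage sketch, assuming the class is also reachable through its public alias tf.keras.layers.InputSpec (the internal path above points at the same keras.engine.input_spec.InputSpec class):

import tensorflow as tf

# Declare "4-D input whose last axis has size 3" for a custom layer.
spec = tf.keras.layers.InputSpec(ndim=4, axes={-1: 3})
print(spec.ndim, spec.axes)  # 4 {-1: 3}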

@ -1,263 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.Layer"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'trainable\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
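
The entries above give the legacy v1 base Layer its public surface: __init__(trainable=True, name=None, dtype=None, **kwargs), build(input_shape), call(inputs, **kwargs) and add_weight(name, shape, ...). A minimal subclassing sketch, assuming the class is also reachable as tf.compat.v1.layers.Layer (an alias of keras.legacy_tf_layers.base.Layer) and is run under a v1 graph; the Scale layer and its gamma weight are purely illustrative:

import tensorflow as tf

class Scale(tf.compat.v1.layers.Layer):
    def build(self, input_shape):
        # One trainable per-channel multiplier, created via add_weight.
        self.gamma = self.add_weight(
            name="gamma",
            shape=(int(input_shape[-1]),),
            initializer=tf.compat.v1.ones_initializer(),
            trainable=True)
        super().build(input_shape)

    def call(self, inputs):
        return inputs * self.gamma

with tf.Graph().as_default():
    x = tf.compat.v1.placeholder(tf.float32, [None, 8])
    y = Scale(name="scale")(x)  # builds the layer and creates its gamma variable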

@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.MaxPooling1D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.MaxPooling1D\'>"
is_instance: "<class \'keras.layers.pooling.max_pooling1d.MaxPooling1D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling1d.Pooling1D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.MaxPooling2D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.MaxPooling2D\'>"
is_instance: "<class \'keras.layers.pooling.max_pooling2d.MaxPooling2D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling2d.Pooling2D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
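
MaxPooling1D, MaxPooling2D and MaxPooling3D (above and below) share the constructor listed here: pool_size and strides are required, padding defaults to 'valid' and data_format to 'channels_last'. A graph-mode sketch, assuming the public alias tf.compat.v1.layers.MaxPooling2D wraps the same keras.legacy_tf_layers.pooling class:

import tensorflow as tf

with tf.Graph().as_default():
    images = tf.compat.v1.placeholder(tf.float32, [None, 28, 28, 3])
    pool = tf.compat.v1.layers.MaxPooling2D(pool_size=2, strides=2, padding="valid")
    downsampled = pool(images)  # static shape (None, 14, 14, 3)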

@ -1,266 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.MaxPooling3D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.pooling.MaxPooling3D\'>"
is_instance: "<class \'keras.layers.pooling.max_pooling3d.MaxPooling3D\'>"
is_instance: "<class \'keras.layers.pooling.base_pooling3d.Pooling3D\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,271 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.SeparableConv1D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.SeparableConv1D\'>"
is_instance: "<class \'keras.layers.convolutional.separable_conv1d.SeparableConv1D\'>"
is_instance: "<class \'keras.layers.convolutional.base_separable_conv.SeparableConv\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'depth_multiplier\', \'activation\', \'use_bias\', \'depthwise_initializer\', \'pointwise_initializer\', \'bias_initializer\', \'depthwise_regularizer\', \'pointwise_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'depthwise_constraint\', \'pointwise_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'1\', \'valid\', \'channels_last\', \'1\', \'1\', \'None\', \'True\', \'None\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,271 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.SeparableConv2D"
tf_class {
is_instance: "<class \'keras.legacy_tf_layers.convolutional.SeparableConv2D\'>"
is_instance: "<class \'keras.layers.convolutional.separable_conv2d.SeparableConv2D\'>"
is_instance: "<class \'keras.layers.convolutional.base_separable_conv.SeparableConv\'>"
is_instance: "<class \'keras.layers.convolutional.base_conv.Conv\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'depth_multiplier\', \'activation\', \'use_bias\', \'depthwise_initializer\', \'pointwise_initializer\', \'bias_initializer\', \'depthwise_regularizer\', \'pointwise_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'depthwise_constraint\', \'pointwise_constraint\', \'bias_constraint\', \'trainable\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'(1, 1)\', \'1\', \'None\', \'True\', \'None\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "convolution_op"
argspec: "args=[\'self\', \'inputs\', \'kernel\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
}
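
SeparableConv1D and SeparableConv2D share the __init__ signature above: filters and kernel_size are required, followed by strides, padding, data_format, dilation_rate, depth_multiplier and the depthwise/pointwise/bias initializer, regularizer and constraint arguments. A graph-mode sketch, assuming the public alias tf.compat.v1.layers.SeparableConv2D maps to this class; the filter counts are arbitrary:

import tensorflow as tf

with tf.Graph().as_default():
    x = tf.compat.v1.placeholder(tf.float32, [None, 32, 32, 3])
    sep = tf.compat.v1.layers.SeparableConv2D(
        filters=16, kernel_size=3, strides=(1, 1), padding="same",
        depth_multiplier=1, activation=tf.nn.relu)
    y = sep(x)  # 3x3 depthwise pass followed by a 1x1 pointwise projection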

@ -1,11 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers.experimental"
tf_module {
member_method {
name: "keras_style_scope"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_keras_style"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
}
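
Per the argspecs above, keras_style_scope and set_keras_style take no arguments. A sketch of typical use, assuming the public aliases under tf.compat.v1.layers.experimental behave the same way (keras_style_scope as a context manager, set_keras_style as a one-shot global switch):

import tensorflow as tf

# Scoped: legacy layers created here own their variables Keras-style
# instead of sharing them through variable scopes.
with tf.compat.v1.layers.experimental.keras_style_scope():
    dense = tf.compat.v1.layers.Dense(4, name="shared")

# Global equivalent (affects all legacy layers created afterwards):
# tf.compat.v1.layers.experimental.set_keras_style()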

@ -1,151 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.layers"
tf_module {
member {
name: "AveragePooling1D"
mtype: "<type \'type\'>"
}
member {
name: "AveragePooling2D"
mtype: "<type \'type\'>"
}
member {
name: "AveragePooling3D"
mtype: "<type \'type\'>"
}
member {
name: "BatchNormalization"
mtype: "<type \'type\'>"
}
member {
name: "Conv1D"
mtype: "<type \'type\'>"
}
member {
name: "Conv2D"
mtype: "<type \'type\'>"
}
member {
name: "Conv2DTranspose"
mtype: "<type \'type\'>"
}
member {
name: "Conv3D"
mtype: "<type \'type\'>"
}
member {
name: "Conv3DTranspose"
mtype: "<type \'type\'>"
}
member {
name: "Dense"
mtype: "<type \'type\'>"
}
member {
name: "Dropout"
mtype: "<type \'type\'>"
}
member {
name: "Flatten"
mtype: "<type \'type\'>"
}
member {
name: "InputSpec"
mtype: "<type \'type\'>"
}
member {
name: "Layer"
mtype: "<type \'type\'>"
}
member {
name: "MaxPooling1D"
mtype: "<type \'type\'>"
}
member {
name: "MaxPooling2D"
mtype: "<type \'type\'>"
}
member {
name: "MaxPooling3D"
mtype: "<type \'type\'>"
}
member {
name: "SeparableConv1D"
mtype: "<type \'type\'>"
}
member {
name: "SeparableConv2D"
mtype: "<type \'type\'>"
}
member {
name: "experimental"
mtype: "<type \'module\'>"
}
member_method {
name: "average_pooling1d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "average_pooling2d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "average_pooling3d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "batch_normalization"
argspec: "args=[\'inputs\', \'axis\', \'momentum\', \'epsilon\', \'center\', \'scale\', \'beta_initializer\', \'gamma_initializer\', \'moving_mean_initializer\', \'moving_variance_initializer\', \'beta_regularizer\', \'gamma_regularizer\', \'beta_constraint\', \'gamma_constraint\', \'training\', \'trainable\', \'name\', \'reuse\', \'renorm\', \'renorm_clipping\', \'renorm_momentum\', \'fused\', \'virtual_batch_size\', \'adjustment\'], varargs=None, keywords=None, defaults=[\'-1\', \'0.99\', \'0.001\', \'True\', \'True\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'<tensorflow.python.ops.init_ops.Ones object instance>\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'<tensorflow.python.ops.init_ops.Ones object instance>\', \'None\', \'None\', \'None\', \'None\', \'False\', \'True\', \'None\', \'None\', \'False\', \'None\', \'0.99\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "conv1d"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'1\', \'valid\', \'channels_last\', \'1\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "conv2d"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'(1, 1)\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "conv2d_transpose"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "conv3d"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'(1, 1, 1)\', \'valid\', \'channels_last\', \'(1, 1, 1)\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "conv3d_transpose"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'(1, 1, 1)\', \'valid\', \'channels_last\', \'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "dense"
argspec: "args=[\'inputs\', \'units\', \'activation\', \'use_bias\', \'kernel_initializer\', \'bias_initializer\', \'kernel_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'kernel_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "dropout"
argspec: "args=[\'inputs\', \'rate\', \'noise_shape\', \'seed\', \'training\', \'name\'], varargs=None, keywords=None, defaults=[\'0.5\', \'None\', \'None\', \'False\', \'None\'], "
}
member_method {
name: "flatten"
argspec: "args=[\'inputs\', \'name\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\', \'channels_last\'], "
}
member_method {
name: "max_pooling1d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "max_pooling2d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "max_pooling3d"
argspec: "args=[\'inputs\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'name\'], varargs=None, keywords=None, defaults=[\'valid\', \'channels_last\', \'None\'], "
}
member_method {
name: "separable_conv1d"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'depth_multiplier\', \'activation\', \'use_bias\', \'depthwise_initializer\', \'pointwise_initializer\', \'bias_initializer\', \'depthwise_regularizer\', \'pointwise_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'depthwise_constraint\', \'pointwise_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'1\', \'valid\', \'channels_last\', \'1\', \'1\', \'None\', \'True\', \'None\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
member_method {
name: "separable_conv2d"
argspec: "args=[\'inputs\', \'filters\', \'kernel_size\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\', \'depth_multiplier\', \'activation\', \'use_bias\', \'depthwise_initializer\', \'pointwise_initializer\', \'bias_initializer\', \'depthwise_regularizer\', \'pointwise_regularizer\', \'bias_regularizer\', \'activity_regularizer\', \'depthwise_constraint\', \'pointwise_constraint\', \'bias_constraint\', \'trainable\', \'name\', \'reuse\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'channels_last\', \'(1, 1)\', \'1\', \'None\', \'True\', \'None\', \'None\', \'<tensorflow.python.ops.init_ops.Zeros object instance>\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'True\', \'None\', \'None\'], "
}
}
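
Besides the layer classes, the module exposes functional wrappers (conv1d/2d/3d and their transposes, dense, dropout, flatten, the pooling ops, batch_normalization, separable_conv1d/2d) whose argspecs are listed above. A graph-mode sketch chaining a few of them through their public tf.compat.v1.layers aliases; the layer sizes are illustrative:

import tensorflow as tf

with tf.Graph().as_default():
    images = tf.compat.v1.placeholder(tf.float32, [None, 28, 28, 1])
    training = tf.compat.v1.placeholder_with_default(False, [])
    net = tf.compat.v1.layers.conv2d(images, filters=32, kernel_size=3,
                                     padding="same", activation=tf.nn.relu)
    net = tf.compat.v1.layers.max_pooling2d(net, pool_size=2, strides=2)
    net = tf.compat.v1.layers.flatten(net)
    net = tf.compat.v1.layers.dropout(net, rate=0.5, training=training)
    logits = tf.compat.v1.layers.dense(net, units=10)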

@ -1,11 +0,0 @@
path: "tensorflow.keras.__internal__.legacy"
tf_module {
member {
name: "layers"
mtype: "<type \'module\'>"
}
member {
name: "rnn_cell"
mtype: "<type \'module\'>"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.BasicLSTMCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.BasicLSTMCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.LayerRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'num_units\', \'forget_bias\', \'state_is_tuple\', \'activation\', \'reuse\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'1.0\', \'True\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'state\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.BasicRNNCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.BasicRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.LayerRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'num_units\', \'activation\', \'reuse\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'state\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.DeviceWrapper"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers.DeviceWrapper\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers._RNNCellWrapperV1\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'cell\', \'device\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'_\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\', \'custom_objects\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,286 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.DropoutWrapper"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers.DropoutWrapper\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers._RNNCellWrapperV1\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member {
name: "wrapped_cell"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'cell\', \'input_keep_prob\', \'output_keep_prob\', \'state_keep_prob\', \'variational_recurrent\', \'input_size\', \'dtype\', \'seed\', \'dropout_state_filter_visitor\'], varargs=None, keywords=kwargs, defaults=[\'1.0\', \'1.0\', \'1.0\', \'False\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'inputs_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\', \'custom_objects\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.GRUCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.GRUCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.LayerRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'num_units\', \'activation\', \'reuse\', \'kernel_initializer\', \'bias_initializer\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'state\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.LSTMCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.LSTMCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.LayerRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'num_units\', \'use_peepholes\', \'cell_clip\', \'initializer\', \'num_proj\', \'proj_clip\', \'num_unit_shards\', \'num_proj_shards\', \'forget_bias\', \'state_is_tuple\', \'activation\', \'reuse\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'False\', \'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'1.0\', \'True\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'instance\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'state\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,27 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.LSTMStateTuple"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.LSTMStateTuple\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.LSTMStateTuple\'>"
is_instance: "<type \'tuple\'>"
member {
name: "c"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "h"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
}
member_method {
name: "count"
}
member_method {
name: "index"
}
}

@ -1,281 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.MultiRNNCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.MultiRNNCell\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'cells\', \'state_is_tuple\'], varargs=None, keywords=None, defaults=[\'True\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'_\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\', \'state\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,280 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.RNNCell"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'trainable\', \'name\', \'dtype\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'None\', \'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'_\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,282 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell.ResidualWrapper"
tf_class {
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers.ResidualWrapper\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cell_wrappers._RNNCellWrapperV1\'>"
is_instance: "<class \'keras.layers.rnn.legacy_cells.RNNCell\'>"
is_instance: "<class \'keras.legacy_tf_layers.base.Layer\'>"
is_instance: "<class \'keras.engine.base_layer_v1.Layer\'>"
is_instance: "<class \'keras.engine.base_layer.Layer\'>"
is_instance: "<class \'tensorflow.python.module.module.Module\'>"
is_instance: "<class \'tensorflow.python.trackable.autotrackable.AutoTrackable\'>"
is_instance: "<class \'tensorflow.python.trackable.base.Trackable\'>"
is_instance: "<class \'keras.utils.version_utils.LayerVersionSelector\'>"
is_instance: "<type \'object\'>"
member {
name: "activity_regularizer"
mtype: "<type \'property\'>"
}
member {
name: "compute_dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype"
mtype: "<type \'property\'>"
}
member {
name: "dtype_policy"
mtype: "<type \'property\'>"
}
member {
name: "dynamic"
mtype: "<type \'property\'>"
}
member {
name: "graph"
mtype: "<type \'property\'>"
}
member {
name: "inbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "input"
mtype: "<type \'property\'>"
}
member {
name: "input_mask"
mtype: "<type \'property\'>"
}
member {
name: "input_shape"
mtype: "<type \'property\'>"
}
member {
name: "input_spec"
mtype: "<type \'property\'>"
}
member {
name: "losses"
mtype: "<type \'property\'>"
}
member {
name: "metrics"
mtype: "<type \'property\'>"
}
member {
name: "name"
mtype: "<type \'property\'>"
}
member {
name: "name_scope"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "non_trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "outbound_nodes"
mtype: "<type \'property\'>"
}
member {
name: "output"
mtype: "<type \'property\'>"
}
member {
name: "output_mask"
mtype: "<type \'property\'>"
}
member {
name: "output_shape"
mtype: "<type \'property\'>"
}
member {
name: "output_size"
mtype: "<type \'property\'>"
}
member {
name: "scope_name"
mtype: "<type \'property\'>"
}
member {
name: "state_size"
mtype: "<type \'property\'>"
}
member {
name: "stateful"
mtype: "<type \'property\'>"
}
member {
name: "submodules"
mtype: "<type \'property\'>"
}
member {
name: "supports_masking"
mtype: "<type \'property\'>"
}
member {
name: "trainable"
mtype: "<type \'property\'>"
}
member {
name: "trainable_variables"
mtype: "<type \'property\'>"
}
member {
name: "trainable_weights"
mtype: "<type \'property\'>"
}
member {
name: "updates"
mtype: "<type \'property\'>"
}
member {
name: "variable_dtype"
mtype: "<type \'property\'>"
}
member {
name: "variables"
mtype: "<type \'property\'>"
}
member {
name: "weights"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'cell\', \'residual_fn\'], varargs=None, keywords=kwargs, defaults=[\'None\'], "
}
member_method {
name: "add_loss"
argspec: "args=[\'self\', \'losses\', \'inputs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "add_metric"
argspec: "args=[\'self\', \'value\', \'aggregation\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "add_update"
argspec: "args=[\'self\', \'updates\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "add_variable"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "add_weight"
argspec: "args=[\'self\', \'name\', \'shape\', \'dtype\', \'initializer\', \'regularizer\', \'trainable\', \'constraint\', \'use_resource\', \'synchronization\', \'aggregation\', \'partitioner\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\', \'VariableSynchronization.AUTO\', \'VariableAggregation.NONE\', \'None\'], "
}
member_method {
name: "apply"
argspec: "args=[\'self\'], varargs=args, keywords=kwargs, defaults=None"
}
member_method {
name: "build"
argspec: "args=[\'self\', \'_\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "build_from_config"
argspec: "args=[\'self\', \'config\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "call"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=kwargs, defaults=None"
}
member_method {
name: "compute_mask"
argspec: "args=[\'self\', \'inputs\', \'mask\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "compute_output_shape"
argspec: "args=[\'self\', \'input_shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "compute_output_signature"
argspec: "args=[\'self\', \'input_signature\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "finalize_state"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "from_config"
argspec: "args=[\'cls\', \'config\', \'custom_objects\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "get_build_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_config"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_initial_state"
argspec: "args=[\'self\', \'inputs\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "get_input_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_input_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_losses_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_mask_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_output_shape_at"
argspec: "args=[\'self\', \'node_index\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_updates_for"
argspec: "args=[\'self\', \'inputs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_weights"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "load_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "save_own_variables"
argspec: "args=[\'self\', \'store\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_weights"
argspec: "args=[\'self\', \'weights\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "with_name_scope"
argspec: "args=[\'cls\', \'method\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "zero_state"
argspec: "args=[\'self\', \'batch_size\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
}
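A minimal sketch of how the ResidualWrapper signatures listed above are exercised; the constructor arguments and zero_state(batch_size, dtype) come from the argspecs in this file, while GRUCell's num_units argument is an assumption, since its argspec does not appear here:

    import tensorflow as tf

    rnn_cell = tf.keras.__internal__.legacy.rnn_cell

    # Wrap a legacy cell so its input is added to its output at call time.
    # `cell` and the optional `residual_fn` follow the __init__ argspec above;
    # GRUCell(16) assumes a num_units-style first argument (not shown in this file).
    cell = rnn_cell.GRUCell(16)
    wrapped = rnn_cell.ResidualWrapper(cell, residual_fn=None)

    # zero_state(batch_size, dtype), as listed in the member_method argspec above.
    state = wrapped.zero_state(batch_size=4, dtype=tf.float32)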

@ -1,43 +0,0 @@
path: "tensorflow.keras.__internal__.legacy.rnn_cell"
tf_module {
member {
name: "BasicLSTMCell"
mtype: "<type \'type\'>"
}
member {
name: "BasicRNNCell"
mtype: "<type \'type\'>"
}
member {
name: "DeviceWrapper"
mtype: "<type \'type\'>"
}
member {
name: "DropoutWrapper"
mtype: "<type \'type\'>"
}
member {
name: "GRUCell"
mtype: "<type \'type\'>"
}
member {
name: "LSTMCell"
mtype: "<type \'type\'>"
}
member {
name: "LSTMStateTuple"
mtype: "<type \'type\'>"
}
member {
name: "MultiRNNCell"
mtype: "<type \'type\'>"
}
member {
name: "RNNCell"
mtype: "<type \'type\'>"
}
member {
name: "ResidualWrapper"
mtype: "<type \'type\'>"
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.__internal__"
tf_module {
member {
name: "layers"
mtype: "<type \'module\'>"
}
member {
name: "legacy"
mtype: "<type \'module\'>"
}
member_method {
name: "enable_unsafe_deserialization"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
}
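A one-line sketch of the only member_method recorded above; it takes no arguments per the argspec:

    import tensorflow as tf

    # Opts back in to deserializing saved models that contain arbitrary
    # Python code (e.g. Lambda layers); accepts no arguments.
    tf.keras.__internal__.enable_unsafe_deserialization()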

@ -1,67 +0,0 @@
path: "tensorflow.keras.activations"
tf_module {
member_method {
name: "deserialize"
argspec: "args=[\'name\', \'custom_objects\', \'use_legacy_format\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "elu"
argspec: "args=[\'x\', \'alpha\'], varargs=None, keywords=None, defaults=[\'1.0\'], "
}
member_method {
name: "exponential"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get"
argspec: "args=[\'identifier\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "hard_sigmoid"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "linear"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "mish"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "relu"
argspec: "args=[\'x\', \'alpha\', \'max_value\', \'threshold\'], varargs=None, keywords=None, defaults=[\'0.0\', \'None\', \'0.0\'], "
}
member_method {
name: "selu"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "serialize"
argspec: "args=[\'activation\', \'use_legacy_format\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "sigmoid"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "softmax"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "softplus"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "softsign"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "swish"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "tanh"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
}
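A short sketch exercising a few of the activation signatures above; the input tensor values are illustrative:

    import tensorflow as tf

    x = tf.constant([-2.0, -0.5, 0.0, 1.5])

    # relu(x, alpha=0.0, max_value=None, threshold=0.0) per the argspec above.
    leaky_capped = tf.keras.activations.relu(x, alpha=0.1, max_value=6.0)

    # softmax defaults to axis=-1.
    probs = tf.keras.activations.softmax(tf.reshape(x, (1, 4)))

    # get() resolves a string identifier to the function; serialize() maps it back.
    fn = tf.keras.activations.get("swish")
    serialized = tf.keras.activations.serialize(fn)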

@ -1,31 +0,0 @@
path: "tensorflow.keras.applications.convnext"
tf_module {
member_method {
name: "ConvNeXtBase"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_base\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtLarge"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_large\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtSmall"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_small\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtTiny"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_tiny\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtXLarge"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_xlarge\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}
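A minimal sketch of the convnext module above; the random input array is an illustrative assumption, and weights=None deviates from the 'imagenet' default only to avoid the checkpoint download:

    import numpy as np
    import tensorflow as tf

    # Constructor defaults per the argspec above: include_top=True,
    # include_preprocessing=True, weights='imagenet', classes=1000,
    # classifier_activation='softmax'.
    model = tf.keras.applications.convnext.ConvNeXtTiny(weights=None)

    images = np.random.rand(1, 224, 224, 3).astype("float32")  # illustrative input
    x = tf.keras.applications.convnext.preprocess_input(images)
    preds = model.predict(x)

    # decode_predictions(preds, top=5) per the argspec above.
    top5 = tf.keras.applications.convnext.decode_predictions(preds, top=5)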

@ -1,23 +0,0 @@
path: "tensorflow.keras.applications.densenet"
tf_module {
member_method {
name: "DenseNet121"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "DenseNet169"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "DenseNet201"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,43 +0,0 @@
path: "tensorflow.keras.applications.efficientnet"
tf_module {
member_method {
name: "EfficientNetB0"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB1"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB4"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB5"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB6"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB7"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,39 +0,0 @@
path: "tensorflow.keras.applications.efficientnet_v2"
tf_module {
member_method {
name: "EfficientNetV2B0"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B1"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2L"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2M"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2S"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,11 +0,0 @@
path: "tensorflow.keras.applications.imagenet_utils"
tf_module {
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\', \'mode\'], varargs=None, keywords=None, defaults=[\'None\', \'caffe\'], "
}
}
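A sketch of the imagenet_utils helpers above; only the 'caffe' default for mode is recorded in this file, so mode='tf' (scaling to [-1, 1]) is an assumption about the accepted values:

    import numpy as np
    import tensorflow as tf

    images = np.random.rand(2, 224, 224, 3) * 255.0  # illustrative pixel values in [0, 255]

    # preprocess_input(x, data_format=None, mode='caffe') per the argspec above.
    x = tf.keras.applications.imagenet_utils.preprocess_input(images, mode="tf")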

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.inception_resnet_v2"
tf_module {
member_method {
name: "InceptionResNetV2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.inception_v3"
tf_module {
member_method {
name: "InceptionV3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.mobilenet"
tf_module {
member_method {
name: "MobileNet"
argspec: "args=[\'input_shape\', \'alpha\', \'depth_multiplier\', \'dropout\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'1.0\', \'1\', \'0.001\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}
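A minimal sketch of the MobileNet constructor above, which is the one exposing alpha, depth_multiplier and dropout; weights=None deviates from the 'imagenet' default only to avoid the checkpoint download:

    import tensorflow as tf

    # Defaults per the argspec above: alpha=1.0, depth_multiplier=1,
    # dropout=0.001, include_top=True, weights='imagenet'.
    model = tf.keras.applications.mobilenet.MobileNet(
        input_shape=(224, 224, 3),
        alpha=0.5,        # width multiplier
        dropout=0.2,
        weights=None,
    )
    model.summary()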

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.mobilenet_v2"
tf_module {
member_method {
name: "MobileNetV2"
argspec: "args=[\'input_shape\', \'alpha\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'1.0\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,11 +0,0 @@
path: "tensorflow.keras.applications.mobilenet_v3"
tf_module {
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,19 +0,0 @@
path: "tensorflow.keras.applications.nasnet"
tf_module {
member_method {
name: "NASNetLarge"
argspec: "args=[\'input_shape\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "NASNetMobile"
argspec: "args=[\'input_shape\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,363 +0,0 @@
path: "tensorflow.keras.applications"
tf_module {
member {
name: "convnext"
mtype: "<type \'module\'>"
}
member {
name: "densenet"
mtype: "<type \'module\'>"
}
member {
name: "efficientnet"
mtype: "<type \'module\'>"
}
member {
name: "efficientnet_v2"
mtype: "<type \'module\'>"
}
member {
name: "imagenet_utils"
mtype: "<type \'module\'>"
}
member {
name: "inception_resnet_v2"
mtype: "<type \'module\'>"
}
member {
name: "inception_v3"
mtype: "<type \'module\'>"
}
member {
name: "mobilenet"
mtype: "<type \'module\'>"
}
member {
name: "mobilenet_v2"
mtype: "<type \'module\'>"
}
member {
name: "mobilenet_v3"
mtype: "<type \'module\'>"
}
member {
name: "nasnet"
mtype: "<type \'module\'>"
}
member {
name: "regnet"
mtype: "<type \'module\'>"
}
member {
name: "resnet"
mtype: "<type \'module\'>"
}
member {
name: "resnet50"
mtype: "<type \'module\'>"
}
member {
name: "resnet_rs"
mtype: "<type \'module\'>"
}
member {
name: "resnet_v2"
mtype: "<type \'module\'>"
}
member {
name: "vgg16"
mtype: "<type \'module\'>"
}
member {
name: "vgg19"
mtype: "<type \'module\'>"
}
member {
name: "xception"
mtype: "<type \'module\'>"
}
member_method {
name: "ConvNeXtBase"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_base\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtLarge"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_large\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtSmall"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_small\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtTiny"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_tiny\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ConvNeXtXLarge"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'convnext_xlarge\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "DenseNet121"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "DenseNet169"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "DenseNet201"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB0"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB1"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB4"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB5"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB6"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetB7"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "EfficientNetV2B0"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B1"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2B3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2L"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2M"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "EfficientNetV2S"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\', \'True\'], "
}
member_method {
name: "InceptionResNetV2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "InceptionV3"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "MobileNet"
argspec: "args=[\'input_shape\', \'alpha\', \'depth_multiplier\', \'dropout\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'1.0\', \'1\', \'0.001\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "MobileNetV2"
argspec: "args=[\'input_shape\', \'alpha\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'1.0\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "MobileNetV3Large"
argspec: "args=[\'input_shape\', \'alpha\', \'minimalistic\', \'include_top\', \'weights\', \'input_tensor\', \'classes\', \'pooling\', \'dropout_rate\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'1.0\', \'False\', \'True\', \'imagenet\', \'None\', \'1000\', \'None\', \'0.2\', \'softmax\', \'True\'], "
}
member_method {
name: "MobileNetV3Small"
argspec: "args=[\'input_shape\', \'alpha\', \'minimalistic\', \'include_top\', \'weights\', \'input_tensor\', \'classes\', \'pooling\', \'dropout_rate\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'None\', \'1.0\', \'False\', \'True\', \'imagenet\', \'None\', \'1000\', \'None\', \'0.2\', \'softmax\', \'True\'], "
}
member_method {
name: "NASNetLarge"
argspec: "args=[\'input_shape\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "NASNetMobile"
argspec: "args=[\'input_shape\', \'include_top\', \'weights\', \'input_tensor\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'imagenet\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX002"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx002\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX004"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx004\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX006"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx006\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX008"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx008\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX016"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx016\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX032"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx032\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX040"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx040\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX064"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx064\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX080"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx080\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX120"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx120\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX160"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx160\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX320"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx320\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY002"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety002\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY004"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety004\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY006"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety006\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY008"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety008\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY016"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety016\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY032"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety032\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY040"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety040\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY064"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety064\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY080"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety080\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY120"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety120\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY160"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety160\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY320"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety320\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNet101"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "ResNet101V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNet152"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "ResNet152V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNet50"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "ResNet50V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNetRS101"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS152"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS200"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS270"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS350"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS420"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS50"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "VGG16"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "VGG19"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "Xception"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
}
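A sketch of a pattern the argspecs above all support: every constructor accepts include_top, weights, input_shape and pooling, so any of them can serve as a headless feature extractor. ResNet50V2 is just one choice here, and the 10-class head is an illustrative assumption:

    import tensorflow as tf

    # weights=None avoids downloading the ImageNet checkpoint.
    backbone = tf.keras.applications.ResNet50V2(
        include_top=False,
        weights=None,
        input_shape=(160, 160, 3),
        pooling="avg",
    )

    inputs = tf.keras.Input(shape=(160, 160, 3))
    features = backbone(inputs)                      # (None, 2048) with pooling='avg'
    outputs = tf.keras.layers.Dense(10, activation="softmax")(features)
    model = tf.keras.Model(inputs, outputs)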

@ -1,107 +0,0 @@
path: "tensorflow.keras.applications.regnet"
tf_module {
member_method {
name: "RegNetX002"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx002\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX004"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx004\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX006"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx006\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX008"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx008\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX016"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx016\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX032"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx032\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX040"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx040\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX064"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx064\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX080"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx080\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX120"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx120\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX160"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx160\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetX320"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnetx320\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY002"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety002\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY004"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety004\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY006"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety006\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY008"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety008\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY016"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety016\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY032"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety032\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY040"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety040\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY064"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety064\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY080"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety080\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY120"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety120\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY160"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety160\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "RegNetY320"
argspec: "args=[\'model_name\', \'include_top\', \'include_preprocessing\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'regnety320\', \'True\', \'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}
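(Illustrative sketch, not part of the recorded golden file: how the RegNet constructors listed above are typically called. Assumes a TF release that ships tf.keras.applications.regnet; weights=None is used here only to avoid downloading the ImageNet weights.)

import numpy as np
import tensorflow as tf

# Every RegNetX*/RegNetY* entry above shares this signature; only model_name differs.
model = tf.keras.applications.regnet.RegNetY002(
    include_top=True, weights=None, classes=1000)
images = np.random.rand(1, 224, 224, 3).astype("float32")
# preprocess_input is listed above and accepts the raw image batch.
images = tf.keras.applications.regnet.preprocess_input(images)
probs = model.predict(images)  # shape (1, 1000); softmax per the classifier_activation default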

@ -1,23 +0,0 @@
path: "tensorflow.keras.applications.resnet"
tf_module {
member_method {
name: "ResNet101"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "ResNet152"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "ResNet50"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.resnet50"
tf_module {
member_method {
name: "ResNet50"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\'], varargs=None, keywords=kwargs, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}
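(Illustrative sketch, not part of the recorded golden files: the resnet and resnet50 modules above both expose ResNet50 together with preprocess_input and decode_predictions, and the resnet_v2, vgg16, vgg19 and xception modules further below follow the same call pattern. Assumes the ImageNet weights can be downloaded.)

import numpy as np
from tensorflow.keras.applications import resnet50

model = resnet50.ResNet50(include_top=True, weights="imagenet", classes=1000)
x = np.random.rand(1, 224, 224, 3).astype("float32") * 255.0   # stand-in for a real image batch
x = resnet50.preprocess_input(x)                 # per-channel mean subtraction expected by the weights
preds = model.predict(x)                         # shape (1, 1000)
print(resnet50.decode_predictions(preds, top=5)) # top-5 (class_id, class_name, score) tuples per sample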

@ -1,39 +0,0 @@
path: "tensorflow.keras.applications.resnet_rs"
tf_module {
member_method {
name: "ResNetRS101"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS152"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS200"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS270"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS350"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS420"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "ResNetRS50"
argspec: "args=[\'include_top\', \'weights\', \'classes\', \'input_shape\', \'input_tensor\', \'pooling\', \'classifier_activation\', \'include_preprocessing\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'1000\', \'None\', \'None\', \'None\', \'softmax\', \'True\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}
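(Illustrative sketch, not part of the recorded golden file: the ResNetRS variants above take the same keyword arguments but list classes before input_shape and add include_preprocessing, whose default of True lets the model handle its own input rescaling.)

import tensorflow as tf

# weights=None builds the architecture without fetching pretrained weights.
model = tf.keras.applications.resnet_rs.ResNetRS50(
    include_top=True, weights=None, classes=1000, include_preprocessing=True)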

@ -1,23 +0,0 @@
path: "tensorflow.keras.applications.resnet_v2"
tf_module {
member_method {
name: "ResNet101V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNet152V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "ResNet50V2"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.vgg16"
tf_module {
member_method {
name: "VGG16"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.vgg19"
tf_module {
member_method {
name: "VGG19"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,15 +0,0 @@
path: "tensorflow.keras.applications.xception"
tf_module {
member_method {
name: "Xception"
argspec: "args=[\'include_top\', \'weights\', \'input_tensor\', \'input_shape\', \'pooling\', \'classes\', \'classifier_activation\'], varargs=None, keywords=None, defaults=[\'True\', \'imagenet\', \'None\', \'None\', \'None\', \'1000\', \'softmax\'], "
}
member_method {
name: "decode_predictions"
argspec: "args=[\'preds\', \'top\'], varargs=None, keywords=None, defaults=[\'5\'], "
}
member_method {
name: "preprocess_input"
argspec: "args=[\'x\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
}

@ -1,13 +0,0 @@
path: "tensorflow.keras.backend.name_scope"
tf_class {
is_instance: "<class \'tensorflow.python.framework.ops.name_scope_v1\'>"
is_instance: "<type \'object\'>"
member {
name: "name"
mtype: "<type \'property\'>"
}
member_method {
name: "__init__"
argspec: "args=[\'self\', \'name\', \'default_name\', \'values\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
}
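(Illustrative sketch, not part of the recorded golden file: name_scope maps to the TF1-style name_scope_v1 class above and is used as a context manager so that ops created inside it pick up a name prefix during graph construction.)

import tensorflow as tf

with tf.keras.backend.name_scope("block1"):
    # ops created here are prefixed with "block1/" when a graph is being traced
    y = tf.keras.backend.constant(1.0, name="y")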

@ -1,599 +0,0 @@
path: "tensorflow.keras.backend"
tf_module {
member {
name: "name_scope"
mtype: "<type \'type\'>"
}
member_method {
name: "abs"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "all"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "any"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "arange"
argspec: "args=[\'start\', \'stop\', \'step\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'int32\'], "
}
member_method {
name: "argmax"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "argmin"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "backend"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "batch_dot"
argspec: "args=[\'x\', \'y\', \'axes\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "batch_flatten"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "batch_get_value"
argspec: "args=[\'tensors\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "batch_normalization"
argspec: "args=[\'x\', \'mean\', \'var\', \'beta\', \'gamma\', \'axis\', \'epsilon\'], varargs=None, keywords=None, defaults=[\'-1\', \'0.001\'], "
}
member_method {
name: "batch_set_value"
argspec: "args=[\'tuples\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "bias_add"
argspec: "args=[\'x\', \'bias\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "binary_crossentropy"
argspec: "args=[\'target\', \'output\', \'from_logits\'], varargs=None, keywords=None, defaults=[\'False\'], "
}
member_method {
name: "binary_focal_crossentropy"
argspec: "args=[\'target\', \'output\', \'apply_class_balancing\', \'alpha\', \'gamma\', \'from_logits\'], varargs=None, keywords=None, defaults=[\'False\', \'0.25\', \'2.0\', \'False\'], "
}
member_method {
name: "cast"
argspec: "args=[\'x\', \'dtype\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "cast_to_floatx"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "categorical_crossentropy"
argspec: "args=[\'target\', \'output\', \'from_logits\', \'axis\'], varargs=None, keywords=None, defaults=[\'False\', \'-1\'], "
}
member_method {
name: "categorical_focal_crossentropy"
argspec: "args=[\'target\', \'output\', \'alpha\', \'gamma\', \'from_logits\', \'axis\'], varargs=None, keywords=None, defaults=[\'0.25\', \'2.0\', \'False\', \'-1\'], "
}
member_method {
name: "clear_session"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "clip"
argspec: "args=[\'x\', \'min_value\', \'max_value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "concatenate"
argspec: "args=[\'tensors\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "constant"
argspec: "args=[\'value\', \'dtype\', \'shape\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "conv1d"
argspec: "args=[\'x\', \'kernel\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'1\', \'valid\', \'None\', \'1\'], "
}
member_method {
name: "conv2d"
argspec: "args=[\'x\', \'kernel\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'None\', \'(1, 1)\'], "
}
member_method {
name: "conv2d_transpose"
argspec: "args=[\'x\', \'kernel\', \'output_shape\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'None\', \'(1, 1)\'], "
}
member_method {
name: "conv3d"
argspec: "args=[\'x\', \'kernel\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'(1, 1, 1)\', \'valid\', \'None\', \'(1, 1, 1)\'], "
}
member_method {
name: "cos"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "count_params"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "ctc_batch_cost"
argspec: "args=[\'y_true\', \'y_pred\', \'input_length\', \'label_length\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "ctc_decode"
argspec: "args=[\'y_pred\', \'input_length\', \'greedy\', \'beam_width\', \'top_paths\'], varargs=None, keywords=None, defaults=[\'True\', \'100\', \'1\'], "
}
member_method {
name: "ctc_label_dense_to_sparse"
argspec: "args=[\'labels\', \'label_lengths\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "cumprod"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
}
member_method {
name: "cumsum"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
}
member_method {
name: "depthwise_conv2d"
argspec: "args=[\'x\', \'depthwise_kernel\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'None\', \'(1, 1)\'], "
}
member_method {
name: "dot"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "dropout"
argspec: "args=[\'x\', \'level\', \'noise_shape\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "dtype"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "elu"
argspec: "args=[\'x\', \'alpha\'], varargs=None, keywords=None, defaults=[\'1.0\'], "
}
member_method {
name: "epsilon"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "equal"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "eval"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "exp"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "expand_dims"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "eye"
argspec: "args=[\'size\', \'dtype\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "flatten"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "floatx"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "foldl"
argspec: "args=[\'fn\', \'elems\', \'initializer\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "foldr"
argspec: "args=[\'fn\', \'elems\', \'initializer\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "function"
argspec: "args=[\'inputs\', \'outputs\', \'updates\', \'name\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\'], "
}
member_method {
name: "gather"
argspec: "args=[\'reference\', \'indices\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "get_session"
argspec: "args=[\'op_input_list\'], varargs=None, keywords=None, defaults=[\'()\'], "
}
member_method {
name: "get_uid"
argspec: "args=[\'prefix\'], varargs=None, keywords=None, defaults=[\'\'], "
}
member_method {
name: "get_value"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "gradients"
argspec: "args=[\'loss\', \'variables\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "greater"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "greater_equal"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "hard_sigmoid"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "image_data_format"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "in_test_phase"
argspec: "args=[\'x\', \'alt\', \'training\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "in_top_k"
argspec: "args=[\'predictions\', \'targets\', \'k\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "in_train_phase"
argspec: "args=[\'x\', \'alt\', \'training\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "int_shape"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "is_keras_tensor"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "is_sparse"
argspec: "args=[\'tensor\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "l2_normalize"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "learning_phase"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "learning_phase_scope"
argspec: "args=[\'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "less"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "less_equal"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "local_conv1d"
argspec: "args=[\'inputs\', \'kernel\', \'kernel_size\', \'strides\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "local_conv2d"
argspec: "args=[\'inputs\', \'kernel\', \'kernel_size\', \'strides\', \'output_shape\', \'data_format\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "log"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "manual_variable_initialization"
argspec: "args=[\'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "map_fn"
argspec: "args=[\'fn\', \'elems\', \'name\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "max"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "maximum"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "mean"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "min"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "minimum"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "moving_average_update"
argspec: "args=[\'x\', \'value\', \'momentum\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "ndim"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "normalize_batch_in_training"
argspec: "args=[\'x\', \'gamma\', \'beta\', \'reduction_axes\', \'epsilon\'], varargs=None, keywords=None, defaults=[\'0.001\'], "
}
member_method {
name: "not_equal"
argspec: "args=[\'x\', \'y\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "one_hot"
argspec: "args=[\'indices\', \'num_classes\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "ones"
argspec: "args=[\'shape\', \'dtype\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "ones_like"
argspec: "args=[\'x\', \'dtype\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "permute_dimensions"
argspec: "args=[\'x\', \'pattern\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "placeholder"
argspec: "args=[\'shape\', \'ndim\', \'dtype\', \'sparse\', \'name\', \'ragged\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'False\', \'None\', \'False\'], "
}
member_method {
name: "pool2d"
argspec: "args=[\'x\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'pool_mode\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'None\', \'max\'], "
}
member_method {
name: "pool3d"
argspec: "args=[\'x\', \'pool_size\', \'strides\', \'padding\', \'data_format\', \'pool_mode\'], varargs=None, keywords=None, defaults=[\'(1, 1, 1)\', \'valid\', \'None\', \'max\'], "
}
member_method {
name: "pow"
argspec: "args=[\'x\', \'a\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "print_tensor"
argspec: "args=[\'x\', \'message\', \'summarize\'], varargs=None, keywords=None, defaults=[\'\', \'3\'], "
}
member_method {
name: "prod"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "random_bernoulli"
argspec: "args=[\'shape\', \'p\', \'dtype\', \'seed\'], varargs=None, keywords=None, defaults=[\'0.0\', \'None\', \'None\'], "
}
member_method {
name: "random_binomial"
argspec: "args=[\'shape\', \'p\', \'dtype\', \'seed\'], varargs=None, keywords=None, defaults=[\'0.0\', \'None\', \'None\'], "
}
member_method {
name: "random_normal"
argspec: "args=[\'shape\', \'mean\', \'stddev\', \'dtype\', \'seed\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\', \'None\'], "
}
member_method {
name: "random_normal_variable"
argspec: "args=[\'shape\', \'mean\', \'scale\', \'dtype\', \'name\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "random_uniform"
argspec: "args=[\'shape\', \'minval\', \'maxval\', \'dtype\', \'seed\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\', \'None\'], "
}
member_method {
name: "random_uniform_variable"
argspec: "args=[\'shape\', \'low\', \'high\', \'dtype\', \'name\', \'seed\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "relu"
argspec: "args=[\'x\', \'alpha\', \'max_value\', \'threshold\'], varargs=None, keywords=None, defaults=[\'0.0\', \'None\', \'0.0\'], "
}
member_method {
name: "repeat"
argspec: "args=[\'x\', \'n\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "repeat_elements"
argspec: "args=[\'x\', \'rep\', \'axis\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reset_uids"
argspec: "args=[], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reshape"
argspec: "args=[\'x\', \'shape\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "resize_images"
argspec: "args=[\'x\', \'height_factor\', \'width_factor\', \'data_format\', \'interpolation\'], varargs=None, keywords=None, defaults=[\'nearest\'], "
}
member_method {
name: "resize_volumes"
argspec: "args=[\'x\', \'depth_factor\', \'height_factor\', \'width_factor\', \'data_format\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "reverse"
argspec: "args=[\'x\', \'axes\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "rnn"
argspec: "args=[\'step_function\', \'inputs\', \'initial_states\', \'go_backwards\', \'mask\', \'constants\', \'unroll\', \'input_length\', \'time_major\', \'zero_output_for_mask\', \'return_all_outputs\'], varargs=None, keywords=None, defaults=[\'False\', \'None\', \'None\', \'False\', \'None\', \'False\', \'False\', \'True\'], "
}
member_method {
name: "round"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "separable_conv2d"
argspec: "args=[\'x\', \'depthwise_kernel\', \'pointwise_kernel\', \'strides\', \'padding\', \'data_format\', \'dilation_rate\'], varargs=None, keywords=None, defaults=[\'(1, 1)\', \'valid\', \'None\', \'(1, 1)\'], "
}
member_method {
name: "set_epsilon"
argspec: "args=[\'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_floatx"
argspec: "args=[\'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_image_data_format"
argspec: "args=[\'data_format\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_learning_phase"
argspec: "args=[\'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_session"
argspec: "args=[\'session\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_value"
argspec: "args=[\'x\', \'value\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "shape"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "sigmoid"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "sign"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "sin"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "softmax"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\'], "
}
member_method {
name: "softplus"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "softsign"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "sparse_categorical_crossentropy"
argspec: "args=[\'target\', \'output\', \'from_logits\', \'axis\', \'ignore_class\'], varargs=None, keywords=None, defaults=[\'False\', \'-1\', \'None\'], "
}
member_method {
name: "spatial_2d_padding"
argspec: "args=[\'x\', \'padding\', \'data_format\'], varargs=None, keywords=None, defaults=[\'((1, 1), (1, 1))\', \'None\'], "
}
member_method {
name: "spatial_3d_padding"
argspec: "args=[\'x\', \'padding\', \'data_format\'], varargs=None, keywords=None, defaults=[\'((1, 1), (1, 1), (1, 1))\', \'None\'], "
}
member_method {
name: "sqrt"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "square"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "squeeze"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "stack"
argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
}
member_method {
name: "std"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "stop_gradient"
argspec: "args=[\'variables\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "sum"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "switch"
argspec: "args=[\'condition\', \'then_expression\', \'else_expression\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "tanh"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "temporal_padding"
argspec: "args=[\'x\', \'padding\'], varargs=None, keywords=None, defaults=[\'(1, 1)\'], "
}
member_method {
name: "tile"
argspec: "args=[\'x\', \'n\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "to_dense"
argspec: "args=[\'tensor\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "transpose"
argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "truncated_normal"
argspec: "args=[\'shape\', \'mean\', \'stddev\', \'dtype\', \'seed\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\', \'None\'], "
}
member_method {
name: "update"
argspec: "args=[\'x\', \'new_x\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "update_add"
argspec: "args=[\'x\', \'increment\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "update_sub"
argspec: "args=[\'x\', \'decrement\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "var"
argspec: "args=[\'x\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
}
member_method {
name: "variable"
argspec: "args=[\'value\', \'dtype\', \'name\', \'constraint\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
}
member_method {
name: "zeros"
argspec: "args=[\'shape\', \'dtype\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
member_method {
name: "zeros_like"
argspec: "args=[\'x\', \'dtype\', \'name\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
}
}
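(Illustrative sketch, not part of the recorded golden file: the module above is the low-level backend API, conventionally imported as K. A few of the listed functions in use, following the argspecs recorded above.)

from tensorflow.keras import backend as K

a = K.constant([[1.0, 2.0], [3.0, 4.0]])   # constant(value, dtype=None, shape=None, name=None)
b = K.ones((2, 2))                          # ones(shape, dtype=None, name=None)
c = K.dot(a, b)                             # matrix product of two tensors
m = K.mean(c, axis=-1, keepdims=False)      # mean(x, axis=None, keepdims=False)
print(K.eval(m))                            # eval(x) returns the tensor value as a NumPy array
print(K.epsilon(), K.floatx(), K.image_data_format())  # global backend configuration getters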

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.BaseLogger"
tf_class {
is_instance: "<class \'keras.callbacks.BaseLogger\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'stateful_metrics\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.CSVLogger"
tf_class {
is_instance: "<class \'keras.callbacks.CSVLogger\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filename\', \'separator\', \'append\'], varargs=None, keywords=None, defaults=[\',\', \'False\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
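(Illustrative sketch, not part of the recorded golden file: only CSVLogger.__init__ above is specific to this class; the remaining hooks are the shared Callback interface.)

from tensorflow.keras.callbacks import CSVLogger

# separator and append mirror the defaults recorded above (',' and False)
csv_logger = CSVLogger("training_log.csv", separator=",", append=False)
# then: model.fit(x, y, callbacks=[csv_logger]) writes one row of metrics per epoch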

@ -1,89 +0,0 @@
path: "tensorflow.keras.callbacks.CallbackList"
tf_class {
is_instance: "<class \'keras.callbacks.CallbackList\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'callbacks\', \'add_history\', \'add_progbar\', \'model\'], varargs=None, keywords=params, defaults=[\'None\', \'False\', \'False\', \'None\'], "
}
member_method {
name: "append"
argspec: "args=[\'self\', \'callback\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "make_logs"
argspec: "args=[\'self\', \'model\', \'logs\', \'outputs\', \'mode\', \'prefix\'], varargs=None, keywords=None, defaults=[\'\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,81 +0,0 @@
path: "tensorflow.keras.callbacks.Callback"
tf_class {
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
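(Illustrative sketch, not part of the recorded golden file: Callback above is the base class whose hook set BaseLogger, CSVLogger and CallbackList repeat; user callbacks subclass it and override only the hooks they need.)

import tensorflow as tf

class LossPrinter(tf.keras.callbacks.Callback):
    """Toy callback: report the loss collected in `logs` at the end of every epoch."""

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        print(f"epoch {epoch}: loss={logs.get('loss')}")

# used as: model.fit(x, y, epochs=3, callbacks=[LossPrinter()])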

@ -1,86 +0,0 @@
path: "tensorflow.keras.callbacks.EarlyStopping"
tf_class {
is_instance: "<class \'keras.callbacks.EarlyStopping\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'monitor\', \'min_delta\', \'patience\', \'verbose\', \'mode\', \'baseline\', \'restore_best_weights\', \'start_from_epoch\'], varargs=None, keywords=None, defaults=[\'val_loss\', \'0\', \'0\', \'0\', \'auto\', \'None\', \'False\', \'0\'], "
}
member_method {
name: "get_monitor_value"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
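(Illustrative sketch, not part of the recorded golden file: constructing EarlyStopping with the arguments listed in __init__ above.)

from tensorflow.keras.callbacks import EarlyStopping

early_stop = EarlyStopping(
    monitor="val_loss",           # quantity watched at the end of each epoch
    min_delta=0.0,                # smallest change that counts as an improvement
    patience=3,                   # epochs without improvement before training stops
    restore_best_weights=True,    # roll the model back to its best-scoring weights
    start_from_epoch=0,           # number of initial epochs to ignore
)
# model.fit(..., validation_data=(x_val, y_val), callbacks=[early_stop])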

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.History"
tf_class {
is_instance: "<class \'keras.callbacks.History\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.LambdaCallback"
tf_class {
is_instance: "<class \'keras.callbacks.LambdaCallback\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'on_epoch_begin\', \'on_epoch_end\', \'on_batch_begin\', \'on_batch_end\', \'on_train_begin\', \'on_train_end\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\', \'None\', \'None\', \'None\', \'None\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
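(Illustrative sketch, not part of the recorded golden file: LambdaCallback wraps plain functions into the hook names listed in its __init__ above; each function receives the same arguments as the corresponding on_* method.)

from tensorflow.keras.callbacks import LambdaCallback

epoch_logger = LambdaCallback(
    on_epoch_begin=lambda epoch, logs: print(f"starting epoch {epoch}"),
    on_train_end=lambda logs: print("training finished"),
)
# model.fit(..., callbacks=[epoch_logger])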

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.LearningRateScheduler"
tf_class {
is_instance: "<class \'keras.callbacks.LearningRateScheduler\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'schedule\', \'verbose\'], varargs=None, keywords=None, defaults=[\'0\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
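(Illustrative sketch, not part of the recorded golden file: LearningRateScheduler takes a schedule function plus the verbose flag listed above; the schedule receives the epoch index and the current learning rate and returns the new rate.)

from tensorflow.keras.callbacks import LearningRateScheduler

def schedule(epoch, lr):
    # keep the initial rate for 10 epochs, then decay it by 4% per epoch
    return lr if epoch < 10 else lr * 0.96

lr_callback = LearningRateScheduler(schedule, verbose=1)
# model.fit(..., callbacks=[lr_callback])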

@ -1,82 +0,0 @@
path: "tensorflow.keras.callbacks.ModelCheckpoint"
tf_class {
is_instance: "<class \'keras.callbacks.ModelCheckpoint\'>"
is_instance: "<class \'keras.callbacks.Callback\'>"
is_instance: "<type \'object\'>"
member_method {
name: "__init__"
argspec: "args=[\'self\', \'filepath\', \'monitor\', \'verbose\', \'save_best_only\', \'save_weights_only\', \'mode\', \'save_freq\', \'options\', \'initial_value_threshold\'], varargs=None, keywords=kwargs, defaults=[\'val_loss\', \'0\', \'False\', \'False\', \'auto\', \'epoch\', \'None\', \'None\'], "
}
member_method {
name: "on_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_begin"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_epoch_end"
argspec: "args=[\'self\', \'epoch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_predict_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_test_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_begin"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_batch_end"
argspec: "args=[\'self\', \'batch\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_begin"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "on_train_end"
argspec: "args=[\'self\', \'logs\'], varargs=None, keywords=None, defaults=[\'None\'], "
}
member_method {
name: "set_model"
argspec: "args=[\'self\', \'model\'], varargs=None, keywords=None, defaults=None"
}
member_method {
name: "set_params"
argspec: "args=[\'self\', \'params\'], varargs=None, keywords=None, defaults=None"
}
}
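(Illustrative sketch, not part of the recorded golden file: constructing ModelCheckpoint with the arguments listed in __init__ above; the filepath is a hypothetical example.)

from tensorflow.keras.callbacks import ModelCheckpoint

checkpoint = ModelCheckpoint(
    filepath="checkpoints/best_model.h5",  # hypothetical output path
    monitor="val_loss",
    verbose=1,
    save_best_only=True,        # keep only the best-scoring checkpoint
    save_weights_only=False,
    mode="auto",
    save_freq="epoch",          # matches the default recorded above
)
# model.fit(..., validation_data=(x_val, y_val), callbacks=[checkpoint])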

Some files were not shown because too many files have changed in this diff