Export #21910 (Open)

Commits (156, showing changes from all commits)
04cd682
Update nn.py
pctablet505 May 6, 2025
1a74465
Update nn.py
pctablet505 May 6, 2025
c11eb81
Update nn.py
pctablet505 May 6, 2025
c81e18c
Update nn.py
pctablet505 May 6, 2025
d938e20
Update nn.py
pctablet505 May 7, 2025
f60811e
Update nn.py
pctablet505 May 7, 2025
b3ae323
Merge branch 'master' of https://github.com/pctablet505/keras
pctablet505 May 12, 2025
28eeb24
Update random_grayscale.py
pctablet505 May 12, 2025
de81e5b
Update keras/src/layers/preprocessing/image_preprocessing/random_gray…
pctablet505 May 12, 2025
66661ac
Update random_grayscale_test.py
pctablet505 May 12, 2025
c37f2b5
code reformat
pctablet505 May 13, 2025
498dece
Update random_grayscale_test.py
pctablet505 May 13, 2025
b0b5f63
Merge branch 'master' of https://github.com/pctablet505/keras
pctablet505 May 21, 2025
653f5b1
changed compute_output_spec
pctablet505 May 21, 2025
e681e4c
Merge branch 'keras-team:master' into master
pctablet505 May 21, 2025
27ad80b
Update random_grayscale.py
pctablet505 May 26, 2025
50f6292
Merge branch 'master' of https://github.com/pctablet505/keras
pctablet505 May 29, 2025
579cc11
Reapply "Fixed issue with dot_product_attention when using TPU. (#21…
pctablet505 May 29, 2025
7a0c547
Improve error handling in _can_use_flash_attention for better debugging
pctablet505 May 29, 2025
f7a2290
Revert "Improve error handling in _can_use_flash_attention for better…
pctablet505 May 29, 2025
8bae892
Fix JAX API compatibility and improve error handling in `_can_use_fla…
pctablet505 May 29, 2025
ee196cd
Updated `dot_product_attention`
pctablet505 May 29, 2025
40583c8
Update nn.py
pctablet505 Jun 7, 2025
7c918ba
Update nn.py
pctablet505 Jun 7, 2025
a927e7e
Merge branch 'keras-team:master' into master
pctablet505 Jun 10, 2025
f04eafa
Merge branch 'keras-team:master' into master
pctablet505 Jul 7, 2025
bbc29a7
Merge branch 'keras-team:master' into master
pctablet505 Jul 14, 2025
bac3416
Merge branch 'keras-team:master' into master
pctablet505 Jul 15, 2025
98877eb
Created using Colab
pctablet505 Aug 18, 2025
39c559b
Merge branch 'keras-team:master' into master
pctablet505 Aug 20, 2025
417e4b1
Delete Model_Pruning.ipynb
pctablet505 Aug 20, 2025
14bfd9b
Merge branch 'master' of https://github.com/pctablet505/keras
pctablet505 Aug 20, 2025
9d34d0a
Merge branch 'keras-team:master' into master
pctablet505 Aug 26, 2025
837506d
Add LiteRT (TFLite) export support to Keras
pctablet505 Aug 28, 2025
631850e
Update lite_rt_exporter.py
pctablet505 Aug 28, 2025
f5aa72e
Update export_utils.py
pctablet505 Aug 28, 2025
2b952d6
Refactor LiteRTExporter to simplify TFLite conversion
pctablet505 Aug 28, 2025
8f81dd5
Refactor import structure to avoid circular dependencies
pctablet505 Aug 28, 2025
011f1d8
trying kerashub
pctablet505 Aug 28, 2025
9a99a32
Enhance LiteRT export for sequence models and large models
pctablet505 Aug 28, 2025
d0070c6
Update lite_rt_exporter.py
pctablet505 Aug 28, 2025
761793f
Update lite_rt_exporter.py
pctablet505 Aug 28, 2025
7bb0506
Prevent tensor overflow for large vocabulary models
pctablet505 Sep 1, 2025
c219eb1
Update export_utils.py
pctablet505 Sep 1, 2025
e26ff6b
Update lite_rt_exporter.py
pctablet505 Sep 1, 2025
4a32e04
Simplify TFLite export and sequence length safety checks
pctablet505 Sep 1, 2025
20d29a8
Merge branch 'keras-team:master' into master
pctablet505 Sep 2, 2025
3aca2f6
Merge branch 'keras-team:master' into export
pctablet505 Sep 2, 2025
926b0a8
Refactor TFLite export logic and add simple exporter
pctablet505 Sep 2, 2025
441a778
Merge branch 'export' of https://github.com/pctablet505/keras into ex…
pctablet505 Sep 2, 2025
4a8a9d5
Improve export robustness for large vocab and Keras-Hub models
pctablet505 Sep 2, 2025
f4b43b4
Update lite_rt_exporter.py
pctablet505 Sep 2, 2025
0fe4bd5
Update lite_rt_exporter.py
pctablet505 Sep 2, 2025
8c3faa3
Update lite_rt_exporter.py
pctablet505 Sep 2, 2025
88b6a6f
Update lite_rt_exporter.py
pctablet505 Sep 2, 2025
da13d04
Update lite_rt_exporter.py
pctablet505 Sep 3, 2025
f1f700c
Update lite_rt_exporter.py
pctablet505 Sep 8, 2025
5944780
Update lite_rt_exporter.py
pctablet505 Sep 9, 2025
4404c39
Update lite_rt_exporter.py
pctablet505 Sep 9, 2025
6a119fb
Update lite_rt_exporter.py
pctablet505 Sep 15, 2025
e469244
Merge branch 'keras-team:master' into master
pctablet505 Sep 16, 2025
4cec7cd
Merge branch 'keras-team:master' into export
pctablet505 Sep 16, 2025
3a7fcc4
Merge branch 'keras-team:master' into export
pctablet505 Sep 17, 2025
51a1c7f
Remove sequence length bounding from export utils
pctablet505 Sep 17, 2025
e1fca24
Delete test_keras_hub_export.py
pctablet505 Sep 17, 2025
fd197d9
Merge branch 'keras-team:master' into master
pctablet505 Sep 18, 2025
214558a
Merge branch 'keras-team:master' into export
pctablet505 Sep 22, 2025
73f00f1
Rename LiteRT exporter to Litert and update references
pctablet505 Sep 29, 2025
ebf11e2
Enhance LiteRT exporter and expand export tests
pctablet505 Sep 29, 2025
c6f0c70
Refactor LiteRT exporter to use module_utils.litert
pctablet505 Sep 30, 2025
a6746e1
Merge branch 'keras-team:master' into master
pctablet505 Sep 30, 2025
3c1d90a
Simplify export_litert return value and messaging
pctablet505 Oct 1, 2025
657a271
Merge branch 'keras-team:master' into export
pctablet505 Oct 1, 2025
8ce8bfa
Merge branch 'export' of https://github.com/pctablet505/keras into ex…
pctablet505 Oct 1, 2025
cd9d063
Update export_utils.py
pctablet505 Oct 1, 2025
fa3d3ed
Refactor input signature inference for export
pctablet505 Oct 3, 2025
e775ff2
simplified code
pctablet505 Oct 3, 2025
34b662d
Refactor LiteRT exporter and update import paths
pctablet505 Oct 6, 2025
33b0550
Merge branch 'keras-team:master' into export
pctablet505 Oct 6, 2025
cbe0229
Refactor import statements for export_utils functions
pctablet505 Oct 6, 2025
e52de85
Update saved_model.py
pctablet505 Oct 6, 2025
87af9ed
Update litert.py
pctablet505 Oct 6, 2025
c643772
Add conditional TensorFlow import for LiteRT export
pctablet505 Oct 6, 2025
f243a6e
reformat
pctablet505 Oct 6, 2025
d8236fa
Update litert_test.py
pctablet505 Oct 6, 2025
83577be
Update litert_test.py
pctablet505 Oct 6, 2025
c53b264
Update litert_test.py
pctablet505 Oct 6, 2025
487184d
Update litert_test.py
pctablet505 Oct 7, 2025
374d90b
Update requirements-tensorflow-cuda.txt
pctablet505 Oct 7, 2025
6a5597d
Merge branch 'keras-team:master' into master
pctablet505 Oct 13, 2025
e843f7e
Merge branch 'keras-team:master' into export
pctablet505 Oct 13, 2025
f99a103
Add litert_kwargs support to LiteRT export
pctablet505 Oct 13, 2025
d01a4cb
Update model.py
pctablet505 Oct 13, 2025
52440e1
Refactor LiteRT export wrapper and test setup
pctablet505 Oct 14, 2025
794d85d
Update export_utils.py
pctablet505 Oct 14, 2025
7a46f78
Replace print statements with io_utils.print_msg and logging
pctablet505 Oct 14, 2025
d2b90eb
typo fix
pctablet505 Oct 16, 2025
191f802
set verbose to True by default
pctablet505 Oct 16, 2025
b736ede
removed unnecessary variable
pctablet505 Oct 16, 2025
27f1d07
Rename LitertExporter to LiteRTExporter
pctablet505 Oct 21, 2025
17dccf2
Update litert.py
pctablet505 Oct 21, 2025
3e16ab3
Update export_utils.py
pctablet505 Oct 22, 2025
efbc6d3
Fix input signature inference and doc formatting
pctablet505 Oct 22, 2025
7825983
Update export_utils.py
pctablet505 Oct 22, 2025
676a53c
Update litert.py
pctablet505 Oct 22, 2025
4b6386e
Update litert.py
pctablet505 Oct 22, 2025
79f05c8
Update litert_test.py
pctablet505 Oct 22, 2025
a22eb65
Update litert.py
pctablet505 Oct 22, 2025
315f7f6
Merge branch 'keras-team:master' into master
pctablet505 Oct 25, 2025
4efae3e
Merge branch 'keras-team:master' into export
pctablet505 Oct 25, 2025
f019a0a
Add support for extra TFLite converter settings via kwargs
pctablet505 Oct 27, 2025
5067904
Merge branch 'export' of https://github.com/pctablet505/keras into ex…
pctablet505 Oct 27, 2025
1c8dbcd
Update litert.py
pctablet505 Oct 27, 2025
ff4a81e
Update .gitignore from master
pctablet505 Oct 27, 2025
bcd965b
Merge branch 'keras-team:master' into export
pctablet505 Oct 28, 2025
022cce8
Add LiteRT export optimization tests and update doc
pctablet505 Oct 29, 2025
820f73b
Merge branch 'export' of https://github.com/pctablet505/keras into ex…
pctablet505 Oct 29, 2025
85e878b
Add tests for LiteRT AOT compilation support
pctablet505 Oct 31, 2025
1005063
Update LiteRT export backend check in Model
pctablet505 Oct 31, 2025
c984a6b
Update litert_test.py
pctablet505 Oct 31, 2025
809f6bc
Merge branch 'keras-team:master' into export
pctablet505 Oct 31, 2025
30e4cdd
Merge branch 'keras-team:master' into master
pctablet505 Oct 31, 2025
65dc0f9
Update litert.py
pctablet505 Nov 2, 2025
dd1cfbd
Fix model call with multiple positional arguments
pctablet505 Nov 2, 2025
4bf2e80
Add comprehensive SignatureDef tests for LiteRT export
pctablet505 Nov 4, 2025
4773089
Improve error reporting for AOT compilation failure
pctablet505 Nov 5, 2025
d98cca1
Add support for dictionary model inputs in LiteRTExporter
pctablet505 Nov 12, 2025
11bb4be
Add tests for dict input adapter in LiteRT export
pctablet505 Nov 12, 2025
0f9f214
Update litert_test.py
pctablet505 Nov 12, 2025
ddf911f
Simplify LiteRT export error and remove verbose param
pctablet505 Nov 12, 2025
2a46ab3
Update litert.py
pctablet505 Nov 12, 2025
26ac160
Remove model build step from LiteRTExporter
pctablet505 Nov 13, 2025
7c5cb3f
Remove LiteRT AOT compilation support
pctablet505 Nov 13, 2025
537880f
Refactor import statements in litert.py
pctablet505 Nov 13, 2025
211b44d
Raise ValueError for invalid TFLite file extension
pctablet505 Nov 13, 2025
4199c69
Refactor tracked collection conversion and add _get_save_spec
pctablet505 Nov 17, 2025
d376afb
Refactor TFLite conversion fallback and add verbose option
pctablet505 Nov 17, 2025
66acb8f
Remove verbose argument from LiteRT export functions
pctablet505 Nov 17, 2025
3c2a4be
Enable resource variables for TFLite conversion
pctablet505 Nov 17, 2025
071c819
Remove unused _has_dict_inputs method from LiteRTExporter
pctablet505 Nov 17, 2025
f17422c
Refactor converter kwargs handling in LiteRTExporter
pctablet505 Nov 17, 2025
a550fcc
Update litert.py
pctablet505 Nov 17, 2025
b8267c6
Remove SavedModel fallback in TFLite conversion
pctablet505 Nov 18, 2025
46ead2f
Refactor to use local keras layers and models imports
pctablet505 Nov 18, 2025
b523552
Refactor input signature handling for TFLite export
pctablet505 Nov 27, 2025
ada71de
Add ai-edge-litert to requirements.txt
pctablet505 Nov 27, 2025
00088c9
Fix input signature handling in LiteRTExporter
pctablet505 Nov 27, 2025
42407e8
Merge master into export: Sync with upstream Keras changes
pctablet505 Nov 27, 2025
30deea8
Make batch dimension adjustment consistent in make_tf_tensor_spec
pctablet505 Dec 3, 2025
2f64e9a
Merge upstream/master: Sync with latest Keras changes
pctablet505 Dec 3, 2025
0abb53d
Merge branch 'keras-team:master' into export
pctablet505 Dec 8, 2025
04cf978
Merge branch 'keras-team:master' into master
pctablet505 Dec 8, 2025
161f420
Update export API to use grouped kwargs for format-specific options
pctablet505 Dec 10, 2025
c3792ef
Merge origin/master into export branch - resolve kwargs API conflicts
pctablet505 Dec 10, 2025
80d2743
Merge branch 'export' of https://github.com/pctablet505/keras into ex…
pctablet505 Dec 10, 2025
babf7da
Merge branch 'keras-team:master' into export
pctablet505 Dec 10, 2025
42 changes: 28 additions & 14 deletions keras/src/export/litert_test.py
@@ -470,7 +470,7 @@ def test_export_with_optimizations_default(self):
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.DEFAULT],
litert_kwargs={"optimizations": [tensorflow.lite.Optimize.DEFAULT]},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -501,7 +501,11 @@ def test_export_with_optimizations_sparsity(self):
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.EXPERIMENTAL_SPARSITY],
litert_kwargs={
"optimizations": [
tensorflow.lite.Optimize.EXPERIMENTAL_SPARSITY
]
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -532,7 +536,9 @@ def test_export_with_optimizations_size(self):
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.OPTIMIZE_FOR_SIZE],
litert_kwargs={
"optimizations": [tensorflow.lite.Optimize.OPTIMIZE_FOR_SIZE]
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -562,7 +568,9 @@ def test_export_with_optimizations_latency(self):
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.OPTIMIZE_FOR_LATENCY],
litert_kwargs={
"optimizations": [tensorflow.lite.Optimize.OPTIMIZE_FOR_LATENCY]
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -592,10 +600,12 @@ def test_export_with_multiple_optimizations(self):
model.export(
temp_filepath,
format="litert",
optimizations=[
tensorflow.lite.Optimize.DEFAULT,
tensorflow.lite.Optimize.EXPERIMENTAL_SPARSITY,
],
litert_kwargs={
"optimizations": [
tensorflow.lite.Optimize.DEFAULT,
tensorflow.lite.Optimize.EXPERIMENTAL_SPARSITY,
]
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -627,8 +637,10 @@ def representative_dataset():
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.DEFAULT],
representative_dataset=representative_dataset,
litert_kwargs={
"optimizations": [tensorflow.lite.Optimize.DEFAULT],
"representative_dataset": representative_dataset,
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -671,9 +683,11 @@ def representative_dataset():
model.export(
temp_filepath,
format="litert",
optimizations=[tensorflow.lite.Optimize.DEFAULT],
representative_dataset=representative_dataset,
experimental_new_quantizer=True,
litert_kwargs={
"optimizations": [tensorflow.lite.Optimize.DEFAULT],
"representative_dataset": representative_dataset,
"experimental_new_quantizer": True,
},
)
self.assertTrue(os.path.exists(temp_filepath))

@@ -709,7 +723,7 @@ def test_export_optimization_file_size_comparison(self):
model.export(
filepath_with_opt,
format="litert",
optimizations=[tensorflow.lite.Optimize.DEFAULT],
litert_kwargs={"optimizations": [tensorflow.lite.Optimize.DEFAULT]},
)

# Optimized model should be smaller
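The test updates above all follow the same pattern: TFLite converter options that used to be passed directly to `model.export()` (`optimizations`, `representative_dataset`, `experimental_new_quantizer`, ...) are now grouped into a single `litert_kwargs` dict. A minimal end-to-end sketch of that usage, assuming this PR's `litert_kwargs` argument; the model and file path below are illustrative:

```python
import numpy as np
import tensorflow as tf
import keras

# Small stand-in model; any built Keras model works the same way.
model = keras.Sequential(
    [keras.layers.Input(shape=(4,)), keras.layers.Dense(2)]
)

# Calibration data for post-training quantization.
def representative_dataset():
    for _ in range(10):
        yield [np.random.rand(1, 4).astype("float32")]

# Converter options travel together in `litert_kwargs` instead of as
# separate top-level keyword arguments.
model.export(
    "model.tflite",
    format="litert",
    litert_kwargs={
        "optimizations": [tf.lite.Optimize.DEFAULT],
        "representative_dataset": representative_dataset,
    },
)
```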
85 changes: 63 additions & 22 deletions keras/src/models/model.py
@@ -622,7 +622,10 @@ def export(
format="tf_saved_model",
verbose=None,
input_signature=None,
**kwargs,
saved_model_kwargs=None,
onnx_kwargs=None,
litert_kwargs=None,
openvino_kwargs=None,
):
"""Export the model as an artifact for inference.

@@ -640,27 +643,29 @@
`tf.TensorSpec`, `backend.KerasTensor`, or backend tensor. If
not provided, it will be automatically computed. Defaults to
`None`.
**kwargs: Additional keyword arguments.
- `is_static`: Optional `bool`. Specific to the JAX backend and
`format="tf_saved_model"`. Indicates whether `fn` is static.
Set to `False` if `fn` involves state updates (e.g., RNG
seeds and counters).
- `jax2tf_kwargs`: Optional `dict`. Specific to the JAX backend
and `format="tf_saved_model"`. Arguments for
`jax2tf.convert`. See the documentation for
saved_model_kwargs: Optional `dict`. Keyword arguments specific to
`format="tf_saved_model"`. Supported options:
- `is_static`: Optional `bool`. Specific to the JAX backend.
Indicates whether `fn` is static. Set to `False` if `fn`
involves state updates (e.g., RNG seeds and counters).
- `jax2tf_kwargs`: Optional `dict`. Specific to the JAX backend.
Arguments for `jax2tf.convert`. See the documentation for
[`jax2tf.convert`](
https://github.com/google/jax/blob/main/jax/experimental/jax2tf/README.md).
If `native_serialization` and `polymorphic_shapes` are not
provided, they will be automatically computed.
- `opset_version`: Optional `int`. Specific to `format="onnx"`.
An integer value that specifies the ONNX opset version.
- LiteRT-specific options: Optional keyword arguments specific
to `format="litert"`. These are passed directly to the
TensorFlow Lite converter and include options like
`optimizations`, `representative_dataset`,
`experimental_new_quantizer`, `allow_custom_ops`,
`enable_select_tf_ops`, etc. See TensorFlow Lite
documentation for all available options.
onnx_kwargs: Optional `dict`. Keyword arguments specific to
`format="onnx"`. Supported options:
- `opset_version`: Optional `int`. An integer value that
specifies the ONNX opset version.
litert_kwargs: Optional `dict`. Keyword arguments specific to
`format="litert"`. These are passed directly to the TensorFlow
Lite converter and include options like `optimizations`,
`representative_dataset`, `experimental_new_quantizer`,
`allow_custom_ops`, `enable_select_tf_ops`, etc. See
TensorFlow Lite documentation for all available options.
openvino_kwargs: Optional `dict`. Keyword arguments specific to
`format="openvino"`.

**Note:** This feature is currently supported only with TensorFlow, JAX
and Torch backends.
@@ -682,12 +687,34 @@
predictions = reloaded_artifact.serve(input_data)
```

With JAX backend, you can pass additional options via
`saved_model_kwargs`:

```python
# Export with JAX-specific options
model.export(
"path/to/location",
format="tf_saved_model",
saved_model_kwargs={
"is_static": True,
"jax2tf_kwargs": {"enable_xla": True}
}
)
```

Here's how to export an ONNX for inference.

```python
# Export the model as a ONNX artifact
model.export("path/to/location", format="onnx")

# Export with specific ONNX opset version
model.export(
"path/to/location",
format="onnx",
onnx_kwargs={"opset_version": 18}
)

# Load the artifact in a different process/environment
ort_session = onnxruntime.InferenceSession("path/to/location")
ort_inputs = {
@@ -702,6 +729,20 @@
# Export the model as a LiteRT artifact
model.export("path/to/location", format="litert")

# Export with quantization options
def representative_dataset():
for _ in range(100):
yield [sample_input_data]

model.export(
"path/to/location",
format="litert",
litert_kwargs={
"optimizations": [tf.lite.Optimize.DEFAULT],
"representative_dataset": representative_dataset
}
)

# Load the artifact in a different process/environment
interpreter = tf.lite.Interpreter(model_path="path/to/location")
interpreter.allocate_tensors()
@@ -736,30 +777,30 @@ def export(
filepath,
verbose,
input_signature=input_signature,
**kwargs,
**(saved_model_kwargs or {}),
)
elif format == "onnx":
export_onnx(
self,
filepath,
verbose,
input_signature=input_signature,
**kwargs,
**(onnx_kwargs or {}),
)
elif format == "openvino":
export_openvino(
self,
filepath,
verbose,
input_signature=input_signature,
**kwargs,
**(openvino_kwargs or {}),
)
elif format == "litert":
export_litert(
self,
filepath,
input_signature=input_signature,
**kwargs,
**(litert_kwargs or {}),
)

@classmethod
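The dispatch at the end of `export()` swaps the old catch-all `**kwargs` for one optional dict per format, unpacked with `**(d or {})` so an omitted dict behaves like "no extra options" while keeping `None` as the default (avoiding a mutable default argument). A stripped-down sketch of that pattern; the stub exporter functions below are hypothetical stand-ins, not the real Keras internals:

```python
# Hypothetical stand-ins for the real keras.src.export entry points.
def export_onnx(model, filepath, **kwargs):
    print("ONNX export options:", kwargs)

def export_litert(model, filepath, **kwargs):
    print("LiteRT export options:", kwargs)

def export(model, filepath, format="onnx", onnx_kwargs=None, litert_kwargs=None):
    # Each format gets its own optional dict; `or {}` turns a missing dict
    # into an empty unpack so the exporter sees no extra options.
    if format == "onnx":
        export_onnx(model, filepath, **(onnx_kwargs or {}))
    elif format == "litert":
        export_litert(model, filepath, **(litert_kwargs or {}))
    else:
        raise ValueError(f"Unsupported format: {format}")

export(None, "model.onnx", format="onnx", onnx_kwargs={"opset_version": 18})
export(None, "model.tflite", format="litert")  # litert_kwargs omitted
```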