From 5583fa9986165c0a765429283ef06f3bad11c58c Mon Sep 17 00:00:00 2001 From: Venkata Sai Madhur Karampudi Date: Thu, 3 Apr 2025 22:22:55 +0000 Subject: [PATCH 1/9] Update 0.48.0 in version.py and RELEASE.md --- RELEASE.md | 571 +-------------------------------------- struct2tensor/version.py | 2 +- 2 files changed, 2 insertions(+), 571 deletions(-) diff --git a/RELEASE.md b/RELEASE.md index 0985ad6..4f5140e 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -1,16 +1,4 @@ -# `struct2tensor` release notes - -## Current Version (not yet released; still in development) - -## Major Features and Improvements - -## Bug Fixes and Other Changes - -## Breaking Changes - -## Deprecations - -## Version 0.48.0 +# Version 0.48.0 ## Major Features and Improvements @@ -30,560 +18,3 @@ * N/A -## Version 0.47.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Bumped the minimum bazel version required to build `struct2tensor` to 6.5.0. -* Depends on `tensorflow 2.16.2`. -* Relax dependency on Protobuf to include version 5.x - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.46.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Bumped the Ubuntu version on which `struct2tensor` is tested to 20.04 - (previously was 16.04). -* Depends on `tensorflow 2.15`. -* Bumped the minimum bazel version required to build `struct2tensor` to 6.1.0. -* Depends on `protobuf>=4.25.2,<5` for Python 3.11 and on `protobuf>3.20.3,<5` - for 3.9 and 3.10. - -## Breaking Changes - -* N/A - -## Deprecations - -* Deprecated python 3.8 support. - -## Version 0.45.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `pyarrow>=10,<11`. -* Depends on `numpy>=1.22.0`. -* Depends on `tensorflow>=2.13.0,<3`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.44.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Introduced an argument to disable path step validation. -* Depends on `tensorflow>=2.12.0,<2.13`. -* Depends on `protobuf>=3.20.3,<5`. - -## Breaking Changes - -* N/A - -## Deprecations - -* Deprecated python 3.7 support. - -## Version 0.43.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.11.0,<2.12`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.42.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Updates bundled `arrow` version to 6.0.1. -* Depends on `tensorflow>=2.10.0,<2.11`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.41.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `pyarrow>=6,<7`. -* Depends on `tensorflow-metadata>=1.10.0,<1.11.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.40.0 - -## Major Features and Improvements - -* Linux wheels now specifically conform to - [manylinux2014](https://peps.python.org/pep-0599/), an upgrade from - manylinux2010. This is aligned with TensorFlow 2.9 requirement. - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.9.0,<2.10`. -* Depends on `tensorflow-metadata>=1.9.0,<1.10.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.39.0 - -## Major Features and Improvements - -* From this version we will be releasing python 3.9 wheels. - -## Bug Fixes and Other Changes - -* Depends on `tensorflow-metadata>=1.8.0,<1.9.0`. 
- -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.38.0 - -## Major Features and Improvements - -* Added equi_join_any_indices_op. -* Added broadcast for subtrees. - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.8.0,<2.9`. -* Depends on `tensorflow-metadata>=1.7.0,<1.8.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.37.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `tensorflow-metadata>=1.6.0,<1.7.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.36.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.7.0,<2.8`. -* Depends on `tensorflow-metadata>=1.5.0,<1.6.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.35.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Fix bug in which expression.apply_schema mutated its input schema - -## Breaking Changes - -* N/A - -## Deprecations - -* Deprecated python 3.6 support. - -## Version 0.34.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.6.0,<2.7`. -* Depends on `pyarrow>=1,<6`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.33.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Added doc with benchmark numbers. Also added the benchmark code and test - data. -* Depends on `tensorflow-metadata>=1.2.0,<1.3.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.32.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `protobuf>=3.13,<4`. -* Depends on `tensorflow-metadata>=1.1.0,<1.2.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Version 0.31.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Introduced DecodeProtoSparseV4. It is same as V3 and will replace V3 soon. -* DecodeProtoSparseV3 is now the default (instead of V2). -* Bumped tf version for statically linked libraries to TF 2.5.0. -* Depends on `tensorflow>=2.5.0,<2.6`. -* Depends on `tensorflow-metadata>=1.0.0,<1.1.0`. - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.30.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Deprecate `get_ragged_tensors()` and `get_sparse_tensors()` in prensor_util. -* Expose `get_ragged_tensors()` and `get_sparse_tensors()` as `Prensor` - methods. -* Expose `get_positional_index` as a method of `NodeTensor`. -* Depends on `tensorflow-metadata>=0.30,<0.31` - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.29.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Allow path to concat with string. -* Bumped the minimum bazel version required to build `struct2tensor` to 3.7.2. 
-* Depends on `tensorflow-metadata>=0.29,<0.30` - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.28.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `tensorflow-metadata>=0.28,<0.29` - -## Breaking Changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.27.0 - -## Major Features and Improvements - -* N/A - -## Bug Fixes and Other Changes - -* Depends on `pyarrow>=1,<3` -* Depends on `tensorflow>=2.4.0,<2.5` -* Depends on `tensorflow-metadata>=0.27,<0.28` - -## Breaking changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.26.0 - -## Major Features and Improvements - -* Created a docker image that contains a TF model server with struct2tensor - ops linked. This docker image is available at - `gcr.io/tfx-oss-public/s2t_tf_serving` . -* Add support for string_views for intermediate serialized protos. To use, set - the option "use_string_view" in CalculateOptions to true. string_views are - potentially more memory bandwidth efficient depending on the depth and - complexity of the input proto. - -## Bug Fixes and Other Changes - -* Depends on `tensorflow-metadata>=0.26,<0.27`. - -## Breaking changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.25.0 - -## Major Features and Improvements - -* From this release Struct2Tensor will also be hosting nightly packages on - https://pypi-nightly.tensorflow.org. To install the nightly package use the - following command: - - ``` - pip install --extra-index-url https://pypi-nightly.tensorflow.org/simple struct2tensor - ``` - - Note: These nightly packages are unstable and breakages are likely to - happen. The fix could often take a week or more depending on the complexity - involved for the wheels to be available on the PyPI cloud service. You can - always use the stable version of struct2tensor available on PyPI by running - the command `pip install struct2tensor` . - -## Bug Fixes and Other Changes - -* Update __init__.py to import the API, instead of just the modules. -* Provide an __init__.py for struct2tensor.expression_impl directory. This is - meant for power users. -* Update python notebook to use import style. -* Fix bug in prensor_to_structured_tensor. -* Depends on `tensorflow-metadata>=0.25,<0.26`. -* Depends on `pyarrow>=0.17,<1`. - -## Breaking changes - -* N/A - -## Deprecations - -* N/A - -## Release 0.24.0 - -## Major Features and Improvements - -* Add support for converting prensor to `StructuredTensor`. - -## Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.3.0,<2.4` -* Depends on `tensorflow-metadata>=0.24,<0.25` - -## Breaking changes - -* N/A - -## Deprecations - -* Deprecated py3.5 support - -## Release 0.23.0 - -### Major Features and Improvements - -* Add promote for substructures. -* Add support for converting `StructuredTensor` to prensor. - -### Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.3.0,<2.4` -* Depends on `tensorflow-metadata>=0.23,<0.24` - -### Breaking Changes - -* Drop snappy support for parquet dataset. - -### Deprecations - -* Deprecating Py2 support. - -## Release 0.22.0 - -### Major Features and Improvements - -### Bug Fixes and Other Changes - -* Depends on `tensorflow>=2.2.0,<2.3 - -### Breaking Changes - -### Deprecations - -## Release 0.21.1 - -### Major Features and Improvements - -* Bumped Tensorflow version for statically linked libraries from 1.5 to 2.1. - -### Bug Fixes and Other Changes - -* Added tests for statically linked libraries. -* Statically linked libraries build now. 
- -### Breaking Changes - -### Deprecations - -## Release 0.21.0 - -### Major Features and Improvements - -* Parquet dataset that can apply expressions to a parquet schema, allowing for - reading data from IO to tensors directly. - -### Bug Fixes and Other Changes - -* Now requires tensorflow>=2.1.0,<2.2. - -### Breaking Changes - -### Deprecations - -## Release 0.0.1dev6 - -* Initial release of struct2tensor. diff --git a/struct2tensor/version.py b/struct2tensor/version.py index b61835f..5a1fe67 100644 --- a/struct2tensor/version.py +++ b/struct2tensor/version.py @@ -15,4 +15,4 @@ """Contains the version string of struct2tensor.""" # Note that setup.py uses this version. -__version__ = '0.49.0.dev' +__version__ = '0.48.0' From c752a3c50555279527cdcdfe592fb6decc5d6693 Mon Sep 17 00:00:00 2001 From: Madhur Karampudi <142544288+vkarampudi@users.noreply.github.com> Date: Fri, 2 Jan 2026 13:42:26 -0800 Subject: [PATCH 2/9] Update WORKSPACE --- WORKSPACE | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index ec9027d..3dad9d5 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -41,8 +41,8 @@ tf_configure(name = "local_config_tf") # 3. Request the new archive to be mirrored on mirror.bazel.build for more # reliable downloads. -_TENSORFLOW_GIT_COMMIT = "5bc9d26649cca274750ad3625bd93422617eed4b" # tf 2.16.1 -_TENSORFLOW_ARCHIVE_SHA256 = "fe592915c85d1a89c20f3dd89db0772ee22a0fbda78e39aa46a778d638a96abc" +_TENSORFLOW_GIT_COMMIT = "3c92ac03cab816044f7b18a86eb86aa01a294d95" # tf 2.17.1 +_TENSORFLOW_ARCHIVE_SHA256 = "317dd95c4830a408b14f3e802698eb68d70d81c7c7cfcd3d28b0ba023fe84a68" http_archive( name = "org_tensorflow", From 3edf2c8365c4d5e9aabd443f209993b5abb6c05b Mon Sep 17 00:00:00 2001 From: Madhur Karampudi <142544288+vkarampudi@users.noreply.github.com> Date: Fri, 2 Jan 2026 14:12:28 -0800 Subject: [PATCH 3/9] Update workspace.bzl --- struct2tensor/workspace.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/struct2tensor/workspace.bzl b/struct2tensor/workspace.bzl index a753abf..8043edf 100644 --- a/struct2tensor/workspace.bzl +++ b/struct2tensor/workspace.bzl @@ -63,10 +63,10 @@ def struct2tensor_workspace(): urls = ["https://github.com/apache/arrow/archive/%s.zip" % ARROW_COMMIT], ) - _TFMD_COMMIT_HASH = "e0f569f3b1039b6a51e9156bf323f677a026e537" # 1.17.0 + _TFMD_COMMIT_HASH = "404805761e614561cceedc429e67c357c62be26d" # 1.17.0 http_archive( name = "com_github_tensorflow_metadata", - sha256 = "24e498b5030062e7836eabf2fde93664e27054a162df5f43a7934a22bda24153", + sha256 = "9abfe4019f33ff067438ce69053fe63fc2e8dde5192aa7cf30d501809e45c18c", strip_prefix = "metadata-%s" % _TFMD_COMMIT_HASH, urls = [ "https://github.com/tensorflow/metadata/archive/%s.tar.gz" % _TFMD_COMMIT_HASH, From 631b669e23205df98d2b0f458862f9f9cadfc709 Mon Sep 17 00:00:00 2001 From: Gagandeep Singh Date: Tue, 20 Jan 2026 00:15:56 +0530 Subject: [PATCH 4/9] Migrate to Protobuf 4.23.4: custom Bazel rule for Python proto generation Protobuf 4.x removed py_proto_library and changed proto_library behavior. 
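At its core, the replacement is a Starlark rule that declares one `_pb2.py` output per `.proto` source, invokes `protoc` itself, and returns the generated files as `PyInfo`. A minimal sketch of that pattern, with illustrative names (`gen_py_pb2` is not the rule added below, which instead takes a `deps` label list and also handles external `--proto_path` entries and `PyInfo` passthrough):

```
def _gen_py_pb2_impl(ctx):
    proto_info = ctx.attr.proto[ProtoInfo]
    outs = []
    for src in proto_info.direct_sources:
        # foo.proto -> foo_pb2.py
        outs.append(ctx.actions.declare_file(src.basename[:-len(".proto")] + "_pb2.py"))
    ctx.actions.run(
        inputs = proto_info.transitive_sources,
        outputs = outs,
        executable = ctx.executable._protoc,
        arguments = ["--python_out=" + ctx.bin_dir.path, "--proto_path=."] +
                    [src.short_path for src in proto_info.direct_sources],
        mnemonic = "ProtocPython",
    )
    return [
        DefaultInfo(files = depset(outs)),
        PyInfo(transitive_sources = depset(outs), imports = depset([ctx.bin_dir.path])),
    ]

gen_py_pb2 = rule(
    implementation = _gen_py_pb2_impl,
    attrs = {
        "proto": attr.label(providers = [ProtoInfo]),
        "_protoc": attr.label(
            default = "@com_google_protobuf//:protoc",
            executable = True,
            cfg = "exec",
        ),
    },
)
```
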
This commit: - Creates _py_proto_library_rule: a custom Bazel rule that accepts ProtoInfo or PyInfo, runs protoc to generate Python _pb2.py files, and provides PyInfo for Python deps - Replaces deprecated py_proto_library calls with custom rule implementation - Configures protoc proto_path to include workspace dirs and external dependencies - Adds local any.proto copies to bypass sandbox limitations in proto imports - Applies compatibility patches to TensorFlow and TensorFlow Metadata dependencies - Updates all s2t_proto_library_py calls to work with new implementation Fixes: Protobuf 4.23.4 compatibility for struct2tensor build system --- WORKSPACE | 17 ++ google/protobuf/any.proto | 47 ++++ struct2tensor/BUILD | 2 +- struct2tensor/proto/BUILD | 5 +- struct2tensor/struct2tensor.bzl | 265 ++++++++++++++++++++-- struct2tensor/test/BUILD | 15 +- struct2tensor/test/any.proto | 47 ++++ struct2tensor/test/test_any.proto | 2 +- struct2tensor/workspace.bzl | 5 +- third_party/BUILD | 1 + third_party/README.md | 28 +++ third_party/tensorflow.patch | 136 +++++++++++ third_party/tfmd_protobuf_downgrade.patch | 58 +++++ 13 files changed, 594 insertions(+), 34 deletions(-) create mode 100644 google/protobuf/any.proto create mode 100644 struct2tensor/test/any.proto create mode 100644 third_party/README.md create mode 100644 third_party/tensorflow.patch create mode 100644 third_party/tfmd_protobuf_downgrade.patch diff --git a/WORKSPACE b/WORKSPACE index 3dad9d5..39fe82a 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -51,6 +51,8 @@ http_archive( "https://github.com/tensorflow/tensorflow/archive/%s.tar.gz" % _TENSORFLOW_GIT_COMMIT, ], strip_prefix = "tensorflow-%s" % _TENSORFLOW_GIT_COMMIT, + patches = ["//third_party:tensorflow.patch"], + patch_args = ["-p1"], ) load("//third_party:python_configure.bzl", "local_python_configure") @@ -62,6 +64,21 @@ local_python_configure(name = "local_execution_config_python") load("//struct2tensor:workspace.bzl", "struct2tensor_workspace") struct2tensor_workspace() +# ===== Protobuf 4.23.4 dependency ===== +# Must be declared BEFORE TensorFlow's workspaces to override the version they pull +http_archive( + name = "com_google_protobuf", + sha256 = "5f3cd52d6e5062071d99da57a96ea87e39bc020d6d25748001d919c474a4d8ed", + strip_prefix = "protobuf-4.23.4", + urls = [ + "https://github.com/protocolbuffers/protobuf/archive/v4.23.4.tar.gz", + ], +) + +# Load Protobuf dependencies +load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") +protobuf_deps() + # Initialize TensorFlow's external dependencies. load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3") tf_workspace3() diff --git a/google/protobuf/any.proto b/google/protobuf/any.proto new file mode 100644 index 0000000..101b575 --- /dev/null +++ b/google/protobuf/any.proto @@ -0,0 +1,47 @@ +// Protocol Buffers - Google's data interchange format +// Minimal local copy to satisfy imports for struct2tensor tests. +// Source reference: https://github.com/protocolbuffers/protobuf/blob/v3.21.9/src/google/protobuf/any.proto + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// "Any" contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. 
+// +// The protocol buffer library provides support to pack/unpack Any values in the +// form of utility APIs. Note that any API that accepts or returns an Any value +// will perform type checking and packing/unpacking on the fly. Therefore, if you +// need to convert between a known proto type and Any, use the library helpers +// rather than manipulating this message directly. +// +// Example usage: +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is()) { +// Foo foo2 = any.unpack(); +// } +// +// JSON +// ==== +// The JSON representation of an Any value uses the following format: +// +// { "@type": , "value": } +// +// where the value field depends on the type_url. +message Any { + // A URL/resource name uniquely identifying the type of the serialized + // protocol buffer message. + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/struct2tensor/BUILD b/struct2tensor/BUILD index 573e6e6..0184e3a 100644 --- a/struct2tensor/BUILD +++ b/struct2tensor/BUILD @@ -83,7 +83,7 @@ s2t_pytype_library( "path.py", ], deps = [ - "@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:py_metadata_v0_proto_py", + "@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:metadata_v0_proto_py_pb2", "@com_google_protobuf//:protobuf_python", ], ) diff --git a/struct2tensor/proto/BUILD b/struct2tensor/proto/BUILD index 63521ac..8e26d8d 100644 --- a/struct2tensor/proto/BUILD +++ b/struct2tensor/proto/BUILD @@ -9,7 +9,7 @@ licenses(["notice"]) s2t_proto_library( name = "query_metadata_proto", srcs = ["query_metadata.proto"], - deps = ["@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:cc_metadata_v0_proto_cc"], + deps = ["@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:metadata_v0_proto"], ) s2t_proto_library_cc( @@ -19,10 +19,9 @@ s2t_proto_library_cc( s2t_proto_library_py( name = "query_metadata_py_pb2", - srcs = ["query_metadata.proto"], api_version = 2, oss_deps = [ - "@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:py_metadata_v0_proto_py", + "@com_github_tensorflow_metadata//tensorflow_metadata/proto/v0:metadata_v0_proto_py_pb2", ], proto_library = "query_metadata_proto", ) diff --git a/struct2tensor/struct2tensor.bzl b/struct2tensor/struct2tensor.bzl index 07cab55..4e92d91 100644 --- a/struct2tensor/struct2tensor.bzl +++ b/struct2tensor/struct2tensor.bzl @@ -14,7 +14,182 @@ """Bazel macros used in OSS.""" -load("@com_google_protobuf//:protobuf.bzl", "cc_proto_library", "py_proto_library") +def _py_proto_library_impl(ctx): + """Implementation of py_proto_library rule.""" + proto_deps = ctx.attr.deps + + # Separate proto and Python dependencies + all_sources = [] + py_infos = [] + + for dep in proto_deps: + if ProtoInfo in dep: + # It's a proto_library - collect proto sources + all_sources.extend(dep[ProtoInfo].direct_sources) + elif PyInfo in dep: + # It's already a py_library - collect its PyInfo for passthrough + py_infos.append(dep[PyInfo]) + + # Filter to only include sources from the workspace (not external packages) + # We can only declare outputs in our own package + workspace_sources = [] + for src in all_sources: + # Filter out external sources (they start with external/ or ..) 
+ if not src.short_path.startswith("external/") and not src.short_path.startswith("../"): + workspace_sources.append(src) + + # Generate Python output files from proto sources + py_outputs = [] + for proto_src in workspace_sources: + # Use just the basename to avoid path issues + basename = proto_src.basename[:-6] # Remove .proto + py_file = ctx.actions.declare_file(basename + "_pb2.py") + py_outputs.append(py_file) + + if py_outputs: + # Build proto_path arguments for protoc + # We need to include paths for workspace root and external dependencies + proto_path_args = [] + + # Add current directory to find workspace proto files + proto_path_args.append("--proto_path=.") + + # Collect proto_path entries from all transitive dependencies + # Use dictionary as a set (Starlark doesn't have set type) + proto_paths = {".": True} + + # Also add directories of workspace sources so imports like "any.proto" + # (in the same folder) resolve correctly. + for ws in workspace_sources: + ws_dir = "/".join(ws.short_path.split("/")[:-1]) + if ws_dir and ws_dir not in proto_paths: + proto_paths[ws_dir] = True + proto_path_args.append("--proto_path=" + ws_dir) + + for dep in proto_deps: + if ProtoInfo in dep: + # Add proto_source_root if available + if hasattr(dep[ProtoInfo], 'proto_source_root'): + root = dep[ProtoInfo].proto_source_root + if root and root not in proto_paths: + proto_paths[root] = True + proto_path_args.append("--proto_path=" + root) + + # Also derive from file paths for more coverage + for src in dep[ProtoInfo].transitive_sources.to_list(): + # Use the directory containing the proto file's import root + # For external/com_google_protobuf/src/google/protobuf/any.proto, + # we want external/com_google_protobuf/src + if src.path.startswith("external/com_google_protobuf/"): + proto_path = "external/com_google_protobuf/src" + if proto_path not in proto_paths: + proto_paths[proto_path] = True + proto_path_args.append("--proto_path=" + proto_path) + elif src.path.startswith("external/"): + # For other external repos like tensorflow_metadata + # Extract external/repo_name + parts = src.path.split("/") + if len(parts) >= 2: + proto_path = "/".join(parts[:2]) + if proto_path not in proto_paths: + proto_paths[proto_path] = True + proto_path_args.append("--proto_path=" + proto_path) + + # Also add Bazel root paths + if src.root.path and src.root.path not in proto_paths: + proto_paths[src.root.path] = True + proto_path_args.append("--proto_path=" + src.root.path) + +# Build list of proto file paths - only include workspace sources + proto_file_args = [] + for src in workspace_sources: + proto_file_args.append(src.short_path) + + # Run protoc to generate Python files + # Use ctx.bin_dir.path as the output directory root + output_root = ctx.bin_dir.path + + ctx.actions.run( + # Include workspace sources plus all transitive dependencies for imports + inputs = depset(direct = workspace_sources, transitive = [ + dep[ProtoInfo].transitive_sources for dep in proto_deps if ProtoInfo in dep + ]), + outputs = py_outputs, + executable = ctx.executable._protoc, + arguments = [ + "--python_out=" + output_root, + ] + proto_path_args + proto_file_args, + mnemonic = "ProtocPython", + ) + + # Collect transitive sources from both generated files and Python deps + all_transitive_sources = [depset(py_outputs)] + all_imports = [depset([ctx.bin_dir.path])] if py_outputs else [] + + for py_info in py_infos: + all_transitive_sources.append(py_info.transitive_sources) + if hasattr(py_info, 'imports'): + 
all_imports.append(py_info.imports) + + # Return PyInfo provider so this can be used as a py_library dependency + # Merge proto-generated files with passthrough Python dependencies + return [ + DefaultInfo(files = depset(py_outputs)), + PyInfo( + transitive_sources = depset(transitive = all_transitive_sources), + imports = depset(transitive = all_imports), + has_py2_only_sources = False, + has_py3_only_sources = True, + ), + ] + +_py_proto_library_rule = rule( + implementation = _py_proto_library_impl, + attrs = { + "deps": attr.label_list( + providers = [[ProtoInfo], [PyInfo]], # Accept either ProtoInfo OR PyInfo + doc = "Proto library or Python library dependencies", + ), + "_protoc": attr.label( + default = "@com_google_protobuf//:protoc", + executable = True, + cfg = "exec", + ), + }, + provides = [PyInfo], +) + +# Wrapper for cc_proto_library to maintain compatibility with old Protobuf 3.x API +def cc_proto_library( + name, + srcs = [], + deps = [], + cc_libs = [], + protoc = None, + default_runtime = None, + use_grpc_plugin = None, + testonly = 0, + visibility = None, + **kwargs): + """Wrapper for cc_proto_library that works with Protobuf 4.x.""" + _ignore = [cc_libs, protoc, default_runtime, use_grpc_plugin, kwargs] + + # Create proto_library first + native.proto_library( + name = name + "_proto", + srcs = srcs, + deps = [d + "_proto" if not d.startswith("@") else d for d in deps], + testonly = testonly, + visibility = visibility, + ) + + # Create cc_proto_library that depends on proto_library + native.cc_proto_library( + name = name, + deps = [":" + name + "_proto"], + testonly = testonly, + visibility = visibility, + ) def s2t_pytype_library( name, @@ -22,7 +197,41 @@ def s2t_pytype_library( deps = [], srcs_version = "PY3ONLY", testonly = False): - native.py_library(name = name, srcs = srcs, deps = deps, testonly = testonly) + """Python library that automatically wraps proto_library deps with PyInfo. + + This wrapper wraps all dependencies with our custom py_proto_library_rule. + Dependencies that don't provide ProtoInfo will fail with a clear error. + Dependencies that do provide ProtoInfo (proto_library targets) will get PyInfo. 
+ """ + # Process dependencies to wrap them all with our custom rule + processed_deps = [] + for dep in deps: + # Skip protobuf_python - it's already a proper Python library + if dep == "@com_google_protobuf//:protobuf_python": + processed_deps.append(dep) + continue + + # Create a safe wrapper name for this dependency + safe_dep_name = dep.replace(":", "_").replace("//", "").replace("/", "_").replace("@", "").replace("-", "_").replace(".", "_") + wrapper_name = name + "_proto_wrapper_" + safe_dep_name + + # Wrap all dependencies with our custom py_proto_library rule + # If the dep provides ProtoInfo, this will work and provide PyInfo + # If it doesn't provide ProtoInfo, it will fail with a clear error + _py_proto_library_rule( + name = wrapper_name, + deps = [dep], + testonly = testonly, + ) + processed_deps.append(":" + wrapper_name) + + native.py_library( + name = name, + srcs = srcs, + deps = processed_deps, + testonly = testonly, + ) + def s2t_proto_library( name, @@ -50,18 +259,22 @@ def s2t_proto_library( testonly = testonly, ) - use_grpc_plugin = None - if cc_grpc_version: - use_grpc_plugin = True + # Create a native proto_library for Python generation + # This is needed by s2t_proto_library_py + proto_lib_deps = [d + "_proto" if not d.startswith("@") else d for d in deps] + native.proto_library( + name = name + "_proto", + srcs = srcs, + deps = proto_lib_deps, + visibility = visibility, + testonly = testonly, + ) - # TODO(martinz): replace with proto_library, when that works. - cc_proto_library( + # Create cc_proto_library that depends on the proto_library we just created + # Don't use our cc_proto_library wrapper to avoid duplicate proto_library creation + native.cc_proto_library( name = name, - srcs = srcs, - deps = deps, - cc_libs = ["@com_google_protobuf//:protobuf"], - protoc = "@com_google_protobuf//:protoc", - default_runtime = "@com_google_protobuf//:protobuf", + deps = [":" + name + "_proto"], testonly = testonly, visibility = visibility, ) @@ -167,15 +380,25 @@ def s2t_proto_library_cc( ) def s2t_proto_library_py(name, proto_library, srcs = [], deps = [], oss_deps = [], visibility = None, testonly = 0, api_version = None): - """Opensource py_proto_library.""" - _ignore = [proto_library, api_version] - py_proto_library( + """Opensource py_proto_library. + + Uses a custom rule implementation that properly generates Python from proto_library + and provides PyInfo for Python library dependencies. + + Note: s2t_proto_library creates {name}_proto for the proto_library, so we append _proto. 
+ """ + _ignore = [api_version, srcs, deps] + + if not proto_library: + fail("proto_library parameter is required for s2t_proto_library_py") + + # s2t_proto_library creates a proto_library named {name}_proto + # So we need to reference it correctly + actual_proto_library = ":" + proto_library + "_proto" + + # Use our custom py_proto_library rule + _py_proto_library_rule( name = name, - srcs = srcs, - srcs_version = "PY3ONLY", - deps = ["@com_google_protobuf//:well_known_types_py_pb2"] + oss_deps, - default_runtime = "@com_google_protobuf//:protobuf_python", - protoc = "@com_google_protobuf//:protoc", + deps = [actual_proto_library] + oss_deps, visibility = visibility, - testonly = testonly, ) diff --git a/struct2tensor/test/BUILD b/struct2tensor/test/BUILD index 5a545e1..9e38a21 100644 --- a/struct2tensor/test/BUILD +++ b/struct2tensor/test/BUILD @@ -26,7 +26,6 @@ s2t_proto_library_cc( s2t_proto_library_py( name = "test_py_pb2", - srcs = ["test.proto"], api_version = 2, proto_library = "test_proto", ) @@ -34,7 +33,8 @@ s2t_proto_library_py( s2t_proto_library( name = "test_any_proto", srcs = ["test_any.proto"], - deps = ["@com_google_protobuf//:cc_wkt_protos"], + # Use a local any.proto so protoc can read it inside sandbox + deps = [":any_local_proto"], ) s2t_proto_library_cc( @@ -44,11 +44,16 @@ s2t_proto_library_cc( s2t_proto_library_py( name = "test_any_py_pb2", - srcs = ["test_any.proto"], api_version = 2, proto_library = "test_any_proto", ) +# Local wrapper for any.proto so it becomes an input and importable +s2t_proto_library( + name = "any_local_proto", + srcs = ["any.proto"], +) + s2t_proto_library( name = "test_map_proto", srcs = ["test_map.proto"], @@ -61,7 +66,6 @@ s2t_proto_library_cc( s2t_proto_library_py( name = "test_map_py_pb2", - srcs = ["test_map.proto"], proto_library = "test_map_proto", ) @@ -72,7 +76,6 @@ s2t_proto_library( s2t_proto_library_py( name = "test_proto3_py_pb2", - srcs = ["test_proto3.proto"], api_version = 2, proto_library = "test_proto3_proto", ) @@ -85,7 +88,6 @@ s2t_proto_library( s2t_proto_library_py( name = "test_extension_py_pb2", - srcs = ["test_extension.proto"], oss_deps = [":test_py_pb2"], proto_library = "test_extension_proto", ) @@ -98,7 +100,6 @@ s2t_proto_library( s2t_proto_library_py( name = "dependent_test_py_pb2", - srcs = ["dependent_test.proto"], oss_deps = [":test_py_pb2"], proto_library = "dependent_test_proto", ) diff --git a/struct2tensor/test/any.proto b/struct2tensor/test/any.proto new file mode 100644 index 0000000..101b575 --- /dev/null +++ b/struct2tensor/test/any.proto @@ -0,0 +1,47 @@ +// Protocol Buffers - Google's data interchange format +// Minimal local copy to satisfy imports for struct2tensor tests. +// Source reference: https://github.com/protocolbuffers/protobuf/blob/v3.21.9/src/google/protobuf/any.proto + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// "Any" contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// The protocol buffer library provides support to pack/unpack Any values in the +// form of utility APIs. Note that any API that accepts or returns an Any value +// will perform type checking and packing/unpacking on the fly. 
Therefore, if you +// need to convert between a known proto type and Any, use the library helpers +// rather than manipulating this message directly. +// +// Example usage: +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is()) { +// Foo foo2 = any.unpack(); +// } +// +// JSON +// ==== +// The JSON representation of an Any value uses the following format: +// +// { "@type": , "value": } +// +// where the value field depends on the type_url. +message Any { + // A URL/resource name uniquely identifying the type of the serialized + // protocol buffer message. + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/struct2tensor/test/test_any.proto b/struct2tensor/test/test_any.proto index 732d235..fc67dc6 100644 --- a/struct2tensor/test/test_any.proto +++ b/struct2tensor/test/test_any.proto @@ -16,7 +16,7 @@ syntax = "proto3"; package struct2tensor.test; -import "google/protobuf/any.proto"; +import "any.proto"; message MessageWithAny { google.protobuf.Any my_any = 1; diff --git a/struct2tensor/workspace.bzl b/struct2tensor/workspace.bzl index 8043edf..8cce206 100644 --- a/struct2tensor/workspace.bzl +++ b/struct2tensor/workspace.bzl @@ -17,6 +17,7 @@ """struct2tensor external dependencies that can be loaded in WORKSPACE files.""" load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") def struct2tensor_workspace(): """All struct2tensor external dependencies.""" @@ -63,7 +64,7 @@ def struct2tensor_workspace(): urls = ["https://github.com/apache/arrow/archive/%s.zip" % ARROW_COMMIT], ) - _TFMD_COMMIT_HASH = "404805761e614561cceedc429e67c357c62be26d" # 1.17.0 + _TFMD_COMMIT_HASH = "404805761e614561cceedc429e67c357c62be26d" # 1.17.1 http_archive( name = "com_github_tensorflow_metadata", sha256 = "9abfe4019f33ff067438ce69053fe63fc2e8dde5192aa7cf30d501809e45c18c", @@ -71,4 +72,6 @@ def struct2tensor_workspace(): urls = [ "https://github.com/tensorflow/metadata/archive/%s.tar.gz" % _TFMD_COMMIT_HASH, ], + patches = ["//third_party:tfmd_protobuf_downgrade.patch"], + patch_args = ["-p1"], ) diff --git a/third_party/BUILD b/third_party/BUILD index e5433c8..331ebed 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -17,4 +17,5 @@ licenses(["notice"]) exports_files([ "python_configure.bzl", + "tfmd_protobuf_downgrade.patch", ]) diff --git a/third_party/README.md b/third_party/README.md new file mode 100644 index 0000000..9d6afd4 --- /dev/null +++ b/third_party/README.md @@ -0,0 +1,28 @@ +# Third Party Dependencies + +## TensorFlow Metadata Protobuf Downgrade Patch + +### Background + +TensorFlow Metadata v1.17.1 upgraded from Protobuf 3.21.9 to 4.25.6 in their Bazel build configuration. However, this creates compatibility issues with TensorFlow 2.17.1, which still uses an older Protobuf version range (`>=3.20.3,<5.0.0dev` with exclusions). + +### Solution + +To maintain compatibility with TensorFlow 2.17.1 while using TensorFlow Metadata v1.17.1, we apply a patch that reverts the Protobuf upgrade in TFMD back to version 3.21.9. + +The patch file `tfmd_protobuf_downgrade.patch` reverts the following changes from TFMD v1.17.1: + +1. 
**tensorflow_metadata/proto/v0/BUILD file**: + - Reverts proto library definitions to use Protobuf 3.x syntax + - Restores the old `cc_proto_library` and `py_proto_library` patterns + +### Usage + +The patch is automatically applied when building struct2tensor through the `patches` parameter in the `http_archive` rule for `com_github_tensorflow_metadata` in [workspace.bzl](../struct2tensor/workspace.bzl). + +No manual intervention is required. + +### References + +- [TensorFlow Metadata v1.17.0...v1.17.1 comparison](https://github.com/tensorflow/metadata/compare/v1.17.0...v1.17.1) +- [TensorFlow 2.17.1 dependencies](https://github.com/tensorflow/tensorflow/blob/v2.17.1/tensorflow/tools/pip_package/setup.py) diff --git a/third_party/tensorflow.patch b/third_party/tensorflow.patch new file mode 100644 index 0000000..cfa9116 --- /dev/null +++ b/third_party/tensorflow.patch @@ -0,0 +1,136 @@ +diff --git a/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl b/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl +index 35cdcdc50..df3badf2d 100644 +--- a/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl ++++ b/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl +@@ -1,7 +1,6 @@ + # Platform-specific build configurations. + + load("@com_github_grpc_grpc//bazel:generate_cc.bzl", "generate_cc") +-load("@com_google_protobuf//:protobuf.bzl", "proto_gen") + load( + "@local_xla//xla/tsl:tsl.bzl", + "clean_dep", +@@ -195,18 +194,15 @@ def cc_proto_library( + if protolib_name == None: + protolib_name = name + +- genproto_deps = ([s + "_genproto" for s in protolib_deps] + +- ["@com_google_protobuf//:cc_wkt_protos_genproto"]) + if internal_bootstrap_hack: + # For pre-checked-in generated files, we add the internal_bootstrap_hack + # which will skip the codegen action. +- proto_gen( ++ # Skip proto generation for internal bootstrap ++ ++ native.proto_library( + name = protolib_name + "_genproto", +- srcs = srcs, +- includes = includes, +- protoc = protoc, ++ deps = protolib_deps, + visibility = ["//visibility:public"], +- deps = genproto_deps, + ) + + # An empty cc_library to make rule dependency consistent. 
+@@ -227,18 +223,16 @@ def cc_proto_library( + gen_hdrs = _proto_cc_hdrs(srcs, use_grpc_plugin) + outs = gen_srcs + gen_hdrs + +- proto_gen( ++ native.proto_library( + name = protolib_name + "_genproto", + srcs = srcs, +- outs = outs, +- gen_cc = 1, +- includes = includes, +- plugin = grpc_cpp_plugin, +- plugin_language = "grpc", +- plugin_options = plugin_options, +- protoc = protoc, ++ deps = protolib_deps, + visibility = ["//visibility:public"], +- deps = genproto_deps, ++ ) ++ ++ native.cc_proto_library( ++ name = protolib_name + "_cc_genproto", ++ deps = [":" + protolib_name + "_genproto"], + ) + + if use_grpc_plugin: +@@ -265,7 +259,7 @@ def cc_proto_library( + + native.cc_library( + name = impl_name, +- srcs = gen_srcs, ++ srcs = [], + hdrs = gen_hdrs, + deps = cc_libs + deps, + includes = includes, +@@ -402,32 +396,18 @@ def py_proto_library( + genproto_deps = [] + for dep in deps: + if dep != "@com_google_protobuf//:protobuf_python": +- genproto_deps.append(dep + "_genproto") +- else: +- genproto_deps.append("@com_google_protobuf//:well_known_types_py_pb2_genproto") ++ genproto_deps.append(dep) + +- proto_gen( ++ native.proto_library( + name = name + "_genproto", + srcs = srcs, +- outs = outs, +- gen_py = 1, +- includes = includes, +- plugin = grpc_python_plugin, +- plugin_language = "grpc", +- protoc = protoc, +- visibility = ["//visibility:public"], + deps = genproto_deps, ++ visibility = ["//visibility:public"], + ) + +- if default_runtime and not default_runtime in py_libs + deps: +- py_libs = py_libs + [default_runtime] +- +- native.py_library( ++ native.py_proto_library( + name = name, +- srcs = outs + py_extra_srcs, +- deps = py_libs + deps, +- imports = includes, +- **kwargs ++ deps = [":" + name + "_genproto"], + ) + + def tf_proto_library_cc( +@@ -467,11 +447,10 @@ def tf_proto_library_cc( + if not srcs: + # This is a collection of sub-libraries. Build header-only and impl + # libraries containing all the sources. +- proto_gen( ++ native.proto_library( + name = name + "_genproto", +- protoc = "@com_google_protobuf//:protoc", +- visibility = ["//visibility:public"], + deps = [s + "_genproto" for s in protolib_deps], ++ visibility = ["//visibility:public"], + ) + + native.alias( +@@ -538,11 +517,10 @@ def tf_proto_library_py( + if not srcs: + # This is a collection of sub-libraries. Build header-only and impl + # libraries containing all the sources. 
+- proto_gen( ++ native.proto_library( + name = py_name + "_genproto", +- protoc = "@com_google_protobuf//:protoc", +- visibility = ["//visibility:public"], + deps = [s + "_genproto" for s in py_deps], ++ visibility = ["//visibility:public"], + ) + native.py_library( + name = py_name, diff --git a/third_party/tfmd_protobuf_downgrade.patch b/third_party/tfmd_protobuf_downgrade.patch new file mode 100644 index 0000000..c509daf --- /dev/null +++ b/third_party/tfmd_protobuf_downgrade.patch @@ -0,0 +1,58 @@ +diff --git a/tensorflow_metadata/proto/v0/BUILD b/tensorflow_metadata/proto/v0/BUILD +index 24cf416..9e426aa 100644 +--- a/tensorflow_metadata/proto/v0/BUILD ++++ b/tensorflow_metadata/proto/v0/BUILD +@@ -14,8 +14,6 @@ + # ============================================================================== + + +-load("@com_google_protobuf//bazel:py_proto_library.bzl", "py_proto_library") +- + licenses(["notice"]) # Apache 2.0 + + package(default_visibility = ["//visibility:public"]) +@@ -24,26 +22,20 @@ proto_library( + name = "metadata_v0_proto", + srcs = [ + "anomalies.proto", +- "derived_feature.proto", + "metric.proto", + "path.proto", + "problem_statement.proto", + "schema.proto", + "statistics.proto", ++ "derived_feature.proto" + ], + deps = [ + # For well-known proto types like protobuf.Any. + "@com_google_protobuf//:any_proto", +- "@com_google_protobuf//:api_proto", +- "@com_google_protobuf//:compiler_plugin_proto", + "@com_google_protobuf//:descriptor_proto", + "@com_google_protobuf//:duration_proto", +- "@com_google_protobuf//:empty_proto", +- "@com_google_protobuf//:field_mask_proto", +- "@com_google_protobuf//:source_context_proto", + "@com_google_protobuf//:struct_proto", + "@com_google_protobuf//:timestamp_proto", +- "@com_google_protobuf//:type_proto", + "@com_google_protobuf//:wrappers_proto", + ], + ) +@@ -60,10 +52,12 @@ cc_library( + deps = [":cc_metadata_v0_proto_cc"], + ) + +-py_proto_library( ++alias( + name = "metadata_v0_proto_py_pb2", +- deps = [ +- ":metadata_v0_proto", +- ], ++ actual = ":metadata_v0_proto", + ) + ++alias( ++ name = "metadata_v0_proto_py_pb2_genproto", ++ actual = ":metadata_v0_proto", ++) From d7d24961b7538567f7211a4ceb863a2e0a9d3312 Mon Sep 17 00:00:00 2001 From: Gagandeep Singh Date: Fri, 23 Jan 2026 17:13:04 +0530 Subject: [PATCH 5/9] Add compatibility patches for older Abseil and Protobuf 4.x This patch extends tensorflow.patch with changes to support: 1. Abseil backward compatibility: - Add absl_base_prefetch.h compatibility shim for older Abseil versions that lack absl/base/prefetch.h - Update prefetch includes across TensorFlow/TSL to use the shim - Add inline implementation of NullTerminatedMessage for older Abseil versions without StatusMessageAsCStr 2. Protobuf 4.x support: - Refactor cc_proto_library to use native proto_library and cc_proto_library rules instead of custom proto_gen - Implement custom _tsl_py_proto_library_rule to replace the built-in py_proto_library removed in Protobuf 4.x - Update proto library generation to depend on generated cc_proto_library targets 3. Build cleanup: - Remove unused absl/strings:string_view dependency - Update BUILD files to reflect new dependency structure These changes enable building struct2tensor with older Abseil versions while supporting Protobuf 4.x, improving compatibility across different dependency versions. 
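
For reference, the patched TSL `py_proto_library` macro ends up with roughly the following shape (a condensed sketch: the real macro in `build_config.bzl` keeps its original signature and also handles includes, plugins and runtimes, which are elided here):

```
def py_proto_library(name, srcs = [], deps = [], **kwargs):
    # protobuf_python is a Python runtime library, not a proto_library,
    # so keep it out of the generated proto_library's deps.
    genproto_deps = [d for d in deps if d != "@com_google_protobuf//:protobuf_python"]

    native.proto_library(
        name = name + "_genproto",
        srcs = srcs,
        deps = genproto_deps,
        visibility = ["//visibility:public"],
    )

    # Custom replacement for the py_proto_library rule removed in Protobuf 4.x:
    # runs protoc over the proto_library above and returns PyInfo.
    _tsl_py_proto_library_rule(
        name = name,
        deps = [":" + name + "_genproto"] + deps,
    )
```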
--- third_party/tensorflow.patch | 429 +++++++++++++++++++++++++++++++++-- 1 file changed, 408 insertions(+), 21 deletions(-) diff --git a/third_party/tensorflow.patch b/third_party/tensorflow.patch index cfa9116..f13f6fb 100644 --- a/third_party/tensorflow.patch +++ b/third_party/tensorflow.patch @@ -1,5 +1,245 @@ +diff --git a/tensorflow/core/lib/gtl/BUILD b/tensorflow/core/lib/gtl/BUILD +index 868d05f09..4876f801c 100644 +--- a/tensorflow/core/lib/gtl/BUILD ++++ b/tensorflow/core/lib/gtl/BUILD +@@ -83,6 +83,7 @@ cc_library( + name = "flatrep", + hdrs = ["flatrep.h"], + deps = [ ++ "@com_google_absl//absl/base", + "@local_tsl//tsl/lib/gtl:flatrep", + ], + ) +diff --git a/tensorflow/core/util/BUILD b/tensorflow/core/util/BUILD +index 3902d1700..74f6adeae 100644 +--- a/tensorflow/core/util/BUILD ++++ b/tensorflow/core/util/BUILD +@@ -73,6 +73,7 @@ filegroup( + filegroup( + name = "mobile_srcs_only_runtime", + srcs = [ ++ "absl_base_prefetch.h", + "activation_mode.h", + "batch_util.cc", + "batch_util.h", +@@ -208,6 +209,7 @@ filegroup( + filegroup( + name = "framework_internal_impl_srcs", + srcs = [ ++ "absl_base_prefetch.h", + "activation_mode.cc", + "batch_util.cc", + "bcast.cc", +diff --git a/tensorflow/core/util/absl_base_prefetch.h b/tensorflow/core/util/absl_base_prefetch.h +new file mode 100644 +index 000000000..429a17315 +--- /dev/null ++++ b/tensorflow/core/util/absl_base_prefetch.h +@@ -0,0 +1,62 @@ ++/* Copyright 2026 The TensorFlow Authors. All Rights Reserved. ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. ++==============================================================================*/ ++ ++#ifndef ABSL_BASE_PREFETCH_H_ ++#define ABSL_BASE_PREFETCH_H_ ++ ++// Minimal compatibility shim for absl/base/prefetch.h. Provides the prefetch ++// helpers used by TensorFlow when the corresponding Abseil header is absent. ++// This should be sufficient for builds that depend on an older Abseil release. ++ ++namespace absl { ++ ++// Hints the processor to prefetch the given address into the local cache for ++// read-mostly access. ++inline void PrefetchToLocalCache(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 0 /* read */, 3 /* high temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Hints the processor to prefetch the given address into the local cache for ++// imminent writes. ++inline void PrefetchToLocalCacheForWrite(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 1 /* write */, 3 /* high temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Non-temporal prefetch (prefer not to pollute caches). ++inline void PrefetchToLocalCacheNta(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 0 /* read */, 0 /* no temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Convenience aliases mirroring Abseil's API. 
++inline void Prefetch(const void* addr) { PrefetchToLocalCache(addr); } ++inline void PrefetchForWrite(const void* addr) { ++ PrefetchToLocalCacheForWrite(addr); ++} ++ ++} // namespace absl ++ ++#endif // ABSL_BASE_PREFETCH_H_ +diff --git a/tensorflow/core/util/presized_cuckoo_map.h b/tensorflow/core/util/presized_cuckoo_map.h +index 2a03c511e..8d73d6f85 100644 +--- a/tensorflow/core/util/presized_cuckoo_map.h ++++ b/tensorflow/core/util/presized_cuckoo_map.h +@@ -19,7 +19,7 @@ limitations under the License. + #include + #include + +-#include "absl/base/prefetch.h" ++#include "absl_base_prefetch.h" + #include "absl/numeric/int128.h" + #include "tensorflow/core/framework/types.h" + #include "tensorflow/core/platform/macros.h" +diff --git a/third_party/xla/third_party/tsl/tsl/lib/gtl/BUILD b/third_party/xla/third_party/tsl/tsl/lib/gtl/BUILD +index ed2d8656f..fd53055c2 100644 +--- a/third_party/xla/third_party/tsl/tsl/lib/gtl/BUILD ++++ b/third_party/xla/third_party/tsl/tsl/lib/gtl/BUILD +@@ -58,8 +58,8 @@ cc_library( + name = "flatrep", + hdrs = ["flatrep.h"], + deps = [ ++ "//tsl/platform:prefetch", + "//tsl/platform:types", +- "@com_google_absl//absl/base:prefetch", + ], + ) + +diff --git a/third_party/xla/third_party/tsl/tsl/lib/gtl/flatrep.h b/third_party/xla/third_party/tsl/tsl/lib/gtl/flatrep.h +index dfc65844e..e9be67ca6 100644 +--- a/third_party/xla/third_party/tsl/tsl/lib/gtl/flatrep.h ++++ b/third_party/xla/third_party/tsl/tsl/lib/gtl/flatrep.h +@@ -20,7 +20,7 @@ limitations under the License. + + #include + +-#include "absl/base/prefetch.h" ++#include "tsl/platform/prefetch.h" + #include "tsl/platform/types.h" + + namespace tsl { +diff --git a/third_party/xla/third_party/tsl/tsl/lib/hash/BUILD b/third_party/xla/third_party/tsl/tsl/lib/hash/BUILD +index c497abfe1..fffee96fe 100644 +--- a/third_party/xla/third_party/tsl/tsl/lib/hash/BUILD ++++ b/third_party/xla/third_party/tsl/tsl/lib/hash/BUILD +@@ -39,7 +39,6 @@ cc_library( + "//tsl/platform:types", + "@com_google_absl//absl/crc:crc32c", + "@com_google_absl//absl/strings:cord", +- "@com_google_absl//absl/strings:string_view", + ], + ) + +diff --git a/third_party/xla/third_party/tsl/tsl/platform/BUILD b/third_party/xla/third_party/tsl/tsl/platform/BUILD +index 273fd5306..028c5db06 100644 +--- a/third_party/xla/third_party/tsl/tsl/platform/BUILD ++++ b/third_party/xla/third_party/tsl/tsl/platform/BUILD +@@ -1069,10 +1069,13 @@ cc_library( + + cc_library( + name = "prefetch", +- hdrs = ["prefetch.h"], ++ hdrs = [ ++ "prefetch.h", ++ "absl_base_prefetch.h", ++ ], + compatible_with = get_compatible_with_portable(), + deps = [ +- "@com_google_absl//absl/base:prefetch", ++ "@com_google_absl//absl/base", + ], + ) + +diff --git a/third_party/xla/third_party/tsl/tsl/platform/absl_base_prefetch.h b/third_party/xla/third_party/tsl/tsl/platform/absl_base_prefetch.h +new file mode 100644 +index 000000000..429a17315 +--- /dev/null ++++ b/third_party/xla/third_party/tsl/tsl/platform/absl_base_prefetch.h +@@ -0,0 +1,62 @@ ++/* Copyright 2026 The TensorFlow Authors. All Rights Reserved. ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
++See the License for the specific language governing permissions and ++limitations under the License. ++==============================================================================*/ ++ ++#ifndef ABSL_BASE_PREFETCH_H_ ++#define ABSL_BASE_PREFETCH_H_ ++ ++// Minimal compatibility shim for absl/base/prefetch.h. Provides the prefetch ++// helpers used by TensorFlow when the corresponding Abseil header is absent. ++// This should be sufficient for builds that depend on an older Abseil release. ++ ++namespace absl { ++ ++// Hints the processor to prefetch the given address into the local cache for ++// read-mostly access. ++inline void PrefetchToLocalCache(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 0 /* read */, 3 /* high temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Hints the processor to prefetch the given address into the local cache for ++// imminent writes. ++inline void PrefetchToLocalCacheForWrite(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 1 /* write */, 3 /* high temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Non-temporal prefetch (prefer not to pollute caches). ++inline void PrefetchToLocalCacheNta(const void* addr) { ++#if defined(__GNUC__) || defined(__clang__) ++ __builtin_prefetch(addr, 0 /* read */, 0 /* no temporal locality */); ++#else ++ (void)addr; ++#endif ++} ++ ++// Convenience aliases mirroring Abseil's API. ++inline void Prefetch(const void* addr) { PrefetchToLocalCache(addr); } ++inline void PrefetchForWrite(const void* addr) { ++ PrefetchToLocalCacheForWrite(addr); ++} ++ ++} // namespace absl ++ ++#endif // ABSL_BASE_PREFETCH_H_ diff --git a/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl b/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl -index 35cdcdc50..df3badf2d 100644 +index 35cdcdc50..1920ec210 100644 --- a/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl +++ b/third_party/xla/third_party/tsl/tsl/platform/default/build_config.bzl @@ -1,7 +1,6 @@ @@ -10,7 +250,7 @@ index 35cdcdc50..df3badf2d 100644 load( "@local_xla//xla/tsl:tsl.bzl", "clean_dep", -@@ -195,18 +194,15 @@ def cc_proto_library( +@@ -195,18 +194,13 @@ def cc_proto_library( if protolib_name == None: protolib_name = name @@ -20,8 +260,6 @@ index 35cdcdc50..df3badf2d 100644 # For pre-checked-in generated files, we add the internal_bootstrap_hack # which will skip the codegen action. - proto_gen( -+ # Skip proto generation for internal bootstrap -+ + native.proto_library( name = protolib_name + "_genproto", - srcs = srcs, @@ -33,14 +271,13 @@ index 35cdcdc50..df3badf2d 100644 ) # An empty cc_library to make rule dependency consistent. 
-@@ -227,18 +223,16 @@ def cc_proto_library( +@@ -227,18 +221,11 @@ def cc_proto_library( gen_hdrs = _proto_cc_hdrs(srcs, use_grpc_plugin) outs = gen_srcs + gen_hdrs - proto_gen( -+ native.proto_library( - name = protolib_name + "_genproto", - srcs = srcs, +- name = protolib_name + "_genproto", +- srcs = srcs, - outs = outs, - gen_cc = 1, - includes = includes, @@ -48,27 +285,147 @@ index 35cdcdc50..df3badf2d 100644 - plugin_language = "grpc", - plugin_options = plugin_options, - protoc = protoc, -+ deps = protolib_deps, - visibility = ["//visibility:public"], +- visibility = ["//visibility:public"], - deps = genproto_deps, -+ ) -+ ++ # Don't create proto_library - it should already exist from tf_proto_library ++ # Just create cc_proto_library that depends on the existing proto_library + native.cc_proto_library( + name = protolib_name + "_cc_genproto", -+ deps = [":" + protolib_name + "_genproto"], ++ deps = [":" + protolib_name], ) if use_grpc_plugin: -@@ -265,7 +259,7 @@ def cc_proto_library( +@@ -263,11 +250,12 @@ def cc_proto_library( + visibility = kwargs["visibility"], + ) ++ # Depend on generated cc_proto_library instead of declaring hdrs/srcs native.cc_library( name = impl_name, - srcs = gen_srcs, +- hdrs = gen_hdrs, +- deps = cc_libs + deps, + srcs = [], - hdrs = gen_hdrs, - deps = cc_libs + deps, ++ hdrs = [], ++ deps = cc_libs + deps + [":" + protolib_name + "_cc_genproto"], includes = includes, -@@ -402,32 +396,18 @@ def py_proto_library( + alwayslink = 1, + **kwargs +@@ -276,8 +264,9 @@ def cc_proto_library( + name = header_only_name, + deps = [ + "@com_google_protobuf//:protobuf_headers", ++ ":" + protolib_name + "_cc_genproto", + ] + header_only_deps + if_tsl_link_protobuf([impl_name]), +- hdrs = gen_hdrs, ++ hdrs = [], + **kwargs + ) + +@@ -348,6 +337,102 @@ def cc_grpc_library( + **kwargs + ) + ++# Custom implementation for py_proto_library to support Protobuf 4.x ++# which removed the built-in py_proto_library rule. 
++def _tsl_py_proto_library_impl(ctx): ++ """Generate Python code from proto_library deps.""" ++ proto_deps = ctx.attr.deps ++ all_sources = [] ++ py_infos = [] ++ ++ for dep in proto_deps: ++ if ProtoInfo in dep: ++ all_sources.extend(dep[ProtoInfo].direct_sources) ++ elif PyInfo in dep: ++ py_infos.append(dep[PyInfo]) ++ ++ # Filter workspace sources only ++ workspace_sources = [] ++ for src in all_sources: ++ if not src.short_path.startswith("external/") and not src.short_path.startswith("../"): ++ workspace_sources.append(src) ++ ++ # Generate Python files ++ py_outputs = [] ++ for proto_src in workspace_sources: ++ basename = proto_src.basename[:-6] # Remove .proto ++ py_file = ctx.actions.declare_file(basename + "_pb2.py") ++ py_outputs.append(py_file) ++ ++ if py_outputs: ++ proto_path_args = ["--proto_path=."] ++ proto_paths = {".": True} ++ ++ for dep in proto_deps: ++ if ProtoInfo in dep: ++ for src in dep[ProtoInfo].transitive_sources.to_list(): ++ if src.path.startswith("external/com_google_protobuf/"): ++ proto_path = "external/com_google_protobuf/src" ++ if proto_path not in proto_paths: ++ proto_paths[proto_path] = True ++ proto_path_args.append("--proto_path=" + proto_path) ++ elif src.path.startswith("external/"): ++ parts = src.path.split("/") ++ if len(parts) >= 2: ++ proto_path = "/".join(parts[:2]) ++ if proto_path not in proto_paths: ++ proto_paths[proto_path] = True ++ proto_path_args.append("--proto_path=" + proto_path) ++ if src.root.path and src.root.path not in proto_paths: ++ proto_paths[src.root.path] = True ++ proto_path_args.append("--proto_path=" + src.root.path) ++ ++ proto_file_args = [src.short_path for src in workspace_sources] ++ output_root = ctx.bin_dir.path ++ ++ ctx.actions.run( ++ inputs = depset(direct = workspace_sources, transitive = [ ++ dep[ProtoInfo].transitive_sources for dep in proto_deps if ProtoInfo in dep ++ ]), ++ outputs = py_outputs, ++ executable = ctx.executable._protoc, ++ arguments = ["--python_out=" + output_root] + proto_path_args + proto_file_args, ++ mnemonic = "ProtocPython", ++ ) ++ ++ all_transitive_sources = [depset(py_outputs)] ++ all_imports = [depset([ctx.bin_dir.path])] if py_outputs else [] ++ ++ for py_info in py_infos: ++ all_transitive_sources.append(py_info.transitive_sources) ++ if hasattr(py_info, 'imports'): ++ all_imports.append(py_info.imports) ++ ++ return [ ++ DefaultInfo(files = depset(py_outputs)), ++ PyInfo( ++ transitive_sources = depset(transitive = all_transitive_sources), ++ imports = depset(transitive = all_imports), ++ has_py2_only_sources = False, ++ has_py3_only_sources = True, ++ ), ++ ] ++ ++_tsl_py_proto_library_rule = rule( ++ implementation = _tsl_py_proto_library_impl, ++ attrs = { ++ "deps": attr.label_list( ++ providers = [[ProtoInfo], [PyInfo]], ++ ), ++ "_protoc": attr.label( ++ default = "@com_google_protobuf//:protoc", ++ executable = True, ++ cfg = "exec", ++ ), ++ }, ++ provides = [PyInfo], ++) ++ + # Re-defined protocol buffer rule to bring in the change introduced in commit + # https://github.com/google/protobuf/commit/294b5758c373cbab4b72f35f4cb62dc1d8332b68 + # which was not part of a stable protobuf release in 04/2018. 
+@@ -402,32 +487,19 @@ def py_proto_library( genproto_deps = [] for dep in deps: if dep != "@com_google_protobuf//:protobuf_python": @@ -96,17 +453,18 @@ index 35cdcdc50..df3badf2d 100644 - py_libs = py_libs + [default_runtime] - - native.py_library( -+ native.py_proto_library( ++ # Use custom rule instead of removed native.py_proto_library ++ _tsl_py_proto_library_rule( name = name, - srcs = outs + py_extra_srcs, - deps = py_libs + deps, - imports = includes, - **kwargs -+ deps = [":" + name + "_genproto"], ++ deps = [":" + name + "_genproto"] + deps, ) def tf_proto_library_cc( -@@ -467,11 +447,10 @@ def tf_proto_library_cc( +@@ -467,11 +539,10 @@ def tf_proto_library_cc( if not srcs: # This is a collection of sub-libraries. Build header-only and impl # libraries containing all the sources. @@ -120,7 +478,7 @@ index 35cdcdc50..df3badf2d 100644 ) native.alias( -@@ -538,11 +517,10 @@ def tf_proto_library_py( +@@ -538,11 +609,10 @@ def tf_proto_library_py( if not srcs: # This is a collection of sub-libraries. Build header-only and impl # libraries containing all the sources. @@ -134,3 +492,32 @@ index 35cdcdc50..df3badf2d 100644 ) native.py_library( name = py_name, +diff --git a/third_party/xla/third_party/tsl/tsl/platform/prefetch.h b/third_party/xla/third_party/tsl/tsl/platform/prefetch.h +index d883529c6..05f3469a4 100644 +--- a/third_party/xla/third_party/tsl/tsl/platform/prefetch.h ++++ b/third_party/xla/third_party/tsl/tsl/platform/prefetch.h +@@ -16,7 +16,7 @@ limitations under the License. + #ifndef TENSORFLOW_TSL_PLATFORM_PREFETCH_H_ + #define TENSORFLOW_TSL_PLATFORM_PREFETCH_H_ + +-#include "absl/base/prefetch.h" ++#include "absl_base_prefetch.h" + + namespace tsl { + namespace port { +diff --git a/third_party/xla/third_party/tsl/tsl/platform/status.h b/third_party/xla/third_party/tsl/tsl/platform/status.h +index 84954ff48..724ad934b 100644 +--- a/third_party/xla/third_party/tsl/tsl/platform/status.h ++++ b/third_party/xla/third_party/tsl/tsl/platform/status.h +@@ -126,7 +126,10 @@ const char* NullTerminatedMessage(const absl::Status& status); + #else + ABSL_DEPRECATE_AND_INLINE() + inline const char* NullTerminatedMessage(const absl::Status& status) { +- return absl::StatusMessageAsCStr(status); ++ // absl::StatusMessageAsCStr doesn't exist in older Abseil versions ++ // Inline implementation from newer Abseil ++ auto sv_message = status.message(); ++ return sv_message.empty() ? 
"" : sv_message.data(); + } + #endif + From c7deba0b4dafd85c8e975ceb769f14dff8777ab8 Mon Sep 17 00:00:00 2001 From: Gagandeep Singh Date: Fri, 23 Jan 2026 19:04:21 +0530 Subject: [PATCH 6/9] chore: upgrade Protobuf dependency from 4.23.4 to 4.25.6 --- WORKSPACE | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/WORKSPACE b/WORKSPACE index 39fe82a..639f9a2 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -64,14 +64,14 @@ local_python_configure(name = "local_execution_config_python") load("//struct2tensor:workspace.bzl", "struct2tensor_workspace") struct2tensor_workspace() -# ===== Protobuf 4.23.4 dependency ===== +# ===== Protobuf 4.25.6 dependency ===== # Must be declared BEFORE TensorFlow's workspaces to override the version they pull http_archive( name = "com_google_protobuf", - sha256 = "5f3cd52d6e5062071d99da57a96ea87e39bc020d6d25748001d919c474a4d8ed", - strip_prefix = "protobuf-4.23.4", + sha256 = "4e6727bc5d23177edefa3ad86fd2f5a92cd324151636212fd1f7f13aef3fd2b7", + strip_prefix = "protobuf-4.25.6", urls = [ - "https://github.com/protocolbuffers/protobuf/archive/v4.23.4.tar.gz", + "https://github.com/protocolbuffers/protobuf/archive/v4.25.6.tar.gz", ], ) From ec8379537f86d2843b9bd5489d4ba80d3266c64d Mon Sep 17 00:00:00 2001 From: Gagandeep Singh Date: Fri, 23 Jan 2026 19:31:23 +0530 Subject: [PATCH 7/9] Move TFMD patch to tfmd.patch; Protobuf 4.x compatibility Remove vendored google/protobuf/any.proto and obsolete third_party/README.md --- google/protobuf/any.proto | 47 ------------------- struct2tensor/workspace.bzl | 2 +- third_party/BUILD | 2 +- third_party/README.md | 28 ----------- ...md_protobuf_downgrade.patch => tfmd.patch} | 0 5 files changed, 2 insertions(+), 77 deletions(-) delete mode 100644 google/protobuf/any.proto delete mode 100644 third_party/README.md rename third_party/{tfmd_protobuf_downgrade.patch => tfmd.patch} (100%) diff --git a/google/protobuf/any.proto b/google/protobuf/any.proto deleted file mode 100644 index 101b575..0000000 --- a/google/protobuf/any.proto +++ /dev/null @@ -1,47 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Minimal local copy to satisfy imports for struct2tensor tests. -// Source reference: https://github.com/protocolbuffers/protobuf/blob/v3.21.9/src/google/protobuf/any.proto - -syntax = "proto3"; - -package google.protobuf; - -option cc_enable_arenas = true; -option java_package = "com.google.protobuf"; -option java_outer_classname = "AnyProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; - -// "Any" contains an arbitrary serialized protocol buffer message along with a -// URL that describes the type of the serialized message. -// -// The protocol buffer library provides support to pack/unpack Any values in the -// form of utility APIs. Note that any API that accepts or returns an Any value -// will perform type checking and packing/unpacking on the fly. Therefore, if you -// need to convert between a known proto type and Any, use the library helpers -// rather than manipulating this message directly. -// -// Example usage: -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is()) { -// Foo foo2 = any.unpack(); -// } -// -// JSON -// ==== -// The JSON representation of an Any value uses the following format: -// -// { "@type": , "value": } -// -// where the value field depends on the type_url. 
-message Any { - // A URL/resource name uniquely identifying the type of the serialized - // protocol buffer message. - string type_url = 1; - - // Must be a valid serialized protocol buffer of the above specified type. - bytes value = 2; -} diff --git a/struct2tensor/workspace.bzl b/struct2tensor/workspace.bzl index 8cce206..b477929 100644 --- a/struct2tensor/workspace.bzl +++ b/struct2tensor/workspace.bzl @@ -72,6 +72,6 @@ def struct2tensor_workspace(): urls = [ "https://github.com/tensorflow/metadata/archive/%s.tar.gz" % _TFMD_COMMIT_HASH, ], - patches = ["//third_party:tfmd_protobuf_downgrade.patch"], + patches = ["//third_party:tfmd.patch"], patch_args = ["-p1"], ) diff --git a/third_party/BUILD b/third_party/BUILD index 331ebed..aaf12f6 100644 --- a/third_party/BUILD +++ b/third_party/BUILD @@ -17,5 +17,5 @@ licenses(["notice"]) exports_files([ "python_configure.bzl", - "tfmd_protobuf_downgrade.patch", + "tfmd.patch", ]) diff --git a/third_party/README.md b/third_party/README.md deleted file mode 100644 index 9d6afd4..0000000 --- a/third_party/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Third Party Dependencies - -## TensorFlow Metadata Protobuf Downgrade Patch - -### Background - -TensorFlow Metadata v1.17.1 upgraded from Protobuf 3.21.9 to 4.25.6 in their Bazel build configuration. However, this creates compatibility issues with TensorFlow 2.17.1, which still uses an older Protobuf version range (`>=3.20.3,<5.0.0dev` with exclusions). - -### Solution - -To maintain compatibility with TensorFlow 2.17.1 while using TensorFlow Metadata v1.17.1, we apply a patch that reverts the Protobuf upgrade in TFMD back to version 3.21.9. - -The patch file `tfmd_protobuf_downgrade.patch` reverts the following changes from TFMD v1.17.1: - -1. **tensorflow_metadata/proto/v0/BUILD file**: - - Reverts proto library definitions to use Protobuf 3.x syntax - - Restores the old `cc_proto_library` and `py_proto_library` patterns - -### Usage - -The patch is automatically applied when building struct2tensor through the `patches` parameter in the `http_archive` rule for `com_github_tensorflow_metadata` in [workspace.bzl](../struct2tensor/workspace.bzl). - -No manual intervention is required. - -### References - -- [TensorFlow Metadata v1.17.0...v1.17.1 comparison](https://github.com/tensorflow/metadata/compare/v1.17.0...v1.17.1) -- [TensorFlow 2.17.1 dependencies](https://github.com/tensorflow/tensorflow/blob/v2.17.1/tensorflow/tools/pip_package/setup.py) diff --git a/third_party/tfmd_protobuf_downgrade.patch b/third_party/tfmd.patch similarity index 100% rename from third_party/tfmd_protobuf_downgrade.patch rename to third_party/tfmd.patch From ae94ddb6ede0578610dd3517306ee42a4e63ef49 Mon Sep 17 00:00:00 2001 From: Gagandeep Singh Date: Mon, 26 Jan 2026 09:53:34 +0530 Subject: [PATCH 8/9] fix: Use native prefix for Bazel rules in macros Prefix cc_binary and cc_library with native. 
in struct2tensor.bzl --- struct2tensor/struct2tensor.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/struct2tensor/struct2tensor.bzl b/struct2tensor/struct2tensor.bzl index fe246c1..4e92d91 100644 --- a/struct2tensor/struct2tensor.bzl +++ b/struct2tensor/struct2tensor.bzl @@ -287,7 +287,7 @@ DYNAMIC_DEPS = ["@local_config_tf//:libtensorflow_framework", "@local_config_tf/ def s2t_dynamic_binary(name, deps): """Creates a .so file intended for linking with tensorflow_framework.so.""" - cc_binary( + native.cc_binary( name = name, copts = DYNAMIC_COPTS, linkshared = 1, @@ -300,7 +300,7 @@ def s2t_dynamic_library( deps = None): """Creates a static library intended for linking with tensorflow_framework.so.""" true_deps = [] if deps == None else deps - cc_library( + native.cc_library( name = name, srcs = srcs, alwayslink = 1, From cb0e57a6f79ec65a2b7bffa5595f83b67fc9dc54 Mon Sep 17 00:00:00 2001 From: Venkata Sai Madhur Karampudi Date: Thu, 29 Jan 2026 19:44:25 +0000 Subject: [PATCH 9/9] Update Dockerfile --- struct2tensor/tools/tf_serving_docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/struct2tensor/tools/tf_serving_docker/Dockerfile b/struct2tensor/tools/tf_serving_docker/Dockerfile index 61ffaae..04f1ec0 100644 --- a/struct2tensor/tools/tf_serving_docker/Dockerfile +++ b/struct2tensor/tools/tf_serving_docker/Dockerfile @@ -16,7 +16,7 @@ # that has struct2tensor ops linked in. FROM ubuntu:20.04 as base_build -ARG TF_SERVING_VERSION_GIT_BRANCH=master +ARG TF_SERVING_VERSION_GIT_BRANCH=2.17.0 ARG TF_SERVING_VERSION_GIT_COMMIT=head ARG S2T_VERSION_GIT_BRANCH=master ARG S2T_VERSION_GIT_COMMIT=head
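For context on the struct2tensor.bzl change in patch 8/9: inside a .bzl macro, Bazel's built-in rules are only reachable through the native module (unless Starlark replacements are explicitly load()ed), which is why s2t_dynamic_binary and s2t_dynamic_library now call native.cc_binary and native.cc_library. A minimal sketch of the convention; the macro name and attribute set below are hypothetical, not copied from struct2tensor.bzl:

    # hypothetical_macros.bzl -- illustrative only; not part of this patch series.
    def example_dynamic_binary(name, deps = []):
        """Wraps the built-in cc_binary, reached via native., to link a shared object."""
        native.cc_binary(
            name = name,
            deps = deps,
            linkshared = 1,  # link a shared library rather than an executable
        )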