Commit dd3a0f9

Major tf-keras update:

1) Update Bazel version to 7.4.1.
2) Introduce hermetic C++ and CUDA toolchains.
3) Update protobuf to 5.28.3.
4) Introduce the new wheel-building process. Instructions for building and testing the wheels with Bazel commands are provided in `wheel/README.md` (see the sketch below).

Note the following changes in the wheel content:

1) Most of the files in `tf_keras/layers/preprocessing/benchmarks` are not added to `tf_keras/src` because they are actually unit tests.
2) The same applies to `tf_keras/src/tests/keras_doctest.py`.
3) `tf_keras/src/saving/legacy/saved_model/create_test_saved_model.py` is absent because the `create_test_saved_model` target carries the `"no_oss"` tag.

PiperOrigin-RevId: 800518252
1 parent 438b870 commit dd3a0f9
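The new wheel-building flow is driven entirely by Bazel; the authoritative commands live in the `wheel/README.md` added by this commit. As a rough sketch of the intended workflow, with a placeholder target label that is not taken from the repository:

    # Hypothetical sketch of the Bazel-driven wheel build; the real target names
    # are documented in wheel/README.md. //wheel:tf_keras_wheel is a placeholder.
    bazel build //wheel:tf_keras_wheel
    # A locally built wheel dropped into dist/ is picked up by the WORKSPACE via
    # local_wheel_dist_folder = "dist" and the local_wheel_inclusion_list patterns.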

66 files changed: 3,818 additions, 1,174 deletions


.bazelrc

Lines changed: 10 additions & 0 deletions
@@ -28,6 +28,9 @@
 # nohdfs: Disable hadoop hdfs support.
 # nonccl: Disable nccl support.
 
+# No MODULE.bzl file.
+common --noenable_bzlmod
+
 # Sets the default Apple platform to macOS.
 build --apple_platform_type=macos
 
@@ -39,11 +42,15 @@ test --define open_source_build=true
 build --define=use_fast_cpp_protos=false
 test --define=use_fast_cpp_protos=false
 
+build --incompatible_enable_cc_toolchain_resolution
+build --repo_env USE_HERMETIC_CC_TOOLCHAIN=1
+
 # This config refers to building with CUDA available. It does not necessarily
 # mean that we build CUDA op kernels.
 build:using_cuda --define=using_cuda=true
 build:using_cuda --action_env TF_NEED_CUDA=1
 build:using_cuda --crosstool_top=@local_config_cuda//crosstool:toolchain
+build:using_cuda --@rules_ml_toolchain//common:enable_cuda
 
 # Enable the mlir generated GPU kernels only for cuda builds.
 build --define=tensorflow_enable_mlir_generated_gpu_kernels=0
@@ -53,6 +60,9 @@ build:using_cuda --define=tensorflow_enable_mlir_generated_gpu_kernels=1
 # This config refers to building CUDA op kernels with nvcc.
 build:cuda --config=using_cuda
 build:cuda --define=using_cuda_nvcc=true
+# Build CUDA with NVCC and other C++ targets with Clang
+build:cuda --action_env=TF_NVCC_CLANG="1"
+build:cuda --@local_config_cuda//:cuda_compiler=nvcc
 
 # dbg config, as a shorthand for '--config=opt -c dbg'
 build:dbg --config=opt -c dbg
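Taken together, these flags switch the build to the hermetic C++ toolchain and let the existing `--config=cuda` shortcut pull in the `rules_ml_toolchain` CUDA setup. A minimal sketch of how the configs are exercised (the `//tf_keras/...` target pattern is illustrative):

    # Default build: hermetic C++ toolchain resolved via
    # --incompatible_enable_cc_toolchain_resolution and USE_HERMETIC_CC_TOOLCHAIN=1.
    bazel build //tf_keras/...

    # CUDA build: --config=cuda chains through --config=using_cuda, enabling
    # @rules_ml_toolchain//common:enable_cuda and compiling kernels with NVCC
    # while other C++ targets use Clang (TF_NVCC_CLANG=1).
    bazel build --config=cuda //tf_keras/...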

.bazelversion

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-5.4.0
+7.4.1
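Bazelisk reads `.bazelversion`, so invoking the build through it picks up the pinned release automatically; for example:

    # Bazelisk resolves the version from .bazelversion, so this runs Bazel 7.4.1.
    bazelisk build //tf_keras/...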

BUILD

Lines changed: 55 additions & 117 deletions
@@ -1,118 +1,56 @@
-py_library(
-    name = "expect_absl_installed",
-    # This is a dummy rule used as a absl dependency in open-source.
-    # We expect absl to already be installed on the system, e.g. via
-    # `pip install absl`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_h5py_installed",
-    # This is a dummy rule used as a h5 dependency in open-source.
-    # We expect h5py to already be installed on the system, e.g. via
-    # `pip install h5py'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_numpy_installed",
-    # This is a dummy rule used as a numpy dependency in open-source.
-    # We expect numpy to already be installed on the system, e.g. via
-    # `pip install numpy`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_pandas_installed",
-    # This is a dummy rule used as a pandas dependency in open-source.
-    # We expect pandas to already be installed on the system, e.g. via
-    # `pip install pandas'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_pillow_installed",
-    # This is a dummy rule used as a pillow dependency in open-source.
-    # We expect pillow to already be installed on the system, e.g. via
-    # `pip install Pillow'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-# Note that this dependency is for testing only.
-py_library(
-    name = "expect_portpicker_installed",
-    # This is a dummy rule used as a pandas dependency in open-source.
-    # We expect portpicker to already be installed on the system, e.g. via
-    # `pip install portpicker'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_pydot_installed",
-    # This is a dummy rule used as a pydot dependency in open-source.
-    # We expect pydot to already be installed on the system, e.g. via
-    # `pip install pydot'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_scipy_installed",
-    # This is a dummy rule used as a scipy dependency in open-source.
-    # We expect scipy to already be installed on the system, e.g. via
-    # `pip install scipy'
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_six_installed",
-    # This is a dummy rule used as a six dependency in open-source.
-    # We expect six to already be installed on the system, e.g. via
-    # `pip install six`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_tensorboard_installed",
-    # This is a dummy rule used as a tensorboard dependency in open-source.
-    # We expect tensorboard to already be installed on the system, e.g. via
-    # `pip install tensorflow`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_tensorflow_installed",
-    # This is a dummy rule used as a tensorflow dependency in open-source.
-    # We expect tensorflow to already be installed on the system, e.g. via
-    # `pip install tensorflow`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-py_library(
-    name = "expect_yaml_installed",
-    # This is a dummy rule used as a yaml dependency in open-source.
-    # We expect yaml to already be installed on the system, e.g. via
-    # `pip install yaml`
-    visibility = ["//visibility:public"],
-    deps = [],
-)
-
-# Note that this dependency is for testing only.
-py_library(
-    name = "expect_tensorflow_io_installed",
-    # This is a dummy rule used as a tensorflow_io dependency in open-source.
-    # We expect tensorflow_io to already be installed on the system, e.g. via
-    # `pip install tensorflow-io`
-    visibility = ["//visibility:public"],
-    deps = [],
+load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
+load("@python//:defs.bzl", "compile_pip_requirements")
+load("@python_version_repo//:py_version.bzl", "REQUIREMENTS")
+load(
+    "@xla//third_party/py:py_import.bzl",
+    "py_import",
+)
+
+exports_files(["oss_setup.py"])
+
+compile_pip_requirements(
+    name = "requirements",
+    extra_args = [
+        "--allow-unsafe",
+        "--build-isolation",
+        "--rebuild",
+    ],
+    generate_hashes = True,
+    requirements_in = "requirements.in",
+    requirements_txt = REQUIREMENTS,
+)
+
+py_import(
+    name = "tf_nightly_with_deps",
+    wheel = "@pypi//tf_nightly:whl",
+    wheel_deps = if_cuda([
+        "@pypi_nvidia_cublas_cu12//:pkg",
+        "@pypi_nvidia_cuda_cupti_cu12//:pkg",
+        "@pypi_nvidia_cuda_nvcc_cu12//:pkg",
+        "@pypi_nvidia_cuda_nvrtc_cu12//:pkg",
+        "@pypi_nvidia_cuda_runtime_cu12//:pkg",
+        "@pypi_nvidia_cudnn_cu12//:pkg",
+        "@pypi_nvidia_cufft_cu12//:pkg",
+        "@pypi_nvidia_curand_cu12//:pkg",
+        "@pypi_nvidia_cusolver_cu12//:pkg",
+        "@pypi_nvidia_cusparse_cu12//:pkg",
+        "@pypi_nvidia_nccl_cu12//:pkg",
+        "@pypi_nvidia_nvjitlink_cu12//:pkg",
+    ]),
+    deps = [
+        "@pypi_absl_py//:pkg",
+        "@pypi_astunparse//:pkg",
+        "@pypi_flatbuffers//:pkg",
+        "@pypi_gast//:pkg",
+        "@pypi_ml_dtypes//:pkg",
+        "@pypi_numpy//:pkg",
+        "@pypi_opt_einsum//:pkg",
+        "@pypi_packaging//:pkg",
+        "@pypi_protobuf//:pkg",
+        "@pypi_requests//:pkg",
+        "@pypi_tensorboard//:pkg",
+        "@pypi_termcolor//:pkg",
+        "@pypi_typing_extensions//:pkg",
+        "@pypi_wrapt//:pkg",
+    ],
 )
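The `requirements` target pins the Python dependencies with hashes for the hermetic Python setup in the WORKSPACE. Assuming the `@python//:defs.bzl` wrapper follows the standard rules_python `compile_pip_requirements` convention of generating `<name>.update` and `<name>_test` targets, the lock file would be refreshed and checked with:

    # Re-resolve requirements.in into the hashed lock file (rules_python
    # convention; the exact target names depend on the @python//:defs.bzl wrapper).
    bazel run //:requirements.update
    # Verify the lock file is current without rewriting it.
    bazel test //:requirements_test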

WORKSPACE

Lines changed: 124 additions & 5 deletions
@@ -2,8 +2,126 @@ workspace(name = "org_keras")
 
 load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 
+# Toolchains for ML projects hermetic builds.
+# Details: https://github.com/google-ml-infra/rules_ml_toolchain
+http_archive(
+    name = "rules_ml_toolchain",
+    sha256 = "59d7eb36a02cbe3c2e2fa67fda5e8f1ab7e274bc4773bbd207c51fe199e11c19",
+    strip_prefix = "rules_ml_toolchain-ffd9e3d7b84e43c2686c803cb08ce790ffd58baa",
+    urls = [
+        "https://github.com/google-ml-infra/rules_ml_toolchain/archive/ffd9e3d7b84e43c2686c803cb08ce790ffd58baa.tar.gz",
+    ],
+)
+
+load(
+    "@rules_ml_toolchain//cc/deps:cc_toolchain_deps.bzl",
+    "cc_toolchain_deps",
+)
+
+cc_toolchain_deps()
+
+register_toolchains("@rules_ml_toolchain//cc:linux_x86_64_linux_x86_64")
+
+register_toolchains("@rules_ml_toolchain//cc:linux_x86_64_linux_x86_64_cuda")
+
+http_archive(
+    name = "xla",
+    sha256 = "5b20a5fd981a23a5dea092f13f9279b5c2e8005b509ac141fb65f22a42ad35a9",
+    strip_prefix = "xla-32b7537aee5a1e9145a6e11fc258347c41d6f5f8",
+    urls = [
+        "https://storage.googleapis.com/mirror.tensorflow.org/github.com/openxla/xla/archive/32b7537aee5a1e9145a6e11fc258347c41d6f5f8.tar.gz",
+        "https://github.com/openxla/xla/archive/32b7537aee5a1e9145a6e11fc258347c41d6f5f8.tar.gz",
+    ],
+)
+
+load("@xla//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
+
+# Initialize hermetic Python
+load("@xla//third_party/py:python_init_rules.bzl", "python_init_rules")
+
+python_init_rules()
+
+load("@xla//third_party/py:python_init_repositories.bzl", "python_init_repositories")
+
+python_init_repositories(
+    requirements = {
+        "3.9": "//:requirements_lock_3_9.txt",
+    },
+    local_wheel_workspaces = ["//:WORKSPACE"],
+    local_wheel_dist_folder = "dist",
+    local_wheel_inclusion_list = [
+        "tf_keras*",
+        "tf_nightly*",
+        "tf-keras*",
+        "tf-nightly*",
+        "tensorflow*",
+        "keras*",
+    ],
+)
+
+load("@xla//third_party/py:python_init_toolchains.bzl", "python_init_toolchains")
+
+python_init_toolchains()
+
+load("@xla//third_party/py:python_init_pip.bzl", "python_init_pip")
+
+python_init_pip()
+
+load("@pypi//:requirements.bzl", "install_deps")
+
+install_deps()
+
+load("//:tf_keras_python_wheel.bzl", "tf_keras_python_wheel_repository")
+
+tf_keras_python_wheel_repository(
+    name = "tf_keras_wheel",
+    version_key = "__version__",
+    version_source = "//tf_keras:__init__.py",
+)
+
+load(
+    "@xla//third_party/py:python_wheel.bzl",
+    "python_wheel_version_suffix_repository",
+)
+
+python_wheel_version_suffix_repository(
+    name = "tf_keras_wheel_version_suffix",
+)
+
+load(
+    "@rules_ml_toolchain//gpu/cuda:cuda_json_init_repository.bzl",
+    "cuda_json_init_repository",
+)
+
+cuda_json_init_repository()
+
+load(
+    "@cuda_redist_json//:distributions.bzl",
+    "CUDA_REDISTRIBUTIONS",
+    "CUDNN_REDISTRIBUTIONS",
+)
+load(
+    "@rules_ml_toolchain//gpu/cuda:cuda_redist_init_repositories.bzl",
+    "cuda_redist_init_repositories",
+    "cudnn_redist_init_repository",
+)
+
+cuda_redist_init_repositories(
+    cuda_redistributions = CUDA_REDISTRIBUTIONS,
+)
+
+cudnn_redist_init_repository(
+    cudnn_redistributions = CUDNN_REDISTRIBUTIONS,
+)
+
+load(
+    "@rules_ml_toolchain//gpu/cuda:cuda_configure.bzl",
+    "cuda_configure",
+)
+
+cuda_configure(name = "local_config_cuda")
+
 # Needed by protobuf
-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 http_archive(
     name = "bazel_skylib",
     urls = [
@@ -29,11 +147,12 @@ bind(
     actual = "@six_archive//:six",
 )
 
-http_archive(
+tf_http_archive(
     name = "com_google_protobuf",
-    sha256 = "f66073dee0bc159157b0bd7f502d7d1ee0bc76b3c1eac9836927511bdc4b3fc1",
-    strip_prefix = "protobuf-3.21.9",
-    urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip"],
+    patch_file = ["@xla//third_party/protobuf:protobuf.patch"],
+    sha256 = "f645e6e42745ce922ca5388b1883ca583bafe4366cc74cf35c3c9299005136e2",
+    strip_prefix = "protobuf-5.28.3",
+    urls = tf_mirror_urls("https://github.com/protocolbuffers/protobuf/archive/refs/tags/v5.28.3.zip"),
 )
 
 # ZLIB. Need by com_google_protobuf.
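The WORKSPACE now provisions Python, CUDA, and cuDNN hermetically through the XLA helpers and `rules_ml_toolchain` instead of relying on locally installed toolchains. In the XLA/TensorFlow setup these repositories are usually steered with `--repo_env` flags; assuming the same convention carries over here, a CUDA-enabled build could look like:

    # HERMETIC_PYTHON_VERSION matches a key of the requirements dict above ("3.9").
    # The repo_env name follows the XLA/TensorFlow hermetic convention and is an
    # assumption for this repository.
    bazel build --config=cuda \
        --repo_env=HERMETIC_PYTHON_VERSION=3.9 \
        //tf_keras/...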

pip_build.py

Lines changed: 2 additions & 0 deletions
@@ -1,5 +1,7 @@
 """Build the TF-Keras pip package.
 
+DEPRECATED: Use the Bazel commands in `wheel/README.md` instead.
+
 The steps are as follows:
 
 0. Run bazel build in TF-Keras root directory to obtain protobuf Python files.
