summaryrefslogtreecommitdiff
path: root/sci-libs
diff options
context:
space:
mode:
authorV3n3RiX <venerix@koprulu.sector>2024-02-03 23:44:18 +0000
committerV3n3RiX <venerix@koprulu.sector>2024-02-03 23:44:18 +0000
commit9becec60cd1423a327b86686a981699c0522cd79 (patch)
tree45888e8316556b1667e06ec2555a87601353747e /sci-libs
parentc44123f79f40edf5fe3d4b0d211d6aa68542abd2 (diff)
gentoo auto-resync : 03:02:2024 - 23:44:17
Diffstat (limited to 'sci-libs')
-rw-r--r--sci-libs/Manifest.gzbin45097 -> 45098 bytes
-rw-r--r--sci-libs/keras/Manifest8
-rw-r--r--sci-libs/keras/files/keras-2.14.0-0001-bazel-Use-system-protobuf.patch330
-rw-r--r--sci-libs/keras/keras-2.13.1.ebuild84
-rw-r--r--sci-libs/keras/keras-2.14.0.ebuild84
-rw-r--r--sci-libs/keras/keras-2.15.0.ebuild84
-rw-r--r--sci-libs/openblas/Manifest2
-rw-r--r--sci-libs/openblas/openblas-0.3.25-r1.ebuild4
-rw-r--r--sci-libs/tensorflow-estimator/Manifest9
-rw-r--r--sci-libs/tensorflow-estimator/files/0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch29
-rw-r--r--sci-libs/tensorflow-estimator/files/0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch703
-rw-r--r--sci-libs/tensorflow-estimator/tensorflow-estimator-2.13.0.ebuild75
-rw-r--r--sci-libs/tensorflow-estimator/tensorflow-estimator-2.14.0.ebuild75
-rw-r--r--sci-libs/tensorflow-estimator/tensorflow-estimator-2.15.0.ebuild80
-rw-r--r--sci-libs/tensorflow/Manifest95
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch38
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch32
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch29
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch35
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch71
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch27
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch28
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch55
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch30
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch352
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch29
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch39
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch37
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch32
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch29
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch35
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch71
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch24
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch28
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch38
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch365
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch2745
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch68
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch37
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch32
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch29
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch35
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch71
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0006-systemlib-Update-targets-for-absl_py.patch24
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0007-systemlib-Add-well_known_types_py_pb2-target.patch28
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0008-Relax-setup.py-version-requirements.patch86
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0009-systemlib-update-targets-for-absl.patch365
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0010-systemlib-fix-missing-osx-in-pybind11.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch25
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0012-installation-remove-cp_local_config_python.patch68
-rw-r--r--sci-libs/tensorflow/files/tensorflow-2.15.0-0013-build-use-non-hermetic-python.patch990
-rw-r--r--sci-libs/tensorflow/tensorflow-2.11.0.ebuild4
-rw-r--r--sci-libs/tensorflow/tensorflow-2.12.0.ebuild5
-rw-r--r--sci-libs/tensorflow/tensorflow-2.13.1.ebuild454
-rw-r--r--sci-libs/tensorflow/tensorflow-2.14.1.ebuild447
-rw-r--r--sci-libs/tensorflow/tensorflow-2.15.0.ebuild464
60 files changed, 9180 insertions, 8 deletions
diff --git a/sci-libs/Manifest.gz b/sci-libs/Manifest.gz
index bde293036bd8..fda2a41407b6 100644
--- a/sci-libs/Manifest.gz
+++ b/sci-libs/Manifest.gz
Binary files differ
diff --git a/sci-libs/keras/Manifest b/sci-libs/keras/Manifest
index a7b9a95eb2ed..9da126ac8b6a 100644
--- a/sci-libs/keras/Manifest
+++ b/sci-libs/keras/Manifest
@@ -1,12 +1,20 @@
AUX keras-2.10.0-0001-bazel-Use-system-protobuf.patch 11106 BLAKE2B 9c302e34a2f9a9ab99fa25bbe693485b646c9121f074a8416d6be4c245b70f324ea800380b2eb09e4305c1e9fda51ba1efb784003b388646242d6b44184df5e9 SHA512 7f808db304838cca4e299727d22662d88a490a39c721ecf80f96a6259bd7a0411f0f83f2564993a699196242489406eb839c8b686d05c716add3d8936fdafa37
AUX keras-2.11.0-0001-bazel-Use-system-protobuf.patch 11215 BLAKE2B cae8bf7de080168854dfc51e5c2f407a5f81f552971137c21af5bd4c0775d40e70342a0dc36c824d7bb3604e8d89fe9d53511adaf61f9e47264f2408a6212849 SHA512 b159b0a9e254209cd89d7f4ad774486582411719ea00dc15755fbd0f5bd618b6d38080e67a90d235469c49b2bc3df0ae05b4edce119802d64507fb16fb447a39
AUX keras-2.12.0-0001-bazel-Use-system-protobuf.patch 11241 BLAKE2B ec71c15c1da687b55d0552c0bdfb18e538a126d603756d276f2b2c094ca2dbcaab76062cdad8337e9b801a67d29e5bcc6da50ec347f0aff1e1f21bddb62963c8 SHA512 4683774ba60423ab35693d2d1821761e489008372bc1605cbfcd5e30f902e02deb8d0393cf8237d0ecfe6a66233ac0d8d380f1df68e9bb763a6e8c01a550df1e
+AUX keras-2.14.0-0001-bazel-Use-system-protobuf.patch 11290 BLAKE2B 5dd3bec2864dd58bf1514b26a6958da8985988ea3e59fc42c4644ecbf38cd836e25178eee98a99a91eb072e314c81b414ee610ead1fe0a54c59bde49f1edfa49 SHA512 5080aacb7be69a1fa4524c955765f8e359bd5bca826cf3d647c92559f1a5874b179f00a9acb0606f6856063d1b19c44898e2ff935e835b832f2901d4edb70a12
+DIST bazelbuild-rules_cc-0.0.2.tar.gz 127138 BLAKE2B 2a4cb12096bd8e6efebb13861b259ea12266372d37ef0d6c6005c4b9a6f919c26e1caa2ac78578aefd80a4f5bf08c1a14e15df5218f5d6b949beacba116b7b33 SHA512 688b5e6ea0e4cc8e5e11fc7aaac70f88c54d2d70221df0772eea1f0ff3ec16e2e92305f76da09c3c497f29e9132870ceaec7ecad034cca678ff4303899b67c14
DIST bazelbuild-rules_cc-b1c40e1de81913a3c40e5948f78719c28152486d.zip 201360 BLAKE2B 0173a8edb17f344b18e42026e2f8a62a1909d4694ab0be83562b6437a3f1ef38889f069269ddd2308b41062e5f375f791be0d2631a8e52ede668203779813b56 SHA512 2ba1dc316d88947c1d17ec67154c052700e3571b8d03370a9f204cf9fe94a1bd58f93a2c4a3c0b02233ee3250053a4c30a4133b5b38745858c1d04be8c9d973f
DIST bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip 9422 BLAKE2B bbc45c97551ee8126335b611ffca6574dac843d4db9b3221b10d26487f8eecfe38ba0cd67a4039a3ad921c25fea6294c43e4f4e7a57a060a30791acd6c4f9d39 SHA512 ba06242feb711e7fb6e821fd9ac029248b4f3e466fb1acf971d0db8196f3efe4d114ef137acbe4913073f8cbe8ccab4d47d8bafa21b867b2d6a6d57f5a647f0c
DIST keras-2.10.0.tar.gz 2250425 BLAKE2B eb05537739cdc81e279b5c400e2e2b77d46f4085e8530fef565bde12414ddccedb44c0b13265106a3353e67c367106e1d341412dc25b32e2221f144a31d76079 SHA512 8682110495a045c2ebc61f31a98d646b88fd9608d5169be78701cf528457ff0d2e39669d885854c14c1e6bebb380b14968957f2a8a34a8a023f8860efc7aeecd
DIST keras-2.11.0.tar.gz 2260857 BLAKE2B 9c85e0f64176848886509fbf269b85732f8ad98e7be3ec186e71870e453454b9b81516ef2ca4e67fb8906bc0ca1241d61ae94db6b58c7483527d33519f7c38fb SHA512 17c773e1f9c592676d56a8b3eed707d52240800b33e85dccccb80f206e8459c8c18485f0bcb6d5775ce1ef0625dc5ebe699574b8599dd98dbb73620da4301968
DIST keras-2.12.0.tar.gz 2319287 BLAKE2B 332b2b02eecac2da7c87c1f051c56d495027630e2e0254d543897e462f7f44a282bff6cf3cbe2602663cae2e2c76ab45c9b3b53cadbc3317b3e2dd366bdf7ace SHA512 10d034abc9b159a0945610dcfb2fc58d8143267629b8f59496b4e5f097b1220bdb02a269f135f05b8abed56c2d5237b3e0c2b74882cd3f321850fc7f47694dbb
+DIST keras-2.13.1.tar.gz 2373567 BLAKE2B 8b8b0c51fa9c0f53baa2306d0ee4361aaeaed272d887e0fb2f4289696a74e2cb45239fb378db70d954346b30124255a16002555002813656607721b3f393d0c2 SHA512 0f0c93f31d555e27e3f4268af7749b4c494a1f698bc763cebc4efe55e8011dbf637b978ca976ed61c46e8b11b8267b1a277d5048c3d3bcbe47005ee8960a1fc3
+DIST keras-2.14.0.tar.gz 2385374 BLAKE2B adca6634e06130d6f238d58e1d94ddd435b30da9b9cc8dd46f40d3ceec98cc6a82c37c3454c438f79b22da1aee223fcae99cf73fe1348383669c5176ee9ef225 SHA512 aeb89c8186410d0a5b36b3293d5e36111755716adde1b89fc33a3cc35f8dbdd297a8f8c7f54c9ca21835bc430ba0886a1eee13febe0dbcc510bfab602a6ddc81
+DIST keras-2.15.0.tar.gz 2389088 BLAKE2B 8cadb992f8e23568a1680d825b382c25d5ec5b82be758289c977a071cb5000429d40738c1462c292f53cda20fbfdc499c025b2fbb1ca9637eb2a8e5789c0a5bf SHA512 2ce001d8dadda2a3df24420a512822204795300215db4496003c1f1a317223f1a8aa6f8b2493a67fbaf9986c2a48e1fd1065828eac6a6f3953bad3db66ed7515
EBUILD keras-2.10.0.ebuild 2239 BLAKE2B 20854f7af4e476b7882e6316a392dbe38d4c584f9ea8a481a8140196371cc393ff3858d859ca1ee14ae9890cbb06c8c204d809652d8b6141c93bff1eb883b898 SHA512 1a98884ed476a27c18f1a105a6887cf5eb6fec46a90fc27e93061cdd85290453ea31a80e7e06239fa0a92b30777d05d171de02bc7f39efec59641e4fd2ce9c21
EBUILD keras-2.11.0.ebuild 2239 BLAKE2B 177a58b268c7f644dba1d8e7b695de60670ea5ad31764a3148230a964e6e5d1bf4da5e69aae7b2b472debd79f774ccf39e15862275230418e52171aee7ab1627 SHA512 fc17969d9a023c5b81da8d46331cf24a0109116ebdb18ebc866097bf599d2a37151be91c634ff5a7bcc3194a51381f39363e5caf198e4e1da7c293f430f5bac6
EBUILD keras-2.12.0.ebuild 2250 BLAKE2B 634a7a7e35109fd656601e6da20fbdd410852a07047ea739c4a739011c34eaab7e316dada88478cf11234a620103f5d790e64095eaa092169f5f6bf9e32d7b84 SHA512 9536dbe2ee0382c56150c7eb238ee1572b718a957ed9ef519f659efe9aa3f2548dab4dc00e61705b6def9f03f6b5b59dd83acad75996b167c45f987d80aea495
+EBUILD keras-2.13.1.ebuild 2236 BLAKE2B ee7aa99005e4bba1deaba2d64374c823fc3ba69ca049d762411e3a39b0d292749d65c6aade6015b9717a04e65bc95f28d08b938378eb3371bfd04f69580060e5 SHA512 55367a2ff35b084372c9011826f78175197aa0efaa5ad65736dd842f82db84556861856c8d5eb0656cc97e01abbb0d05e4b858b08ae9dfebe9265d99105c8660
+EBUILD keras-2.14.0.ebuild 2197 BLAKE2B 02c06d2b328e9436cb97c0aee08cabd99b87f2bd6189e8ebad5d3f9d2a9f7b31ef99586fa541287d7cc0f5ab4cc54e4e4066c2e7726f323f3780c3f77af85e56 SHA512 414bdd2298e565b04187444e28b382a1c19ff3b925e7b12c783ed2c04585b77847c400b0f644dd16bff940360c49b21ac2c2f0c2d47124fcd0515de17624e682
+EBUILD keras-2.15.0.ebuild 2197 BLAKE2B f2bdb35121c18f04ac53afa6442784729226586e85ef16e64721db6f12560fb69a79650af46b88784f5b838d836aa208c6badd31c900eed22bbb6212e16fe530 SHA512 9e7f18ad2cc643b59c5fddcd09f4f79bd66752a6e4f6826659d42ee57af2cdb19c4ce0ecff4562d8f206a43d070ab7eab07b2b994e134a7085283299a459fe14
MISC metadata.xml 500 BLAKE2B 503c82776ec2dd8ceb09ec37e41fb2555c34937d4bf8febb25aaf2faeefb6ae86d67d8e9e04ff47d0b81758fe4710a02ecb0939f727a7af0dbd25bcee50f3e2b SHA512 e2e0edf60e56427234b6a39e4a818735bd691d582eadca23d92debde8f3f759dea2b2a3acc399e1010ffd37d7bb4fa8d468076c0220b653a41668ead58ee5f66
diff --git a/sci-libs/keras/files/keras-2.14.0-0001-bazel-Use-system-protobuf.patch b/sci-libs/keras/files/keras-2.14.0-0001-bazel-Use-system-protobuf.patch
new file mode 100644
index 000000000000..526742490f90
--- /dev/null
+++ b/sci-libs/keras/files/keras-2.14.0-0001-bazel-Use-system-protobuf.patch
@@ -0,0 +1,330 @@
+From a064130a9cb6a2544c96552d43b3ff1b4bab0de4 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 00:35:16 +0800
+Subject: [PATCH] bazel: Use system protobuf
+
+---
+ WORKSPACE | 52 ----------
+ keras/protobuf/BUILD | 2 +-
+ third_party/BUILD | 8 +-
+ third_party/protobuf.bzl | 216 +++++++++++++++++++++++++++++++++++++++
+ 4 files changed, 224 insertions(+), 54 deletions(-)
+ create mode 100644 third_party/protobuf.bzl
+
+diff --git a/WORKSPACE b/WORKSPACE
+index c0ebc4e52..61282aead 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,53 +1 @@
+ workspace(name = "org_keras")
+-
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+-
+-# Needed by protobuf
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+-http_archive(
+- name = "bazel_skylib",
+- urls = [
+- "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- ],
+- sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+-)
+-load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
+-bazel_skylib_workspace()
+-
+-# Needed by protobuf
+-http_archive(
+- name = "six_archive",
+- build_file = "//third_party:six.BUILD",
+- sha256 = "1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
+- strip_prefix = "six-1.16.0",
+- urls = ["https://pypi.python.org/packages/source/s/six/six-1.16.0.tar.gz"],
+-)
+-
+-bind(
+- name = "six",
+- actual = "@six_archive//:six",
+-)
+-
+-http_archive(
+- name = "com_google_protobuf",
+- sha256 = "f66073dee0bc159157b0bd7f502d7d1ee0bc76b3c1eac9836927511bdc4b3fc1",
+- strip_prefix = "protobuf-3.21.9",
+- urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip"],
+-)
+-
+-# ZLIB. Need by com_google_protobuf.
+-http_archive(
+- name = "zlib",
+- build_file = "@com_google_protobuf//:third_party/zlib.BUILD",
+- sha256 = "b3a24de97a8fdbc835b9833169501030b8977031bcb54b3b3ac13740f846ab30",
+- strip_prefix = "zlib-1.2.13",
+- urls = [
+- "https://storage.googleapis.com/mirror.tensorflow.org/zlib.net/zlib-1.2.13.tar.gz",
+- "https://zlib.net/zlib-1.2.13.tar.gz",
+- ],
+-)
+-
+-
+-load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
+-protobuf_deps()
+diff --git a/keras/protobuf/BUILD b/keras/protobuf/BUILD
+index e2f9c1f3b..d50579a7e 100644
+--- a/keras/protobuf/BUILD
++++ b/keras/protobuf/BUILD
+@@ -1,7 +1,7 @@
+ # Description:
+ # Contains Keras protobufs
+
+-load("@com_google_protobuf//:protobuf.bzl", "py_proto_library")
++load("//third_party:protobuf.bzl", "py_proto_library")
+
+ package(
+ # copybara:uncomment default_applicable_licenses = ["//keras:license"],
+diff --git a/third_party/BUILD b/third_party/BUILD
+index 9be54076c..9399232f6 100644
+--- a/third_party/BUILD
++++ b/third_party/BUILD
+@@ -1,4 +1,5 @@
+ package(
++ default_visibility = ["//visibility:public"],
+ licenses = ["notice"], # Apache 2.0
+ )
+
+@@ -7,4 +8,9 @@ exports_files([
+ "six.BUILD",
+ ])
+
+-package(default_visibility = ["//visibility:public"])
++genrule(
++ name = "protoc",
++ outs = ["protoc.bin"],
++ cmd = "ln -s $$(which protoc) $@",
++ executable = 1,
++)
+diff --git a/third_party/protobuf.bzl b/third_party/protobuf.bzl
+new file mode 100644
+index 000000000..3a7ef0a27
+--- /dev/null
++++ b/third_party/protobuf.bzl
+@@ -0,0 +1,216 @@
++def _GetPath(ctx, path):
++ if ctx.label.workspace_root:
++ return ctx.label.workspace_root + "/" + path
++ else:
++ return path
++
++def _IsNewExternal(ctx):
++ # Bazel 0.4.4 and older have genfiles paths that look like:
++ # bazel-out/local-fastbuild/genfiles/external/repo/foo
++ # After the exec root rearrangement, they look like:
++ # ../repo/bazel-out/local-fastbuild/genfiles/foo
++ return ctx.label.workspace_root.startswith("../")
++
++def _GenDir(ctx):
++ if _IsNewExternal(ctx):
++ # We are using the fact that Bazel 0.4.4+ provides repository-relative paths
++ # for ctx.genfiles_dir.
++ return ctx.genfiles_dir.path + (
++ "/" + ctx.attr.includes[0] if ctx.attr.includes and ctx.attr.includes[0] else ""
++ )
++
++ # This means that we're either in the old version OR the new version in the local repo.
++ # Either way, appending the source path to the genfiles dir works.
++ return ctx.var["GENDIR"] + "/" + _SourceDir(ctx)
++
++def _SourceDir(ctx):
++ if not ctx.attr.includes:
++ return ctx.label.workspace_root
++ if not ctx.attr.includes[0]:
++ return _GetPath(ctx, ctx.label.package)
++ if not ctx.label.package:
++ return _GetPath(ctx, ctx.attr.includes[0])
++ return _GetPath(ctx, ctx.label.package + "/" + ctx.attr.includes[0])
++
++def _PyOuts(srcs, use_grpc_plugin = False):
++ ret = [s[:-len(".proto")] + "_pb2.py" for s in srcs]
++ if use_grpc_plugin:
++ ret += [s[:-len(".proto")] + "_pb2_grpc.py" for s in srcs]
++ return ret
++
++def _proto_gen_impl(ctx):
++ """General implementation for generating protos"""
++ srcs = ctx.files.srcs
++ deps = []
++ deps += ctx.files.srcs
++ source_dir = _SourceDir(ctx)
++ gen_dir = _GenDir(ctx)
++ if source_dir:
++ import_flags = ["-I" + source_dir, "-I" + gen_dir]
++ else:
++ import_flags = ["-I."]
++
++ for dep in ctx.attr.deps:
++ import_flags += dep.proto.import_flags
++ deps += dep.proto.deps
++ import_flags = depset(import_flags).to_list()
++ deps = depset(deps).to_list()
++
++ args = []
++ if ctx.attr.gen_cc:
++ args += ["--cpp_out=" + gen_dir]
++ if ctx.attr.gen_py:
++ args += ["--python_out=" + gen_dir]
++
++ inputs = srcs + deps
++ tools = [ctx.executable.protoc]
++ if ctx.executable.plugin:
++ plugin = ctx.executable.plugin
++ lang = ctx.attr.plugin_language
++ if not lang and plugin.basename.startswith("protoc-gen-"):
++ lang = plugin.basename[len("protoc-gen-"):]
++ if not lang:
++ fail("cannot infer the target language of plugin", "plugin_language")
++
++ outdir = gen_dir
++ if ctx.attr.plugin_options:
++ outdir = ",".join(ctx.attr.plugin_options) + ":" + outdir
++ args += ["--plugin=protoc-gen-%s=%s" % (lang, plugin.path)]
++ args += ["--%s_out=%s" % (lang, outdir)]
++ tools.append(plugin)
++
++ if args:
++ ctx.actions.run(
++ inputs = inputs,
++ outputs = ctx.outputs.outs,
++ arguments = args + import_flags + [s.path for s in srcs],
++ executable = ctx.executable.protoc,
++ mnemonic = "ProtoCompile",
++ tools = tools,
++ use_default_shell_env = True,
++ )
++
++ return struct(
++ proto = struct(
++ srcs = srcs,
++ import_flags = import_flags,
++ deps = deps,
++ ),
++ )
++
++proto_gen = rule(
++ attrs = {
++ "srcs": attr.label_list(allow_files = True),
++ "deps": attr.label_list(providers = ["proto"]),
++ "includes": attr.string_list(),
++ "protoc": attr.label(
++ cfg = "host",
++ executable = True,
++ allow_single_file = True,
++ mandatory = True,
++ ),
++ "plugin": attr.label(
++ cfg = "host",
++ allow_files = True,
++ executable = True,
++ ),
++ "plugin_language": attr.string(),
++ "plugin_options": attr.string_list(),
++ "gen_cc": attr.bool(),
++ "gen_py": attr.bool(),
++ "outs": attr.output_list(),
++ },
++ output_to_genfiles = True,
++ implementation = _proto_gen_impl,
++)
++"""Generates codes from Protocol Buffers definitions.
++
++This rule helps you to implement Skylark macros specific to the target
++language. You should prefer more specific `cc_proto_library `,
++`py_proto_library` and others unless you are adding such wrapper macros.
++
++Args:
++ srcs: Protocol Buffers definition files (.proto) to run the protocol compiler
++ against.
++ deps: a list of dependency labels; must be other proto libraries.
++ includes: a list of include paths to .proto files.
++ protoc: the label of the protocol compiler to generate the sources.
++ plugin: the label of the protocol compiler plugin to be passed to the protocol
++ compiler.
++ plugin_language: the language of the generated sources
++ plugin_options: a list of options to be passed to the plugin
++ gen_cc: generates C++ sources in addition to the ones from the plugin.
++ gen_py: generates Python sources in addition to the ones from the plugin.
++ outs: a list of labels of the expected outputs from the protocol compiler.
++"""
++
++def py_proto_library(
++ name,
++ srcs = [],
++ deps = [],
++ py_libs = [],
++ py_extra_srcs = [],
++ include = None,
++ default_runtime = None,
++ protoc = "//third_party:protoc",
++ use_grpc_plugin = False,
++ **kwargs):
++ """Bazel rule to create a Python protobuf library from proto source files
++
++ NOTE: the rule is only an internal workaround to generate protos. The
++ interface may change and the rule may be removed when bazel has introduced
++ the native rule.
++
++ Args:
++ name: the name of the py_proto_library.
++ srcs: the .proto files of the py_proto_library.
++ deps: a list of dependency labels; must be py_proto_library.
++ py_libs: a list of other py_library targets depended by the generated
++ py_library.
++ py_extra_srcs: extra source files that will be added to the output
++ py_library. This attribute is used for internal bootstrapping.
++ include: a string indicating the include path of the .proto files.
++ default_runtime: the implicitly default runtime which will be depended on by
++ the generated py_library target.
++ protoc: the label of the protocol compiler to generate the sources.
++ use_grpc_plugin: a flag to indicate whether to call the Python C++ plugin
++ when processing the proto files.
++ **kwargs: other keyword arguments that are passed to py_library.
++
++ """
++ outs = _PyOuts(srcs, use_grpc_plugin)
++
++ includes = []
++ if include != None:
++ includes = [include]
++
++ grpc_python_plugin = None
++ if use_grpc_plugin:
++ grpc_python_plugin = "//external:grpc_python_plugin"
++ # Note: Generated grpc code depends on Python grpc module. This dependency
++ # is not explicitly listed in py_libs. Instead, host system is assumed to
++ # have grpc installed.
++
++ proto_gen(
++ name = name + "_genproto",
++ srcs = srcs,
++ deps = [s + "_genproto" for s in deps],
++ includes = includes,
++ protoc = protoc,
++ gen_py = 1,
++ outs = outs,
++ visibility = ["//visibility:public"],
++ plugin = grpc_python_plugin,
++ plugin_language = "grpc",
++ )
++
++ if default_runtime and not default_runtime in py_libs + deps:
++ py_libs = py_libs + [default_runtime]
++
++ native.py_library(
++ name = name,
++ srcs = outs + py_extra_srcs,
++ deps = py_libs + deps,
++ imports = includes,
++ **kwargs
++ )
+--
+2.41.0
+
diff --git a/sci-libs/keras/keras-2.13.1.ebuild b/sci-libs/keras/keras-2.13.1.ebuild
new file mode 100644
index 000000000000..f6a875c9e4ab
--- /dev/null
+++ b/sci-libs/keras/keras-2.13.1.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+inherit bazel distutils-r1
+
+DESCRIPTION="Deep Learning for humans"
+HOMEPAGE="https://keras.io/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/archive/b1c40e1de81913a3c40e5948f78719c28152486d.zip -> bazelbuild-rules_cc-b1c40e1de81913a3c40e5948f78719c28152486d.zip
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/keras-team/${PN}/archive/v${PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ >=dev-libs/protobuf-3.13.0:=
+ dev-python/absl-py[${PYTHON_USEDEP}]
+ dev-python/h5py[${PYTHON_USEDEP}]
+ dev-python/numpy[${PYTHON_USEDEP}]
+ dev-python/pandas[${PYTHON_USEDEP}]
+ dev-python/pillow[${PYTHON_USEDEP}]
+ >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+ dev-python/six[${PYTHON_USEDEP}]
+ >=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
+ >=sci-libs/keras-preprocessing-1.1.2[${PYTHON_USEDEP}]
+ >=sci-libs/tensorflow-2.13[python,${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}
+ dev-python/setuptools[${PYTHON_USEDEP}]"
+BDEPEND="
+ app-arch/unzip
+ >=dev-libs/protobuf-3.13.0
+ dev-java/java-config
+ >=dev-build/bazel-5.3.0"
+
+# Bazel tests not pytest, also want GPU access
+RESTRICT="test"
+DOCS=( CONTRIBUTING.md README.md )
+PATCHES=(
+ "${FILESDIR}/keras-2.12.0-0001-bazel-Use-system-protobuf.patch"
+)
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //keras/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/keras/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd || die
+}
diff --git a/sci-libs/keras/keras-2.14.0.ebuild b/sci-libs/keras/keras-2.14.0.ebuild
new file mode 100644
index 000000000000..10bc1361ae4e
--- /dev/null
+++ b/sci-libs/keras/keras-2.14.0.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+inherit bazel distutils-r1
+
+DESCRIPTION="Deep Learning for humans"
+HOMEPAGE="https://keras.io/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/releases/download/0.0.2/rules_cc-0.0.2.tar.gz -> bazelbuild-rules_cc-0.0.2.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/keras-team/${PN}/archive/v${PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ >=dev-libs/protobuf-3.13.0:=
+ dev-python/absl-py[${PYTHON_USEDEP}]
+ dev-python/h5py[${PYTHON_USEDEP}]
+ dev-python/numpy[${PYTHON_USEDEP}]
+ dev-python/pandas[${PYTHON_USEDEP}]
+ dev-python/pillow[${PYTHON_USEDEP}]
+ >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+ dev-python/six[${PYTHON_USEDEP}]
+ >=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
+ >=sci-libs/keras-preprocessing-1.1.2[${PYTHON_USEDEP}]
+ >=sci-libs/tensorflow-2.14[python,${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}
+ dev-python/setuptools[${PYTHON_USEDEP}]"
+BDEPEND="
+ app-arch/unzip
+ >=dev-libs/protobuf-3.13.0
+ dev-java/java-config
+ >=dev-build/bazel-5.3.0"
+
+# Bazel tests not pytest, also want GPU access
+RESTRICT="test"
+DOCS=( CONTRIBUTING.md README.md )
+PATCHES=(
+ "${FILESDIR}/keras-2.14.0-0001-bazel-Use-system-protobuf.patch"
+)
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //keras/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/keras/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd || die
+}
diff --git a/sci-libs/keras/keras-2.15.0.ebuild b/sci-libs/keras/keras-2.15.0.ebuild
new file mode 100644
index 000000000000..c67472647c1b
--- /dev/null
+++ b/sci-libs/keras/keras-2.15.0.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+inherit bazel distutils-r1
+
+DESCRIPTION="Deep Learning for humans"
+HOMEPAGE="https://keras.io/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/releases/download/0.0.2/rules_cc-0.0.2.tar.gz -> bazelbuild-rules_cc-0.0.2.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/keras-team/${PN}/archive/v${PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ >=dev-libs/protobuf-3.13.0:=
+ dev-python/absl-py[${PYTHON_USEDEP}]
+ dev-python/h5py[${PYTHON_USEDEP}]
+ dev-python/numpy[${PYTHON_USEDEP}]
+ dev-python/pandas[${PYTHON_USEDEP}]
+ dev-python/pillow[${PYTHON_USEDEP}]
+ >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+ dev-python/six[${PYTHON_USEDEP}]
+ >=sci-libs/keras-applications-1.0.8[${PYTHON_USEDEP}]
+ >=sci-libs/keras-preprocessing-1.1.2[${PYTHON_USEDEP}]
+ >=sci-libs/tensorflow-2.15[python,${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}
+ dev-python/setuptools[${PYTHON_USEDEP}]"
+BDEPEND="
+ app-arch/unzip
+ >=dev-libs/protobuf-3.13.0
+ dev-java/java-config
+ >=dev-build/bazel-5.3.0"
+
+# Bazel tests not pytest, also want GPU access
+RESTRICT="test"
+DOCS=( CONTRIBUTING.md README.md )
+PATCHES=(
+ "${FILESDIR}/keras-2.14.0-0001-bazel-Use-system-protobuf.patch"
+)
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //keras/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/keras/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd || die
+}
diff --git a/sci-libs/openblas/Manifest b/sci-libs/openblas/Manifest
index 4149171b7290..1cca24d01c68 100644
--- a/sci-libs/openblas/Manifest
+++ b/sci-libs/openblas/Manifest
@@ -9,6 +9,6 @@ DIST openblas-0.3.21.tar.gz 23729571 BLAKE2B 678bfb13ead69045e7d4d840bbcaed884d9
DIST openblas-0.3.23.tar.gz 23950794 BLAKE2B 8394e09c682bba10450248cbbe090bb81c26661eac7d238942055ae0eecce752c8a3481af905de9efe20fe635f2c4a0e06af83bb64c998ab410e4cdc56751a47 SHA512 ea64c693e57eb63cc2a88e4d6ab2d8cf9ab84ae6a15048fb12090a3570dd41053e62e99c1ff9d3e02dd67ca93233591ab41b8c017d06585d0a69222e1ad3023f
EBUILD openblas-0.3.21-r1.ebuild 5639 BLAKE2B 9f42f3af66c8d65d208cff75c03df80df9f9337c55a8c52557e9c6fecbc732cf10a47b36d341bafc0960550fbdc989f263d2aa07a04d7cc1911682ec3953c1b4 SHA512 4a663034ad381724dcaaaabe292d5f5c527c0d66c10f8b7f674ec6d66976d75f072299a90162e04379e6426ca0b4a3967b8ca253e0f125053911b37325f2ad9f
EBUILD openblas-0.3.23.ebuild 5763 BLAKE2B 219de2d1aee0946e4d075287c14ea5910b43f1ac3d012a1289facbd524cd96bfb9e2ab9df218c284e3c148c49e87f7fd4dd689c938f659a961375f57d9105139 SHA512 e14516dee85e9d9b91c33532d6218e67c277cd2a4c24f659a66ce04f55f3f73cddf0c875f622da3a79d69113794417942d05149d56488ef528eb35ed857ac91b
-EBUILD openblas-0.3.25-r1.ebuild 5999 BLAKE2B d41a6ec4c8b0e934fe2b7b855e9ea62403aa2aa39497532d2205bf35c27885c1f1b8c9f8ffa1aebdf2d82923682dd8a83e018edc0998a4e3f11fa41e71bf6929 SHA512 8d6c08dfc365f934511af59edab65132cba84ae8014728cee793b217de38cb07db2b048e527a6b645eccbbe7134c187f9cc9afe79c0023ab7d8db670d59ece4e
+EBUILD openblas-0.3.25-r1.ebuild 5998 BLAKE2B b34389f4c292ab5ac1506616c31d0be394e256206c2db8ded4574626a0298a5ee0d0d745c96fd9c68d063543b7be51e9a980f6a2f4a73c6c9c23f83f01770de5 SHA512 d0fb3979242ba04ccbf437a97e77ef4780d57b102c92c1596e6455e13581fc03c5bdc2b03a25a01385ef0cdbb9ef01cd3e4cd30215a8237fa440f6be8fd483e9
EBUILD openblas-0.3.26.ebuild 5999 BLAKE2B cc659bbc1ba9856927e05dd331848a0c58214d513ad49cf168a52498c0ec81fe305e8e3e681d70e2a4da577f16ccee1cdbd2e1eba0cb7757d4c1018190373f69 SHA512 efa071a6469fa258c200c2548bd6c3b0cd6842898c7ac40d1b0dff36ee8d6165b83a4abbafa4b887ae30d3aa7a002516c2f647f38bbd98c26ee6f0b1ae58db6a
MISC metadata.xml 1561 BLAKE2B 5ba7ec66dd8ba7ca8bc29bc0193b48b6a82b4949bb79bab461400040667aa66aac3cfd2ad9c7323798048fd43d18ac96a562d53b68f3f7ae8bf8505ac7a04691 SHA512 05272eb56480ff82b240d0f430df3df03981e24b971dbd37c13d464f4f4b334bac19b54915446a09326a334aa76249bb79a4fa389d26150365f21389992f9a48
diff --git a/sci-libs/openblas/openblas-0.3.25-r1.ebuild b/sci-libs/openblas/openblas-0.3.25-r1.ebuild
index c32ecf3ad4d7..da08897df4c6 100644
--- a/sci-libs/openblas/openblas-0.3.25-r1.ebuild
+++ b/sci-libs/openblas/openblas-0.3.25-r1.ebuild
@@ -1,4 +1,4 @@
-# Copyright 1999-2023 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
EAPI=8
@@ -13,7 +13,7 @@ S="${WORKDIR}"/${MY_P}
LICENSE="BSD"
SLOT="0"
-KEYWORDS="~amd64 ~arm ~arm64 ~loong ~riscv ~x86 ~amd64-linux ~x86-linux ~x64-macos"
+KEYWORDS="amd64 ~arm ~arm64 ~loong ~riscv ~x86 ~amd64-linux ~x86-linux ~x64-macos"
IUSE="dynamic eselect-ldso index-64bit openmp pthread relapack test"
REQUIRED_USE="?? ( openmp pthread )"
RESTRICT="!test? ( test )"
diff --git a/sci-libs/tensorflow-estimator/Manifest b/sci-libs/tensorflow-estimator/Manifest
index a1485c9e1d08..387d090afc85 100644
--- a/sci-libs/tensorflow-estimator/Manifest
+++ b/sci-libs/tensorflow-estimator/Manifest
@@ -1,8 +1,17 @@
+AUX 0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch 1449 BLAKE2B a7ead50569e8dd70c98e11abe7203520fa6c55195b75576c2efbe68c43435d429b60173f3c05b2e8885f7931a770c7de042da70a7df1b075f02def366c08af5b SHA512 ae8961d088954a2ad4db3996f67b578ddf11a6bda43ae596b22b156fea05a9bcda0d048aa4eee45fd8ea2e5739ca753eef7afe04020370fe6093e4cd95aa3d64
AUX 0001-Update-setup.py-for-2.11.0-final-release.patch 946 BLAKE2B ac6417e5bc0953ce69a148cfc1d56cbfe46fc2752287dc595457406def376f8cbe5379c0d2e2eb6092a15729c05c8acafa0435c6c82cea8771292953283d1eda SHA512 87cbf6c462c7209cd766c298f7ae869dc89e1b0de56da42ec4ce7e7da2d131683e15abce6be96482c8501116e3a21a4a34b1324d43c6a382956dce8ad705b579
+AUX 0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch 28494 BLAKE2B e1cb1cef771a81fd28375a8af02bfce9bf0109ff38666688a117cf6a095799c82e2b9a88dd2d42a811713778904165323a9b27950dbc0a29dca9597a4dcdb382 SHA512 63f74cb59a00f91e083bc0fc53c7646cf1f3943beaa7636a414c793a793a4bd156ce891941709706b69d91dff9532f2e72e1edd9457f69e6d2dad8380fab4772
+DIST bazelbuild-rules_cc-0.0.2.tar.gz 127138 BLAKE2B 2a4cb12096bd8e6efebb13861b259ea12266372d37ef0d6c6005c4b9a6f919c26e1caa2ac78578aefd80a4f5bf08c1a14e15df5218f5d6b949beacba116b7b33 SHA512 688b5e6ea0e4cc8e5e11fc7aaac70f88c54d2d70221df0772eea1f0ff3ec16e2e92305f76da09c3c497f29e9132870ceaec7ecad034cca678ff4303899b67c14
DIST bazelbuild-rules_cc-b1c40e1de81913a3c40e5948f78719c28152486d.zip 201360 BLAKE2B 0173a8edb17f344b18e42026e2f8a62a1909d4694ab0be83562b6437a3f1ef38889f069269ddd2308b41062e5f375f791be0d2631a8e52ede668203779813b56 SHA512 2ba1dc316d88947c1d17ec67154c052700e3571b8d03370a9f204cf9fe94a1bd58f93a2c4a3c0b02233ee3250053a4c30a4133b5b38745858c1d04be8c9d973f
DIST bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip 9422 BLAKE2B bbc45c97551ee8126335b611ffca6574dac843d4db9b3221b10d26487f8eecfe38ba0cd67a4039a3ad921c25fea6294c43e4f4e7a57a060a30791acd6c4f9d39 SHA512 ba06242feb711e7fb6e821fd9ac029248b4f3e466fb1acf971d0db8196f3efe4d114ef137acbe4913073f8cbe8ccab4d47d8bafa21b867b2d6a6d57f5a647f0c
DIST tensorflow-estimator-2.11.0-rc0.tar.gz 655514 BLAKE2B cc73f282a32f657bf507d948210aa981f0867c4898574c61fb5654204fa573ac7bdef16635d202ed3d202ccc9c8915f5613e4149ce6839d59281a38cfd6953f7 SHA512 c473a6db62a0a55f25f8fd5fd7ffb2f2a473261c5a7b3217bd1d8a21a02ab4871793ea57917c6eb9dc9012bd030faf8030702caf3f764983ce526d201866204f
DIST tensorflow-estimator-2.12.0.tar.gz 656763 BLAKE2B a67c81487b7765f3195f7567b66455a4be390bff8d6dec751d054a26b654b1aac96982f6fa55d0488a8e6414e7d4b58d1ae59680a1c694615f82718c77478872 SHA512 d86bfde67c97988beeb5abc8042c1847c6d0081d54e21e35424d463895e95a5d0adc74d7d2bb006a617748da3b9a8bbb8979d6a842f024ab3a4d08a2e806147b
+DIST tensorflow-estimator-2.13.0.tar.gz 656953 BLAKE2B 2aeae25089990914018cd713aba1289195595e67b85cc5d66b1a348b9d65c29854768118989aa0d5540eb5d70bb32ac1699584f0191460f5f23f6f8b3f9d7204 SHA512 c445a43d497c575fb6e2f7a26d5346b2db8712540f807e0bc64759db056468ada909371b29b828470c7763156570c25917157dbe0d61e9f148c3027c29ebd181
+DIST tensorflow-estimator-2.14.0.tar.gz 656933 BLAKE2B 4506b4149824f505b7f42c5c6a73daaa1d5b46201141e11e0775baa7524e0a7886cee5079bdb97af6a880d2f6dbfa276b9e484d06406f84f254b2541b2f80c5b SHA512 a2ac534acba806177400c18968a3d5384a30e4fd431eb3b9f4e1542220b1e382e9272be9909f14d6984d50591b155be5eef7051620a9133ffd199bd1e6fe4582
+DIST tensorflow-estimator-2.15.0.tar.gz 658541 BLAKE2B 519ccab7f77a3074009b3b9d24f18267abece981dc33a7716cd916b388a4306183221ec12ddb3e32ac41cbe30d936dc504d931a612b71d35093d27def5d391e8 SHA512 129fe36ee63932b324d8ee77f917d4ded99be2e645e0884bd920bebf703b63889ed72b76b14fcc3aa8f9cfd0b465428d67067fcfc43f2c761a101ea56d574d04
EBUILD tensorflow-estimator-2.11.0.ebuild 1998 BLAKE2B 4bb8eef78da79925c66c0be30a237a8e9a78a46a4113a750c886545e9bd501f3ac69a6b5cd23b7342ccad5cfc9caff75ff0633a36d247877d4883e760dfee4f5 SHA512 bd87ed367d860c73ec30e91e8ab4834a331670e83ef07e2e5fec5d1db020a55edcb1371139c730efae59ce40b19c6a77ff1de96cc510cb0eb4e77b6458984037
EBUILD tensorflow-estimator-2.12.0.ebuild 1865 BLAKE2B 9696193b35bc0164f5507f76867cfb0e1c565db6e10c6b5110ccc819a9543ff8bfbfbf816a58525c9cf01c51263115271caccdf5884b1dcdf4b296be8932f744 SHA512 d4294df14ea176a9fd727fb047c554fb11c5e57f9d174395333f7b92a8ed9a4ee2e161fe324bf2339eb997af2ac6b97cad712844dec848a3541e88b958035162
+EBUILD tensorflow-estimator-2.13.0.ebuild 1859 BLAKE2B 6fca75791dd7fbf12cd09eb528d3fe8f4901fe4ae0472e5cd4aa1e2f7995b2d4ae8f603be7ff8319a180798987df2ca072f7c9b55e21b96eff6d96f5611ae9d1 SHA512 5dca6d03c6a1f82b103519b4407e51162d422be1089749cc9013c3131b31bba23791452ff5d6c882568565c1502ab75fcd4b4c5c0eb46ca9edb2e98d1fe3db0a
+EBUILD tensorflow-estimator-2.14.0.ebuild 1820 BLAKE2B 115a47dc7ba8b347edf7b82c02759c1184417eacae2414dcec10810fcb0efc4bb84cb922538ab92ffd6d092a46ab2b278e016506f2c10e94e8930052a186109d SHA512 ab852d156cc5d1a947bd96bc98b1cf2f56530037943625f8da5decab535fee4ae1d6b034c15d5fee45c4c6e9c4a16c1a193774f6a2356c8e0294646c7936dcb3
+EBUILD tensorflow-estimator-2.15.0.ebuild 1991 BLAKE2B f94c45271ea2b13529d2daf2f81722a0cd69326c6005039f5eb080812982fe74523704863e191377780069f4b3c9d267337b9343d911d14ae05d2676b97961b7 SHA512 3ef8ea27ffb9bacf6a56ea0a2adf76c7ecd7474f4a89eac85fb0aa4ef06dee5a9f5b2ac579255f2c0ebb452fcc3fc84d81bfaea474d3593d2c3493782173a55a
MISC metadata.xml 581 BLAKE2B f3c4be0d25d3337871d9fc5f0d2e1e420fd68f45c2a7fbabaab0557f2f323661d4dd1ab847778ccc5a06bfe86a6fcf77bc4f65a04baa8ed569fd868ca8a03009 SHA512 41e7c5f2bce2166ed98259fa86fcf92cb3e2e186e7387a1cc75d630313791fc83e297288ce52ad25655292eb14b28a651dec3cb6b3101c75c327f447b15d6293
diff --git a/sci-libs/tensorflow-estimator/files/0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch b/sci-libs/tensorflow-estimator/files/0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch
new file mode 100644
index 000000000000..cc44a9592d96
--- /dev/null
+++ b/sci-libs/tensorflow-estimator/files/0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch
@@ -0,0 +1,29 @@
+From f6d965a9f81b3062ca2f9194bcfa52ee4eeec928 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sat, 23 Dec 2023 10:06:57 +0800
+Subject: [PATCH 1/2] Revert "Exclude `extractor_wrapper` and
+ `generator_wrapper` from the pip package, and remove the exclusion for the
+ deleted `create_python_api_wrapper`."
+
+This reverts commit bd50049c3905fd8c9ad058d2e09682b45b9b9b02.
+---
+ tensorflow_estimator/tools/pip_package/create_pip_helper.py | 3 +--
+ 1 file changed, 1 insertion(+), 2 deletions(-)
+
+diff --git a/tensorflow_estimator/tools/pip_package/create_pip_helper.py b/tensorflow_estimator/tools/pip_package/create_pip_helper.py
+index 4810594..e1b1f27 100644
+--- a/tensorflow_estimator/tools/pip_package/create_pip_helper.py
++++ b/tensorflow_estimator/tools/pip_package/create_pip_helper.py
+@@ -35,8 +35,7 @@ PIP_EXCLUDED_FILES = frozenset([
+ 'tensorflow_estimator/python/estimator/canned/v1/baseline_test_v1.py',
+ 'tensorflow_estimator/python/estimator/canned/v1/dnn_test_fc_v1_v1.py',
+ 'tensorflow_estimator/python/estimator/canned/v1/dnn_test_fc_v2_v1.py',
+- 'tensorflow_estimator/python/estimator/api/extractor_wrapper.py',
+- 'tensorflow_estimator/python/estimator/api/generator_wrapper.py',
++ 'tensorflow_estimator/python/estimator/api/create_python_api_wrapper.py',
+ 'tensorflow_estimator/tools/pip_package/setup.py',
+ 'tensorflow_estimator/tools/pip_package/create_pip_helper.py',
+ ])
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow-estimator/files/0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch b/sci-libs/tensorflow-estimator/files/0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch
new file mode 100644
index 000000000000..d7167adb8488
--- /dev/null
+++ b/sci-libs/tensorflow-estimator/files/0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch
@@ -0,0 +1,703 @@
+From 38076a1b48096ef7b4f1e5005fdf9e14425d56a8 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sat, 23 Dec 2023 10:07:02 +0800
+Subject: [PATCH 2/2] Revert "Update TF Estimator to use new TF API Generator"
+
+This reverts commit f7653f9adf978acb3bd459e6ec779a659f1f9a2a.
+---
+ tensorflow_estimator/BUILD | 14 +-
+ .../python/estimator/api/BUILD | 48 +-
+ .../python/estimator/api/api_gen.bzl | 422 ++++--------------
+ .../api/create_python_api_wrapper.py | 30 ++
+ .../python/estimator/api/extractor_wrapper.py | 21 -
+ .../python/estimator/api/generator_wrapper.py | 20 -
+ 6 files changed, 155 insertions(+), 400 deletions(-)
+ create mode 100644 tensorflow_estimator/python/estimator/api/create_python_api_wrapper.py
+ delete mode 100644 tensorflow_estimator/python/estimator/api/extractor_wrapper.py
+ delete mode 100644 tensorflow_estimator/python/estimator/api/generator_wrapper.py
+
+diff --git a/tensorflow_estimator/BUILD b/tensorflow_estimator/BUILD
+index 680cc4e..037c7a9 100644
+--- a/tensorflow_estimator/BUILD
++++ b/tensorflow_estimator/BUILD
+@@ -5,7 +5,7 @@ load(
+ "//tensorflow_estimator/python/estimator/api:api_gen.bzl",
+ "ESTIMATOR_API_INIT_FILES_V1",
+ "ESTIMATOR_API_INIT_FILES_V2",
+- "generate_apis",
++ "gen_api_init_files",
+ )
+
+ licenses(["notice"])
+@@ -67,17 +67,17 @@ py_library(
+ genrule(
+ name = "root_init_gen",
+ srcs = select({
+- "api_version_2": ["_api/v2/v2.py"],
+- "//conditions:default": ["_api/v1/v1.py"],
++ "api_version_2": [":estimator_python_api_gen_compat_v2"],
++ "//conditions:default": [":estimator_python_api_gen_compat_v1"],
+ }),
+ outs = ["__init__.py"],
+ cmd = select({
+- "api_version_2": "cp $(location :_api/v2/v2.py) $(OUTS)",
+- "//conditions:default": "cp $(location :_api/v1/v1.py) $(OUTS)",
++ "api_version_2": "cp $(@D)/_api/v2/v2.py $(OUTS)",
++ "//conditions:default": "cp $(@D)/_api/v1/v1.py $(OUTS)",
+ }),
+ )
+
+-generate_apis(
++gen_api_init_files(
+ name = "estimator_python_api_gen_compat_v1",
+ api_version = 1,
+ output_dir = "_api/v1/",
+@@ -86,7 +86,7 @@ generate_apis(
+ root_file_name = "v1.py",
+ )
+
+-generate_apis(
++gen_api_init_files(
+ name = "estimator_python_api_gen_compat_v2",
+ api_version = 2,
+ output_dir = "_api/v2/",
+diff --git a/tensorflow_estimator/python/estimator/api/BUILD b/tensorflow_estimator/python/estimator/api/BUILD
+index 01dce90..96ac567 100644
+--- a/tensorflow_estimator/python/estimator/api/BUILD
++++ b/tensorflow_estimator/python/estimator/api/BUILD
+@@ -1,10 +1,17 @@
+-# Placeholder: load aliased py_binary
+-load("//tensorflow_estimator/python/estimator/api:api_gen.bzl", "ESTIMATOR_API_INIT_FILES_V1", "ESTIMATOR_API_INIT_FILES_V2", "generate_apis")
+-
+ package(default_visibility = ["//tensorflow_estimator:internal"])
+
+ licenses(["notice"])
+
++load("//tensorflow_estimator/python/estimator/api:api_gen.bzl", "gen_api_init_files")
++load("//tensorflow_estimator/python/estimator/api:api_gen.bzl", "ESTIMATOR_API_INIT_FILES_V1")
++load("//tensorflow_estimator/python/estimator/api:api_gen.bzl", "ESTIMATOR_API_INIT_FILES_V2")
++
++exports_files(
++ [
++ "create_python_api_wrapper.py",
++ ],
++)
++
+ # This flag specifies whether Estimator 2.0 API should be built instead
+ # of 1.* API. Note that Estimator 2.0 API is currently under development.
+ config_setting(
+@@ -12,53 +19,36 @@ config_setting(
+ define_values = {"estimator_api_version": "2"},
+ )
+
+-py_binary(
+- name = "extractor_wrapper",
+- srcs = ["extractor_wrapper.py"],
+- visibility = ["//visibility:public"],
+- deps = [
+- "//tensorflow_estimator/python/estimator:expect_absl_installed", # absl:app
+- ],
+-)
+-
+-py_binary(
+- name = "generator_wrapper",
+- srcs = ["generator_wrapper.py"],
+- visibility = ["//visibility:public"],
+- deps = [
+- "//tensorflow_estimator/python/estimator:expect_absl_installed", # absl:app
+- ],
+-)
+-
+ genrule(
+ name = "estimator_python_api_gen",
+ srcs = select({
+- "api_version_2": ["_v2/v2.py"],
+- "//conditions:default": ["_v1/v1.py"],
++ "api_version_2": [":estimator_python_api_gen_compat_v2"],
++ "//conditions:default": [":estimator_python_api_gen_compat_v1"],
+ }),
+ outs = ["__init__.py"],
+ cmd = select({
+- "api_version_2": "cp $(location :_v2/v2.py) $(OUTS)",
+- "//conditions:default": "cp $(location :_v1/v1.py) $(OUTS)",
++ # Copy the right init file and replace 'from . import'
++ # with 'from ._vN import'.
++ "api_version_2": "cp $(@D)/_v2/v2.py $(OUTS) && sed -i'.original' 's/from . import/from ._v2 import/g' $(OUTS)",
++ "//conditions:default": "cp $(@D)/_v1/v1.py $(OUTS) && sed -i'.original' 's/from . import/from ._v1 import/g' $(OUTS)",
+ }),
++ visibility = ["//visibility:public"],
+ )
+
+-generate_apis(
++gen_api_init_files(
+ name = "estimator_python_api_gen_compat_v1",
+ api_version = 1,
+ output_dir = "_v1/",
+ output_files = ESTIMATOR_API_INIT_FILES_V1,
+ output_package = "tensorflow_estimator.python.estimator.api._v1",
+ root_file_name = "v1.py",
+- visibility = ["//visibility:public"],
+ )
+
+-generate_apis(
++gen_api_init_files(
+ name = "estimator_python_api_gen_compat_v2",
+ api_version = 2,
+ output_dir = "_v2/",
+ output_files = ESTIMATOR_API_INIT_FILES_V2,
+ output_package = "tensorflow_estimator.python.estimator.api._v2",
+ root_file_name = "v2.py",
+- visibility = ["//visibility:public"],
+ )
+diff --git a/tensorflow_estimator/python/estimator/api/api_gen.bzl b/tensorflow_estimator/python/estimator/api/api_gen.bzl
+index b8eaf84..87dd65b 100644
+--- a/tensorflow_estimator/python/estimator/api/api_gen.bzl
++++ b/tensorflow_estimator/python/estimator/api/api_gen.bzl
+@@ -1,7 +1,7 @@
+-"""Targets for generating TensorFlow Estimator Python API __init__.py files.
++"""Targets for generating TensorFlow Python API __init__.py files.
+
+ This bzl file is copied with slight modifications from
+-tensorflow/python/tools/api/generator2/generate_api.bzl
++tensorflow/python/estimator/api/api_gen.bzl
+ so that we can avoid needing to depend on TF source code in Bazel build.
+
+ It should be noted that because this file is executed during the build,
+@@ -10,15 +10,7 @@ is required to Bazel build Estimator.
+ """
+
+ load("//tensorflow_estimator:estimator.bzl", "if_indexing_source_code")
+-
+-_TARGET_PATTERNS = [
+- "//tensorflow_estimator:",
+- "//tensorflow_estimator/",
+-]
+-
+-_DECORATOR = "tensorflow_estimator.python.estimator.estimator_export.estimator_export"
+-
+-_MODULE_PREFIX = ""
++# Placeholder: load aliased py_binary
+
+ ESTIMATOR_API_INIT_FILES_V1 = [
+ "__init__.py",
+@@ -38,332 +30,116 @@ ESTIMATOR_API_INIT_FILES_V2 = [
+ "estimator/inputs/__init__.py",
+ ]
+
+-def _any_match(label):
+- full_target = "//" + label.package + ":" + label.name
+- for pattern in _TARGET_PATTERNS:
+- if pattern in full_target:
+- return True
+- return False
+-
+-def _join(path, *others):
+- result = path
+-
+- for p in others:
+- if not result or result.endswith("/"):
+- result += p
+- else:
+- result += "/" + p
+-
+- return result
+-
+-def _api_info_init(*, transitive_api):
+- if type(transitive_api) != type(depset()):
+- fail("ApiInfo.transitive_api must be a depset")
+- return {"transitive_api": transitive_api}
+-
+-ApiInfo, _new_api_info = provider(
+- doc = "Provider for API symbols and docstrings extracted from Python files.",
+- fields = {
+- "transitive_api": "depset of files with extracted API.",
+- },
+- init = _api_info_init,
+-)
+-
+-def _py_files(f):
+- if f.basename.endswith(".py") or f.basename.endswith(".py3"):
+- return f.path
+- return None
+-
+-def _merge_py_info(
+- deps,
+- direct_sources = None,
+- direct_imports = None,
+- has_py2_only_sources = False,
+- has_py3_only_sources = False,
+- uses_shared_libraries = False):
+- transitive_sources = []
+- transitive_imports = []
+- for dep in deps:
+- if PyInfo in dep:
+- transitive_sources.append(dep[PyInfo].transitive_sources)
+- transitive_imports.append(dep[PyInfo].imports)
+- has_py2_only_sources = has_py2_only_sources or dep[PyInfo].has_py2_only_sources
+- has_py3_only_sources = has_py3_only_sources or dep[PyInfo].has_py3_only_sources
+- uses_shared_libraries = uses_shared_libraries or dep[PyInfo].uses_shared_libraries
+-
+- return PyInfo(
+- transitive_sources = depset(direct = direct_sources, transitive = transitive_sources),
+- imports = depset(direct = direct_imports, transitive = transitive_imports),
+- has_py2_only_sources = has_py2_only_sources,
+- has_py3_only_sources = has_py3_only_sources,
+- uses_shared_libraries = uses_shared_libraries,
+- )
+-
+-def _merge_api_info(
+- deps,
+- direct_api = None):
+- transitive_api = []
+- for dep in deps:
+- if ApiInfo in dep:
+- transitive_api.append(dep[ApiInfo].transitive_api)
+- return ApiInfo(transitive_api = depset(direct = direct_api, transitive = transitive_api))
+-
+-def _api_extractor_impl(target, ctx):
+- direct_api = []
+-
+- # Make sure the rule has a non-empty srcs attribute.
+- if (
+- _any_match(target.label) and
+- hasattr(ctx.rule.attr, "srcs") and
+- ctx.rule.attr.srcs
+- ):
+- output = ctx.actions.declare_file("_".join([
+- target.label.name,
+- "extracted_tensorflow_estimator_api.json",
+- ]))
+-
+- args = ctx.actions.args()
+- args.set_param_file_format("multiline")
+- args.use_param_file("--flagfile=%s")
+-
+- args.add("--output", output)
+- args.add("--decorator", _DECORATOR)
+- args.add("--api_name", "tensorflow_estimator")
+- args.add_all(ctx.rule.files.srcs, expand_directories = True, map_each = _py_files)
+-
+- ctx.actions.run(
+- mnemonic = "ExtractAPI",
+- executable = ctx.executable._extractor_bin,
+- inputs = ctx.rule.files.srcs,
+- outputs = [output],
+- arguments = [args],
+- progress_message = "Extracting tensorflow_estimator APIs for %{label} to %{output}.",
+- )
+-
+- direct_api.append(output)
+-
+- return [
+- _merge_api_info(ctx.rule.attr.deps if hasattr(ctx.rule.attr, "deps") else [], direct_api = direct_api),
+- ]
+-
+-api_extractor = aspect(
+- doc = "Extracts the exported API for the given target and its dependencies.",
+- implementation = _api_extractor_impl,
+- attr_aspects = ["deps"],
+- provides = [ApiInfo],
+- # Currently the Python rules do not correctly advertise their providers.
+- # required_providers = [PyInfo],
+- attrs = {
+- "_extractor_bin": attr.label(
+- default = Label("//tensorflow_estimator/python/estimator/api:extractor_wrapper"),
+- executable = True,
+- cfg = "exec",
+- ),
+- },
+-)
+-
+-def _extract_api_impl(ctx):
+- return [
+- _merge_api_info(ctx.attr.deps),
+- _merge_py_info(ctx.attr.deps),
+- ]
+-
+-extract_api = rule(
+- doc = "Extract Python API for all targets in transitive dependencies.",
+- implementation = _extract_api_impl,
+- attrs = {
+- "deps": attr.label_list(
+- doc = "Targets to extract API from.",
+- allow_empty = False,
+- aspects = [api_extractor],
+- providers = [PyInfo],
+- mandatory = True,
+- ),
+- },
+- provides = [ApiInfo, PyInfo],
+-)
+-
+-def _generate_api_impl(ctx):
+- args = ctx.actions.args()
+- args.set_param_file_format("multiline")
+- args.use_param_file("--flagfile=%s")
+-
+- args.add_joined("--output_files", ctx.outputs.output_files, join_with = ",")
+- args.add("--output_dir", _join(ctx.bin_dir.path, ctx.label.package, ctx.attr.output_dir))
+- if ctx.file.root_init_template:
+- args.add("--root_init_template", ctx.file.root_init_template)
+- args.add("--apiversion", ctx.attr.api_version)
+- args.add_joined("--compat_api_versions", ctx.attr.compat_api_versions, join_with = ",")
+- args.add_joined("--compat_init_templates", ctx.files.compat_init_templates, join_with = ",")
+- args.add("--output_package", ctx.attr.output_package)
+- args.add_joined("--packages_to_ignore", ctx.attr.packages_to_ignore, join_with = ",")
+- if _MODULE_PREFIX:
+- args.add("--module_prefix", _MODULE_PREFIX)
+- if ctx.attr.use_lazy_loading:
+- args.add("--use_lazy_loading")
+- else:
+- args.add("--nouse_lazy_loading")
+- if ctx.attr.proxy_module_root:
+- args.add("--proxy_module_root", ctx.attr.proxy_module_root)
+- args.add_joined("--file_prefixes_to_strip", [ctx.bin_dir.path, ctx.genfiles_dir.path], join_with = ",")
+- if ctx.attr.root_file_name:
+- args.add("--root_file_name", ctx.attr.root_file_name)
+-
+- inputs = depset(transitive = [
+- dep[ApiInfo].transitive_api
+- for dep in ctx.attr.deps
+- ])
+- args.add_all(
+- inputs,
+- expand_directories = True,
+- )
+-
+- transitive_inputs = [inputs]
+- if ctx.attr.root_init_template:
+- transitive_inputs.append(ctx.attr.root_init_template.files)
+-
+- ctx.actions.run(
+- mnemonic = "GenerateAPI",
+- executable = ctx.executable._generator_bin,
+- inputs = depset(
+- direct = ctx.files.compat_init_templates,
+- transitive = transitive_inputs,
+- ),
+- outputs = ctx.outputs.output_files,
+- arguments = [args],
+- progress_message = "Generating APIs for %{label} to %{output}.",
+- )
+-
+-generate_api = rule(
+- doc = "Generate Python API for all targets in transitive dependencies.",
+- implementation = _generate_api_impl,
+- attrs = {
+- "deps": attr.label_list(
+- doc = "extract_api targets to generate API from.",
+- allow_empty = True,
+- providers = [ApiInfo, PyInfo],
+- mandatory = True,
+- ),
+- "root_init_template": attr.label(
+- doc = "Template for the top level __init__.py file",
+- allow_single_file = True,
+- ),
+- "api_version": attr.int(
+- doc = "The API version to generate (1 or 2)",
+- values = [1, 2],
+- ),
+- "compat_api_versions": attr.int_list(
+- doc = "Additional versions to generate in compat/ subdirectory.",
+- ),
+- "compat_init_templates": attr.label_list(
+- doc = "Template for top-level __init__files under compat modules. This list must be " +
+- "in the same order as the list of versions in compat_apiversions",
+- allow_files = True,
+- ),
+- "output_package": attr.string(
+- doc = "Root output package.",
+- ),
+- "output_dir": attr.string(
+- doc = "Subdirectory to output API to. If non-empty, must end with '/'.",
+- ),
+- "proxy_module_root": attr.string(
+- doc = "Module root for proxy-import format. If specified, proxy files with " +
+- "`from proxy_module_root.proxy_module import *` will be created to enable " +
+- "import resolution under TensorFlow.",
+- ),
+- "output_files": attr.output_list(
+- doc = "List of __init__.py files that should be generated. This list should include " +
+- "file name for every module exported using tf_export. For e.g. if an op is " +
+- "decorated with @tf_export('module1.module2', 'module3'). Then, output_files " +
+- "should include module1/module2/__init__.py and module3/__init__.py.",
+- ),
+- "use_lazy_loading": attr.bool(
+- doc = "If true, lazy load imports in the generated API rather then imporing them all statically.",
+- ),
+- "packages_to_ignore": attr.string_list(
+- doc = "List of packages to ignore tf_exports from.",
+- ),
+- "root_file_name": attr.string(
+- doc = "The file name that should be generated for the top level API.",
+- ),
+- "_generator_bin": attr.label(
+- default = Label("//tensorflow_estimator/python/estimator/api:generator_wrapper"),
+- executable = True,
+- cfg = "exec",
+- ),
+- },
+-)
+-
+-def generate_apis(
++def gen_api_init_files(
+ name,
+- deps = [
++ output_files,
++ root_init_template = None,
++ srcs = [],
++ api_name = "estimator",
++ api_version = 2,
++ compat_api_versions = [],
++ compat_init_templates = [],
++ packages = ["tensorflow_estimator.python.estimator"],
++ package_deps = [
+ "//tensorflow_estimator/python/estimator:estimator_py",
+ # "//third_party/tensorflow/lite/python:analyzer",
+ # "//third_party/tensorflow/lite/python:lite",
+ # "//third_party/tensorflow/lite/python/authoring",
+ ],
+- output_files = ESTIMATOR_API_INIT_FILES_V2,
+- root_init_template = None,
+- api_version = 2,
+- compat_api_versions = [],
+- compat_init_templates = [],
+ output_package = "tensorflow_estimator.python.estimator.api",
+ output_dir = "",
+- proxy_module_root = None,
+- packages_to_ignore = [],
+- root_file_name = "__init__.py",
+- visibility = ["//visibility:private"]):
+- """Generate TensorFlow APIs for a set of libraries.
++ root_file_name = "__init__.py"):
++ """Creates API directory structure and __init__.py files.
++
++ Creates a genrule that generates a directory structure with __init__.py
++ files that import all exported modules (i.e. modules with tf_export
++ decorators).
+
+ Args:
+- name: name of generate_api target.
+- deps: python_library targets to serve as roots for extracting APIs.
+- output_files: The list of files that the API generator is exected to create.
+- root_init_template: The template for the top level __init__.py file generated.
+- "#API IMPORTS PLACEHOLDER" comment will be replaced with imports.
+- api_version: THhe API version to generate. (1 or 2)
+- compat_api_versions: Additional versions to generate in compat/ subdirectory.
+- compat_init_templates: Template for top level __init__.py files under the compat modules.
+- The list must be in the same order as the list of versions in 'compat_api_versions'
+- output_package: Root output package.
+- output_dir: Directory where the generated output files are placed. This should be a prefix
+- of every directory in 'output_files'
+- proxy_module_root: Module root for proxy-import format. If specified, proxy files with
+- `from proxy_module_root.proxy_module import *` will be created to enable import
+- resolution under TensorFlow.
+- packages_to_ignore: List of packages to ignore tf_exports from.
+- root_file_name: The file name that should be generated for the top level API.
+- visibility: Visibility of the target containing the generated files.
++ name: name of genrule to create.
++ output_files: List of __init__.py files that should be generated.
++ This list should include file name for every module exported using
++ tf_export. For e.g. if an op is decorated with
++ @tf_export('module1.module2', 'module3'). Then, output_files should
++ include module1/module2/__init__.py and module3/__init__.py.
++ root_init_template: Python init file that should be used as template for
++ root __init__.py file. "# API IMPORTS PLACEHOLDER" comment inside this
++ template will be replaced with root imports collected by this genrule.
++ srcs: genrule sources. If passing root_init_template, the template file
++ must be included in sources.
++ api_name: Name of the project that you want to generate API files for
++ (e.g. "tensorflow" or "estimator").
++ api_version: TensorFlow API version to generate. Must be either 1 or 2.
++ compat_api_versions: Older TensorFlow API versions to generate under
++ compat/ directory.
++ compat_init_templates: Python init file that should be used as template
++ for top level __init__.py files under compat/vN directories.
++ "# API IMPORTS PLACEHOLDER" comment inside this
++ template will be replaced with root imports collected by this genrule.
++ packages: Python packages containing the @tf_export decorators you want to
++ process
++ package_deps: Python library target containing your packages.
++ output_package: Package where generated API will be added to.
++ output_dir: Subdirectory to output API to.
++ If non-empty, must end with '/'.
++ root_file_name: Name of the root file with all the root imports.
+ """
+- extract_name = name + ".extract-tensorflow-estimator"
+- extract_api(
+- name = extract_name,
+- deps = deps,
+- visibility = ["//visibility:private"],
++ root_init_template_flag = ""
++ if root_init_template:
++ root_init_template_flag = "--root_init_template=$(location " + root_init_template + ")"
++
++ primary_package = packages[0]
++ api_gen_binary_target = ("create_" + primary_package + "_api_%d_%s") % (api_version, name)
++ native.py_binary(
++ name = api_gen_binary_target,
++ srcs = ["//tensorflow_estimator/python/estimator/api:create_python_api_wrapper.py"],
++ main = "//tensorflow_estimator/python/estimator/api:create_python_api_wrapper.py",
++ python_version = "PY3",
++ srcs_version = "PY3",
++ visibility = ["//visibility:public"],
++ deps = package_deps,
+ )
+
+- if proxy_module_root != None:
+- # Avoid conflicts between the __init__.py file of TensorFlow and proxy module.
+- output_files = [f for f in output_files if f != "__init__.py"]
+-
+- if root_file_name != None:
+- output_files = [f if f != "__init__.py" else root_file_name for f in output_files]
++ # Replace name of root file with root_file_name.
++ output_files = [
++ root_file_name if f == "__init__.py" else f
++ for f in output_files
++ ]
++ all_output_files = ["%s%s" % (output_dir, f) for f in output_files]
++ compat_api_version_flags = ""
++ for compat_api_version in compat_api_versions:
++ compat_api_version_flags += " --compat_apiversion=%d" % compat_api_version
++
++ compat_init_template_flags = ""
++ for compat_init_template in compat_init_templates:
++ compat_init_template_flags += (
++ " --compat_init_template=$(location %s)" % compat_init_template
++ )
+
+- all_output_files = [_join(output_dir, f) for f in output_files]
++ flags = [
++ root_init_template_flag,
++ "--apidir=$(@D)" + output_dir,
++ "--apiname=" + api_name,
++ "--apiversion=" + str(api_version),
++ compat_api_version_flags,
++ compat_init_template_flags,
++ "--packages=" + ",".join(packages),
++ "--output_package=" + output_package,
++ ]
+
+- generate_api(
++ native.genrule(
+ name = name,
+- deps = [":" + extract_name],
+- output_files = all_output_files,
+- output_dir = output_dir,
+- root_init_template = root_init_template,
+- compat_api_versions = compat_api_versions,
+- compat_init_templates = compat_init_templates,
+- api_version = api_version,
+- proxy_module_root = proxy_module_root,
+- visibility = visibility,
+- packages_to_ignore = packages_to_ignore,
+- use_lazy_loading = False,
+- output_package = output_package,
+- root_file_name = root_file_name,
++ outs = all_output_files,
++ cmd = if_indexing_source_code(
++ _make_cmd(api_gen_binary_target, flags, loading = "static"),
++ _make_cmd(api_gen_binary_target, flags, loading = "default"),
++ ),
++ srcs = srcs,
++ tools = [":" + api_gen_binary_target],
++ visibility = ["//visibility:public"],
+ )
++
++def _make_cmd(api_gen_binary_target, flags, loading = "default"):
++ binary = "$(location :" + api_gen_binary_target + ")"
++ flags.append("--loading=" + loading)
++ return " ".join([binary] + flags + ["$(OUTS)"])
+diff --git a/tensorflow_estimator/python/estimator/api/create_python_api_wrapper.py b/tensorflow_estimator/python/estimator/api/create_python_api_wrapper.py
+new file mode 100644
+index 0000000..9d52a02
+--- /dev/null
++++ b/tensorflow_estimator/python/estimator/api/create_python_api_wrapper.py
+@@ -0,0 +1,30 @@
++# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
++#
++# Licensed under the Apache License, Version 2.0 (the "License");
++# you may not use this file except in compliance with the License.
++# You may obtain a copy of the License at
++#
++# http://www.apache.org/licenses/LICENSE-2.0
++#
++# Unless required by applicable law or agreed to in writing, software
++# distributed under the License is distributed on an "AS IS" BASIS,
++# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++# See the License for the specific language governing permissions and
++# limitations under the License.
++# ==============================================================================
++"""Thin wrapper to call TensorFlow's API generation script.
++
++This file exists to provide a main function for the py_binary in the API
++generation genrule. It just calls the main function for the actual API
++generation script in TensorFlow.
++"""
++
++from __future__ import absolute_import
++from __future__ import division
++from __future__ import print_function
++
++from tensorflow_estimator.python.estimator import estimator_lib # pylint: disable=unused-import
++from tensorflow.python.tools.api.generator import create_python_api
++
++if __name__ == '__main__':
++ create_python_api.main()
+diff --git a/tensorflow_estimator/python/estimator/api/extractor_wrapper.py b/tensorflow_estimator/python/estimator/api/extractor_wrapper.py
+deleted file mode 100644
+index 884fcba..0000000
+--- a/tensorflow_estimator/python/estimator/api/extractor_wrapper.py
++++ /dev/null
+@@ -1,21 +0,0 @@
+-# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+-#
+-# Licensed under the Apache License, Version 2.0 (the "License");
+-# you may not use this file except in compliance with the License.
+-# You may obtain a copy of the License at
+-#
+-# http://www.apache.org/licenses/LICENSE-2.0
+-#
+-# Unless required by applicable law or agreed to in writing, software
+-# distributed under the License is distributed on an "AS IS" BASIS,
+-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-# See the License for the specific language governing permissions and
+-# limitations under the License.
+-# ==============================================================================
+-"""Thin wrapper to call TensorFlow's API extractor script."""
+-from absl import app
+-
+-from tensorflow.python.tools.api.generator2.extractor import extractor
+-
+-if __name__ == "__main__":
+- app.run(extractor.main)
+diff --git a/tensorflow_estimator/python/estimator/api/generator_wrapper.py b/tensorflow_estimator/python/estimator/api/generator_wrapper.py
+deleted file mode 100644
+index ffcd49a..0000000
+--- a/tensorflow_estimator/python/estimator/api/generator_wrapper.py
++++ /dev/null
+@@ -1,20 +0,0 @@
+-# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+-#
+-# Licensed under the Apache License, Version 2.0 (the "License");
+-# you may not use this file except in compliance with the License.
+-# You may obtain a copy of the License at
+-#
+-# http://www.apache.org/licenses/LICENSE-2.0
+-#
+-# Unless required by applicable law or agreed to in writing, software
+-# distributed under the License is distributed on an "AS IS" BASIS,
+-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-# See the License for the specific language governing permissions and
+-# limitations under the License.
+-# ==============================================================================
+-"""Thin wrapper to call TensorFlow's API generator script."""
+-from absl import app
+-from tensorflow.python.tools.api.generator2.generator import generator
+-
+-if __name__ == "__main__":
+- app.run(generator.main)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow-estimator/tensorflow-estimator-2.13.0.ebuild b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.13.0.ebuild
new file mode 100644
index 000000000000..5d4bf8bd330d
--- /dev/null
+++ b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.13.0.ebuild
@@ -0,0 +1,75 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PN="estimator"
+MY_PV=${PV/_rc/-rc}
+MY_P=${MY_PN}-${MY_PV}
+
+inherit bazel distutils-r1
+
+DESCRIPTION="A high-level TensorFlow API that greatly simplifies machine learning programming"
+HOMEPAGE="https://www.tensorflow.org/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/archive/b1c40e1de81913a3c40e5948f78719c28152486d.zip -> bazelbuild-rules_cc-b1c40e1de81913a3c40e5948f78719c28152486d.zip
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/tensorflow/${MY_PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ =sci-libs/tensorflow-2.13*[python,${PYTHON_USEDEP}]
+ sci-libs/keras[${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}"
+BDEPEND="
+ app-arch/unzip
+ >=dev-build/bazel-5.3.0
+ dev-java/java-config
+"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( CONTRIBUTING.md README.md )
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //tensorflow_estimator/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/tensorflow_estimator/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd >/dev/null || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd >/dev/null || die
+}
diff --git a/sci-libs/tensorflow-estimator/tensorflow-estimator-2.14.0.ebuild b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.14.0.ebuild
new file mode 100644
index 000000000000..7ce12833b815
--- /dev/null
+++ b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.14.0.ebuild
@@ -0,0 +1,75 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PN="estimator"
+MY_PV=${PV/_rc/-rc}
+MY_P=${MY_PN}-${MY_PV}
+
+inherit bazel distutils-r1
+
+DESCRIPTION="A high-level TensorFlow API that greatly simplifies machine learning programming"
+HOMEPAGE="https://www.tensorflow.org/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/releases/download/0.0.2/rules_cc-0.0.2.tar.gz -> bazelbuild-rules_cc-0.0.2.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/tensorflow/${MY_PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ =sci-libs/tensorflow-2.14*[python,${PYTHON_USEDEP}]
+ sci-libs/keras[${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}"
+BDEPEND="
+ app-arch/unzip
+ >=dev-build/bazel-5.3.0
+ dev-java/java-config
+"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( CONTRIBUTING.md README.md )
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //tensorflow_estimator/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/tensorflow_estimator/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd >/dev/null || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd >/dev/null || die
+}
diff --git a/sci-libs/tensorflow-estimator/tensorflow-estimator-2.15.0.ebuild b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.15.0.ebuild
new file mode 100644
index 000000000000..662af9932208
--- /dev/null
+++ b/sci-libs/tensorflow-estimator/tensorflow-estimator-2.15.0.ebuild
@@ -0,0 +1,80 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PN="estimator"
+MY_PV=${PV/_rc/-rc}
+MY_P=${MY_PN}-${MY_PV}
+
+inherit bazel distutils-r1
+
+DESCRIPTION="A high-level TensorFlow API that greatly simplifies machine learning programming"
+HOMEPAGE="https://www.tensorflow.org/"
+
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+
+bazel_external_uris="
+ https://github.com/bazelbuild/rules_cc/releases/download/0.0.2/rules_cc-0.0.2.tar.gz -> bazelbuild-rules_cc-0.0.2.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip"
+
+SRC_URI="https://github.com/tensorflow/${MY_PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+RDEPEND="
+ =sci-libs/tensorflow-2.15*[python,${PYTHON_USEDEP}]
+ sci-libs/keras[${PYTHON_USEDEP}]"
+DEPEND="${RDEPEND}"
+BDEPEND="
+ app-arch/unzip
+ >=dev-build/bazel-5.3.0
+ dev-java/java-config
+"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( CONTRIBUTING.md README.md )
+
+PATCHES=(
+ "${FILESDIR}"/0001-Revert-Exclude-extractor_wrapper-and-generator_wrapp.patch
+ "${FILESDIR}"/0002-Revert-Update-TF-Estimator-to-use-new-TF-API-Generat.patch
+)
+
+src_unpack() {
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ bazel_setup_bazelrc
+ default
+ python_copy_sources
+}
+
+python_compile() {
+ pushd "${BUILD_DIR}" >/dev/null || die
+
+ ebazel build //tensorflow_estimator/tools/pip_package:build_pip_package
+ ebazel shutdown
+
+ local srcdir="${T}/src-${EPYTHON/./_}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/tensorflow_estimator/tools/pip_package/build_pip_package --src "${srcdir}" || die
+
+ popd >/dev/null || die
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home)
+ distutils-r1_src_compile
+}
+
+python_install() {
+ pushd "${T}/src-${EPYTHON/./_}" >/dev/null || die
+ esetup.py install
+ python_optimize
+ popd >/dev/null || die
+}
diff --git a/sci-libs/tensorflow/Manifest b/sci-libs/tensorflow/Manifest
index 70b240c89231..ea83e7d64736 100644
--- a/sci-libs/tensorflow/Manifest
+++ b/sci-libs/tensorflow/Manifest
@@ -1,10 +1,54 @@
+AUX tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch 1218 BLAKE2B 26a1a8f228b1b5592d3dfd0ba99908086cdbf05f5df78d67ac40a63006e12caca441063dcbc4f375bc77337634d0f1e4f8504c94549a51c210126dfb63ca7178 SHA512 5ace683424c04831eb37f5d8df97c22270f3bd7571258ad9d89c98735ded810d0bca0f9d2f919cfbc38cce68597f015d58c55f3dccddd6507457b4e86cfe174f
+AUX tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch 980 BLAKE2B a2d7ecf78ea306146b59c4830f86798b0b9197cbfc750b2ace8e56ded19eddfc5445bef8a6239725659031efecd643191a391e58ed6b7e1d8513595e1acf07af SHA512 2ef446eaab044a2bfe283a653f8e4fc70a2831b01cb9e7aa8478dde597501c47524d4d5b22706994a981992c1cbb9fc3e254a50ec6a57e599a6bcdc07d4fe713
+AUX tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch 1218 BLAKE2B 11994f3f2bb9e21eb792568cbfbdb7d2a8ba3487ad7ae525f51804bf8127496dd563f1edb36fdedf9dd0a7a9b5c5b961158f49633bf89b9535715d8001d88e8c SHA512 56df0e669313beb92b319261d33c2a390201e187afe9e115a7e51e6d15ad860c796d97f8fd7c8340b8511a79f15d39b1197f7f0e202942e42c8e2b0fce69dbc5
+AUX tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch 1130 BLAKE2B 0a4fff489c81f5aa1f0781b1840170fbbb56511703d27edbd9e70ab553b977f0ed931113ef9eb6870ac6cedf3c3200fefff8c4e73431e0199f82e95b55b86bff SHA512 a970a872ca4ce82009a7d9c20c63eae0c6e30893ba5a24581900f223c699f1389848d9b23f0e9f1ddd06e1c8d062de527858d70a18da6b84e8e2d9f3fecc7c9f
+AUX tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch 2075 BLAKE2B 5c76994b140c547f0fe5f8f9f1d79cb5f0c90fe239f27e8aa7475580e472dc97fe369b1c10c43af855b2dcc89af334137f4d6f5ecf561076bbedf5db28513690 SHA512 6c35a6c0c0d61075aa6e87a322019983e536cff951649ad8c16d91855028c1fc140d12084abc04fd55972aad702c9decdc72f2067736928a6973a28ec5f65dd2
+AUX tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch 776 BLAKE2B 74ca560a9c029b7f8deaba3b3dc44197e99e752b32d56233d58290c13bdf2f40d1361ec96f694636741a91ac83b7893a400ce1e9cda54e68e39c29601d0eb509 SHA512 bce1c8cfe834873a431780719c4ed0daf9a54c2b0e40ee056ffa39c54aa030b1db31046869297cc10ddcdf834dabbdbf9c3100a223962d6e815e5373958de7d0
+AUX tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch 966 BLAKE2B 904c424f00e766048fbf073af4e418f82c238f384dafa64ce90cc3c0a9645cb8bbba4b3b832849ae8fd89a8d4fdd82af8e573e6f42ae67c2b75ca686a10ebd0c SHA512 3fc0b0f049dd783b3091ed2e406fe58b322942332a7e3f1f97c0c353e33ac23a0d015fde4ef70c847379ad7019df06c12f957c4d41f8ef0736b2a5f3ff0ebb16
+AUX tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch 2072 BLAKE2B 8e0c547d2c2791626a0d4711e018e49228e9d33d0dd8354b8dc0239f4a7e654d9513e43bc93e8da09973da5644ddf328da24536e183d450a34698bc5e4f9b51e SHA512 69e6e77a9ace0b8ac8a35f5a33acaf543031103f9d446c0f840a250a13dbfcb769776190772419f14dde7db082a5719ac21242f734cb9ea75f07eab1b526edc3
+AUX tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch 1143 BLAKE2B 250465d5c19fd46e1663f2de5b1a134555068e909e7fe8f4308dbc24da977c75c173a32aaf007f174a50e96c68d5681baf7412b40a82080e6d06851f59da1965 SHA512 e2d4b190fd042aea74929c000764da29c3811ff0525cf58cea4a415ae3436629bac9c9cb01c8fa90d10e741be9d15533f81113bb6c308dd956b765f1b414b453
+AUX tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch 8745 BLAKE2B 304bf3ce28d560fa5a707e2dce669bdd35d34ce5b3830dd87a6de3095b8c3479cb8d5c493540fc6a068734d2395dcba64908cd84d93404c64b60fda71471e8b0 SHA512 6e3617b48f97ce559027b3bc26abb5a916bb261834a30f0e801b3d5b9501f9a48ec8f0bc174b5d98d8b712c01d7048be8065ed90df98b9c06ba6f1e6ca8f7e0e
+AUX tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch 763 BLAKE2B 805e5baed97af1ac347a5a84204b338d24ddc511c04e5a03d2e4fc0d29664a2388e4d8cf30f6bc0310f7e9926035702208a534d1e1515f53223e96cc3fd19891 SHA512 e56db05cab1e8b810ca063b22de28a190c3853d6f9242809c0c0a0109acf714f23864ce0ef22497e691f6542e98cbd743d53f4d2ce1fcf28aea018b71aafe95c
+AUX tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch 1194 BLAKE2B 96bf5650be3505dfd56a62ec544cf3117b38575d30335a156aa8f5128d2ec9c832c446841692fbb9b895def175a7692430762dbea365ee9956966369b08ebf42 SHA512 c75e6d50cf30b5ca80b6c9b84abe2a78ca952465f4b620d9b4ded939d645d99377bf4bb8a7e097c9886bfd68657f3b956b987be6b996556ec735fdafc15419d6
+AUX tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch 1269 BLAKE2B 879b52b846d4c42e14cb2af88061c1cb54b5704643cb528bfbadf16d3766e12c8528623f377b389dfd78ad1da7eb470a2f69c8ea701ada55e60e3d2f1524901f SHA512 33cd834693146255217dac56a25d748c2a216a2295f7471b3635e52d879f6c4c6477d7be117c20369144e48a95e91b3e053295b530dd072652621199986f7516
+AUX tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch 1144 BLAKE2B bc918bb787bcab422953cda31db37b6ba8b488e68da448e53f723fca248155fbabb15361d89d988adbb15d95d5af4f1ca5b320a24b7f494c209ec1920dd1ddd1 SHA512 980c8343e73c01d7f28d3bc6edbf76b07a8c0b6af8f3790f745407b6f680543d8907f560faa082fcab56d319f7eab93710d79276641039605dc81189589bd99d
+AUX tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch 980 BLAKE2B 77474e163f4685a08973ddb8079a5b5516d211e7f88fa4bd8c9d2c2d977a1ac44f83ecedd9fc2c9b485aa2264bdecdc8a8fceb5a69f86e669214f67ff0c7a652 SHA512 0b93ff286bb8eb9f3897d54bdf7bef0e7ad12596434ee1dde38cfa0972abed9b801dc894e7c8176cab9476168dcca28e8e7a34a2d410908b342c2e9a824aba5a
+AUX tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch 1218 BLAKE2B 17a472445289a7c0dd03cf51c1f71a1a93bfc26201298b69ef6b2868e61b8770ceff5c8ee6fb8f3039c9718a67d324a0953a28e0207247bd16c20c22b3347532 SHA512 505b6c53eae1805cb6133f3e0539168d6bb073ab2ef404894d0a18c1c4537732a005bc94ddd17f92e1d2d3bc363b2ea8b2979249f9a57121f02be70fe9a3ab89
+AUX tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch 1130 BLAKE2B 29c3e4bdddb743d8c5840f179491052f6ecd4e0d91e65605523f10de85df7592b740b264fa3284c72b217a99fd09c385dbf7c487c0102117e5387f716f5cca99 SHA512 60f96f15186cf26ae1170257eb2c9e9d829be630140066f76ecb4c0316a0b993f1a59b94298523cedd1be28dcd8acb39166e1201cea2b99b2eed2fc3b2228703
+AUX tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch 2075 BLAKE2B f5b51c216a3261817759c51986af743aeaef8784bcf76e4f28415cc5b0cbaafbaa7f6ba336b5271fc10db26e66270e43d764944ea99c94fb3ca4427a2de36804 SHA512 6159b0a96b51c5286ff39301372dedd1e35c1bb0c796a1316b9335f3409026c00e6d4864c800a55829087ca60621ca25b8d472dec1dbb8c5cdfb68b5b9d579ab
+AUX tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch 692 BLAKE2B 4d6c2d34e713d35fbe346181d87ad2918ad0a9fcad39ee3d4e8a59b3e105b068d0c8ce165c8dd6478843286567144e2924135c7398eb7ddfbb12319c429e1bb8 SHA512 9b111eaf8b3b05ce9aa22911719c2b4c3d4c5274a8574f311a0b139882b1fc375cf866ccb84345d7baa2d0fcfb6b1ade70603d849c1baad93b110535a6476d27
+AUX tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch 965 BLAKE2B 2f96682ca856b3e64dd9d65debe3519690bebf3f1fb7e554f7b4cf6a4418556000afc674281752925fbb4752323ad3d2d3308c0ee6dc7a8fe120d4610c45d6e6 SHA512 4efc68839c088ccb5ec8799e2c37446801428d44baa62c43889b2a9e97d11a9c37c18efa2fe9853f195530a89c386e2385e54775d8bb31f858fdd1aea6caca3a
+AUX tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch 1436 BLAKE2B 784337a7edaf2c7bf6b7bafb0ad9b7edfe47a6ef96f7947f8d137031fb4b740fbdb493d5c669cbbdf999a3fc26aa443b51c95013ec0c59547e3a9a5caf8777a5 SHA512 ada8f62cc70ad073b8883504e4e732706b52c6d60a6745fdaff4139e1b3924f216ea81c32594370fcfee48352f43813bb05c25aae5f309af79b1688e54fb9a82
+AUX tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch 9389 BLAKE2B a76b763db59d9cb665e6cfdbe8f5aa02df851acc6cee2a340447d64aa648302923a80feb0bfdbc3c8010e5aa1c5318bb187e1fa820e24e5aacde6a69f435ff87 SHA512 4302f4ea6776a5e4b0410bdc816a5c9ac6ec310838f3f0dd5ed55c294391efd98038d8e4bebc11f0c27af9f644d479088fbbe3a3a21c75079fd415736f2cb1db
+AUX tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch 724 BLAKE2B 9914f3b5e7ae64ff38fb00e85c62b8fdbf820ed4bb55dea66b3b3319e71cbbef26bf1fbc71101feae37bd6df2116e1ffa0cefe19ce424fde7e19a06a2af38e0a SHA512 9f1e78934bd587e7198424230dd782d347dfbfd6d74c97f8f31ae771f3f011139cec02493c527a6c05df0b2e5f4a0b0a1b2260cb0352d3fa072cccc7891a8b99
+AUX tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch 707 BLAKE2B fbf78191408409b4fa11fc7c1e17b15751a90ee9edc915f6b84dd632bf35f3ac775d675843ac3afe8548e5953f9c91846c2bfc30759e9cc706856163f5e70ced SHA512 3e50f2342a3103e0fdaf2ee8256a2aad1236036463364cc4909bc873ff9825a7202e9cfec5706251561f3b78e334d42a5e357b865d6bd13a1c28fceab3beb415
+AUX tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch 98698 BLAKE2B 62786dbff5176e7677de3bada093b3a4295df39cf2cba25dc3d21cdb5d3a449162e16abcaecb4029a3267374933ea1551fa0f4d39d990af33d4458e0d6ddeb11 SHA512 0ca99ffd56d8930585f6e34c075f2f8ccf4d3932d920800e10f33e899b66b45a8f2376ae70efde9e823243ec246010829835ae4cc33a17be62633b777c93948d
+AUX tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch 2827 BLAKE2B efde5ca7137db2005492d979f625b6d53bd871ecf31b3c9b549bc1d2d1956251dab330eb6d6b640d389082dd0d46940ad263d45f832f3f7b202956ba82a4300e SHA512 43ca2c56fa036dc070631d5fc9f46e39e22ae68d8a8fdd9df4aa9dc02ff15cbd708b4f31d180f1da5b5386509ff0f5db507e5624569926473e54f7395262abe3
+AUX tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch 756 BLAKE2B 1fa2fcfac676ab3e339c1e067a489a7e9fe1bbc6458077ce7ed0a565c52e64ec65ff3d04a47a931bda35bb1fe754a977fdb1b472072344c48ead0ec5dbb0c109 SHA512 a012e149852a2b71d2e1c50a839251879df50031eae64e9461875c927e24fad1dbdb536dabc03a70a28090ede5a03d17a3d0064fa474293e1a7d3d151c2489c5
+AUX tensorflow-2.15.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch 1144 BLAKE2B 70bd537a8516b9ea3dba688880b4ba7efe9048d3ce7c716c183dd39016b7d7fcaedd730b7d3a2cafe18ae71d025fcb7ecb34a84ef1d35d9c38c16b35d9eaf8f6 SHA512 a4d1bab268f873c265f9c20cdafce5247daf08a760e29c088fed662929efaf770b573e7ff84f60d9c7a58887859b9e6d2a7fae6c333b7326b85af93341e37950
+AUX tensorflow-2.15.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch 980 BLAKE2B b328fd6dce6ba1da4954f6f125fb5ea117d85fd83bf4dc036ae849509ef1edaafce6b64e9e1bbd42f8db0d130e56b2e1424f118234fe4d72d01f21cb79dca9c0 SHA512 b86b70c5b31bc6f7d49d1014af3d196252746b3870fdd7c6ffbd330ab67eb5229d95d4d6e06700cbba1b5ea3fd554bdb8ad4480aa484621e396eb51a0f9cde7d
+AUX tensorflow-2.15.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch 1122 BLAKE2B 66e5d8df040d689fcf022db16df1d9462b7aff05085b8e825043eb741faa674b49b3aee4e11650a9ccce52b54bdadf1c4f0c9aa9419951edfcf15f17696eb87b SHA512 545a63812d49a4dd7b4de412ce0b943f1aee44564e0adc2163fd79c83212d655d1d09ac6edc27aa8f70f6c479b01025ed61a0705ff570b7934a0fa858158e0f0
+AUX tensorflow-2.15.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch 1230 BLAKE2B b771c535066ac5d35f4f025ae0d84e2d0bf38bbc299f430cb27ec77a623cda06c7b7ebf400061d385886b3650e3e57194abf9112fd5d1acb99449052a097021e SHA512 44eac1fcdcfce424a0ce30583faa38d67ebc415842ba3cc97fdac20d69edf1a837a8e78ebea33a33a163b88aa6346ad5e1237bf76ee0786b25247be95b866fa2
+AUX tensorflow-2.15.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch 2075 BLAKE2B fb2704eeb56144de0ddff022a009a864669b22d3c83bc0fdbde03c0aedd113ffe453e0f5d567337a4eb073086dd1ba1810c2d2bc3a826cd07c61d008ed1c4cf7 SHA512 309f42cd6bfa767f55be5989465fb23c06fb6b32475fce5bce5b24d5f1790d65f415a8daa05918cdc74a15b923b8c80f65ec93432839360c1e900646a64317b2
+AUX tensorflow-2.15.0-0006-systemlib-Update-targets-for-absl_py.patch 692 BLAKE2B 9130a9b6e11678b9df3d887b412e71666a0046866d273ce00e12c5561579d5ba741e5081a04c9c49aae48921a8f9d325f0f6c33b6ae63616b1896b0cb7ca8cef SHA512 9279d8942997f57bd9b1bde488202259e614bdbdfd9be669b6cdd859b858f224661ace75a97d8ff4b4dd0d9e03350ae1de74552623e72618b92cf6dbeb2a887e
+AUX tensorflow-2.15.0-0007-systemlib-Add-well_known_types_py_pb2-target.patch 965 BLAKE2B f98f72999311ee41d39ee2763fb8265ee38b49618cf8dc7be5441b128e55f780cc6befb8a53aef23ff775783edc12b783e300a9c6cb7390eceee92b85738c108 SHA512 7b8e3fe528eeb015658d217872aa55872c7f2cee2b27b9ee6edd1378731af025e0b1197f9e137a3f04584405317f347c45b398100fab4781747ee75cd264d2f8
+AUX tensorflow-2.15.0-0008-Relax-setup.py-version-requirements.patch 3173 BLAKE2B a9f6ac201ad16892b7461d302b1d3b6dbdef3ac518069184608087bda3c2defba87b8d22d59c64bf2184a24bb19cbd83dcfbb06614c7c9aa70fc6795a4ff705d SHA512 585d8ebc69dac9793e71b8701509c364dafcaf7c83c48d72fddea0a76f1a01ae41befff232b886d151357fe7a7147ff59319de2db7613dd6ad78af167eefa54b
+AUX tensorflow-2.15.0-0009-systemlib-update-targets-for-absl.patch 9427 BLAKE2B 3d86c9a639f87c57421e648745ac1c2de1544d506bc1b1699162f3af2ad0744e3ea47c6b40719ef6b18ca9ee119bf06feb2cddda1b689d5149bf5827113ee150 SHA512 f2eabaccbaacce610820f6121e85554beedaff338d5f4ca4817af6741fced4bbe5fe1d75217d0566d94054faf43d7ce56f56a2486d2326c32b8545e37827b58b
+AUX tensorflow-2.15.0-0010-systemlib-fix-missing-osx-in-pybind11.patch 724 BLAKE2B bc451120de5dc747c664b3eda63c57e9fbe2d851fce79e853c1ebf648d910721b700b76e2ec9f03fec117ca40a109d89dd17411c544340ec6b561008eba412b0 SHA512 00ac44ef5c198e8d8f40e2aa04c9d6eef603e8ab5c0bd70123e4e339bdaa54d762c140d85172e08bfdafabd711eb90f33ceafee378ce9eb3bd3dbf694c42b2a4
+AUX tensorflow-2.15.0-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch 707 BLAKE2B 9f01591b39de0a775e0701c28b871edac2b276fe41e67ac32d292b3557f785ae64a19f353084bbd605c8007b3d96427096b5e0f7cda7e424bcd255e2710836f9 SHA512 6987e3938b1f770f8432252513aa1b4939ec812ca3a04e948c71e0e14b47161a4a33197642b019532df397cf93a1cbb9ca914bb42bccca5921e8f730d11b7f13
+AUX tensorflow-2.15.0-0012-installation-remove-cp_local_config_python.patch 2827 BLAKE2B 14a53544198c8c60100637a1e6ceb31761ac0af1cb5bd8e70764df4a81d56d7e35e3a979b9cd81bc993ec675816b4a41f0562c166a632baf923716b73d20c71a SHA512 1baa6c8b859a1656b30c087fed99491c21720c1fe4489dd042c5c59a035196cfb0e57ce8e77212bdda54fba57b8f4166d3cad27eac1202c2d774e00e4ba427ce
+AUX tensorflow-2.15.0-0013-build-use-non-hermetic-python.patch 33037 BLAKE2B b7414b59bdad9f6b0f692d9a01963d4584c8c14efcf6c45752b4964354cf77f11f276c8d702f85ca1643521a1d1a7e14531c120cd86515231adb56830eadee39 SHA512 d24ba33f16da1b742396667bd2afe997322ef30e32275b246d55a5cf5ad885ae036adba4990852de233029e2c757de6619060decbc5676317f13c5f5e8026624
DIST ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz 103972 BLAKE2B 77d843aae7d471f7bb916caa40f66ec66322c11bab18ac338360c1a1faeb51a54fb394a35bee7ad3095ba34835e51441416d0c09ecc627769d9181678494eada SHA512 1d9a4192baf05be8cd36ffa55b9bf97e09ab6f714b303bd2ca7b16bbffbb661475c11b130437cafe234fce66d046d7b041f7daf9c25263c5921584412a5c1c12
DIST FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip 91462 BLAKE2B dc40d6129c009fc28b40d58b81b89e96a2deeeda3ea19c680b66073f02f955cc5dfc379d7843ec8aeed9fedeae6a70594b45752014526deee5c6226ec4b952e9 SHA512 15ae434977209a938739691eb91150556bac75a4a5449a9b4f135424f42beabd67ede4cf12c3a5594dea5028cc4aadaf989d0b42c2bb6f741db55ebd34717e24
DIST FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip 16646 BLAKE2B 8f6ca6feee16cb57d9b4504e7615703c2c7ea86fa8ea3f814668478cc13b8cb3cb1429200359be5521ca11f77e1c3d575b2222d123d3ac03586faab88a131ac2 SHA512 6b4830a56137e226d51ba8166fe1a31c51256f1666cc0a683abfb154fbe313d415ce15e362756865a1dd91510cf581c619604b29e8a5288c328c85da57d53308
+DIST Implib.so-5fb84c2a750434b9df1da67d67b749eb929598f1.tar.gz 27694 BLAKE2B 6c3339a98dbf36a77290c1b58da3005cfbf358d9f7759d6fba9047a82d7d240e5c7168d1a172447770559e1cd4ad37aaae6c4e884896ecbc636740d24bf2137a SHA512 694c392754f3b483176515096f98add0cb453c6c36d729ac8609c35fc3d6f01e2982b01c20bb1c5a2ed0776559aca5b8e86967d1984a16678ba047475c21f3d6
DIST OouraFFT-v1.0.tar.gz 110531 BLAKE2B de0e280407e21118e2bc6ca93a7caf3c4a49d0a13eda018b1497f98851af73dda4cef56460dba310336c7ed958e34feef4784ca2575a13287dec2c1ac9a5af6d SHA512 89c6e8fd57abf26351b3efb792008a1bbe62d404a4225dcae8aa666b3782a421be071bdc9760ebb0c95b5336ee5ea517d2fa43ab915045f7cf6fd76e73578079
DIST XNNPACK-659147817805d17c7be2d60bd7bbca7e780f9c82.zip 20341886 BLAKE2B 9aa37b3622e62e554b9021fca88f9f8d274989d4799d942c27a6d13ebb59409b130423a23f60f4e42042b8894457d8d691e17e3f21555562c8e9d1f97bb1ae13 SHA512 13c87064ec57adb29320c4d061ce0ae6cdb42b5e7584fbb758e3703f7f0a96d47e618b7ef584bf10d5b11d3658c43c6822e3b13748b6e659a5820ef534652ebf
+DIST XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip 24168206 BLAKE2B ba6d612cb09823a655f065a76bd9b956a37664eade90aede2d30d9892f6bcfa2c6134f5332eb31247bea6b46e51f47250ae09a6810bde181c72f715550811f49 SHA512 29c844e758ec2f2913dc477866b016afc04679f16da5774069dabbc9373ed210d510c4b1205eb681de20669e49f34098f490340f5524139b079461589f41e7b0
+DIST XNNPACK-bbbaa7352a3ea729987d3e654d37be93e8009691.zip 26938397 BLAKE2B fa8b24435529958833bae368d554a6a71c04bad96a53c26b8f247c1dff2ffcccd0b5382a6b9ae07da111f3f071fc4a96ea28abf5f5a09e3f3099ab032bcc92b3 SHA512 b542c6850c6698f2d46cd2a7a6de4b18f14558a9720c5b169fa9898c5cff5b499005adc767962fcf2a38b4f06b5eb038e4c1fecee0eac0524dde23aafe55ee15
DIST XNNPACK-e8f74a9763aa36559980a0c2f37f587794995622.zip 18756888 BLAKE2B 0a1787166e8bbfda4aa6010075d92573112a21f3f9d3b1c13bc931fae6fa4cafb71685e4c57e86d7a662912bb6431c2d39a24378bf82361b50e5855d1b62f524 SHA512 a6802f0995742af0ca82de010cbd42da230b36cc884612d4ba2de20ba0ca56da6a11209bfb01ee1a5ddc31dc891a69438fa4836ec9d62d56e32c6aa144c6e7aa
DIST apple_support.1.1.0.tar.gz 27105 BLAKE2B 6982ed0188760caeb6951dd28d211449d37a3192fa75e22f5ea86b599a5a92bf8efcfe5a549146533b725aa0fd41584c4d12db3fab41ffbcbca60f657e9590f5 SHA512 db291209ab9a54238b244e02abbca749a695ca3b9b9dc2e207227d3ea32f13144f3236fa921df4c6ba954850635db56584582d8916bdf4c90a2adc55dc90cd3a
+DIST apple_support.1.6.0.tar.gz 66375 BLAKE2B 7106e02676861b6ae4b0b42a12fb1fcde0470a99b49088beceabca87743430d6f691688aac5d4cf27e4c4e941781ee9d899fc7c3219095c00bbfe5b6eddafeb5 SHA512 e1d7a119d685fcfd9af1b9b48bb5685743af2d66e86d109575853172c1d9d7c1ce1beaa3fe65d21b55943d243506cdccadc724c603adc5566293b0c0846f874d
DIST bazel-skylib-1.3.0.tar.gz 36103 BLAKE2B a58142b9d2a5da9f137705105aa735c8489519989ca7e633968114309f23074a56cd03b5fed70e284da63751d666904c2573940ad9a0feb2be689d695f0f07ae SHA512 ab3a0b465ebbfe07c139b92f1e8b2c0bcede66d6366d184891e3c0ccd6619164bc299777e7d7236cb463834b98426f6fb6890409e0ce94b75446dbd85854944f
DIST bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz 80728 BLAKE2B 340a295d8998d01eba6bdd3a97efae869c5dde93dee9bd9266af8ad92a00a5c84cafbc6dd1f5d20f78dfdaa59f7585cefc7af4b87df712489db7f76bfa1d5210 SHA512 cf391a756d2520c65423f991bd0afdf3aed1785b91481c55c5d3182e54b137fc7922fd179e758af2868e11f9f10ce9903409015a2fb0f18e67d14a3b073c6d72
DIST bazelbuild-platforms-0.0.5.tar.gz 5399 BLAKE2B d53aa2dbbd526d15aef24914053a5fa7c7402e1201d94a603c8148281a933292e4815f01aae7f7f166d5325f40b14d2810d6d8e03e0780329c25299c3d8ebffe SHA512 332d5954c349a7d0b801d6338bc42193a730e8ba6c75866ccef17c5053f8f29e1d185cd33a73fe5758e96e57de0c2631974f45d18bdd58c825f9a5fc231ad8b6
@@ -13,49 +57,96 @@ DIST bazelbuild-rules_android-v0.1.1.zip 14654 BLAKE2B 1cfd5af08c6eab9602883f62d
DIST bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz 130803 BLAKE2B 406331205e8e35fc9f7f727f6f7f9697c658af800e4b90373440c433ec5cfedfe0a3986ce5b1499a2db5b7aa4dc62efac364e73639b98864d2f63076e5b209e3 SHA512 2dc8f50c405cadf10d797117b51868fa22798c4ff7cc1c4f7d4c39fa0d2e5ffbfe8269df42f90c92e0870bb8e8a9c204d3dea237a7ded03f6275657060ee9774
DIST bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz 464906 BLAKE2B 033d76b8081f4f987e64875ad5c8e7b8f894ec6be58c52ee02c4d31d4480fee02f3f432ea9c4630ad3f5d1163f820aff37f6493da797ec51b148b361ab3c8b25 SHA512 2cd841f4530503ed31fa6425cb749ef29f8a1071b5d55594644303233e58455783cb02402bc23d7104ef036745733087d43075a1fcdab2ac96cd1a9872a6ea4a
DIST bazelbuild-rules_docker-v0.10.0.tar.gz 549649 BLAKE2B e7a537b21138a5c5d9ce360e46238f57c917d2dbf5dd17887607402227cbe7c5d68aead9db0ecdb74c09eed9dac26eb86e004a9020c251152d15beb48be0e0d7 SHA512 7802107f8520c88646728800637cce54dbcefc54aa4361288a5b46e403e41998bc23e16fbe94f23a2ca52e5c33fc255af65846963c2fd0139ca1195435ce1b03
+DIST bazelbuild-rules_foreign_cc-0.7.1.tar.gz 215296 BLAKE2B fec096138fbe0662bbaa566d1f9ba36f60ac14499019dec060d1cbe21b09fe11cbf9fa72dd45884d9c14dd5cfc0a41087634ddeeadafe1ed37992e965b362513 SHA512 cca6c685f566b113148b94a02c5a976ec585e7e1794d18c607983801ddb3985d5031aa76501b095d79420ac5852d8c27812f005aaf2db43ba885892c51e7fd0d
+DIST bazelbuild-rules_java-5.5.1.tar.gz 21057 BLAKE2B c9caf7019b8377d204adfafa45bc6c274177709ac2e2e387895dc186d035bdf0f22663c498b3d393dd947ad078b149774d0688bf82ebd3db6163f57b5c2141c5 SHA512 71fc4b2990cac3e8d4205fc847df22e480d33a2411dd969d779d0c3ee66ed6bd4f3a06e396d0df500442c942551cc69cdc5288eac4841f2badd99f44b61447b9
DIST bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip 9422 BLAKE2B bbc45c97551ee8126335b611ffca6574dac843d4db9b3221b10d26487f8eecfe38ba0cd67a4039a3ad921c25fea6294c43e4f4e7a57a060a30791acd6c4f9d39 SHA512 ba06242feb711e7fb6e821fd9ac029248b4f3e466fb1acf971d0db8196f3efe4d114ef137acbe4913073f8cbe8ccab4d47d8bafa21b867b2d6a6d57f5a647f0c
DIST bazelbuild-rules_jvm_external-4.3.zip 3998031 BLAKE2B e13022ad12c5cd1901d7dd3f1cccfd8ad04e0cf8f12b7329b9a1ed49b7ae7aca6c08704c5229427fa6a5b1f16e9c50e1a61f9adbe75c8a7ab69a291d8b2c2914 SHA512 a23bb25fdf7a5f5916bf5a4576c6c06ae2de92a6b9d8d52720ed12e61ee766f665652acdf84f9814acfd90baefab0941d3244a011f341185a378b053a51f21f3
DIST bazelbuild-rules_pkg-0.7.0.tar.gz 76580 BLAKE2B 77574785070b45609d12aa2c2dd53c8cef18cb2c3a326324b5add996cc31a40f725bb3b12affcfba96540cd1d64a60fb8afa789125fe0aca328c652755d12cab SHA512 2fba108997065d753e17f1e7de1db08461416d03f8832d38e25f5151bfd0ad896565db9eb56b3702b604296a19620f9b60e5c15440855d36e64865010e6f4cdc
DIST bazelbuild-rules_pkg-0.7.1.tar.gz 77334 BLAKE2B fef99181792dac840724d0cfe4f1d71ae77e16e9da0b2f938752e6971b04264bfb7d731998998b5637da774b5e67adb68cc7eb3c4f38a3933ef62f949d56553d SHA512 5b47922e9b60bf82ded612bf023d66d2c6786cc81abe6bc1653aa93400e3497acc2d92d5ff90f9f4ff757143ea0d66c1f8c8eea4059142889f9eb0d9073d9a80
DIST bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz 14304 BLAKE2B cdd23ef47d247f6d1b9fbaa49edbda7e1cd55ad2e3616f43ff8d21fa42888c1f36172683e47beb3f3678a2b252d9b6c82fd692711e3133862eade8b64da06ea1 SHA512 024021816b4999b62db41709e0e9354ffdc88beb61a081b061d934241f06b1aa5be3b74324cbec94d1871e60eb65209b2e6b4bb8ca4a101218eaf6196ec6a974
DIST bazelbuild-rules_python-0.0.1.tar.gz 2302092 BLAKE2B 1db52eebf2461d779f764f2afdd070d1d0dd65eb2b83ccd98c2831da1784614ca281b114064729a9f257c64eceb62975aac8362d231c84f32abdf19aee7a1852 SHA512 40fa069a4482e2f83e29dc8e109652d14d187b2ec8efdcd36e98d117de93d66a938ed74999b42a2293fcb6eccc0a111cbbcf65c5c155579214bb1b96644280a5
+DIST bazelbuild-rules_python-0.1.0.tar.gz 2490176 BLAKE2B dfb4df19ba787c3cb2c2b0ab7115b9678b64ba03b61b60af0253031333aef2ac37942e425ff724e3e759e5198e8ff45b55866a20f7b497f5735adb9f8deb1e72 SHA512 b83b35f5b200f115d9d5e89b2c81745dd834155f52be0ad2972007d4654ae9438f24c7bea3c9122e6056924a69b348ec3c53d649e092dbe5ae8af3b2119bbc5e
DIST bazelbuild-rules_swift.1.0.0.tar.gz 199181 BLAKE2B 8261cf061ab630cff5bd0bf55c0b62252d3c7cc34a368eef80c0d1e70534dc43b5596077754306e87ba3e5bbc4b77710ba4934ff748079b8e03e72143e15deab SHA512 9e4acdd0a168462b5b480aad72cda9b3b16aaaf86fdf367c4de80dfcc093cb80c74f2f2219190730be10471d07c94c4f9bf8756a938fb9aaee9d1a4d983c4761
+DIST bazelbuild-rules_swift.1.5.0.tar.gz 206209 BLAKE2B 3d0289129ec42e12cf17dcac0f633cd043d9f758fc7c73ff2acd27a9c8c1a0ac6a5b29270599121215f10bc024a5cc52612b3448499a074dd4ec1b79c827e8d3 SHA512 b3699a9f05b73c3fd79c956eefac367e8d6b0ce43c410e87d8e5e67f9b4d5c6632d701a64a09357e67abda9c810a9293f3a7b85213fa7995c57fdf5ce9b3a9e9
+DIST benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz 204856 BLAKE2B a1601a38e71e08490f42e85e87675b2451082c531e2712f7e78ac0267c7fab0b6f1ac96fde34933d82380c61097a4467b277790657695fa51c97ac9504435959 SHA512 e4323f2e7b05566e7b634cc1ec18ae64c7cee1bf4ecdb2a3da97dec8393b1ef26580975e7823d4ee9d51d151db6456bc685717836eb3209574ada22a07451063
+DIST brotli-3914999fcc1fda92e750ef9190aa6db9bf7bdb07.zip 592501 BLAKE2B 47ddd728b35eac1b8c4ecc7a198b2dd1270a44b18b437988ccc7f05041135c0d42aa0afebe88b07cd1f26cc1363877b11327f4ed09340268304620725e1c0586 SHA512 d68afb576e5ed1239f2a3b0f2f9434dc95b650ae51ab3296075db871bc1f6b75febb07d8b6934e5acafb9baab2bafb6fb3916415241e130f95d0f28ec8768279
DIST cub-1.9.9.zip 619595 BLAKE2B 265b797a906b03da886de88863236c9ab90daa31498ddf848fcaf5e5ee1342614ad9a41618120ca09cc4c0da3e96eeec5e20ca9d7ba3f9860c507f06d15e59e1 SHA512 8c9c0a3f66f8d518ec07f857b5625e006d52f28bade1c1478a0f37420e2f7586dc3ff029d551748a1802bb5544b16fde5388e8d5a45d61eec595201b9db7a30d
DIST cudnn-frontend-v0.7.1.zip 20112411 BLAKE2B 6f836f6b484e708d43833aef3ae52b930b80845d9e85c61f0fa4670c04a9274975eb65f682a989b9ce5ce1c3992cf43afa6f0c8e684b1754965966a79bcfda65 SHA512 f211c5e8f2a80e445d15fc9003a63485a85dcfb91c170f92e0464d494509ef44c0083a745372ff2e17dc25791f90402e4a672f795eb3faa9650e6776266b6a36
DIST cudnn-frontend-v0.7.3.zip 20124177 BLAKE2B 9c32d99d69d4c7bc96fd6189aa1c2cdfac6fa31dfe84beebaee0e791e7a27768864067159da4473f737612973388daf39c7770ad9c1270bed840221bb603fc4d SHA512 68f5dba9873b317d8239187b57e7b4306e9078e52ef0992e6f23982aa374eff6c2ef2232b6cfff8012f50d9105d6f61c84f7f7c9ab4139d4db451599f896e0b4
+DIST cudnn-frontend-v0.9.zip 20077185 BLAKE2B fcd9425be4c2ecc39db0fd92be355a7767b3d00cea990ff4b63ade3dff957f97a6e5fdb1e6f287f6473b2212a66e160940328062a70485c38d5619cf3cc2eb54 SHA512 f38fc912303f4f61ae76d3159ac51b105aba0f728e895b850a164480a729ec144bd8f99764db3e2203210dc114662aba4b4ffe0435d027c0cf620cb42a50df64
DIST dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz 76170 BLAKE2B c28873deab233d73996137c254acecc4adb0a750cee675cfd0777ccdfa91ea704e338e7166705d47e775c45b46b152834268d89c0443a08c57b4b830bd07ac71 SHA512 e6a4fe9356b8f75f96e7f9960df40e227f8e5242e609f8cc8bf28e8161bd4f58e8c6de374d9cf216edf7e0e09ca502bc158d41c3058bc6e6e7b2bbfb9c5483ff
+DIST ducc-3d28aadfd8bb0219e3df188613dbbcdfffccc3cd.tar.gz 402963 BLAKE2B e178b7358e95643f118cdc210e2a33fb66b4e436636171e292f609d01b4559c9c508c2b07a3bd8008ccdc29f0b442d7a41bf47aa31ba6d4c8edef66aebd80810 SHA512 364d39cb0777e1ced0e5d778d1249bdf95dd75f437df45e0b585c11b0327169eb1c4bcc07dfc99dd1d517dd9b939ee1fa74485bbd231dc128e41850211d534a6
+DIST eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz 2848019 BLAKE2B 005e353f101ee9db0a794843b03169e35d0b74867b7c5638036b973ec2424539646f61e063c892de577e04c78e647c8cb11ede67134b7a8b0471286be7429dfc SHA512 9885feb9ae493bb8860799c69d795df199f3ab84afd2bed5b1e71fff221ccd22023b512b4ab763e1b0c684a93d20d5d202088246fc5ffe211f0995e3839ece55
DIST eigen-3460f3558e7b469efb8a225894e21929c8c77629.tar.gz 2833459 BLAKE2B f624102a174d80860314f0e895f641fb86369a2df88ba2e2589da86e5ff0802b36c64be44211f6013997e0251d74bb28051e3e7edcc6cc43ab88448524db3152 SHA512 c015dae59e0df1f25b6caef5f3c724cfd91cd440df6c3eba1ee7c129b20caf9ec4010209cc5edb82ed6534503a697ba6ee49f64be0359de332ed023cdede05cf
DIST eigen-3bb6a48d8c171cf20b5f8e48bfb4e424fbd4f79e.tar.gz 2810518 BLAKE2B 97c9221024f765e6899c676602ee2c850fae661dad613957cead4bce29fce8d9cbb1ac20b812b71c699feea75768be2da945fc39e9b9e9cd2e5b3c6bcf034c60 SHA512 de2c35d3ab859021dac9de35b83cb94e824814e436cd40e45ca2f5f7f9fefadac2b4222f05de9eb1b03d04a29c751f9da3a2b804e6c0fc97b4a5508f25b1e7d4
+DIST eigen-66e8f38891841bf88ee976a316c0c78a52f0cee5.tar.gz 2856636 BLAKE2B 64ce221dd6b77790bb5f2b21006163314c88d18596c4394db5186a741da5c74064f24acb2fb1b8bdfada464a70cd3abe25bcfed904248f509980d7c10f0f8162 SHA512 d54d8bbd92677fe1e8109f6fc9d35dc8e8b6efa8060c6c88bf1f84c40dff36823de0901d6eefbb29468401d6038e4dece1e1de8fce36f4185dfff81b095ead7e
+DIST eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz 2833536 BLAKE2B 04bb103b64fa8c81ed337c67f5484fb15097f03905a504b19ebeaad4b33ab75baf1e3a2e060c91f1974272f55998555cd16c3da9d8a54a725aef39da7d39dae0 SHA512 1b239db63199aa6f3f9c35198294aff5b89c817befe6f394d69d2f4d6c6e3d946fda32119592da0d7894ea8b4fff12a1c1b8c5eda2e52f7365dc5aedda11f90f
DIST farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz 467251 BLAKE2B 74fd0cf3de41a6db0aac74148f795335df58b5a3bdd370b22ae0763aba5a4952dcd0133ed300350636a4678ff54a5c59eedee749dfcef7283e21c181b88f92cb SHA512 7bc14931e488464c1cedbc17551fb90a8cec494d0e0860db9df8efff09000fd8d91e01060dd5c5149b1104ac4ac8bf7eb57e5b156b05ef42636938edad1518f1
DIST gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip 940590 BLAKE2B 72e8ef48e6c20d3f3510eb3a891748df386f3ac127e79cd5c594b7289826294e29be5987632800898d94d6ded9016e398672fbdabd07af8f55d458540e66ec7b SHA512 b5683bd6e1cb942d0e1e07c3bd28d83d74706c45a33b59896124f659f361bb4f93181d3ccd282d9dd61df49695b38470f138ffd2c88b6d11fe160a0266d3909a
+DIST googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz 6091152 BLAKE2B 7d846fa18bdaf2ea12c1a064d6bf5a221f02b4398821032faf679dc41778f1ad13f0631a88baee127dfcfea40d3aed75f6bf1253ba800b25a4ea7b067044596e SHA512 7d098723547835c6646b776c764e85a226c23cab1709b7d4a780f60a3193630bb9301c0153382175fb4ddab876d1168d98a325f14bdc8500c02f6723fe7c7ab3
DIST highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz 160745 BLAKE2B f2ffe8f13b1a8346b5b29e719c0a57f34e1fa6320d0f2d0575e4e97e813ed1a8f55acfb44b57aba70ea7d372ade18aee5ef82a881ecf457f580ffc92d6528c7b SHA512 4d6a7e8e321df82c009a0849b8da097a551f6c35a49fef65b89e731075a6af624918c0e55be9fd3a5bf07c519ab09bdefed57e0e39d4df0e79e189c468939de7
DIST kissfft-131.1.0.tar.gz 52383 BLAKE2B 74e6d2e7d132a311b31c28a468e13d9772a53f0ea0abed0e0f49d8db9c183fb0646f58fd38df3e797b8577285899daf6b80446b149ce2582bb828410656d96df SHA512 bd715868ce0e93a291a0592fb1f8b960e832fc64efe863755e52b67d5addff9bcb444a1bf2570d1914c52b41dad1023d0d86400f5ea30c9fb84cd6b4f7210708
DIST llvm-project-10939d1d580b9d3c9c2f3539c6bdb39f408179c0.tar.gz 179559452 BLAKE2B ccdf998502aea7e87ba128560f5458b2a959d32a13f56dc795b8a0ed794c0348ca035ca601a34c0c288990f358dc5c3c01712b7458ebd15c48b1c41b4413fcd2 SHA512 36997be5da7caeaf949ae093b3ec1d81dda668087cc94de1fee9f5262a019f40fca61e652787e3f9498cd8b021b8ffc8002daef189ae0e15fda281ef6d56ecd7
+DIST llvm-project-49cb1595c1b3ae1de3684fea6148363c15bae12a.tar.gz 198655180 BLAKE2B d29cc1ea0045ce9d05ebd2716389be01d1651ee84b401e9f0625c964134d835c3079eb83b8de26f0f5cb617bd857412a07514bdab1d90e9208e5371c04c3ab76 SHA512 0150749286084fc7c6e7df7dc29aa8a56403da24cf6237a9a344b40216e55ad5f169d96a74ea3021d0987593ddef4822877332c55bd4c6a467a3a098b81abdad
+DIST llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz 195368050 BLAKE2B 4fe9c27ce35e579e36f1bdca7281206eeb530eeab00089b4c71834f7b47c96e8f951f3ff8477e2946a30c21cd4dfee5a9b485282e5f4f449a70ad7652f378a45 SHA512 6ef3796c8931503f8e7000087262feb011995c1df79f7a7776ab508e9fb37a7bf5bad471d3317d550142b68818a1b3a26d39e23214a3fff852de0c763cf05b2f
DIST llvm-project-d8415b02a519f222ecf71b069c96cc85ac635de3.tar.gz 169045379 BLAKE2B fe25f9c889c5159fbc1d251640f65b3097b4260ec7b27d5133843502ee397995c1075fb10f1a6519c6f693757ab8fe0fe2b82bb96678ef4ec4086c09ce3c90c3 SHA512 546edd97778b4298d7bb645620010e00569813fab07b925a909db4cdd8feb9adc4898e488f7bb31e70587d7b4134820a3f49a98d4f87bcf1dcad9adf5eed7e4c
+DIST llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz 182909064 BLAKE2B ba2a2db104849d1b09115cc2decdbb2e5dc84c58b61074500ff728e29c2f380a0818a4e8df22f4a1552c04e243dd114862492d7f8df06132348034c500200e14 SHA512 4f51271b765a666b023547382f3f983453afbfc69b793336e381e335d6103978292e781f86fffe16cba8b6d6ea309b64e6d899570060c275779aa0a2b90948c7
DIST llvmorg-10.0.1-openmp-10.0.1.src.tar.xz 955492 BLAKE2B 4197ecfb2e3498a95a2ba5923f2b4bdafbab645ddf2d3f1875d39752d3ab7304fb35bce918d2dc6e40e5ea809545ae0907d6bc0b94a3d68b8d96da48f5a62adc SHA512 5b6f6487feaabd2a18ef2bbb1a5f86bb567e264f9fdf56805cfdd3f65f36f21e7c1392ba5170fafb52a395fc1606b277233beba3df31dc2ab825ef8924e7a05a
+DIST ml_dtypes-2ca30a2b3c0744625ae3d6988f5596740080bbd0.tar.gz 69192 BLAKE2B 74fcbdbc5d788a12e01bbeaaf17a493f10b40be2724751a2b04bd877ba202a20e194ddf0d0161235eca233222b4292a2465f1d712acc407e984d17d3268829e0 SHA512 4912e0e224e618280c9b908500f865ba92cfd99341fb100b784bebab0489b283a01d65b9a34ae7984e026a9a06864009400cbcbcdfeed975581d2e61b563a4e0
+DIST ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz 66243 BLAKE2B 541ce280fff68b51eb9e7f9eaff7c03d0c095ed5b219d3ca3d61c31650a21a63bae6fd6a8efddaced6de4d2601d5a7c6924d300d120c30907ea5e239c00ec70d SHA512 78f7e25e37ea30b0dc0cfd0dec4e03d4e208cbf223c239fa386eec9f9912f1adea290eefcca7b15b73f2329c457b34fef4374fb1ad0f3cedb02b310e0eb9fdb6
DIST nvidia-nccl-v2.13.4-1.tar.gz 287269 BLAKE2B 8719e26978225a9697101fb7998388c3179bb0af5c396602689242e5529492ad7a81a142e3a8492c9fa4c92adc251c58e67f97fee64a4fd1a046134ac7d737d7 SHA512 5f7077f69a0c1624a1c8ca3d2f503d8269751e26cb6ee63e1a165fb86877b62073ec4e182d939b9aacce4ee8bb8295a39d1b6d65ef3dc0fce795735341a13fc6
DIST nvidia-nccl-v2.16.2-1.tar.gz 326883 BLAKE2B 86db7adc67ba311b72e7e013dbc2a04918c0746c1fb62079ccd3300691479e1f6e35e379d6ee4320e343666b68372c56607ae521f5ff2d7e59d5f4dc3b894097 SHA512 e6572c2e7adc03053048c0b1e5290ffaf6f294239d78038887582c847aa549e5e95c7970b943f1d0b8964c32b4cdee3785bf40886f274907b613f320e9de10d0
+DIST nvidia-nccl-v2.16.5-1.tar.gz 327261 BLAKE2B abeeb6a2d4b58647ecb17694d92f79e650d2f2ffbccf26682ab202e17a1b7d3c356fce26d9f6edffee0756d71887bba8a9d5c254ad433d3b4ae8babfe3294534 SHA512 fc13e83e2339921b732c02250e95614b21202c52461aa262489714af6d92117aa5c0647bb0dcc3394cd357d4b7e8a76fe4c3a3567ba4512c359f19e2ff41de4d
DIST oneDNN-v2.7.1.tar.gz 6405831 BLAKE2B b43253f7bc1be0bca51746f06d825191ae544376b259662cbf8f567d8f39a6befde3c88a14744e053b851d2f89fb2600b999abef1acb585bc116d6fa0c95fe3f SHA512 062e97ac613d265627ec1f010aa1d101bf71c449c813187c26244c66c9e6b9b582a0a0a823a9391fa828f396051318fada8263ff64c4f4b4bb6ca1d7a08ea6e1
DIST oneDNN-v2.7.3.tar.gz 6410473 BLAKE2B c6730100e0438d456eb4986f416ae2bd1f173a80c52d5090523af06790afae8ee17cc58ffa8ed7215cd0eff99191a925d8cdce9986c72ccb8ebffacedc434b18 SHA512 ad9450f8b701288fa1721f64d0cb33fc344e9fc4f84e517b3d52377189ffcd0c5b56156ef4a33ca3ffe2da886abcc7ac5b2a3407cc155bd4be2223751b84f7c9
+DIST oneDNN-v3.1.tar.gz 7556565 BLAKE2B db6865410e902778c0153c50cc1f16c12e358b360d7e865207a86489d42727e66945d422b8bfa52b04b5f2b34daf585f1472a031cd8810a36c6724a2779120c1 SHA512 2053157a3885618364a9da5ec738c9cc2dde15db1ce9737578565d25c4a15a65944db3bbd17780de2032cfa2329bea4cb0af24ee428c9c246bdfa07a5bdde30b
+DIST oneDNN-v3.2.1.tar.gz 9186820 BLAKE2B f85cb1b410c3f57e098106ca13939c8c93c396e012b15a63c4f728ba75138a6f371db5fd182a54711479beca8f215578ea52d9c3d07be36647f6befb6c16746a SHA512 115819dc47fce5ef8fc7403f88e141743b360bc33243c90740d1b3871849ac379930d23e3e1d62d0abaaa3af5d2cdbd4218b80aa1be1edb09d0d949f7532a559
+DIST openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip 27954369 BLAKE2B 30dddfcf2102e344d82171d8fcb2df68a3c2dedfc349a3f248c060e591535127d7716e1bf10c5eef20369eb0d81a6cc0eb5350a6979adb8a164b7bda62d6c745 SHA512 2432e4256bfd2d92ba717895967d87e05bb0201a5086314b1de5fe9078bfea37c14245b88b720ec82f2906751ab344da0dab9f714a6fffe79a0423cf7659e5ac
DIST openxla-stablehlo-51f005f0a8ff6e28f535adfec4de936cb4097aa4.zip 6902721 BLAKE2B ef9766377a38f816f5a6dc60f34d5300b2775bc282084e9f34c7a5ccc6104a0154d44f2c57aba081889de50fc141a6059255fca3f681322343e316289d6540d7 SHA512 ffe46e21be6f617b6ecbc7ef35e83d441256e429150af60451cf04c02085fb1a0b60a9687d8d60d6f1f9321e6f6a92f24749a3c1cf1ee694a8ffc0fcd13f64f4
+DIST openxla-stablehlo-78f57e34a25367ef3192cd35da36b01c763f4ecf.zip 17845568 BLAKE2B d745a8c61c78bea3b2cd2408fb1a93c2f47b5166b8b92515fb32e3f23c21a20d2278fbfe8a4b9bb3993f79884a1a914e7525f11c59b7e9c059b3bf71fc52282b SHA512 6c9a7f5e2dfe3323af7d6753740b25dd1075cd315edb9b2e9d65521df54fbcf62e9d8a16006e94fd64649b8c4fa7bf0fd6566504ee6b145fdf344b142f91503d
+DIST openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip 17784280 BLAKE2B 99bd410d36d78c4dbefef46d7df137b0bf583cc9cb2d34832f3d95360237647a05511c904ce23030d23ce6d95c34af11c29085db9f775aa96a702c28cec1891d SHA512 b098c2ec986ffae14f916a62095561942a809f80d2159005fbaa08691532ae2a3383b11d8672907b116fcedcf21c88ad86f3f4911b666a66543eab16fae06d86
DIST openxla-stablehlo-fdd47908468488cbbb386bb7fc723dc19321cb83.zip 301572 BLAKE2B c8242b3d9612fbdfa1c34ae5cb610aadd68464498e6cc84d48bcc38abb9e8636fa31b9a03b5a604a29cafe12a47e5b7be90c48d52fb6587bcd376d6307775197 SHA512 61b89d0dafe450ae95d770878385b5ed3cbb0109b79cf5e01304119f2f52255ccc83cedc542cfa5c342692945833b86606468224e67c6ff9dd6f7216b86adc7a
+DIST openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz 985789 BLAKE2B ef3aa4d8033757d37a9ecde3a687a261c5ecde1200b30db1ae24cc5725c334f8662e741041be1e854ddb2cd5d9cb2b9d5f254d9219e8035c5f08df92b3ee9fab SHA512 67ae9075219e142ec3f3e5c3f05cff5fb15f3ef9605bd1c0dbb1e26940bf1e428a9c17b0d5f85c069844f6ea94b8a3ce94277bd4cd344238fbbdc3f56f91e08f
DIST openxla-triton-2c3853269281da6742cf469a5ca5772947d271ce.tar.gz 459751 BLAKE2B 8b1b314fd1b6d8822a84cb9cacfd70e2c59784a76f879d75c910f376d38fbdccbc132ebab7f29c5bddde04afd7821c0322311d97c55fcfcc87580b82039a7efa SHA512 cedee3b982b93ae237a1e035ef2a9447aabc94ea4add63959b927670006b5cf546f064d5741ee9e731de31c343ed5869abe9c479d07360b07ef2b26f93081a6a
+DIST openxla-triton-cl546794996.tar.gz 948559 BLAKE2B 9c2ed46364b4986c39466803f14ec5618cab0cbc504f53909f74eabf7f6d5e5f4f6fcf1d19965f48b38f18dc99f26fc02ecc7275f05194b228e281988bbb4cea SHA512 680774ffb6cf291bb0f7bd851d4cb66d4e40d70ce2761441ac17595fb98fee6cb013fc5d4f8ca33d79f7b09f2e2924e50c027a09e7250d72767c59a119e56143
+DIST openxla-triton-cl555471166.tar.gz 955143 BLAKE2B 00c4be9f2ba24d46a811f6d30b4972bc00a0d490f375f2156462699e64714667cb77d8ff34d2b78074f505d94dc737355f62c0de156b45e329fcdd8d19a69e12 SHA512 e2c56d48b8db852a93e194d2f29eeae89ef332d6a62a0ba208a5b0dbb4e78bfc06464dee53d653b22d0022b9169a7fcd5b9e8ce2ad3e4b3741d712ce98e25155
+DIST protobuf-3.21.9.zip 7156366 BLAKE2B 464ec84fd380d2d472cde5b7dd978c31ac8dc455934b7b1f7afe7dd836579ff74c1196d56dea2944fb41e5ef2f2e05683b889b54e4a3a11bb7cf87a0cd57f691 SHA512 311e0bcd3da54b8fb2df4de03a9b465cd56451018438e4d72054d78e70c37c70ee3c8a0378a177acb737ec863bdf9250d10bcbcdc0f97707c7edf97b9b37683b
+DIST pthreadpool-4fe0e1e183925bf8cfa6aae24237e724a96479b8.zip 72209 BLAKE2B 7fbbe569d8d08853c826212b487b8138b6361180444893b4975cd10abc391e26aeb6669db7d78ed39014803e59cbaff7b80140d01cfa46e8138fc3123ef3d54b SHA512 e951c12896543ebddb115dd4ddad539145cc30342ccdf71b83a568fb22f50e6ab5a7ab92309d02d9960f43c8ea9f690c95306924cac75fa9f4fc64a458e5ee10
DIST pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip 61524 BLAKE2B 924419730bc6b94ec98a13db94f177b3b70d6c763158f43fb6f9e45605e73cfce238e6c996e2bf629dbb2a5af93ae99849ddc91174fc4664d702667f7423892d SHA512 d25262b47e39058d5aa8b8197c4984a5b941587a19079a2210f738c3af34ab8e8477731c88ca80c3f812a6a04635f80300124d93cc1e099352ef2aca04bdc3ae
+DIST pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz 28202 BLAKE2B 58a13d005367d938e9fc17da6435a8c2706722d0770c173cbfc433b7ea4de7e7d1b97653c5859cc8c436cccda3b8d21df906249a3a60ee4bba7cc6601abfaa59 SHA512 91befca03fa1b4f12446b84d5fe3514df6c9e352a19042f4f8856f6df306229f23b6ca2976a17ab22c8dd5afa223a44013e54a4348298c305a7688646129f0a4
+DIST pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz 8722 BLAKE2B 3f323e622fdd9921019cfdf4d281d54530b152efa557d2e57ed08bc8785fa419815c2a7e36c38de8d0c984f76d052607d4fbd409c4ede76cb9878823350dda2c SHA512 80fff0799119b535a95badb307408c7a81bad0261c0e753095732036d0801c4894318df736c6b1b3ca09201d3f5e4e971593a7d5e9df71c1cd588e4be3799c1f
+DIST pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip 76830 BLAKE2B 16b35eba4d9bcd345809e9acfc4c692a30d91ffa5319064e8d00a9d1382d6215ef2a73447ea54c8302610fa59d0b1a17add73f9fb0c86b2fae946614f1b70592 SHA512 f73fb217ff471143f473707576838ea85c5319edd09847d452cdaf66445f2abe303545208c44bf32856777d27323d7da9d76d2b148a16dc025c351b16fa2b279
DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz 3515639 BLAKE2B d4adc49ea1bcbfd7a7efb13cdfea6a1d9cf717b06209c03342f92a3d624de65bcdf2ce7aa7fa8bd5f95ad423447ee833fdea718e16f98037344df75de8bde943 SHA512 f4c0ce922cee0df62c15a335889bb59b5f70ad71448b42b9c1bfa9b5e77c4c4d5f1613f607f32fa9d6817d0d5f49c554e1378079a1cd66a9cd2492796d48c3c2
DIST pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip 3812878 BLAKE2B 30048677534192f1e288c69be5a0373844206cc4e209d48b92f5bf38da37003bdd5125b6588ec0f34acd855acd9cd16193725976ede644d3140fbbcf03d2d364 SHA512 963fa6c6948102d15cae1db90645e9cf85d1efc0fd541f75dfff7d6efe62fdd196085910cdb366be56b7e71f36df98edd211fc875aff6eb1e9962e0d62f43667
DIST pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz 3512335 BLAKE2B 71c14866fde3846b5f375b39fe2f081a6c219b36fc0721640971f99c53ca77c8e7f8df83992d777af32a28c645d64f27dca838bd8e60835c5497e131467b22d0 SHA512 6a61f4574661a55771c2ec31bb0919a51d0bd8c770477b254a5c14dc5323716af275c7fe3abc5aa96720d7cc929559ca66f614265d3940e076b8db2fa15c8e36
+DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz 3516012 BLAKE2B 78845105e55397d3e948382bac22fff6a639c2d27c096176f29b9b29c6e1e2f78a8ffb44eddf5e20b4d08e2f5dbd7be949a6b55ffe0ca754a00794db8de540a3 SHA512 53b687196b8e46bb99469bbf37f8141c3ee89be78bab67accc63af849207a0234447304b7fa63fb44635add0ddab585df337130acb85fd7b026c0990e12a5840
+DIST pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip 3813501 BLAKE2B 7b4c54fd6a21b9845dce1f1834eb07613b165ca3fd8ac132bfb3a6964354af9910664f77601f7b839a8770036a1b2a2b21befe3a51d2e6c1e0c6400abbcc952a SHA512 1f697dd26b01bda1e21bebb948fdc2c224455910f55fba7327533b131c016f7cb51eb00804d6d765b37b4614c9093243898363482b3e37e427f83941b0c88f48
+DIST pytorch-cpuinfo-959002f82d7962a473d8bf301845f2af720e0aa4.zip 3813452 BLAKE2B 71b895caefaae40693a815961ef6e660f1e9ebb0318d7147c9b79c6c884006cc624b8191b5dc814acb64162da0090e024052522e08239759d0eda1c992863d81 SHA512 53cc2f8e21c965f885d6798d113594c43b53a3d5db41c176f6c601169fc085f3ca647dc6aac93f9829881f4119bafccc6151109f281a892936a50144f87d1061
+DIST re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz 396457 BLAKE2B 33b90f851c4a4ba634bcb907e8f32d21f6792e0fb680d38034d02f469e499380b0b458ad75fa42f1ad931cda314f7fb0ba0798ba085e176e5f2e38a67c8e14d7 SHA512 d573150b7a6d8fa3e970f0e245501d7653944fd406f2dc4016c7126c9602be988ba18d0c8b0296684dd30f4a3b9207284db5df4ef5f05e305160b845209f23d0
DIST re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz 306766 BLAKE2B 54162a22afe10b392e30864729b3b210194c0dbf7926cc3536dfe3afd43e0b8abf3d01b08e0feb71a8ade19cd497aea9e9b7b34eacb85e10cc7e1c5fd62a407d SHA512 9557830cea13485bd332ccdcdd3735ea63e6bb41f4cf943ecc24a1d79b75a58c5894cfe70c4a35a07a1b2b1f3d2ffa76fbeff168fda17c72b3436cb6213121dd
+DIST riegeli-264ef7b4a1314d97265b37544b27cd3923ea72d2.zip 1047649 BLAKE2B cb5f94ed39c5752ff21f5a042389a147ef83be6893130da79c412bbd1c29904bdcc6190f4ddbc883c166fa057b009aa6acd5d936ec9fd0c28c4f71c467431b6f SHA512 e41e65fefd9f4a5179f48f108f2839f6cdd75284b22bbd639a4bf9253c9217d0ea2d7935635b0e6f8f8a961dd4cb50817204e6745981b1d619e69d3665458fd2
DIST rules_apple.1.0.1.tar.gz 2198467 BLAKE2B 79178efe1acfa36af3f41b31610a5add8bd9c35529931415ab45cc1588c4fea477ddf0cd67c5d799de688db049fe2f3ce776c5e5da2e1dde1c329efc44d51ec0 SHA512 a24d880dd8174241b2808424fd3364f8b0ba9865109704981ad68f383d983bab9f8e8923942423e70b0a3af6951d16f5712647692a8ca3bef4347057f3536cc6
+DIST rules_apple.2.3.0.tar.gz 2222651 BLAKE2B a0a0e153eaa16be0ace362c1738b3f5ff87a3e76bd6fbf65c7d76e68d33b40d99e86045cf0bb104644fbf076024dcee4082303b823e12cbce6675202d93c29d5 SHA512 e8f6c164451a28ad70a9fdabfc2540a0165de1d23e2bdb7ec06ddf2e455f55cf82b8024be0e88153fca44433ae84144cc4054974737c8f6374fc88de37338394
DIST ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip 381045 BLAKE2B 00cf2a009ff6cac8e3e8c3380c3fdb4fe9299614eba56bfbf0b2b8dd908ec2ec7d58b8185810899dd8bac80cc990d69a26e01eed8562f73c5fc08b8b3ad198e0 SHA512 4acb4dcee41788c3f2a65a4335f64d20057980289f231a401ea45c27dcd16bc2e48d0748d6ad35e77c3212104c54353193d4710260993ae8927dce24a6ef435e
DIST ruy-841ea4172ba904fe3536789497f9565f2ef64129.zip 379664 BLAKE2B 82f54b4e7959ca2ff489cf0eaa7c01c5084b11174a43e2caa8f30dcd3951fb9552e513fa0488190fa73dde62719bfd8e4be59bd264fe316ec5b9852db2494ed2 SHA512 e10bed1901eb53cc0174d8723b67b7ff4f7021b5d94e8e7596879a9a625e77948f265d430b5c56f7789030874ba08bdb5263796212d9b60affd1a20694ec3317
DIST sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz 2569603 BLAKE2B cd66dc8a0b4ad3ea586a79ef588c8d68e4f05b6ea89e6d8119b5ee338b9046c11a747ca57c2f80a3c90fab119c05e85f5965571c9e522ccb8a312b95804d1a36 SHA512 b819d050bb38246b503f1eb3d6e7d878d0f166f3602ae3c327749b1f6ddc2b1d6ac7f768b1f1d055b98b7b98812e4643a75bcebc9728c7f615d67ca739761b3e
DIST tensorflow-1.15.0-python-license.rst.txt 45132 BLAKE2B 8a8ace4b64fd9eda1932272704dd4fea002d2562cd9c2c94470d3ff5de58bb171ff849d3f998ea90cd779d1084d39af1267aca8400b74b701205d2e76e6a2628 SHA512 c79c42f4ceecf33c8f8778ca17b53595b75d5d0bdf30b11058cf01dd70a6351f530ca9dd846460aba14edc1d5876f7bf6fa0f9f49f7915590b0f54d267ce3856
DIST tensorflow-2.11.0.tar.gz 67996769 BLAKE2B 539a8d7084280023c7b019a079aad6bdf0fa94b22276250a02913fb0ad496b4af2115276152b4f37101547404b616de58f209b9d1036e5d4dd9b0f7072a59ba9 SHA512 cda16db72a0ede72ac9f5e76c3a745ea9d72421fa40021303032f8fc3ac2755f64524f97a4629c18cf888f259027439b49ec921e0f5fd329a6ba060235a658d5
DIST tensorflow-2.12.0.tar.gz 69710128 BLAKE2B 582b7b2717edd0ce41ecd74525fd38684d375cf1833c206cb53fa10ef964bb41ef8f29fa947a1f77e892bb68793d53c567bc9c4c9124dba94913f57ddcd3b1f9 SHA512 9273720b5be08e5d3dc76aafa4af6b27a2d50afd02b181e7632f3d70961995b2e0e5acb13e70c9b704ef475617c23d70047fbe74d5b63b156cf8f2fa8a856b84
+DIST tensorflow-2.13.1.tar.gz 70663056 BLAKE2B fcaff251ae3757f7e20deb07566485ca243f943ce118886edcfea3c41aff8baf83b98e5e3eea97c5243cb5db2e7418ec58687b3099dffd5c750395b554689343 SHA512 4f7bae717444a34704cb53466051a5708be13342a193be14914fbddd4790e327f3b6c37063a36d1c7835c50cf99500895aaffc646fdb9b421e6b873dfe4b2e46
+DIST tensorflow-2.14.1.tar.gz 72760001 BLAKE2B dd443c087bbe524b8b6dd6e9f08ec1c7bbc406e2ae7f854573fd29212004f927daaf8115a66f1c11c97da2e6f40a44ccb0e4f8e28455e1bf94872d630277e4bf SHA512 c5e9a176027a00b5efb1343bee000330f56229a1a8559db2fb9e2c9388afaf8420d69b6fd6e7b85811272c110245315935232a859e9fd4106b29b226780c447e
+DIST tensorflow-2.15.0.tar.gz 73544567 BLAKE2B fe528a8ec2ad7f75ee4fcaf0febc673fc743835fe6a27561efb3e55fcc6cf4b22220acea38b82318766b3f0d640b8d539628f6062af0ce5bb9e36b1e9ad1e629 SHA512 51976c7255ffbdb98fe67a28f6ae1c3b9a073e49fe6b44187a53d99654e4af753de53bfa7229cdd1997ac71e8ddecbc15e4759d46c6d24b55eb84c5d31523dfe
DIST tensorflow-patches-2.11.0.tar.bz2 2977 BLAKE2B 53672704ccfc5291f7070421af9f7246d2f211689b18f35917d4d166ff5e9ddb623db4dd9dc8054e0f2262b162dd8c2216446c6ca5e2bf538872debf8eb8aec1 SHA512 866c6abb78934c1a645ab3172f93d81423e2023fa1e8688255ef0777e340d810a6889c838b841be765f0897f7a269c4d6cb52b8f59af114bf5b9e181b1348590
DIST tensorflow-patches-2.12.0.tar.bz2 4194 BLAKE2B b61efaf0ade6ef88b5abb858a84b537e02ff9fcd032a2a7f68a6467e53511a50fff66ef7e1096f343a8909e165b1b76146cb6a8db8e1974eeecf2cbf0b6a71a0 SHA512 2f931fd4b995d33300d392f7dafd6dd23671772f733c28faed239d01e9b032967afb17cab50908fa38956e2cde479a13dfdc632e622d918fe55d281aa9b3dc4e
DIST tensorflow-runtime-4ce3e4da2e21ae4dfcee9366415e55f408c884ec.tar.gz 15313054 BLAKE2B 316da579b93d83bca43d51198dc65dea12972d73f019a5b78fe53162966e022d21d4225ba4a7786d1a0f376550a1052c59858df04b958768962b88d64d3c5083 SHA512 ea490ebc8a5eef4a7ce6185c19e3b1305fd886c8145ef54387076f458bfec56a8a33452728206afa67001273920f6958317c8c4289e32ac6fea432e15a2502c5
+DIST tensorflow-runtime-70637966e2ec9afccc2cf4d51ed2391172b1b9c5.tar.gz 3925477 BLAKE2B 9a34c431e98d3d6d10a880bfcd70397ed7e63e4ecc0669c1f75a6c15d3c143ab21245d2454031ac0abd67f56b675f7435e8ce7dc934500d506f4b0d5c2a81ecc SHA512 ec7189bdf04c7a54ea2b0e232b311a375eedc7a172cfe72935494faa71947408bedfa0e5160ce1ad9c14e120e64b1cc2d10c9714063179c4e73f867c3b236160
+DIST tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz 15183227 BLAKE2B 3c9a3f256db2cd4ff74318da2fc42dbe98669142cc0ea567ac29df4e2faea6e6bc55508f8ec555a88d19bc064123f80e9809affd64628dd9483adfa0dac41aca SHA512 d505278cc7b82f1b1b3c0588e654e64cd63824c920b0b3c93b778ec1f46f005d17e922ee24dde9cb78714f0a2b22c7038f73273d94c46360b7aca92cb5ad61a3
+DIST tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz 15228644 BLAKE2B e621ece4bbe3139661ef48c628459118eb2078151907630d6fde4086bd73f09af2ab0bb1c43ccf81d84230e3bb3be617e505f76c5d4333fee9adece58e4f4042 SHA512 f79f1e0a44a60cd064e21461380dfd5eb47a8912064f238da4ea94c8c8c94a680e438ff2b202bd0c81049e104293b5bbbcdfb604cf9ebecf6e6bf34d6782b0f5
DIST tensorflow-runtime-91d765cad5599f9710973d3e34d4dc22583e2e79.tar.gz 15226589 BLAKE2B 5a00d0f884c86f85a25aba8f7d9eee509f35c114e9bfa24ce3effe9437bc549a4a7f013b03b515fbb4a217724a2e2abca96300fba64560b0d0e4fdb05fb9c3ac SHA512 b2fc8a240de1a13fade8628358f410b8f6e1bfde9d2cec3765b62d2ee7eb143c168687a23cb79c7aecd19a668d2d3be46fba361d16ad29b722fe19004aa3c5a2
-EBUILD tensorflow-2.11.0.ebuild 16390 BLAKE2B 3c42f3144b090dbe55d65dab4ea8aac0222667a2aba5d7478670170916c85d0f1fbd56e78afd80f387846c3128e65d9068aa4e8a7ed90da4fc8484a6ef6ed43a SHA512 6f84bec7d5570666842422c090c86585a397a6b50690a4fc48a60d68a8dfbe1d9e6259ade4dc5b1bb569b86d167a8b06af20f82e4b9f2e220ff2734f90ca7bd0
-EBUILD tensorflow-2.12.0.ebuild 16814 BLAKE2B 8760241cc7b398ac93d52aeb25ddef5ef38dd53ca9e8e02df6402c0dca73393cf9db653c1a13ee1d70e4848bcde85ec5832cad031d5cd36522d8088dfca1ce0d SHA512 981f776b11d7a61dbc109a71764a3b2c51ed157fb6f5dc0039ce827ba55f2e5e0fef5bc92196f1e990592cf9f2705fe3709d57345710682e046c5dc664f0f935
+DIST zstd-v1.4.5.zip 2261522 BLAKE2B 0fdd2bc47d39af48445868e8ad0dc1bb06d377422c0743924cc309b9772a6964bf5a0d40380fffdd84d37ff114a371d283ca1eacd02c505380ba757462cf87d2 SHA512 7ab1f4ce57d89ea4848227173fe7600b1b4378717b1460e46d2c862531fa717344c0e52fc498f925646ea621b7209326e73f88885839191aa9a7c6c2896d9091
+EBUILD tensorflow-2.11.0.ebuild 16502 BLAKE2B c16edfdb7c70d55fbc5f977bc3cb8277e12bf575e00b40d9d975cd1f482a3001ec0acbaa6eb48115edb0435afda27e392f61f962476f37fae2c315a0784e9169 SHA512 57050eb35c15e928c9996aeef6e4f084a071d06c8613ad66532eb755821b7215e63926abebde19ba706fe6d68e53126b9bde6c9d8b5d060e582d1bc68554f26d
+EBUILD tensorflow-2.12.0.ebuild 16901 BLAKE2B a3547dc5f29ccf1fef287301a0b4172276a05c9a74eafa52916ba43fffdb5d804ce32203835eb8c2c1742f628222640f2a6a7ca4e3304022d89f2a3f66a5dd62 SHA512 e135b895b91149fec71e55ea0d9157b49294aa2d4168aed22b221bd3e302347134b1caf36ef2d01ed2e0876a661f18558e0f36fc34c76997d6c0e1911472cd96
+EBUILD tensorflow-2.13.1.ebuild 19647 BLAKE2B 874d58bf79994bc7c170ba1d23f0addba19cd369293ef61f16adbf42a80215c1658c7705b500cdffb217a5038fa890d1d1bb3aaa462f2645cfcd78437875d726 SHA512 7c23f706e3d252fa438f27bbe492117b3671dc575fe6be99cf804e04486af6fa515cdde5985eb955d912b415f97e2f2988916e904e9ec330f8b94cb937d7b4aa
+EBUILD tensorflow-2.14.1.ebuild 19177 BLAKE2B 1bb46ec6894877ddc65105d51ca504049092ec0583f2d02a06934ce7f060281b71ccb6b3545c1a8bacd8c234f9dce71d4ef3d5292cd750d2b18d2d8da950bacc SHA512 44974cf844cb6d7f61f7cfa16fc7c74d6a806de433b96b5f86d4d96c1c337bc2bbac3db8ed20b6af6127b512636b34ace8f2c52bd5f7be8715a7513bb9073bc0
+EBUILD tensorflow-2.15.0.ebuild 20328 BLAKE2B 41d1f89050a44605a8f162a9a5d1b630f608e3627cc1ab84b198ba2fef2aa9c4ca29dbf0824e2450e2bb64f49116b5dcb6452f573b2a4efed4532fbaa2a1cc3b SHA512 156623f85132e3783d1c7f33a071db47b99d8d4b1b8d861e05038f0f08861b546bea81a04a604ffd2ae5935c093908b7e8927563c992e4a2c1422233526bfc6f
MISC metadata.xml 504 BLAKE2B e0c7f8e0d4c1e632542e61c9d09a69119d56b7d0f6c2a289fa9fb074b055f2a9b238309b1cedbd30da97c85daad4a8d26b821337f2bdb7809cda7ab70a344aef SHA512 fee4f53ef507ae09e85d5792921a2ad822b9b106db9c67b55dff7337ceb095138cf6b1edc47c81f91b6fae981a979c3e9f46afbb7cae6f2987080e9651640a49
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
new file mode 100644
index 000000000000..b6a4dab0313a
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
@@ -0,0 +1,38 @@
+From 87b5fceaeb9667c47504637462f0b5944661e5fd Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Wed, 11 Sep 2019 12:08:34 +0800
+Subject: [PATCH 01/12] WORKSPACE: add rules-docker http_archive,
+ bazel-toolchains uses git_repo
+
+git_repository() rules cannot pull from --distdir and fail when building
+without internet access. Use http_archive instead and pin the sha256
+hash as well.
+
+Signed-off-by: Jason Zaman <jason@perfinion.com>
+---
+ WORKSPACE | 10 ++++++++++
+ 1 file changed, 10 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index 1286ef9ac03..1a19e5f2e3d 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,15 @@
+ workspace(name = "org_tensorflow")
+
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++http_archive(
++ name = "io_bazel_rules_docker",
++ sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++ strip_prefix = "rules_docker-0.10.0",
++ urls = [
++ "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++ ],
++)
++
+ # Initialize the TensorFlow repository and all dependencies.
+ #
+ # The cascade of load() statements and tf_workspace?() calls works around the
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..d14e9a7215a9
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From e3c7880a940711d071747c2494dd8966fad9c63f Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/12] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+
+ cc_library(
+ name = "cord",
+- linkopts = ["-labsl_cord"],
++ linkopts = [
++ "-labsl_cord",
++ "-labsl_cord_internal",
++ "-labsl_cordz_functions",
++ "-labsl_cordz_handle",
++ "-labsl_cordz_info",
++ "-labsl_cordz_sample_token",
++ ],
+ deps = [
+ ":str_format",
+ "//absl/container:compressed_tuple",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..f07c70b8f88e
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From d508dad1ffe6c6229f912fb7283dc90666a09025 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/12] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index 6a26ca83b44..c938a8c7a9f 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -163,9 +163,9 @@ cc_library(
+ includes = _INCLUDES_LIST,
+ # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+ linkopts = select({
+- "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt"],
++ "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt", "-lm"],
+ "//conditions:default": [],
+ }),
+ textual_hdrs = _TEXTUAL_HDRS_LIST,
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..5fe8e7117c66
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From b87687bace18200785b3f2686791c457f3ef72e7 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/12] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts dont get propagated up to the shared library correctly so
+workaround by applying them directly
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index fce465ff1f2..4c0bd5a148b 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -43,6 +43,7 @@ load(
+ "tf_cc_shared_library",
+ )
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+
+ # copybara:uncomment_begin
+ # load("//devtools/copybara/rules:copybara.bzl", "copybara_config_test")
+@@ -1293,7 +1294,7 @@ tf_cc_shared_library(
+ "-z defs",
+ "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+ ],
+- }),
++ }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+ per_os_targets = True,
+ roots = [
+ "//tensorflow/c:c_api",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..4395e0dc1180
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From 11ca00ba2561f6466917a7bbb23fb266e5e31045 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/12] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+
+ package(default_visibility = ["//visibility:public"])
+
++cc_library(
++ name = "any_invocable",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
++)
++
+ cc_library(
+ name = "bind_front",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/container:compressed_tuple",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
+ )
+
+ cc_library(
+ name = "function_ref",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ ],
+ )
+diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD
+index 948de07751a..5ebd656be8e 100644
+--- a/third_party/absl/system.absl.random.BUILD
++++ b/third_party/absl/system.absl.random.BUILD
+@@ -51,3 +51,15 @@ cc_library(
+ "//absl/types:span",
+ ],
+ )
++
++cc_library(
++ name = "bit_gen_ref",
++ deps = [
++ ":random",
++ "//absl/base:core_headers",
++ "//absl/base:fast_type_id",
++ "//absl/meta:type_traits",
++ "//absl/random/internal:distribution_caller",
++ "//absl/random/internal:fast_uniform_bits",
++ ],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch
new file mode 100644
index 000000000000..683613ced2f2
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch
@@ -0,0 +1,27 @@
+From cb0eb12a89b18955c75e305f96069aeabf01a5b2 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:33:14 -0700
+Subject: [PATCH 06/12] systemlib: Update targets for absl_py
+
+---
+ third_party/systemlibs/absl_py.absl.flags.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD
+index 4049989ae2f..614938fb8c4 100644
+--- a/third_party/systemlibs/absl_py.absl.flags.BUILD
++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD
+@@ -5,3 +5,10 @@ package(default_visibility = ["//visibility:public"])
+ py_library(
+ name = "flags",
+ )
++
++py_library(
++ name = "argparse_flags",
++ deps = [
++ ":flags",
++ ],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch
new file mode 100644
index 000000000000..c7a82e4d8c16
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch
@@ -0,0 +1,28 @@
+From 7c532a426ca473aff233df19baef46fe9a2196a1 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sat, 3 Jun 2023 16:23:51 -0700
+Subject: [PATCH 07/12] systemlibs: Add well_known_types_py_pb2 target
+
+Bug: https://github.com/tensorflow/tensorflow/issues/60667
+---
+ third_party/systemlibs/protobuf.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD
+index 4d05ab28d12..b3d72b0e3ad 100644
+--- a/third_party/systemlibs/protobuf.BUILD
++++ b/third_party/systemlibs/protobuf.BUILD
+@@ -111,3 +111,10 @@ py_library(
+ visibility = ["//visibility:public"],
+ deps = [dep + "_proto" for dep in proto[1][1]],
+ ) for proto in WELL_KNOWN_PROTO_MAP.items()]
++
++py_proto_library(
++ name = "well_known_types_py_pb2",
++ include = ".",
++ srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()],
++ visibility = ["//visibility:public"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch
new file mode 100644
index 000000000000..c2467776b33a
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch
@@ -0,0 +1,55 @@
+From fa870249f5629d1e5cc3299c1a42078dd94343c7 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 10:14:42 +0800
+Subject: [PATCH 08/12] Relax setup.py version requirements
+
+---
+ tensorflow/tools/pip_package/setup.py | 16 ++++++++--------
+ 1 file changed, 8 insertions(+), 8 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index e64c0d4344b..62315962318 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -86,21 +86,21 @@ REQUIRED_PACKAGES = [
+ 'astunparse >= 1.6.0',
+ 'flatbuffers >= 23.1.21',
+ # TODO(b/213222745) gast versions above 0.4.0 break TF's tests
+- 'gast >= 0.2.1, <= 0.4.0',
++ 'gast',
+ 'google_pasta >= 0.1.1',
+ 'h5py >= 2.9.0',
+- 'libclang >= 13.0.0',
+- 'numpy >= 1.22, <= 1.24.3',
++ # 'libclang >= 13.0.0',
++ 'numpy >= 1.22',
+ 'opt_einsum >= 2.3.2',
+ 'packaging',
+ 'protobuf>=3.20.3,<5.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5',
+ 'setuptools',
+ 'six >= 1.12.0',
+ 'termcolor >= 1.1.0',
+- 'typing_extensions>=3.6.6,<4.6.0',
++ 'typing_extensions>=3.6.6',
+ 'wrapt >= 1.11.0',
+- 'tensorflow-io-gcs-filesystem >= 0.23.1;platform_machine!="arm64" or ' +
+- 'platform_system!="Darwin"',
++ # 'tensorflow-io-gcs-filesystem >= 0.23.1;platform_machine!="arm64" or ' +
++ # 'platform_system!="Darwin"',
+ # grpcio does not build correctly on big-endian machines due to lack of
+ # BoringSSL support.
+ # See https://github.com/tensorflow/tensorflow/issues/17882.
+@@ -134,8 +134,8 @@ FAKE_REQUIRED_PACKAGES = [
+ _VERSION + ';platform_system=="Windows"',
+ ]
+
+-if platform.system() == 'Linux' and platform.machine() == 'x86_64':
+- REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
++# if platform.system() == 'Linux' and platform.machine() == 'x86_64':
++# REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
+
+ if collaborator_build:
+ # If this is a collaborator build, then build an "installer" wheel and
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch
new file mode 100644
index 000000000000..26b61ac3e5fd
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch
@@ -0,0 +1,30 @@
+From 7961afc6f67a4278409f7bdb710180daeb91c106 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 10:31:31 +0800
+Subject: [PATCH 09/12] fix sparse transpose op2
+
+---
+ tensorflow/core/kernels/sparse/transpose_op.cc | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/tensorflow/core/kernels/sparse/transpose_op.cc b/tensorflow/core/kernels/sparse/transpose_op.cc
+index 4fe99013480..a247d417504 100644
+--- a/tensorflow/core/kernels/sparse/transpose_op.cc
++++ b/tensorflow/core/kernels/sparse/transpose_op.cc
+@@ -208,6 +208,13 @@ Status CSRSparseMatrixTranspose<Device, T>::operator()(
+ return OkStatus();
+ }
+
++#if GOOGLE_CUDA || TENSORFLOW_USE_ROCM
++template struct CSRSparseMatrixTranspose<GPUDevice, float>;
++template struct CSRSparseMatrixTranspose<GPUDevice, double>;
++template struct CSRSparseMatrixTranspose<GPUDevice, std::complex<float>>;
++template struct CSRSparseMatrixTranspose<GPUDevice, std::complex<double>>;
++#endif
++
+ // CPU kernel for transposing a single component of a CSR SparseMatrix.
+ template <typename T>
+ struct CSRSparseMatrixTransposeComponent<CPUDevice, T> {
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch
new file mode 100644
index 000000000000..8ba85f4f1019
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch
@@ -0,0 +1,352 @@
+From d224ce2be1a6b67ab53697ae978c1d29e6d3e159 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 13:12:20 +0800
+Subject: [PATCH 10/12] systemlibs: update targets for absl
+
+---
+ .../distribute/experimental/rpc/kernels/BUILD | 1 +
+ third_party/absl/system.absl.debugging.BUILD | 20 +-
+ third_party/absl/system.absl.log.BUILD | 271 ++++++++++++++++++
+ third_party/absl/workspace.bzl | 1 +
+ 4 files changed, 288 insertions(+), 5 deletions(-)
+ create mode 100644 third_party/absl/system.absl.log.BUILD
+
+diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+index f8757df41e0..d168d36cb2a 100644
+--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD
++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+@@ -51,6 +51,7 @@ tf_kernel_library(
+ "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_cc_grpc_proto",
+ "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc",
+ "@com_github_grpc_grpc//:grpc++",
++ "@com_google_absl//absl/log:check",
+ "@com_google_absl//absl/strings",
+ "@com_google_absl//absl/strings:str_format",
+ ],
+diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD
+index 931ffdc9e92..223db7b4c46 100644
+--- a/third_party/absl/system.absl.debugging.BUILD
++++ b/third_party/absl/system.absl.debugging.BUILD
+@@ -26,15 +26,25 @@ cc_library(
+
+ cc_library(
+ name = "failure_signal_handler",
+- linkopts = [
+- "-labsl_failure_signal_handler",
+- "-labsl_examine_stack",
++ linkopts = ["-labsl_failure_signal_handler"],
++ deps = [
++ ":examine_stack",
++ ":stacktrace",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
+ ],
++)
++
++cc_library(
++ name = "examine_stack",
++ linkopts = ["-labsl_examine_stack"],
+ deps = [
+ ":stacktrace",
+ ":symbolize",
+- "//absl/base",
+- "//absl/base:errno_saver",
++ "//absl/base:config",
++ "//absl/base:core_headers",
+ "//absl/base:raw_logging_internal",
+ ],
+ )
+diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD
+new file mode 100644
+index 00000000000..9a2a5de657e
+--- /dev/null
++++ b/third_party/absl/system.absl.log.BUILD
+@@ -0,0 +1,271 @@
++load("@rules_cc//cc:defs.bzl", "cc_library")
++
++package(default_visibility = ["//visibility:public"])
++
++cc_library(
++ name = "log",
++ deps = [
++ "//absl/log:internal_log_impl",
++ ],
++)
++
++cc_library(
++ name = "internal_log_impl",
++ deps = [
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ ],
++)
++
++cc_library(
++ name = "internal_conditions",
++ linkopts = ["-labsl_log_internal_conditions"],
++ deps = [
++ ":internal_voidify",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_voidify",
++ deps = ["//absl/base:config"],
++)
++
++cc_library(
++ name = "internal_message",
++ linkopts = ["-labsl_log_internal_message"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_append_truncated",
++ ":internal_format",
++ ":internal_globals",
++ ":internal_log_sink_set",
++ ":internal_nullguard",
++ ":internal_proto",
++ ":severity",
++ ":sink",
++ ":sink_registry",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:errno_saver",
++ "//absl/base:raw_logging_internal",
++ "//absl/base:strerror",
++ "//absl/container:inlined_vector",
++ "//absl/debugging:examine_stack",
++ "//absl/memory",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_append_truncated",
++ deps = [
++ "//absl/base:config",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_format",
++ linkopts = ["-labsl_log_internal_format"],
++ deps = [
++ ":internal_append_truncated",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/strings:str_format",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_globals",
++ linkopts = ["-labsl_log_internal_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/strings",
++ "//absl/time",
++ ],
++)
++
++cc_library(
++ name = "internal_proto",
++ linkopts = ["-labsl_log_internal_proto"],
++ deps = [
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_log_sink_set",
++ linkopts = ["-labsl_log_internal_log_sink_set"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ ":sink",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/cleanup",
++ "//absl/strings",
++ "//absl/synchronization",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_config",
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_nullguard",
++ linkopts = ["-labsl_log_internal_nullguard"],
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "globals",
++ linkopts = ["-labsl_log_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:atomic_hook",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/hash",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "entry",
++ linkopts = ["-labsl_log_entry"],
++ deps = [
++ ":internal_config",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "severity",
++ linkopts = ["-labsl_log_severity"],
++ deps = ["//absl/base:core_headers"],
++)
++
++cc_library(
++ name = "sink",
++ linkopts = ["-labsl_log_sink"],
++ deps = [
++ ":entry",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "sink_registry",
++ deps = [
++ ":internal_log_sink_set",
++ ":sink",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "internal_strip",
++ deps = [
++ ":internal_message",
++ ":internal_nullstream",
++ ":severity",
++ ],
++)
++
++cc_library(
++ name = "internal_nullstream",
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "check",
++ deps = [
++ ":internal_check_impl",
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_impl",
++ deps = [
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_op",
++ linkopts = ["-labsl_log_internal_check_op"],
++ deps = [
++ ":internal_nullguard",
++ ":internal_nullstream",
++ ":internal_strip",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "absl_check",
++ deps = [":internal_check_impl"],
++)
++
++cc_library(
++ name = "absl_log",
++ deps = [":internal_log_impl"],
++)
+diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl
+index 07f49cebb78..a7f4e5ffc44 100644
+--- a/third_party/absl/workspace.bzl
++++ b/third_party/absl/workspace.bzl
+@@ -20,6 +20,7 @@ def repo():
+ "flags",
+ "functional",
+ "hash",
++ "log",
+ "memory",
+ "meta",
+ "numeric",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch
new file mode 100644
index 000000000000..d6c135cc706d
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch
@@ -0,0 +1,25 @@
+From 0264617528e53e6b9c8f298ec9bec4a064ffdf27 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Wed, 29 Nov 2023 13:35:24 +0800
+Subject: [PATCH 11/12] systemlibs:update targets for google_cloud_cpp
+
+---
+ third_party/systemlibs/google_cloud_cpp.BUILD | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/third_party/systemlibs/google_cloud_cpp.BUILD b/third_party/systemlibs/google_cloud_cpp.BUILD
+index cbe6e10ba5b..fce306a22f1 100644
+--- a/third_party/systemlibs/google_cloud_cpp.BUILD
++++ b/third_party/systemlibs/google_cloud_cpp.BUILD
+@@ -4,3 +4,8 @@ filegroup(
+ name = "LICENSE",
+ visibility = ["//visibility:public"],
+ )
++
++cc_library(
++ name = "storage_client",
++ visibility = ["//visibility:public"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch
new file mode 100644
index 000000000000..e564fdbfd185
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch
@@ -0,0 +1,29 @@
+From 62c2fc13f69f6ae6a3315f59430018898b37b74f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Wed, 29 Nov 2023 19:14:28 +0800
+Subject: [PATCH 12/12] bump cudnn frontend to v0.9
+
+---
+ tensorflow/workspace2.bzl | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/tensorflow/workspace2.bzl b/tensorflow/workspace2.bzl
+index da9295adaba..86a3df2fe37 100644
+--- a/tensorflow/workspace2.bzl
++++ b/tensorflow/workspace2.bzl
+@@ -174,9 +174,9 @@ def _tf_repositories():
+ name = "cudnn_frontend_archive",
+ build_file = "//third_party:cudnn_frontend.BUILD",
+ patch_file = ["//third_party:cudnn_frontend_header_fix.patch"],
+- sha256 = "bfcf778030831f325cfc13ae5995388cc834fbff2995a297ba580d9ec65ca3b6",
+- strip_prefix = "cudnn-frontend-0.8",
+- urls = tf_mirror_urls("https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.8.zip"),
++ sha256 = "d8dba9e2607a0c256aa8eacb45b39986ab6f3f24a4d431d4397047a3cb0cd4fb",
++ strip_prefix = "cudnn-frontend-0.9",
++ urls = tf_mirror_urls("https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip"),
+ )
+
+ tf_http_archive(
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch b/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch
new file mode 100644
index 000000000000..9961d94cd48c
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch
@@ -0,0 +1,39 @@
+From 9081d1ccadb7fcd3e2dd01106e85003af2fb7975 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 21 Jan 2024 00:26:21 +0800
+Subject: [PATCH 13/13] Fixing build issue with Clang 16 and GCC 13
+
+---
+ tensorflow/lite/kernels/internal/spectrogram.cc | 2 ++
+ tensorflow/tsl/lib/io/cache.h | 2 ++
+ 2 files changed, 4 insertions(+)
+
+diff --git a/tensorflow/lite/kernels/internal/spectrogram.cc b/tensorflow/lite/kernels/internal/spectrogram.cc
+index a832962a38d..9b1f86ba717 100644
+--- a/tensorflow/lite/kernels/internal/spectrogram.cc
++++ b/tensorflow/lite/kernels/internal/spectrogram.cc
+@@ -18,6 +18,8 @@ limitations under the License.
+ #include <assert.h>
+ #include <math.h>
+
++#include <cstdint>
++
+ #include "third_party/fft2d/fft.h"
+
+ namespace tflite {
+diff --git a/tensorflow/tsl/lib/io/cache.h b/tensorflow/tsl/lib/io/cache.h
+index f894c5916d5..e49d09b7450 100644
+--- a/tensorflow/tsl/lib/io/cache.h
++++ b/tensorflow/tsl/lib/io/cache.h
+@@ -16,6 +16,8 @@ limitations under the License.
+ #ifndef TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ #define TENSORFLOW_TSL_LIB_IO_CACHE_H_
+
++#include <cstdint>
++
+ #include "tensorflow/tsl/platform/stringpiece.h"
+
+ // A Cache is an interface that maps keys to values. It has internal
+--
+2.43.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
new file mode 100644
index 000000000000..9e93b3d5b8f3
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
@@ -0,0 +1,37 @@
+From 0d3f532325cc39eb816e94e5bae259ea5a5a2304 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:34:44 +0800
+Subject: [PATCH 01/13] WORKSPACE: add rules-docker http_archive,
+ bazel-toolchains uses git_repo
+
+git_repository() rules cannot pull from --distdir and fail when building
+without internet access. Use http_archive instead and pin the sha256
+hash as well.
+---
+ WORKSPACE | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index fb3af8a2bea..644b731b1dc 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,16 @@
+ workspace(name = "org_tensorflow")
+
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++
++http_archive(
++ name = "io_bazel_rules_docker",
++ sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++ strip_prefix = "rules_docker-0.10.0",
++ urls = [
++ "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++ ],
++)
++
+ # We must initialize hermetic python first.
+ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..5436744e1275
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From 33b11df0767ead9a64a65e3ae19e329bba91dd75 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/13] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+
+ cc_library(
+ name = "cord",
+- linkopts = ["-labsl_cord"],
++ linkopts = [
++ "-labsl_cord",
++ "-labsl_cord_internal",
++ "-labsl_cordz_functions",
++ "-labsl_cordz_handle",
++ "-labsl_cordz_info",
++ "-labsl_cordz_sample_token",
++ ],
+ deps = [
+ ":str_format",
+ "//absl/container:compressed_tuple",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..8cff4a422ee3
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From e098854ed15caa864b83033a1bc6b1aa7ca93a5c Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/13] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index 263c64eb681..f1860b1e7c3 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -165,9 +165,9 @@ cc_library(
+ includes = _INCLUDES_LIST,
+ # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+ linkopts = select({
+- "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt"],
+- "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt"],
++ "@org_tensorflow//tensorflow/tsl:linux_aarch64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_x86_64": ["-lrt", "-lm"],
++ "@org_tensorflow//tensorflow/tsl:linux_ppc64le": ["-lrt", "-lm"],
+ "//conditions:default": [],
+ }),
+ textual_hdrs = _TEXTUAL_HDRS_LIST,
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..0fa4d02d4c62
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From e6645115b8a838b40a49c73cb948dc373c5e98c8 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/13] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts dont get propagated up to the shared library correctly so
+workaround by applying them directly
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index 202553cd531..63ce1e7b385 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -39,6 +39,7 @@ load(
+ "tf_cc_shared_library",
+ )
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+
+ # copybara:uncomment_begin
+ # load("//devtools/copybara/rules:copybara.bzl", "copybara_config_test")
+@@ -1312,7 +1313,7 @@ tf_cc_shared_library(
+ "-z defs",
+ "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+ ],
+- }),
++ }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+ per_os_targets = True,
+ roots = [
+ "//tensorflow/c:c_api",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..7dadd35bc2b7
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From c390554addb171439310c00dce2972539ac0e71d Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/13] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+
+ package(default_visibility = ["//visibility:public"])
+
++cc_library(
++ name = "any_invocable",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
++)
++
+ cc_library(
+ name = "bind_front",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/container:compressed_tuple",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
+ )
+
+ cc_library(
+ name = "function_ref",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ ],
+ )
+diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD
+index 948de07751a..5ebd656be8e 100644
+--- a/third_party/absl/system.absl.random.BUILD
++++ b/third_party/absl/system.absl.random.BUILD
+@@ -51,3 +51,15 @@ cc_library(
+ "//absl/types:span",
+ ],
+ )
++
++cc_library(
++ name = "bit_gen_ref",
++ deps = [
++ ":random",
++ "//absl/base:core_headers",
++ "//absl/base:fast_type_id",
++ "//absl/meta:type_traits",
++ "//absl/random/internal:distribution_caller",
++ "//absl/random/internal:fast_uniform_bits",
++ ],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch
new file mode 100644
index 000000000000..fa021358998c
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0006-systemlib-Update-targets-for-absl_py.patch
@@ -0,0 +1,24 @@
+From d2dc4d308a83cb2d1620e7c5213ec570fe3138af Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:48:15 +0800
+Subject: [PATCH 06/13] systemlib: Update targets for absl_py
+
+---
+ third_party/systemlibs/absl_py.absl.flags.BUILD | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD
+index d92f4949df1..614938fb8c4 100644
+--- a/third_party/systemlibs/absl_py.absl.flags.BUILD
++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD
+@@ -8,4 +8,7 @@ py_library(
+
+ py_library(
+ name = "argparse_flags",
++ deps = [
++ ":flags",
++ ],
+ )
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch
new file mode 100644
index 000000000000..655be6bc919f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0007-systemlib-Add-well_known_types_py_pb2-target.patch
@@ -0,0 +1,28 @@
+From e58f5674af07a3853e59c32b92d91e590b0224e2 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sat, 3 Jun 2023 16:23:51 -0700
+Subject: [PATCH 07/13] systemlib: Add well_known_types_py_pb2 target
+
+Bug: https://github.com/tensorflow/tensorflow/issues/60667
+---
+ third_party/systemlibs/protobuf.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD
+index 4d05ab28d12..b3d72b0e3ad 100644
+--- a/third_party/systemlibs/protobuf.BUILD
++++ b/third_party/systemlibs/protobuf.BUILD
+@@ -111,3 +111,10 @@ py_library(
+ visibility = ["//visibility:public"],
+ deps = [dep + "_proto" for dep in proto[1][1]],
+ ) for proto in WELL_KNOWN_PROTO_MAP.items()]
++
++py_proto_library(
++ name = "well_known_types_py_pb2",
++ include = ".",
++ srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()],
++ visibility = ["//visibility:public"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch
new file mode 100644
index 000000000000..5d1667d75e11
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0008-Relax-setup.py-version-requirements.patch
@@ -0,0 +1,38 @@
+From e6cecad5c2595cb1166a78b698377f12da6e7a09 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:54:18 +0800
+Subject: [PATCH 08/13] Relax setup.py version requirements
+
+---
+ tensorflow/tools/pip_package/setup.py | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index fdb718d1628..3897d5316ba 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -101,8 +101,8 @@ REQUIRED_PACKAGES = [
+ 'six >= 1.12.0',
+ 'termcolor >= 1.1.0',
+ 'typing_extensions >= 3.6.6',
+- 'wrapt >= 1.11.0, < 1.15',
+- 'tensorflow-io-gcs-filesystem >= 0.23.1',
++ 'wrapt >= 1.11.0',
++ # 'tensorflow-io-gcs-filesystem >= 0.23.1',
+ # grpcio does not build correctly on big-endian machines due to lack of
+ # BoringSSL support.
+ # See https://github.com/tensorflow/tensorflow/issues/17882.
+@@ -140,8 +140,8 @@ FAKE_REQUIRED_PACKAGES = [
+ _VERSION + ';platform_system=="Windows"',
+ ]
+
+-if platform.system() == 'Linux' and platform.machine() == 'x86_64':
+- REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
++# if platform.system() == 'Linux' and platform.machine() == 'x86_64':
++# REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
+
+ if collaborator_build:
+ # If this is a collaborator build, then build an "installer" wheel and
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch
new file mode 100644
index 000000000000..6b946461fba6
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0009-systemlib-update-targets-for-absl.patch
@@ -0,0 +1,365 @@
+From 1a72b50ed5054cb025c0aa2a39ce2499417f2d76 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 13:12:20 +0800
+Subject: [PATCH 09/13] systemlib: update targets for absl
+
+---
+ .../compiler/mlir/tools/kernel_gen/BUILD | 1 +
+ .../distribute/experimental/rpc/kernels/BUILD | 1 +
+ third_party/absl/system.absl.debugging.BUILD | 20 +-
+ third_party/absl/system.absl.log.BUILD | 271 ++++++++++++++++++
+ third_party/absl/workspace.bzl | 1 +
+ 5 files changed, 289 insertions(+), 5 deletions(-)
+ create mode 100644 third_party/absl/system.absl.log.BUILD
+
+diff --git a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+index 71d85d2c96e..f4a479a9daf 100644
+--- a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
++++ b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+@@ -107,6 +107,7 @@ tf_cc_binary(
+ "//tensorflow/compiler/mlir:init_mlir",
+ "//tensorflow/compiler/mlir/tensorflow",
+ "//tensorflow/core:lib",
++ "@com_google_absl//absl/log:check",
+ "@com_google_absl//absl/strings",
+ "@llvm-project//llvm:AArch64CodeGen", # fixdeps: keep
+ "@llvm-project//llvm:ARMCodeGen", # fixdeps: keep
+diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+index f9a525364c5..8b7f7b54761 100644
+--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD
++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+@@ -65,6 +65,7 @@ tf_kernel_library(
+ "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc",
+ "@com_github_grpc_grpc//:grpc++",
+ "@com_google_absl//absl/status",
++ "@com_google_absl//absl/log:check",
+ "@com_google_absl//absl/strings",
+ "@com_google_absl//absl/strings:str_format",
+ ],
+diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD
+index 931ffdc9e92..223db7b4c46 100644
+--- a/third_party/absl/system.absl.debugging.BUILD
++++ b/third_party/absl/system.absl.debugging.BUILD
+@@ -26,15 +26,25 @@ cc_library(
+
+ cc_library(
+ name = "failure_signal_handler",
+- linkopts = [
+- "-labsl_failure_signal_handler",
+- "-labsl_examine_stack",
++ linkopts = ["-labsl_failure_signal_handler"],
++ deps = [
++ ":examine_stack",
++ ":stacktrace",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
+ ],
++)
++
++cc_library(
++ name = "examine_stack",
++ linkopts = ["-labsl_examine_stack"],
+ deps = [
+ ":stacktrace",
+ ":symbolize",
+- "//absl/base",
+- "//absl/base:errno_saver",
++ "//absl/base:config",
++ "//absl/base:core_headers",
+ "//absl/base:raw_logging_internal",
+ ],
+ )
+diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD
+new file mode 100644
+index 00000000000..9a2a5de657e
+--- /dev/null
++++ b/third_party/absl/system.absl.log.BUILD
+@@ -0,0 +1,271 @@
++load("@rules_cc//cc:defs.bzl", "cc_library")
++
++package(default_visibility = ["//visibility:public"])
++
++cc_library(
++ name = "log",
++ deps = [
++ "//absl/log:internal_log_impl",
++ ],
++)
++
++cc_library(
++ name = "internal_log_impl",
++ deps = [
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ ],
++)
++
++cc_library(
++ name = "internal_conditions",
++ linkopts = ["-labsl_log_internal_conditions"],
++ deps = [
++ ":internal_voidify",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_voidify",
++ deps = ["//absl/base:config"],
++)
++
++cc_library(
++ name = "internal_message",
++ linkopts = ["-labsl_log_internal_message"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_append_truncated",
++ ":internal_format",
++ ":internal_globals",
++ ":internal_log_sink_set",
++ ":internal_nullguard",
++ ":internal_proto",
++ ":severity",
++ ":sink",
++ ":sink_registry",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:errno_saver",
++ "//absl/base:raw_logging_internal",
++ "//absl/base:strerror",
++ "//absl/container:inlined_vector",
++ "//absl/debugging:examine_stack",
++ "//absl/memory",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_append_truncated",
++ deps = [
++ "//absl/base:config",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_format",
++ linkopts = ["-labsl_log_internal_format"],
++ deps = [
++ ":internal_append_truncated",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/strings:str_format",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_globals",
++ linkopts = ["-labsl_log_internal_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/strings",
++ "//absl/time",
++ ],
++)
++
++cc_library(
++ name = "internal_proto",
++ linkopts = ["-labsl_log_internal_proto"],
++ deps = [
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_log_sink_set",
++ linkopts = ["-labsl_log_internal_log_sink_set"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ ":sink",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/cleanup",
++ "//absl/strings",
++ "//absl/synchronization",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_config",
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_nullguard",
++ linkopts = ["-labsl_log_internal_nullguard"],
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "globals",
++ linkopts = ["-labsl_log_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:atomic_hook",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/hash",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "entry",
++ linkopts = ["-labsl_log_entry"],
++ deps = [
++ ":internal_config",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "severity",
++ linkopts = ["-labsl_log_severity"],
++ deps = ["//absl/base:core_headers"],
++)
++
++cc_library(
++ name = "sink",
++ linkopts = ["-labsl_log_sink"],
++ deps = [
++ ":entry",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "sink_registry",
++ deps = [
++ ":internal_log_sink_set",
++ ":sink",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "internal_strip",
++ deps = [
++ ":internal_message",
++ ":internal_nullstream",
++ ":severity",
++ ],
++)
++
++cc_library(
++ name = "internal_nullstream",
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "check",
++ deps = [
++ ":internal_check_impl",
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_impl",
++ deps = [
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_op",
++ linkopts = ["-labsl_log_internal_check_op"],
++ deps = [
++ ":internal_nullguard",
++ ":internal_nullstream",
++ ":internal_strip",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "absl_check",
++ deps = [":internal_check_impl"],
++)
++
++cc_library(
++ name = "absl_log",
++ deps = [":internal_log_impl"],
++)
+diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl
+index 07f49cebb78..a7f4e5ffc44 100644
+--- a/third_party/absl/workspace.bzl
++++ b/third_party/absl/workspace.bzl
+@@ -20,6 +20,7 @@ def repo():
+ "flags",
+ "functional",
+ "hash",
++ "log",
+ "memory",
+ "meta",
+ "numeric",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch
new file mode 100644
index 000000000000..24b7cf4eec90
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0010-systemlib-fix-missing-osx-in-pybind11.patch
@@ -0,0 +1,25 @@
+From ce5e7c9b7f0a667514a65dc58ca67b61fa591c8f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:22:35 +0800
+Subject: [PATCH 10/13] systemlib: fix missing `:osx` in pybind11
+
+---
+ third_party/systemlibs/pybind11.BUILD | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/third_party/systemlibs/pybind11.BUILD b/third_party/systemlibs/pybind11.BUILD
+index 79a483d7b5d..cda63fbd019 100644
+--- a/third_party/systemlibs/pybind11.BUILD
++++ b/third_party/systemlibs/pybind11.BUILD
+@@ -6,3 +6,8 @@ cc_library(
+ "@org_tensorflow//third_party/python_runtime:headers",
+ ],
+ )
++
++config_setting(
++ name = "osx",
++ constraint_values = ["@platforms//os:osx"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
new file mode 100644
index 000000000000..acd46106115f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
@@ -0,0 +1,25 @@
+From 084723bca84ba51f7f67209618b5a4e064c1576a Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:24:24 +0800
+Subject: [PATCH 11/13] systemlib: fix missing `LICENSE` in flatbuffers
+
+---
+ third_party/flatbuffers/BUILD.system | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system
+index 8fe4d7a5907..b1d63b4ca0f 100644
+--- a/third_party/flatbuffers/BUILD.system
++++ b/third_party/flatbuffers/BUILD.system
+@@ -1,7 +1,7 @@
+ licenses(["notice"]) # Apache 2.0
+
+ filegroup(
+- name = "LICENSE.txt",
++ name = "LICENSE",
+ visibility = ["//visibility:public"],
+ )
+
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch
new file mode 100644
index 000000000000..67108a290e13
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0012-build-use-non-hermetic-python.patch
@@ -0,0 +1,2745 @@
+From 3f0e4685b47f71c80b18bc5b6cba1afd56070604 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:25:46 +0800
+Subject: [PATCH 12/13] build: use non-hermetic python
+
+---
+ WORKSPACE | 65 --
+ tensorflow/BUILD | 2 -
+ tensorflow/compiler/mlir/glob_lit_test.bzl | 1 -
+ tensorflow/compiler/xla/glob_lit_test.bzl | 5 -
+ tensorflow/compiler/xla/mlir_hlo/tests/BUILD | 1 -
+ tensorflow/dtensor/python/tests/BUILD | 1 -
+ tensorflow/lite/python/BUILD | 1 -
+ tensorflow/python/BUILD | 1 -
+ tensorflow/python/compiler/tensorrt/BUILD | 1 -
+ .../experimental/kernel_tests/service/BUILD | 1 -
+ tensorflow/python/debug/lib/BUILD | 1 -
+ .../python/distribute/experimental/rpc/BUILD | 1 -
+ .../python/distribute/failure_handling/BUILD | 1 -
+ tensorflow/python/eager/BUILD | 1 -
+ tensorflow/python/estimator/BUILD | 5 +-
+ tensorflow/python/framework/BUILD | 2 -
+ tensorflow/python/keras/BUILD | 1 -
+ tensorflow/python/keras/engine/BUILD | 1 -
+ tensorflow/python/keras/saving/BUILD | 1 -
+ tensorflow/python/profiler/BUILD | 1 -
+ .../python/profiler/integration_test/BUILD | 1 -
+ tensorflow/python/summary/BUILD | 1 -
+ third_party/py/BUILD.tpl | 39 +-
+ third_party/py/{non_hermetic => }/README | 0
+ third_party/py/non_hermetic/BUILD | 0
+ third_party/py/non_hermetic/BUILD.tpl | 80 --
+ third_party/py/non_hermetic/ml_dtypes/BUILD | 0
+ third_party/py/non_hermetic/ml_dtypes/LICENSE | 202 ----
+ .../py/non_hermetic/ml_dtypes/ml_dtypes.BUILD | 50 -
+ .../ml_dtypes/ml_dtypes.tests.BUILD | 60 --
+ .../py/non_hermetic/ml_dtypes/workspace.bzl | 22 -
+ third_party/py/non_hermetic/numpy/BUILD | 21 -
+ third_party/py/non_hermetic/numpy/README.md | 4 -
+ .../py/non_hermetic/numpy/tf_numpy_api/BUILD | 12 -
+ ...ensorflow.experimental.numpy.ndarray.pbtxt | 51 -
+ .../tensorflow.experimental.numpy.pbtxt | 919 ------------------
+ ...tensorflow.experimental.numpy.random.pbtxt | 35 -
+ .../py/non_hermetic/python_configure.bzl | 315 ------
+ third_party/py/numpy/BUILD | 7 +-
+ third_party/py/numpy/LICENSE | 60 --
+ .../tensorflow.experimental.numpy.pbtxt | 2 +-
+ third_party/py/python_configure.bzl | 252 ++++-
+ 42 files changed, 291 insertions(+), 1936 deletions(-)
+ rename third_party/py/{non_hermetic => }/README (100%)
+ delete mode 100644 third_party/py/non_hermetic/BUILD
+ delete mode 100644 third_party/py/non_hermetic/BUILD.tpl
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/LICENSE
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
+ delete mode 100644 third_party/py/non_hermetic/ml_dtypes/workspace.bzl
+ delete mode 100644 third_party/py/non_hermetic/numpy/BUILD
+ delete mode 100644 third_party/py/non_hermetic/numpy/README.md
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
+ delete mode 100644 third_party/py/non_hermetic/python_configure.bzl
+ delete mode 100644 third_party/py/numpy/LICENSE
+
+diff --git a/WORKSPACE b/WORKSPACE
+index 644b731b1dc..3626ae4e805 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -11,71 +11,6 @@ http_archive(
+ ],
+ )
+
+-# We must initialize hermetic python first.
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+-
+-http_archive(
+- name = "bazel_skylib",
+- sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+- urls = [
+- "https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- ],
+-)
+-
+-http_archive(
+- name = "rules_python",
+- sha256 = "84aec9e21cc56fbc7f1335035a71c850d1b9b5cc6ff497306f84cced9a769841",
+- strip_prefix = "rules_python-0.23.1",
+- url = "https://github.com/bazelbuild/rules_python/releases/download/0.23.1/rules_python-0.23.1.tar.gz",
+-)
+-
+-load("@rules_python//python:repositories.bzl", "python_register_toolchains")
+-load(
+- "//tensorflow/tools/toolchains/python:python_repo.bzl",
+- "python_repository",
+-)
+-
+-python_repository(name = "python_version_repo")
+-
+-load("@python_version_repo//:py_version.bzl", "HERMETIC_PYTHON_VERSION")
+-
+-python_register_toolchains(
+- name = "python",
+- ignore_root_user_error = True,
+- python_version = HERMETIC_PYTHON_VERSION,
+-)
+-
+-load("@python//:defs.bzl", "interpreter")
+-load("@rules_python//python:pip.bzl", "package_annotation", "pip_parse")
+-
+-NUMPY_ANNOTATIONS = {
+- "numpy": package_annotation(
+- additive_build_content = """\
+-filegroup(
+- name = "includes",
+- srcs = glob(["site-packages/numpy/core/include/**/*.h"]),
+-)
+-cc_library(
+- name = "numpy_headers",
+- hdrs = [":includes"],
+- strip_include_prefix="site-packages/numpy/core/include/",
+-)
+-""",
+- ),
+-}
+-
+-pip_parse(
+- name = "pypi",
+- annotations = NUMPY_ANNOTATIONS,
+- python_interpreter_target = interpreter,
+- requirements = "//:requirements_lock_" + HERMETIC_PYTHON_VERSION.replace(".", "_") + ".txt",
+-)
+-
+-load("@pypi//:requirements.bzl", "install_deps")
+-
+-install_deps()
+-
+ # Initialize the TensorFlow repository and all dependencies.
+ #
+ # The cascade of load() statements and tf_workspace?() calls works around the
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index 63ce1e7b385..9573a982298 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -1718,8 +1718,6 @@ py_library(
+ "//tensorflow/lite/python:lite",
+ "//tensorflow/lite/python/authoring",
+ "//tensorflow/python:no_contrib",
+- "@pypi_keras//:pkg",
+- "@pypi_tensorboard//:pkg",
+ ],
+ )
+ # copybara:comment_end
+diff --git a/tensorflow/compiler/mlir/glob_lit_test.bzl b/tensorflow/compiler/mlir/glob_lit_test.bzl
+index e689b4c0b31..f65c86b727b 100644
+--- a/tensorflow/compiler/mlir/glob_lit_test.bzl
++++ b/tensorflow/compiler/mlir/glob_lit_test.bzl
+@@ -58,7 +58,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ "@llvm-project//llvm:count",
+ "@llvm-project//llvm:not",
+ ],
+- deps = ["@pypi_lit//:pkg"],
+ size = size,
+ main = "lit.py",
+ exec_properties = exec_properties,
+diff --git a/tensorflow/compiler/xla/glob_lit_test.bzl b/tensorflow/compiler/xla/glob_lit_test.bzl
+index 44b838ccb0a..86200b24da1 100644
+--- a/tensorflow/compiler/xla/glob_lit_test.bzl
++++ b/tensorflow/compiler/xla/glob_lit_test.bzl
+@@ -52,10 +52,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ # can remove this logic. This is necessary to have these tests run on builds
+ # using Python 3.11, but also to not include `@pypi_lit` in standalone xla
+ # builds where it won't be found.
+- deps = []
+- if xla_root_dir == "tensorflow/compiler/xla/":
+- deps.append("@pypi_lit//:pkg")
+-
+ native.py_test(
+ name = name,
+ srcs = ["@llvm-project//llvm:lit"],
+@@ -69,7 +65,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ "@llvm-project//llvm:count",
+ "@llvm-project//llvm:not",
+ ],
+- deps = deps,
+ size = size,
+ main = "lit.py",
+ exec_properties = exec_properties,
+diff --git a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
+index 3b67c8fdbec..30a3c562f75 100644
+--- a/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
++++ b/tensorflow/compiler/xla/mlir_hlo/tests/BUILD
+@@ -26,7 +26,6 @@ package(
+ tags = [
+ "nomsan", # The execution engine doesn't work with msan, see b/248097619.
+ ],
+- deps = ["@pypi_lit//:pkg"],
+ )
+ for src in glob(["**/*.mlir"])
+ ]
+diff --git a/tensorflow/dtensor/python/tests/BUILD b/tensorflow/dtensor/python/tests/BUILD
+index 615baad3085..9b6c5839b03 100644
+--- a/tensorflow/dtensor/python/tests/BUILD
++++ b/tensorflow/dtensor/python/tests/BUILD
+@@ -303,7 +303,6 @@ pytype_strict_library(
+ ":test_util",
+ "//tensorflow/python/platform:client_testlib",
+ "@absl_py//absl/flags",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD
+index cf03dad0ee0..8b771ac20ae 100644
+--- a/tensorflow/lite/python/BUILD
++++ b/tensorflow/lite/python/BUILD
+@@ -266,7 +266,6 @@ py_test(
+ "//tensorflow/python/framework:test_lib",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/platform:resource_loader",
+- "@pypi_jax//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
+index c1b7eb7c0ea..4093f4c5c09 100644
+--- a/tensorflow/python/BUILD
++++ b/tensorflow/python/BUILD
+@@ -550,7 +550,6 @@ py_strict_library(
+ deps = [
+ ":keras_lib",
+ "//third_party/py/numpy",
+- "@pypi_scipy//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/compiler/tensorrt/BUILD b/tensorflow/python/compiler/tensorrt/BUILD
+index f3fd845ff53..78a45f4ed25 100644
+--- a/tensorflow/python/compiler/tensorrt/BUILD
++++ b/tensorflow/python/compiler/tensorrt/BUILD
+@@ -69,7 +69,6 @@ py_strict_library(
+ "//tensorflow/python/util:nest",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_packaging//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/data/experimental/kernel_tests/service/BUILD b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+index 8d36d2e3637..2b8a8fd3654 100644
+--- a/tensorflow/python/data/experimental/kernel_tests/service/BUILD
++++ b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+@@ -143,7 +143,6 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:array_ops",
+ "//tensorflow/python/platform:client_testlib",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/debug/lib/BUILD b/tensorflow/python/debug/lib/BUILD
+index 37c99b30dd2..012e349dffc 100644
+--- a/tensorflow/python/debug/lib/BUILD
++++ b/tensorflow/python/debug/lib/BUILD
+@@ -596,7 +596,6 @@ py_strict_library(
+ "//tensorflow/python/lib/io:lib",
+ "//tensorflow/python/ops:variables",
+ "//tensorflow/python/util:compat",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/distribute/experimental/rpc/BUILD b/tensorflow/python/distribute/experimental/rpc/BUILD
+index 94855205c70..3b3e3f9aee3 100644
+--- a/tensorflow/python/distribute/experimental/rpc/BUILD
++++ b/tensorflow/python/distribute/experimental/rpc/BUILD
+@@ -60,6 +60,5 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:variables",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/util:nest",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/distribute/failure_handling/BUILD b/tensorflow/python/distribute/failure_handling/BUILD
+index 77317019fee..df52d80552e 100644
+--- a/tensorflow/python/distribute/failure_handling/BUILD
++++ b/tensorflow/python/distribute/failure_handling/BUILD
+@@ -47,7 +47,6 @@ py_strict_library(
+ deps = [
+ "//tensorflow/python/eager:context",
+ "//tensorflow/python/platform:tf_logging",
+- "@pypi_requests//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/eager/BUILD b/tensorflow/python/eager/BUILD
+index b7bc8350e13..dc5e0ae232f 100644
+--- a/tensorflow/python/eager/BUILD
++++ b/tensorflow/python/eager/BUILD
+@@ -1167,7 +1167,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/training:server_lib",
+ "//tensorflow/python/util:compat",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/estimator/BUILD b/tensorflow/python/estimator/BUILD
+index 73a7c2626bb..0952ccb3154 100644
+--- a/tensorflow/python/estimator/BUILD
++++ b/tensorflow/python/estimator/BUILD
+@@ -380,7 +380,8 @@ py_library(
+ ],
+ )
+
+-alias(
++py_library(
+ name = "expect_tensorflow_estimator_installed",
+- actual = "@pypi_tensorflow_estimator//:pkg",
++ srcs_version = "PY3",
++ visibility = ["//visibility:public"],
+ )
+diff --git a/tensorflow/python/framework/BUILD b/tensorflow/python/framework/BUILD
+index d8ce1f5c0bf..1100c23b562 100644
+--- a/tensorflow/python/framework/BUILD
++++ b/tensorflow/python/framework/BUILD
+@@ -359,7 +359,6 @@ py_strict_library(
+ "//tensorflow/python/util:deprecation",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_packaging//:pkg",
+ ] + if_xla_available([
+ "//tensorflow/python:_pywrap_tfcompile",
+ ]),
+@@ -2036,7 +2035,6 @@ py_strict_library(
+ "//tensorflow/python/util/protobuf",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
+index c271a5ef77a..d516853a13e 100755
+--- a/tensorflow/python/keras/BUILD
++++ b/tensorflow/python/keras/BUILD
+@@ -42,7 +42,6 @@ py_library(
+ "//tensorflow/python/saved_model",
+ "//tensorflow/python/training",
+ "//tensorflow/python/util:nest",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/engine/BUILD b/tensorflow/python/keras/engine/BUILD
+index 2098b1650bc..287b1a4aa91 100644
+--- a/tensorflow/python/keras/engine/BUILD
++++ b/tensorflow/python/keras/engine/BUILD
+@@ -93,7 +93,6 @@ py_library(
+ "//tensorflow/python/util:tf_decorator",
+ "//tensorflow/python/util:tf_export",
+ "//tensorflow/tools/docs:doc_controls",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/saving/BUILD b/tensorflow/python/keras/saving/BUILD
+index d7cb2ccb2fc..b40d979c82d 100644
+--- a/tensorflow/python/keras/saving/BUILD
++++ b/tensorflow/python/keras/saving/BUILD
+@@ -53,6 +53,5 @@ py_library(
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/saved_model",
+ "//tensorflow/python/training:saver",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/profiler/BUILD b/tensorflow/python/profiler/BUILD
+index b1cfd6ea10c..9413aeeab8b 100644
+--- a/tensorflow/python/profiler/BUILD
++++ b/tensorflow/python/profiler/BUILD
+@@ -43,7 +43,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/eager:test",
+ "//tensorflow/python/framework:errors",
+ "//tensorflow/python/framework:test_lib",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/profiler/integration_test/BUILD b/tensorflow/python/profiler/integration_test/BUILD
+index b20698ea6ea..e7060e5a315 100644
+--- a/tensorflow/python/profiler/integration_test/BUILD
++++ b/tensorflow/python/profiler/integration_test/BUILD
+@@ -35,6 +35,5 @@ cuda_py_strict_test(
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/profiler:profiler_client",
+ "//tensorflow/python/profiler:profiler_v2",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/summary/BUILD b/tensorflow/python/summary/BUILD
+index 126fb6d31f7..b292e39356f 100644
+--- a/tensorflow/python/summary/BUILD
++++ b/tensorflow/python/summary/BUILD
+@@ -121,6 +121,5 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:summary_ops_v2",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/training:training_util",
+- "@pypi_tensorboard//:pkg",
+ ],
+ )
+diff --git a/third_party/py/BUILD.tpl b/third_party/py/BUILD.tpl
+index 7cc1e085684..45480bd4a31 100644
+--- a/third_party/py/BUILD.tpl
++++ b/third_party/py/BUILD.tpl
+@@ -5,17 +5,16 @@ package(default_visibility = ["//visibility:public"])
+ # Point both runtimes to the same python binary to ensure we always
+ # use the python binary specified by ./configure.py script.
+ load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair")
+-load("@python//:defs.bzl", "interpreter")
+
+ py_runtime(
+ name = "py2_runtime",
+- interpreter_path = interpreter,
++ interpreter_path = "%{PYTHON_BIN_PATH}",
+ python_version = "PY2",
+ )
+
+ py_runtime(
+ name = "py3_runtime",
+- interpreter_path = interpreter,
++ interpreter_path = "%{PYTHON_BIN_PATH}",
+ python_version = "PY3",
+ )
+
+@@ -33,8 +32,27 @@ toolchain(
+ exec_compatible_with = [%{PLATFORM_CONSTRAINT}],
+ )
+
+-alias(name = "python_headers",
+- actual = "@python//:python_headers")
++# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
++# See https://docs.python.org/3/extending/windows.html
++cc_import(
++ name = "python_lib",
++ interface_library = select({
++ ":windows": ":python_import_lib",
++ # A placeholder for Unix platforms which makes --no_build happy.
++ "//conditions:default": "not-existing.lib",
++ }),
++ system_provided = 1,
++)
++
++cc_library(
++ name = "python_headers",
++ hdrs = [":python_include"],
++ deps = select({
++ ":windows": [":python_lib"],
++ "//conditions:default": [],
++ }),
++ includes = ["python_include"],
++)
+
+ # This alias is exists for the use of targets in the @llvm-project dependency,
+ # which expect a python_headers target called @python_runtime//:headers. We use
+@@ -45,9 +63,18 @@ alias(
+ actual = ":python_headers",
+ )
+
++cc_library(
++ name = "numpy_headers",
++ hdrs = [":numpy_include"],
++ includes = ["numpy_include"],
++)
+
+ config_setting(
+ name = "windows",
+ values = {"cpu": "x64_windows"},
+ visibility = ["//visibility:public"],
+-)
+\ No newline at end of file
++)
++
++%{PYTHON_INCLUDE_GENRULE}
++%{NUMPY_INCLUDE_GENRULE}
++%{PYTHON_IMPORT_LIB_GENRULE}
+\ No newline at end of file
+diff --git a/third_party/py/non_hermetic/README b/third_party/py/README
+similarity index 100%
+rename from third_party/py/non_hermetic/README
+rename to third_party/py/README
+diff --git a/third_party/py/non_hermetic/BUILD b/third_party/py/non_hermetic/BUILD
+deleted file mode 100644
+index e69de29bb2d..00000000000
+diff --git a/third_party/py/non_hermetic/BUILD.tpl b/third_party/py/non_hermetic/BUILD.tpl
+deleted file mode 100644
+index 45480bd4a31..00000000000
+--- a/third_party/py/non_hermetic/BUILD.tpl
++++ /dev/null
+@@ -1,80 +0,0 @@
+-licenses(["restricted"])
+-
+-package(default_visibility = ["//visibility:public"])
+-
+-# Point both runtimes to the same python binary to ensure we always
+-# use the python binary specified by ./configure.py script.
+-load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair")
+-
+-py_runtime(
+- name = "py2_runtime",
+- interpreter_path = "%{PYTHON_BIN_PATH}",
+- python_version = "PY2",
+-)
+-
+-py_runtime(
+- name = "py3_runtime",
+- interpreter_path = "%{PYTHON_BIN_PATH}",
+- python_version = "PY3",
+-)
+-
+-py_runtime_pair(
+- name = "py_runtime_pair",
+- py2_runtime = ":py2_runtime",
+- py3_runtime = ":py3_runtime",
+-)
+-
+-toolchain(
+- name = "py_toolchain",
+- toolchain = ":py_runtime_pair",
+- toolchain_type = "@bazel_tools//tools/python:toolchain_type",
+- target_compatible_with = [%{PLATFORM_CONSTRAINT}],
+- exec_compatible_with = [%{PLATFORM_CONSTRAINT}],
+-)
+-
+-# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
+-# See https://docs.python.org/3/extending/windows.html
+-cc_import(
+- name = "python_lib",
+- interface_library = select({
+- ":windows": ":python_import_lib",
+- # A placeholder for Unix platforms which makes --no_build happy.
+- "//conditions:default": "not-existing.lib",
+- }),
+- system_provided = 1,
+-)
+-
+-cc_library(
+- name = "python_headers",
+- hdrs = [":python_include"],
+- deps = select({
+- ":windows": [":python_lib"],
+- "//conditions:default": [],
+- }),
+- includes = ["python_include"],
+-)
+-
+-# This alias is exists for the use of targets in the @llvm-project dependency,
+-# which expect a python_headers target called @python_runtime//:headers. We use
+-# a repo_mapping to alias python_runtime to this package, and an alias to create
+-# the correct target.
+-alias(
+- name = "headers",
+- actual = ":python_headers",
+-)
+-
+-cc_library(
+- name = "numpy_headers",
+- hdrs = [":numpy_include"],
+- includes = ["numpy_include"],
+-)
+-
+-config_setting(
+- name = "windows",
+- values = {"cpu": "x64_windows"},
+- visibility = ["//visibility:public"],
+-)
+-
+-%{PYTHON_INCLUDE_GENRULE}
+-%{NUMPY_INCLUDE_GENRULE}
+-%{PYTHON_IMPORT_LIB_GENRULE}
+\ No newline at end of file
+diff --git a/third_party/py/non_hermetic/ml_dtypes/BUILD b/third_party/py/non_hermetic/ml_dtypes/BUILD
+deleted file mode 100644
+index e69de29bb2d..00000000000
+diff --git a/third_party/py/non_hermetic/ml_dtypes/LICENSE b/third_party/py/non_hermetic/ml_dtypes/LICENSE
+deleted file mode 100644
+index d6456956733..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/LICENSE
++++ /dev/null
+@@ -1,202 +0,0 @@
+-
+- Apache License
+- Version 2.0, January 2004
+- http://www.apache.org/licenses/
+-
+- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+-
+- 1. Definitions.
+-
+- "License" shall mean the terms and conditions for use, reproduction,
+- and distribution as defined by Sections 1 through 9 of this document.
+-
+- "Licensor" shall mean the copyright owner or entity authorized by
+- the copyright owner that is granting the License.
+-
+- "Legal Entity" shall mean the union of the acting entity and all
+- other entities that control, are controlled by, or are under common
+- control with that entity. For the purposes of this definition,
+- "control" means (i) the power, direct or indirect, to cause the
+- direction or management of such entity, whether by contract or
+- otherwise, or (ii) ownership of fifty percent (50%) or more of the
+- outstanding shares, or (iii) beneficial ownership of such entity.
+-
+- "You" (or "Your") shall mean an individual or Legal Entity
+- exercising permissions granted by this License.
+-
+- "Source" form shall mean the preferred form for making modifications,
+- including but not limited to software source code, documentation
+- source, and configuration files.
+-
+- "Object" form shall mean any form resulting from mechanical
+- transformation or translation of a Source form, including but
+- not limited to compiled object code, generated documentation,
+- and conversions to other media types.
+-
+- "Work" shall mean the work of authorship, whether in Source or
+- Object form, made available under the License, as indicated by a
+- copyright notice that is included in or attached to the work
+- (an example is provided in the Appendix below).
+-
+- "Derivative Works" shall mean any work, whether in Source or Object
+- form, that is based on (or derived from) the Work and for which the
+- editorial revisions, annotations, elaborations, or other modifications
+- represent, as a whole, an original work of authorship. For the purposes
+- of this License, Derivative Works shall not include works that remain
+- separable from, or merely link (or bind by name) to the interfaces of,
+- the Work and Derivative Works thereof.
+-
+- "Contribution" shall mean any work of authorship, including
+- the original version of the Work and any modifications or additions
+- to that Work or Derivative Works thereof, that is intentionally
+- submitted to Licensor for inclusion in the Work by the copyright owner
+- or by an individual or Legal Entity authorized to submit on behalf of
+- the copyright owner. For the purposes of this definition, "submitted"
+- means any form of electronic, verbal, or written communication sent
+- to the Licensor or its representatives, including but not limited to
+- communication on electronic mailing lists, source code control systems,
+- and issue tracking systems that are managed by, or on behalf of, the
+- Licensor for the purpose of discussing and improving the Work, but
+- excluding communication that is conspicuously marked or otherwise
+- designated in writing by the copyright owner as "Not a Contribution."
+-
+- "Contributor" shall mean Licensor and any individual or Legal Entity
+- on behalf of whom a Contribution has been received by Licensor and
+- subsequently incorporated within the Work.
+-
+- 2. Grant of Copyright License. Subject to the terms and conditions of
+- this License, each Contributor hereby grants to You a perpetual,
+- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+- copyright license to reproduce, prepare Derivative Works of,
+- publicly display, publicly perform, sublicense, and distribute the
+- Work and such Derivative Works in Source or Object form.
+-
+- 3. Grant of Patent License. Subject to the terms and conditions of
+- this License, each Contributor hereby grants to You a perpetual,
+- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+- (except as stated in this section) patent license to make, have made,
+- use, offer to sell, sell, import, and otherwise transfer the Work,
+- where such license applies only to those patent claims licensable
+- by such Contributor that are necessarily infringed by their
+- Contribution(s) alone or by combination of their Contribution(s)
+- with the Work to which such Contribution(s) was submitted. If You
+- institute patent litigation against any entity (including a
+- cross-claim or counterclaim in a lawsuit) alleging that the Work
+- or a Contribution incorporated within the Work constitutes direct
+- or contributory patent infringement, then any patent licenses
+- granted to You under this License for that Work shall terminate
+- as of the date such litigation is filed.
+-
+- 4. Redistribution. You may reproduce and distribute copies of the
+- Work or Derivative Works thereof in any medium, with or without
+- modifications, and in Source or Object form, provided that You
+- meet the following conditions:
+-
+- (a) You must give any other recipients of the Work or
+- Derivative Works a copy of this License; and
+-
+- (b) You must cause any modified files to carry prominent notices
+- stating that You changed the files; and
+-
+- (c) You must retain, in the Source form of any Derivative Works
+- that You distribute, all copyright, patent, trademark, and
+- attribution notices from the Source form of the Work,
+- excluding those notices that do not pertain to any part of
+- the Derivative Works; and
+-
+- (d) If the Work includes a "NOTICE" text file as part of its
+- distribution, then any Derivative Works that You distribute must
+- include a readable copy of the attribution notices contained
+- within such NOTICE file, excluding those notices that do not
+- pertain to any part of the Derivative Works, in at least one
+- of the following places: within a NOTICE text file distributed
+- as part of the Derivative Works; within the Source form or
+- documentation, if provided along with the Derivative Works; or,
+- within a display generated by the Derivative Works, if and
+- wherever such third-party notices normally appear. The contents
+- of the NOTICE file are for informational purposes only and
+- do not modify the License. You may add Your own attribution
+- notices within Derivative Works that You distribute, alongside
+- or as an addendum to the NOTICE text from the Work, provided
+- that such additional attribution notices cannot be construed
+- as modifying the License.
+-
+- You may add Your own copyright statement to Your modifications and
+- may provide additional or different license terms and conditions
+- for use, reproduction, or distribution of Your modifications, or
+- for any such Derivative Works as a whole, provided Your use,
+- reproduction, and distribution of the Work otherwise complies with
+- the conditions stated in this License.
+-
+- 5. Submission of Contributions. Unless You explicitly state otherwise,
+- any Contribution intentionally submitted for inclusion in the Work
+- by You to the Licensor shall be under the terms and conditions of
+- this License, without any additional terms or conditions.
+- Notwithstanding the above, nothing herein shall supersede or modify
+- the terms of any separate license agreement you may have executed
+- with Licensor regarding such Contributions.
+-
+- 6. Trademarks. This License does not grant permission to use the trade
+- names, trademarks, service marks, or product names of the Licensor,
+- except as required for reasonable and customary use in describing the
+- origin of the Work and reproducing the content of the NOTICE file.
+-
+- 7. Disclaimer of Warranty. Unless required by applicable law or
+- agreed to in writing, Licensor provides the Work (and each
+- Contributor provides its Contributions) on an "AS IS" BASIS,
+- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+- implied, including, without limitation, any warranties or conditions
+- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+- PARTICULAR PURPOSE. You are solely responsible for determining the
+- appropriateness of using or redistributing the Work and assume any
+- risks associated with Your exercise of permissions under this License.
+-
+- 8. Limitation of Liability. In no event and under no legal theory,
+- whether in tort (including negligence), contract, or otherwise,
+- unless required by applicable law (such as deliberate and grossly
+- negligent acts) or agreed to in writing, shall any Contributor be
+- liable to You for damages, including any direct, indirect, special,
+- incidental, or consequential damages of any character arising as a
+- result of this License or out of the use or inability to use the
+- Work (including but not limited to damages for loss of goodwill,
+- work stoppage, computer failure or malfunction, or any and all
+- other commercial damages or losses), even if such Contributor
+- has been advised of the possibility of such damages.
+-
+- 9. Accepting Warranty or Additional Liability. While redistributing
+- the Work or Derivative Works thereof, You may choose to offer,
+- and charge a fee for, acceptance of support, warranty, indemnity,
+- or other liability obligations and/or rights consistent with this
+- License. However, in accepting such obligations, You may act only
+- on Your own behalf and on Your sole responsibility, not on behalf
+- of any other Contributor, and only if You agree to indemnify,
+- defend, and hold each Contributor harmless for any liability
+- incurred by, or claims asserted against, such Contributor by reason
+- of your accepting any such warranty or additional liability.
+-
+- END OF TERMS AND CONDITIONS
+-
+- APPENDIX: How to apply the Apache License to your work.
+-
+- To apply the Apache License to your work, attach the following
+- boilerplate notice, with the fields enclosed by brackets "[]"
+- replaced with your own identifying information. (Don't include
+- the brackets!) The text should be enclosed in the appropriate
+- comment syntax for the file format. We also recommend that a
+- file or class name and description of purpose be included on the
+- same "printed page" as the copyright notice for easier
+- identification within third-party archives.
+-
+- Copyright [yyyy] [name of copyright owner]
+-
+- Licensed under the Apache License, Version 2.0 (the "License");
+- you may not use this file except in compliance with the License.
+- You may obtain a copy of the License at
+-
+- http://www.apache.org/licenses/LICENSE-2.0
+-
+- Unless required by applicable law or agreed to in writing, software
+- distributed under the License is distributed on an "AS IS" BASIS,
+- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+- See the License for the specific language governing permissions and
+- limitations under the License.
+diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
+deleted file mode 100644
+index 95f58d3c476..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.BUILD
++++ /dev/null
+@@ -1,50 +0,0 @@
+-load("@pybind11_bazel//:build_defs.bzl", "pybind_extension")
+-
+-package(
+- default_visibility = ["//visibility:public"],
+- licenses = ["notice"],
+-)
+-
+-exports_files(["LICENSE"])
+-
+-cc_library(
+- name = "float8",
+- hdrs = ["include/float8.h"],
+- # Internal headers are all relative to , but other packages
+- # include these headers with the prefix.
+- includes = [
+- ".",
+- "ml_dtypes",
+- ],
+- deps = ["@org_tensorflow//third_party/eigen3"],
+-)
+-
+-pybind_extension(
+- name = "_custom_floats",
+- srcs = [
+- "_src/common.h",
+- "_src/custom_float.h",
+- "_src/dtypes.cc",
+- "_src/int4.h",
+- "_src/numpy.cc",
+- "_src/numpy.h",
+- "_src/ufuncs.h",
+- ],
+- includes = ["ml_dtypes"],
+- visibility = [":__subpackages__"],
+- deps = [
+- ":float8",
+- "@org_tensorflow//third_party/eigen3",
+- "@org_tensorflow//third_party/py/numpy:headers",
+- ],
+-)
+-
+-py_library(
+- name = "ml_dtypes",
+- srcs = [
+- "__init__.py",
+- "_finfo.py",
+- "_iinfo.py",
+- ],
+- deps = [":_custom_floats"],
+-)
+diff --git a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD b/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
+deleted file mode 100644
+index fde5f2eaccf..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/ml_dtypes.tests.BUILD
++++ /dev/null
+@@ -1,60 +0,0 @@
+-package(
+- default_visibility = ["//visibility:public"],
+-)
+-
+-py_library(
+- name = "testing_base",
+- deps = [
+- "//:ml_dtypes",
+- "@absl_py//absl/testing:absltest",
+- "@absl_py//absl/testing:parameterized",
+- "@org_tensorflow//third_party/py/numpy",
+- ],
+-)
+-
+-py_test(
+- name = "custom_float_test",
+- srcs = ["custom_float_test.py"],
+- main = "custom_float_test.py",
+- deps = [":testing_base"],
+-)
+-
+-py_test(
+- name = "int4_test",
+- srcs = ["int4_test.py"],
+- main = "int4_test.py",
+- deps = [":testing_base"],
+-)
+-
+-py_test(
+- name = "iinfo_test",
+- srcs = ["iinfo_test.py"],
+- main = "iinfo_test.py",
+- deps = [":testing_base"],
+-)
+-
+-py_test(
+- name = "finfo_test",
+- srcs = ["finfo_test.py"],
+- main = "finfo_test.py",
+- deps = [":testing_base"],
+-)
+-
+-py_test(
+- name = "metadata_test",
+- srcs = ["metadata_test.py"],
+- main = "metadata_test.py",
+- deps = [":testing_base"],
+-)
+-
+-cc_test(
+- name = "float8_test",
+- srcs = ["float8_test.cc"],
+- linkstatic = 1,
+- deps = [
+- "//:float8",
+- "@com_google_absl//absl/strings",
+- "@com_google_googletest//:gtest_main",
+- "@org_tensorflow//third_party/eigen3",
+- ],
+-)
+diff --git a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl b/third_party/py/non_hermetic/ml_dtypes/workspace.bzl
+deleted file mode 100644
+index 2c34f494c34..00000000000
+--- a/third_party/py/non_hermetic/ml_dtypes/workspace.bzl
++++ /dev/null
+@@ -1,22 +0,0 @@
+-"""Provides the repo macro to import ml_dtypes.
+-
+-ml_dtypes provides machine-learning-specific data-types like bfloat16,
+-float8 varieties, and int4.
+-"""
+-
+-load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
+-
+-def repo():
+- ML_DTYPES_COMMIT = "5b9fc9ad978757654843f4a8d899715dbea30e88"
+- ML_DTYPES_SHA256 = "9662811d9ab3823a56f8fa91b5a67fd82062b6dd4f187169b41e82a44e526455"
+- tf_http_archive(
+- name = "ml_dtypes",
+- build_file = "//third_party/py/ml_dtypes:ml_dtypes.BUILD",
+- link_files = {
+- "//third_party/py/ml_dtypes:ml_dtypes.tests.BUILD": "tests/BUILD.bazel",
+- "//third_party/py/ml_dtypes:LICENSE": "LICENSE",
+- },
+- sha256 = ML_DTYPES_SHA256,
+- strip_prefix = "ml_dtypes-{commit}/ml_dtypes".format(commit = ML_DTYPES_COMMIT),
+- urls = tf_mirror_urls("https://github.com/jax-ml/ml_dtypes/archive/{commit}/ml_dtypes-{commit}.tar.gz".format(commit = ML_DTYPES_COMMIT)),
+- )
+diff --git a/third_party/py/non_hermetic/numpy/BUILD b/third_party/py/non_hermetic/numpy/BUILD
+deleted file mode 100644
+index c80cc5287bc..00000000000
+--- a/third_party/py/non_hermetic/numpy/BUILD
++++ /dev/null
+@@ -1,21 +0,0 @@
+-licenses(["restricted"])
+-
+-package(default_visibility = ["//visibility:public"])
+-
+-py_library(
+- name = "numpy",
+- srcs = ["tf_numpy_dummy.py"],
+- srcs_version = "PY3",
+-)
+-
+-alias(
+- name = "headers",
+- actual = "@local_config_python//:numpy_headers",
+-)
+-
+-genrule(
+- name = "dummy",
+- outs = ["tf_numpy_dummy.py"],
+- cmd = "touch $@",
+- visibility = ["//visibility:private"],
+-)
+diff --git a/third_party/py/non_hermetic/numpy/README.md b/third_party/py/non_hermetic/numpy/README.md
+deleted file mode 100644
+index 4e58b9df87b..00000000000
+--- a/third_party/py/non_hermetic/numpy/README.md
++++ /dev/null
+@@ -1,4 +0,0 @@
+-# numpy_ops
+-
+-The folder tf_numpy_api/ contains lists of NumPy API symbols that the
+-`numpy_ops` internal module in TensorFlow implements.
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD b/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
+deleted file mode 100644
+index 070f8ab8a65..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/BUILD
++++ /dev/null
+@@ -1,12 +0,0 @@
+-# TensorFlow API backwards compatibility test goldens for tf.experimental.numpy.
+-
+-package(
+- # copybara:uncomment default_applicable_licenses = ["//tensorflow:license"],
+- default_visibility = ["//visibility:public"],
+- licenses = ["notice"],
+-)
+-
+-filegroup(
+- name = "api_golden",
+- srcs = glob(["*.pbtxt"]),
+-)
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
+deleted file mode 100644
+index 9198264c029..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.ndarray.pbtxt
++++ /dev/null
+@@ -1,51 +0,0 @@
+-path: "tensorflow.experimental.numpy.ndarray"
+-tf_class {
+- is_instance: "<class \'tensorflow.python.framework.tensor.Tensor\'>"
+- is_instance: "<class \'tensorflow.python.types.internal.NativeObject\'>"
+- is_instance: "<class \'tensorflow.python.types.core.Symbol\'>"
+- is_instance: "<class \'tensorflow.python.types.core.Tensor\'>"
+- is_instance: "<type \'object\'>"
+- member {
+- name: "OVERLOADABLE_OPERATORS"
+- mtype: "<type \'set\'>"
+- }
+- member {
+- name: "dtype"
+- mtype: "<type \'property\'>"
+- }
+- member {
+- name: "name"
+- mtype: "<type \'property\'>"
+- }
+- member {
+- name: "ndim"
+- mtype: "<type \'property\'>"
+- }
+- member {
+- name: "shape"
+- mtype: "<type \'property\'>"
+- }
+- member_method {
+- name: "__init__"
+- }
+- member_method {
+- name: "eval"
+- argspec: "args=[\'self\', \'feed_dict\', \'session\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "experimental_ref"
+- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "get_shape"
+- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ref"
+- argspec: "args=[\'self\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "set_shape"
+- argspec: "args=[\'self\', \'shape\'], varargs=None, keywords=None, defaults=None"
+- }
+-}
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+deleted file mode 100644
+index 2f5490ad0c9..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
++++ /dev/null
+@@ -1,919 +0,0 @@
+-path: "tensorflow.experimental.numpy"
+-tf_module {
+- member {
+- name: "bool_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "complex128"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "complex64"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "complex_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "e"
+- mtype: "<class \'float\'>"
+- }
+- member {
+- name: "float16"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "float32"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "float64"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "float_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "iinfo"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "inexact"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "inf"
+- mtype: "<class \'float\'>"
+- }
+- member {
+- name: "int16"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "int32"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "int64"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "int8"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "int_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "ndarray"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "newaxis"
+- mtype: "<type \'NoneType\'>"
+- }
+- member {
+- name: "object_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "pi"
+- mtype: "<class \'float\'>"
+- }
+- member {
+- name: "random"
+- mtype: "<type \'module\'>"
+- }
+- member {
+- name: "string_"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "uint16"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "uint32"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "uint64"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "uint8"
+- mtype: "<type \'type\'>"
+- }
+- member {
+- name: "unicode_"
+- mtype: "<type \'type\'>"
+- }
+- member_method {
+- name: "abs"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "absolute"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "add"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "all"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "allclose"
+- argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], "
+- }
+- member_method {
+- name: "amax"
+- argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "amin"
+- argspec: "args=[\'a\', \'axis\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "angle"
+- argspec: "args=[\'z\', \'deg\'], varargs=None, keywords=None, defaults=[\'False\'], "
+- }
+- member_method {
+- name: "any"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "append"
+- argspec: "args=[\'arr\', \'values\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "arange"
+- argspec: "args=[\'start\', \'stop\', \'step\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'1\', \'None\'], "
+- }
+- member_method {
+- name: "arccos"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arccosh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arcsin"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arcsinh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arctan"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arctan2"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "arctanh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "argmax"
+- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "argmin"
+- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "argsort"
+- argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], "
+- }
+- member_method {
+- name: "around"
+- argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "array"
+- argspec: "args=[\'val\', \'dtype\', \'copy\', \'ndmin\'], varargs=None, keywords=None, defaults=[\'None\', \'True\', \'0\'], "
+- }
+- member_method {
+- name: "array_equal"
+- argspec: "args=[\'a1\', \'a2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "asanyarray"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "asarray"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "ascontiguousarray"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "atleast_1d"
+- argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "atleast_2d"
+- argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "atleast_3d"
+- argspec: "args=[], varargs=arys, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "average"
+- argspec: "args=[\'a\', \'axis\', \'weights\', \'returned\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+- }
+- member_method {
+- name: "bitwise_and"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "bitwise_not"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "bitwise_or"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "bitwise_xor"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "broadcast_arrays"
+- argspec: "args=[], varargs=args, keywords=kwargs, defaults=None"
+- }
+- member_method {
+- name: "broadcast_to"
+- argspec: "args=[\'array\', \'shape\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "cbrt"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ceil"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "clip"
+- argspec: "args=[\'a\', \'a_min\', \'a_max\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "compress"
+- argspec: "args=[\'condition\', \'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "concatenate"
+- argspec: "args=[\'arys\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "conj"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "conjugate"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "copy"
+- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "cos"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "cosh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "count_nonzero"
+- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "cross"
+- argspec: "args=[\'a\', \'b\', \'axisa\', \'axisb\', \'axisc\', \'axis\'], varargs=None, keywords=None, defaults=[\'-1\', \'-1\', \'-1\', \'None\'], "
+- }
+- member_method {
+- name: "cumprod"
+- argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "cumsum"
+- argspec: "args=[\'a\', \'axis\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "deg2rad"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "diag"
+- argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "diag_indices"
+- argspec: "args=[\'n\', \'ndim\'], varargs=None, keywords=None, defaults=[\'2\'], "
+- }
+- member_method {
+- name: "diagflat"
+- argspec: "args=[\'v\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "diagonal"
+- argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\'], "
+- }
+- member_method {
+- name: "diff"
+- argspec: "args=[\'a\', \'n\', \'axis\'], varargs=None, keywords=None, defaults=[\'1\', \'-1\'], "
+- }
+- member_method {
+- name: "divide"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "divmod"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "dot"
+- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "dsplit"
+- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "dstack"
+- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "einsum"
+- argspec: "args=[\'subscripts\'], varargs=operands, keywords=kwargs, defaults=None"
+- }
+- member_method {
+- name: "empty"
+- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+- }
+- member_method {
+- name: "empty_like"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "equal"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "exp"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "exp2"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "expand_dims"
+- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "experimental_enable_numpy_behavior"
+- argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], "
+- }
+- member_method {
+- name: "expm1"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "eye"
+- argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \"<class \'float\'>\"], "
+- }
+- member_method {
+- name: "fabs"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "finfo"
+- argspec: "args=[\'dtype\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "fix"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "flatten"
+- argspec: "args=[\'a\', \'order\'], varargs=None, keywords=None, defaults=[\'C\'], "
+- }
+- member_method {
+- name: "flip"
+- argspec: "args=[\'m\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "fliplr"
+- argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "flipud"
+- argspec: "args=[\'m\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "float_power"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "floor"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "floor_divide"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "full"
+- argspec: "args=[\'shape\', \'fill_value\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "full_like"
+- argspec: "args=[\'a\', \'fill_value\', \'dtype\', \'order\', \'subok\', \'shape\'], varargs=None, keywords=None, defaults=[\'None\', \'K\', \'True\', \'None\'], "
+- }
+- member_method {
+- name: "gcd"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "geomspace"
+- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'None\', \'0\'], "
+- }
+- member_method {
+- name: "greater"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "greater_equal"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "heaviside"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "hsplit"
+- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "hstack"
+- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "hypot"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "identity"
+- argspec: "args=[\'n\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+- }
+- member_method {
+- name: "imag"
+- argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "inner"
+- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isclose"
+- argspec: "args=[\'a\', \'b\', \'rtol\', \'atol\', \'equal_nan\'], varargs=None, keywords=None, defaults=[\'1e-05\', \'1e-08\', \'False\'], "
+- }
+- member_method {
+- name: "iscomplex"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "iscomplexobj"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isfinite"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isinf"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isnan"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isneginf"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isposinf"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isreal"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isrealobj"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "isscalar"
+- argspec: "args=[\'num\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "issubdtype"
+- argspec: "args=[\'arg1\', \'arg2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ix_"
+- argspec: "args=[], varargs=args, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "kron"
+- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "lcm"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "less"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "less_equal"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "linspace"
+- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'retstep\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'False\', \"<class \'float\'>\", \'0\'], "
+- }
+- member_method {
+- name: "log"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "log10"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "log1p"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "log2"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logaddexp"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logaddexp2"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logical_and"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logical_not"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logical_or"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logical_xor"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "logspace"
+- argspec: "args=[\'start\', \'stop\', \'num\', \'endpoint\', \'base\', \'dtype\', \'axis\'], varargs=None, keywords=None, defaults=[\'50\', \'True\', \'10.0\', \'None\', \'0\'], "
+- }
+- member_method {
+- name: "matmul"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "max"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "maximum"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "mean"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "meshgrid"
+- argspec: "args=[], varargs=xi, keywords=kwargs, defaults=None"
+- }
+- member_method {
+- name: "min"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "minimum"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "mod"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "moveaxis"
+- argspec: "args=[\'a\', \'source\', \'destination\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "multiply"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "nanmean"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "nanprod"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+- }
+- member_method {
+- name: "nansum"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'False\'], "
+- }
+- member_method {
+- name: "ndim"
+- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "negative"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "nextafter"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "nonzero"
+- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "not_equal"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ones"
+- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+- }
+- member_method {
+- name: "ones_like"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "outer"
+- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "pad"
+- argspec: "args=[\'array\', \'pad_width\', \'mode\'], varargs=None, keywords=kwargs, defaults=None"
+- }
+- member_method {
+- name: "polyval"
+- argspec: "args=[\'p\', \'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "positive"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "power"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "prod"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "promote_types"
+- argspec: "args=[\'type1\', \'type2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ptp"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "rad2deg"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "ravel"
+- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "real"
+- argspec: "args=[\'val\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "reciprocal"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "remainder"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "repeat"
+- argspec: "args=[\'a\', \'repeats\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "reshape"
+- argspec: "args=[\'a\', \'newshape\', \'order\'], varargs=None, keywords=None, defaults=[\'C\'], "
+- }
+- member_method {
+- name: "result_type"
+- argspec: "args=[], varargs=arrays_and_dtypes, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "roll"
+- argspec: "args=[\'a\', \'shift\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "rot90"
+- argspec: "args=[\'m\', \'k\', \'axes\'], varargs=None, keywords=None, defaults=[\'1\', \'(0, 1)\'], "
+- }
+- member_method {
+- name: "round"
+- argspec: "args=[\'a\', \'decimals\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "select"
+- argspec: "args=[\'condlist\', \'choicelist\', \'default\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "shape"
+- argspec: "args=[\'a\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "sign"
+- argspec: "args=[\'x\', \'out\', \'where\'], varargs=None, keywords=kwargs, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "signbit"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "sin"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "sinc"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "sinh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "size"
+- argspec: "args=[\'x\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "sort"
+- argspec: "args=[\'a\', \'axis\', \'kind\', \'order\'], varargs=None, keywords=None, defaults=[\'-1\', \'quicksort\', \'None\'], "
+- }
+- member_method {
+- name: "split"
+- argspec: "args=[\'ary\', \'indices_or_sections\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "sqrt"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "square"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "squeeze"
+- argspec: "args=[\'a\', \'axis\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "stack"
+- argspec: "args=[\'arrays\', \'axis\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "std"
+- argspec: "args=[\'a\', \'axis\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "subtract"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "sum"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\'], "
+- }
+- member_method {
+- name: "swapaxes"
+- argspec: "args=[\'a\', \'axis1\', \'axis2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "take"
+- argspec: "args=[\'a\', \'indices\', \'axis\', \'out\', \'mode\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'clip\'], "
+- }
+- member_method {
+- name: "take_along_axis"
+- argspec: "args=[\'arr\', \'indices\', \'axis\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "tan"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "tanh"
+- argspec: "args=[\'x\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "tensordot"
+- argspec: "args=[\'a\', \'b\', \'axes\'], varargs=None, keywords=None, defaults=[\'2\'], "
+- }
+- member_method {
+- name: "tile"
+- argspec: "args=[\'a\', \'reps\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "trace"
+- argspec: "args=[\'a\', \'offset\', \'axis1\', \'axis2\', \'dtype\'], varargs=None, keywords=None, defaults=[\'0\', \'0\', \'1\', \'None\'], "
+- }
+- member_method {
+- name: "transpose"
+- argspec: "args=[\'a\', \'axes\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "tri"
+- argspec: "args=[\'N\', \'M\', \'k\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'0\', \'None\'], "
+- }
+- member_method {
+- name: "tril"
+- argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "triu"
+- argspec: "args=[\'m\', \'k\'], varargs=None, keywords=None, defaults=[\'0\'], "
+- }
+- member_method {
+- name: "true_divide"
+- argspec: "args=[\'x1\', \'x2\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "vander"
+- argspec: "args=[\'x\', \'N\', \'increasing\'], varargs=None, keywords=None, defaults=[\'None\', \'False\'], "
+- }
+- member_method {
+- name: "var"
+- argspec: "args=[\'a\', \'axis\', \'dtype\', \'out\', \'ddof\', \'keepdims\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \'None\', \'0\', \'None\'], "
+- }
+- member_method {
+- name: "vdot"
+- argspec: "args=[\'a\', \'b\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "vsplit"
+- argspec: "args=[\'ary\', \'indices_or_sections\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "vstack"
+- argspec: "args=[\'tup\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "where"
+- argspec: "args=[\'condition\', \'x\', \'y\'], varargs=None, keywords=None, defaults=[\'None\', \'None\'], "
+- }
+- member_method {
+- name: "zeros"
+- argspec: "args=[\'shape\', \'dtype\'], varargs=None, keywords=None, defaults=[\"<class \'float\'>\"], "
+- }
+- member_method {
+- name: "zeros_like"
+- argspec: "args=[\'a\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+-}
+diff --git a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt b/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
+deleted file mode 100644
+index 61a4766f3f8..00000000000
+--- a/third_party/py/non_hermetic/numpy/tf_numpy_api/tensorflow.experimental.numpy.random.pbtxt
++++ /dev/null
+@@ -1,35 +0,0 @@
+-path: "tensorflow.experimental.numpy.random"
+-tf_module {
+- member_method {
+- name: "poisson"
+- argspec: "args=[\'lam\', \'size\'], varargs=None, keywords=None, defaults=[\'1.0\', \'None\'], "
+- }
+- member_method {
+- name: "rand"
+- argspec: "args=[], varargs=size, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "randint"
+- argspec: "args=[\'low\', \'high\', \'size\', \'dtype\'], varargs=None, keywords=None, defaults=[\'None\', \'None\', \"<class \'numpy.int64\'>\"], "
+- }
+- member_method {
+- name: "randn"
+- argspec: "args=[], varargs=args, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "random"
+- argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "seed"
+- argspec: "args=[\'s\'], varargs=None, keywords=None, defaults=None"
+- }
+- member_method {
+- name: "standard_normal"
+- argspec: "args=[\'size\'], varargs=None, keywords=None, defaults=[\'None\'], "
+- }
+- member_method {
+- name: "uniform"
+- argspec: "args=[\'low\', \'high\', \'size\'], varargs=None, keywords=None, defaults=[\'0.0\', \'1.0\', \'None\'], "
+- }
+-}
+diff --git a/third_party/py/non_hermetic/python_configure.bzl b/third_party/py/non_hermetic/python_configure.bzl
+deleted file mode 100644
+index 300cbfb6c71..00000000000
+--- a/third_party/py/non_hermetic/python_configure.bzl
++++ /dev/null
+@@ -1,315 +0,0 @@
+-"""Repository rule for Python autoconfiguration.
+-
+-`python_configure` depends on the following environment variables:
+-
+- * `PYTHON_BIN_PATH`: location of python binary.
+- * `PYTHON_LIB_PATH`: Location of python libraries.
+-"""
+-
+-load(
+- "//third_party/remote_config:common.bzl",
+- "BAZEL_SH",
+- "PYTHON_BIN_PATH",
+- "PYTHON_LIB_PATH",
+- "TF_PYTHON_CONFIG_REPO",
+- "auto_config_fail",
+- "config_repo_label",
+- "execute",
+- "get_bash_bin",
+- "get_host_environ",
+- "get_python_bin",
+- "is_windows",
+- "raw_exec",
+- "read_dir",
+-)
+-
+-def _genrule(src_dir, genrule_name, command, outs):
+- """Returns a string with a genrule.
+-
+- Genrule executes the given command and produces the given outputs.
+- """
+- return (
+- "genrule(\n" +
+- ' name = "' +
+- genrule_name + '",\n' +
+- " outs = [\n" +
+- outs +
+- "\n ],\n" +
+- ' cmd = """\n' +
+- command +
+- '\n """,\n' +
+- ")\n"
+- )
+-
+-def _norm_path(path):
+- """Returns a path with '/' and remove the trailing slash."""
+- path = path.replace("\\", "/")
+- if path[-1] == "/":
+- path = path[:-1]
+- return path
+-
+-def _symlink_genrule_for_dir(
+- repository_ctx,
+- src_dir,
+- dest_dir,
+- genrule_name,
+- src_files = [],
+- dest_files = []):
+- """Returns a genrule to symlink(or copy if on Windows) a set of files.
+-
+- If src_dir is passed, files will be read from the given directory; otherwise
+- we assume files are in src_files and dest_files
+- """
+- if src_dir != None:
+- src_dir = _norm_path(src_dir)
+- dest_dir = _norm_path(dest_dir)
+- files = "\n".join(read_dir(repository_ctx, src_dir))
+-
+- # Create a list with the src_dir stripped to use for outputs.
+- dest_files = files.replace(src_dir, "").splitlines()
+- src_files = files.splitlines()
+- command = []
+- outs = []
+- for i in range(len(dest_files)):
+- if dest_files[i] != "":
+- # If we have only one file to link we do not want to use the dest_dir, as
+- # $(@D) will include the full path to the file.
+- dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i]
+-
+- # Copy the headers to create a sandboxable setup.
+- cmd = "cp -f"
+- command.append(cmd + ' "%s" "%s"' % (src_files[i], dest))
+- outs.append(' "' + dest_dir + dest_files[i] + '",')
+- genrule = _genrule(
+- src_dir,
+- genrule_name,
+- " && ".join(command),
+- "\n".join(outs),
+- )
+- return genrule
+-
+-def _get_python_lib(repository_ctx, python_bin):
+- """Gets the python lib path."""
+- python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH)
+- if python_lib != None:
+- return python_lib
+-
+- # The interesting program to execute.
+- print_lib = [
+- "from __future__ import print_function",
+- "import site",
+- "import os",
+- "python_paths = []",
+- "if os.getenv('PYTHONPATH') is not None:",
+- " python_paths = os.getenv('PYTHONPATH').split(':')",
+- "try:",
+- " library_paths = site.getsitepackages()",
+- "except AttributeError:",
+- " from distutils.sysconfig import get_python_lib",
+- " library_paths = [get_python_lib()]",
+- "all_paths = set(python_paths + library_paths)",
+- "paths = []",
+- "for path in all_paths:",
+- " if os.path.isdir(path):",
+- " paths.append(path)",
+- "if len(paths) >=1:",
+- " print(paths[0])",
+- ]
+-
+- # The below script writes the above program to a file
+- # and executes it. This is to work around the limitation
+- # of not being able to upload files as part of execute.
+- cmd = "from os import linesep;"
+- cmd += "f = open('script.py', 'w');"
+- for line in print_lib:
+- cmd += "f.write(\"%s\" + linesep);" % line
+- cmd += "f.close();"
+- cmd += "from subprocess import call;"
+- cmd += "call([\"%s\", \"script.py\"]);" % python_bin
+-
+- result = execute(repository_ctx, [python_bin, "-c", cmd])
+- return result.stdout.strip()
+-
+-def _check_python_lib(repository_ctx, python_lib):
+- """Checks the python lib path."""
+- cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib)
+- result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
+- if result.return_code == 1:
+- auto_config_fail("Invalid python library path: %s" % python_lib)
+-
+-def _check_python_bin(repository_ctx, python_bin):
+- """Checks the python bin path."""
+- cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin)
+- result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
+- if result.return_code == 1:
+- auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % (
+- PYTHON_BIN_PATH,
+- python_bin,
+- ))
+-
+-def _get_python_include(repository_ctx, python_bin):
+- """Gets the python include path."""
+- result = execute(
+- repository_ctx,
+- [
+- python_bin,
+- "-Wignore",
+- "-c",
+- "import importlib; " +
+- "import importlib.util; " +
+- "print(importlib.import_module('distutils.sysconfig').get_python_inc() " +
+- "if importlib.util.find_spec('distutils.sysconfig') " +
+- "else importlib.import_module('sysconfig').get_path('include'))",
+- ],
+- error_msg = "Problem getting python include path.",
+- error_details = ("Is the Python binary path set up right? " +
+- "(See ./configure or " + PYTHON_BIN_PATH + ".) " +
+- "Is distutils installed?"),
+- )
+- return result.stdout.splitlines()[0]
+-
+-def _get_python_import_lib_name(repository_ctx, python_bin):
+- """Get Python import library name (pythonXY.lib) on Windows."""
+- result = execute(
+- repository_ctx,
+- [
+- python_bin,
+- "-c",
+- "import sys;" +
+- 'print("python" + str(sys.version_info[0]) + ' +
+- ' str(sys.version_info[1]) + ".lib")',
+- ],
+- error_msg = "Problem getting python import library.",
+- error_details = ("Is the Python binary path set up right? " +
+- "(See ./configure or " + PYTHON_BIN_PATH + ".) "),
+- )
+- return result.stdout.splitlines()[0]
+-
+-def _get_numpy_include(repository_ctx, python_bin):
+- """Gets the numpy include path."""
+- return execute(
+- repository_ctx,
+- [
+- python_bin,
+- "-c",
+- "from __future__ import print_function;" +
+- "import numpy;" +
+- " print(numpy.get_include());",
+- ],
+- error_msg = "Problem getting numpy include path.",
+- error_details = "Is numpy installed?",
+- ).stdout.splitlines()[0]
+-
+-def _create_local_python_repository(repository_ctx):
+- """Creates the repository containing files set up to build with Python."""
+-
+- # Resolve all labels before doing any real work. Resolving causes the
+- # function to be restarted with all previous state being lost. This
+- # can easily lead to a O(n^2) runtime in the number of labels.
+- build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl"))
+-
+- python_bin = get_python_bin(repository_ctx)
+- _check_python_bin(repository_ctx, python_bin)
+- python_lib = _get_python_lib(repository_ctx, python_bin)
+- _check_python_lib(repository_ctx, python_lib)
+- python_include = _get_python_include(repository_ctx, python_bin)
+- numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy"
+- python_include_rule = _symlink_genrule_for_dir(
+- repository_ctx,
+- python_include,
+- "python_include",
+- "python_include",
+- )
+- python_import_lib_genrule = ""
+-
+- # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
+- # See https://docs.python.org/3/extending/windows.html
+- if is_windows(repository_ctx):
+- python_bin = python_bin.replace("\\", "/")
+- python_include = _norm_path(python_include)
+- python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin)
+- python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name
+- python_import_lib_genrule = _symlink_genrule_for_dir(
+- repository_ctx,
+- None,
+- "",
+- "python_import_lib",
+- [python_import_lib_src],
+- [python_import_lib_name],
+- )
+- numpy_include_rule = _symlink_genrule_for_dir(
+- repository_ctx,
+- numpy_include,
+- "numpy_include/numpy",
+- "numpy_include",
+- )
+-
+- platform_constraint = ""
+- if repository_ctx.attr.platform_constraint:
+- platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint
+- repository_ctx.template("BUILD", build_tpl, {
+- "%{PYTHON_BIN_PATH}": python_bin,
+- "%{PYTHON_INCLUDE_GENRULE}": python_include_rule,
+- "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule,
+- "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule,
+- "%{PLATFORM_CONSTRAINT}": platform_constraint,
+- })
+-
+-def _create_remote_python_repository(repository_ctx, remote_config_repo):
+- """Creates pointers to a remotely configured repo set up to build with Python.
+- """
+- repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {})
+-
+-def _python_autoconf_impl(repository_ctx):
+- """Implementation of the python_autoconf repository rule."""
+- if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None:
+- _create_remote_python_repository(
+- repository_ctx,
+- get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO),
+- )
+- else:
+- _create_local_python_repository(repository_ctx)
+-
+-_ENVIRONS = [
+- BAZEL_SH,
+- PYTHON_BIN_PATH,
+- PYTHON_LIB_PATH,
+-]
+-
+-local_python_configure = repository_rule(
+- implementation = _create_local_python_repository,
+- environ = _ENVIRONS,
+- attrs = {
+- "environ": attr.string_dict(),
+- "platform_constraint": attr.string(),
+- },
+-)
+-
+-remote_python_configure = repository_rule(
+- implementation = _create_local_python_repository,
+- environ = _ENVIRONS,
+- remotable = True,
+- attrs = {
+- "environ": attr.string_dict(),
+- "platform_constraint": attr.string(),
+- },
+-)
+-
+-python_configure = repository_rule(
+- implementation = _python_autoconf_impl,
+- environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO],
+- attrs = {
+- "platform_constraint": attr.string(),
+- },
+-)
+-"""Detects and configures the local Python.
+-
+-Add the following to your WORKSPACE FILE:
+-
+-```python
+-python_configure(name = "local_config_python")
+-```
+-
+-Args:
+- name: A unique name for this workspace rule.
+-"""
+diff --git a/third_party/py/numpy/BUILD b/third_party/py/numpy/BUILD
+index 97c7907fc38..c80cc5287bc 100644
+--- a/third_party/py/numpy/BUILD
++++ b/third_party/py/numpy/BUILD
+@@ -2,14 +2,15 @@ licenses(["restricted"])
+
+ package(default_visibility = ["//visibility:public"])
+
+-alias(
++py_library(
+ name = "numpy",
+- actual = "@pypi_numpy//:pkg",
++ srcs = ["tf_numpy_dummy.py"],
++ srcs_version = "PY3",
+ )
+
+ alias(
+ name = "headers",
+- actual = "@pypi_numpy//:numpy_headers",
++ actual = "@local_config_python//:numpy_headers",
+ )
+
+ genrule(
+diff --git a/third_party/py/numpy/LICENSE b/third_party/py/numpy/LICENSE
+deleted file mode 100644
+index b9731f734f5..00000000000
+--- a/third_party/py/numpy/LICENSE
++++ /dev/null
+@@ -1,60 +0,0 @@
+-Copyright (c) 2005-2019, NumPy Developers.
+-All rights reserved.
+-
+-Redistribution and use in source and binary forms, with or without
+-modification, are permitted provided that the following conditions are
+-met:
+-
+- * Redistributions of source code must retain the above copyright
+- notice, this list of conditions and the following disclaimer.
+-
+- * Redistributions in binary form must reproduce the above
+- copyright notice, this list of conditions and the following
+- disclaimer in the documentation and/or other materials provided
+- with the distribution.
+-
+- * Neither the name of the NumPy Developers nor the names of any
+- contributors may be used to endorse or promote products derived
+- from this software without specific prior written permission.
+-
+-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+-
+-
+-
+-The NumPy repository and source distributions bundle several libraries that are
+-compatibly licensed. We list these here.
+-
+-Name: Numpydoc
+-Files: doc/sphinxext/numpydoc/*
+-License: 2-clause BSD
+- For details, see doc/sphinxext/LICENSE.txt
+-
+-Name: scipy-sphinx-theme
+-Files: doc/scipy-sphinx-theme/*
+-License: 3-clause BSD, PSF and Apache 2.0
+- For details, see doc/scipy-sphinx-theme/LICENSE.txt
+-
+-Name: lapack-lite
+-Files: numpy/linalg/lapack_lite/*
+-License: 3-clause BSD
+- For details, see numpy/linalg/lapack_lite/LICENSE.txt
+-
+-Name: tempita
+-Files: tools/npy_tempita/*
+-License: BSD derived
+- For details, see tools/npy_tempita/license.txt
+-
+-Name: dragon4
+-Files: numpy/core/src/multiarray/dragon4.c
+-License: One of a kind
+- For license text, see numpy/core/src/multiarray/dragon4.c
+diff --git a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+index 05939b53b5f..2f5490ad0c9 100644
+--- a/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
++++ b/third_party/py/numpy/tf_numpy_api/tensorflow.experimental.numpy.pbtxt
+@@ -390,7 +390,7 @@ tf_module {
+ }
+ member_method {
+ name: "experimental_enable_numpy_behavior"
+- argspec: "args=[\'prefer_float32\', \'dtype_conversion_mode\'], varargs=None, keywords=None, defaults=[\'False\', \'legacy\'], "
++ argspec: "args=[\'prefer_float32\'], varargs=None, keywords=None, defaults=[\'False\'], "
+ }
+ member_method {
+ name: "expm1"
+diff --git a/third_party/py/python_configure.bzl b/third_party/py/python_configure.bzl
+index 3728a91b931..300cbfb6c71 100644
+--- a/third_party/py/python_configure.bzl
++++ b/third_party/py/python_configure.bzl
+@@ -1,4 +1,9 @@
+ """Repository rule for Python autoconfiguration.
++
++`python_configure` depends on the following environment variables:
++
++ * `PYTHON_BIN_PATH`: location of python binary.
++ * `PYTHON_LIB_PATH`: Location of python libraries.
+ """
+
+ load(
+@@ -6,8 +11,195 @@ load(
+ "BAZEL_SH",
+ "PYTHON_BIN_PATH",
+ "PYTHON_LIB_PATH",
++ "TF_PYTHON_CONFIG_REPO",
++ "auto_config_fail",
++ "config_repo_label",
++ "execute",
++ "get_bash_bin",
++ "get_host_environ",
++ "get_python_bin",
++ "is_windows",
++ "raw_exec",
++ "read_dir",
+ )
+
++def _genrule(src_dir, genrule_name, command, outs):
++ """Returns a string with a genrule.
++
++ Genrule executes the given command and produces the given outputs.
++ """
++ return (
++ "genrule(\n" +
++ ' name = "' +
++ genrule_name + '",\n' +
++ " outs = [\n" +
++ outs +
++ "\n ],\n" +
++ ' cmd = """\n' +
++ command +
++ '\n """,\n' +
++ ")\n"
++ )
++
++def _norm_path(path):
++ """Returns a path with '/' and remove the trailing slash."""
++ path = path.replace("\\", "/")
++ if path[-1] == "/":
++ path = path[:-1]
++ return path
++
++def _symlink_genrule_for_dir(
++ repository_ctx,
++ src_dir,
++ dest_dir,
++ genrule_name,
++ src_files = [],
++ dest_files = []):
++ """Returns a genrule to symlink(or copy if on Windows) a set of files.
++
++ If src_dir is passed, files will be read from the given directory; otherwise
++ we assume files are in src_files and dest_files
++ """
++ if src_dir != None:
++ src_dir = _norm_path(src_dir)
++ dest_dir = _norm_path(dest_dir)
++ files = "\n".join(read_dir(repository_ctx, src_dir))
++
++ # Create a list with the src_dir stripped to use for outputs.
++ dest_files = files.replace(src_dir, "").splitlines()
++ src_files = files.splitlines()
++ command = []
++ outs = []
++ for i in range(len(dest_files)):
++ if dest_files[i] != "":
++ # If we have only one file to link we do not want to use the dest_dir, as
++ # $(@D) will include the full path to the file.
++ dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i]
++
++ # Copy the headers to create a sandboxable setup.
++ cmd = "cp -f"
++ command.append(cmd + ' "%s" "%s"' % (src_files[i], dest))
++ outs.append(' "' + dest_dir + dest_files[i] + '",')
++ genrule = _genrule(
++ src_dir,
++ genrule_name,
++ " && ".join(command),
++ "\n".join(outs),
++ )
++ return genrule
++
++def _get_python_lib(repository_ctx, python_bin):
++ """Gets the python lib path."""
++ python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH)
++ if python_lib != None:
++ return python_lib
++
++ # The interesting program to execute.
++ print_lib = [
++ "from __future__ import print_function",
++ "import site",
++ "import os",
++ "python_paths = []",
++ "if os.getenv('PYTHONPATH') is not None:",
++ " python_paths = os.getenv('PYTHONPATH').split(':')",
++ "try:",
++ " library_paths = site.getsitepackages()",
++ "except AttributeError:",
++ " from distutils.sysconfig import get_python_lib",
++ " library_paths = [get_python_lib()]",
++ "all_paths = set(python_paths + library_paths)",
++ "paths = []",
++ "for path in all_paths:",
++ " if os.path.isdir(path):",
++ " paths.append(path)",
++ "if len(paths) >=1:",
++ " print(paths[0])",
++ ]
++
++ # The below script writes the above program to a file
++ # and executes it. This is to work around the limitation
++ # of not being able to upload files as part of execute.
++ cmd = "from os import linesep;"
++ cmd += "f = open('script.py', 'w');"
++ for line in print_lib:
++ cmd += "f.write(\"%s\" + linesep);" % line
++ cmd += "f.close();"
++ cmd += "from subprocess import call;"
++ cmd += "call([\"%s\", \"script.py\"]);" % python_bin
++
++ result = execute(repository_ctx, [python_bin, "-c", cmd])
++ return result.stdout.strip()
++
++def _check_python_lib(repository_ctx, python_lib):
++ """Checks the python lib path."""
++ cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib)
++ result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
++ if result.return_code == 1:
++ auto_config_fail("Invalid python library path: %s" % python_lib)
++
++def _check_python_bin(repository_ctx, python_bin):
++ """Checks the python bin path."""
++ cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin)
++ result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
++ if result.return_code == 1:
++ auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % (
++ PYTHON_BIN_PATH,
++ python_bin,
++ ))
++
++def _get_python_include(repository_ctx, python_bin):
++ """Gets the python include path."""
++ result = execute(
++ repository_ctx,
++ [
++ python_bin,
++ "-Wignore",
++ "-c",
++ "import importlib; " +
++ "import importlib.util; " +
++ "print(importlib.import_module('distutils.sysconfig').get_python_inc() " +
++ "if importlib.util.find_spec('distutils.sysconfig') " +
++ "else importlib.import_module('sysconfig').get_path('include'))",
++ ],
++ error_msg = "Problem getting python include path.",
++ error_details = ("Is the Python binary path set up right? " +
++ "(See ./configure or " + PYTHON_BIN_PATH + ".) " +
++ "Is distutils installed?"),
++ )
++ return result.stdout.splitlines()[0]
++
++def _get_python_import_lib_name(repository_ctx, python_bin):
++ """Get Python import library name (pythonXY.lib) on Windows."""
++ result = execute(
++ repository_ctx,
++ [
++ python_bin,
++ "-c",
++ "import sys;" +
++ 'print("python" + str(sys.version_info[0]) + ' +
++ ' str(sys.version_info[1]) + ".lib")',
++ ],
++ error_msg = "Problem getting python import library.",
++ error_details = ("Is the Python binary path set up right? " +
++ "(See ./configure or " + PYTHON_BIN_PATH + ".) "),
++ )
++ return result.stdout.splitlines()[0]
++
++def _get_numpy_include(repository_ctx, python_bin):
++ """Gets the numpy include path."""
++ return execute(
++ repository_ctx,
++ [
++ python_bin,
++ "-c",
++ "from __future__ import print_function;" +
++ "import numpy;" +
++ " print(numpy.get_include());",
++ ],
++ error_msg = "Problem getting numpy include path.",
++ error_details = "Is numpy installed?",
++ ).stdout.splitlines()[0]
++
+ def _create_local_python_repository(repository_ctx):
+ """Creates the repository containing files set up to build with Python."""
+
+@@ -15,14 +207,68 @@ def _create_local_python_repository(repository_ctx):
+ # function to be restarted with all previous state being lost. This
+ # can easily lead to a O(n^2) runtime in the number of labels.
+ build_tpl = repository_ctx.path(Label("//third_party/py:BUILD.tpl"))
++
++ python_bin = get_python_bin(repository_ctx)
++ _check_python_bin(repository_ctx, python_bin)
++ python_lib = _get_python_lib(repository_ctx, python_bin)
++ _check_python_lib(repository_ctx, python_lib)
++ python_include = _get_python_include(repository_ctx, python_bin)
++ numpy_include = _get_numpy_include(repository_ctx, python_bin) + "/numpy"
++ python_include_rule = _symlink_genrule_for_dir(
++ repository_ctx,
++ python_include,
++ "python_include",
++ "python_include",
++ )
++ python_import_lib_genrule = ""
++
++ # To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
++ # See https://docs.python.org/3/extending/windows.html
++ if is_windows(repository_ctx):
++ python_bin = python_bin.replace("\\", "/")
++ python_include = _norm_path(python_include)
++ python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin)
++ python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name
++ python_import_lib_genrule = _symlink_genrule_for_dir(
++ repository_ctx,
++ None,
++ "",
++ "python_import_lib",
++ [python_import_lib_src],
++ [python_import_lib_name],
++ )
++ numpy_include_rule = _symlink_genrule_for_dir(
++ repository_ctx,
++ numpy_include,
++ "numpy_include/numpy",
++ "numpy_include",
++ )
++
+ platform_constraint = ""
+ if repository_ctx.attr.platform_constraint:
+ platform_constraint = "\"%s\"" % repository_ctx.attr.platform_constraint
+- repository_ctx.template("BUILD", build_tpl, {"%{PLATFORM_CONSTRAINT}": platform_constraint})
++ repository_ctx.template("BUILD", build_tpl, {
++ "%{PYTHON_BIN_PATH}": python_bin,
++ "%{PYTHON_INCLUDE_GENRULE}": python_include_rule,
++ "%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule,
++ "%{NUMPY_INCLUDE_GENRULE}": numpy_include_rule,
++ "%{PLATFORM_CONSTRAINT}": platform_constraint,
++ })
++
++def _create_remote_python_repository(repository_ctx, remote_config_repo):
++ """Creates pointers to a remotely configured repo set up to build with Python.
++ """
++ repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {})
+
+ def _python_autoconf_impl(repository_ctx):
+ """Implementation of the python_autoconf repository rule."""
+- _create_local_python_repository(repository_ctx)
++ if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None:
++ _create_remote_python_repository(
++ repository_ctx,
++ get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO),
++ )
++ else:
++ _create_local_python_repository(repository_ctx)
+
+ _ENVIRONS = [
+ BAZEL_SH,
+@@ -32,6 +278,7 @@ _ENVIRONS = [
+
+ local_python_configure = repository_rule(
+ implementation = _create_local_python_repository,
++ environ = _ENVIRONS,
+ attrs = {
+ "environ": attr.string_dict(),
+ "platform_constraint": attr.string(),
+@@ -50,6 +297,7 @@ remote_python_configure = repository_rule(
+
+ python_configure = repository_rule(
+ implementation = _python_autoconf_impl,
++ environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO],
+ attrs = {
+ "platform_constraint": attr.string(),
+ },
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch
new file mode 100644
index 000000000000..d6c502878849
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0013-installation-remove-cp_local_config_python.patch
@@ -0,0 +1,68 @@
+From 9a0eb9b34277229370d8df8407e4b99c13a6da0f Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 22 Dec 2023 20:25:52 +0800
+Subject: [PATCH 13/13] installation: remove `cp_local_config_python`
+
+Revert https://github.com/tensorflow/tensorflow/commit/a034b3d48a9d3dbccff22800ab4b435a89f45103
+---
+ .../tools/pip_package/build_pip_package.sh | 25 -------------------
+ 1 file changed, 25 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
+index 4a2d42bba58..af76ca4d978 100755
+--- a/tensorflow/tools/pip_package/build_pip_package.sh
++++ b/tensorflow/tools/pip_package/build_pip_package.sh
+@@ -47,22 +47,6 @@ function cp_external() {
+ cp "${src_dir}/local_config_cuda/cuda/cuda/cuda_config.h" "${dest_dir}/local_config_cuda/cuda/cuda/"
+ }
+
+-function cp_local_config_python() {
+- local src_dir=$1
+- local dest_dir=$2
+- pushd .
+- cd "$src_dir"
+- mkdir -p "${dest_dir}/local_config_python/numpy_include/"
+- cp -r "pypi_numpy/site-packages/numpy/core/include/numpy" "${dest_dir}/local_config_python/numpy_include/"
+- mkdir -p "${dest_dir}/local_config_python/python_include/"
+- if is_windows; then
+- cp -r python_*/include/* "${dest_dir}/local_config_python/python_include/"
+- else
+- cp -r python_*/include/python*/* "${dest_dir}/local_config_python/python_include/"
+- fi
+- popd
+-}
+-
+ function copy_xla_aot_runtime_sources() {
+ local src_dir=$1
+ local dst_dir=$2
+@@ -174,9 +158,6 @@ function prepare_src() {
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+ "${EXTERNAL_INCLUDES}/"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+- "${EXTERNAL_INCLUDES}/"
+ copy_xla_aot_runtime_sources \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles/org_tensorflow \
+ "${XLA_AOT_RUNTIME_SOURCES}/"
+@@ -220,17 +201,11 @@ function prepare_src() {
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+ "${EXTERNAL_INCLUDES}"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+- "${EXTERNAL_INCLUDES}"
+ else
+ # New-style runfiles structure (--nolegacy_external_runfiles).
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+ "${EXTERNAL_INCLUDES}"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+- "${EXTERNAL_INCLUDES}"
+ fi
+ copy_xla_aot_runtime_sources \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow \
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch b/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch
new file mode 100644
index 000000000000..379aa7385df1
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.14.1-0014-Fixing-build-issue-with-Clang-16.patch
@@ -0,0 +1,25 @@
+From 9db05c2eb25372b3be4badb15aa67d19298aeaf4 Mon Sep 17 00:00:00 2001
+From: Ben Olson <matthew.olson@intel.com>
+Date: Tue, 8 Aug 2023 17:15:50 -0500
+Subject: [PATCH 14/14] Fixing build issue with Clang 16
+
+---
+ tensorflow/tsl/lib/io/cache.h | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/tensorflow/tsl/lib/io/cache.h b/tensorflow/tsl/lib/io/cache.h
+index f894c5916d5..e49d09b7450 100644
+--- a/tensorflow/tsl/lib/io/cache.h
++++ b/tensorflow/tsl/lib/io/cache.h
+@@ -16,6 +16,8 @@ limitations under the License.
+ #ifndef TENSORFLOW_TSL_LIB_IO_CACHE_H_
+ #define TENSORFLOW_TSL_LIB_IO_CACHE_H_
+
++#include <cstdint>
++
+ #include "tensorflow/tsl/platform/stringpiece.h"
+
+ // A Cache is an interface that maps keys to values. It has internal
+--
+2.43.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
new file mode 100644
index 000000000000..0143ea01cab7
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch
@@ -0,0 +1,37 @@
+From c49a30cc3e2d96cbe97a7012ef89800af7a0c5e0 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:34:44 +0800
+Subject: [PATCH 01/13] WORKSPACE: add rules-docker http_archive,
+ bazel-toolchains uses git_repo
+
+git_repository() rules cannot pull from --distdir and fail when building
+without internet access. Use http_archive instead and pin the sha256
+hash as well.
+---
+ WORKSPACE | 11 +++++++++++
+ 1 file changed, 11 insertions(+)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index 6a85ffeb29a..c10a2c4a482 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -1,5 +1,16 @@
+ workspace(name = "org_tensorflow")
+
++load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
++
++http_archive(
++ name = "io_bazel_rules_docker",
++ sha256 = "7d453450e1eb70e238eea6b31f4115607ec1200e91afea01c25f9804f37e39c8",
++ strip_prefix = "rules_docker-0.10.0",
++ urls = [
++ "https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz",
++ ],
++)
++
+ # We must initialize hermetic python first.
+ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
new file mode 100644
index 000000000000..d3f1471f6dff
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch
@@ -0,0 +1,32 @@
+From 79f6530e6beb5ca25fd07245f251897bf60c85e2 Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sun, 6 Feb 2022 00:13:56 -0800
+Subject: [PATCH 02/13] systemlib: Latest absl LTS has split cord libs
+
+---
+ third_party/absl/system.absl.strings.BUILD | 9 ++++++++-
+ 1 file changed, 8 insertions(+), 1 deletion(-)
+
+diff --git a/third_party/absl/system.absl.strings.BUILD b/third_party/absl/system.absl.strings.BUILD
+index fa9a7a84f67..63bac99d71b 100644
+--- a/third_party/absl/system.absl.strings.BUILD
++++ b/third_party/absl/system.absl.strings.BUILD
+@@ -26,7 +26,14 @@ cc_library(
+
+ cc_library(
+ name = "cord",
+- linkopts = ["-labsl_cord"],
++ linkopts = [
++ "-labsl_cord",
++ "-labsl_cord_internal",
++ "-labsl_cordz_functions",
++ "-labsl_cordz_handle",
++ "-labsl_cordz_info",
++ "-labsl_cordz_sample_token",
++ ],
+ deps = [
+ ":str_format",
+ "//absl/container:compressed_tuple",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
new file mode 100644
index 000000000000..5ffab1f6c269
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch
@@ -0,0 +1,29 @@
+From 51b47cec16b0818d47086b629fb361f90cca284a Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Mon, 5 Sep 2022 12:52:44 -0700
+Subject: [PATCH 03/13] mkl_dnn: Must link against libm for round and log2
+
+---
+ third_party/mkl_dnn/mkldnn_v1.BUILD | 6 +++---
+ 1 file changed, 3 insertions(+), 3 deletions(-)
+
+diff --git a/third_party/mkl_dnn/mkldnn_v1.BUILD b/third_party/mkl_dnn/mkldnn_v1.BUILD
+index e25a66db664..4185df77f3d 100644
+--- a/third_party/mkl_dnn/mkldnn_v1.BUILD
++++ b/third_party/mkl_dnn/mkldnn_v1.BUILD
+@@ -165,9 +165,9 @@ cc_library(
+ includes = _INCLUDES_LIST,
+ # TODO(penpornk): Use lrt_if_needed from tensorflow.bzl instead.
+ linkopts = select({
+- "@local_tsl//tsl:linux_aarch64": ["-lrt"],
+- "@local_tsl//tsl:linux_x86_64": ["-lrt"],
+- "@local_tsl//tsl:linux_ppc64le": ["-lrt"],
++ "@local_tsl//tsl:linux_aarch64": ["-lrt", "-lm"],
++ "@local_tsl//tsl:linux_x86_64": ["-lrt", "-lm"],
++ "@local_tsl//tsl:linux_ppc64le": ["-lrt", "-lm"],
+ "//conditions:default": [],
+ }),
+ textual_hdrs = _TEXTUAL_HDRS_LIST,
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
new file mode 100644
index 000000000000..d540ee2c03ce
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch
@@ -0,0 +1,35 @@
+From e9d513bbb40004ff25a9809eb99e32958e8e3b82 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:42:48 +0800
+Subject: [PATCH 04/13] tensorflow_cc: Add systemlib nsync linkopts
+
+Linkopts dont get propagated up to the shared library correctly so
+workaround by applying them directly
+---
+ tensorflow/BUILD | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index f72931217b3..ffbe65fdc61 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -6,6 +6,7 @@ load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+ load("@bazel_skylib//lib:selects.bzl", "selects")
+ load("@bazel_skylib//rules:common_settings.bzl", "bool_flag", "bool_setting")
+ load("@local_config_cuda//cuda:build_defs.bzl", "if_cuda")
++load("@local_config_syslibs//:build_defs.bzl", "if_system_lib")
+ load(
+ "//tensorflow:tensorflow.bzl",
+ "VERSION",
+@@ -1324,7 +1325,7 @@ tf_cc_shared_library(
+ "-z defs",
+ "-Wl,--version-script,$(location //tensorflow:tf_version_script.lds)",
+ ],
+- }),
++ }) + if_system_lib("nsync", ["-lnsync_cpp"]),
+ per_os_targets = True,
+ roots = [
+ "//tensorflow/c:c_api",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
new file mode 100644
index 000000000000..520f4bcf9a68
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch
@@ -0,0 +1,71 @@
+From b43d73336a9209717adb038ca1fe3a808d3ce36e Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Tue, 30 May 2023 09:10:03 -0700
+Subject: [PATCH 05/13] systemlib: Updates for Abseil 20220623 LTS
+
+These targets are header-only and just need stub bazel targets
+---
+ third_party/absl/system.absl.functional.BUILD | 22 +++++++++++++++++++
+ third_party/absl/system.absl.random.BUILD | 12 ++++++++++
+ 2 files changed, 34 insertions(+)
+
+diff --git a/third_party/absl/system.absl.functional.BUILD b/third_party/absl/system.absl.functional.BUILD
+index a4f70acf35c..579181dec07 100644
+--- a/third_party/absl/system.absl.functional.BUILD
++++ b/third_party/absl/system.absl.functional.BUILD
+@@ -2,10 +2,32 @@ load("@rules_cc//cc:defs.bzl", "cc_library")
+
+ package(default_visibility = ["//visibility:public"])
+
++cc_library(
++ name = "any_invocable",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
++)
++
+ cc_library(
+ name = "bind_front",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/container:compressed_tuple",
++ "//absl/meta:type_traits",
++ "//absl/utility",
++ ],
+ )
+
+ cc_library(
+ name = "function_ref",
++ deps = [
++ "//absl/base:base_internal",
++ "//absl/base:core_headers",
++ "//absl/meta:type_traits",
++ ],
+ )
+diff --git a/third_party/absl/system.absl.random.BUILD b/third_party/absl/system.absl.random.BUILD
+index 948de07751a..5ebd656be8e 100644
+--- a/third_party/absl/system.absl.random.BUILD
++++ b/third_party/absl/system.absl.random.BUILD
+@@ -51,3 +51,15 @@ cc_library(
+ "//absl/types:span",
+ ],
+ )
++
++cc_library(
++ name = "bit_gen_ref",
++ deps = [
++ ":random",
++ "//absl/base:core_headers",
++ "//absl/base:fast_type_id",
++ "//absl/meta:type_traits",
++ "//absl/random/internal:distribution_caller",
++ "//absl/random/internal:fast_uniform_bits",
++ ],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0006-systemlib-Update-targets-for-absl_py.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0006-systemlib-Update-targets-for-absl_py.patch
new file mode 100644
index 000000000000..79c9773e2b5a
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0006-systemlib-Update-targets-for-absl_py.patch
@@ -0,0 +1,24 @@
+From d12cf2b464f129c8aeb513c34304c117e5384ffd Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:48:15 +0800
+Subject: [PATCH 06/13] systemlib: Update targets for absl_py
+
+---
+ third_party/systemlibs/absl_py.absl.flags.BUILD | 3 +++
+ 1 file changed, 3 insertions(+)
+
+diff --git a/third_party/systemlibs/absl_py.absl.flags.BUILD b/third_party/systemlibs/absl_py.absl.flags.BUILD
+index d92f4949df1..614938fb8c4 100644
+--- a/third_party/systemlibs/absl_py.absl.flags.BUILD
++++ b/third_party/systemlibs/absl_py.absl.flags.BUILD
+@@ -8,4 +8,7 @@ py_library(
+
+ py_library(
+ name = "argparse_flags",
++ deps = [
++ ":flags",
++ ],
+ )
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0007-systemlib-Add-well_known_types_py_pb2-target.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0007-systemlib-Add-well_known_types_py_pb2-target.patch
new file mode 100644
index 000000000000..6a6c722da38f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0007-systemlib-Add-well_known_types_py_pb2-target.patch
@@ -0,0 +1,28 @@
+From e5c0870dda44361c1d3ffdf8a0adc549ba16444a Mon Sep 17 00:00:00 2001
+From: Jason Zaman <jason@perfinion.com>
+Date: Sat, 3 Jun 2023 16:23:51 -0700
+Subject: [PATCH 07/13] systemlib: Add well_known_types_py_pb2 target
+
+Bug: https://github.com/tensorflow/tensorflow/issues/60667
+---
+ third_party/systemlibs/protobuf.BUILD | 7 +++++++
+ 1 file changed, 7 insertions(+)
+
+diff --git a/third_party/systemlibs/protobuf.BUILD b/third_party/systemlibs/protobuf.BUILD
+index 4d05ab28d12..b3d72b0e3ad 100644
+--- a/third_party/systemlibs/protobuf.BUILD
++++ b/third_party/systemlibs/protobuf.BUILD
+@@ -111,3 +111,10 @@ py_library(
+ visibility = ["//visibility:public"],
+ deps = [dep + "_proto" for dep in proto[1][1]],
+ ) for proto in WELL_KNOWN_PROTO_MAP.items()]
++
++py_proto_library(
++ name = "well_known_types_py_pb2",
++ include = ".",
++ srcs = [proto[1][0] for proto in WELL_KNOWN_PROTO_MAP.items()],
++ visibility = ["//visibility:public"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0008-Relax-setup.py-version-requirements.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0008-Relax-setup.py-version-requirements.patch
new file mode 100644
index 000000000000..949805fd77e7
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0008-Relax-setup.py-version-requirements.patch
@@ -0,0 +1,86 @@
+From fada9b72dc55a016cc702e50fd688a6e9d53793e Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 24 Nov 2023 16:54:18 +0800
+Subject: [PATCH 08/13] Relax setup.py version requirements
+
+---
+ tensorflow/tools/pip_package/setup.py | 42 +++++++++++++--------------
+ 1 file changed, 21 insertions(+), 21 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index b416731380f..a21f566ee4d 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -88,8 +88,8 @@ REQUIRED_PACKAGES = [
+ 'gast >=0.2.1,!=0.5.0,!=0.5.1,!=0.5.2',
+ 'google_pasta >= 0.1.1',
+ 'h5py >= 2.9.0',
+- 'libclang >= 13.0.0',
+- 'ml_dtypes ~= 0.2.0',
++ # 'libclang >= 13.0.0',
++ 'ml_dtypes >= 0.2.0',
+ 'numpy >= 1.23.5, < 2.0.0',
+ 'opt_einsum >= 2.3.2',
+ 'packaging',
+@@ -101,8 +101,8 @@ REQUIRED_PACKAGES = [
+ 'six >= 1.12.0',
+ 'termcolor >= 1.1.0',
+ 'typing_extensions >= 3.6.6',
+- 'wrapt >= 1.11.0, < 1.15',
+- 'tensorflow-io-gcs-filesystem >= 0.23.1',
++ 'wrapt >= 1.11.0',
++ # 'tensorflow-io-gcs-filesystem >= 0.23.1',
+ # grpcio does not build correctly on big-endian machines due to lack of
+ # BoringSSL support.
+ # See https://github.com/tensorflow/tensorflow/issues/17882.
+@@ -135,8 +135,8 @@ FAKE_REQUIRED_PACKAGES = [
+ _VERSION + ';platform_system=="Windows"',
+ ]
+
+-if platform.system() == 'Linux' and platform.machine() == 'x86_64':
+- REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
++# if platform.system() == 'Linux' and platform.machine() == 'x86_64':
++# REQUIRED_PACKAGES.append(FAKE_REQUIRED_PACKAGES)
+
+ if collaborator_build:
+ # If this is a collaborator build, then build an "installer" wheel and
+@@ -163,21 +163,21 @@ if collaborator_build:
+ EXTRA_PACKAGES = {}
+ EXTRA_PACKAGES['and-cuda'] = [
+ # TODO(nluehr): set nvidia-* versions based on build components.
+- 'nvidia-cublas-cu12 == 12.2.5.6',
+- 'nvidia-cuda-cupti-cu12 == 12.2.142',
+- 'nvidia-cuda-nvcc-cu12 == 12.2.140',
+- 'nvidia-cuda-nvrtc-cu12 == 12.2.140',
+- 'nvidia-cuda-runtime-cu12 == 12.2.140',
+- 'nvidia-cudnn-cu12 == 8.9.4.25',
+- 'nvidia-cufft-cu12 == 11.0.8.103',
+- 'nvidia-curand-cu12 == 10.3.3.141',
+- 'nvidia-cusolver-cu12 == 11.5.2.141',
+- 'nvidia-cusparse-cu12 == 12.1.2.141',
+- 'nvidia-nccl-cu12 == 2.16.5',
+- 'nvidia-nvjitlink-cu12 == 12.2.140',
+- 'tensorrt == 8.6.1.post1',
+- 'tensorrt-bindings == 8.6.1',
+- 'tensorrt-libs == 8.6.1',
++ 'nvidia-cublas-cu12 >= 12.2.5.6',
++ 'nvidia-cuda-cupti-cu12 >= 12.2.142',
++ 'nvidia-cuda-nvcc-cu12 >= 12.2.140',
++ 'nvidia-cuda-nvrtc-cu12 >= 12.2.140',
++ 'nvidia-cuda-runtime-cu12 >= 12.2.140',
++ 'nvidia-cudnn-cu12 >= 8.9.4.25',
++ 'nvidia-cufft-cu12 >= 11.0.8.103',
++ 'nvidia-curand-cu12 >= 10.3.3.141',
++ 'nvidia-cusolver-cu12 >= 11.5.2.141',
++ 'nvidia-cusparse-cu12 >= 12.1.2.141',
++ 'nvidia-nccl-cu12 >= 2.16.5',
++ 'nvidia-nvjitlink-cu12 >= 12.2.140',
++ 'tensorrt >= 8.6.1.post1',
++ 'tensorrt-bindings >= 8.6.1',
++ 'tensorrt-libs >= 8.6.1',
+ ]
+
+ DOCLINES = __doc__.split('\n')
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0009-systemlib-update-targets-for-absl.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0009-systemlib-update-targets-for-absl.patch
new file mode 100644
index 000000000000..4cbce75b96de
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0009-systemlib-update-targets-for-absl.patch
@@ -0,0 +1,365 @@
+From de3fc37567b3b135ee45c2ca8bd2256ea53812e8 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Sun, 26 Nov 2023 13:12:20 +0800
+Subject: [PATCH 09/13] systemlib: update targets for absl
+
+---
+ .../compiler/mlir/tools/kernel_gen/BUILD | 1 +
+ .../distribute/experimental/rpc/kernels/BUILD | 1 +
+ third_party/absl/system.absl.debugging.BUILD | 20 +-
+ third_party/absl/system.absl.log.BUILD | 271 ++++++++++++++++++
+ third_party/absl/workspace.bzl | 1 +
+ 5 files changed, 289 insertions(+), 5 deletions(-)
+ create mode 100644 third_party/absl/system.absl.log.BUILD
+
+diff --git a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+index e6ce181074d..ff3f65fb551 100644
+--- a/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
++++ b/tensorflow/compiler/mlir/tools/kernel_gen/BUILD
+@@ -107,6 +107,7 @@ tf_cc_binary(
+ "//tensorflow/compiler/mlir/tensorflow",
+ "//tensorflow/core:lib",
+ "@com_google_absl//absl/status",
++ "@com_google_absl//absl/log:check",
+ "@com_google_absl//absl/strings",
+ "@llvm-project//llvm:AArch64CodeGen", # fixdeps: keep
+ "@llvm-project//llvm:ARMCodeGen", # fixdeps: keep
+diff --git a/tensorflow/distribute/experimental/rpc/kernels/BUILD b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+index 60fa55c7702..6527914c51d 100644
+--- a/tensorflow/distribute/experimental/rpc/kernels/BUILD
++++ b/tensorflow/distribute/experimental/rpc/kernels/BUILD
+@@ -64,6 +64,7 @@ tf_kernel_library(
+ "//tensorflow/distribute/experimental/rpc/proto:tf_rpc_service_proto_cc",
+ "@com_github_grpc_grpc//:grpc++",
+ "@com_google_absl//absl/status",
++ "@com_google_absl//absl/log:check",
+ "@com_google_absl//absl/strings",
+ "@com_google_absl//absl/strings:str_format",
+ "@local_xla//xla/stream_executor/platform",
+diff --git a/third_party/absl/system.absl.debugging.BUILD b/third_party/absl/system.absl.debugging.BUILD
+index 931ffdc9e92..223db7b4c46 100644
+--- a/third_party/absl/system.absl.debugging.BUILD
++++ b/third_party/absl/system.absl.debugging.BUILD
+@@ -26,15 +26,25 @@ cc_library(
+
+ cc_library(
+ name = "failure_signal_handler",
+- linkopts = [
+- "-labsl_failure_signal_handler",
+- "-labsl_examine_stack",
++ linkopts = ["-labsl_failure_signal_handler"],
++ deps = [
++ ":examine_stack",
++ ":stacktrace",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
+ ],
++)
++
++cc_library(
++ name = "examine_stack",
++ linkopts = ["-labsl_examine_stack"],
+ deps = [
+ ":stacktrace",
+ ":symbolize",
+- "//absl/base",
+- "//absl/base:errno_saver",
++ "//absl/base:config",
++ "//absl/base:core_headers",
+ "//absl/base:raw_logging_internal",
+ ],
+ )
+diff --git a/third_party/absl/system.absl.log.BUILD b/third_party/absl/system.absl.log.BUILD
+new file mode 100644
+index 00000000000..9a2a5de657e
+--- /dev/null
++++ b/third_party/absl/system.absl.log.BUILD
+@@ -0,0 +1,271 @@
++load("@rules_cc//cc:defs.bzl", "cc_library")
++
++package(default_visibility = ["//visibility:public"])
++
++cc_library(
++ name = "log",
++ deps = [
++ "//absl/log:internal_log_impl",
++ ],
++)
++
++cc_library(
++ name = "internal_log_impl",
++ deps = [
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ ],
++)
++
++cc_library(
++ name = "internal_conditions",
++ linkopts = ["-labsl_log_internal_conditions"],
++ deps = [
++ ":internal_voidify",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_voidify",
++ deps = ["//absl/base:config"],
++)
++
++cc_library(
++ name = "internal_message",
++ linkopts = ["-labsl_log_internal_message"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_append_truncated",
++ ":internal_format",
++ ":internal_globals",
++ ":internal_log_sink_set",
++ ":internal_nullguard",
++ ":internal_proto",
++ ":severity",
++ ":sink",
++ ":sink_registry",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:errno_saver",
++ "//absl/base:raw_logging_internal",
++ "//absl/base:strerror",
++ "//absl/container:inlined_vector",
++ "//absl/debugging:examine_stack",
++ "//absl/memory",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_append_truncated",
++ deps = [
++ "//absl/base:config",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_format",
++ linkopts = ["-labsl_log_internal_format"],
++ deps = [
++ ":internal_append_truncated",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/strings:str_format",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_globals",
++ linkopts = ["-labsl_log_internal_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/strings",
++ "//absl/time",
++ ],
++)
++
++cc_library(
++ name = "internal_proto",
++ linkopts = ["-labsl_log_internal_proto"],
++ deps = [
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_log_sink_set",
++ linkopts = ["-labsl_log_internal_log_sink_set"],
++ deps = [
++ ":entry",
++ ":globals",
++ ":internal_config",
++ ":internal_globals",
++ ":severity",
++ ":sink",
++ "//absl/base",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/cleanup",
++ "//absl/strings",
++ "//absl/synchronization",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "internal_config",
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_nullguard",
++ linkopts = ["-labsl_log_internal_nullguard"],
++ deps = [
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "globals",
++ linkopts = ["-labsl_log_globals"],
++ deps = [
++ ":severity",
++ "//absl/base:atomic_hook",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/base:raw_logging_internal",
++ "//absl/hash",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "entry",
++ linkopts = ["-labsl_log_entry"],
++ deps = [
++ ":internal_config",
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ "//absl/time",
++ "//absl/types:span",
++ ],
++)
++
++cc_library(
++ name = "severity",
++ linkopts = ["-labsl_log_severity"],
++ deps = ["//absl/base:core_headers"],
++)
++
++cc_library(
++ name = "sink",
++ linkopts = ["-labsl_log_sink"],
++ deps = [
++ ":entry",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "sink_registry",
++ deps = [
++ ":internal_log_sink_set",
++ ":sink",
++ "//absl/base:config",
++ ],
++)
++
++cc_library(
++ name = "internal_strip",
++ deps = [
++ ":internal_message",
++ ":internal_nullstream",
++ ":severity",
++ ],
++)
++
++cc_library(
++ name = "internal_nullstream",
++ deps = [
++ ":severity",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "check",
++ deps = [
++ ":internal_check_impl",
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_impl",
++ deps = [
++ ":internal_check_op",
++ ":internal_conditions",
++ ":internal_message",
++ ":internal_strip",
++ "//absl/base:core_headers",
++ ],
++)
++
++cc_library(
++ name = "internal_check_op",
++ linkopts = ["-labsl_log_internal_check_op"],
++ deps = [
++ ":internal_nullguard",
++ ":internal_nullstream",
++ ":internal_strip",
++ "//absl/base:config",
++ "//absl/base:core_headers",
++ "//absl/strings",
++ ],
++)
++
++cc_library(
++ name = "absl_check",
++ deps = [":internal_check_impl"],
++)
++
++cc_library(
++ name = "absl_log",
++ deps = [":internal_log_impl"],
++)
+diff --git a/third_party/absl/workspace.bzl b/third_party/absl/workspace.bzl
+index 07f49cebb78..a7f4e5ffc44 100644
+--- a/third_party/absl/workspace.bzl
++++ b/third_party/absl/workspace.bzl
+@@ -20,6 +20,7 @@ def repo():
+ "flags",
+ "functional",
+ "hash",
++ "log",
+ "memory",
+ "meta",
+ "numeric",
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0010-systemlib-fix-missing-osx-in-pybind11.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0010-systemlib-fix-missing-osx-in-pybind11.patch
new file mode 100644
index 000000000000..4e77e14577d1
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0010-systemlib-fix-missing-osx-in-pybind11.patch
@@ -0,0 +1,25 @@
+From 0c2e7e4e987aea6759e66f49b471b89d1cc01269 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:22:35 +0800
+Subject: [PATCH 10/13] systemlib: fix missing `:osx` in pybind11
+
+---
+ third_party/systemlibs/pybind11.BUILD | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/third_party/systemlibs/pybind11.BUILD b/third_party/systemlibs/pybind11.BUILD
+index 79a483d7b5d..cda63fbd019 100644
+--- a/third_party/systemlibs/pybind11.BUILD
++++ b/third_party/systemlibs/pybind11.BUILD
+@@ -6,3 +6,8 @@ cc_library(
+ "@org_tensorflow//third_party/python_runtime:headers",
+ ],
+ )
++
++config_setting(
++ name = "osx",
++ constraint_values = ["@platforms//os:osx"],
++)
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
new file mode 100644
index 000000000000..ec6e9ef3e36f
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch
@@ -0,0 +1,25 @@
+From c582286b732a820b29f1c03e2fa2c4aa61a4af03 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Thu, 21 Dec 2023 22:24:24 +0800
+Subject: [PATCH 11/13] systemlib: fix missing `LICENSE` in flatbuffers
+
+---
+ third_party/flatbuffers/BUILD.system | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/third_party/flatbuffers/BUILD.system b/third_party/flatbuffers/BUILD.system
+index 8fe4d7a5907..b1d63b4ca0f 100644
+--- a/third_party/flatbuffers/BUILD.system
++++ b/third_party/flatbuffers/BUILD.system
+@@ -1,7 +1,7 @@
+ licenses(["notice"]) # Apache 2.0
+
+ filegroup(
+- name = "LICENSE.txt",
++ name = "LICENSE",
+ visibility = ["//visibility:public"],
+ )
+
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0012-installation-remove-cp_local_config_python.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0012-installation-remove-cp_local_config_python.patch
new file mode 100644
index 000000000000..e78efd2e9944
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0012-installation-remove-cp_local_config_python.patch
@@ -0,0 +1,68 @@
+From 90ad87a2d71522412ec80f7f7025d828354a6e20 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 22 Dec 2023 20:25:52 +0800
+Subject: [PATCH 12/13] installation: remove `cp_local_config_python`
+
+Revert https://github.com/tensorflow/tensorflow/commit/a034b3d48a9d3dbccff22800ab4b435a89f45103
+---
+ .../tools/pip_package/build_pip_package.sh | 25 -------------------
+ 1 file changed, 25 deletions(-)
+
+diff --git a/tensorflow/tools/pip_package/build_pip_package.sh b/tensorflow/tools/pip_package/build_pip_package.sh
+index 423a79bff78..d8ae485803c 100755
+--- a/tensorflow/tools/pip_package/build_pip_package.sh
++++ b/tensorflow/tools/pip_package/build_pip_package.sh
+@@ -47,22 +47,6 @@ function cp_external() {
+ cp "${src_dir}/local_config_cuda/cuda/cuda/cuda_config.h" "${dest_dir}/local_config_cuda/cuda/cuda/"
+ }
+
+-function cp_local_config_python() {
+- local src_dir=$1
+- local dest_dir=$2
+- pushd .
+- cd "$src_dir"
+- mkdir -p "${dest_dir}/local_config_python/numpy_include/"
+- cp -r "pypi_numpy/site-packages/numpy/core/include/numpy" "${dest_dir}/local_config_python/numpy_include/"
+- mkdir -p "${dest_dir}/local_config_python/python_include/"
+- if is_windows; then
+- cp -r python_*/include/* "${dest_dir}/local_config_python/python_include/"
+- else
+- cp -r python_*/include/python*/* "${dest_dir}/local_config_python/python_include/"
+- fi
+- popd
+-}
+-
+ function copy_xla_aot_runtime_sources() {
+ local src_dir=$1
+ local dst_dir=$2
+@@ -179,9 +163,6 @@ function prepare_src() {
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+ "${EXTERNAL_INCLUDES}/"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles \
+- "${EXTERNAL_INCLUDES}/"
+ copy_xla_aot_runtime_sources \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.exe.runfiles/org_tensorflow \
+ "${XLA_AOT_RUNTIME_SOURCES}/"
+@@ -242,17 +223,11 @@ function prepare_src() {
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+ "${EXTERNAL_INCLUDES}"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow/external \
+- "${EXTERNAL_INCLUDES}"
+ else
+ # New-style runfiles structure (--nolegacy_external_runfiles).
+ cp_external \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+ "${EXTERNAL_INCLUDES}"
+- cp_local_config_python \
+- bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles \
+- "${EXTERNAL_INCLUDES}"
+ fi
+ copy_xla_aot_runtime_sources \
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package.runfiles/org_tensorflow \
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/files/tensorflow-2.15.0-0013-build-use-non-hermetic-python.patch b/sci-libs/tensorflow/files/tensorflow-2.15.0-0013-build-use-non-hermetic-python.patch
new file mode 100644
index 000000000000..e36a3a0617be
--- /dev/null
+++ b/sci-libs/tensorflow/files/tensorflow-2.15.0-0013-build-use-non-hermetic-python.patch
@@ -0,0 +1,990 @@
+From 0b56f871d620371e1deb66822aa8fb6df68dc568 Mon Sep 17 00:00:00 2001
+From: wangjiezhe <wangjiezhe@gmail.com>
+Date: Fri, 22 Dec 2023 21:23:14 +0800
+Subject: [PATCH 13/13] build: use non-hermetic python
+
+---
+ WORKSPACE | 69 -------------------
+ tensorflow/BUILD | 2 -
+ tensorflow/compiler/mlir/glob_lit_test.bzl | 1 -
+ tensorflow/compiler/mlir/tfr/BUILD | 2 -
+ tensorflow/dtensor/python/tests/BUILD | 1 -
+ tensorflow/lite/python/BUILD | 1 -
+ tensorflow/python/BUILD | 1 -
+ tensorflow/python/autograph/converters/BUILD | 11 ---
+ tensorflow/python/autograph/core/BUILD | 1 -
+ tensorflow/python/autograph/pyct/BUILD | 23 -------
+ .../autograph/pyct/common_transformers/BUILD | 2 -
+ .../autograph/pyct/static_analysis/BUILD | 6 --
+ .../python/autograph/pyct/testing/BUILD | 1 -
+ tensorflow/python/client/BUILD | 1 -
+ tensorflow/python/compiler/tensorrt/BUILD | 1 -
+ .../experimental/kernel_tests/service/BUILD | 1 -
+ tensorflow/python/data/util/BUILD | 2 -
+ tensorflow/python/debug/lib/BUILD | 1 -
+ tensorflow/python/distribute/BUILD | 5 --
+ .../python/distribute/experimental/rpc/BUILD | 1 -
+ .../python/distribute/failure_handling/BUILD | 2 -
+ tensorflow/python/eager/BUILD | 2 -
+ tensorflow/python/estimator/BUILD | 4 +-
+ tensorflow/python/framework/BUILD | 7 --
+ tensorflow/python/keras/BUILD | 1 -
+ tensorflow/python/keras/engine/BUILD | 1 -
+ tensorflow/python/keras/saving/BUILD | 1 -
+ tensorflow/python/ops/BUILD | 2 -
+ tensorflow/python/profiler/BUILD | 1 -
+ .../python/profiler/integration_test/BUILD | 1 -
+ tensorflow/python/summary/BUILD | 1 -
+ tensorflow/python/trackable/BUILD | 1 -
+ tensorflow/python/types/BUILD | 2 -
+ tensorflow/python/util/BUILD | 3 -
+ tensorflow/tools/docs/BUILD | 3 -
+ third_party/xla/xla/glob_lit_test.bzl | 5 --
+ third_party/xla/xla/mlir_hlo/tests/BUILD | 1 -
+ 37 files changed, 2 insertions(+), 169 deletions(-)
+
+diff --git a/WORKSPACE b/WORKSPACE
+index c10a2c4a482..3626ae4e805 100644
+--- a/WORKSPACE
++++ b/WORKSPACE
+@@ -11,75 +11,6 @@ http_archive(
+ ],
+ )
+
+-# We must initialize hermetic python first.
+-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+-
+-http_archive(
+- name = "bazel_skylib",
+- sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+- urls = [
+- "https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+- ],
+-)
+-
+-http_archive(
+- name = "rules_python",
+- sha256 = "9d04041ac92a0985e344235f5d946f71ac543f1b1565f2cdbc9a2aaee8adf55b",
+- strip_prefix = "rules_python-0.26.0",
+- url = "https://github.com/bazelbuild/rules_python/releases/download/0.26.0/rules_python-0.26.0.tar.gz",
+-)
+-
+-load("@rules_python//python:repositories.bzl", "py_repositories")
+-
+-py_repositories()
+-
+-load("@rules_python//python:repositories.bzl", "python_register_toolchains")
+-load(
+- "//tensorflow/tools/toolchains/python:python_repo.bzl",
+- "python_repository",
+-)
+-
+-python_repository(name = "python_version_repo")
+-
+-load("@python_version_repo//:py_version.bzl", "HERMETIC_PYTHON_VERSION")
+-
+-python_register_toolchains(
+- name = "python",
+- ignore_root_user_error = True,
+- python_version = HERMETIC_PYTHON_VERSION,
+-)
+-
+-load("@python//:defs.bzl", "interpreter")
+-load("@rules_python//python:pip.bzl", "package_annotation", "pip_parse")
+-
+-NUMPY_ANNOTATIONS = {
+- "numpy": package_annotation(
+- additive_build_content = """\
+-filegroup(
+- name = "includes",
+- srcs = glob(["site-packages/numpy/core/include/**/*.h"]),
+-)
+-cc_library(
+- name = "numpy_headers",
+- hdrs = [":includes"],
+- strip_include_prefix="site-packages/numpy/core/include/",
+-)
+-""",
+- ),
+-}
+-
+-pip_parse(
+- name = "pypi",
+- annotations = NUMPY_ANNOTATIONS,
+- python_interpreter_target = interpreter,
+- requirements = "//:requirements_lock_" + HERMETIC_PYTHON_VERSION.replace(".", "_") + ".txt",
+-)
+-
+-load("@pypi//:requirements.bzl", "install_deps")
+-
+-install_deps()
+-
+ # Initialize the TensorFlow repository and all dependencies.
+ #
+ # The cascade of load() statements and tf_workspace?() calls works around the
+diff --git a/tensorflow/BUILD b/tensorflow/BUILD
+index ffbe65fdc61..cd9d61797e8 100644
+--- a/tensorflow/BUILD
++++ b/tensorflow/BUILD
+@@ -1712,8 +1712,6 @@ py_library(
+ "//tensorflow/lite/python:lite",
+ "//tensorflow/lite/python/authoring",
+ "//tensorflow/python:no_contrib",
+- "@pypi_keras//:pkg",
+- "@pypi_tensorboard//:pkg",
+ ],
+ )
+ # copybara:comment_end
+diff --git a/tensorflow/compiler/mlir/glob_lit_test.bzl b/tensorflow/compiler/mlir/glob_lit_test.bzl
+index e689b4c0b31..f65c86b727b 100644
+--- a/tensorflow/compiler/mlir/glob_lit_test.bzl
++++ b/tensorflow/compiler/mlir/glob_lit_test.bzl
+@@ -58,7 +58,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ "@llvm-project//llvm:count",
+ "@llvm-project//llvm:not",
+ ],
+- deps = ["@pypi_lit//:pkg"],
+ size = size,
+ main = "lit.py",
+ exec_properties = exec_properties,
+diff --git a/tensorflow/compiler/mlir/tfr/BUILD b/tensorflow/compiler/mlir/tfr/BUILD
+index f8dfcd1c0a6..9f13d2eb068 100644
+--- a/tensorflow/compiler/mlir/tfr/BUILD
++++ b/tensorflow/compiler/mlir/tfr/BUILD
+@@ -387,7 +387,6 @@ py_strict_library(
+ "//tensorflow/python/framework:op_def_registry",
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -421,7 +420,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:transpiler",
+ "//tensorflow/python/framework:op_def_registry",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/dtensor/python/tests/BUILD b/tensorflow/dtensor/python/tests/BUILD
+index 498642cb5ff..e4482821869 100644
+--- a/tensorflow/dtensor/python/tests/BUILD
++++ b/tensorflow/dtensor/python/tests/BUILD
+@@ -326,7 +326,6 @@ pytype_strict_library(
+ ":test_util",
+ "//tensorflow/python/platform:client_testlib",
+ "@absl_py//absl/flags",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD
+index 3ac3bb20eba..c37bbc385f1 100644
+--- a/tensorflow/lite/python/BUILD
++++ b/tensorflow/lite/python/BUILD
+@@ -310,7 +310,6 @@ py_strict_test(
+ "//tensorflow/python/trackable:autotrackable",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_jax//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/BUILD b/tensorflow/python/BUILD
+index 9810f8acd2e..056534591a3 100644
+--- a/tensorflow/python/BUILD
++++ b/tensorflow/python/BUILD
+@@ -589,7 +589,6 @@ py_strict_library(
+ deps = [
+ ":keras_lib",
+ "//third_party/py/numpy",
+- "@pypi_scipy//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/autograph/converters/BUILD b/tensorflow/python/autograph/converters/BUILD
+index 5624f7611f3..11bc2cd9deb 100644
+--- a/tensorflow/python/autograph/converters/BUILD
++++ b/tensorflow/python/autograph/converters/BUILD
+@@ -13,7 +13,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/core:converter",
+ "//tensorflow/python/autograph/lang:directives",
+ "//tensorflow/python/autograph/pyct:templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -39,7 +38,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/core:converter",
+ "//tensorflow/python/autograph/pyct:parser",
+ "//tensorflow/python/autograph/pyct:templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -65,7 +63,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/core:converter",
+ "//tensorflow/python/autograph/pyct:parser",
+ "//tensorflow/python/autograph/pyct:templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -80,7 +77,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:qual_names",
+ "//tensorflow/python/autograph/pyct:templates",
+ "//tensorflow/python/autograph/utils:ag_logging",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -93,7 +89,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/lang:directives",
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -105,7 +100,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/core:converter",
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/autograph/pyct:templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -127,7 +121,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct/static_analysis:liveness",
+ "//tensorflow/python/autograph/pyct/static_analysis:reaching_definitions",
+ "//tensorflow/python/autograph/pyct/static_analysis:reaching_fndefs",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -143,7 +136,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:templates",
+ "//tensorflow/python/autograph/pyct/static_analysis:activity",
+ "//tensorflow/python/autograph/pyct/static_analysis:annos",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -160,7 +152,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:templates",
+ "//tensorflow/python/autograph/pyct/static_analysis:activity",
+ "//tensorflow/python/autograph/pyct/static_analysis:annos",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -176,7 +167,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:templates",
+ "//tensorflow/python/autograph/pyct/static_analysis:activity",
+ "//tensorflow/python/autograph/pyct/static_analysis:annos",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -187,7 +177,6 @@ py_strict_library(
+ deps = [
+ "//tensorflow/python/autograph/core:converter",
+ "//tensorflow/python/autograph/pyct:templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/autograph/core/BUILD b/tensorflow/python/autograph/core/BUILD
+index 46983ab39f0..3ccdc20293c 100644
+--- a/tensorflow/python/autograph/core/BUILD
++++ b/tensorflow/python/autograph/core/BUILD
+@@ -37,7 +37,6 @@ py_strict_library(
+ visibility = ["//tensorflow:__subpackages__"],
+ deps = [
+ "//tensorflow/python/autograph/pyct:errors",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/autograph/pyct/BUILD b/tensorflow/python/autograph/pyct/BUILD
+index 949d841e00c..31d4b026460 100644
+--- a/tensorflow/python/autograph/pyct/BUILD
++++ b/tensorflow/python/autograph/pyct/BUILD
+@@ -14,7 +14,6 @@ py_strict_library(
+ ":ast_util",
+ ":parser",
+ ":qual_names",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -32,7 +31,6 @@ py_strict_library(
+ ":templates",
+ ":transformer",
+ "//tensorflow/python/autograph/utils:ag_logging",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -44,7 +42,6 @@ py_strict_library(
+ ":anno",
+ ":parser",
+ ":qual_names",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -62,7 +59,6 @@ py_strict_library(
+ name = "gast_util",
+ srcs = ["gast_util.py"],
+ visibility = ["//visibility:public"],
+- deps = ["@pypi_gast//:pkg"],
+ )
+
+ py_strict_library(
+@@ -79,8 +75,6 @@ py_strict_library(
+ ":errors",
+ ":inspect_utils",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_astunparse//:pkg",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -108,7 +102,6 @@ py_strict_library(
+ ":parser",
+ ":pretty_printer",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -116,7 +109,6 @@ py_strict_library(
+ name = "anno",
+ srcs = ["anno.py"],
+ visibility = ["//visibility:public"],
+- deps = ["@pypi_gast//:pkg"],
+ )
+
+ py_strict_library(
+@@ -134,7 +126,6 @@ py_strict_library(
+ ":parser",
+ ":pretty_printer",
+ ":templates",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -145,7 +136,6 @@ py_strict_library(
+ deps = [
+ ":anno",
+ ":parser",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -155,8 +145,6 @@ py_strict_library(
+ visibility = ["//visibility:public"],
+ deps = [
+ ":anno",
+- "@pypi_astunparse//:pkg",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -180,10 +168,6 @@ py_strict_library(
+ name = "pretty_printer",
+ srcs = ["pretty_printer.py"],
+ visibility = ["//visibility:public"],
+- deps = [
+- "@pypi_gast//:pkg",
+- "@pypi_termcolor//:pkg",
+- ],
+ )
+
+ py_strict_test(
+@@ -210,7 +194,6 @@ py_strict_test(
+ ":pretty_printer",
+ ":qual_names",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -234,7 +217,6 @@ py_strict_test(
+ ":cfg",
+ ":parser",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -250,7 +232,6 @@ py_strict_test(
+ ":pretty_printer",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/util:tf_inspect",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -325,7 +306,6 @@ py_strict_test(
+ ":parser",
+ ":pretty_printer",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -365,7 +345,6 @@ py_strict_test(
+ ":templates",
+ "//tensorflow/python/platform:client_testlib",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -380,7 +359,6 @@ py_strict_test(
+ ":parser",
+ ":transformer",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -393,6 +371,5 @@ py_strict_test(
+ ":transformer",
+ ":transpiler",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/autograph/pyct/common_transformers/BUILD b/tensorflow/python/autograph/pyct/common_transformers/BUILD
+index 2be00498cf7..b9da2f210c9 100644
+--- a/tensorflow/python/autograph/pyct/common_transformers/BUILD
++++ b/tensorflow/python/autograph/pyct/common_transformers/BUILD
+@@ -16,7 +16,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:gast_util",
+ "//tensorflow/python/autograph/pyct:templates",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -32,6 +31,5 @@ py_strict_test(
+ "//tensorflow/python/autograph/pyct:parser",
+ "//tensorflow/python/autograph/pyct:transformer",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/autograph/pyct/static_analysis/BUILD b/tensorflow/python/autograph/pyct/static_analysis/BUILD
+index 4329523b056..9c643ccd63c 100644
+--- a/tensorflow/python/autograph/pyct/static_analysis/BUILD
++++ b/tensorflow/python/autograph/pyct/static_analysis/BUILD
+@@ -14,7 +14,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/autograph/pyct:cfg",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -26,7 +25,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/autograph/pyct:cfg",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -56,7 +54,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/autograph/pyct:qual_names",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -71,7 +68,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:cfg",
+ "//tensorflow/python/autograph/pyct:qual_names",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -83,7 +79,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct:anno",
+ "//tensorflow/python/autograph/pyct:cfg",
+ "//tensorflow/python/autograph/pyct:transformer",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+@@ -107,7 +102,6 @@ py_strict_test(
+ "//tensorflow/python/autograph/pyct:qual_names",
+ "//tensorflow/python/autograph/pyct:transformer",
+ "//tensorflow/python/platform:client_testlib",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/autograph/pyct/testing/BUILD b/tensorflow/python/autograph/pyct/testing/BUILD
+index 21a6775b0fb..fcac2065ca0 100644
+--- a/tensorflow/python/autograph/pyct/testing/BUILD
++++ b/tensorflow/python/autograph/pyct/testing/BUILD
+@@ -15,7 +15,6 @@ py_strict_library(
+ deps = [
+ "//tensorflow/python/autograph/pyct:templates",
+ "//third_party/py/numpy",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/client/BUILD b/tensorflow/python/client/BUILD
+index c13a6122f41..77bc522a758 100644
+--- a/tensorflow/python/client/BUILD
++++ b/tensorflow/python/client/BUILD
+@@ -294,7 +294,6 @@ py_strict_library(
+ "//tensorflow/python/util:nest",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/compiler/tensorrt/BUILD b/tensorflow/python/compiler/tensorrt/BUILD
+index f3ca24c9a4b..ee086bdaed2 100644
+--- a/tensorflow/python/compiler/tensorrt/BUILD
++++ b/tensorflow/python/compiler/tensorrt/BUILD
+@@ -69,7 +69,6 @@ py_strict_library(
+ "//tensorflow/python/util:nest",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_packaging//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/data/experimental/kernel_tests/service/BUILD b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+index cfac30fe0db..355a558bc93 100644
+--- a/tensorflow/python/data/experimental/kernel_tests/service/BUILD
++++ b/tensorflow/python/data/experimental/kernel_tests/service/BUILD
+@@ -143,7 +143,6 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:array_ops",
+ "//tensorflow/python/platform:client_testlib",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/data/util/BUILD b/tensorflow/python/data/util/BUILD
+index f7298a6f66b..cef29c3d992 100644
+--- a/tensorflow/python/data/util/BUILD
++++ b/tensorflow/python/data/util/BUILD
+@@ -94,7 +94,6 @@ py_strict_library(
+ "//tensorflow/python/util:deprecation",
+ "//tensorflow/python/util:nest_util",
+ "//tensorflow/python/util:tf_export",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+@@ -125,7 +124,6 @@ py_strict_test(
+ "//tensorflow/python/util:compat",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/debug/lib/BUILD b/tensorflow/python/debug/lib/BUILD
+index 37c99b30dd2..012e349dffc 100644
+--- a/tensorflow/python/debug/lib/BUILD
++++ b/tensorflow/python/debug/lib/BUILD
+@@ -596,7 +596,6 @@ py_strict_library(
+ "//tensorflow/python/lib/io:lib",
+ "//tensorflow/python/ops:variables",
+ "//tensorflow/python/util:compat",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/distribute/BUILD b/tensorflow/python/distribute/BUILD
+index 0c233b2b4a4..19b496f427a 100644
+--- a/tensorflow/python/distribute/BUILD
++++ b/tensorflow/python/distribute/BUILD
+@@ -523,7 +523,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/ops:variable_scope",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_dill//:pkg", # build_cleaner: keep
+ ],
+ )
+
+@@ -1440,7 +1439,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/ops:math_ops",
+ "//tensorflow/python/util:nest",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_dill//:pkg", # build_cleaner: keep
+ ],
+ )
+
+@@ -1764,7 +1762,6 @@ distribute_py_strict_test(
+ "//tensorflow/python/ops:variable_v1",
+ "//tensorflow/python/saved_model/model_utils:mode_keys",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+@@ -2295,8 +2292,6 @@ py_strict_library(
+ "//tensorflow/python/framework:test_lib",
+ "//tensorflow/python/util:tf_export",
+ "@absl_py//absl/logging",
+- "@pypi_dill//:pkg", # build_cleaner: keep
+- "@pypi_tblib//:pkg", # build_cleaner: keep
+ "@six_archive//:six",
+ ],
+ )
+diff --git a/tensorflow/python/distribute/experimental/rpc/BUILD b/tensorflow/python/distribute/experimental/rpc/BUILD
+index 3a3682cd6cf..8ed343db636 100644
+--- a/tensorflow/python/distribute/experimental/rpc/BUILD
++++ b/tensorflow/python/distribute/experimental/rpc/BUILD
+@@ -60,6 +60,5 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:variables",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/util:nest",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/distribute/failure_handling/BUILD b/tensorflow/python/distribute/failure_handling/BUILD
+index c9af884fbb7..3aaad5eced1 100644
+--- a/tensorflow/python/distribute/failure_handling/BUILD
++++ b/tensorflow/python/distribute/failure_handling/BUILD
+@@ -47,7 +47,6 @@ py_strict_library(
+ deps = [
+ "//tensorflow/python/eager:context",
+ "//tensorflow/python/platform:tf_logging",
+- "@pypi_requests//:pkg",
+ "@six_archive//:six",
+ ],
+ )
+@@ -134,7 +133,6 @@ tf_py_strict_test(
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/training:server_lib",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_dill//:pkg", # build_cleaner: keep
+ ],
+ )
+
+diff --git a/tensorflow/python/eager/BUILD b/tensorflow/python/eager/BUILD
+index e72f54c48fd..057bce876fd 100644
+--- a/tensorflow/python/eager/BUILD
++++ b/tensorflow/python/eager/BUILD
+@@ -1180,7 +1180,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/training:server_lib",
+ "//tensorflow/python/util:compat",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+@@ -1320,7 +1319,6 @@ py_strict_library(
+ "//tensorflow/python/autograph/pyct/static_analysis:reaching_fndefs",
+ "//tensorflow/python/framework:op_def_registry",
+ "//tensorflow/python/framework:ops",
+- "@pypi_gast//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/estimator/BUILD b/tensorflow/python/estimator/BUILD
+index 73afdd8b239..7e58789cee5 100644
+--- a/tensorflow/python/estimator/BUILD
++++ b/tensorflow/python/estimator/BUILD
+@@ -382,7 +382,7 @@ py_library(
+ ],
+ )
+
+-alias(
++py_library(
+ name = "expect_tensorflow_estimator_installed",
+- actual = "@pypi_tensorflow_estimator//:pkg",
++ srcs_version = "PY3",
+ )
+diff --git a/tensorflow/python/framework/BUILD b/tensorflow/python/framework/BUILD
+index 9bfb9d2d9dd..19b43eab9ac 100644
+--- a/tensorflow/python/framework/BUILD
++++ b/tensorflow/python/framework/BUILD
+@@ -279,7 +279,6 @@ py_strict_library(
+ "//tensorflow/python/eager:execute",
+ "//tensorflow/security/fuzzing/py:annotation_types",
+ "//tensorflow/tools/docs:doc_controls",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+@@ -365,7 +364,6 @@ py_strict_library(
+ "//tensorflow/python/util:deprecation",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_packaging//:pkg",
+ ] + if_xla_available([
+ "//tensorflow/python:_pywrap_tfcompile",
+ ]),
+@@ -1760,7 +1758,6 @@ py_strict_library(
+ deps = [
+ ":composite_tensor",
+ "//tensorflow/python/util:nest",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+@@ -1788,7 +1785,6 @@ py_strict_library(
+ "//tensorflow/python/util:nest",
+ "//tensorflow/python/util:tf_decorator",
+ "//tensorflow/python/util:tf_export",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+@@ -1932,7 +1928,6 @@ pytype_strict_library(
+ "//tensorflow/python/lib/io:lib",
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/util:compat",
+- "@pypi_packaging//:pkg",
+ ],
+ )
+
+@@ -2185,7 +2180,6 @@ py_strict_library(
+ "//tensorflow/python/util/protobuf",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+@@ -2781,7 +2775,6 @@ tf_py_strict_test(
+ "//tensorflow/python/util:nest",
+ "//tensorflow/python/util:tf_decorator",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/BUILD b/tensorflow/python/keras/BUILD
+index f9e31edae67..011cc3a73b8 100755
+--- a/tensorflow/python/keras/BUILD
++++ b/tensorflow/python/keras/BUILD
+@@ -44,7 +44,6 @@ py_library(
+ "//tensorflow/python/saved_model",
+ "//tensorflow/python/training",
+ "//tensorflow/python/util:nest",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/engine/BUILD b/tensorflow/python/keras/engine/BUILD
+index a24dc27b25f..1b95ca18d06 100644
+--- a/tensorflow/python/keras/engine/BUILD
++++ b/tensorflow/python/keras/engine/BUILD
+@@ -95,7 +95,6 @@ py_library(
+ "//tensorflow/python/util:tf_decorator",
+ "//tensorflow/python/util:tf_export",
+ "//tensorflow/tools/docs:doc_controls",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/keras/saving/BUILD b/tensorflow/python/keras/saving/BUILD
+index db1d2d814ca..b37abc164b3 100644
+--- a/tensorflow/python/keras/saving/BUILD
++++ b/tensorflow/python/keras/saving/BUILD
+@@ -55,6 +55,5 @@ py_library(
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/saved_model",
+ "//tensorflow/python/training:saver",
+- "@pypi_h5py//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/ops/BUILD b/tensorflow/python/ops/BUILD
+index b3a3d612a01..c242923f832 100644
+--- a/tensorflow/python/ops/BUILD
++++ b/tensorflow/python/ops/BUILD
+@@ -2780,7 +2780,6 @@ py_strict_library(
+ "//tensorflow/python/util:dispatch",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_opt_einsum//:pkg",
+ ],
+ )
+
+@@ -3872,7 +3871,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/platform:tf_logging",
+ "//third_party/py/numpy",
+ "@absl_py//absl/testing:parameterized",
+- "@pypi_opt_einsum//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/profiler/BUILD b/tensorflow/python/profiler/BUILD
+index b1cfd6ea10c..9413aeeab8b 100644
+--- a/tensorflow/python/profiler/BUILD
++++ b/tensorflow/python/profiler/BUILD
+@@ -43,7 +43,6 @@ cuda_py_strict_test(
+ "//tensorflow/python/eager:test",
+ "//tensorflow/python/framework:errors",
+ "//tensorflow/python/framework:test_lib",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/profiler/integration_test/BUILD b/tensorflow/python/profiler/integration_test/BUILD
+index b20698ea6ea..e7060e5a315 100644
+--- a/tensorflow/python/profiler/integration_test/BUILD
++++ b/tensorflow/python/profiler/integration_test/BUILD
+@@ -35,6 +35,5 @@ cuda_py_strict_test(
+ "//tensorflow/python/platform:tf_logging",
+ "//tensorflow/python/profiler:profiler_client",
+ "//tensorflow/python/profiler:profiler_v2",
+- "@pypi_portpicker//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/summary/BUILD b/tensorflow/python/summary/BUILD
+index 126fb6d31f7..b292e39356f 100644
+--- a/tensorflow/python/summary/BUILD
++++ b/tensorflow/python/summary/BUILD
+@@ -121,6 +121,5 @@ tf_py_strict_test(
+ "//tensorflow/python/ops:summary_ops_v2",
+ "//tensorflow/python/platform:client_testlib",
+ "//tensorflow/python/training:training_util",
+- "@pypi_tensorboard//:pkg",
+ ],
+ )
+diff --git a/tensorflow/python/trackable/BUILD b/tensorflow/python/trackable/BUILD
+index 30efc64b5fd..2e2390a6643 100644
+--- a/tensorflow/python/trackable/BUILD
++++ b/tensorflow/python/trackable/BUILD
+@@ -225,7 +225,6 @@ py_strict_library(
+ "//tensorflow/python/ops:variables",
+ "//tensorflow/python/util:compat",
+ "//tensorflow/python/util:tf_export",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/types/BUILD b/tensorflow/python/types/BUILD
+index c04dc039153..43a030a051e 100644
+--- a/tensorflow/python/types/BUILD
++++ b/tensorflow/python/types/BUILD
+@@ -24,7 +24,6 @@ pytype_strict_library(
+ "//tensorflow/python/util:_pywrap_utils",
+ "//tensorflow/python/util:tf_export",
+ "//third_party/py/numpy",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+@@ -71,7 +70,6 @@ pytype_strict_library(
+ ":core",
+ "//tensorflow/python/util:tf_export",
+ "//tensorflow/tools/docs:doc_controls",
+- "@pypi_typing_extensions//:pkg",
+ ],
+ )
+
+diff --git a/tensorflow/python/util/BUILD b/tensorflow/python/util/BUILD
+index d1e7d626a1d..248eb23bcfe 100644
+--- a/tensorflow/python/util/BUILD
++++ b/tensorflow/python/util/BUILD
+@@ -732,7 +732,6 @@ py_strict_library(
+ # py_test because not all tensorflow tests use tensorflow.bzl's py_test.
+ "//tensorflow/python:global_test_configuration",
+ "@six_archive//:six",
+- "@pypi_wrapt//:pkg",
+ "//tensorflow/python:pywrap_tensorflow",
+ ":_pywrap_utils",
+ ],
+@@ -893,7 +892,6 @@ py_strict_library(
+ # py_test because not all tensorflow tests use tensorflow.bzl's py_test.
+ "//tensorflow/python:global_test_configuration",
+ "//third_party/py/numpy",
+- "@pypi_wrapt//:pkg",
+ ],
+ )
+
+@@ -1029,7 +1027,6 @@ py_strict_library(
+ # py_test because not all tensorflow tests use tensorflow.bzl's py_test.
+ "//tensorflow/python:global_test_configuration",
+ ":tf_export",
+- "@pypi_wrapt//:pkg",
+ ":_pywrap_utils",
+ ":_pywrap_nest",
+ ],
+diff --git a/tensorflow/tools/docs/BUILD b/tensorflow/tools/docs/BUILD
+index aa9490cf911..48e45b23a2a 100644
+--- a/tensorflow/tools/docs/BUILD
++++ b/tensorflow/tools/docs/BUILD
+@@ -137,7 +137,6 @@ py_strict_library(
+ srcs = ["fenced_doctest_lib.py"],
+ deps = [
+ ":tf_doctest_lib",
+- "@pypi_astor//:pkg",
+ ],
+ )
+
+@@ -178,7 +177,6 @@ py_strict_test(
+ # copybara:uncomment "//third_party/py/tensorflow:tensorflow_estimator",
+ "//tensorflow:tensorflow_py",
+ "//tensorflow/python/platform:test",
+- "@pypi_packaging//:pkg",
+ ],
+ )
+
+@@ -214,7 +212,6 @@ py_strict_library(
+ "//tensorflow/python/util:tf_inspect",
+ "@absl_py//absl:app",
+ "@absl_py//absl/flags",
+- "@pypi_packaging//:pkg",
+ ],
+ )
+
+diff --git a/third_party/xla/xla/glob_lit_test.bzl b/third_party/xla/xla/glob_lit_test.bzl
+index 79b4adc3edd..5893eba46c2 100644
+--- a/third_party/xla/xla/glob_lit_test.bzl
++++ b/third_party/xla/xla/glob_lit_test.bzl
+@@ -52,10 +52,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ # can remove this logic. This is necessary to have these tests run on builds
+ # using Python 3.11, but also to not include `@pypi_lit` in standalone xla
+ # builds where it won't be found.
+- deps = []
+- if xla_root_dir == "tensorflow/compiler/xla/":
+- deps.append("@pypi_lit//:pkg")
+-
+ native.py_test(
+ name = name,
+ srcs = ["@llvm-project//llvm:lit"],
+@@ -69,7 +65,6 @@ def _run_lit_test(name, data, size, tags, driver, features, exec_properties):
+ "@llvm-project//llvm:count",
+ "@llvm-project//llvm:not",
+ ],
+- deps = deps,
+ size = size,
+ main = "lit.py",
+ exec_properties = exec_properties,
+diff --git a/third_party/xla/xla/mlir_hlo/tests/BUILD b/third_party/xla/xla/mlir_hlo/tests/BUILD
+index 89c6533956f..771d337be08 100644
+--- a/third_party/xla/xla/mlir_hlo/tests/BUILD
++++ b/third_party/xla/xla/mlir_hlo/tests/BUILD
+@@ -27,7 +27,6 @@ package(
+ tags = [
+ "nomsan", # The execution engine doesn't work with msan, see b/248097619.
+ ],
+- deps = ["@pypi_lit//:pkg"],
+ )
+ for src in glob(["**/*.mlir"])
+ ]
+--
+2.41.0
+
diff --git a/sci-libs/tensorflow/tensorflow-2.11.0.ebuild b/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
index f84685652a58..7307e3bc3c8d 100644
--- a/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.11.0.ebuild
@@ -79,7 +79,7 @@ SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
# abseil-cpp-20211102.0-r0 does not work with NVCC
RDEPEND="
app-arch/snappy
- >=dev-cpp/abseil-cpp-20211102-r2:=
+ =dev-cpp/abseil-cpp-20220623*:=
dev-db/lmdb
dev-db/sqlite
dev-libs/double-conversion
@@ -138,6 +138,7 @@ PDEPEND="python? (
BDEPEND="
app-arch/unzip
>=dev-build/bazel-5.1.1
+ <dev-build/bazel-6
>=dev-libs/protobuf-3.8.0
dev-java/java-config
cuda? (
@@ -197,6 +198,7 @@ src_prepare() {
bazel_setup_bazelrc
eapply "${WORKDIR}"/patches/*.patch
+ eapply "${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
# Relax version checks in setup.py
sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
diff --git a/sci-libs/tensorflow/tensorflow-2.12.0.ebuild b/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
index e5389e1f52a8..adc648f45814 100644
--- a/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
+++ b/sci-libs/tensorflow/tensorflow-2.12.0.ebuild
@@ -4,7 +4,7 @@
EAPI=8
DISTUTILS_OPTIONAL=1
-PYTHON_COMPAT=( python3_10 python3_11 )
+PYTHON_COMPAT=( python3_{10..11} )
MY_PV=${PV/_rc/-rc}
MY_P=${PN}-${MY_PV}
DEP_VER="$(ver_cut 1-2)"
@@ -82,7 +82,7 @@ SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
# abseil-cpp-20211102.0-r0 does not work with NVCC
RDEPEND="
app-arch/snappy
- >=dev-cpp/abseil-cpp-20211102-r2:=
+ =dev-cpp/abseil-cpp-20220623*:=
dev-db/lmdb
dev-db/sqlite
dev-libs/double-conversion
@@ -200,6 +200,7 @@ src_prepare() {
bazel_setup_bazelrc
eapply "${WORKDIR}"/patches/*.patch
+ eapply "${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
# Relax version checks in setup.py
sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
diff --git a/sci-libs/tensorflow/tensorflow-2.13.1.ebuild b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
new file mode 100644
index 000000000000..fb1e328f6e07
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.13.1.ebuild
@@ -0,0 +1,454 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+DEP_VER="$(ver_cut 1-2)"
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+RESTRICT="test" # Tests need GPU access
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+ IUSE+=" cpu_flags_x86_${i}"
+done
+
+# distfiles that bazel uses for the workspace, will be copied to basel-distdir
+# pkgcheck complains but do NOT change the .zip to .tar.gz, bazel requires the exact tarball (basename and sha256).
+# the build will fail if different archives are used.
+bazel_external_uris="
+ https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip
+ https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip
+ https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip
+ https://github.com/bazelbuild/apple_support/releases/download/1.1.0/apple_support.1.1.0.tar.gz
+ https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz
+ https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz
+ https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz
+ https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
+ https://github.com/bazelbuild/rules_apple/releases/download/1.0.1/rules_apple.1.0.1.tar.gz
+ https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz
+ https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+ https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+ https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
+ https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz
+ https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip
+ https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz
+ https://github.com/bazelbuild/rules_python/releases/download/0.0.1/rules_python-0.0.1.tar.gz -> bazelbuild-rules_python-0.0.1.tar.gz
+ https://github.com/bazelbuild/rules_swift/releases/download/1.0.0/rules_swift.1.0.0.tar.gz -> bazelbuild-rules_swift.1.0.0.tar.gz
+ https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz
+ https://github.com/google/XNNPACK/archive/b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip -> XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip
+ https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz
+ https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz
+ https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip
+ https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz
+ https://github.com/google/re2/archive/a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz -> re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz
+ https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip
+ https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz
+ https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
+ https://github.com/llvm/llvm-project/archive/dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz -> llvm-project-dc275fd03254d67d29cc70a5a0569acf24d2280d.tar.gz
+ https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
+ https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz
+ https://github.com/oneapi-src/oneDNN/archive/refs/tags/v2.7.3.tar.gz -> oneDNN-v2.7.3.tar.gz
+ https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.1.tar.gz -> oneDNN-v3.1.tar.gz
+ https://github.com/openxla/stablehlo/archive/43d81c6883ade82052920bd367c61f9e52f09954.zip -> openxla-stablehlo-43d81c6883ade82052920bd367c61f9e52f09954.zip
+ https://github.com/openxla/triton/archive/1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz -> openxla-triton-1627e0c27869b4098e5fa720717645c1baaf5972.tar.gz
+ https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
+ https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip
+ https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz
+ https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz
+ https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip
+ https://github.com/pytorch/cpuinfo/archive/3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz -> pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.tar.gz
+ https://github.com/pytorch/cpuinfo/archive/3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip -> pytorch-cpuinfo-3dc310302210c1891ffcfb12ae67b11a3ad3a150.zip
+ https://github.com/tensorflow/runtime/archive/7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz -> tensorflow-runtime-7d879c8b161085a4374ea481b93a52adb19c0529.tar.gz
+ https://gitlab.com/libeigen/eigen/-/archive/b0f877f8e01e90a5b0f3a79d46ea234899f8b499/eigen-b0f877f8e01e90a5b0f3a79d46ea234899f8b499.tar.gz
+ cuda? (
+ https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip
+ https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
+ https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz
+ )
+ python? (
+ https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz
+ https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+ )"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+# absl/log/check.h is needed in tensorflow-2.13.1, see
+# https://github.com/tensorflow/tensorflow/blob/v2.13.1/tensorflow/compiler/jit/xla_compile_on_demand_op.cc
+# which requires abseil-cpp>=20230125, see
+# https://github.com/abseil/abseil-cpp/commit/92fdbfb301f8b301b28ab5c99e7361e775c2fb8a
+
+# abseil-cpp need to compile with C++17
+# abseil-cpp>=20230125.3 in repo are built with C++14
+
+# check flatbuffers version in tensorflow/lite/schema/schema_generated.h
+
+# BDEPEND: >=dev-libs/protobuf-3.8.0
+ #>=dev-cpp/abseil-cpp-20230125.0:=
+RDEPEND="
+ app-arch/snappy
+ =dev-cpp/abseil-cpp-20230125.2*:=
+ dev-db/sqlite
+ dev-libs/double-conversion
+ dev-libs/icu:=
+ >=dev-libs/jsoncpp-1.9.2:=
+ >=dev-libs/nsync-1.25.0
+ dev-libs/openssl:0=
+ >=dev-libs/protobuf-3.13.0:=
+ >=dev-libs/re2-0.2019.06.01:=
+ media-libs/giflib
+ media-libs/libjpeg-turbo
+ media-libs/libpng:0
+ >=net-libs/grpc-1.28:=
+ net-misc/curl
+ sys-libs/zlib
+ >=sys-apps/hwloc-2:=
+ cuda? (
+ dev-util/nvidia-cuda-toolkit:=[profiler]
+ =dev-libs/cudnn-8*
+ )
+ mpi? ( virtual/mpi )
+ python? (
+ ${PYTHON_DEPS}
+ ~dev-libs/flatbuffers-23.1.21:=
+ dev-python/absl-py[${PYTHON_USEDEP}]
+ >=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+ dev-python/astunparse[${PYTHON_USEDEP}]
+ dev-python/clang-python[${PYTHON_USEDEP}]
+ dev-python/dill[${PYTHON_USEDEP}]
+ ~dev-python/flatbuffers-23.1.21[${PYTHON_USEDEP}]
+ >=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
+ dev-python/h5py[${PYTHON_USEDEP}]
+ >=dev-python/numpy-1.19[${PYTHON_USEDEP}]
+ >=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
+ >=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
+ >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+ dev-python/pybind11[${PYTHON_USEDEP}]
+ dev-python/six[${PYTHON_USEDEP}]
+ dev-python/tblib[${PYTHON_USEDEP}]
+ dev-python/termcolor[${PYTHON_USEDEP}]
+ dev-python/typing-extensions[${PYTHON_USEDEP}]
+ >=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
+ >=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+ >=net-libs/google-cloud-cpp-0.10.0
+ =sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}]
+ )"
+DEPEND="${RDEPEND}
+ python? (
+ dev-python/mock
+ dev-python/setuptools
+ )"
+PDEPEND="python? (
+ =sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}]
+ =sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}]
+ )"
+BDEPEND="
+ app-arch/unzip
+ =dev-build/bazel-5*
+ dev-java/java-config
+ cuda? (
+ >=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+ )
+ !python? ( dev-lang/python )
+ python? (
+ dev-python/cython
+ dev-python/mock
+ >=dev-python/grpcio-tools-1.28
+ )
+ dev-util/patchelf"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+PATCHES=(
+ "${FILESDIR}/tensorflow-2.13.0-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0006-systemlib-Update-targets-for-absl_py.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0007-systemlibs-Add-well_known_types_py_pb2-target.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0008-Relax-setup.py-version-requirements.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0009-fix-sparse-transpose-op2.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0010-systemlibs-update-targets-for-absl.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0011-systemlibs-update-targets-for-google_cloud_cpp.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0012-bump-cudnn-frontend-to-v0.9.patch"
+ "${FILESDIR}/tensorflow-2.13.0-0013-Fixing-build-issue-with-Clang-16-and-GCC-13.patch"
+)
+
+get-cpu-flags() {
+ local i f=()
+ # Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+ for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+ use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+ done
+ use cpu_flags_x86_fma3 && f+=( -mfma )
+ echo "${f[*]}"
+}
+
+pkg_setup() {
+ local num_pythons_enabled
+ num_pythons_enabled=0
+ count_impls() {
+ num_pythons_enabled=$((${num_pythons_enabled} + 1))
+ }
+ use python && python_foreach_impl count_impls
+
+ # 10G to build C/C++ libs, 6G per python impl
+ CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+ check-reqs_pkg_setup
+}
+
+src_unpack() {
+ # Only unpack the main distfile
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+
+ append-flags $(get-cpu-flags)
+ append-cxxflags -std=c++17
+ export BUILD_CXXFLAGS+=" -std=c++17"
+ filter-flags '-fvtable-verify=@(std|preinit)'
+ bazel_setup_bazelrc
+
+ # Relax version checks in setup.py
+ sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
+ # sed -i "/config_googleapis/d" tensorflow/workspace0.bzl || die
+
+ # Prefixify hard-coded command locations
+ hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
+
+ default
+ use python && python_copy_sources
+
+ use cuda && cuda_add_sandbox
+}
+
+# Run TensorFlow's interactive ./configure non-interactively by exporting
+# every answer as an environment variable, once per Python implementation
+# (or once with the system python when USE=-python).
+src_configure() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_configure() {
+		# Optimization flags are already in CFLAGS/CXXFLAGS; keep this blank.
+		export CC_OPT_FLAGS=" "
+		export TF_ENABLE_XLA=$(usex xla 1 0)
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_ROCM=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+
+		if use python; then
+			export PYTHON_BIN_PATH="${PYTHON}"
+			export PYTHON_LIB_PATH="$(python_get_sitedir)"
+		else
+			# NOTE(review): distutils is deprecated/removed in newer Pythons;
+			# this fallback path presumably only matters for USE=-python builds.
+			export PYTHON_BIN_PATH="$(which python)"
+			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+		fi
+
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_CUDA_CLANG=0
+		export TF_NEED_TENSORRT=0 # $(usex cuda 1 0)
+		if use cuda; then
+			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
+			# Use the newest GCC supported by the installed CUDA toolkit.
+			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+			einfo "Setting CUDA version: $TF_CUDA_VERSION"
+			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+
+			# Warn when the default compiler is not on CUDA's supported list.
+			if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
+				ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
+				ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
+				ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
+				ewarn "If the build fails with linker errors try rebuilding the relevant"
+				ewarn "dependencies using the same compiler version."
+			fi
+
+			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
+				ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+				ewarn "These may not be optimal for your GPU."
+				ewarn ""
+				ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
+				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
+				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
+				ewarn ""
+				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+			fi
+		fi
+
+		# Workspace repos to unbundle and take from the system instead.
+		# com_googlesource_code_re2 weird branch using absl, doesnt work with released re2
+		# com_github_googleapis_googleapis
+		# com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593
+		local SYSLIBS=(
+			absl_py
+			astor_archive
+			astunparse_archive
+			boringssl
+			com_github_googlecloudplatform_google_cloud_cpp
+			com_github_grpc_grpc
+			com_google_absl
+			# com_google_protobuf
+			curl
+			cython
+			dill_archive
+			double_conversion
+			flatbuffers
+			functools32_archive
+			gast_archive
+			gif
+			hwloc
+			icu
+			jsoncpp_git
+			libjpeg_turbo
+			nasm
+			nsync
+			opt_einsum_archive
+			org_sqlite
+			pasta
+			png
+			pybind11
+			six_archive
+			snappy
+			tblib_archive
+			termcolor_archive
+			typing_extensions_archive
+			wrapt
+			zlib
+		)
+
+		# Scalar assignment joins the array elements with spaces.
+		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+		export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+		# This is not autoconf
+		./configure || die
+
+		# Extra build settings appended to the per-impl .bazelrc.
+		echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die
+		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+		echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+
+		# Propagate system jsoncpp include flags to both target and host builds.
+		for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
+		do
+			echo "build --copt=\"${cflag}\"" >> .bazelrc || die
+			echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
+		done
+	}
+	if use python; then
+		python_foreach_impl run_in_build_dir do_configure
+	else
+		do_configure
+	fi
+}
+
+# Build the C/C++ libraries and headers once (in the first enabled
+# implementation's copy when USE=python), then build the pip package in
+# every implementation's copy.
+src_compile() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	if use python; then
+		python_setup
+		BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	# fail early if any deps are missing
+	# (-k --nobuild analyzes the full graph without compiling anything)
+	ebazel build -k --nobuild \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so \
+		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+	ebazel build \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so
+	ebazel build //tensorflow:libtensorflow_cc.so
+	ebazel build //tensorflow:install_headers
+	# Shut the bazel server down so the next build dir starts clean.
+	ebazel shutdown
+
+	do_compile() {
+		ebazel build //tensorflow/tools/pip_package:build_pip_package
+		ebazel shutdown
+	}
+	BUILD_DIR="${S}"
+	cd "${BUILD_DIR}" || die
+	use python && python_foreach_impl run_in_build_dir do_compile
+}
+
+# Install the pip package per Python implementation, then the headers,
+# pkg-config files and shared libraries from the last build dir.
+src_install() {
+	local i l
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+		mkdir -p "${srcdir}" || die
+		# --src dumps the assembled package tree instead of building a wheel.
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# libtensorflow_framework.so and libtensorflow_cc.so is in /usr/lib already
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die
+		python_optimize
+	}
+
+	if use python; then
+		python_foreach_impl run_in_build_dir do_install
+
+		# Symlink to python-exec scripts
+		# NOTE(review): ${n} is not declared local (only i and l are).
+		for i in "${ED}"/usr/lib/python-exec/*/*; do
+			n="${i##*/}"
+			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+		done
+
+		python_setup
+		local BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	einfo "Installing headers"
+	insinto /usr/include/${PN}/
+	doins -r bazel-bin/tensorflow/include/*
+
+	einfo "Installing libs"
+	# Generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc ${PN}_cc.pc
+
+	# NOTE(review): the rpath is added unconditionally, even with USE=-cuda —
+	# presumably harmless for non-CUDA builds, but confirm.
+	for l in libtensorflow{,_framework,_cc}.so; do
+		patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+	done
+
+	einstalldocs
+
+	# Workaround for https://bugs.gentoo.org/831927
+	export MAKEOPTS="-j1"
+}
diff --git a/sci-libs/tensorflow/tensorflow-2.14.1.ebuild b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
new file mode 100644
index 000000000000..83e63834a518
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.14.1.ebuild
@@ -0,0 +1,447 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+# distutils-r1 is used only for the optional Python bindings.
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python3_{10..11} )
+# Upstream tags release candidates as X.Y.Z-rcN.
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+# Major.minor version used to pin keras/estimator/tensorboard deps.
+DEP_VER="$(ver_cut 1-2)"
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic prefix toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+RESTRICT="test" # Tests need GPU access
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+# Expand every supported SIMD feature into a cpu_flags_x86_* USE flag.
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+	IUSE+=" cpu_flags_x86_${i}"
+done
+
+# distfiles that bazel uses for the workspace, will be copied to basel-distdir
+# pkgcheck complains but do NOT change the .zip to .tar.gz, bazel requires the exact tarball (basename and sha256).
+# the build will fail if different archives are used.
+bazel_external_uris="
+ https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip
+ https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip
+ https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip
+ https://github.com/bazelbuild/apple_support/releases/download/1.6.0/apple_support.1.6.0.tar.gz
+ https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz
+ https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz
+ https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz
+ https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
+ https://github.com/bazelbuild/rules_apple/releases/download/2.3.0/rules_apple.2.3.0.tar.gz
+ https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz
+ https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+ https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+ https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
+ https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz
+ https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip
+ https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz
+ https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz -> bazelbuild-rules_python-0.1.0.tar.gz
+ https://github.com/bazelbuild/rules_swift/releases/download/1.0.0/rules_swift.1.0.0.tar.gz -> bazelbuild-rules_swift.1.0.0.tar.gz
+ https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz
+ https://github.com/google/XNNPACK/archive/b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip -> XNNPACK-b9d4073a6913891ce9cbd8965c8d506075d2a45a.zip
+ https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz
+ https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz
+ https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip
+ https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz
+ https://github.com/google/re2/archive/03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz -> re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz
+ https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip
+ https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz
+ https://github.com/jax-ml/ml_dtypes/archive/5b9fc9ad978757654843f4a8d899715dbea30e88/ml_dtypes-5b9fc9ad978757654843f4a8d899715dbea30e88.tar.gz
+ https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
+ https://github.com/llvm/llvm-project/archive/668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz -> llvm-project-668e33c6401abe7844691fb7d47a3cf2d2012dbc.tar.gz
+ https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
+ https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz
+ https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.2.1.tar.gz -> oneDNN-v3.2.1.tar.gz
+ https://github.com/openxla/stablehlo/archive/9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip -> openxla-stablehlo-9ae6c373a6e2941ff84a8831bb3724728cb2b49a.zip
+ https://github.com/openxla/triton/archive/cl546794996.tar.gz -> openxla-triton-cl546794996.tar.gz
+ https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
+ https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip
+ https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz
+ https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz
+ https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip
+ https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.tar.gz -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.tar.gz
+ https://github.com/pytorch/cpuinfo/archive/87d8234510367db49a65535021af5e1838a65ac2.zip -> pytorch-cpuinfo-87d8234510367db49a65535021af5e1838a65ac2.zip
+ https://github.com/tensorflow/runtime/archive/769f5cc9b8732933140b09e8808d13614182b496.tar.gz -> tensorflow-runtime-769f5cc9b8732933140b09e8808d13614182b496.tar.gz
+ https://gitlab.com/libeigen/eigen/-/archive/0b51f763cbbd0ed08168f88972724329f0375498/eigen-0b51f763cbbd0ed08168f88972724329f0375498.tar.gz
+ cuda? (
+ https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip
+ https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
+ https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz
+ )
+ python? (
+ https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz
+ https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+ )"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+# abseil-cpp-20211102.0-r0 does not work with NVCC
+# check flatbuffers version in tensorflow/lite/schema/schema_generated.h
+RDEPEND="
+	app-arch/snappy
+	=dev-cpp/abseil-cpp-20230125.2*:=
+	dev-db/sqlite
+	dev-libs/double-conversion
+	dev-libs/icu:=
+	>=dev-libs/jsoncpp-1.9.2:=
+	>=dev-libs/nsync-1.25.0
+	dev-libs/openssl:0=
+	>=dev-libs/protobuf-3.13.0:=
+	>=dev-libs/re2-0.2019.06.01:=
+	media-libs/giflib
+	media-libs/libjpeg-turbo
+	media-libs/libpng:0
+	>=net-libs/grpc-1.28:=
+	net-misc/curl
+	sys-libs/zlib
+	>=sys-apps/hwloc-2:=
+	cuda? (
+		dev-util/nvidia-cuda-toolkit:=[profiler]
+		=dev-libs/cudnn-8*
+	)
+	mpi? ( virtual/mpi )
+	python? (
+		${PYTHON_DEPS}
+		~dev-libs/flatbuffers-23.5.26:=
+		dev-python/absl-py[${PYTHON_USEDEP}]
+		>=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+		dev-python/astunparse[${PYTHON_USEDEP}]
+		dev-python/clang-python[${PYTHON_USEDEP}]
+		dev-python/dill[${PYTHON_USEDEP}]
+		~dev-python/flatbuffers-23.5.26[${PYTHON_USEDEP}]
+		>=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
+		dev-python/h5py[${PYTHON_USEDEP}]
+		<dev-python/ml_dtypes-0.3.0[${PYTHON_USEDEP}]
+		>=dev-python/numpy-1.19[${PYTHON_USEDEP}]
+		>=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
+		>=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
+		>=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+		dev-python/pybind11[${PYTHON_USEDEP}]
+		dev-python/six[${PYTHON_USEDEP}]
+		dev-python/tblib[${PYTHON_USEDEP}]
+		dev-python/termcolor[${PYTHON_USEDEP}]
+		dev-python/typing-extensions[${PYTHON_USEDEP}]
+		>=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
+		>=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+		>=net-libs/google-cloud-cpp-0.10.0
+		=sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+DEPEND="${RDEPEND}
+	python? (
+		dev-python/mock
+		dev-python/setuptools
+	)"
+# keras/estimator depend back on tensorflow, hence PDEPEND to break the cycle.
+PDEPEND="python? (
+	=sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}]
+	=sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}]
+	)"
+# >=dev-libs/protobuf-3.8.0
+BDEPEND="
+	app-arch/unzip
+	=dev-build/bazel-6*
+	<dev-build/bazel-6.3
+	dev-java/java-config
+	cuda? (
+		>=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+	)
+	!python? ( dev-lang/python )
+	python? (
+		dev-python/cython
+		dev-python/mock
+		>=dev-python/grpcio-tools-1.28
+	)
+	dev-util/patchelf"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+# Baseline requirements; disk is rescaled per enabled impl in pkg_setup.
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+# Gentoo patches, applied in order by default() in src_prepare.
+PATCHES=(
+	"${FILESDIR}/${P}-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+	"${FILESDIR}/${P}-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch"
+	"${FILESDIR}/${P}-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch"
+	"${FILESDIR}/${P}-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch"
+	"${FILESDIR}/${P}-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch"
+	"${FILESDIR}/${P}-0006-systemlib-Update-targets-for-absl_py.patch"
+	"${FILESDIR}/${P}-0007-systemlib-Add-well_known_types_py_pb2-target.patch"
+	"${FILESDIR}/${P}-0008-Relax-setup.py-version-requirements.patch"
+	"${FILESDIR}/${P}-0009-systemlib-update-targets-for-absl.patch"
+	"${FILESDIR}/${P}-0010-systemlib-fix-missing-osx-in-pybind11.patch"
+	"${FILESDIR}/${P}-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch"
+	"${FILESDIR}/${P}-0012-build-use-non-hermetic-python.patch"
+	"${FILESDIR}/${P}-0013-installation-remove-cp_local_config_python.patch"
+	"${FILESDIR}/${P}-0014-Fixing-build-issue-with-Clang-16.patch"
+)
+
+# Translate enabled cpu_flags_x86_* USE flags into -m<flag> compiler
+# options and print them space-separated on stdout.
+get-cpu-flags() {
+	local i f=()
+	# Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+	# ${i/_/.} turns the USE spelling (sse4_1) into the GCC spelling (-msse4.1).
+	for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+		use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+	done
+	# fma3 maps to plain -mfma, so it cannot go through the loop above.
+	use cpu_flags_x86_fma3 && f+=( -mfma )
+	echo "${f[*]}"
+}
+
+# Scale the check-reqs build-disk requirement with the number of enabled
+# Python implementations, then run the standard check-reqs setup.
+pkg_setup() {
+	local num_pythons_enabled
+	num_pythons_enabled=0
+	# Invoked once per enabled implementation by python_foreach_impl.
+	count_impls() {
+		num_pythons_enabled=$((${num_pythons_enabled} + 1))
+	}
+	use python && python_foreach_impl count_impls
+
+	# 10G to build C/C++ libs, 6G per python impl
+	CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+	check-reqs_pkg_setup
+}
+
+# Unpack only the TensorFlow source tarball; the workspace archives in
+# ${bazel_external_uris} are staged into bazel's distdir instead of
+# being extracted here.
+src_unpack() {
+	# Only unpack the main distfile
+	unpack "${P}.tar.gz"
+	bazel_load_distfiles "${bazel_external_uris}"
+}
+
+# Prepare sources: set compiler flags, relax pip version pins, prefixify
+# hard-coded paths, apply PATCHES (via default), and copy the tree once
+# per enabled Python implementation.
+src_prepare() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	# NOTE(review): EPYTHON may not be set this early in a multi-impl build;
+	# confirm TF_PYTHON_VERSION gets the intended value here.
+	export TF_PYTHON_VERSION="${EPYTHON/python/}"
+
+	append-flags $(get-cpu-flags)
+	append-cxxflags -std=c++17
+	export BUILD_CXXFLAGS+=" -std=c++17"
+	# -fvtable-verify is incompatible with this build; strip it from user flags.
+	filter-flags '-fvtable-verify=@(std|preinit)'
+	bazel_setup_bazelrc
+
+	# Relax version checks in setup.py
+	sed -i "/^    '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
+
+	# Prefixify hard-coded command locations
+	hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
+
+	# default applies the PATCHES array and user patches.
+	default
+	# One full source copy per Python implementation (multibuild layout).
+	use python && python_copy_sources
+
+	# Allow the sandboxed build to access the CUDA devices.
+	use cuda && cuda_add_sandbox
+}
+
+# Run TensorFlow's interactive ./configure non-interactively by exporting
+# every answer as an environment variable, once per Python implementation
+# (or once with the system python when USE=-python).
+src_configure() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_configure() {
+		# Optimization flags are already in CFLAGS/CXXFLAGS; keep this blank.
+		export CC_OPT_FLAGS=" "
+		export TF_ENABLE_XLA=$(usex xla 1 0)
+		export TF_NEED_OPENCL_SYCL=0
+		export TF_NEED_OPENCL=0
+		export TF_NEED_COMPUTECPP=0
+		export TF_NEED_ROCM=0
+		export TF_NEED_MPI=$(usex mpi 1 0)
+		export TF_SET_ANDROID_WORKSPACE=0
+
+		if use python; then
+			export PYTHON_BIN_PATH="${PYTHON}"
+			export PYTHON_LIB_PATH="$(python_get_sitedir)"
+		else
+			# NOTE(review): distutils is deprecated/removed in newer Pythons;
+			# this fallback path presumably only matters for USE=-python builds.
+			export PYTHON_BIN_PATH="$(which python)"
+			export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+		fi
+
+		export TF_NEED_CUDA=$(usex cuda 1 0)
+		export TF_DOWNLOAD_CLANG=0
+		export TF_CUDA_CLANG=0
+		export TF_NEED_TENSORRT=0 # $(usex cuda 1 0)
+		if use cuda; then
+			export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
+			# Use the newest GCC supported by the installed CUDA toolkit.
+			export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+			export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+			export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+			einfo "Setting CUDA version: $TF_CUDA_VERSION"
+			einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+
+			# Warn when the default compiler is not on CUDA's supported list.
+			if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
+				ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
+				ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
+				ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
+				ewarn "If the build fails with linker errors try rebuilding the relevant"
+				ewarn "dependencies using the same compiler version."
+			fi
+
+			if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
+				ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+				ewarn "These may not be optimal for your GPU."
+				ewarn ""
+				ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
+				ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
+				ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
+				ewarn ""
+				ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+				ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+			fi
+		fi
+
+		# Workspace repos to unbundle and take from the system instead.
+		# com_googlesource_code_re2 weird branch using absl, doesnt work with released re2
+		# com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593
+		local SYSLIBS=(
+			absl_py
+			astor_archive
+			astunparse_archive
+			boringssl
+			com_github_googlecloudplatform_google_cloud_cpp
+			com_github_grpc_grpc
+			com_google_absl
+			# com_google_protobuf
+			curl
+			cython
+			dill_archive
+			double_conversion
+			flatbuffers
+			functools32_archive
+			gast_archive
+			gif
+			hwloc
+			icu
+			jsoncpp_git
+			libjpeg_turbo
+			nasm
+			nsync
+			opt_einsum_archive
+			org_sqlite
+			pasta
+			png
+			pybind11
+			six_archive
+			snappy
+			tblib_archive
+			termcolor_archive
+			typing_extensions_archive
+			wrapt
+			zlib
+		)
+
+		# Scalar assignment joins the array elements with spaces.
+		export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+		export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+		# This is not autoconf
+		./configure || die
+
+		# Extra build settings appended to the per-impl .bazelrc.
+		echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die
+		echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+		echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+		echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+
+		# Propagate system jsoncpp include flags to both target and host builds.
+		for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
+		do
+			echo "build --copt=\"${cflag}\"" >> .bazelrc || die
+			echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
+		done
+	}
+	if use python; then
+		python_foreach_impl run_in_build_dir do_configure
+	else
+		do_configure
+	fi
+}
+
+# Build the C/C++ libraries and headers once (in the first enabled
+# implementation's copy when USE=python), then build the pip package in
+# every implementation's copy.
+src_compile() {
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	if use python; then
+		python_setup
+		BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	# fail early if any deps are missing
+	# (-k --nobuild analyzes the full graph without compiling anything)
+	ebazel build -k --nobuild \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so \
+		//tensorflow:libtensorflow_cc.so \
+		$(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+	ebazel build \
+		//tensorflow:libtensorflow_framework.so \
+		//tensorflow:libtensorflow.so
+	ebazel build //tensorflow:libtensorflow_cc.so
+	ebazel build //tensorflow:install_headers
+	# Shut the bazel server down so the next build dir starts clean.
+	ebazel shutdown
+
+	do_compile() {
+		ebazel build //tensorflow/tools/pip_package:build_pip_package
+		ebazel shutdown
+	}
+	BUILD_DIR="${S}"
+	cd "${BUILD_DIR}" || die
+	use python && python_foreach_impl run_in_build_dir do_compile
+}
+
+# Install the pip package per Python implementation, then the headers,
+# pkg-config files and shared libraries from the last build dir.
+src_install() {
+	local i l
+	export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+	export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+	do_install() {
+		einfo "Installing ${EPYTHON} files"
+		local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+		mkdir -p "${srcdir}" || die
+		# --src dumps the assembled package tree instead of building a wheel.
+		bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+		cd "${srcdir}" || die
+		esetup.py install
+
+		# libtensorflow_framework.so and libtensorflow_cc.so is in /usr/lib already
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
+		rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die
+		python_optimize
+	}
+
+	if use python; then
+		python_foreach_impl run_in_build_dir do_install
+
+		# Symlink to python-exec scripts
+		# NOTE(review): ${n} is not declared local (only i and l are).
+		for i in "${ED}"/usr/lib/python-exec/*/*; do
+			n="${i##*/}"
+			[[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+		done
+
+		python_setup
+		local BUILD_DIR="${S}-${EPYTHON/./_}"
+		cd "${BUILD_DIR}" || die
+	fi
+
+	einfo "Installing headers"
+	insinto /usr/include/${PN}/
+	doins -r bazel-bin/tensorflow/include/*
+
+	einfo "Installing libs"
+	# Generate pkg-config file
+	${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+	insinto /usr/$(get_libdir)/pkgconfig
+	doins ${PN}.pc ${PN}_cc.pc
+
+	# NOTE(review): the rpath is added unconditionally, even with USE=-cuda —
+	# presumably harmless for non-CUDA builds, but confirm.
+	for l in libtensorflow{,_framework,_cc}.so; do
+		patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+		dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+	done
+
+	einstalldocs
+
+	# Workaround for https://bugs.gentoo.org/831927
+	export MAKEOPTS="-j1"
+}
diff --git a/sci-libs/tensorflow/tensorflow-2.15.0.ebuild b/sci-libs/tensorflow/tensorflow-2.15.0.ebuild
new file mode 100644
index 000000000000..77539bf5c9a4
--- /dev/null
+++ b/sci-libs/tensorflow/tensorflow-2.15.0.ebuild
@@ -0,0 +1,464 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+DISTUTILS_OPTIONAL=1
+PYTHON_COMPAT=( python3_{10..11} )
+MY_PV=${PV/_rc/-rc}
+MY_P=${PN}-${MY_PV}
+DEP_VER="$(ver_cut 1-2)"
+
+inherit bazel check-reqs cuda distutils-r1 flag-o-matic multibuild prefix toolchain-funcs
+
+DESCRIPTION="Computation framework using data flow graphs for scalable machine learning"
+HOMEPAGE="https://www.tensorflow.org/"
+
+RESTRICT="test" # Tests need GPU access
+LICENSE="Apache-2.0"
+SLOT="0"
+KEYWORDS="~amd64"
+IUSE="cuda mpi +python xla"
+CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4"
+for i in $CPU_USE_FLAGS_X86; do
+ IUSE+=" cpu_flags_x86_${i}"
+done
+
+# distfiles that bazel uses for the workspace, will be copied to bazel-distdir
+# pkgcheck complains but do NOT change the .zip to .tar.gz, bazel requires the exact tarball (basename and sha256).
+# the build will fail if different archives are used.
+bazel_external_uris="
+ https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip
+ https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip
+ https://github.com/Maratyszcza/pthreadpool/archive/4fe0e1e183925bf8cfa6aae24237e724a96479b8.zip -> pthreadpool-4fe0e1e183925bf8cfa6aae24237e724a96479b8.zip
+ https://github.com/bazelbuild/apple_support/releases/download/1.6.0/apple_support.1.6.0.tar.gz
+ https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz
+ https://github.com/bazelbuild/bazel-toolchains/archive/8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz -> bazel-toolchains-8c717f8258cd5f6c7a45b97d974292755852b658.tar.gz
+ https://github.com/bazelbuild/platforms/releases/download/0.0.6/platforms-0.0.6.tar.gz -> bazelbuild-platforms-0.0.6.tar.gz
+ https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip
+ https://github.com/bazelbuild/rules_apple/releases/download/2.3.0/rules_apple.2.3.0.tar.gz
+ https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz
+ https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz
+ https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz
+ https://github.com/bazelbuild/rules_foreign_cc/archive/0.7.1.tar.gz -> bazelbuild-rules_foreign_cc-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip
+ https://github.com/bazelbuild/rules_java/releases/download/5.5.1/rules_java-5.5.1.tar.gz -> bazelbuild-rules_java-5.5.1.tar.gz
+ https://github.com/bazelbuild/rules_jvm_external/archive/4.3.zip -> bazelbuild-rules_jvm_external-4.3.zip
+ https://github.com/bazelbuild/rules_pkg/releases/download/0.7.1/rules_pkg-0.7.1.tar.gz -> bazelbuild-rules_pkg-0.7.1.tar.gz
+ https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz
+ https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz -> bazelbuild-rules_python-0.1.0.tar.gz
+ https://github.com/bazelbuild/rules_swift/releases/download/1.5.0/rules_swift.1.5.0.tar.gz -> bazelbuild-rules_swift.1.5.0.tar.gz
+ https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz
+ https://github.com/facebook/zstd/archive/v1.4.5.zip -> zstd-v1.4.5.zip
+ https://github.com/google/XNNPACK/archive/bbbaa7352a3ea729987d3e654d37be93e8009691.zip -> XNNPACK-bbbaa7352a3ea729987d3e654d37be93e8009691.zip
+ https://github.com/google/benchmark/archive/f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz -> benchmark-f7547e29ccaed7b64ef4f7495ecfff1c9f6f3d03.tar.gz
+ https://github.com/google/brotli/archive/3914999fcc1fda92e750ef9190aa6db9bf7bdb07.zip -> brotli-3914999fcc1fda92e750ef9190aa6db9bf7bdb07.zip
+ https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz
+ https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip
+ https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz
+ https://github.com/google/re2/archive/03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz -> re2-03da4fc0857c285e3a26782f6bc8931c4c950df4.tar.gz
+ https://github.com/google/riegeli/archive/264ef7b4a1314d97265b37544b27cd3923ea72d2.zip -> riegeli-264ef7b4a1314d97265b37544b27cd3923ea72d2.zip
+ https://github.com/google/ruy/archive/3286a34cc8de6149ac6844107dfdffac91531e72.zip -> ruy-3286a34cc8de6149ac6844107dfdffac91531e72.zip
+ https://github.com/googleapis/googleapis/archive/6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz -> googleapis-6b3fdcea8bc5398be4e7e9930c693f0ea09316a0.tar.gz
+ https://github.com/jax-ml/ml_dtypes/archive/2ca30a2b3c0744625ae3d6988f5596740080bbd0/ml_dtypes-2ca30a2b3c0744625ae3d6988f5596740080bbd0.tar.gz
+ https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz
+ https://github.com/llvm/llvm-project/archive/49cb1595c1b3ae1de3684fea6148363c15bae12a.tar.gz -> llvm-project-49cb1595c1b3ae1de3684fea6148363c15bae12a.tar.gz
+ https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz
+ https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz
+ https://github.com/oneapi-src/oneDNN/archive/refs/tags/v3.2.1.tar.gz -> oneDNN-v3.2.1.tar.gz
+ https://github.com/openxla/stablehlo/archive/78f57e34a25367ef3192cd35da36b01c763f4ecf.zip -> openxla-stablehlo-78f57e34a25367ef3192cd35da36b01c763f4ecf.zip
+ https://github.com/openxla/triton/archive/cl555471166.tar.gz -> openxla-triton-cl555471166.tar.gz
+ https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz
+ https://github.com/protocolbuffers/protobuf/archive/v3.21.9.zip -> protobuf-3.21.9.zip
+ https://github.com/pybind/pybind11_abseil/archive/2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz -> pybind11_abseil-2c4932ed6f6204f1656e245838f4f5eae69d2e29.tar.gz
+ https://github.com/pybind/pybind11_bazel/archive/72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz -> pybind11_bazel-72cbbf1fbc830e487e3012862b7b720001b70672.tar.gz
+ https://github.com/pybind/pybind11_protobuf/archive/80f3440cd8fee124e077e2e47a8a17b78b451363.zip -> pybind11_protobuf-80f3440cd8fee124e077e2e47a8a17b78b451363.zip
+ https://github.com/pytorch/cpuinfo/archive/5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz -> pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz
+ https://github.com/pytorch/cpuinfo/archive/959002f82d7962a473d8bf301845f2af720e0aa4.zip -> pytorch-cpuinfo-959002f82d7962a473d8bf301845f2af720e0aa4.zip
+ https://github.com/tensorflow/runtime/archive/70637966e2ec9afccc2cf4d51ed2391172b1b9c5.tar.gz -> tensorflow-runtime-70637966e2ec9afccc2cf4d51ed2391172b1b9c5.tar.gz
+ https://github.com/yugr/Implib.so/archive/5fb84c2a750434b9df1da67d67b749eb929598f1.tar.gz -> Implib.so-5fb84c2a750434b9df1da67d67b749eb929598f1.tar.gz
+ https://gitlab.com/libeigen/eigen/-/archive/66e8f38891841bf88ee976a316c0c78a52f0cee5/eigen-66e8f38891841bf88ee976a316c0c78a52f0cee5.tar.gz
+ https://gitlab.mpcdf.mpg.de/mtr/ducc/-/archive/3d28aadfd8bb0219e3df188613dbbcdfffccc3cd/ducc-3d28aadfd8bb0219e3df188613dbbcdfffccc3cd.tar.gz
+ cuda? (
+ https://github.com/NVIDIA/cudnn-frontend/archive/refs/tags/v0.9.zip -> cudnn-frontend-v0.9.zip
+ https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip
+ https://github.com/nvidia/nccl/archive/v2.16.5-1.tar.gz -> nvidia-nccl-v2.16.5-1.tar.gz
+ )
+ python? (
+ https://github.com/intel/ARM_NEON_2_x86_SSE/archive/a15b489e1222b2087007546b4912e21293ea86ff.tar.gz -> ARM_NEON_2_x86_SSE-a15b489e1222b2087007546b4912e21293ea86ff.tar.gz
+ https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt
+ )"
+
+SRC_URI="https://github.com/${PN}/${PN}/archive/v${MY_PV}.tar.gz -> ${P}.tar.gz
+ ${bazel_external_uris}"
+
+# abseil-cpp-20211102.0-r0 does not work with NVCC
+# check flatbuffers version in tensorflow/lite/schema/schema_generated.h
+RDEPEND="
+ app-arch/snappy
+ =dev-cpp/abseil-cpp-20230125.2*:=
+ dev-db/sqlite
+ dev-libs/double-conversion
+ dev-libs/icu:=
+ >=dev-libs/jsoncpp-1.9.2:=
+ >=dev-libs/nsync-1.25.0
+ dev-libs/openssl:0=
+ >=dev-libs/protobuf-3.13.0:=
+ >=dev-libs/re2-0.2019.06.01:=
+ media-libs/giflib
+ media-libs/libjpeg-turbo
+ media-libs/libpng:0
+ >=net-libs/grpc-1.28:=
+ net-misc/curl
+ sys-libs/zlib
+ >=sys-apps/hwloc-2:=
+ cuda? (
+ dev-util/nvidia-cuda-toolkit:=[profiler]
+ =dev-libs/cudnn-8*
+ )
+ mpi? ( virtual/mpi )
+ python? (
+ ${PYTHON_DEPS}
+ ~dev-libs/flatbuffers-23.5.26:=
+ dev-python/absl-py[${PYTHON_USEDEP}]
+ >=dev-python/astor-0.7.1[${PYTHON_USEDEP}]
+ dev-python/astunparse[${PYTHON_USEDEP}]
+ dev-python/clang-python[${PYTHON_USEDEP}]
+ dev-python/dill[${PYTHON_USEDEP}]
+ ~dev-python/flatbuffers-23.5.26[${PYTHON_USEDEP}]
+ >=dev-python/gast-0.3.3[${PYTHON_USEDEP}]
+ dev-python/h5py[${PYTHON_USEDEP}]
+ >=dev-python/ml_dtypes-0.2.0[${PYTHON_USEDEP}]
+ >=dev-python/numpy-1.19[${PYTHON_USEDEP}]
+ >=dev-python/google-pasta-0.1.8[${PYTHON_USEDEP}]
+ >=dev-python/opt-einsum-3.3.0[${PYTHON_USEDEP}]
+ >=dev-python/protobuf-python-3.13.0[${PYTHON_USEDEP}]
+ dev-python/pybind11[${PYTHON_USEDEP}]
+ dev-python/six[${PYTHON_USEDEP}]
+ dev-python/tblib[${PYTHON_USEDEP}]
+ dev-python/termcolor[${PYTHON_USEDEP}]
+ dev-python/typing-extensions[${PYTHON_USEDEP}]
+ >=dev-python/grpcio-1.28[${PYTHON_USEDEP}]
+ >=dev-python/wrapt-1.11.1[${PYTHON_USEDEP}]
+ >=net-libs/google-cloud-cpp-0.10.0
+ =sci-visualization/tensorboard-${DEP_VER}*[${PYTHON_USEDEP}]
+ )"
+DEPEND="${RDEPEND}
+ python? (
+ dev-python/mock
+ dev-python/setuptools
+ )"
+PDEPEND="python? (
+ =sci-libs/keras-${DEP_VER}*[${PYTHON_USEDEP}]
+ =sci-libs/tensorflow-estimator-${DEP_VER}*[${PYTHON_USEDEP}]
+ )"
+# >=dev-libs/protobuf-3.8.0
+# bazel-6.4 failed with undefined references to `_mlir_ciface_*'
+# see https://discuss.tensorflow.org/t/undefined-references-to-mlir-ciface-symbols/20571
+# bazel-6.3 failed with undefined reference to `riegeli::RecordsMetadata::Clear()'
+# tested successfully on bazel-6.1.2, bazel-6.2.0 and bazel-6.2.1
+BDEPEND="
+ app-arch/unzip
+ =dev-build/bazel-6*
+ <dev-build/bazel-6.3
+ dev-java/java-config
+ cuda? (
+ >=dev-util/nvidia-cuda-toolkit-9.1[profiler]
+ )
+ !python? ( dev-lang/python )
+ python? (
+ dev-python/cython
+ dev-python/mock
+ >=dev-python/grpcio-tools-1.28
+ )
+ dev-util/patchelf"
+REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+
+S="${WORKDIR}/${MY_P}"
+
+DOCS=( AUTHORS CONTRIBUTING.md ISSUE_TEMPLATE.md README.md RELEASE.md )
+CHECKREQS_MEMORY="5G"
+CHECKREQS_DISK_BUILD="10G"
+
+PATCHES=(
+ "${FILESDIR}/${P}-0001-WORKSPACE-add-rules-docker-http_archive-bazel-toolch.patch"
+ "${FILESDIR}/${P}-0002-systemlib-Latest-absl-LTS-has-split-cord-libs.patch"
+ "${FILESDIR}/${P}-0003-mkl_dnn-Must-link-against-libm-for-round-and-log2.patch"
+ "${FILESDIR}/${P}-0004-tensorflow_cc-Add-systemlib-nsync-linkopts.patch"
+ "${FILESDIR}/${P}-0005-systemlib-Updates-for-Abseil-20220623-LTS.patch"
+ "${FILESDIR}/${P}-0006-systemlib-Update-targets-for-absl_py.patch"
+ "${FILESDIR}/${P}-0007-systemlib-Add-well_known_types_py_pb2-target.patch"
+ "${FILESDIR}/${P}-0008-Relax-setup.py-version-requirements.patch"
+ "${FILESDIR}/${P}-0009-systemlib-update-targets-for-absl.patch"
+ "${FILESDIR}/${P}-0010-systemlib-fix-missing-osx-in-pybind11.patch"
+ "${FILESDIR}/${P}-0011-systemlib-fix-missing-LICENSE-in-flatbuffers.patch"
+ "${FILESDIR}/${P}-0012-installation-remove-cp_local_config_python.patch"
+ "${FILESDIR}/${P}-0013-build-use-non-hermetic-python.patch"
+)
+
+get-cpu-flags() {
+ local i f=()
+ # Keep this list in sync with tensorflow/core/platform/cpu_feature_guard.cc.
+ for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do
+ use cpu_flags_x86_${i} && f+=( -m${i/_/.} )
+ done
+ use cpu_flags_x86_fma3 && f+=( -mfma )
+ echo "${f[*]}"
+}
+
+pkg_setup() {
+ local num_pythons_enabled
+ num_pythons_enabled=0
+ count_impls() {
+ num_pythons_enabled=$((${num_pythons_enabled} + 1))
+ }
+ use python && python_foreach_impl count_impls
+
+ # 10G to build C/C++ libs, 6G per python impl
+ CHECKREQS_DISK_BUILD="$((10 + 6 * ${num_pythons_enabled}))G"
+ check-reqs_pkg_setup
+}
+
+src_unpack() {
+ # Only unpack the main distfile
+ unpack "${P}.tar.gz"
+ bazel_load_distfiles "${bazel_external_uris}"
+}
+
+src_prepare() {
+ local d
+ export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+ export TF_PYTHON_VERSION="${EPYTHON/python/}"
+
+ # Use non-hermetic python
+ for d in third_party third_party/xla/third_party third_party/xla/third_party/tsl/third_party;
+ do
+ mv ${d}/py/non_hermetic ${d} || die
+ rm -rf ${d}/py || die
+ mv ${d}/non_hermetic ${d}/py || die
+ done
+
+ append-flags $(get-cpu-flags)
+ append-cxxflags -std=c++17
+ export BUILD_CXXFLAGS+=" -std=c++17"
+ filter-flags '-fvtable-verify=@(std|preinit)'
+ bazel_setup_bazelrc
+
+ # Relax version checks in setup.py
+ # Fixed in patch already
+ # sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die
+
+ # Prefixify hard-coded command locations
+ hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl
+
+ default
+ use python && python_copy_sources
+
+ use cuda && cuda_add_sandbox
+}
+
+src_configure() {
+ export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+ export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+ do_configure() {
+ export CC_OPT_FLAGS=" "
+ export TF_ENABLE_XLA=$(usex xla 1 0)
+ export TF_NEED_OPENCL_SYCL=0
+ export TF_NEED_OPENCL=0
+ export TF_NEED_COMPUTECPP=0
+ export TF_NEED_ROCM=0
+ export TF_NEED_MPI=$(usex mpi 1 0)
+ export TF_SET_ANDROID_WORKSPACE=0
+
+ if use python; then
+ export PYTHON_BIN_PATH="${PYTHON}"
+ export PYTHON_LIB_PATH="$(python_get_sitedir)"
+ else
+ export PYTHON_BIN_PATH="$(which python)"
+ export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"
+ fi
+
+ export TF_NEED_CUDA=$(usex cuda 1 0)
+ export TF_DOWNLOAD_CLANG=0
+ export TF_CUDA_CLANG=0
+ export TF_NEED_TENSORRT=0 # $(usex cuda 1 0)
+ if use cuda; then
+ export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"
+ export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"
+ export TF_CUDA_VERSION="$(cuda_toolkit_version)"
+ export TF_CUDNN_VERSION="$(cuda_cudnn_version)"
+ einfo "Setting CUDA version: $TF_CUDA_VERSION"
+ einfo "Setting CUDNN version: $TF_CUDNN_VERSION"
+
+ if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then
+ ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"
+ ewarn "version is not supported by the currently installed CUDA. TensorFlow will"
+ ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."
+ ewarn "If the build fails with linker errors try rebuilding the relevant"
+ ewarn "dependencies using the same compiler version."
+ fi
+
+ if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then
+ ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."
+ ewarn "These may not be optimal for your GPU."
+ ewarn ""
+ ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"
+ ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."
+ ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"
+ ewarn ""
+ ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"
+ ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"
+ fi
+ fi
+
+ # com_googlesource_code_re2 weird branch using absl, doesn't work with released re2
+ # com_google_protobuf is disabled due to https://github.com/tensorflow/tensorflow/issues/61593
+ local SYSLIBS=(
+ absl_py
+ astor_archive
+ astunparse_archive
+ boringssl
+ com_github_googlecloudplatform_google_cloud_cpp
+ com_github_grpc_grpc
+ com_google_absl
+ # com_google_protobuf
+ curl
+ cython
+ dill_archive
+ double_conversion
+ flatbuffers
+ functools32_archive
+ gast_archive
+ gif
+ hwloc
+ icu
+ jsoncpp_git
+ libjpeg_turbo
+ nasm
+ nsync
+ org_sqlite
+ pasta
+ png
+ pybind11
+ six_archive
+ snappy
+ tblib_archive
+ termcolor_archive
+ typing_extensions_archive
+ wrapt
+ zlib
+ )
+
+ export TF_SYSTEM_LIBS="${SYSLIBS[@]}"
+ export TF_IGNORE_MAX_BAZEL_VERSION=1
+
+ # This is not autoconf
+ ./configure || die
+
+ echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die
+ echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die
+ echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+ echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die
+
+ for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags)
+ do
+ echo "build --copt=\"${cflag}\"" >> .bazelrc || die
+ echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die
+ done
+ }
+ if use python; then
+ python_foreach_impl run_in_build_dir do_configure
+ else
+ do_configure
+ fi
+}
+
+src_compile() {
+ export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+ export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+ if use python; then
+ python_setup
+ BUILD_DIR="${S}-${EPYTHON/./_}"
+ cd "${BUILD_DIR}" || die
+ fi
+
+ # fail early if any deps are missing
+ ebazel build -k --nobuild \
+ //tensorflow:libtensorflow_framework.so \
+ //tensorflow:libtensorflow.so \
+ //tensorflow:libtensorflow_cc.so \
+ $(usex python '//tensorflow/tools/pip_package:build_pip_package' '')
+
+ ebazel build \
+ //tensorflow:libtensorflow_framework.so \
+ //tensorflow:libtensorflow.so
+ ebazel build //tensorflow:libtensorflow_cc.so
+ ebazel build //tensorflow:install_headers
+ ebazel shutdown
+
+ do_compile() {
+ ebazel build //tensorflow/tools/pip_package:build_pip_package
+ ebazel shutdown
+ }
+ BUILD_DIR="${S}"
+ cd "${BUILD_DIR}" || die
+ use python && python_foreach_impl run_in_build_dir do_compile
+}
+
+src_install() {
+ local i l
+ export JAVA_HOME=$(java-config --jre-home) # so keepwork works
+ export KERAS_HOME="${T}/.keras" # otherwise sandbox violation writing ~/.keras
+
+ do_install() {
+ einfo "Installing ${EPYTHON} files"
+ local srcdir="${T}/src-${MULTIBUILD_VARIANT}"
+ mkdir -p "${srcdir}" || die
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die
+ cd "${srcdir}" || die
+ esetup.py install
+
+ # libtensorflow_framework.so and libtensorflow_cc.so are in /usr/lib already
+ rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die
+ rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_cc.so* || die
+ python_optimize
+ }
+
+ if use python; then
+ python_foreach_impl run_in_build_dir do_install
+
+ # Symlink to python-exec scripts
+ for i in "${ED}"/usr/lib/python-exec/*/*; do
+ n="${i##*/}"
+ [[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"
+ done
+
+ python_setup
+ local BUILD_DIR="${S}-${EPYTHON/./_}"
+ cd "${BUILD_DIR}" || die
+ fi
+
+ einfo "Installing headers"
+ insinto /usr/include/${PN}/
+ doins -r bazel-bin/tensorflow/include/*
+
+ einfo "Installing libs"
+ # Generate pkg-config file
+ ${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die
+ insinto /usr/$(get_libdir)/pkgconfig
+ doins ${PN}.pc ${PN}_cc.pc
+
+ for l in libtensorflow{,_framework,_cc}.so; do
+ patchelf --add-rpath '/opt/cuda/lib64' bazel-bin/tensorflow/${l}
+ dolib.so bazel-bin/tensorflow/${l}
+ dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1)
+ dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3)
+ done
+
+ einstalldocs
+
+ # Workaround for https://bugs.gentoo.org/831927
+ export MAKEOPTS="-j1"
+}