[arch-commits] Commit in tensorflow/repos (5 files)
Evangelos Foutras
foutrelis at archlinux.org
Sat Apr 17 08:55:00 UTC 2021
Date: Saturday, April 17, 2021 @ 08:54:59
Author: foutrelis
Revision: 919638
archrelease: copy trunk to community-testing-x86_64
Added:
tensorflow/repos/community-testing-x86_64/
tensorflow/repos/community-testing-x86_64/PKGBUILD
(from rev 919637, tensorflow/trunk/PKGBUILD)
tensorflow/repos/community-testing-x86_64/build-against-actual-mkl.patch
(from rev 919637, tensorflow/trunk/build-against-actual-mkl.patch)
tensorflow/repos/community-testing-x86_64/fix-h5py3.0.patch
(from rev 919637, tensorflow/trunk/fix-h5py3.0.patch)
tensorflow/repos/community-testing-x86_64/test.py
(from rev 919637, tensorflow/trunk/test.py)
--------------------------------+
 PKGBUILD                       |  322 ++++++++++++++++++++++++++++++++
 build-against-actual-mkl.patch |   37 +++
 fix-h5py3.0.patch              |  391 +++++++++++++++++++++++++++++++++++++++
 test.py                        |   12 +
 4 files changed, 762 insertions(+)
Copied: tensorflow/repos/community-testing-x86_64/PKGBUILD (from rev 919637, tensorflow/trunk/PKGBUILD)
===================================================================
--- community-testing-x86_64/PKGBUILD (rev 0)
+++ community-testing-x86_64/PKGBUILD 2021-04-17 08:54:59 UTC (rev 919638)
@@ -0,0 +1,322 @@
+# Maintainer: Sven-Hendrik Haase <svenstaro at gmail.com>
+# Maintainer: Konstantin Gizdov (kgizdov) <arch at kge.pw>
+# Contributor: Adria Arrufat (archdria) <adria.arrufat+AUR at protonmail.ch>
+# Contributor: Thibault Lorrain (fredszaq) <fredszaq at gmail.com>
+
+pkgbase=tensorflow
+pkgname=(tensorflow tensorflow-opt tensorflow-cuda tensorflow-opt-cuda python-tensorflow python-tensorflow-opt python-tensorflow-cuda python-tensorflow-opt-cuda)
+pkgver=2.4.1
+_pkgver=2.4.1
+pkgrel=9
+pkgdesc="Library for computation using data flow graphs for scalable machine learning"
+url="https://www.tensorflow.org/"
+license=('APACHE')
+arch=('x86_64')
+depends=('c-ares' 'intel-mkl' 'onednn' 'pybind11' 'openssl-1.0' 'lmdb' 'libpng' 'curl' 'giflib' 'icu' 'libjpeg-turbo')
+makedepends=('bazel' 'python-numpy' 'cuda' 'nvidia-utils' 'nccl' 'git'
+ 'cudnn' 'python-pip' 'python-wheel' 'python-setuptools' 'python-h5py'
+ 'python-keras-applications' 'python-keras-preprocessing'
+ 'cython')
+optdepends=('tensorboard: Tensorflow visualization toolkit')
+source=("$pkgname-$pkgver.tar.gz::https://github.com/tensorflow/tensorflow/archive/v${_pkgver}.tar.gz"
+ fix-h5py3.0.patch
+ build-against-actual-mkl.patch)
+sha512sums=('be8273f464c1c1c392f3ab0190dbba36d56a0edcc7991c1a86f16604c859056d3188737d11c3b41ec7918e1cf46d13814c50c00be8f459dde9f0fb618740ee3c'
+ '556e3dd4bf0989c139536a7fbfd423b67007839814b04d778790d1c51eb9a4e9f13730a004fc4361a5e6d8a0d08b7b1a28f6e26e184c07f9297c67073a4e9192'
+ 'e51e3f3dced121db3a09fbdaefd33555536095584b72a5eb6f302fa6fa68ab56ea45e8a847ec90ff4ba076db312c06f91ff672e08e95263c658526582494ce08')
+
+# consolidate common dependencies to prevent mishaps
+_common_py_depends=(python-termcolor python-astor python-gast03 python-numpy python-protobuf absl-py python-h5py python-keras-applications python-keras-preprocessing python-tensorflow-estimator python-opt_einsum python-astunparse python-pasta python-flatbuffers)
+
+get_pyver () {
+ python -c 'import sys; print(str(sys.version_info[0]) + "." + str(sys.version_info[1]))'
+}
+
+check_dir() {
+ # first make sure we do not break parsepkgbuild
+ if ! command -v cp &> /dev/null; then
+ >&2 echo "'cp' command not found. PKGBUILD is probably being checked by parsepkgbuild."
+ if ! command -v install &> /dev/null; then
+ >&2 echo "'install' command also not found. PKGBUILD must be getting checked by parsepkgbuild."
+ >&2 echo "Cannot check if directory '${1}' exists. Ignoring."
+ >&2 echo "If you are not running nacmap or parsepkgbuild, please make sure the PATH is correct and try again."
+ >&2 echo "PATH should not be '/dummy': PATH=$PATH"
+ return 0
+ fi
+ fi
+ # if we are running normally, check the given path
+ if [ -d "${1}" ]; then
+ return 0
+ else
+ >&2 echo "Directory ${1} does not exist or is a file! Exiting..."
+ exit 1
+ fi
+}
+
+prepare() {
+ # Allow any bazel version
+ echo "*" > tensorflow-${_pkgver}/.bazelversion
+
+ # Tensorflow actually wants to build against a slimmed down version of Intel MKL called MKLML
+ # See https://github.com/intel/mkl-dnn/issues/102
+ # The MKLML version that Tensorflow wants to use is https://github.com/intel/mkl-dnn/releases/tag/v0.21
+ # patch -Np1 -d tensorflow-${_pkgver} -i "$srcdir"/build-against-actual-mkl.patch
+
+ # Compile with C++17 by default (FS#65953)
+ #sed -i "s/c++14/c++17/g" tensorflow-${_pkgver}/.bazelrc
+
+ # FS#68488
+ patch -Np1 -d tensorflow-${_pkgver} -i "$srcdir"/fix-h5py3.0.patch
+
+ # Get rid of hardcoded versions. Not like we ever cared about what upstream
+ # thinks about which versions should be used anyway. ;) (FS#68772)
+ sed -i -E "s/'([0-9a-z_-]+) .= [0-9].+[0-9]'/'\1'/" tensorflow-${_pkgver}/tensorflow/tools/pip_package/setup.py
+
+ cp -r tensorflow-${_pkgver} tensorflow-${_pkgver}-opt
+ cp -r tensorflow-${_pkgver} tensorflow-${_pkgver}-cuda
+ cp -r tensorflow-${_pkgver} tensorflow-${_pkgver}-opt-cuda
+
+ # These environment variables influence the behavior of the configure call below.
+ export PYTHON_BIN_PATH=/usr/bin/python
+ export USE_DEFAULT_PYTHON_LIB_PATH=1
+ export TF_NEED_JEMALLOC=1
+ export TF_NEED_KAFKA=1
+ export TF_NEED_OPENCL_SYCL=0
+ export TF_NEED_AWS=1
+ export TF_NEED_GCP=1
+ export TF_NEED_HDFS=1
+ export TF_NEED_S3=1
+ export TF_ENABLE_XLA=1
+ export TF_NEED_GDR=0
+ export TF_NEED_VERBS=0
+ export TF_NEED_OPENCL=0
+ export TF_NEED_MPI=0
+ export TF_NEED_TENSORRT=0
+ export TF_NEED_NGRAPH=0
+ export TF_NEED_IGNITE=0
+ export TF_NEED_ROCM=0
+ # See https://github.com/tensorflow/tensorflow/blob/master/third_party/systemlibs/syslibs_configure.bzl
+ export TF_SYSTEM_LIBS="boringssl,curl,cython,gif,icu,libjpeg_turbo,lmdb,nasm,pcre,png,pybind11,zlib"
+ export TF_SET_ANDROID_WORKSPACE=0
+ export TF_DOWNLOAD_CLANG=0
+ export TF_NCCL_VERSION=2.8
+ export TF_IGNORE_MAX_BAZEL_VERSION=1
+ export TF_MKL_ROOT=/opt/intel/mkl
+ export NCCL_INSTALL_PATH=/usr
+ export GCC_HOST_COMPILER_PATH=/usr/bin/gcc
+ export HOST_C_COMPILER=/usr/bin/gcc
+ export HOST_CXX_COMPILER=/usr/bin/g++
+ export TF_CUDA_CLANG=0 # Clang is currently disabled because it's not compatible with this build.
+ export CLANG_CUDA_COMPILER_PATH=/usr/bin/clang
+ export TF_CUDA_PATHS=/opt/cuda,/usr/lib,/usr
+ export TF_CUDA_VERSION=$(/opt/cuda/bin/nvcc --version | sed -n 's/^.*release \(.*\),.*/\1/p')
+ export TF_CUDNN_VERSION=$(sed -n 's/^#define CUDNN_MAJOR\s*\(.*\).*/\1/p' /usr/include/cudnn_version.h)
+ export TF_CUDA_COMPUTE_CAPABILITIES=5.2,5.3,6.0,6.1,6.2,7.0,7.2,7.5,8.0,8.6
+
+ # Required until https://github.com/tensorflow/tensorflow/issues/39467 is fixed.
+ export CC=gcc
+ export CXX=g++
+
+ export BAZEL_ARGS="--config=mkl -c opt --copt=-I/usr/include/openssl-1.0 --host_copt=-I/usr/include/openssl-1.0 --linkopt=-l:libssl.so.1.0.0 --linkopt=-l:libcrypto.so.1.0.0 --host_linkopt=-l:libssl.so.1.0.0 --host_linkopt=-l:libcrypto.so.1.0.0"
+
+ # Workaround for gcc 10+ warnings related to upb.
+ # See https://github.com/tensorflow/tensorflow/issues/39467
+ export BAZEL_ARGS="$BAZEL_ARGS --host_copt=-Wno-stringop-truncation"
+}
+
+build() {
+ echo "Building without cuda and without non-x86-64 optimizations"
+ cd "${srcdir}"/tensorflow-${_pkgver}
+ export CC_OPT_FLAGS="-march=x86-64"
+ export TF_NEED_CUDA=0
+ ./configure
+ bazel \
+ build ${BAZEL_ARGS[@]} \
+ //tensorflow:libtensorflow.so \
+ //tensorflow:libtensorflow_cc.so \
+ //tensorflow:install_headers \
+ //tensorflow/tools/pip_package:build_pip_package
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package "${srcdir}"/tmp
+
+
+ echo "Building without cuda and with non-x86-64 optimizations"
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt
+ export CC_OPT_FLAGS="-march=haswell -O3"
+ export TF_NEED_CUDA=0
+ ./configure
+ bazel \
+ build --config=avx2_linux \
+ ${BAZEL_ARGS[@]} \
+ //tensorflow:libtensorflow.so \
+ //tensorflow:libtensorflow_cc.so \
+ //tensorflow:install_headers \
+ //tensorflow/tools/pip_package:build_pip_package
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package "${srcdir}"/tmpopt
+
+
+ echo "Building with cuda and without non-x86-64 optimizations"
+ cd "${srcdir}"/tensorflow-${_pkgver}-cuda
+ export CC_OPT_FLAGS="-march=x86-64"
+ export TF_NEED_CUDA=1
+ ./configure
+ bazel \
+ build \
+ ${BAZEL_ARGS[@]} \
+ //tensorflow:libtensorflow.so \
+ //tensorflow:libtensorflow_cc.so \
+ //tensorflow:install_headers \
+ //tensorflow/tools/pip_package:build_pip_package
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package --gpu "${srcdir}"/tmpcuda
+
+
+ echo "Building with cuda and with non-x86-64 optimizations"
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt-cuda
+ export CC_OPT_FLAGS="-march=haswell -O3"
+ export TF_NEED_CUDA=1
+ ./configure
+ bazel \
+ build --config=avx2_linux \
+ ${BAZEL_ARGS[@]} \
+ //tensorflow:libtensorflow.so \
+ //tensorflow:libtensorflow_cc.so \
+ //tensorflow:install_headers \
+ //tensorflow/tools/pip_package:build_pip_package
+ bazel-bin/tensorflow/tools/pip_package/build_pip_package --gpu "${srcdir}"/tmpoptcuda
+}
+
+_package() {
+ # install headers first
+ install -d "${pkgdir}"/usr/include/tensorflow
+ cp -r bazel-bin/tensorflow/include/* "${pkgdir}"/usr/include/tensorflow/
+ # install the python version of the package to get all extra headers
+ WHEEL_PACKAGE=$(find "${srcdir}"/$1 -name "tensor*.whl")
+ pip install --ignore-installed --upgrade --root "${pkgdir}"/ $WHEEL_PACKAGE --no-dependencies
+ # move extra headers to correct location
+ local _srch_path="${pkgdir}/usr/lib/python$(get_pyver)"/site-packages/tensorflow/include
+ check_dir "${_srch_path}" # we need to quit on broken search paths
+ find "${_srch_path}" -maxdepth 1 -mindepth 1 -type d -print0 | while read -rd $'\0' _folder; do
+ cp -nr "${_folder}" "${pkgdir}"/usr/include/tensorflow/
+ done
+ # clean up unneeded files
+ rm -rf "${pkgdir}"/usr/bin
+ rm -rf "${pkgdir}"/usr/lib
+ rm -rf "${pkgdir}"/usr/share
+ # make sure no lib objects are outside valid paths
+ local _so_srch_path="${pkgdir}/usr/include"
+ check_dir "${_so_srch_path}" # we need to quit on broken search paths
+ find "${_so_srch_path}" -type f,l \( -iname "*.so" -or -iname "*.so.*" \) -print0 | while read -rd $'\0' _so_file; do
+ # check if file is a dynamic executable
+ ldd "${_so_file}" &>/dev/null && rm -rf "${_so_file}"
+ done
+
+ # install the rest of tensorflow
+ tensorflow/c/generate-pc.sh --prefix=/usr --version=${pkgver}
+ sed -e 's@/include$@/include/tensorflow@' -i tensorflow.pc -i tensorflow_cc.pc
+ install -Dm644 tensorflow.pc "${pkgdir}"/usr/lib/pkgconfig/tensorflow.pc
+ install -Dm644 tensorflow_cc.pc "${pkgdir}"/usr/lib/pkgconfig/tensorflow_cc.pc
+ install -Dm755 bazel-bin/tensorflow/libtensorflow.so "${pkgdir}"/usr/lib/libtensorflow.so.${pkgver}
+ ln -s libtensorflow.so.${pkgver} "${pkgdir}"/usr/lib/libtensorflow.so.${pkgver:0:1}
+ ln -s libtensorflow.so.${pkgver:0:1} "${pkgdir}"/usr/lib/libtensorflow.so
+ install -Dm755 bazel-bin/tensorflow/libtensorflow_cc.so "${pkgdir}"/usr/lib/libtensorflow_cc.so.${pkgver}
+ ln -s libtensorflow_cc.so.${pkgver} "${pkgdir}"/usr/lib/libtensorflow_cc.so.${pkgver:0:1}
+ ln -s libtensorflow_cc.so.${pkgver:0:1} "${pkgdir}"/usr/lib/libtensorflow_cc.so
+ install -Dm755 bazel-bin/tensorflow/libtensorflow_framework.so "${pkgdir}"/usr/lib/libtensorflow_framework.so.${pkgver}
+ ln -s libtensorflow_framework.so.${pkgver} "${pkgdir}"/usr/lib/libtensorflow_framework.so.${pkgver:0:1}
+ ln -s libtensorflow_framework.so.${pkgver:0:1} "${pkgdir}"/usr/lib/libtensorflow_framework.so
+ install -Dm644 tensorflow/c/c_api.h "${pkgdir}"/usr/include/tensorflow/tensorflow/c/c_api.h
+ install -Dm644 LICENSE "${pkgdir}"/usr/share/licenses/${pkgname}/LICENSE
+}
+
+_python_package() {
+ WHEEL_PACKAGE=$(find "${srcdir}"/$1 -name "tensor*.whl")
+ pip install --ignore-installed --upgrade --root "${pkgdir}"/ $WHEEL_PACKAGE --no-dependencies
+
+ # create symlinks to headers
+ local _srch_path="${pkgdir}/usr/lib/python$(get_pyver)"/site-packages/tensorflow/include/
+ check_dir "${_srch_path}" # we need to quit on broken search paths
+ find "${_srch_path}" -maxdepth 1 -mindepth 1 -type d -print0 | while read -rd $'\0' _folder; do
+ rm -rf "${_folder}"
+ _smlink="$(basename "${_folder}")"
+ ln -s /usr/include/tensorflow/"${_smlink}" "${_srch_path}"
+ done
+
+ # tensorboard has been separated from upstream but they still install it with
+ # tensorflow. I don't know what kind of sense that makes but we have to clean
+ # it out from this package.
+ rm -rf "${pkgdir}"/usr/bin/tensorboard
+
+ install -Dm644 LICENSE "${pkgdir}"/usr/share/licenses/${pkgname}/LICENSE
+}
+
+package_tensorflow() {
+ cd "${srcdir}"/tensorflow-${_pkgver}
+ _package tmp
+}
+
+package_tensorflow-opt() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with AVX2 CPU optimizations)"
+ conflicts=(tensorflow)
+ provides=(tensorflow)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt
+ _package tmpopt
+}
+
+package_tensorflow-cuda() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with CUDA)"
+ depends+=(cuda cudnn nccl)
+ conflicts=(tensorflow)
+ provides=(tensorflow)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-cuda
+ _package tmpcuda
+}
+
+package_tensorflow-opt-cuda() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with CUDA and AVX2 CPU optimizations)"
+ depends+=(cuda cudnn nccl)
+ conflicts=(tensorflow)
+ provides=(tensorflow tensorflow-cuda)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt-cuda
+ _package tmpoptcuda
+}
+
+package_python-tensorflow() {
+ depends+=(tensorflow "${_common_py_depends[@]}")
+
+ cd "${srcdir}"/tensorflow-${_pkgver}
+ _python_package tmp
+}
+
+package_python-tensorflow-opt() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with AVX2 CPU optimizations)"
+ depends+=(tensorflow-opt "${_common_py_depends[@]}")
+ conflicts=(python-tensorflow)
+ provides=(python-tensorflow)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt
+ _python_package tmpopt
+}
+
+package_python-tensorflow-cuda() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with CUDA)"
+ depends+=(tensorflow-cuda cuda cudnn nccl python-pycuda "${_common_py_depends[@]}")
+ conflicts=(python-tensorflow)
+ provides=(python-tensorflow)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-cuda
+ _python_package tmpcuda
+}
+
+package_python-tensorflow-opt-cuda() {
+ pkgdesc="Library for computation using data flow graphs for scalable machine learning (with CUDA and AVX2 CPU optimizations)"
+ depends+=(tensorflow-opt-cuda cuda cudnn nccl python-pycuda "${_common_py_depends[@]}")
+ conflicts=(python-tensorflow)
+ provides=(python-tensorflow python-tensorflow-cuda)
+
+ cd "${srcdir}"/tensorflow-${_pkgver}-opt-cuda
+ _python_package tmpoptcuda
+}
+
+# vim:set ts=2 sw=2 et:
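A rough local-build sketch for the PKGBUILD above, assuming only the standard Arch tooling (makepkg, namcap); the checkout path and the lint step are illustrative, not part of the committed files:
  $ cd tensorflow/trunk      # working copy containing the PKGBUILD shown above
  $ makepkg --syncdeps       # builds every split package listed in pkgname=()
  $ namcap *.pkg.tar.zst     # optional lint pass over the resulting packages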
Copied: tensorflow/repos/community-testing-x86_64/build-against-actual-mkl.patch (from rev 919637, tensorflow/trunk/build-against-actual-mkl.patch)
===================================================================
--- community-testing-x86_64/build-against-actual-mkl.patch (rev 0)
+++ community-testing-x86_64/build-against-actual-mkl.patch 2021-04-17 08:54:59 UTC (rev 919638)
@@ -0,0 +1,37 @@
+diff --git a/third_party/mkl/build_defs.bzl b/third_party/mkl/build_defs.bzl
+index 4b8fb83eb0..f4e1adfb22 100644
+--- a/third_party/mkl/build_defs.bzl
++++ b/third_party/mkl/build_defs.bzl
+@@ -124,7 +124,7 @@ def _mkl_autoconf_impl(repository_ctx):
+ if _enable_local_mkl(repository_ctx):
+ # Symlink lib and include local folders.
+ mkl_root = repository_ctx.os.environ[_TF_MKL_ROOT]
+- mkl_lib_path = "%s/lib" % mkl_root
++ mkl_lib_path = "%s/lib/intel64" % mkl_root
+ repository_ctx.symlink(mkl_lib_path, "lib")
+ mkl_include_path = "%s/include" % mkl_root
+ repository_ctx.symlink(mkl_include_path, "include")
+diff --git a/third_party/mkl/mkl.BUILD b/third_party/mkl/mkl.BUILD
+index 72370182c4..4972bb005e 100644
+--- a/third_party/mkl/mkl.BUILD
++++ b/third_party/mkl/mkl.BUILD
+@@ -5,7 +5,6 @@ exports_files(["license.txt"])
+ filegroup(
+ name = "LICENSE",
+ srcs = [
+- "license.txt",
+ ],
+ visibility = ["//visibility:public"],
+ )
+@@ -21,7 +20,10 @@ cc_library(
+ name = "mkl_libs_linux",
+ srcs = [
+ "lib/libiomp5.so",
+- "lib/libmklml_intel.so",
++ "lib/libmkl_core.so",
++ "lib/libmkl_rt.so",
++ "lib/libmkl_intel_thread.so",
++ "lib/libmkl_intel_lp64.so",
+ ],
+ visibility = ["//visibility:public"],
+ )
Copied: tensorflow/repos/community-testing-x86_64/fix-h5py3.0.patch (from rev 919637, tensorflow/trunk/fix-h5py3.0.patch)
===================================================================
--- community-testing-x86_64/fix-h5py3.0.patch (rev 0)
+++ community-testing-x86_64/fix-h5py3.0.patch 2021-04-17 08:54:59 UTC (rev 919638)
@@ -0,0 +1,391 @@
+From c0dbdfb718de6481b00d7ff17a0ff763cf7a7544 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Tue, 8 Dec 2020 19:05:35 +0000
+Subject: [PATCH 01/12] Revert "Revert PR #45380: Try to switch to h5py 3.1.0"
+
+This reverts commit d36db2955c460d2abe185ced9cc5bdfabc67b631.
+---
+ .../tools/ci_build/install/install_centos_pip_packages.sh | 2 +-
+ tensorflow/tools/ci_build/install/install_pip_packages.sh | 2 +-
+ .../tools/ci_build/install/install_python3.5_pip_packages.sh | 2 +-
+ .../tools/ci_build/install/install_python3.6_pip_packages.sh | 2 +-
+ tensorflow/tools/ci_build/release/common.sh | 4 ++--
+ tensorflow/tools/ci_build/release/common_win.bat | 4 ++--
+ tensorflow/tools/pip_package/setup.py | 2 +-
+ 7 files changed, 9 insertions(+), 9 deletions(-)
+
+diff --git a/tensorflow/tools/ci_build/install/install_centos_pip_packages.sh b/tensorflow/tools/ci_build/install/install_centos_pip_packages.sh
+index ce7789b3704b2..0f0f182a1bc10 100755
+--- a/tensorflow/tools/ci_build/install/install_centos_pip_packages.sh
++++ b/tensorflow/tools/ci_build/install/install_centos_pip_packages.sh
+@@ -102,7 +102,7 @@ pip3 install --upgrade termcolor
+ pip2 install keras_preprocessing==1.0.5 --no-deps
+ pip3 install keras_preprocessing==1.0.5 --no-deps
+ pip2 install --upgrade h5py==2.8.0
+-pip3 install --upgrade h5py==2.8.0
++pip3 install --upgrade h5py==3.1.0
+
+ # Estimator
+ pip2 install tf-estimator-nightly --no-deps
+diff --git a/tensorflow/tools/ci_build/install/install_pip_packages.sh b/tensorflow/tools/ci_build/install/install_pip_packages.sh
+index 578967a67cf84..f9893f070d596 100755
+--- a/tensorflow/tools/ci_build/install/install_pip_packages.sh
++++ b/tensorflow/tools/ci_build/install/install_pip_packages.sh
+@@ -134,7 +134,7 @@ pip3 install --upgrade termcolor
+ pip2 install keras_preprocessing==1.1.0 --no-deps
+ pip3 install keras_preprocessing==1.1.0 --no-deps
+ pip2 install --upgrade h5py==2.8.0
+-pip3 install --upgrade h5py==2.8.0
++pip3 install --upgrade h5py==3.1.0
+
+ # Estimator
+ pip2 install tf-estimator-nightly --no-deps
+diff --git a/tensorflow/tools/ci_build/install/install_python3.5_pip_packages.sh b/tensorflow/tools/ci_build/install/install_python3.5_pip_packages.sh
+index bb53fc91981aa..9530c9fdf22e8 100755
+--- a/tensorflow/tools/ci_build/install/install_python3.5_pip_packages.sh
++++ b/tensorflow/tools/ci_build/install/install_python3.5_pip_packages.sh
+@@ -87,7 +87,7 @@ pip3.5 install --upgrade termcolor
+
+ # Keras
+ pip3.5 install keras_preprocessing==1.0.5
+-pip3.5 install --upgrade h5py==2.8.0
++pip3.5 install --upgrade h5py==3.1.0
+
+ # Estimator
+ pip3.5 install tf-estimator-nightly==1.12.0.dev20181203 --no-deps
+diff --git a/tensorflow/tools/ci_build/install/install_python3.6_pip_packages.sh b/tensorflow/tools/ci_build/install/install_python3.6_pip_packages.sh
+index bcf0d0b87ab56..f130ab87dc23a 100755
+--- a/tensorflow/tools/ci_build/install/install_python3.6_pip_packages.sh
++++ b/tensorflow/tools/ci_build/install/install_python3.6_pip_packages.sh
+@@ -101,7 +101,7 @@ pip3 install --upgrade astor
+ pip3 install --upgrade gast
+ pip3 install --upgrade termcolor
+
+-pip3 install --upgrade h5py==2.8.0
++pip3 install --upgrade h5py==3.1.0
+
+ # Keras
+ pip3 install keras_preprocessing==1.0.5
+diff --git a/tensorflow/tools/ci_build/release/common_win.bat b/tensorflow/tools/ci_build/release/common_win.bat
+index f27ec3117ed50..dbe159a67767a 100644
+--- a/tensorflow/tools/ci_build/release/common_win.bat
++++ b/tensorflow/tools/ci_build/release/common_win.bat
+@@ -18,7 +18,7 @@ echo on
+ @REM Set Environment Variables
+ @REM
+ IF NOT DEFINED PYTHON_DIRECTORY (
+- SET PYTHON_DIRECTORY=Python36
++ SET PYTHON_DIRECTORY=Python37
+ )
+ SET PY_EXE=C:\%PYTHON_DIRECTORY%\python.exe
+ SET PATH=%PATH%;C:\%PYTHON_DIRECTORY%
+@@ -32,7 +32,7 @@ SET PATH=%PATH%;C:\%PYTHON_DIRECTORY%
+ %PY_EXE% -m pip install "astunparse ~= 1.6.3"
+ %PY_EXE% -m pip install "flatbuffers ~= 1.12.0"
+ %PY_EXE% -m pip install "google_pasta ~= 0.2"
+-%PY_EXE% -m pip install "h5py ~= 2.10.0"
++%PY_EXE% -m pip install "h5py ~= 3.1.0"
+ %PY_EXE% -m pip install "keras_preprocessing ~= 1.1.2"
+ %PY_EXE% -m pip install "numpy ~= 1.19.2"
+ %PY_EXE% -m pip install "opt_einsum ~= 3.3.0"
+diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py
+index 613ce9f5bf3e1..d84b08de7a11f 100644
+--- a/tensorflow/tools/pip_package/setup.py
++++ b/tensorflow/tools/pip_package/setup.py
+@@ -79,7 +79,7 @@
+ 'astunparse ~= 1.6.3',
+ 'flatbuffers ~= 1.12.0',
+ 'google_pasta ~= 0.2',
+- 'h5py ~= 2.10.0',
++ 'h5py ~= 3.1.0',
+ 'keras_preprocessing ~= 1.1.2',
+ 'numpy ~= 1.19.2',
+ 'opt_einsum ~= 3.3.0',
+
+From 9631c2de4240f6c435bc559647659ba4e2116572 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Wed, 9 Dec 2020 14:10:12 +0000
+Subject: [PATCH 02/12] Check if win and Macosx pass
+
+---
+ tensorflow/python/keras/saving/hdf5_format.py | 4 +++-
+ 1 file changed, 3 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/python/keras/saving/hdf5_format.py b/tensorflow/python/keras/saving/hdf5_format.py
+index d3bb10c98ddf9..d6b0a773c77ca 100644
+--- a/tensorflow/python/keras/saving/hdf5_format.py
++++ b/tensorflow/python/keras/saving/hdf5_format.py
+@@ -179,7 +179,9 @@ def load_model_from_hdf5(filepath, custom_objects=None, compile=True): # pylint
+ model_config = f.attrs.get('model_config')
+ if model_config is None:
+ raise ValueError('No model found in config file.')
+- model_config = json_utils.decode(model_config.decode('utf-8'))
++ if hasattr(model_config, 'decode'):
++ model_config = model_config.decode('utf-8')
++ model_config = json_utils.decode(model_config)
+ model = model_config_lib.model_from_config(model_config,
+ custom_objects=custom_objects)
+
+
+From 711304d116d38c833928d38c75224b6a03e029d7 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Wed, 9 Dec 2020 18:57:40 +0000
+Subject: [PATCH 03/12] Conditional decode workaround for win
+
+---
+ tensorflow/python/keras/saving/hdf5_format.py | 19 ++++++++++++++-----
+ 1 file changed, 14 insertions(+), 5 deletions(-)
+
+diff --git a/tensorflow/python/keras/saving/hdf5_format.py b/tensorflow/python/keras/saving/hdf5_format.py
+index d6b0a773c77ca..509caf0ab3351 100644
+--- a/tensorflow/python/keras/saving/hdf5_format.py
++++ b/tensorflow/python/keras/saving/hdf5_format.py
+@@ -191,11 +191,13 @@ def load_model_from_hdf5(filepath, custom_objects=None, compile=True): # pylint
+ if compile:
+ # instantiate optimizer
+ training_config = f.attrs.get('training_config')
++ if hasattr(training_config, 'decode'):
++ training_config = training_config.decode('utf-8')
+ if training_config is None:
+ logging.warning('No training configuration found in the save file, so '
+ 'the model was *not* compiled. Compile it manually.')
+ return model
+- training_config = json_utils.decode(training_config.decode('utf-8'))
++ training_config = json_utils.decode(training_config)
+
+ # Compile model.
+ model.compile(**saving_utils.compile_args_from_training_config(
+@@ -661,11 +663,15 @@ def load_weights_from_hdf5_group(f, layers):
+ and weights file.
+ """
+ if 'keras_version' in f.attrs:
+- original_keras_version = f.attrs['keras_version'].decode('utf8')
++ original_keras_version = f.attrs['keras_version']
++ if hasattr(original_keras_version, 'decode'):
++ original_keras_version = original_keras_version.decode('utf8')
+ else:
+ original_keras_version = '1'
+ if 'backend' in f.attrs:
+- original_backend = f.attrs['backend'].decode('utf8')
++ original_backend = f.attrs['backend']
++ if hasattr(original_backend, 'decode'):
++ original_backend = original_backend.decode('utf8')
+ else:
+ original_backend = None
+
+@@ -732,11 +738,14 @@ def load_weights_from_hdf5_group_by_name(
+ and weights file and skip_match=False.
+ """
+ if 'keras_version' in f.attrs:
+- original_keras_version = f.attrs['keras_version'].decode('utf8')
++ original_keras_version = f.attrs['keras_version']
++ if hasattr(original_keras_version, 'decode'):
++ original_keras_version = original_keras_version.decode('utf8')
+ else:
+ original_keras_version = '1'
+ if 'backend' in f.attrs:
+- original_backend = f.attrs['backend'].decode('utf8')
++ if hasattr(original_backend, 'decode'):
++ original_backend = original_backend.decode('utf8')
+ else:
+ original_backend = None
+
+
+From e25fe38db749897d91bce7a4aac36a83d6b38966 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Wed, 9 Dec 2020 19:21:35 +0000
+Subject: [PATCH 04/12] Other fixes
+
+---
+ tensorflow/python/keras/saving/hdf5_format.py | 7 +++++--
+ 1 file changed, 5 insertions(+), 2 deletions(-)
+
+diff --git a/tensorflow/python/keras/saving/hdf5_format.py b/tensorflow/python/keras/saving/hdf5_format.py
+index 509caf0ab3351..edf6cc1cf8dc9 100644
+--- a/tensorflow/python/keras/saving/hdf5_format.py
++++ b/tensorflow/python/keras/saving/hdf5_format.py
+@@ -744,6 +744,7 @@ def load_weights_from_hdf5_group_by_name(
+ else:
+ original_keras_version = '1'
+ if 'backend' in f.attrs:
++ original_backend = f.attrs['backend']
+ if hasattr(original_backend, 'decode'):
+ original_backend = original_backend.decode('utf8')
+ else:
+@@ -860,13 +861,15 @@ def load_attributes_from_hdf5_group(group, name):
+ data: Attributes data.
+ """
+ if name in group.attrs:
+- data = [n.decode('utf8') for n in group.attrs[name]]
++ data = [n.decode('utf8') if hasattr(n,"decode") else n
++ for n in group.attrs[name]]
+ else:
+ data = []
+ chunk_id = 0
+ while '%s%d' % (name, chunk_id) in group.attrs:
+ data.extend(
+- [n.decode('utf8') for n in group.attrs['%s%d' % (name, chunk_id)]])
++ [n.decode('utf8') if hasattr(n,"decode") else n
++ for n in group.attrs['%s%d' % (name, chunk_id)]])
+ chunk_id += 1
+ return data
+
+
+From b539145fc87fc10a0a4cfaa94f941b28dd2c7057 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Wed, 9 Dec 2020 20:20:40 +0000
+Subject: [PATCH 05/12] Change the name for Win
+
+---
+ tensorflow/python/keras/saving/save_test.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+index de6180092f560..f48edc3a513ad 100644
+--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
++++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+@@ -22,6 +22,7 @@
+ from __future__ import division
+ from __future__ import print_function
+
++import os
+ import tempfile
+
+ from absl import app
+@@ -79,11 +80,12 @@ def app_main(argv):
+ if FLAGS.save_model_path:
+ save_model_path = FLAGS.save_model_path
+ else:
+- save_model_path = tempfile.mktemp(suffix='.saved_model')
++ fd, save_model_path = tempfile.mkstemp(suffix='.saved_model')
+ save_options = tf.saved_model.SaveOptions(save_debug_info=show_debug_info)
+ tf.saved_model.save(
+ create_module_fn(), save_model_path, options=save_options)
+ logging.info('Saved model to: %s', save_model_path)
++ os.close(fd)
+ mlir = pywrap_mlir.experimental_convert_saved_model_to_mlir(
+ save_model_path, ','.join(exported_names), show_debug_info)
+ # We don't strictly need this, but it serves as a handy sanity check
+
+From 594eacfa18a7f83d1957dcf1c111d8c79b08c734 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Thu, 10 Dec 2020 04:56:00 +0100
+Subject: [PATCH 09/12] Fix
+
+---
+ .../compiler/mlir/tensorflow/tests/tf_saved_model/common.py | 4 +---
+ 1 file changed, 1 insertion(+), 3 deletions(-)
+
+diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+index f48edc3a513ad..47f6b88125217 100644
+--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
++++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+@@ -22,7 +22,6 @@
+ from __future__ import division
+ from __future__ import print_function
+
+-import os
+ import tempfile
+
+ from absl import app
+@@ -80,12 +79,11 @@ def app_main(argv):
+ if FLAGS.save_model_path:
+ save_model_path = FLAGS.save_model_path
+ else:
+- fd, save_model_path = tempfile.mkstemp(suffix='.saved_model')
++ save_model_path = tempfile.mkdtemp(suffix='.saved_model')
+ save_options = tf.saved_model.SaveOptions(save_debug_info=show_debug_info)
+ tf.saved_model.save(
+ create_module_fn(), save_model_path, options=save_options)
+ logging.info('Saved model to: %s', save_model_path)
+- os.close(fd)
+ mlir = pywrap_mlir.experimental_convert_saved_model_to_mlir(
+ save_model_path, ','.join(exported_names), show_debug_info)
+ # We don't strictly need this, but it serves as a handy sanity check
+
+From 59623c8a8c90ca6de6a7f0554d11b6d71deae6ad Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Thu, 10 Dec 2020 11:09:13 +0100
+Subject: [PATCH 10/12] Try with byte
+
+---
+ .../compiler/mlir/tensorflow/tests/tf_saved_model/common.py | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+index 47f6b88125217..5989509bc30e8 100644
+--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
++++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+@@ -79,7 +79,7 @@ def app_main(argv):
+ if FLAGS.save_model_path:
+ save_model_path = FLAGS.save_model_path
+ else:
+- save_model_path = tempfile.mkdtemp(suffix='.saved_model')
++ save_model_path = tempfile.mkdtemp(suffix=b'.saved_model')
+ save_options = tf.saved_model.SaveOptions(save_debug_info=show_debug_info)
+ tf.saved_model.save(
+ create_module_fn(), save_model_path, options=save_options)
+
+From 12fa0161f9d8584ba68bb266127b52939d6092d1 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Thu, 10 Dec 2020 11:21:59 +0000
+Subject: [PATCH 11/12] print debug info
+
+---
+ .../compiler/mlir/tensorflow/tests/tf_saved_model/common.py | 2 +-
+ tensorflow/python/pywrap_mlir.py | 4 ++++
+ 2 files changed, 5 insertions(+), 1 deletion(-)
+
+diff --git a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+index 5989509bc30e8..47f6b88125217 100644
+--- a/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
++++ b/tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/common.py
+@@ -79,7 +79,7 @@ def app_main(argv):
+ if FLAGS.save_model_path:
+ save_model_path = FLAGS.save_model_path
+ else:
+- save_model_path = tempfile.mkdtemp(suffix=b'.saved_model')
++ save_model_path = tempfile.mkdtemp(suffix='.saved_model')
+ save_options = tf.saved_model.SaveOptions(save_debug_info=show_debug_info)
+ tf.saved_model.save(
+ create_module_fn(), save_model_path, options=save_options)
+diff --git a/tensorflow/python/pywrap_mlir.py b/tensorflow/python/pywrap_mlir.py
+index 6db68f0e581eb..098cdc1108b2b 100644
+--- a/tensorflow/python/pywrap_mlir.py
++++ b/tensorflow/python/pywrap_mlir.py
+@@ -37,6 +37,10 @@ def import_function(concrete_function, pass_pipeline):
+
+ def experimental_convert_saved_model_to_mlir(saved_model_path, exported_names,
+ show_debug_info):
++ print(saved_model_path)
++ print(str(saved_model_path).encode('utf-8'))
++ print(exported_names)
++ print(str(exported_names).encode('utf-8'))
+ return ExperimentalConvertSavedModelToMlir(
+ str(saved_model_path).encode('utf-8'),
+ str(exported_names).encode('utf-8'), show_debug_info)
+
+From 928116bb70f07e9197e87db0d22866bc43ad8ef9 Mon Sep 17 00:00:00 2001
+From: bhack <bhack at users.noreply.github.com>
+Date: Thu, 10 Dec 2020 13:04:08 +0000
+Subject: [PATCH 12/12] Remove debug print
+
+---
+ tensorflow/python/pywrap_mlir.py | 4 ----
+ 1 file changed, 4 deletions(-)
+
+diff --git a/tensorflow/python/pywrap_mlir.py b/tensorflow/python/pywrap_mlir.py
+index 098cdc1108b2b..6db68f0e581eb 100644
+--- a/tensorflow/python/pywrap_mlir.py
++++ b/tensorflow/python/pywrap_mlir.py
+@@ -37,10 +37,6 @@ def import_function(concrete_function, pass_pipeline):
+
+ def experimental_convert_saved_model_to_mlir(saved_model_path, exported_names,
+ show_debug_info):
+- print(saved_model_path)
+- print(str(saved_model_path).encode('utf-8'))
+- print(exported_names)
+- print(str(exported_names).encode('utf-8'))
+ return ExperimentalConvertSavedModelToMlir(
+ str(saved_model_path).encode('utf-8'),
+ str(exported_names).encode('utf-8'), show_debug_info)
Copied: tensorflow/repos/community-testing-x86_64/test.py (from rev 919637, tensorflow/trunk/test.py)
===================================================================
--- community-testing-x86_64/test.py (rev 0)
+++ community-testing-x86_64/test.py 2021-04-17 08:54:59 UTC (rev 919638)
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+
+import tensorflow as tf
+
+with tf.device("/GPU:0"):
+ a = tf.random.normal([1, 2])
+
+
+def temp(x):
+ return tf.shape(x)[0]
+
+tf.autograph.to_graph(temp)
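A brief usage sketch for the smoke test above, assuming one of the CUDA-enabled python-tensorflow packages is installed (the script pins /GPU:0):
  $ python test.py   # places a small random tensor on /GPU:0 and traces temp() with autograph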