Commit 52df2309 authored by Adam J. Stewart, committed by GitHub

Add TensorFlow 2.1.0 (#14429)

* Add TensorFlow 2.1.0

* kafka and ignite are no longer supported
Parent: b9820159
var/spack/repos/builtin/packages/py-tensorflow/package.py

@@ -13,13 +13,13 @@ class PyTensorflow(Package, CudaPackage):
     """
 
     homepage = "https://www.tensorflow.org"
-    url = "https://github.com/tensorflow/tensorflow/archive/v2.0.0.tar.gz"
+    url = "https://github.com/tensorflow/tensorflow/archive/v2.1.0.tar.gz"
 
     maintainers = ['adamjstewart']
     import_modules = ['tensorflow']
 
-    version('2.1.0-rc0', sha256='674cc90223f1d6b7fa2969e82636a630ce453e48a9dec39d73d6dba2fd3fd243')
-    version('2.0.0', sha256='49b5f0495cd681cbcb5296a4476853d4aea19a43bdd9f179c928a977308a0617', preferred=True)
+    version('2.1.0', sha256='638e541a4981f52c69da4a311815f1e7989bf1d67a41d204511966e1daed14f7')
+    version('2.0.0', sha256='49b5f0495cd681cbcb5296a4476853d4aea19a43bdd9f179c928a977308a0617')
     version('1.15.0', sha256='a5d49c00a175a61da7431a9b289747d62339be9cf37600330ad63b611f7f5dc9')
     version('1.14.0', sha256='aa2a6a1daafa3af66807cfe0bc77bfe1144a9a53df9a96bab52e3e575b3047ed')
     version('1.13.2', sha256='abe3bf0c47845a628b7df4c57646f41a10ee70f914f1b018a5c761be75e1f1a9')
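
A note on the version table: with preferred=True removed from 2.0.0, Spack's default choice moves to the newest listed release, 2.1.0. The snippet below is a small standalone sketch (not part of the package) showing how the recorded sha256 for the 2.1.0 tarball can be re-derived from the url pattern above; in practice `spack checksum` is the usual way to generate these entries.

    # Standalone sketch (not Spack code): re-derive the sha256 recorded for
    # the 2.1.0 tarball referenced by `url` above.
    import hashlib
    import urllib.request

    URL = "https://github.com/tensorflow/tensorflow/archive/v2.1.0.tar.gz"
    EXPECTED = "638e541a4981f52c69da4a311815f1e7989bf1d67a41d204511966e1daed14f7"

    digest = hashlib.sha256()
    with urllib.request.urlopen(URL) as resp:
        for chunk in iter(lambda: resp.read(1 << 20), b""):
            digest.update(chunk)

    print("OK" if digest.hexdigest() == EXPECTED else "checksum mismatch")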
@@ -58,9 +58,7 @@ class PyTensorflow(Package, CudaPackage):
     variant('mkl', default=False, description='Build with MKL support')
     variant('jemalloc', default=False, description='Build with jemalloc as malloc support')
-    # FIXME: ~gcp does not build for 2.0.0
-    # See https://github.com/tensorflow/tensorflow/issues/34878
-    variant('gcp', default=True, description='Build with Google Cloud Platform support')
+    variant('gcp', default=False, description='Build with Google Cloud Platform support')
     variant('hdfs', default=False, description='Build with Hadoop File System support')
     variant('aws', default=False, description='Build with Amazon AWS Platform support')
     variant('kafka', default=False, description='Build with Apache Kafka Platform support')
@@ -163,6 +161,8 @@ class PyTensorflow(Package, CudaPackage):
     depends_on('py-functools32@3.2.3:', type=('build', 'run'), when='@1.15: ^python@:2')
     depends_on('py-six@1.12.0:', type=('build', 'run'), when='@2.1:')
     depends_on('py-six@1.10.0:', type=('build', 'run'), when='@:2.0')
+    depends_on('py-scipy@1.2.2', type=('build', 'run'), when='@2.1: ^python@:2')
+    depends_on('py-scipy@1.4.1', type=('build', 'run'), when='@2.1: ^python@3:')
     depends_on('py-grpcio@1.8.6:', type=('build', 'run'), when='@1.6:1.7')
     if sys.byteorder == 'little':
         # Only builds correctly on little-endian machines
@@ -194,8 +194,8 @@ class PyTensorflow(Package, CudaPackage):
     conflicts('+gcp', when='@:0.8')
     conflicts('+hdfs', when='@:0.10')
     conflicts('+aws', when='@:1.3')
-    conflicts('+kafka', when='@:1.5')
-    conflicts('+ignite', when='@:1.11')
+    conflicts('+kafka', when='@:1.5,2.1:')
+    conflicts('+ignite', when='@:1.11,2.1:')
     conflicts('+xla', when='@:0')
     conflicts('+gdr', when='@:1.3')
     conflicts('+verbs', when='@:1.1')
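
The amended conflicts follow from the commit message: the kafka and ignite contrib configs were dropped upstream in TensorFlow 2.1, so the variants are now blocked both for the old releases that never had them and for 2.1 onward. Below is a rough, illustrative Python sketch of the range union; it is not Spack's own version logic, just plain tuple comparisons.

    # Illustrative only: approximate the Spack constraint '@:1.5,2.1:' with
    # plain version tuples. '@:1.5' covers everything below 1.6 and '2.1:'
    # everything from 2.1 on, so +kafka stays usable only for 1.6-2.0.
    def conflicts_with_kafka(version):
        return version < (1, 6) or version >= (2, 1)

    assert conflicts_with_kafka((1, 5, 0))       # predates the kafka option
    assert not conflicts_with_kafka((2, 0, 0))   # 2.0 still ships the contrib code
    assert conflicts_with_kafka((2, 1, 0))       # removed upstream in 2.1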
@@ -650,15 +650,16 @@ def build(self, spec, prefix):
         if '~hdfs' in spec:
             args.append('--config=nohdfs')
-        if '~ignite' in spec:
-            args.append('--config=noignite')
-        if '~kafka' in spec:
-            args.append('--config=nokafka')
         if '~nccl' in spec:
             args.append('--config=nonccl')
 
+        if spec.satisfies('@1.12.1:2.0'):
+            if '~ignite' in spec:
+                args.append('--config=noignite')
+            if '~kafka' in spec:
+                args.append('--config=nokafka')
+
         if spec.satisfies('@1.12.1,1.14:'):
             if '+numa' in spec:
                 args.append('--config=numa')
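
Because the noignite/nokafka configure flags no longer exist in 2.1, the rearranged block above only emits them for the 1.12.1-2.0 range. The toy sketch below models that flag selection in plain Python; it is not the package's build() method, and spec.satisfies('@1.12.1:2.0') is approximated with tuple comparisons.

    # Toy model of the gated flag logic above.
    def contrib_flags(version, ignite=False, kafka=False):
        args = []
        if (1, 12, 1) <= version < (2, 1):   # roughly '@1.12.1:2.0'
            if not ignite:
                args.append('--config=noignite')
            if not kafka:
                args.append('--config=nokafka')
        return args

    print(contrib_flags((1, 15, 0)))  # ['--config=noignite', '--config=nokafka']
    print(contrib_flags((2, 1, 0)))   # [] -- the configs are gone in 2.1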
@@ -669,10 +670,6 @@ def build(self, spec, prefix):
         if spec.satisfies('%gcc@5:'):
             args.append('--cxxopt=-D_GLIBCXX_USE_CXX11_ABI=0')
 
-        if spec.satisfies('@2.1:'):
-            # TODO: is this needed?
-            args.append('--define=tensorflow_mkldnn_contraction_kernel=0')
-
         args.append('//tensorflow/tools/pip_package:build_pip_package')
 
         bazel(*args)