diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml new file mode 100755 index 0000000000000000000000000000000000000000..3adf3ac8f18b5a25563fd9ce7827997b2103fde9 --- /dev/null +++ b/.azure-pipelines/azure-pipelines-linux.yml @@ -0,0 +1,48 @@ +# This file was generated automatically from conda-smithy. To update this configuration, +# update the conda-forge.yml and/or the recipe/meta.yaml. +# -*- mode: yaml -*- + +jobs: +- job: linux + pool: + vmImage: ubuntu-latest + strategy: + matrix: + linux_64_python3.7.____cpython: + CONFIG: linux_64_python3.7.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-comp7 + linux_64_python3.8.____cpython: + CONFIG: linux_64_python3.8.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-comp7 + linux_64_python3.9.____cpython: + CONFIG: linux_64_python3.9.____cpython + UPLOAD_PACKAGES: 'True' + DOCKER_IMAGE: quay.io/condaforge/linux-anvil-comp7 + timeoutInMinutes: 360 + + steps: + - script: | + rm -rf /opt/ghc + df -h + displayName: Manage disk space + + # configure qemu binfmt-misc running. This allows us to run docker containers + # embedded qemu-static + - script: | + docker run --rm --privileged multiarch/qemu-user-static:register --reset --credential yes + ls /proc/sys/fs/binfmt_misc/ + condition: not(startsWith(variables['CONFIG'], 'linux_64')) + displayName: Configure binfmt_misc + + - script: | + export CI=azure + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + .scripts/run_docker_build.sh + displayName: Run docker build + env: + BINSTAR_TOKEN: $(BINSTAR_TOKEN) + FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.azure-pipelines/azure-pipelines-osx.yml b/.azure-pipelines/azure-pipelines-osx.yml new file mode 100755 index 0000000000000000000000000000000000000000..83e7ee9fc6eaf1d453aab23f6feab04db919d7f2 --- /dev/null +++ b/.azure-pipelines/azure-pipelines-osx.yml @@ -0,0 +1,40 @@ +# This file was generated automatically from conda-smithy. To update this configuration, +# update the conda-forge.yml and/or the recipe/meta.yaml. +# -*- mode: yaml -*- + +jobs: +- job: osx + pool: + vmImage: macOS-10.15 + strategy: + matrix: + osx_64_python3.7.____cpython: + CONFIG: osx_64_python3.7.____cpython + UPLOAD_PACKAGES: 'True' + osx_64_python3.8.____cpython: + CONFIG: osx_64_python3.8.____cpython + UPLOAD_PACKAGES: 'True' + osx_64_python3.9.____cpython: + CONFIG: osx_64_python3.9.____cpython + UPLOAD_PACKAGES: 'True' + osx_arm64_python3.8.____cpython: + CONFIG: osx_arm64_python3.8.____cpython + UPLOAD_PACKAGES: 'True' + osx_arm64_python3.9.____cpython: + CONFIG: osx_arm64_python3.9.____cpython + UPLOAD_PACKAGES: 'True' + timeoutInMinutes: 360 + + steps: + # TODO: Fast finish on azure pipelines? 
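# Illustrative sketch of what the "Run docker build" step above amounts to, and how a
# single matrix entry can be reproduced from a local checkout. CONFIG must match a name
# under .ci_support/; UPLOAD_PACKAGES=False skips the anaconda.org upload, so the
# BINSTAR/FEEDSTOCK token variables can stay unset. DOCKER_IMAGE comes from the matrix
# in CI; locally, run_docker_build.sh falls back to reading docker_image from the
# chosen config yaml.
export CI=azure
export CONFIG=linux_64_python3.9.____cpython
export UPLOAD_PACKAGES=False
export GIT_BRANCH=master                    # Azure sets this from BUILD_SOURCEBRANCHNAME
export FEEDSTOCK_NAME=$(basename "${PWD}")  # the pipeline derives this from BUILD_REPOSITORY_NAME
.scripts/run_docker_build.sh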
+ - script: | + export CI=azure + export OSX_FORCE_SDK_DOWNLOAD="1" + export GIT_BRANCH=$BUILD_SOURCEBRANCHNAME + export FEEDSTOCK_NAME=$(basename ${BUILD_REPOSITORY_NAME}) + ./.scripts/run_osx_build.sh + displayName: Run OSX build + env: + BINSTAR_TOKEN: $(BINSTAR_TOKEN) + FEEDSTOCK_TOKEN: $(FEEDSTOCK_TOKEN) + STAGING_BINSTAR_TOKEN: $(STAGING_BINSTAR_TOKEN) \ No newline at end of file diff --git a/.ci_support/linux_64_python3.7.____cpython.yaml b/.ci_support/linux_64_python3.7.____cpython.yaml index cb66d5c306271beaa0f76e5010583d1f7e6ed4cd..e4e6e8606f6c2fc075c71cdf6c3c6966e7d3d468 100644 --- a/.ci_support/linux_64_python3.7.____cpython.yaml +++ b/.ci_support/linux_64_python3.7.____cpython.yaml @@ -1,24 +1,38 @@ c_compiler: - gcc c_compiler_version: -- '7' +- '9' cdt_name: - cos6 channel_sources: -- conda-forge,defaults +- conda-forge channel_targets: - conda-forge main cxx_compiler: - gxx cxx_compiler_version: -- '7' +- '9' docker_image: - quay.io/condaforge/linux-anvil-comp7 +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' libcurl: - '7' libpng: - '1.6' +libprotobuf: +- '3.16' +openssl: +- 1.1.1 pin_run_as_build: + jpeg: + max_pin: x libcurl: max_pin: x libpng: @@ -26,14 +40,22 @@ pin_run_as_build: python: min_pin: x.x max_pin: x.x + sqlite: + max_pin: x zlib: max_pin: x.x python: - 3.7.* *_cpython +snappy: +- '1' +sqlite: +- '3' target_platform: - linux-64 zip_keys: - - c_compiler_version - cxx_compiler_version +- - cdt_name + - docker_image zlib: - '1.2' diff --git a/.ci_support/linux_64_python3.8.____cpython.yaml b/.ci_support/linux_64_python3.8.____cpython.yaml index d0c5e866ce60e698ab71a64443329847b44bb8d4..4a587097616d083c9b1f6dfeb3e47d333446abe7 100644 --- a/.ci_support/linux_64_python3.8.____cpython.yaml +++ b/.ci_support/linux_64_python3.8.____cpython.yaml @@ -1,24 +1,38 @@ c_compiler: - gcc c_compiler_version: -- '7' +- '9' cdt_name: - cos6 channel_sources: -- conda-forge,defaults +- conda-forge channel_targets: - conda-forge main cxx_compiler: - gxx cxx_compiler_version: -- '7' +- '9' docker_image: - quay.io/condaforge/linux-anvil-comp7 +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' libcurl: - '7' libpng: - '1.6' +libprotobuf: +- '3.16' +openssl: +- 1.1.1 pin_run_as_build: + jpeg: + max_pin: x libcurl: max_pin: x libpng: @@ -26,14 +40,22 @@ pin_run_as_build: python: min_pin: x.x max_pin: x.x + sqlite: + max_pin: x zlib: max_pin: x.x python: - 3.8.* *_cpython +snappy: +- '1' +sqlite: +- '3' target_platform: - linux-64 zip_keys: - - c_compiler_version - cxx_compiler_version +- - cdt_name + - docker_image zlib: - '1.2' diff --git a/.ci_support/linux_64_python3.9.____cpython.yaml b/.ci_support/linux_64_python3.9.____cpython.yaml new file mode 100644 index 0000000000000000000000000000000000000000..880e9bc3f1bb1907bc4ea4816f5d31f403662811 --- /dev/null +++ b/.ci_support/linux_64_python3.9.____cpython.yaml @@ -0,0 +1,61 @@ +c_compiler: +- gcc +c_compiler_version: +- '9' +cdt_name: +- cos6 +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +cxx_compiler: +- gxx +cxx_compiler_version: +- '9' +docker_image: +- quay.io/condaforge/linux-anvil-comp7 +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' +openssl: +- 1.1.1 +pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x + python: + min_pin: x.x + max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x +python: +- 3.9.* *_cpython +snappy: +- '1' +sqlite: +- '3' 
+target_platform: +- linux-64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +- - cdt_name + - docker_image +zlib: +- '1.2' diff --git a/.ci_support/osx_64_python3.7.____cpython.yaml b/.ci_support/osx_64_python3.7.____cpython.yaml index 163d3ce627616e4c07d4080a359a0a36a791915c..0acf9b6ba9f51bdd303045fde7a7dfd87b802a5d 100644 --- a/.ci_support/osx_64_python3.7.____cpython.yaml +++ b/.ci_support/osx_64_python3.7.____cpython.yaml @@ -1,16 +1,61 @@ MACOSX_DEPLOYMENT_TARGET: - '10.9' +MACOSX_SDK_VERSION: +- '10.12' +c_compiler: +- clang +c_compiler_version: +- '11' channel_sources: -- conda-forge,defaults +- conda-forge channel_targets: - conda-forge main +cxx_compiler: +- clangxx +cxx_compiler_version: +- '11' +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' macos_machine: - x86_64-apple-darwin13.4.0 +openssl: +- 1.1.1 pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x python: min_pin: x.x max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x python: - 3.7.* *_cpython +snappy: +- '1' +sqlite: +- '3' target_platform: - osx-64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +zlib: +- '1.2' diff --git a/.ci_support/osx_64_python3.8.____cpython.yaml b/.ci_support/osx_64_python3.8.____cpython.yaml index 63cee21d973393d4e192b3ac1c94986422e63eb8..80134125e52e233b2fccdb955baedce8a65fb096 100644 --- a/.ci_support/osx_64_python3.8.____cpython.yaml +++ b/.ci_support/osx_64_python3.8.____cpython.yaml @@ -1,16 +1,61 @@ MACOSX_DEPLOYMENT_TARGET: - '10.9' +MACOSX_SDK_VERSION: +- '10.12' +c_compiler: +- clang +c_compiler_version: +- '11' channel_sources: -- conda-forge,defaults +- conda-forge channel_targets: - conda-forge main +cxx_compiler: +- clangxx +cxx_compiler_version: +- '11' +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' macos_machine: - x86_64-apple-darwin13.4.0 +openssl: +- 1.1.1 pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x python: min_pin: x.x max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x python: - 3.8.* *_cpython +snappy: +- '1' +sqlite: +- '3' target_platform: - osx-64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +zlib: +- '1.2' diff --git a/.ci_support/osx_64_python3.9.____cpython.yaml b/.ci_support/osx_64_python3.9.____cpython.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8b8632ec77ac1bf609b8030d9997a0d6020bdda2 --- /dev/null +++ b/.ci_support/osx_64_python3.9.____cpython.yaml @@ -0,0 +1,61 @@ +MACOSX_DEPLOYMENT_TARGET: +- '10.9' +MACOSX_SDK_VERSION: +- '10.12' +c_compiler: +- clang +c_compiler_version: +- '11' +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +cxx_compiler: +- clangxx +cxx_compiler_version: +- '11' +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' +macos_machine: +- x86_64-apple-darwin13.4.0 +openssl: +- 1.1.1 +pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x + python: + min_pin: x.x + max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x +python: +- 3.9.* *_cpython +snappy: +- '1' +sqlite: +- '3' +target_platform: +- osx-64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +zlib: +- '1.2' diff --git a/.ci_support/osx_arm64_python3.8.____cpython.yaml b/.ci_support/osx_arm64_python3.8.____cpython.yaml new file 
mode 100644 index 0000000000000000000000000000000000000000..3d406fb1131badff4f33590e68baae65a76708e1 --- /dev/null +++ b/.ci_support/osx_arm64_python3.8.____cpython.yaml @@ -0,0 +1,59 @@ +MACOSX_DEPLOYMENT_TARGET: +- '11.0' +c_compiler: +- clang +c_compiler_version: +- '11' +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +cxx_compiler: +- clangxx +cxx_compiler_version: +- '11' +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' +macos_machine: +- arm64-apple-darwin20.0.0 +openssl: +- 1.1.1 +pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x + python: + min_pin: x.x + max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x +python: +- 3.8.* *_cpython +snappy: +- '1' +sqlite: +- '3' +target_platform: +- osx-arm64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +zlib: +- '1.2' diff --git a/.ci_support/osx_arm64_python3.9.____cpython.yaml b/.ci_support/osx_arm64_python3.9.____cpython.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1de2cfd845c1f3bf3ad96d17354d8ac36608e3ab --- /dev/null +++ b/.ci_support/osx_arm64_python3.9.____cpython.yaml @@ -0,0 +1,59 @@ +MACOSX_DEPLOYMENT_TARGET: +- '11.0' +c_compiler: +- clang +c_compiler_version: +- '11' +channel_sources: +- conda-forge +channel_targets: +- conda-forge main +cxx_compiler: +- clangxx +cxx_compiler_version: +- '11' +giflib: +- '5.2' +grpc_cpp: +- '1.39' +icu: +- '68' +jpeg: +- '9' +libcurl: +- '7' +libpng: +- '1.6' +libprotobuf: +- '3.16' +macos_machine: +- arm64-apple-darwin20.0.0 +openssl: +- 1.1.1 +pin_run_as_build: + jpeg: + max_pin: x + libcurl: + max_pin: x + libpng: + max_pin: x.x + python: + min_pin: x.x + max_pin: x.x + sqlite: + max_pin: x + zlib: + max_pin: x.x +python: +- 3.9.* *_cpython +snappy: +- '1' +sqlite: +- '3' +target_platform: +- osx-arm64 +zip_keys: +- - c_compiler_version + - cxx_compiler_version +zlib: +- '1.2' diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..6ad461b80ca0b11489bee9e111e3caaa4e10d167 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,24 @@ +# This file was generated automatically from conda-smithy. To update this configuration, +# update the conda-forge.yml and/or the recipe/meta.yaml. +# -*- mode: yaml -*- + +version: 2 + +jobs: + build: + working_directory: ~/test + machine: true + steps: + - run: + # The Circle-CI build should not be active, but if this is not true for some reason, do a fast finish. 
+ command: exit 0 + +workflows: + version: 2 + build_and_test: + jobs: + - build: + filters: + branches: + ignore: + - /.*/ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..9060b272b25719c02fac7599ddf8a2c30d192016 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,25 @@ +* text=auto + +*.patch binary +*.diff binary +meta.yaml text eol=lf +build.sh text eol=lf +bld.bat text eol=crlf + +# github helper pieces to make some files not show up in diffs automatically +.azure-pipelines/* linguist-generated=true +.circleci/* linguist-generated=true +.drone/* linguist-generated=true +.drone.yml linguist-generated=true +.github/* linguist-generated=true +.travis/* linguist-generated=true +.appveyor.yml linguist-generated=true +.gitattributes linguist-generated=true +.gitignore linguist-generated=true +.travis.yml linguist-generated=true +.scripts/* linguist-generated=true +LICENSE.txt linguist-generated=true +README.md linguist-generated=true +azure-pipelines.yml linguist-generated=true +build-locally.py linguist-generated=true +shippable.yml linguist-generated=true diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000000000000000000000000000000000000..edbaa153225671487a1c9c7496fd8a1cb564c0df --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @farhantejani @ghego @gilbertfrancois @h-vetinari @hajapy @jschueller @njzjz @waitingkuo @xhochy \ No newline at end of file diff --git a/.github/workflows/automerge.yml b/.github/workflows/automerge.yml new file mode 100644 index 0000000000000000000000000000000000000000..e8e59028359ba5125629facee09fee32247aa135 --- /dev/null +++ b/.github/workflows/automerge.yml @@ -0,0 +1,18 @@ +on: + status: {} + check_suite: + types: + - completed + +jobs: + automerge-action: + runs-on: ubuntu-latest + name: automerge + steps: + - name: checkout + uses: actions/checkout@v2 + - name: automerge-action + id: automerge-action + uses: conda-forge/automerge-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/webservices.yml b/.github/workflows/webservices.yml new file mode 100644 index 0000000000000000000000000000000000000000..78f51e6054078460fe249292a137e7f09b623ac9 --- /dev/null +++ b/.github/workflows/webservices.yml @@ -0,0 +1,12 @@ +on: repository_dispatch + +jobs: + webservices: + runs-on: ubuntu-latest + name: webservices + steps: + - name: webservices + id: webservices + uses: conda-forge/webservices-dispatch-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh index aa9727b9f1413b1555a0b2e8baea35d41847c10a..71a068603a584d90f9eeb529854dffbd36c168c6 100755 --- a/.scripts/build_steps.sh +++ b/.scripts/build_steps.sh @@ -6,8 +6,15 @@ # benefit from the improvement. 
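# Quick way to confirm the linguist-generated markers added in .gitattributes above
# (they make GitHub collapse the regenerated files in pull-request diffs). Sketch,
# run from a feedstock checkout:
git check-attr linguist-generated -- .scripts/build_steps.sh azure-pipelines.yml README.md
# expected output per path: "<path>: linguist-generated: true"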
set -xeuo pipefail -export PYTHONUNBUFFERED=1 export FEEDSTOCK_ROOT="${FEEDSTOCK_ROOT:-/home/conda/feedstock_root}" +source ${FEEDSTOCK_ROOT}/.scripts/logging_utils.sh + + +( endgroup "Start Docker" ) 2> /dev/null + +( startgroup "Configuring conda" ) 2> /dev/null + +export PYTHONUNBUFFERED=1 export RECIPE_ROOT="${RECIPE_ROOT:-/home/conda/recipe_root}" export CI_SUPPORT="${FEEDSTOCK_ROOT}/.ci_support" export CONFIG_FILE="${CI_SUPPORT}/${CONFIG}.yaml" @@ -18,8 +25,9 @@ conda-build: root-dir: ${FEEDSTOCK_ROOT}/build_artifacts CONDARC +BUILD_CMD=build -conda install --yes --quiet "conda-forge-ci-setup=3" conda-build pip -c conda-forge +conda install --yes --quiet "conda-forge-ci-setup=3" conda-build pip ${GET_BOA:-} -c conda-forge # set up the condarc setup_conda_rc "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" @@ -30,6 +38,8 @@ source run_conda_forge_build_setup make_build_number "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" +( endgroup "Configuring conda" ) 2> /dev/null + if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then if [[ "x${BUILD_OUTPUT_ID:-}" != "x" ]]; then EXTRA_CB_OPTIONS="${EXTRA_CB_OPTIONS:-} --output-id ${BUILD_OUTPUT_ID}" @@ -37,17 +47,28 @@ if [[ "${BUILD_WITH_CONDA_DEBUG:-0}" == 1 ]]; then conda debug "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ ${EXTRA_CB_OPTIONS:-} \ --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" + # Drop into an interactive shell /bin/bash else - conda build "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ + conda $BUILD_CMD "${RECIPE_ROOT}" -m "${CI_SUPPORT}/${CONFIG}.yaml" \ --suppress-variables ${EXTRA_CB_OPTIONS:-} \ --clobber-file "${CI_SUPPORT}/clobber_${CONFIG}.yaml" + ( startgroup "Validating outputs" ) 2> /dev/null + validate_recipe_outputs "${FEEDSTOCK_NAME}" + ( endgroup "Validating outputs" ) 2> /dev/null + + ( startgroup "Uploading packages" ) 2> /dev/null + if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" "${FEEDSTOCK_ROOT}" "${RECIPE_ROOT}" "${CONFIG_FILE}" fi + + ( endgroup "Uploading packages" ) 2> /dev/null fi +( startgroup "Final checks" ) 2> /dev/null + touch "${FEEDSTOCK_ROOT}/build_artifacts/conda-forge-build-done-${CONFIG}" \ No newline at end of file diff --git a/.scripts/logging_utils.sh b/.scripts/logging_utils.sh new file mode 100644 index 0000000000000000000000000000000000000000..57bc95c2421b4508cf8ec99e9b31ed7994a1990c --- /dev/null +++ b/.scripts/logging_utils.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +# Provide a unified interface for the different logging +# utilities CI providers offer. If unavailable, provide +# a compatible fallback (e.g. bare `echo xxxxxx`). 
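# Usage sketch for the helpers defined below, matching the calls already added to
# build_steps.sh above; the provider is selected via ${CI}, and anything
# unrecognised falls back to a bare echo of the group title:
source .scripts/logging_utils.sh
export CI=azure
( startgroup "Configuring conda" ) 2> /dev/null   # emits "##[group]Configuring conda"
( endgroup "Configuring conda" ) 2> /dev/null     # emits "##[endgroup]"
unset CI
( startgroup "Configuring conda" ) 2> /dev/null   # fallback: plain "Configuring conda"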
+ +function startgroup { + # Start a foldable group of log lines + # Pass a single argument, quoted + case ${CI:-} in + azure ) + echo "##[group]$1";; + travis ) + echo "$1" + echo -en 'travis_fold:start:'"${1// /}"'\\r';; + github_actions ) + echo "::group::$1";; + * ) + echo "$1";; + esac +} 2> /dev/null + +function endgroup { + # End a foldable group of log lines + # Pass a single argument, quoted + + case ${CI:-} in + azure ) + echo "##[endgroup]";; + travis ) + echo -en 'travis_fold:end:'"${1// /}"'\\r';; + github_actions ) + echo "::endgroup::";; + esac +} 2> /dev/null diff --git a/.scripts/run_docker_build.sh b/.scripts/run_docker_build.sh index 87ba4db0460e397314518ae991b4aa3551cff0d2..244ccd1f252dac59a097fe0dea8015c6f9c95a90 100755 --- a/.scripts/run_docker_build.sh +++ b/.scripts/run_docker_build.sh @@ -5,6 +5,10 @@ # changes to this script, consider a proposal to conda-smithy so that other feedstocks can also # benefit from the improvement. +source .scripts/logging_utils.sh + +( startgroup "Configure Docker" ) 2> /dev/null + set -xeo pipefail THISDIR="$( cd "$( dirname "$0" )" >/dev/null && pwd )" @@ -45,10 +49,14 @@ fi if [ -z "${DOCKER_IMAGE}" ]; then SHYAML_INSTALLED="$(shyaml -h || echo NO)" if [ "${SHYAML_INSTALLED}" == "NO" ]; then - echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Falling back to condaforge/linux-anvil-comp7" - DOCKER_IMAGE="condaforge/linux-anvil-comp7" + echo "WARNING: DOCKER_IMAGE variable not set and shyaml not installed. Trying to parse with coreutils" + DOCKER_IMAGE=$(cat .ci_support/${CONFIG}.yaml | grep '^docker_image:$' -A 1 | tail -n 1 | cut -b 3-) + if [ "${DOCKER_IMAGE}" = "" ]; then + echo "No docker_image entry found in ${CONFIG}. Falling back to quay.io/condaforge/linux-anvil-comp7" + DOCKER_IMAGE="quay.io/condaforge/linux-anvil-comp7" + fi else - DOCKER_IMAGE="$(cat "${FEEDSTOCK_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 condaforge/linux-anvil-comp7 )" + DOCKER_IMAGE="$(cat "${FEEDSTOCK_ROOT}/.ci_support/${CONFIG}.yaml" | shyaml get-value docker_image.0 quay.io/condaforge/linux-anvil-comp7 )" fi fi @@ -62,10 +70,14 @@ if [ -z "${CI}" ]; then DOCKER_RUN_ARGS="-it ${DOCKER_RUN_ARGS}" fi +( endgroup "Configure Docker" ) 2> /dev/null + +( startgroup "Start Docker" ) 2> /dev/null + export UPLOAD_PACKAGES="${UPLOAD_PACKAGES:-True}" docker run ${DOCKER_RUN_ARGS} \ - -v "${RECIPE_ROOT}":/home/conda/recipe_root:rw,z \ - -v "${FEEDSTOCK_ROOT}":/home/conda/feedstock_root:rw,z \ + -v "${RECIPE_ROOT}":/home/conda/recipe_root:rw,z,delegated \ + -v "${FEEDSTOCK_ROOT}":/home/conda/feedstock_root:rw,z,delegated \ -e CONFIG \ -e HOST_USER_ID \ -e UPLOAD_PACKAGES \ @@ -84,4 +96,7 @@ docker run ${DOCKER_RUN_ARGS} \ /home/conda/feedstock_root/${PROVIDER_DIR}/build_steps.sh # verify that the end of the script was reached -test -f "$DONE_CANARY" \ No newline at end of file +test -f "$DONE_CANARY" + +# This closes the last group opened in `build_steps.sh` +( endgroup "Final checks" ) 2> /dev/null \ No newline at end of file diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh index c299b7925719dd913a88e3f19bfbf5335c32cb0d..96d02787b7b039fa6c106db1ca5fd0c9dd4763a6 100755 --- a/.scripts/run_osx_build.sh +++ b/.scripts/run_osx_build.sh @@ -1,57 +1,69 @@ #!/usr/bin/env bash -set -x +source .scripts/logging_utils.sh + +set -xe + +MINIFORGE_HOME=${MINIFORGE_HOME:-${HOME}/miniforge3} + +( startgroup "Installing a fresh version of Miniforge" ) 2> /dev/null -echo -e "\n\nInstalling a fresh version of Miniforge." 
-if [[ ${CI} == "travis" ]]; then - echo -en 'travis_fold:start:install_miniforge\\r' -fi MINIFORGE_URL="https://github.com/conda-forge/miniforge/releases/latest/download" MINIFORGE_FILE="Miniforge3-MacOSX-x86_64.sh" curl -L -O "${MINIFORGE_URL}/${MINIFORGE_FILE}" -bash $MINIFORGE_FILE -b -if [[ ${CI} == "travis" ]]; then - echo -en 'travis_fold:end:install_miniforge\\r' -fi +bash $MINIFORGE_FILE -b -p ${MINIFORGE_HOME} -echo -e "\n\nConfiguring conda." -if [[ ${CI} == "travis" ]]; then - echo -en 'travis_fold:start:configure_conda\\r' -fi +( endgroup "Installing a fresh version of Miniforge" ) 2> /dev/null + +( startgroup "Configuring conda" ) 2> /dev/null -source ${HOME}/miniforge3/etc/profile.d/conda.sh +BUILD_CMD=build + +source ${MINIFORGE_HOME}/etc/profile.d/conda.sh conda activate base echo -e "\n\nInstalling conda-forge-ci-setup=3 and conda-build." -conda install -n base --quiet --yes "conda-forge-ci-setup=3" conda-build pip +conda install -n base --quiet --yes "conda-forge-ci-setup=3" conda-build pip ${GET_BOA:-} echo -e "\n\nSetting up the condarc and mangling the compiler." setup_conda_rc ./ ./recipe ./.ci_support/${CONFIG}.yaml -mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml -echo -e "\n\nMangling homebrew in the CI to avoid conflicts." -/usr/bin/sudo mangle_homebrew -/usr/bin/sudo -k +if [[ "${CI:-}" != "" ]]; then + mangle_compiler ./ ./recipe .ci_support/${CONFIG}.yaml +fi + +if [[ "${CI:-}" != "" ]]; then + echo -e "\n\nMangling homebrew in the CI to avoid conflicts." + /usr/bin/sudo mangle_homebrew + /usr/bin/sudo -k +else + echo -e "\n\nNot mangling homebrew as we are not running in CI" +fi echo -e "\n\nRunning the build setup script." source run_conda_forge_build_setup -if [[ ${CI} == "travis" ]]; then - echo -en 'travis_fold:end:configure_conda\\r' -fi -set -e +( endgroup "Configuring conda" ) 2> /dev/null -echo -e "\n\nMaking the build clobber file and running the build." + +echo -e "\n\nMaking the build clobber file" make_build_number ./ ./recipe ./.ci_support/${CONFIG}.yaml -conda build ./recipe -m ./.ci_support/${CONFIG}.yaml --suppress-variables --clobber-file ./.ci_support/clobber_${CONFIG}.yaml ${EXTRA_CB_OPTIONS:-} +conda $BUILD_CMD ./recipe -m ./.ci_support/${CONFIG}.yaml --suppress-variables --clobber-file ./.ci_support/clobber_${CONFIG}.yaml ${EXTRA_CB_OPTIONS:-} +( startgroup "Validating outputs" ) 2> /dev/null + validate_recipe_outputs "${FEEDSTOCK_NAME}" +( endgroup "Validating outputs" ) 2> /dev/null + +( startgroup "Uploading packages" ) 2> /dev/null + if [[ "${UPLOAD_PACKAGES}" != "False" ]]; then - echo -e "\n\nUploading the packages." upload_package --validate --feedstock-name="${FEEDSTOCK_NAME}" ./ ./recipe ./.ci_support/${CONFIG}.yaml -fi \ No newline at end of file +fi + +( endgroup "Uploading packages" ) 2> /dev/null \ No newline at end of file diff --git a/LICENSE.txt b/LICENSE.txt index 5f302793a84a7e5bf79abee2e013e1673a118f0a..ed3f451e668839be15f0d33ef0458e7d836d3efb 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -1,5 +1,5 @@ BSD 3-clause license -Copyright (c) 2015-2020, conda-forge contributors +Copyright (c) 2015-2021, conda-forge contributors All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/README.md b/README.md index 38d9c56bb3b4f82d0a0e307903a2ca77d6a32815..3797ddfdfdf499003da0a8b77d46b6b80d714409 100644 --- a/README.md +++ b/README.md @@ -36,13 +36,6 @@ Current build status <table> <thead><tr><th>Variant</th><th>Status</th></tr></thead> <tbody><tr> - <td>linux_64_python3.6.____cpython</td> - <td> - <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> - <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=linux&configuration=linux_64_python3.6.____cpython" alt="variant"> - </a> - </td> - </tr><tr> <td>linux_64_python3.7.____cpython</td> <td> <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> @@ -57,10 +50,10 @@ Current build status </a> </td> </tr><tr> - <td>osx_64_python3.6.____cpython</td> + <td>linux_64_python3.9.____cpython</td> <td> <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> - <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=osx&configuration=osx_64_python3.6.____cpython" alt="variant"> + <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=linux&configuration=linux_64_python3.9.____cpython" alt="variant"> </a> </td> </tr><tr> @@ -78,24 +71,24 @@ Current build status </a> </td> </tr><tr> - <td>win_64_python3.6.____cpython</td> + <td>osx_64_python3.9.____cpython</td> <td> <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> - <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=win&configuration=win_64_python3.6.____cpython" alt="variant"> + <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=osx&configuration=osx_64_python3.9.____cpython" alt="variant"> </a> </td> </tr><tr> - <td>win_64_python3.7.____cpython</td> + <td>osx_arm64_python3.8.____cpython</td> <td> <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> - <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=win&configuration=win_64_python3.7.____cpython" alt="variant"> + <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=osx&configuration=osx_arm64_python3.8.____cpython" alt="variant"> </a> </td> </tr><tr> - <td>win_64_python3.8.____cpython</td> + <td>osx_arm64_python3.9.____cpython</td> <td> <a href="https://dev.azure.com/conda-forge/feedstock-builds/_build/latest?definitionId=4385&branchName=master"> - <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=win&configuration=win_64_python3.8.____cpython" alt="variant"> + <img src="https://dev.azure.com/conda-forge/feedstock-builds/_apis/build/status/tensorflow-feedstock?branchName=master&jobName=osx&configuration=osx_arm64_python3.9.____cpython" alt="variant"> </a> </td> </tr> @@ -115,6 +108,7 @@ Current release info | 
[](https://anaconda.org/conda-forge/libtensorflow_cc) | [](https://anaconda.org/conda-forge/libtensorflow_cc) | [](https://anaconda.org/conda-forge/libtensorflow_cc) | [](https://anaconda.org/conda-forge/libtensorflow_cc) | | [](https://anaconda.org/conda-forge/tensorflow) | [](https://anaconda.org/conda-forge/tensorflow) | [](https://anaconda.org/conda-forge/tensorflow) | [](https://anaconda.org/conda-forge/tensorflow) | | [](https://anaconda.org/conda-forge/tensorflow-base) | [](https://anaconda.org/conda-forge/tensorflow-base) | [](https://anaconda.org/conda-forge/tensorflow-base) | [](https://anaconda.org/conda-forge/tensorflow-base) | +| [](https://anaconda.org/conda-forge/tensorflow-estimator) | [](https://anaconda.org/conda-forge/tensorflow-estimator) | [](https://anaconda.org/conda-forge/tensorflow-estimator) | [](https://anaconda.org/conda-forge/tensorflow-estimator) | Installing tensorflow ===================== @@ -123,12 +117,13 @@ Installing `tensorflow` from the `conda-forge` channel can be achieved by adding ``` conda config --add channels conda-forge +conda config --set channel_priority strict ``` -Once the `conda-forge` channel has been enabled, `libtensorflow, libtensorflow_cc, tensorflow, tensorflow-base` can be installed with: +Once the `conda-forge` channel has been enabled, `libtensorflow, libtensorflow_cc, tensorflow, tensorflow-base, tensorflow-estimator` can be installed with: ``` -conda install libtensorflow libtensorflow_cc tensorflow tensorflow-base +conda install libtensorflow libtensorflow_cc tensorflow tensorflow-base tensorflow-estimator ``` It is possible to list all of the versions of `libtensorflow` available on your platform with: @@ -193,9 +188,9 @@ build distinct package versions. In order to produce a uniquely identifiable distribution: * If the version of a package **is not** being increased, please add or increase - the [``build/number``](https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#build-number-and-string). + the [``build/number``](https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#build-number-and-string). * If the version of a package **is** being increased, please remember to return - the [``build/number``](https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#build-number-and-string) + the [``build/number``](https://docs.conda.io/projects/conda-build/en/latest/resources/define-metadata.html#build-number-and-string) back to 0. 
Feedstock Maintainers @@ -204,8 +199,10 @@ Feedstock Maintainers * [@farhantejani](https://github.com/farhantejani/) * [@ghego](https://github.com/ghego/) * [@gilbertfrancois](https://github.com/gilbertfrancois/) +* [@h-vetinari](https://github.com/h-vetinari/) * [@hajapy](https://github.com/hajapy/) * [@jschueller](https://github.com/jschueller/) * [@njzjz](https://github.com/njzjz/) * [@waitingkuo](https://github.com/waitingkuo/) +* [@xhochy](https://github.com/xhochy/) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 6b346f505e5d415eeb91f140f75568318e6c65a1..33a441c1f1d0129ab8ef9893941750260fe1f6ef 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -4,5 +4,4 @@ jobs: - template: ./.azure-pipelines/azure-pipelines-linux.yml - - template: ./.azure-pipelines/azure-pipelines-win.yml - template: ./.azure-pipelines/azure-pipelines-osx.yml \ No newline at end of file diff --git a/build-locally.py b/build-locally.py index a7cdfdfa1a17a40f7815f405c0cbce50cfa3f56a..00eeb34b1c81c9b5f06f9ecb7e01e3493bcf8d02 100755 --- a/build-locally.py +++ b/build-locally.py @@ -3,10 +3,11 @@ # This file has been generated by conda-smithy in order to build the recipe # locally. # -import glob import os +import glob import subprocess from argparse import ArgumentParser +import platform def setup_environment(ns): @@ -16,6 +17,10 @@ def setup_environment(ns): os.environ["BUILD_WITH_CONDA_DEBUG"] = "1" if ns.output_id: os.environ["BUILD_OUTPUT_ID"] = ns.output_id + if "MINIFORGE_HOME" not in os.environ: + os.environ["MINIFORGE_HOME"] = os.path.join( + os.path.dirname(__file__), "miniforge3" + ) def run_docker_build(ns): @@ -23,11 +28,16 @@ def run_docker_build(ns): subprocess.check_call([script]) +def run_osx_build(ns): + script = ".scripts/run_osx_build.sh" + subprocess.check_call([script]) + + def verify_config(ns): valid_configs = { os.path.basename(f)[:-5] for f in glob.glob(".ci_support/*.yaml") } - print("valid configs are {valid_configs}".format(valid_configs=valid_configs)) + print(f"valid configs are {valid_configs}") if ns.config in valid_configs: print("Using " + ns.config + " configuration") return @@ -38,18 +48,24 @@ def verify_config(ns): print("config not selected, please choose from the following:\n") selections = list(enumerate(sorted(valid_configs), 1)) for i, c in selections: - print("{i}. {c}".format(i=i, c=c)) + print(f"{i}. {c}") s = input("\n> ") idx = int(s) - 1 ns.config = selections[idx][1] - print("selected {config}".format(config=ns.config)) + print(f"selected {ns.config}") else: raise ValueError("config " + ns.config + " is not valid") # Remove the following, as implemented - if not ns.config.startswith("linux"): + if ns.config.startswith("win"): raise ValueError( - "only Linux configs currently supported, got {config}".format(config=ns.config) + f"only Linux/macOS configs currently supported, got {ns.config}" ) + elif ns.config.startswith("osx") and platform.system() == "Darwin": + if "OSX_SDK_DIR" not in os.environ: + raise RuntimeError( + "Need OSX_SDK_DIR env variable set. 
Run 'export OSX_SDK_DIR=/opt'" + "to download the SDK automatically to '/opt/MacOSX<ver>.sdk'" + ) def main(args=None): @@ -68,7 +84,12 @@ def main(args=None): verify_config(ns) setup_environment(ns) - run_docker_build(ns) + if ns.config.startswith("linux") or ( + ns.config.startswith("osx") and platform.system() == "Linux" + ): + run_docker_build(ns) + elif ns.config.startswith("osx"): + run_osx_build(ns) if __name__ == "__main__": diff --git a/conda-forge.yml b/conda-forge.yml index 7abbcf1a1db8e76fa7f7c957bc373961975b316a..073f0ddbaa34a1d0848f9452cf3613cd07028002 100644 --- a/conda-forge.yml +++ b/conda-forge.yml @@ -1,9 +1,5 @@ -travis: - secure: - BINSTAR_TOKEN: NBULVMw70vHV7ch6QWoFGS84M9ghYbix6Fru8O6db3pTyPXZoNlOaNdkHKaOdR88sahmjZx23T00bk9G/UjDqNO+SmPpf30m2Mt8DY+in3rAB9fi9xSJBjZ51xnAVH3JXXGo884wgtjLkCwk7aBYNLtNWtJl0AA5VgxS+1guMGYsqWiPDAtiFEswyhB4ZVO2budldeatef5unjpYC2im9gok5/fGHlRFGf8ds1K4xc22uIDeqnlR4hbu1IUS4n1+qLNi+TYLjoVyoghIm6BBuJyVMoWyGNayGootFByfPGttnfYU/9D9Z/cfonpW3iBkz3oLfq6vtKRE0mwxRdY0ZpPhQitCH6dLWA7z+9aRSLSfnZkwuFGr8uosVWFTqG0WwudgURr2PLSZf14U7tK1sVLnlVaRIfghmCFh9m8GYTImlrfUPZHsS+dtXOnLosPmH97XJke028J6dIwtNXsMaiQSJc+R7ugN6QbGKgDklsgEWJyUm7+LfSVbMf03u9fslU8V7U/TUeCqZye86GWjmcz+s42sqQ0XooPdqj3PMG6f9SeH/muxpMFnnxSoiITnmgB64dhTrymv1U0edWlbgvg9u4F7/opKExfBZtTcL5op5MKrKsMRgOAho6hixedoBXtXUts8UB4xMt3qTzDz43PkaeA096TCqyJbYJTtZqM= -appveyor: - secure: - BINSTAR_TOKEN: tumuXLL8PU75WMnRDemRy02ruEq2RpNxeK3dz0MjFssnosPm2v4EFjfNB4PTotA1 +build_platform: + osx_arm64: osx_64 provider: win: azure conda_forge_output_validation: true diff --git a/recipe/bld.bat b/recipe/bld.bat index 70703d2309b1a1f01e92ab4bd30d25585d10b85e..bca26ebf168b7135562f21ea8303f1072f4d0f6a 100644 --- a/recipe/bld.bat +++ b/recipe/bld.bat @@ -1,2 +1,65 @@ -pip install --no-deps -vv *.whl -for %%f in (*.whl) do pip install --no-deps -vv "%%f" +@echo on + +set "PATH=%CD%:%PATH%" +set LIBDIR=%LIBRARY_BIN% +set INCLUDEDIR=%LIBRARY_INC% + +set "TF_SYSTEM_LIBS=llvm,swig" + +:: do not build with MKL support +set TF_NEED_MKL=0 +set BAZEL_MKL_OPT= + +mkdir -p ./bazel_output_base +set BAZEL_OPTS= + +:: the following arguments are useful for debugging +:: --logging=6 +:: --subcommands +:: jobs can be used to limit parallel builds and reduce resource needs +:: --jobs=20 +:: Set compiler and linker flags as bazel does not account for CFLAGS, +:: CXXFLAGS and LDFLAGS. 
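# The comment above refers to the fact that conda-build exports CFLAGS/CXXFLAGS/LDFLAGS
# via the compiler activation packages, while bazel only honours flags passed to it
# explicitly. The BUILD_OPTS block below hard-codes a comparable set of flags; a generic
# translation (sketch only, assuming space-separated flag variables) would look like:
BAZEL_FLAG_OPTS=""
for f in ${CFLAGS:-};   do BAZEL_FLAG_OPTS="${BAZEL_FLAG_OPTS} --copt=${f}";    done
for f in ${CXXFLAGS:-}; do BAZEL_FLAG_OPTS="${BAZEL_FLAG_OPTS} --cxxopt=${f}";  done
for f in ${LDFLAGS:-};  do BAZEL_FLAG_OPTS="${BAZEL_FLAG_OPTS} --linkopt=${f}"; done
echo bazel build ${BAZEL_FLAG_OPTS} //tensorflow/tools/pip_package:build_pip_package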
+BUILD_OPTS=" +--copt=-march=nocona +--copt=-mtune=haswell +--copt=-ftree-vectorize +--copt=-fPIC +--copt=-fstack-protector-strong +--copt=-O2 +--cxxopt=-fvisibility-inlines-hidden +--cxxopt=-fmessage-length=0 +--linkopt=-zrelro +--linkopt=-znow +--verbose_failures +%BAZEL_MKL_OPT% +--config=opt" +set TF_ENABLE_XLA=0 +set BUILD_TARGET="//tensorflow/tools/pip_package:build_pip_package //tensorflow:libtensorflow.so //tensorflow:libtensorflow_cc.so" + +:: Python settings +set PYTHON_BIN_PATH=%PYTHON% +set PYTHON_LIB_PATH=%SP_DIR% +set USE_DEFAULT_PYTHON_LIB_PATH=1 + +:: additional settings +set CC_OPT_FLAGS="-march=nocona -mtune=haswell" +set TF_NEED_OPENCL=0 +set TF_NEED_OPENCL_SYCL=0 +set TF_NEED_COMPUTECPP=0 +set TF_NEED_CUDA=0 +set TF_CUDA_CLANG=0 +set TF_NEED_TENSORRT=0 +set TF_NEED_ROCM=0 +set TF_NEED_MPI=0 +set TF_DOWNLOAD_CLANG=0 +set TF_SET_ANDROID_WORKSPACE=0 +./configure + +:: build using bazel +bazel %BAZEL_OPTS% build %BUILD_OPTS% %BUILD_TARGET% + +:: build a whl file +mkdir -p %SRC_DIR%\\tensorflow_pkg +bazel-bin\\tensorflow\\tools\\pip_package\\build_pip_package %SRC_DIR%\\tensorflow_pkg + diff --git a/recipe/build.sh b/recipe/build.sh index c9c04dbf847731ed6f2c816c2d86df01522daef1..f0504dc967c0d5d1d5b368d72d58f046e93f0e61 100644 --- a/recipe/build.sh +++ b/recipe/build.sh @@ -2,24 +2,49 @@ set -ex -if [[ "$target_platform" == "osx-64" ]]; then - # install the whl using pip - pip install --no-deps *.whl - - # The tensorboard package has the proper entrypoint - ls -l ${SP_DIR} -# rm -f ${PREFIX}/bin/tensorboard - - exit 0 -fi - export PATH="$PWD:$PATH" export CC=$(basename $CC) export CXX=$(basename $CXX) export LIBDIR=$PREFIX/lib export INCLUDEDIR=$PREFIX/include -export TF_SYSTEM_LIBS="llvm,swig" +# Needs a bazel build: +# com_google_absl +# Build failures in tensorflow/core/platform/s3/aws_crypto.cc +# boringssl (i.e. system openssl) +# Most importantly: Write a patch that uses system LLVM libs for sure as well as MLIR and oneDNN/mkldnn +# TODO(check): +# absl_py +# com_github_googleapis_googleapis +# com_github_googlecloudplatform_google_cloud_cpp +# Needs c++17, try on linux +# com_googlesource_code_re2 + +# The possible values are specified in third_party/systemlibs/syslibs_configure.bzl +# The versions for them can be found in tensorflow/workspace.bzl +export TF_SYSTEM_LIBS=" + absl_py + astor_archive + astunparse_archive + boringssl + com_github_googlecloudplatform_google_cloud_cpp + com_github_grpc_grpc + com_google_protobuf + curl + cython + dill_archive + flatbuffers + gast_archive + gif + icu + libjpeg_turbo + org_sqlite + png + pybind11 + snappy + zlib + " +sed -i -e "s/GRPCIO_VERSION/${grpc_cpp}/" tensorflow/tools/pip_package/setup.py # do not build with MKL support export TF_NEED_MKL=0 @@ -27,66 +52,41 @@ export BAZEL_MKL_OPT="" mkdir -p ./bazel_output_base export BAZEL_OPTS="" - -if [[ "$target_platform" == "osx-64" ]]; then - # set up bazel config file for conda provided clang toolchain - cp -r ${RECIPE_DIR}/custom_clang_toolchain . 
- pushd custom_clang_toolchain - cp ../$CC cc_wrapper.sh - sed -e "s:\${PREFIX}:${BUILD_PREFIX}:" \ - -e "s:\${LD}:${LD}:" \ - -e "s:\${NM}:${NM}:" \ - -e "s:\${STRIP}:${STRIP}:" \ - -e "s:\${LIBTOOL}:${LIBTOOL}:" \ - -e "s:\${CONDA_BUILD_SYSROOT}:${CONDA_BUILD_SYSROOT}:" \ - CROSSTOOL.template > CROSSTOOL - popd - export BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 - BUILD_OPTS=" - --crosstool_top=//custom_clang_toolchain:toolchain - --verbose_failures - ${BAZEL_MKL_OPT} - --config=opt" - export TF_ENABLE_XLA=0 - export BUILD_TARGET="//tensorflow/tools/pip_package:build_pip_package" +export CC_OPT_FLAGS="${CFLAGS}" + +# Quick debug: +# cp -r ${RECIPE_DIR}/build.sh . && bazel clean && bash -x build.sh --logging=6 | tee log.txt +# Dependency graph: +# bazel query 'deps(//tensorflow/tools/lib_package:libtensorflow)' --output graph > graph.in +if [[ "${target_platform}" == osx-* ]]; then + export LDFLAGS="${LDFLAGS} -lz -framework CoreFoundation -Xlinker -undefined -Xlinker dynamic_lookup" else - # Linux - # the following arguments are useful for debugging - # --logging=6 - # --subcommands - # jobs can be used to limit parallel builds and reduce resource needs - # --jobs=20 - # Set compiler and linker flags as bazel does not account for CFLAGS, - # CXXFLAGS and LDFLAGS. - BUILD_OPTS=" - --copt=-march=nocona - --copt=-mtune=haswell - --copt=-ftree-vectorize - --copt=-fPIC - --copt=-fstack-protector-strong - --copt=-O2 - --cxxopt=-fvisibility-inlines-hidden - --cxxopt=-fmessage-length=0 - --linkopt=-zrelro - --linkopt=-znow - --verbose_failures - ${BAZEL_MKL_OPT} - --config=opt" - export TF_ENABLE_XLA=0 - export BUILD_TARGET="//tensorflow/tools/pip_package:build_pip_package //tensorflow:libtensorflow.so //tensorflow:libtensorflow_cc.so" + export LDFLAGS="${LDFLAGS} -lrt" fi -# TODO: publish 2.0 branch in bazel-feedstock -export BAZEL_VERSION=2.0.0 -export EXTRA_BAZEL_ARGS="--host_javabase=@local_jdk//:jdk" -mkdir bazel_local -cd bazel_local -curl -sSOL https://github.com/bazelbuild/bazel/releases/download/${BAZEL_VERSION}/bazel-${BAZEL_VERSION}-dist.zip -unzip bazel-${BAZEL_VERSION}-dist.zip -./compile.sh -cd .. -export PATH="$PWD/bazel_local/output:$PATH" -bazel version +source ${RECIPE_DIR}/gen-bazel-toolchain.sh + +if [[ "${target_platform}" == "osx-64" ]]; then + # Tensorflow doesn't cope yet with an explicit architecture (darwin_x86_64) on osx-64 yet. 
+ TARGET_CPU=darwin +fi + +# If you really want to see what is executed, add --subcommands +BUILD_OPTS=" + --crosstool_top=//custom_toolchain:toolchain + --logging=6 + --verbose_failures + --config=opt + --define=PREFIX=${PREFIX} + --define=PROTOBUF_INCLUDE_PATH=${PREFIX}/include + --config=noaws + --cpu=${TARGET_CPU}" + +if [[ "${target_platform}" == "osx-arm64" ]]; then + BUILD_OPTS="${BUILD_OPTS} --config=macos_arm64" +fi +export TF_ENABLE_XLA=0 +export BUILD_TARGET="//tensorflow/tools/pip_package:build_pip_package //tensorflow/tools/lib_package:libtensorflow //tensorflow:libtensorflow_cc.so" # Python settings export PYTHON_BIN_PATH=${PYTHON} @@ -94,7 +94,6 @@ export PYTHON_LIB_PATH=${SP_DIR} export USE_DEFAULT_PYTHON_LIB_PATH=1 # additional settings -export CC_OPT_FLAGS="-march=nocona -mtune=haswell" export TF_NEED_OPENCL=0 export TF_NEED_OPENCL_SYCL=0 export TF_NEED_COMPUTECPP=0 @@ -105,6 +104,12 @@ export TF_NEED_ROCM=0 export TF_NEED_MPI=0 export TF_DOWNLOAD_CLANG=0 export TF_SET_ANDROID_WORKSPACE=0 +export TF_CONFIGURE_IOS=0 + +# Get rid of unwanted defaults +sed -i -e "/PROTOBUF_INCLUDE_PATH/c\ " .bazelrc +sed -i -e "/PREFIX/c\ " .bazelrc + ./configure # build using bazel @@ -112,10 +117,29 @@ bazel ${BAZEL_OPTS} build ${BUILD_OPTS} ${BUILD_TARGET} # build a whl file mkdir -p $SRC_DIR/tensorflow_pkg -bazel-bin/tensorflow/tools/pip_package/build_pip_package $SRC_DIR/tensorflow_pkg - -# install the whl using pip -pip install --no-deps $SRC_DIR/tensorflow_pkg/*.whl +bash -x bazel-bin/tensorflow/tools/pip_package/build_pip_package $SRC_DIR/tensorflow_pkg + +if [[ "${target_platform}" == linux-* ]]; then + cp $SRC_DIR/bazel-bin/tensorflow/tools/lib_package/libtensorflow.tar.gz $SRC_DIR + mkdir -p $SRC_DIR/libtensorflow_cc_output/lib + cp -d bazel-bin/tensorflow/libtensorflow_cc.so* $SRC_DIR/libtensorflow_cc_output/lib/ + cp -d bazel-bin/tensorflow/libtensorflow_framework.so* $SRC_DIR/libtensorflow_cc_output/lib/ + cp -d $SRC_DIR/libtensorflow_cc_output/lib/libtensorflow_framework.so.2 $SRC_DIR/libtensorflow_cc_output/lib/libtensorflow_framework.so + # Make writable so patchelf can do its magic + chmod u+w $SRC_DIR/libtensorflow_cc_output/lib/libtensorflow* + + mkdir -p $SRC_DIR/libtensorflow_cc_output/include/tensorflow + rsync -avzh --exclude '_virtual_includes/' --exclude 'pip_package/' --exclude 'lib_package/' --include '*/' --include '*.h' --include '*.inc' --exclude '*' bazel-bin/ $SRC_DIR/libtensorflow_cc_output/include + rsync -avzh --include '*/' --include '*.h' --include '*.inc' --exclude '*' tensorflow/cc $SRC_DIR/libtensorflow_cc_output/include/tensorflow/ + rsync -avzh --include '*/' --include '*.h' --include '*.inc' --exclude '*' tensorflow/core $SRC_DIR/libtensorflow_cc_output/include/tensorflow/ + rsync -avzh --include '*/' --include '*' --exclude '*.cc' third_party/ $SRC_DIR/libtensorflow_cc_output/include/third_party/ + rsync -avzh --include '*/' --include '*' --exclude '*.txt' bazel-work/external/eigen_archive/Eigen/ $SRC_DIR/libtensorflow_cc_output/include/Eigen/ + rsync -avzh --include '*/' --include '*' --exclude '*.txt' bazel-work/external/eigen_archive/unsupported/ $SRC_DIR/libtensorflow_cc_output/include/unsupported/ + pushd $SRC_DIR/libtensorflow_cc_output + tar cf ../libtensorflow_cc_output.tar . 
+ popd + chmod -R u+rw $SRC_DIR/libtensorflow_cc_output + rm -r $SRC_DIR/libtensorflow_cc_output +fi -# The tensorboard package has the proper entrypoint -rm -f ${PREFIX}/bin/tensorboard +bazel clean diff --git a/recipe/build_estimator.sh b/recipe/build_estimator.sh new file mode 100644 index 0000000000000000000000000000000000000000..2c577ffa07c26f7c7b424013688d3aaea2b589b6 --- /dev/null +++ b/recipe/build_estimator.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -exuo pipefail + +pushd tensorflow-estimator + +WHEEL_DIR=${PWD}/wheel_dir +mkdir -p ${WHEEL_DIR} +bazel build tensorflow_estimator/tools/pip_package:build_pip_package +bazel-bin/tensorflow_estimator/tools/pip_package/build_pip_package ${WHEEL_DIR} +${PYTHON} -m pip install --no-deps ${WHEEL_DIR}/*.whl +bazel clean +popd diff --git a/recipe/build_pkg.bat b/recipe/build_pkg.bat new file mode 100644 index 0000000000000000000000000000000000000000..f7cb14ea7ce83e6f8c3632f1317d4e2897eb3b39 --- /dev/null +++ b/recipe/build_pkg.bat @@ -0,0 +1,5 @@ +:: install the whl using pip +pip install --no-deps %SRC_DIR%\\tensorflow_pkg\\*.whl + +:: The tensorboard package has the proper entrypoint +rm -f %LIBRARY_BIN%\\tensorboard diff --git a/recipe/build_pkg.sh b/recipe/build_pkg.sh new file mode 100644 index 0000000000000000000000000000000000000000..893a97922c55e52be594f0ab1f81d60bc356d621 --- /dev/null +++ b/recipe/build_pkg.sh @@ -0,0 +1,5 @@ +# install the whl using pip +pip install --no-deps $SRC_DIR/tensorflow_pkg/*.whl + +# The tensorboard package has the proper entrypoint +rm -f ${PREFIX}/bin/tensorboard diff --git a/recipe/conda_build_config.yaml b/recipe/conda_build_config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..f8760c8b2fbc8d638911459954ecf3b762c9d8dc --- /dev/null +++ b/recipe/conda_build_config.yaml @@ -0,0 +1,2 @@ +MACOSX_SDK_VERSION: # [osx and x86_64] + - "10.12" # [osx and x86_64] diff --git a/recipe/cp_libtensorflow.sh b/recipe/cp_libtensorflow.sh index 525d78a9459f4af616ecea8bc0d37ab3be600fa0..303f84ea4359cf188b15150550d6a4cd05f4b35c 100644 --- a/recipe/cp_libtensorflow.sh +++ b/recipe/cp_libtensorflow.sh @@ -1,7 +1,5 @@ -# copy libraries -mkdir -p ${PREFIX}/lib -cp bazel-bin/tensorflow/*.so ${PREFIX}/lib/ +# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/tools/lib_package/README.md +tar -C ${PREFIX} -xzf $SRC_DIR/libtensorflow.tar.gz -# copy includes -mkdir -p ${PREFIX}/include/tensorflow/c -cp -R tensorflow/c/* ${PREFIX}/include/tensorflow/c/. 
+# Make writable so patchelf can do its magic +chmod u+w $PREFIX/lib/libtensorflow* diff --git a/recipe/cp_libtensorflow_cc.sh b/recipe/cp_libtensorflow_cc.sh index c5c8e38ad529334308636e0d6d5ee3ba2ac4e8b9..d72352ff6ba680a332a3f559e2b1d635fd00c9af 100644 --- a/recipe/cp_libtensorflow_cc.sh +++ b/recipe/cp_libtensorflow_cc.sh @@ -1,22 +1 @@ -# copy libraries -mkdir -p ${PREFIX}/lib -cp bazel-bin/tensorflow/libtensorflow_cc.so ${PREFIX}/lib/ -cp bazel-bin/tensorflow/libtensorflow_framework.so ${PREFIX}/lib/ - -# remove cc files -find bazel-genfiles/ -name "*.cc" -type f -delete -find tensorflow/cc -name "*.cc" -type f -delete -find tensorflow/core -name "*.cc" -type f -delete -find third_party -name "*.cc" -type f -delete -find bazel-work/external/com_google_absl/absl -name "*.cc" -type f -delete - -# copy includes -mkdir -p ${PREFIX}/include/tensorflow -cp -r bazel-genfiles/* ${PREFIX}/include/ -cp -r tensorflow/cc/ ${PREFIX}/include/tensorflow -cp -r tensorflow/core/ ${PREFIX}/include/tensorflow -cp -r third_party/ ${PREFIX}/include - -cp -r bazel-work/external/com_google_absl/absl/ ${PREFIX}/include -cp -r bazel-work/external/eigen_archive/Eigen/ ${PREFIX}/include -cp -r bazel-work/external/eigen_archive/unsupported/ ${PREFIX}/include +tar -C ${PREFIX} -xf $SRC_DIR/libtensorflow_cc_output.tar diff --git a/recipe/custom_toolchain/BUILD b/recipe/custom_toolchain/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..c09094a2476f4d40dd33596c39c84979836ae901 --- /dev/null +++ b/recipe/custom_toolchain/BUILD @@ -0,0 +1,68 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "empty", + srcs = [], +) + +filegroup( + name = "cc_wrapper", + srcs = ["cc_wrapper.sh"], +) + +filegroup( + name = "compiler_deps", + srcs = glob(["extra_tools/**"]) + [":cc_wrapper"], +) + +load( + ":cc_toolchain_config.bzl", "cc_toolchain_config" + ) + +cc_toolchain_config( + name = "cc-compiler-target-config" +) + +load( + ":cc_toolchain_build_config.bzl", build_cc_toolchain_config = "cc_toolchain_config" +) +build_cc_toolchain_config( + name = "cc-compiler-build-config" +) + +toolchains = { + "BUILD_CPU|compiler": ":cc-compiler-build", + "BUILD_CPU": ":cc-compiler-build", +} +toolchains["TARGET_CPU|compiler"] = ":cc-compiler-target" +toolchains["TARGET_CPU"] = ":cc-compiler-target" +cc_toolchain_suite( + name = "toolchain", + toolchains = toolchains, +) + +cc_toolchain( + name = "cc-compiler-target", + all_files = ":compiler_deps", + compiler_files = ":compiler_deps", + toolchain_identifier = "local", + toolchain_config = ":cc-compiler-target-config", + dwp_files = ":empty", + linker_files = ":compiler_deps", + objcopy_files = ":empty", + strip_files = ":empty", + supports_param_files = 0, +) + +cc_toolchain( + name = "cc-compiler-build", + all_files = ":compiler_deps", + compiler_files = ":compiler_deps", + toolchain_identifier = "build", + toolchain_config = ":cc-compiler-build-config", + dwp_files = ":empty", + linker_files = ":compiler_deps", + objcopy_files = ":empty", + strip_files = ":empty", + supports_param_files = 0, +) diff --git a/recipe/custom_toolchain/CROSSTOOL.template b/recipe/custom_toolchain/CROSSTOOL.template new file mode 100644 index 0000000000000000000000000000000000000000..c77aa9d115def6e219a464aa6ccdb6e4a80b68f9 --- /dev/null +++ b/recipe/custom_toolchain/CROSSTOOL.template @@ -0,0 +1,88 @@ +major_version: "local" +minor_version: "" +default_target_cpu: "same_as_host" + +default_toolchain { + cpu: "darwin" + toolchain_identifier: "local" +} + 
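# The BUILD file above and the toolchain templates in this directory use ${...}
# placeholders (${PREFIX}, ${BUILD_PREFIX}, ${LD}, ${CONDA_BUILD_SYSROOT}, ...) plus
# TARGET_CPU/TARGET_PLATFORM tokens. They are expanded at build time by the
# gen-bazel-toolchain.sh script sourced from recipe/build.sh, which is not shown in
# this diff; a minimal sketch of that expansion, assuming one sed pass per template
# in the same spirit as the removed CROSSTOOL.template handling, would be:
mkdir -p custom_toolchain
for tmpl in BUILD cc_toolchain_config.bzl cc_wrapper.sh.template; do
  sed -e "s:\${PREFIX}:${PREFIX}:g" \
      -e "s:\${BUILD_PREFIX}:${BUILD_PREFIX}:g" \
      -e "s:\${LD}:${LD}:g" \
      -e "s:\${CONDA_BUILD_SYSROOT}:${CONDA_BUILD_SYSROOT:-}:g" \
      "${RECIPE_DIR}/custom_toolchain/${tmpl}" > "custom_toolchain/${tmpl%.template}"
done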
+toolchain { + toolchain_identifier: "local" + abi_version: "local" + abi_libc_version: "local" + builtin_sysroot: "" + compiler: "compiler" + host_system_name: "local" + needsPic: true + supports_gold_linker: false + supports_incremental_linker: false + supports_fission: false + supports_interface_shared_objects: false + supports_normalizing_ar: false + supports_start_end_lib: false + target_libc: "macosx" + target_cpu: "darwin" + target_system_name: "local" + cxx_flag: "-stdlib=libc++" + cxx_flag: "-fvisibility-inlines-hidden" + cxx_flag: "-std=c++14" + cxx_flag: "-fmessage-length=0" + linker_flag: "-Wl,-pie" + linker_flag: "-headerpad_max_install_names" + linker_flag: "-Wl,-dead_strip_dylibs" + linker_flag: "-undefined" + linker_flag: "dynamic_lookup" + linker_flag: "-force_load" + linker_flag: "${BUILD_PREFIX}/lib/libc++.a" + linker_flag: "-force_load" + linker_flag: "${BUILD_PREFIX}/lib/libc++abi.a" + linker_flag: "-nostdlib" + linker_flag: "-lc" + linker_flag: "-isysroot${CONDA_BUILD_SYSROOT}" + cxx_builtin_include_directory: "${PREFIX}/include/c++/v1" + cxx_builtin_include_directory: "${PREFIX}/lib/clang/10.0.0/include" + cxx_builtin_include_directory: "${CONDA_BUILD_SYSROOT}/usr/include" + cxx_builtin_include_directory: "${CONDA_BUILD_SYSROOT}/System/Library/Frameworks" + objcopy_embed_flag: "-I" + objcopy_embed_flag: "binary" + unfiltered_cxx_flag: "-no-canonical-prefixes" + unfiltered_cxx_flag: "-Wno-builtin-macro-redefined" + unfiltered_cxx_flag: "-D__DATE__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIMESTAMP__=\"redacted\"" + unfiltered_cxx_flag: "-D__TIME__=\"redacted\"" + compiler_flag: "-march=core2" + compiler_flag: "-mtune=haswell" + compiler_flag: "-mssse3" + compiler_flag: "-ftree-vectorize" + compiler_flag: "-fPIC" + compiler_flag: "-fPIE" + compiler_flag: "-fstack-protector-strong" + compiler_flag: "-O2" + compiler_flag: "-pipe" + tool_path {name: "ld" path: "${LD}" } + tool_path {name: "cpp" path: "/usr/bin/cpp" } + tool_path {name: "dwp" path: "/usr/bin/dwp" } + tool_path {name: "gcov" path: "/usr/bin/gcov" } + tool_path {name: "nm" path: "${NM}" } + tool_path {name: "objcopy" path: "/usr/bin/objcopy" } + tool_path {name: "objdump" path: "/usr/bin/objdump" } + tool_path {name: "strip" path: "${STRIP}" } + tool_path {name: "gcc" path: "cc_wrapper.sh" } + tool_path {name: "ar" path: "${LIBTOOL}" } + + compilation_mode_flags { + mode: DBG + compiler_flag: "-g" + } + compilation_mode_flags { + mode: OPT + compiler_flag: "-g0" + compiler_flag: "-O2" + compiler_flag: "-D_FORTIFY_SOURCE=1" + compiler_flag: "-DNDEBUG" + compiler_flag: "-ffunction-sections" + compiler_flag: "-fdata-sections" + } + linking_mode_flags { mode: DYNAMIC } +} diff --git a/recipe/custom_toolchain/WORKSPACE b/recipe/custom_toolchain/WORKSPACE new file mode 100644 index 0000000000000000000000000000000000000000..125586504a29b8a61b4e668d407b866ab5cde480 --- /dev/null +++ b/recipe/custom_toolchain/WORKSPACE @@ -0,0 +1 @@ +workspace(name = "local_config_cc") diff --git a/recipe/custom_toolchain/cc_toolchain_config.bzl b/recipe/custom_toolchain/cc_toolchain_config.bzl new file mode 100644 index 0000000000000000000000000000000000000000..a312d0750b90d070fabb0dbadfdde4480e4dbcff --- /dev/null +++ b/recipe/custom_toolchain/cc_toolchain_config.bzl @@ -0,0 +1,332 @@ +# Adapted from the following links and pulling in options from old CROSSTOOL.template +# https://docs.bazel.build/versions/0.26.0/tutorial/cc-toolchain-config.html +# 
https://github.com/bazelbuild/bazel/blob/4dfc83d5f11e9190e9e25dee4c7dc2a71cd7b8fd/tools/osx/crosstool/cc_toolchain_config.bzl +# https://docs.bazel.build/versions/master/skylark/lib/cc_common.html#create_cc_toolchain_config_info + +load("@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", + "feature", + "flag_group", + "flag_set", + "tool_path", + "with_feature_set", + ) + +load("@bazel_tools//tools/build_defs/cc:action_names.bzl", + "ACTION_NAMES") + +def _impl(ctx): + tool_paths = [ + tool_path( + name = "gcc", + path = "${GCC}", + ), + tool_path( + name = "ld", + path = "${LD}", + ), + tool_path( + name = "ar", + path = "${BUILD_PREFIX}/bin/${AR}", + ), + tool_path( + name = "cpp", + path = "/usr/bin/cpp", + ), + tool_path( + name = "gcov", + path = "/usr/bin/gcov", + ), + tool_path( + name = "nm", + path = "${NM}", + ), + tool_path( + name = "objdump", + path = "/usr/bin/objdump", + ), + tool_path( + name = "strip", + path = "${STRIP}", + ), + ] + + all_compile_actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.clif_match, + ACTION_NAMES.lto_backend, + ] + + all_cpp_compile_actions = [ + ACTION_NAMES.cpp_compile, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.clif_match, + ] + + preprocessor_compile_actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.clif_match, + ] + + codegen_compile_actions = [ + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ] + + all_link_actions = [ + ACTION_NAMES.cpp_link_executable, + ACTION_NAMES.cpp_link_dynamic_library, + ACTION_NAMES.cpp_link_nodeps_dynamic_library, + ] + + compiler_flags = feature( + name = "compiler_flags", + enabled = True, + flag_sets = [ + flag_set( + actions = all_compile_actions, + flag_groups = [flag_group(flags = "${CFLAGS} ${CPPFLAGS}".split(" "))], + ), + ], + ) + + objcpp_flags = feature( + name = "objcpp_flags", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.objcpp_compile, + ], + flag_groups = [flag_group(flags = "${CXXFLAGS} ${CPPFLAGS}".split(" "))], + ), + ], + ) + + cxx_flags = feature( + name = "cxx_flags", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [flag_group(flags = "${CXXFLAGS} ${CPPFLAGS}".split(" "))], + ), + ], + ) + + if "TARGET_PLATFORM".startswith("osx"): + toolchain_include_directories_flags = [ + "-isystem", + "${BUILD_PREFIX}/include/c++/v1", + "-isystem", + "${BUILD_PREFIX}/lib/clang/${COMPILER_VERSION}/include", + "-isystem", + "${CONDA_BUILD_SYSROOT}/usr/include", + "-isystem", + "${CONDA_BUILD_SYSROOT}/System/Library/Frameworks", + "-isystem", + "${PREFIX}/include", + ] + else: + 
toolchain_include_directories_flags = [ + "-isystem", + "${PREFIX}/include", + ] + + toolchain_include_directories_feature = feature( + name = "toolchain_include_directories", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [ + flag_group( + flags = toolchain_include_directories_flags, + ), + ], + ), + ], + ) + + linker_flags = feature( + name = "linker_flags", + flag_sets = [ + flag_set ( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.objcpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [flag_group(flags = "-l${LIBCXX} ${LDFLAGS}".split(" "))], + ), + ], + ) + + link_libcpp_feature = feature( + name = "link_libc++", + enabled = True, + flag_sets = [ + flag_set( + actions = all_link_actions + + ["objc-executable", "objc++-executable"], + flag_groups = [flag_group(flags = "-l${LIBCXX} ${LDFLAGS}".split(" "))], + ), + ], + ) + + supports_pic_feature = feature( + name = "supports_pic", + enabled = True + ) + + supports_dynamic_linker = feature( + name = "supports_dynamic_linker", + enabled = True + ) + + opt = feature( + name = "opt", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [ + flag_group( + flags = [ + "-g0", + "-O2", + "-D_FORTIFY_SOURCE=1", + "-DNDEBUG", + "-ffunction-sections", + "-fdata-sections", + ], + ), + ], + ), + ], + ) + + dbg = feature( + name = "dbg", + enabled = True, + flag_sets = [ + flag_set( + actions = [ + ACTION_NAMES.assemble, + ACTION_NAMES.preprocess_assemble, + ACTION_NAMES.linkstamp_compile, + ACTION_NAMES.c_compile, + ACTION_NAMES.cpp_compile, + ACTION_NAMES.cpp_header_parsing, + ACTION_NAMES.cpp_module_compile, + ACTION_NAMES.cpp_module_codegen, + ACTION_NAMES.lto_backend, + ACTION_NAMES.clif_match, + ], + flag_groups = [ + flag_group( + flags = [ + "-g" + ], + ), + ], + ), + ], + ) + + if "TARGET_PLATFORM".startswith("osx"): + cxx_builtin_include_directories = [ + "${CONDA_BUILD_SYSROOT}/System/Library/Frameworks", + "${CONDA_BUILD_SYSROOT}/usr/include", + "${BUILD_PREFIX}/lib/clang/${COMPILER_VERSION}/include", + "${BUILD_PREFIX}/include/c++/v1", + "${PREFIX}/include", + ] + else: + cxx_builtin_include_directories = [ + "${CONDA_BUILD_SYSROOT}/usr/include", + "${BUILD_PREFIX}/lib/gcc/${HOST}/${COMPILER_VERSION}", + "${BUILD_PREFIX}/${HOST}/include/c++/${COMPILER_VERSION}", + "${PREFIX}/include", + ] + + return cc_common.create_cc_toolchain_config_info( + ctx = ctx, + toolchain_identifier = "local", + host_system_name = "local", + #host_system_name = "TARGET_CPU", + target_system_name = "TARGET_SYSTEM", + target_cpu = "TARGET_CPU", + target_libc = "TARGET_LIBC", + compiler = "compiler", + abi_version = "local", + 
abi_libc_version = "local", + tool_paths = tool_paths, + cxx_builtin_include_directories = cxx_builtin_include_directories, + features = [toolchain_include_directories_feature, compiler_flags, cxx_flags, supports_pic_feature, linker_flags, supports_dynamic_linker, link_libcpp_feature, objcpp_flags], + ) + +cc_toolchain_config = rule( + implementation = _impl, + attrs = {}, + provides = [CcToolchainConfigInfo], +) diff --git a/recipe/custom_toolchain/cc_wrapper.sh.template b/recipe/custom_toolchain/cc_wrapper.sh.template new file mode 100644 index 0000000000000000000000000000000000000000..559bc1e558d7d53d1f60f0b1a60f2986f0327aac --- /dev/null +++ b/recipe/custom_toolchain/cc_wrapper.sh.template @@ -0,0 +1,107 @@ +#!/bin/bash +# +# Copyright 2015 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# OS X relpath is not really working. This is a wrapper script around gcc +# to simulate relpath behavior. +# +# This wrapper uses install_name_tool to replace all paths in the binary +# (bazel-out/.../path/to/original/library.so) by the paths relative to +# the binary. It parses the command line to behave as rpath is supposed +# to work. +# +# See https://blogs.oracle.com/dipol/entry/dynamic_libraries_rpath_and_mac +# on how to set those paths for Mach-O binaries. +# +set -eu + +INSTALL_NAME_TOOL="${INSTALL_NAME_TOOL}" + +LIBS= +LIB_DIRS= +RPATHS= +OUTPUT= +# let parse the option list +for i in "$@"; do + if [[ "${OUTPUT}" = "1" ]]; then + OUTPUT=$i + elif [[ "$i" =~ ^-l(.*)$ ]]; then + LIBS="${BASH_REMATCH[1]} $LIBS" + elif [[ "$i" =~ ^-L(.*)$ ]]; then + LIB_DIRS="${BASH_REMATCH[1]} $LIB_DIRS" + elif [[ "$i" =~ ^-Wl,-rpath,\@loader_path/(.*)$ ]]; then + RPATHS="${BASH_REMATCH[1]} ${RPATHS}" + elif [[ "$i" = "-o" ]]; then + # output is coming + OUTPUT=1 + fi +done + +# Set-up the environment + + +# Call the C++ compiler +echo CONDA_BUILD_SYSROOT=${CONDA_BUILD_SYSROOT} ${CLANG} "$@" +CONDA_BUILD_SYSROOT=${CONDA_BUILD_SYSROOT} ${CLANG} "$@" + +function get_library_path() { + for libdir in ${LIB_DIRS}; do + if [ -f ${libdir}/lib$1.so ]; then + echo "${libdir}/lib$1.so" + elif [ -f ${libdir}/lib$1.dylib ]; then + echo "${libdir}/lib$1.dylib" + fi + done +} + +# A convenient method to return the actual path even for non symlinks +# and multi-level symlinks. 
+function get_realpath() { + local previous="$1" + local next=$(readlink "${previous}") + while [ -n "${next}" ]; do + previous="${next}" + next=$(readlink "${previous}") + done + echo "${previous}" +} + +# Get the path of a lib inside a tool +function get_otool_path() { + # the lib path is the path of the original lib relative to the workspace + get_realpath $1 | sed 's|^.*/bazel-out/|bazel-out/|' +} + +# Do replacements in the output +for rpath in ${RPATHS}; do + for lib in ${LIBS}; do + unset libname + if [ -f "$(dirname ${OUTPUT})/${rpath}/lib${lib}.so" ]; then + libname="lib${lib}.so" + elif [ -f "$(dirname ${OUTPUT})/${rpath}/lib${lib}.dylib" ]; then + libname="lib${lib}.dylib" + fi + # ${libname-} --> return $libname if defined, or undefined otherwise. This is to make + # this set -e friendly + if [[ -n "${libname-}" ]]; then + libpath=$(get_library_path ${lib}) + if [ -n "${libpath}" ]; then + ${INSTALL_NAME_TOOL} -change $(get_otool_path "${libpath}") \ + "@loader_path/${rpath}/${libname}" "${OUTPUT}" + fi + fi + done +done + diff --git a/recipe/gen-bazel-toolchain.sh b/recipe/gen-bazel-toolchain.sh new file mode 100755 index 0000000000000000000000000000000000000000..cee9fc7d41adb8297ef14a7155fdc04731b994b9 --- /dev/null +++ b/recipe/gen-bazel-toolchain.sh @@ -0,0 +1,125 @@ +#!/bin/bash + +set -euxo pipefail + +function apply_cc_template() { + sed -ie "s:TARGET_CPU:${TARGET_CPU}:" $1 + sed -ie "s:TARGET_LIBC:${TARGET_LIBC}:" $1 + sed -ie "s:TARGET_SYSTEM:${TARGET_SYSTEM}:" $1 + sed -ie "s:TARGET_PLATFORM:${target_platform}:" $1 + sed -ie "s:\${CONDA_BUILD_SYSROOT}:${CONDA_BUILD_SYSROOT}:" $1 + sed -ie "s:\${COMPILER_VERSION}:${BAZEL_TOOLCHAIN_COMPILER_VERSION:-}:" $1 + sed -ie "s:\${GCC}:${BAZEL_TOOLCHAIN_GCC}:" $1 + sed -ie "s:\${PREFIX}:${PREFIX}:" $1 + sed -ie "s:\${BUILD_PREFIX}:${BUILD_PREFIX}:" $1 + sed -ie "s:\${LD}:${LD}:" $1 + sed -ie "s:\${CFLAGS}:${CFLAGS}:" $1 + sed -ie "s:\${CPPFLAGS}:${CPPFLAGS}:" $1 + sed -ie "s:\${CXXFLAGS}:${CXXFLAGS}:" $1 + sed -ie "s:\${LDFLAGS}:${LDFLAGS}:" $1 + sed -ie "s:\${NM}:${NM}:" $1 + sed -ie "s:\${STRIP}:${STRIP}:" $1 + sed -ie "s:\${AR}:${BAZEL_TOOLCHAIN_AR}:" $1 + sed -ie "s:\${HOST}:${HOST}:" $1 + sed -ie "s:\${LIBCXX}:${BAZEL_TOOLCHAIN_LIBCXX}:" $1 +} + +export BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 + +# set up bazel config file for conda provided clang toolchain +cp -r ${RECIPE_DIR}/custom_toolchain . 
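+# Note (descriptive, based on the templates above): the copied directory holds the
+# BUILD file, cc_toolchain_config.bzl and cc_wrapper.sh.template with
+# TARGET_CPU/TARGET_LIBC/TARGET_SYSTEM and ${VAR}-style placeholders;
+# apply_cc_template (defined above) rewrites them with sed so that Bazel picks up
+# the conda-provided compilers, sysroot and flags rather than probing the system
+# toolchain (BAZEL_USE_CPP_ONLY_TOOLCHAIN=1 keeps it on this C++ toolchain only).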
+pushd custom_toolchain + if [[ "${target_platform}" == osx-* ]]; then + export BAZEL_TOOLCHAIN_COMPILER_VERSION=$($CC -v 2>&1 | head -n1 | cut -d' ' -f3) + sed -e "s:\${CLANG}:${CLANG}:" \ + -e "s:\${INSTALL_NAME_TOOL}:${INSTALL_NAME_TOOL}:" \ + -e "s:\${CONDA_BUILD_SYSROOT}:${CONDA_BUILD_SYSROOT}:" \ + cc_wrapper.sh.template > cc_wrapper.sh + chmod +x cc_wrapper.sh + sed -e "s:\${CLANG}:${CC_FOR_BUILD}:" \ + -e "s:\${INSTALL_NAME_TOOL}:${INSTALL_NAME_TOOL//${HOST}/${BUILD}}:" \ + -e "s:\${CONDA_BUILD_SYSROOT}:${CONDA_BUILD_SYSROOT}:" \ + cc_wrapper.sh.template > cc_wrapper_build.sh + chmod +x cc_wrapper.sh + export BAZEL_TOOLCHAIN_GCC="cc_wrapper.sh" + export BAZEL_TOOLCHAIN_LIBCXX="c++" + export BAZEL_TOOLCHAIN_AR=${LIBTOOL} + else + export BAZEL_TOOLCHAIN_COMPILER_VERSION=$(${CC} -v 2>&1|tail -n1|cut -d' ' -f3) + export BAZEL_TOOLCHAIN_AR=$(basename ${AR}) + touch cc_wrapper.sh + export BAZEL_TOOLCHAIN_LIBCXX="stdc++" + export BAZEL_TOOLCHAIN_GCC="${GCC}" + fi + + export TARGET_SYSTEM="${HOST}" + if [[ "${target_platform}" == "osx-64" ]]; then + export TARGET_LIBC="macosx" + export TARGET_CPU="darwin_x86_64" + export TARGET_SYSTEM="x86_64-apple-macosx" + elif [[ "${target_platform}" == "osx-arm64" ]]; then + export TARGET_LIBC="macosx" + export TARGET_CPU="darwin_arm64" + export TARGET_SYSTEM="arm64-apple-macosx" + elif [[ "${target_platform}" == "linux-64" ]]; then + export TARGET_LIBC="unknown" + export TARGET_CPU="k8" + elif [[ "${target_platform}" == "linux-aarch64" ]]; then + export TARGET_LIBC="unknown" + export TARGET_CPU="aarch64" + elif [[ "${target_platform}" == "linux-ppc64le" ]]; then + export TARGET_LIBC="unknown" + export TARGET_CPU="ppc" + fi + export BUILD_SYSTEM=${BUILD} + if [[ "${build_platform}" == "osx-64" ]]; then + export BUILD_CPU="darwin" + export BUILD_SYSTEM="x86_64-apple-macosx" + elif [[ "${build_platform}" == "osx-arm64" ]]; then + export BUILD_CPU="darwin" + export BUILD_SYSTEM="arm64-apple-macosx" + elif [[ "${build_platform}" == "linux-64" ]]; then + export BUILD_CPU="k8" + elif [[ "${build_platform}" == "linux-aarch64" ]]; then + export BUILD_CPU="aarch64" + elif [[ "${build_platform}" == "linux-ppc64le" ]]; then + export BUILD_CPU="ppc" + fi + # The current Bazel release cannot distinguish between osx-arm64 and osx-64. + # This will change with later releases and then we should get rid of this section again. 
+ #if [[ "${target_platform}" == osx-* ]]; then + # if [[ "${build_platform}" == "${target_platform}" ]]; then + # export TARGET_CPU="darwin" + # export BUILD_CPU="darwin" + # fi + #fi + + sed -ie "s:TARGET_CPU:${TARGET_CPU}:" BUILD + sed -ie "s:BUILD_CPU:${BUILD_CPU}:" BUILD + + cp cc_toolchain_config.bzl cc_toolchain_build_config.bzl + apply_cc_template cc_toolchain_config.bzl + ( + if [[ "${build_platform}" != "${target_platform}" ]]; then + if [[ "${target_platform}" == osx-* ]]; then + BAZEL_TOOLCHAIN_GCC=cc_wrapper_build.sh + else + BAZEL_TOOLCHAIN_GCC=${BAZEL_TOOLCHAIN_GCC//${HOST}/${BUILD}} + fi + TARGET_CPU=${BUILD_CPU} + TARGET_SYSTEM=${BUILD_SYSTEM} + target_platform=${build_platform} + PREFIX=${BUILD_PREFIX} + LD=${LD//${HOST}/${BUILD}} + CFLAGS=${CFLAGS//${PREFIX}/${BUILD_PREFIX}} + CPPFLAGS=${CPPFLAGS//${PREFIX}/${BUILD_PREFIX}} + CXXFLAGS=${CXXFLAGS//${PREFIX}/${BUILD_PREFIX}} + LDFLAGS=${LDFLAGS//${PREFIX}/${BUILD_PREFIX}} + NM=${NM//${HOST}/${BUILD}} + STRIP=${STRIP//${HOST}/${BUILD}} + BAZEL_TOOLCHAIN_AR=${BAZEL_TOOLCHAIN_AR//${HOST}/${BUILD}} + HOST=${BUILD} + fi + apply_cc_template cc_toolchain_build_config.bzl + ) +popd diff --git a/recipe/meta.yaml b/recipe/meta.yaml index 03d98865a6d2d0826dc83ba05aea2b5e54d1df2e..27b8303fb3e7dc629c4e119e0277b68f7ea91a37 100644 --- a/recipe/meta.yaml +++ b/recipe/meta.yaml @@ -1,80 +1,98 @@ -{% set version = "2.2.0" %} - -{% set pyver = "cp{}{}".format(PY_VER[0], PY_VER[2]) %} -{% set platform = "macosx_10_11_x86_64" %} # [osx] -{% set platform = "win_amd64" %} # [win] -{% set fn = "{}-{}-{}-{}m-{}.whl".format("tensorflow", version, pyver, pyver, platform) %} # [not linux] -{% set fn = "tensorflow-{{ version }}.tar.gz" %} # [linux] +{% set version = "2.6.0" %} package: - name: tensorflow + name: tensorflow-split version: {{ version }} source: - fn: {{ fn }} - url: https://github.com/tensorflow/tensorflow/archive/v{{ version }}.tar.gz # [linux] - sha256: 69cd836f87b8c53506c4f706f655d423270f5a563b76dc1cfa60fbc3184185a3 # [linux] - url: https://pypi.io/packages/{{ pyver }}/t/tensorflow/{{ fn }} # [not linux] - sha256: 6735486ee9c3cb0807476e2b36ef7a4cd6c597cb24abf496e66b703360e1e54e # [osx and py==36] - sha256: c332c7fc5cfd54cb86d5da99787c9693e3a924848097c54df1b71ee595a39c93 # [osx and py==37] - sha256: bbcfb04738099bd46822db91584db74703fdddacf4cd0a76acfc5e086956b5ba # [osx and py==38] - sha256: 3ee8819732d8594913b7d22ded7b22e48a49aa015050d8dd8464eaa010ba2e41 # [win and py==36] - sha256: 68ea22aee9c269a6a0c1061c141f1ec1cd1b1be7569390519c1bf4773f434a40 # [win and py==37] - sha256: 784ab8217e4b0eb4d121c28430c6cdc2ce56c02634a9720d84fb30598b338b8c # [win and py==38] - - patches: - #- patches/0001-add-rt-to-linkopts.patch # [linux] - #- patches/0002-stable-image-rotate-test.patch # [linux] - - patches/0003-relax-tolerence-in-special_math_test.patch # [linux] - - patches/0004-skip-read-only-debugger_cli_common-test.patch # [linux] - #- patches/0005-fix-lookup_ops-test.patch # [linux] - - patches/0006-relax-precision-in-matrix_log_op_test.patch # [linux] - #- patches/0007-Add-init-symbol-names-prefixed-with-an-underscore.patch # [osx] - #- patches/0008-Add-alternate-clock_gettime-implementation-for-macOS.patch # [osx] - - patches/unbundle_llvm.patch # [linux] + - url: https://github.com/tensorflow/tensorflow/archive/v{{ version }}.tar.gz + sha256: 41b32eeaddcbc02b0583660bcf508469550e4cd0f86b22d2abe72dfebeacde0f + patches: + - patches/0001-relax-tolerence-in-special_math_test.patch + - patches/0002-skip-read-only-debugger_cli_common-test.patch + 
- patches/0003-relax-precision-in-matrix_log_op_test.patch + # requires setting GRPCIO_VERSION in build script + - patches/0004-loosen-requirements.patch + - patches/0005-remove_deprecated_use_of_error_message.patch + - patches/51450-Fix_protobuf_errors_when_using_system_protobuf.patch + - url: https://github.com/tensorflow/estimator/archive/refs/tags/v{{ version }}.tar.gz + sha256: 947705c60c50da0b4a8ceec1bc058aaf6bf567a7efdcd50d5173ebf6bafcf30f + folder: tensorflow-estimator build: - number: 0 - skip: True # [not x86_64] + number: 1 + skip: true # [win] requirements: build: - - {{ compiler('c') }} # [linux] - - {{ compiler('cxx') }} # [linux] - #- bazel ==2.0.0 # [linux] - - nasm # [linux] - - swig # [linux] + - python # [build_platform != target_platform] + - cross-python_{{ target_platform }} # [build_platform != target_platform] + - numpy 1.19.* # [build_platform != target_platform] + - {{ compiler('c') }} + - {{ compiler('cxx') }} + - bazel + - bazel >=4.2.1 # [osx and arm64] + - grpc-cpp + - libprotobuf + - nasm + - sed + - rsync # [linux] host: # conda build requirements - python - pip - #- setuptools <=39.1.0 - # requirements specified by the package itself - - absl-py >=0.1.6 - - astor >=0.6.0 - - gast >=0.2.0 - - keras-applications >=1.0.6 - - keras-preprocessing >=1.0.5 - - llvmdev >=8.0.0,<9.0.0 # [linux] - - zlib # [linux] - - libpng # [linux] - - libcurl # [linux] - - unzip # [linux] - - zip # [linux] - - openjdk >=8 # [linux] - - nsync # [linux] - - numpy >=1.16.1 - - six >=1.10.0 - - protobuf >=3.6.1 - - termcolor >=1.1.0 - - grpcio >=1.8.6 - - wheel >=0.26 # [linux] - - tensorflow-estimator >=2.2.0,<2.3.0a0 - run: - - {{ pin_subpackage('tensorflow-base', exact=True) }} + - zlib + - libpng + - libcurl + - curl # [win] + - unzip # [not win] + - zip # [not win] + - m2-unzip # [win] + - m2-zip # [win] + - openjdk >=8 + - nsync + # TF_SYSTEM_LIBS + - astor >=0.7.1 + - cython >=0.28.4 + - dill >=0.3.1.1 + - giflib + - grpc-cpp + - flatbuffers + - icu + - jpeg + - libpng + - libprotobuf + - openssl + - pybind11 + - sqlite + - snappy + - zlib + # requirements specified by the package itself, see + # github.com/tensorflow/tensorflow/blob/v{{ version }}/tensorflow/tools/pip_package/setup.py + - absl-py >=0.10,<1 + - astunparse >=1.6.3,<1.7 + - python-flatbuffers >=1.12,<2 + - google-pasta >=0.2,<1 + - h5py >=3.1.0,<3.2 + - keras-preprocessing >=1.1.2,<1.2 + - numpy 1.19.* + - opt_einsum >=3.3,<3.4 + - protobuf >=3.9.2 + - six >=1.15,<1.16 + - termcolor >=1.1,<1.2 + - typing_extensions >=3.7.4,<3.8 + - wheel >=0.35,<1 + - wrapt >=1.12.1,<1.13 + # upstream wants exact pin for gast + - gast ==0.4.0 + # TF-API needs to move in sync + - tensorboard >=2.6,<3 + - keras >=2.6,<3 outputs: - name: tensorflow-base + script: build_pkg.sh # [not win] + script: build_pkg.bat # [win] build: entry_points: - toco_from_protos = tensorflow.lite.toco.python.toco_from_protos:main @@ -88,105 +106,197 @@ outputs: requirements: # build requirements needs to pick up the compiler run_exports build: - - {{ compiler('c') }} # [linux] - - {{ compiler('cxx') }} # [linux] + - {{ compiler('c') }} + - {{ compiler('cxx') }} host: # conda build requirements - python - pip + # TF_SYSTEM_LIBS + - astor >=0.7.1 + - cython >=0.28.4 + - dill >=0.3.1.1 + - giflib + - grpc-cpp + - flatbuffers + - icu + - jpeg + - libcurl + - libpng + - libprotobuf + - openssl + - pybind11 + - snappy + - sqlite + - zlib # requirements specified by the package itself - - absl-py >=0.1.6 - - astor >=0.6.0 - - astunparse - - gast >=0.2.0 - - 
google-pasta - - h5py - - keras-preprocessing >=1.0.5 - - numpy >=1.16.1 - - six >=1.10.0 - - protobuf >=3.6.1 - - termcolor >=1.1.0 - - grpcio >=1.8.6 - - wheel >=0.26 # [unix] - - tensorflow-estimator >=2.2.0,<2.3.0a0 - - zlib # [linux] - - libpng # [linux] - - libcurl # [linux] + - absl-py >=0.10,<1 + - astunparse >=1.6.3,<1.7 + - python-flatbuffers >=1.12,<2 + - google-pasta >=0.2,<1 + - h5py >=3.1.0,<3.2 + - keras-preprocessing >=1.1.2,<1.2 + - numpy >=1.19.2,<1.20 + - opt_einsum >=3.3,<3.4 + - protobuf >=3.9.2 + - six >=1.15,<1.16 + - termcolor >=1.1,<1.2 + - typing_extensions >=3.7.4,<3.8 + - wheel >=0.35,<1 + - wrapt >=1.12.1,<1.13 + # upstream wants exact pin for gast + - gast ==0.4.0 + # TF-API needs to move in sync + - tensorboard >=2.6,<3 + - keras >=2.6,<3 run: - python - - absl-py >=0.1.6 - - astor >=0.6.0 - - astunparse - - gast >=0.2.0 - - google-pasta - - h5py - - opt_einsum - - keras-preprocessing >=1.0.5 - - {{ pin_compatible('numpy') }} - - scipy - - six >=1.10.0 - - protobuf >=3.6.1 - - termcolor >=1.1.0 - - grpcio >=1.8.6 - - tensorflow-estimator >=2.2.0,<2.3.0a0 - - tensorboard >=2.2.0,<2.3.0a0 + - absl-py >=0.10,<1 + - astunparse >=1.6.3,<1.7 + - python-flatbuffers >=1.12,<2 + - google-pasta >=0.2,<1 + - h5py >=3.1.0,<3.2 + - keras-preprocessing >=1.1.2,<1.2 + - numpy >=1.19.2,<1.20 + - opt_einsum >=3.3,<3.4 + - protobuf >=3.9.2 + - six >=1.15,<1.16 + - termcolor >=1.1,<1.2 + - typing_extensions >=3.7.4,<3.8 + - wheel >=0.35,<1 + - wrapt >=1.12.1,<1.13 + # upstream wants exact pin for gast + - gast ==0.4.0 + # TF-API needs to move in sync + - tensorboard >=2.6,<3 + - keras >=2.6,<3 + - grpcio {{ grpc_cpp }}.* test: + files: + - test_tensorflow_base.py + commands: + - cat test_tensorflow_base.py # [unix] + - python test_tensorflow_base.py + - tf_upgrade_v2 --help + - saved_model_cli --help + - tflite_convert --help # [not win] + - toco_from_protos --help # [not win] + - toco --help # [not win] + + - name: tensorflow-estimator + script: build_estimator.sh # [not win] + script: build_estimator.bat # [win] + requirements: + build: + - {{ compiler('c') }} + - {{ compiler('cxx') }} + - bazel + - bazel >=4.2.1 # [osx and arm64] + host: + - python + - pip + - setuptools + - wheel + # This ensures that a consistent version of openssl is chosen between + # all packages. + # https://github.com/conda-forge/conda-forge.github.io/issues/1528 + - openssl + - {{ pin_subpackage('tensorflow-base', exact=True) }} + run: + - python + - {{ pin_subpackage('tensorflow-base', exact=True) }} + test: + requires: + - pip + commands: + - pip check + + + - name: tensorflow + requirements: + host: + - python + # This ensures that a consistent version of openssl is chosen between + # all packages. 
+ # https://github.com/conda-forge/conda-forge.github.io/issues/1528 + - openssl + - {{ pin_subpackage('tensorflow-base', exact=True) }} + run: + - python + - {{ pin_subpackage('tensorflow-estimator', exact=True) }} + - {{ pin_subpackage('tensorflow-base', exact=True) }} + test: + requires: + - pip imports: - tensorflow commands: - - freeze_graph --help - - saved_model_cli --help - - tflite_convert --help # [not win] - - toco_from_protos --help # [not win] - - toco --help # [not win] + - pip check - name: libtensorflow - script: cp_libtensorflow.sh # [unix] + script: cp_libtensorflow.sh build: skip: true # [not linux] requirements: # build requirements needs to pick up the compiler run_exports build: - - {{ compiler('c') }} # [linux] - - {{ compiler('cxx') }} # [linux] + - {{ compiler('c') }} + - {{ compiler('cxx') }} # host requirements to pick up run_exports host: - - libcurl # [linux] - - zlib # [linux] + - giflib + - grpc-cpp + - icu + - jpeg + - libcurl + - libpng + - libprotobuf + - openssl + - snappy + - sqlite + - zlib test: files: + - test_libtensorflow.sh - test_c.c requires: - {{ compiler('c') }} commands: - test -f $PREFIX/lib/libtensorflow.so # [not win] - - $CC -o test_c -L${PREFIX}/lib/ -ltensorflow -I${PREFIX}/include/ test_c.c && ./test_c # [not win] + - ./test_libtensorflow.sh # [not win] - name: libtensorflow_cc - script: cp_libtensorflow_cc.sh # [unix] + script: cp_libtensorflow_cc.sh build: skip: true # [not linux] requirements: # build requirements needs to pick up the compiler run_exports build: - - {{ compiler('c') }} # [linux] - - {{ compiler('cxx') }} # [linux] + - {{ compiler('c') }} + - {{ compiler('cxx') }} # host requirements to pick up run_exports host: - - libcurl # [linux] - - zlib # [linux] - run: - - libprotobuf >=3.6.1,<3.7 - - nsync + - giflib + - grpc-cpp + - icu + - jpeg + - libcurl + - libpng + - libprotobuf + - openssl + - snappy + - sqlite + - zlib test: files: + - test_libtensorflow_cc.sh - test_cc.cc requires: - {{ compiler('cxx') }} commands: - test -f $PREFIX/lib/libtensorflow_cc.so # [not win] - - $CXX -std=c++11 -o test_cc -L${PREFIX}/lib/ -ltensorflow_cc -ltensorflow_framework -lrt -I${PREFIX}/include/ test_cc.cc && ./test_cc # [not win] + - ./test_libtensorflow_cc.sh # [not win] about: home: http://tensorflow.org/ @@ -203,11 +313,14 @@ about: doc_source_url: https://github.com/tensorflow/tensorflow/tree/master/tensorflow/docs_src extra: + feedstock-name: tensorflow recipe-maintainers: - - waitingkuo + - farhantejani - ghego - - hajapy - gilbertfrancois - - farhantejani - - njzjz + - h-vetinari + - hajapy - jschueller + - njzjz + - waitingkuo + - xhochy diff --git a/recipe/patches/0001-relax-tolerence-in-special_math_test.patch b/recipe/patches/0001-relax-tolerence-in-special_math_test.patch new file mode 100644 index 0000000000000000000000000000000000000000..0b857b9da1e71c90969a8465c6e69d3ca3cce30a --- /dev/null +++ b/recipe/patches/0001-relax-tolerence-in-special_math_test.patch @@ -0,0 +1,25 @@ +From 59ddc914ccd9a223c8411dc8a00eab9206971cd6 Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Tue, 20 Jun 2017 15:13:14 -0500 +Subject: [PATCH 1/5] relax tolerence in special_math_test + +--- + .../python/kernel_tests/distributions/special_math_test.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/tensorflow/python/kernel_tests/distributions/special_math_test.py b/tensorflow/python/kernel_tests/distributions/special_math_test.py +index ce2cd8614f5..091753b4ef5 100644 +--- 
a/tensorflow/python/kernel_tests/distributions/special_math_test.py ++++ b/tensorflow/python/kernel_tests/distributions/special_math_test.py +@@ -256,7 +256,7 @@ class LogNdtrTestUpper(NdtrTest): + min=sm.LOGNDTR_FLOAT64_UPPER, + max=35., # Beyond this, log_cdf(x) may be zero. + shape=[100]) +- _error32 = ErrorSpec(rtol=1e-6, atol=1e-14) ++ _error32 = ErrorSpec(rtol=1e-6, atol=1e-13) + _error64 = ErrorSpec(rtol=1e-6, atol=1e-14) + + +-- +2.29.2.windows.3 + diff --git a/recipe/patches/0002-skip-read-only-debugger_cli_common-test.patch b/recipe/patches/0002-skip-read-only-debugger_cli_common-test.patch new file mode 100644 index 0000000000000000000000000000000000000000..2153e58699f84c5fbae3e12c167a8c3757b21e98 --- /dev/null +++ b/recipe/patches/0002-skip-read-only-debugger_cli_common-test.patch @@ -0,0 +1,31 @@ +From 5d04da7f2b31df87f38bebc5ed806589880e7eed Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Thu, 4 Jan 2018 17:17:23 -0600 +Subject: [PATCH 2/5] skip read only debugger_cli_common test + +If tensorflow is built as root the +debugger_cli_common::testCommandHistoryHandlesWritingIOErrorGracoiusly test +will fail as the log never becomes read only. Skip the portion of the test +that depends on the read-only behavior of the log. +--- + tensorflow/python/debug/cli/debugger_cli_common_test.py | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/tensorflow/python/debug/cli/debugger_cli_common_test.py b/tensorflow/python/debug/cli/debugger_cli_common_test.py +index 93df845c4c5..814ddf13a16 100644 +--- a/tensorflow/python/debug/cli/debugger_cli_common_test.py ++++ b/tensorflow/python/debug/cli/debugger_cli_common_test.py +@@ -1057,7 +1057,9 @@ class CommandHistoryTest(test_util.TensorFlowTestCase): + + cmd_hist_3 = debugger_cli_common.CommandHistory( + limit=3, history_file_path=self._history_file_path) +- self.assertEqual(["help"], cmd_hist_3.most_recent_n(1)) ++ # when the test are run as root the log file is still writable and this ++ # test fails. 
++ #self.assertEqual(["help"], cmd_hist_3.most_recent_n(1)) + + self._restoreFileReadWritePermissions(self._history_file_path) + +-- +2.29.2.windows.3 + diff --git a/recipe/patches/0003-relax-precision-in-matrix_log_op_test.patch b/recipe/patches/0003-relax-precision-in-matrix_log_op_test.patch new file mode 100644 index 0000000000000000000000000000000000000000..42b39f8f106cfca05a09b8c9d49e7d71805a6194 --- /dev/null +++ b/recipe/patches/0003-relax-precision-in-matrix_log_op_test.patch @@ -0,0 +1,25 @@ +From 2e72112b3dd2e6e8c234546935bcad5dbd05280d Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Mon, 20 Aug 2018 12:19:04 -0500 +Subject: [PATCH 3/5] relax precision in matrix_log_op_test + +--- + tensorflow/python/kernel_tests/matrix_logarithm_op_test.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/tensorflow/python/kernel_tests/matrix_logarithm_op_test.py b/tensorflow/python/kernel_tests/matrix_logarithm_op_test.py +index 5004a9c5588..62c2bfe1abe 100644 +--- a/tensorflow/python/kernel_tests/matrix_logarithm_op_test.py ++++ b/tensorflow/python/kernel_tests/matrix_logarithm_op_test.py +@@ -45,7 +45,7 @@ class LogarithmOpTest(test.TestCase): + tf_ans = linalg_impl.matrix_exponential( + gen_linalg_ops.matrix_logarithm(inp)) + out = self.evaluate(tf_ans) +- self.assertAllClose(inp, out, rtol=1e-4, atol=1e-3) ++ self.assertAllClose(inp, out, rtol=1e-3, atol=1e-3) + + def _verifyLogarithmComplex(self, x): + for np_type in [np.complex64, np.complex128]: +-- +2.29.2.windows.3 + diff --git a/recipe/patches/0004-loosen-requirements.patch b/recipe/patches/0004-loosen-requirements.patch new file mode 100644 index 0000000000000000000000000000000000000000..771b89b9b499af741842dd59a443d196b6aa6649 --- /dev/null +++ b/recipe/patches/0004-loosen-requirements.patch @@ -0,0 +1,35 @@ +From b6371c3abefb0ab2b3e943f43088ab42fac9c5bc Mon Sep 17 00:00:00 2001 +From: "Uwe L. Korn" <uwe.korn@quantco.com> +Date: Wed, 8 Sep 2021 06:57:09 +0200 +Subject: [PATCH] Fixup requirements + +* Set grpcio pin to the built grpc version +* Remove unused clang requirement +--- + tensorflow/tools/pip_package/setup.py | 3 +-- + 1 file changed, 1 insertion(+), 2 deletions(-) + +diff --git a/tensorflow/tools/pip_package/setup.py b/tensorflow/tools/pip_package/setup.py +index 3d4a64de..3ea8ac5e 100644 +--- a/tensorflow/tools/pip_package/setup.py ++++ b/tensorflow/tools/pip_package/setup.py +@@ -82,7 +82,6 @@ REQUIRED_PACKAGES = [ + # Install other dependencies + 'absl-py ~= 0.10', + 'astunparse ~= 1.6.3', +- 'clang ~= 5.0', + 'flatbuffers ~= 1.12.0', + 'google_pasta ~= 0.2', + 'h5py ~= 3.1.0', +@@ -126,7 +125,7 @@ if 'tf_nightly' in project_name: + # BoringSSL support. + # See https://github.com/tensorflow/tensorflow/issues/17882. + if sys.byteorder == 'little': +- REQUIRED_PACKAGES.append('grpcio >= 1.37.0, < 2.0') ++ REQUIRED_PACKAGES.append('grpcio >= GRPCIO_VERSION, < 2.0') + + + # Packages which are only needed for testing code. 
+-- +2.30.1 (Apple Git-130) + diff --git a/recipe/patches/0005-remove_deprecated_use_of_error_message.patch b/recipe/patches/0005-remove_deprecated_use_of_error_message.patch new file mode 100644 index 0000000000000000000000000000000000000000..ac59e24f374daaae1733b27735d39ffe637d8b65 --- /dev/null +++ b/recipe/patches/0005-remove_deprecated_use_of_error_message.patch @@ -0,0 +1,25 @@ +From ec2cc392f3a78d5cda64a36c615572693a80c8d7 Mon Sep 17 00:00:00 2001 +From: Zachary Garrett <zachgarrett@google.com> +Date: Mon, 27 Sep 2021 08:40:23 -0700 +Subject: [PATCH] Replace deprecated `absl::Status::error_message` call with + `absl::Status::message`. + +PiperOrigin-RevId: 399198325 +Change-Id: Ib4a5e1d3a5e1447fefe9de428946341522a021a4 +--- + tensorflow/core/kernels/example_parsing_ops.cc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/tensorflow/core/kernels/example_parsing_ops.cc b/tensorflow/core/kernels/example_parsing_ops.cc +index a1265cfb5c6a1..ada919bbd7b13 100644 +--- a/tensorflow/core/kernels/example_parsing_ops.cc ++++ b/tensorflow/core/kernels/example_parsing_ops.cc +@@ -1218,7 +1218,7 @@ class DecodeJSONExampleOp : public OpKernel { + resolver_.get(), "type.googleapis.com/tensorflow.Example", &in, &out); + OP_REQUIRES(ctx, status.ok(), + errors::InvalidArgument("Error while parsing JSON: ", +- string(status.error_message()))); ++ string(status.message()))); + } + } + diff --git a/recipe/patches/51450-Fix_protobuf_errors_when_using_system_protobuf.patch b/recipe/patches/51450-Fix_protobuf_errors_when_using_system_protobuf.patch new file mode 100644 index 0000000000000000000000000000000000000000..35ae53f3ba46337430dd937c2af597b50c15788f --- /dev/null +++ b/recipe/patches/51450-Fix_protobuf_errors_when_using_system_protobuf.patch @@ -0,0 +1,26 @@ +From 7fbc5be3b5a3cc17c05f1ae5b0577fdca8a0e0e2 Mon Sep 17 00:00:00 2001 +From: sclarkson <sc@lambdal.com> +Date: Thu, 12 Aug 2021 03:23:28 -0700 +Subject: [PATCH] Fix protobuf errors when using system protobuf + +When tensorflow and python protobuf use the same instance of +libprotobuf, pywrap_tensorflow must be imported before anything +else that would import protobuf definitions. 
+--- + tensorflow/python/__init__.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/tensorflow/python/__init__.py b/tensorflow/python/__init__.py +index c90336802e26f..39b57ffecab3f 100644 +--- a/tensorflow/python/__init__.py ++++ b/tensorflow/python/__init__.py +@@ -37,8 +37,8 @@ + # go/tf-wildcard-import + # pylint: disable=wildcard-import,g-bad-import-order,g-import-not-at-top + +-from tensorflow.python.eager import context + from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow ++from tensorflow.python.eager import context + + # pylint: enable=wildcard-import + diff --git a/recipe/patches/legacy/0001-add-rt-to-linkopts.patch b/recipe/patches/legacy/0001-add-rt-to-linkopts.patch new file mode 100644 index 0000000000000000000000000000000000000000..84b9c183ddfe2aeca6fa34b1673c30db58119e32 --- /dev/null +++ b/recipe/patches/legacy/0001-add-rt-to-linkopts.patch @@ -0,0 +1,46 @@ +From 30a4ed9b097f1ea9a2a1e9303e044015d0041d63 Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Wed, 27 Jun 2018 12:46:18 -0500 +Subject: [PATCH 1/8] add rt to linkopts + +--- + tensorflow/BUILD | 1 + + tensorflow/tensorflow.bzl | 4 ++-- + 2 files changed, 3 insertions(+), 2 deletions(-) + +diff --git a/tensorflow/BUILD b/tensorflow/BUILD +index 823ad8f..96e12cb 100644 +--- a/tensorflow/BUILD ++++ b/tensorflow/BUILD +@@ -429,5 +429,6 @@ tf_cc_shared_object( + "//tensorflow:darwin": [], + "//tensorflow:windows": [], + "//conditions:default": [ ++ "-lrt", + "-Wl,--version-script,$(location //tensorflow:tf_framework_version_script.lds)", + ], +diff --git a/tensorflow/tensorflow.bzl b/tensorflow/tensorflow.bzl +index d93e0df..b8438d2 100644 +--- a/tensorflow/tensorflow.bzl ++++ b/tensorflow/tensorflow.bzl +@@ -377,7 +377,7 @@ def tf_cc_shared_object( + srcs = [], + deps = [], + data = [], +- linkopts = [], ++ linkopts = ['-lrt'], + framework_so = tf_binary_additional_srcs(), + kernels = [], + **kwargs): +@@ -413,7 +413,7 @@ def tf_cc_binary( + srcs = [], + deps = [], + data = [], +- linkopts = [], ++ linkopts = ['-lrt'], + copts = tf_copts(), + kernels = [], + **kwargs): +-- +2.7.4 + diff --git a/recipe/patches/legacy/0002-stable-image-rotate-test.patch b/recipe/patches/legacy/0002-stable-image-rotate-test.patch new file mode 100644 index 0000000000000000000000000000000000000000..2a2c4448bbab49217069ccb4082285522269484c --- /dev/null +++ b/recipe/patches/legacy/0002-stable-image-rotate-test.patch @@ -0,0 +1,38 @@ +From 11a1f8a90ccffb2746c1d6a3356fc2033be38bea Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Thu, 8 Jun 2017 10:48:30 -0500 +Subject: [PATCH 2/8] stable image rotate test + +--- + tensorflow/contrib/image/python/kernel_tests/image_ops_test.py | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/tensorflow/contrib/image/python/kernel_tests/image_ops_test.py b/tensorflow/contrib/image/python/kernel_tests/image_ops_test.py +index ba5cdfe..41ffac4 100644 +--- a/tensorflow/contrib/image/python/kernel_tests/image_ops_test.py ++++ b/tensorflow/contrib/image/python/kernel_tests/image_ops_test.py +@@ -54,7 +54,7 @@ class ImageOpsTest(test_util.TensorFlowTestCase): + image = array_ops.reshape( + math_ops.cast(math_ops.range(36), dtype), (6, 6)) + image_rep = array_ops.tile(image[None, :, :, None], [3, 1, 1, 1]) +- angles = constant_op.constant([0.0, np.pi / 4.0, np.pi / 2.0], ++ angles = constant_op.constant([0.0, -np.pi / 2.0, np.pi / 2.0], + dtypes.float32) + image_rotated = 
image_ops.rotate(image_rep, angles) + self.assertAllEqual(image_rotated[:, :, :, 0].eval(), +@@ -63,9 +63,9 @@ class ImageOpsTest(test_util.TensorFlowTestCase): + [18, 19, 20, 21, 22, 23], + [24, 25, 26, 27, 28, 29], + [30, 31, 32, 33, 34, 35]], +- [[0, 3, 4, 11, 17, 0], [2, 3, 9, 16, 23, 23], +- [1, 8, 15, 21, 22, 29], [6, 13, 20, 21, 27, 34], +- [12, 18, 19, 26, 33, 33], [0, 18, 24, 31, 32, 0]], ++ [[30, 24, 18, 12, 6, 0], [31, 25, 19, 13, 7, 1], ++ [32, 26, 20, 14, 8, 2], [33, 27, 21, 15, 9, 3], ++ [34, 28, 22, 16, 10, 4], [35, 29, 23, 17, 11, 5]], + [[5, 11, 17, 23, 29, 35], [4, 10, 16, 22, 28, 34], + [3, 9, 15, 21, 27, 33], [2, 8, 14, 20, 26, 32], + [1, 7, 13, 19, 25, 31], [0, 6, 12, 18, 24, 30]]]) +-- +2.7.4 + diff --git a/recipe/patches/legacy/0005-fix-lookup_ops-test.patch b/recipe/patches/legacy/0005-fix-lookup_ops-test.patch new file mode 100644 index 0000000000000000000000000000000000000000..ffdc155032b13b20233fcb95c141f22ee42d075c --- /dev/null +++ b/recipe/patches/legacy/0005-fix-lookup_ops-test.patch @@ -0,0 +1,30 @@ +From 908dba7dcc7d21e5479a37c5c1acccb6d0b90056 Mon Sep 17 00:00:00 2001 +From: Jonathan Helmus <jjhelmus@gmail.com> +Date: Thu, 11 Jan 2018 13:05:31 -0600 +Subject: [PATCH 5/8] fix lookup_ops test + +The order of the exported data is undefined, perform a valid sort before +comparing. +--- + tensorflow/contrib/lookup/lookup_ops_test.py | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/tensorflow/contrib/lookup/lookup_ops_test.py b/tensorflow/contrib/lookup/lookup_ops_test.py +index 9b2c2dd..81d0648 100644 +--- a/tensorflow/contrib/lookup/lookup_ops_test.py ++++ b/tensorflow/contrib/lookup/lookup_ops_test.py +@@ -505,9 +505,9 @@ class MutableHashTableOpTest(test.TestCase): + msg="Saw shape %s" % exported_values.shape) + # exported data is in the order of the internal map, i.e. 
undefined + sorted_keys = np.sort(exported_keys.eval()) +- sorted_values = np.sort(exported_values.eval()) ++ sorted_values = np.sort(exported_values.eval(), axis=0) + self.assertAllEqual([b"brain", b"salad", b"surgery"], sorted_keys) +- self.assertAllEqual([[4, 5], [2, 3], [0, 1]], sorted_values) ++ self.assertAllEqual([[0, 1], [2, 3], [4, 5]], sorted_values) + + def testMutableHashTableExportInsert(self): + with self.cached_session(): +-- +2.7.4 + diff --git a/recipe/patches/legacy/0007-Add-init-symbol-names-prefixed-with-an-underscore.patch b/recipe/patches/legacy/0007-Add-init-symbol-names-prefixed-with-an-underscore.patch new file mode 100644 index 0000000000000000000000000000000000000000..9f9a6fa488aaa24fdb9908b114497d43194eb21b --- /dev/null +++ b/recipe/patches/legacy/0007-Add-init-symbol-names-prefixed-with-an-underscore.patch @@ -0,0 +1,53 @@ +From 03d0d2bf4e47ac6b768d9f3371cdb941bf3d36e4 Mon Sep 17 00:00:00 2001 +From: Nehal J Wani <nehaljw.kkd1@gmail.com> +Date: Thu, 23 Aug 2018 11:34:00 -0500 +Subject: [PATCH 7/8] Add (init) symbol names prefixed with an underscore +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +The command: `python -c 'from tensorflow.contrib import tensorrt as trt'` fails +with: + +File "/xxx/lib/python2.7/site-packages/tensorflow/contrib/tensorrt/wrap_conversion.py", line 24, in swig_import_helper + _mod = imp.load_module('_wrap_conversion', fp, pathname, description) +ImportError: dynamic module does not define init function (init_wrap_conversion) + +➜ 1534993632 nm /xxx/lib/python2.7/site-packages/tensorflow/contrib/tensorrt/_wrap_conversion.so | grep wrap_conversion +0000000000003670 t _init_wrap_conversion <---------- symbol defined, but local, not exposed + U init_wrap_conversion + I init_wrap_conversion (indirect for init_wrap_conversion) + +Happens because the linker version script has: `init_wrap_conversion` instead of `_init_wrap_conversion` + +xref: https://github.com/tensorflow/tensorflow/issues/21818 +xref: https://stackoverflow.com/a/37534357 +--- + tensorflow/tensorflow.bzl | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/tensorflow/tensorflow.bzl b/tensorflow/tensorflow.bzl +index b8438d2..f6850e3 100644 +--- a/tensorflow/tensorflow.bzl ++++ b/tensorflow/tensorflow.bzl +@@ -1594,7 +1594,7 @@ def _append_init_to_versionscript_impl(ctx): + template = ctx.file.template_file, + output = ctx.outputs.versionscript, + substitutions = { +- "global:": "global:\n init_%s;\n PyInit_*;" % (mod_name), ++ "global:": "global:\n init_%s;\n _init_%s;\n PyInit_*;\n _PyInit_*;"%(mod_name, mod_name), + }, + is_executable = False, + ) +@@ -1603,7 +1603,7 @@ def _append_init_to_versionscript_impl(ctx): + template = ctx.file.template_file, + output = ctx.outputs.versionscript, + substitutions = { +- "*tensorflow*": "*tensorflow*\ninit_%s\nPyInit_*\n" % (mod_name), ++ "*tensorflow*": "*tensorflow*\ninit_%s\n_init_%s\nPyInit_*\n_PyInit_*\n"%(mod_name, mod_name), + }, + is_executable = False, + ) +-- +2.7.4 + diff --git a/recipe/patches/legacy/0008-Add-alternate-clock_gettime-implementation-for-macOS.patch b/recipe/patches/legacy/0008-Add-alternate-clock_gettime-implementation-for-macOS.patch new file mode 100644 index 0000000000000000000000000000000000000000..8c04e0ff09d25a9758a34267dd3e43938e354ccd --- /dev/null +++ b/recipe/patches/legacy/0008-Add-alternate-clock_gettime-implementation-for-macOS.patch @@ -0,0 +1,55 @@ +From 2f0373b5f8ea3e1ab8708cb9c64c404922b96b81 Mon Sep 17 00:00:00 2001 
+From: Nehal J Wani <nehaljw.kkd1@gmail.com> +Date: Sun, 30 Sep 2018 09:16:53 -0500 +Subject: [PATCH 8/8] Add alternate clock_gettime() implementation for macOS < + 10.12 + +clock_gettime is not available in macOS SDK < 10.12 +--- + tensorflow/core/platform/posix/env_time.cc | 21 +++++++++++++++++++++ + 1 file changed, 21 insertions(+) + +diff --git a/tensorflow/core/platform/posix/env_time.cc b/tensorflow/core/platform/posix/env_time.cc +index 59a67b1..88f8a6c 100644 +--- a/tensorflow/core/platform/posix/env_time.cc ++++ b/tensorflow/core/platform/posix/env_time.cc +@@ -18,6 +18,23 @@ limitations under the License. + + #include "tensorflow/core/platform/env_time.h" + ++// Slightly pruned version of https://gist.github.com/alfwatt/3588c5aa1f7a1ef7a3bb ++// Copyright (c) 2015-2018 Alf Watt - Open Source - https://opensource.org/licenses/MIT ++#if defined __APPLE__ ++#include <mach/clock.h> ++#include <mach/mach.h> ++int alt_clock_gettime (int clock_id, timespec *ts) { ++ clock_serv_t cclock; ++ mach_timespec_t mts; ++ host_get_clock_service (mach_host_self (), clock_id, &cclock); ++ clock_get_time (cclock, &mts); ++ mach_port_deallocate (mach_task_self (), cclock); ++ ts->tv_sec = mts.tv_sec; ++ ts->tv_nsec = mts.tv_nsec; ++ return 0; ++} ++#endif ++ + namespace tensorflow { + + namespace { +@@ -28,7 +45,11 @@ class PosixEnvTime : public EnvTime { + + uint64 NowNanos() override { + struct timespec ts; ++#if defined __APPLE__ ++ alt_clock_gettime(CALENDAR_CLOCK, &ts); ++#else + clock_gettime(CLOCK_REALTIME, &ts); ++#endif + return (static_cast<uint64>(ts.tv_sec) * kSecondsToNanos + + static_cast<uint64>(ts.tv_nsec)); + } +-- +2.7.4 + + diff --git a/recipe/test_libtensorflow.sh b/recipe/test_libtensorflow.sh new file mode 100755 index 0000000000000000000000000000000000000000..c5ca3e089cf4ee95909d2b889f4068d9d4509662 --- /dev/null +++ b/recipe/test_libtensorflow.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -euxo pipefail + +$CC -o test_c -L${PREFIX}/lib/ -ltensorflow -I${PREFIX}/include/ test_c.c +./test_c diff --git a/recipe/test_libtensorflow_cc.sh b/recipe/test_libtensorflow_cc.sh new file mode 100755 index 0000000000000000000000000000000000000000..aa7026745c79f875f809f67c6c7f0cf56f9c83e7 --- /dev/null +++ b/recipe/test_libtensorflow_cc.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -exuo pipefail + +$CXX -std=c++11 -o test_cc -L${PREFIX}/lib/ -ltensorflow_cc -ltensorflow_framework -lrt -I${PREFIX}/include/ test_cc.cc +./test_cc diff --git a/recipe/test_tensorflow_base.py b/recipe/test_tensorflow_base.py new file mode 100644 index 0000000000000000000000000000000000000000..5aa9e52cef6a690c9751b4739e3d89888644b083 --- /dev/null +++ b/recipe/test_tensorflow_base.py @@ -0,0 +1,18 @@ +import tensorflow as tf +hello = tf.constant('Hello, TensorFlow!') +a = tf.constant(10) +b = tf.constant(32) +a + b + +mnist = tf.keras.datasets.mnist + +(x_train, y_train), (x_test, y_test) = mnist.load_data() +x_train, x_test = x_train / 255.0, x_test / 255.0 + +model = tf.keras.models.Sequential([ + tf.keras.layers.Flatten(input_shape=(28, 28)), + tf.keras.layers.Dense(128, activation='relu'), + tf.keras.layers.Dropout(0.2), + tf.keras.layers.Dense(10, activation='softmax') +]) +
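+
+# Possible extension (assumption only, not part of the recipe's test as written):
+# compiling and briefly fitting the model above would also smoke-test the
+# training kernels; kept commented out to keep the conda-build test phase fast.
+# model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
+# model.fit(x_train[:512], y_train[:512], epochs=1, verbose=0)
+# model.evaluate(x_test[:512], y_test[:512], verbose=0)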