declare -x ABI="amd64" declare -x ABI_MIPS="" declare -x ABI_S390="" declare -x ABI_X86="64" declare -x ADAFLAGS="" declare -x ADA_TARGET="" declare -x ALSA_CARDS="" declare -x AMDGPU_TARGETS="" declare -x APACHE2_MODULES="" declare -x APACHE2_MPMS="" declare -x ARCH="amd64" declare BDEPEND=" app-arch/unzip >=dev-libs/protobuf-3.8.0 dev-java/java-config >=dev-util/bazel-5.1.1 cuda? ( >=dev-util/nvidia-cuda-toolkit-9.1[profiler] ) !python? ( dev-lang/python ) python? ( dev-python/cython dev-python/mock >=dev-python/grpcio-tools-1.28 ) >=dev-util/bazel-0.20" declare -x BINPKG_FORMAT="xpak" declare -x BINPKG_GPG_SIGNING_BASE_COMMAND="/usr/bin/flock /run/lock/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor [PORTAGE_CONFIG]" declare -x BINPKG_GPG_SIGNING_DIGEST="SHA512" declare -x BINPKG_GPG_VERIFY_BASE_COMMAND="/usr/bin/gpg --verify --batch --no-tty --no-auto-check-trustdb --status-fd 2 [PORTAGE_CONFIG] [SIGNATURE]" declare -x BINPKG_GPG_VERIFY_GPG_HOME="/etc/portage/gnupg" declare -x BOOTSTRAP_USE="unicode internal-glib pkg-config split-usr xml python_targets_python3_10 multilib" declare -x BUILD_CXXFLAGS=" -std=c++17" declare -x CALLIGRA_FEATURES="" declare -x CAMERAS="" declare -x CASROOT="/usr" declare -x CBUILD="x86_64-pc-linux-gnu" declare -x CCASFLAGS="" declare -x CC_OPT_FLAGS=" " declare -x CFLAGS="-O2 -pipe -march=native -mfpmath=sse -m64 -falign-loops=2 -falign-functions=2 -falign-jumps=2 -fomit-frame-pointer -fforce-addr -mprefer-avx128 -mprefer-vector-width=128 -ftree-vectorize -msse -msse2 -msse3 -msse4.1 -msse4.2 -mavx" declare -x CFLAGS_amd64="-m64" declare -x CFLAGS_default declare -x CFLAGS_x32="-mx32" declare -x CFLAGS_x86="-m32 -mfpmath=sse" declare -- CHECKREQS_DISK_BUILD="16G" declare -- CHECKREQS_MEMORY="5G" declare -x CHOST="x86_64-pc-linux-gnu" declare -x CHOST_amd64="x86_64-pc-linux-gnu" declare -x CHOST_default="x86_64-pc-linux-gnu" declare -x CHOST_x32="x86_64-pc-linux-gnux32" declare -x CHOST_x86="i686-pc-linux-gnu" declare -x CINNAMON_VERSION="5.2.7" declare -x COLLECTD_PLUGINS="" declare -x COMMON_FLAGS="-O2 -pipe -march=native -mfpmath=sse -m64 -falign-loops=2 -falign-functions=2 -falign-jumps=2 -fomit-frame-pointer -fforce-addr -mprefer-avx128 -mprefer-vector-width=128 -ftree-vectorize" declare -x CPPFLAGS="" declare -x CPU_FLAGS_ARM="" declare -x CPU_FLAGS_PPC="" declare -x CPU_FLAGS_X86="avx sse sse2 sse3 sse4_1 sse4_2" declare -- CPU_USE_FLAGS_X86="sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma3 fma4" declare -x CSF_GraphicShr="/usr/lib64/opencascade/libTKOpenGl.so" declare -x CSF_IGESDefaults="/usr/share/opencascade/resources/XSTEPResource" declare -x CSF_MDTVTexturesDirectory="/usr/share/opencascade/resources/Textures" declare -x CSF_MIGRATION_TYPES="/usr/share/opencascade/resources/StdResource/MigrationSheet.txt" declare -x CSF_PluginDefaults="/usr/share/opencascade/resources/StdResource" declare -x CSF_SHMessage="/usr/share/opencascade/resources/SHMessage" declare -x CSF_STEPDefaults="/usr/share/opencascade/resources/XSTEPResource" declare -x CSF_ShadersDirectory="/usr/share/opencascade/resources/Shaders" declare -x CSF_StandardDefaults="/usr/share/opencascade/resources/StdResource" declare -x CSF_StandardLiteDefaults="/usr/share/opencascade/resources/StdResource" declare -x CSF_UnitsDefinition="/usr/share/opencascade/resources/UnitsAPI/Units.dat" declare -x CSF_XCAFDefaults="/usr/share/opencascade/resources/StdResource" declare -x CSF_XSMessage="/usr/share/opencascade/resources/XSMessage" declare -x 
CSF_XmlOcafResource="/usr/share/opencascade/resources/XmlOcafResource" declare -x CTARGET_default="x86_64-pc-linux-gnu" declare -- CUDA_VERBOSE="true" declare -x CURL_SSL="" declare -x CXXFLAGS="-O2 -pipe -march=native -mfpmath=sse -m64 -falign-loops=2 -falign-functions=2 -falign-jumps=2 -fomit-frame-pointer -fforce-addr -mprefer-avx128 -mprefer-vector-width=128 -ftree-vectorize -msse -msse2 -msse3 -msse4.1 -msse4.2 -mavx -std=c++17" declare -x DEFAULT_ABI="amd64" declare -x DEFINED_PHASES=" compile configure install prepare pretend setup unpack" declare DEPEND=" app-arch/snappy >=dev-cpp/abseil-cpp-20211102-r2:= dev-db/lmdb dev-db/sqlite dev-libs/double-conversion dev-libs/icu:= >=dev-libs/jsoncpp-1.9.2:= >=dev-libs/nsync-1.25.0 dev-libs/openssl:0= >=dev-libs/protobuf-3.13.0:= >=dev-libs/re2-0.2019.06.01:= media-libs/giflib media-libs/libjpeg-turbo media-libs/libpng:0 >=net-libs/grpc-1.28:= net-misc/curl sys-libs/zlib >=sys-apps/hwloc-2:= cuda? ( =dev-lang/python-3.8.13:3.8 ) python_targets_python3_9? ( >=dev-lang/python-3.9.12:3.9 ) python_targets_python3_10? ( >=dev-lang/python-3.10.4:3.10 ) >=dev-libs/flatbuffers-2.0.6:= dev-python/absl-py[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/astor-0.7.1[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/astunparse[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/clang-python[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/dill[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/flatbuffers[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/gast-0.3.3[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/h5py[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/numpy-1.19[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/google-pasta-0.1.8[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/opt-einsum-3.3.0[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/protobuf-python-3.13.0[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/pybind11[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/six[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/tblib[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/termcolor[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/typing-extensions[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/grpcio-1.28[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/wrapt-1.11.1[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=net-libs/google-cloud-cpp-0.10.0 >=sci-visualization/tensorboard-2.10[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] ) python? 
( dev-python/mock dev-python/setuptools ) " declare -- DEP_VER="2.10" declare DESCRIPTION="Computation framework using data flow graphs for scalable machine learning" declare -x DIROPTIONS="-m0755" declare -- DISTUTILS_OPTIONAL="1" declare -a DOCS=([0]="AUTHORS" [1]="CONTRIBUTING.md" [2]="ISSUE_TEMPLATE.md" [3]="README.md" [4]="RELEASE.md") declare -x EAPI="8" declare -x ELIBC="glibc" declare -x ENV_UNSET="CARGO_HOME DBUS_SESSION_BUS_ADDRESS DISPLAY GOBIN GOPATH PERL5LIB PERL5OPT PERLPREFIX PERL_CORE PERL_MB_OPT PERL_MM_OPT XAUTHORITY XDG_CACHE_HOME XDG_CONFIG_HOME XDG_DATA_HOME XDG_RUNTIME_DIR XDG_STATE_HOME" declare -- EPOCHREALTIME="1669222524,366427" declare -- EPOCHSECONDS="1669222524" declare -x EXEOPTIONS="-m0755" declare -x FCFLAGS="-O2 -pipe -march=native -mfpmath=sse -m64 -falign-loops=2 -falign-functions=2 -falign-jumps=2 -fomit-frame-pointer -fforce-addr -mprefer-avx128 -mprefer-vector-width=128 -ftree-vectorize -msse -msse2 -msse3 -msse4.1 -msse4.2 -mavx" declare -x FETCHCOMMAND_SSH="bash -c \"x=\\\${2#ssh://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port= ; exec rsync --rsh=\\\"ssh \\\${port:+-p\\\${port}} \\\${3}\\\" -avP \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" rsync \"\${DISTDIR}/\${FILE}\" \"\${URI}\" \"\${PORTAGE_SSH_OPTS}\"" declare -x FFLAGS="-O2 -pipe -march=native -mfpmath=sse -m64 -falign-loops=2 -falign-functions=2 -falign-jumps=2 -fomit-frame-pointer -fforce-addr -mprefer-avx128 -mprefer-vector-width=128 -ftree-vectorize -msse -msse2 -msse3 -msse4.1 -msse4.2 -mavx" declare -x FFTOOLS="" declare -x FLTK_DOCDIR="/usr/share/doc/fltk-1.3.5-r4/html" declare -x GCC_HOST_COMPILER_PATH="/usr/x86_64-pc-linux-gnu/gcc-bin/11.3.0/x86_64-pc-linux-gnu-gcc" declare -x GCC_SPECS="" declare -x GNOME_DESKTOP_SESSION_ID="this-is-deprecated" declare -x GNOME_TERMINAL_SCREEN="/org/gnome/Terminal/screen/36226693_c9b3_4e87_8823_3a16a29a1e28" declare -x GNOME_TERMINAL_SERVICE=":1.57" declare -x GPG_VERIFY_GROUP_DROP="nogroup" declare -x GPG_VERIFY_USER_DROP="nobody" declare -x GPSD_PROTOCOLS="" declare -x GRUB_PLATFORMS="" declare -x GSETTINGS_BACKEND="dconf" declare -x GTK_OVERLAY_SCROLLING="1" declare HOMEPAGE="https://www.tensorflow.org/" declare -x HUSHLOGIN="FALSE" declare IDEPEND="" declare -x INHERITED=" multiprocessing toolchain-funcs multilib bazel check-reqs flag-o-matic cuda multibuild python-utils-r1 python-r1 distutils-r1 prefix" declare -x INPUT_DEVICES="" declare -x INSOPTIONS="-m0644" declare IUSE="cuda mpi +python xla cpu_flags_x86_sse cpu_flags_x86_sse2 cpu_flags_x86_sse3 cpu_flags_x86_sse4_1 cpu_flags_x86_sse4_2 cpu_flags_x86_avx cpu_flags_x86_avx2 cpu_flags_x86_fma3 cpu_flags_x86_fma4 python_targets_python3_8 python_targets_python3_9 python_targets_python3_10" declare -x IUSE_EFFECTIVE="abi_x86_64 alpha amd64 amd64-linux arm arm64 arm64-macos cpu_flags_x86_avx cpu_flags_x86_avx2 cpu_flags_x86_fma3 cpu_flags_x86_fma4 cpu_flags_x86_sse cpu_flags_x86_sse2 cpu_flags_x86_sse3 cpu_flags_x86_sse4_1 cpu_flags_x86_sse4_2 cuda elibc_Cygwin elibc_Darwin elibc_SunOS elibc_Winnt elibc_bionic elibc_glibc elibc_mingw elibc_musl hppa ia64 kernel_Darwin kernel_SunOS kernel_Winnt kernel_linux loong m68k mips mpi ppc ppc-macos ppc64 ppc64-linux prefix prefix-guest prefix-stack python python_targets_python3_10 python_targets_python3_8 python_targets_python3_9 riscv s390 sparc sparc-solaris sparc64-solaris userland_BSD userland_GNU x64-cygwin x64-macos x64-solaris x64-winnt x86 x86-linux x86-solaris x86-winnt xla" declare -x 
IUSE_IMPLICIT="abi_x86_64 prefix prefix-guest prefix-stack" declare -x JAVAC="/etc/java-config-2/current-system-vm/bin/javac" declare -x JAVA_HOME="/opt/openjdk-bin-11.0.15_p10" declare -x JDK_HOME="/etc/java-config-2/current-system-vm" declare -x KERAS_HOME="/var/tmp/portage/sci-libs/tensorflow-2.10.0/temp/.keras" declare -x KERNEL="linux" declare -x KERNEL_ABI="amd64" declare -x KEYWORDS="~amd64" declare -x L10N="" declare -x LANG="fr_FR.utf8" declare -x LCD_DEVICES="" declare -x LC_COLLATE="C" declare -x LC_MESSAGES="C" declare -x LDFLAGS="-Wl,-O1 -Wl,--as-needed" declare -x LDFLAGS_amd64="-m elf_x86_64" declare -x LDFLAGS_default declare -x LDFLAGS_x32="-m elf32_x86_64" declare -x LDFLAGS_x86="-m elf_i386" declare -x LIBDIR_amd64="lib64" declare -x LIBDIR_default="lib" declare -x LIBDIR_x32="libx32" declare -x LIBDIR_x86="lib" declare -x LIBOPTIONS="-m0644" declare -x LIBREOFFICE_EXTENSIONS="" declare -x LICENSE="Apache-2.0" declare -x LLVM_TARGETS="" declare -x LUA_SINGLE_TARGET="" declare -x LUA_TARGETS="" declare -x MAIL="/var/mail/root" declare -x MAKEOPTS="-j8" declare -x MANPAGER="manpager" declare -x MONKEYD_PLUGINS="" declare -x MOTD_SHOWN="pam" declare -x MOZ_GMP_PATH="/usr/lib64/nsbrowser/plugins/gmp-gmpopenh264/system-installed" declare -x MULTILIB_ABIS="amd64 x86" declare -x MULTILIB_STRICT_DENY="64-bit.*shared object" declare -x MULTILIB_STRICT_DIRS="/lib32 /lib /usr/lib32 /usr/lib /usr/kde/*/lib32 /usr/kde/*/lib /usr/qt/*/lib32 /usr/qt/*/lib /usr/X11R6/lib32 /usr/X11R6/lib" declare -x MULTILIB_STRICT_EXEMPT="(perl5|gcc|binutils|eclipse-3|debug|portage|udev|systemd|clang|python-exec|llvm)" declare -- MY_P="tensorflow-2.10.0" declare -- MY_PV="2.10.0" declare -x NGINX_MODULES_HTTP="" declare -x NGINX_MODULES_MAIL="" declare -x NGINX_MODULES_STREAM="" declare -- NVCCFLAGS="-O2" declare -x OFFICE_IMPLEMENTATION="" declare -x OPENMPI_FABRICS="" declare -x OPENMPI_OFED_FEATURES="" declare -x OPENMPI_RM="" declare -x PATH="/usr/lib/portage/python3.10/ebuild-helpers/xattr:/usr/lib/portage/python3.10/ebuild-helpers:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/opt/bin:/usr/lib/llvm/15/bin:/usr/lib/llvm/14/bin:/opt/cuda/bin" declare PDEPEND="python? ( >=sci-libs/keras-2.10[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=sci-libs/tensorflow-estimator-2.10[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] ) " declare -x PHP_TARGETS="" declare -x PORTAGE_COMPRESSION_COMMAND="zstd" declare -a PORTAGE_DOCOMPRESS=([0]="/usr/share/doc" [1]="/usr/share/info" [2]="/usr/share/man") declare -x PORTAGE_DOCOMPRESS_SIZE_LIMIT="128" declare -a PORTAGE_DOCOMPRESS_SKIP=([0]="/usr/share/doc/tensorflow-2.10.0/html") declare -a PORTAGE_DOSTRIP=([0]="/") declare -a PORTAGE_DOSTRIP_SKIP=() declare -x POSTGRES_TARGETS="" declare -x PROFILE_ONLY_VARIABLES="ARCH ELIBC IUSE_IMPLICIT KERNEL USERLAND USE_EXPAND_IMPLICIT USE_EXPAND_UNPREFIXED USE_EXPAND_VALUES_ARCH USE_EXPAND_VALUES_ELIBC USE_EXPAND_VALUES_KERNEL USE_EXPAND_VALUES_USERLAND" declare -- PROPERTIES="" declare -x PYTHONDONTWRITEBYTECODE="1" declare -x PYTHON_BIN_PATH="/usr/bin/python3.10" declare -a PYTHON_COMPAT=([0]="python3_8" [1]="python3_9" [2]="python3_10") declare -- PYTHON_DEPS="python_targets_python3_8? ( >=dev-lang/python-3.8.13:3.8 ) python_targets_python3_9? ( >=dev-lang/python-3.9.12:3.9 ) python_targets_python3_10? 
( >=dev-lang/python-3.10.4:3.10 ) " declare -x PYTHON_LIB_PATH="/usr/lib/python3.10/site-packages" declare -- PYTHON_REQUIRED_USE="|| ( python_targets_python3_8 python_targets_python3_9 python_targets_python3_10 )" declare -x PYTHON_SINGLE_TARGET="" declare -x PYTHON_TARGETS="python3_10" declare -- PYTHON_USEDEP="python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?" declare -x QEMU_SOFTMMU_TARGETS="" declare -x QEMU_USER_TARGETS="" declare RDEPEND=" app-arch/snappy >=dev-cpp/abseil-cpp-20211102-r2:= dev-db/lmdb dev-db/sqlite dev-libs/double-conversion dev-libs/icu:= >=dev-libs/jsoncpp-1.9.2:= >=dev-libs/nsync-1.25.0 dev-libs/openssl:0= >=dev-libs/protobuf-3.13.0:= >=dev-libs/re2-0.2019.06.01:= media-libs/giflib media-libs/libjpeg-turbo media-libs/libpng:0 >=net-libs/grpc-1.28:= net-misc/curl sys-libs/zlib >=sys-apps/hwloc-2:= cuda? ( =dev-lang/python-3.8.13:3.8 ) python_targets_python3_9? ( >=dev-lang/python-3.9.12:3.9 ) python_targets_python3_10? ( >=dev-lang/python-3.10.4:3.10 ) >=dev-libs/flatbuffers-2.0.6:= dev-python/absl-py[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/astor-0.7.1[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/astunparse[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/clang-python[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/dill[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/flatbuffers[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/gast-0.3.3[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/h5py[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/numpy-1.19[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/google-pasta-0.1.8[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/opt-einsum-3.3.0[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/protobuf-python-3.13.0[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/pybind11[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/six[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/tblib[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/termcolor[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] dev-python/typing-extensions[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/grpcio-1.28[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=dev-python/wrapt-1.11.1[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] >=net-libs/google-cloud-cpp-0.10.0 >=sci-visualization/tensorboard-2.10[python_targets_python3_8(-)?,python_targets_python3_9(-)?,python_targets_python3_10(-)?] ) " declare REPOSITORY declare REQUIRED_USE="python? 
( || ( python_targets_python3_8 python_targets_python3_9 python_targets_python3_10 ) ) " declare RESTRICT="test" declare -x RESUMECOMMAND_SSH="bash -c \"x=\\\${2#ssh://} ; host=\\\${x%%/*} ; port=\\\${host##*:} ; host=\\\${host%:*} ; [[ \\\${host} = \\\${port} ]] && port= ; exec rsync --rsh=\\\"ssh \\\${port:+-p\\\${port}} \\\${3}\\\" -avP \\\"\\\${host}:/\\\${x#*/}\\\" \\\"\\\$1\\\"\" rsync \"\${DISTDIR}/\${FILE}\" \"\${URI}\" \"\${PORTAGE_SSH_OPTS}\"" declare -x ROS_MESSAGES="" declare -x RUBY_TARGETS="" declare -x RUSTFLAGS="-C target-cpu=native" declare -x S="/var/tmp/portage/sci-libs/tensorflow-2.10.0/work/tensorflow-2.10.0" declare -x SANDBOX_DEBUG="0" declare -x SANDBOX_DENY="" declare -x SANDBOX_METHOD="any" declare -x SANDBOX_PREDICT="/dev/crypto:/dev/nvidia-caps:/dev/nvidia-modeset:/dev/nvidia-uvm:/dev/nvidia-uvm-tools:/dev/nvidia0:/dev/nvidiactl:/dev/random:/proc:/proc/self/coredump_filter:/var/cache/fontconfig:/var/cache/man:/var/tmp/portage/sci-libs/tensorflow-2.10.0/homedir" declare -x SANDBOX_READ="/:/var/tmp/portage" declare -x SANDBOX_VERBOSE="1" declare -x SANDBOX_WRITE=":/dev/console:/dev/fd:/dev/full:/dev/null:/dev/ptmx:/dev/pts/:/dev/pty:/dev/shm:/dev/tts:/dev/tty:/dev/vc/:/dev/zero:/proc/self/fd:/tmp/:/usr/lib/cf:/usr/lib/conftest:/usr/lib32/cf:/usr/lib32/conftest:/usr/lib64/cf:/usr/lib64/conftest:/usr/tmp/:/usr/tmp/cf:/usr/tmp/conftest:/var/tmp/:/var/tmp/portage:/var/tmp/portage/sci-libs/tensorflow-2.10.0/homedir/.bash_history" declare -x SANE_BACKENDS="" declare -x SESSION_MANAGER="local/asrockh61mps4:@/tmp/.ICE-unix/19072,unix/asrockh61mps4:/tmp/.ICE-unix/19072" declare -x SHELL="/bin/bash" declare -x SLOT="0" declare -i SRANDOM="4187392246" declare SRC_URI="https://github.com/tensorflow/tensorflow/archive/v2.10.0.tar.gz -> tensorflow-2.10.0.tar.gz https://dev.gentoo.org/~perfinion/patches/tensorflow-patches-2.10.0.tar.bz2 https://github.com/bazelbuild/platforms/releases/download/0.0.5/platforms-0.0.5.tar.gz -> bazelbuild-platforms-0.0.5.tar.gz https://github.com/bazelbuild/apple_support/releases/download/0.12.1/apple_support.0.12.1.tar.gz https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz https://github.com/bazelbuild/bazel-toolchains/archive/ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz -> bazel-toolchains-ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip https://github.com/bazelbuild/rules_apple/releases/download/0.33.0/rules_apple.0.33.0.tar.gz https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip https://github.com/bazelbuild/rules_pkg/releases/download/0.7.0/rules_pkg-0.7.0.tar.gz -> bazelbuild-rules_pkg-0.7.0.tar.gz https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz 
https://github.com/bazelbuild/rules_python/releases/download/0.0.1/rules_python-0.0.1.tar.gz -> bazelbuild-rules_python-0.0.1.tar.gz https://github.com/bazelbuild/rules_swift/releases/download/0.25.0/rules_swift.0.25.0.tar.gz -> bazelbuild-rules_swift.0.25.0.tar.gz https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz https://github.com/google/re2/archive/a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz -> re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz https://github.com/google/ruy/archive/841ea4172ba904fe3536789497f9565f2ef64129.zip -> ruy-841ea4172ba904fe3536789497f9565f2ef64129.zip https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz https://github.com/llvm/llvm-project/archive/0538e5431afdb1fa05bdcedf70ee502ccfcd112a.tar.gz -> llvm-project-0538e5431afdb1fa05bdcedf70ee502ccfcd112a.tar.gz https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> kissfft-131.1.0.tar.gz https://github.com/oneapi-src/oneDNN/archive/refs/tags/v2.6.1.tar.gz -> oneDNN-v2.6.1.tar.gz https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz https://github.com/pytorch/cpuinfo/archive/5916273f79a21551890fd3d56fc5375a78d1598d.zip -> pytorch-cpuinfo-5916273f79a21551890fd3d56fc5375a78d1598d.zip https://github.com/pytorch/cpuinfo/archive/5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz -> pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz https://github.com/tensorflow/runtime/archive/6ca793b5d862ef6c50f242d77a811f06cce9b60a.tar.gz -> tensorflow-runtime-6ca793b5d862ef6c50f242d77a811f06cce9b60a.tar.gz https://gitlab.com/libeigen/eigen/-/archive/0e187141679fdb91da33249d18cb79a011c0e2ea/eigen-0e187141679fdb91da33249d18cb79a011c0e2ea.tar.gz https://github.com/google/XNNPACK/archive/6b409ac0a3090ebe74d0cdfb494c4cd91485ad39.zip -> XNNPACK-6b409ac0a3090ebe74d0cdfb494c4cd91485ad39.zip https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip cuda? ( https://github.com/NVIDIA/cudnn-frontend/archive/v0.6.2.zip -> cudnn-frontend-v0.6.2.zip https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip https://github.com/nvidia/nccl/archive/v2.12.12-1.tar.gz -> nvidia-nccl-v2.12.12-1.tar.gz ) python? 
( https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt )" declare -x SYMLINK_LIB="no" declare -x TF_CUDA_CLANG="0" declare -x TF_CUDA_COMPUTE_CAPABILITIES="6.1,3.5" declare -x TF_CUDA_PATHS="/opt/cuda" declare -x TF_CUDA_VERSION="11.7" declare -x TF_CUDNN_VERSION="8.6" declare -x TF_DOWNLOAD_CLANG="0" declare -x TF_ENABLE_XLA="0" declare -x TF_IGNORE_MAX_BAZEL_VERSION="1" declare -x TF_NEED_COMPUTECPP="0" declare -x TF_NEED_CUDA="1" declare -x TF_NEED_MPI="0" declare -x TF_NEED_OPENCL="0" declare -x TF_NEED_OPENCL_SYCL="0" declare -x TF_NEED_ROCM="0" declare -x TF_NEED_TENSORRT="0" declare -x TF_SET_ANDROID_WORKSPACE="0" declare -x TF_SYSTEM_LIBS="absl_py astor_archive astunparse_archive boringssl com_github_googlecloudplatform_google_cloud_cpp com_github_grpc_grpc com_google_absl com_google_protobuf curl cython dill_archive double_conversion flatbuffers functools32_archive gast_archive gif hwloc icu jsoncpp_git libjpeg_turbo lmdb nasm nsync opt_einsum_archive org_sqlite pasta png pybind11 six_archive snappy tblib_archive termcolor_archive typing_extensions_archive wrapt zlib" declare -x TWISTED_DISABLE_WRITING_OF_PLUGIN_CACHE="1" declare -x USE="abi_x86_64 amd64 cpu_flags_x86_avx cpu_flags_x86_sse cpu_flags_x86_sse2 cpu_flags_x86_sse3 cpu_flags_x86_sse4_1 cpu_flags_x86_sse4_2 cuda elibc_glibc kernel_linux python python_targets_python3_10 userland_GNU" declare -x USERLAND="GNU" declare -x USE_EXPAND_IMPLICIT="ARCH ELIBC KERNEL USERLAND" declare -x USE_EXPAND_UNPREFIXED="ARCH" declare -x USE_EXPAND_VALUES_ARCH="alpha amd64 amd64-linux arm arm64 arm64-macos hppa ia64 loong m68k mips ppc ppc64 ppc64-linux ppc-macos riscv s390 sparc sparc64-solaris sparc-solaris x64-cygwin x64-macos x64-solaris x64-winnt x86 x86-linux x86-solaris x86-winnt" declare -x USE_EXPAND_VALUES_ELIBC="bionic Cygwin Darwin glibc mingw musl SunOS Winnt" declare -x USE_EXPAND_VALUES_KERNEL="Darwin linux SunOS Winnt" declare -x USE_EXPAND_VALUES_USERLAND="BSD GNU" declare -x UWSGI_PLUGINS="" declare -x VIDEO_CARDS="" declare -x VOICEMAIL_STORAGE="" declare -x VTE_VERSION="7001" declare -x WINDOWPATH="1" declare -x XDG_CONFIG_DIRS="/etc/xdg" declare -x XDG_CURRENT_DESKTOP="X-Cinnamon" declare -x XDG_DATA_DIRS="/usr/local/share:/usr/share:/usr/share/gdm" declare -x XDG_SEAT="seat0" declare -x XDG_SESSION_CLASS="user" declare -x XDG_SESSION_ID="1" declare -x XDG_SESSION_TYPE="tty" declare -x XDG_VTNR="1" declare -x XTABLES_ADDONS="" declare -- _BAZEL_ECLASS="1" declare -- _CHECK_REQS_ECLASS="1" declare -- _CUDA_ECLASS="1" declare -- _DISTUTILS_FOREACH_IMPL_WARNED="1" declare -- _DISTUTILS_R1="1" declare -x _E_DESTTREE_="/usr" declare -x _E_DOCDESTTREE_="" declare -x _E_EXEDESTTREE_="" declare -x _E_INSDESTTREE_="" declare -- _FLAG_O_MATIC_ECLASS="1" declare -- _MULTIBUILD_ECLASS="1" declare -- _MULTILIB_ECLASS="1" declare -- _MULTIPROCESSING_ECLASS="1" declare -- _PREFIX_ECLASS="1" declare -a _PYTHON_ALL_IMPLS=([0]="pypy3" [1]="python3_8" [2]="python3_9" [3]="python3_10" [4]="python3_11") declare -a _PYTHON_HISTORICAL_IMPLS=([0]="jython2_7" [1]="pypy" [2]="pypy1_8" [3]="pypy1_9" [4]="pypy2_0" [5]="python2_5" [6]="python2_6" [7]="python2_7" [8]="python3_1" [9]="python3_2" [10]="python3_3" [11]="python3_4" [12]="python3_5" [13]="python3_6" [14]="python3_7") declare -- 
_PYTHON_R1="1" declare -a _PYTHON_SUPPORTED_IMPLS=([0]="python3_8" [1]="python3_9" [2]="python3_10") declare -a _PYTHON_UNSUPPORTED_IMPLS=([0]="pypy3" [1]="python3_11") declare -- _PYTHON_UTILS_R1="1" declare -- _TOOLCHAIN_FUNCS_ECLASS="1" declare -- bazel_external_uris=" https://github.com/bazelbuild/platforms/releases/download/0.0.5/platforms-0.0.5.tar.gz -> bazelbuild-platforms-0.0.5.tar.gz https://github.com/bazelbuild/apple_support/releases/download/0.12.1/apple_support.0.12.1.tar.gz https://github.com/bazelbuild/bazel-skylib/releases/download/1.2.1/bazel-skylib-1.2.1.tar.gz https://github.com/bazelbuild/bazel-toolchains/archive/ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz -> bazel-toolchains-ea243d43269df23de03a797cff2347e1fc3d02bb.tar.gz https://github.com/bazelbuild/rules_android/archive/v0.1.1.zip -> bazelbuild-rules_android-v0.1.1.zip https://github.com/bazelbuild/rules_apple/releases/download/0.33.0/rules_apple.0.33.0.tar.gz https://github.com/bazelbuild/rules_cc/archive/081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz -> bazelbuild-rules_cc-081771d4a0e9d7d3aa0eed2ef389fa4700dfb23e.tar.gz https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz -> bazelbuild-rules_closure-308b05b2419edb5c8ee0471b67a40403df940149.tar.gz https://github.com/bazelbuild/rules_docker/releases/download/v0.10.0/rules_docker-v0.10.0.tar.gz -> bazelbuild-rules_docker-v0.10.0.tar.gz https://github.com/bazelbuild/rules_java/archive/7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip -> bazelbuild-rules_java-7cf3cefd652008d0a64a419c34c13bdca6c8f178.zip https://github.com/bazelbuild/rules_pkg/releases/download/0.7.0/rules_pkg-0.7.0.tar.gz -> bazelbuild-rules_pkg-0.7.0.tar.gz https://github.com/bazelbuild/rules_proto/archive/11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz -> bazelbuild-rules_proto-11bf7c25e666dd7ddacbcd4d4c4a9de7a25175f8.tar.gz https://github.com/bazelbuild/rules_python/releases/download/0.0.1/rules_python-0.0.1.tar.gz -> bazelbuild-rules_python-0.0.1.tar.gz https://github.com/bazelbuild/rules_swift/releases/download/0.25.0/rules_swift.0.25.0.tar.gz -> bazelbuild-rules_swift.0.25.0.tar.gz https://github.com/dmlc/dlpack/archive/9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz -> dlpack-9351cf542ab478499294864ff3acfdab5c8c5f3d.tar.gz https://github.com/google/farmhash/archive/0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz -> farmhash-0d859a811870d10f53a594927d0d0b97573ad06d.tar.gz https://github.com/google/gemmlowp/archive/e844ffd17118c1e17d94e1ba4354c075a4577b88.zip -> gemmlowp-e844ffd17118c1e17d94e1ba4354c075a4577b88.zip https://github.com/google/highwayhash/archive/c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz -> highwayhash-c13d28517a4db259d738ea4886b1f00352a3cc33.tar.gz https://github.com/google/re2/archive/a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz -> re2-a276a8c738735a0fe45a6ee590fe2df69bcf4502.tar.gz https://github.com/google/ruy/archive/841ea4172ba904fe3536789497f9565f2ef64129.zip -> ruy-841ea4172ba904fe3536789497f9565f2ef64129.zip https://github.com/joe-kuo/sobol_data/archive/835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz -> sobol_data-835a7d7b1ee3bc83e575e302a985c66ec4b65249.tar.gz https://github.com/llvm/llvm-project/archive/0538e5431afdb1fa05bdcedf70ee502ccfcd112a.tar.gz -> llvm-project-0538e5431afdb1fa05bdcedf70ee502ccfcd112a.tar.gz https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz -> llvmorg-10.0.1-openmp-10.0.1.src.tar.xz https://github.com/mborgerding/kissfft/archive/131.1.0.tar.gz -> 
kissfft-131.1.0.tar.gz https://github.com/oneapi-src/oneDNN/archive/refs/tags/v2.6.1.tar.gz -> oneDNN-v2.6.1.tar.gz https://github.com/petewarden/OouraFFT/archive/v1.0.tar.gz -> OouraFFT-v1.0.tar.gz https://github.com/pytorch/cpuinfo/archive/5916273f79a21551890fd3d56fc5375a78d1598d.zip -> pytorch-cpuinfo-5916273f79a21551890fd3d56fc5375a78d1598d.zip https://github.com/pytorch/cpuinfo/archive/5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz -> pytorch-cpuinfo-5e63739504f0f8e18e941bd63b2d6d42536c7d90.tar.gz https://github.com/tensorflow/runtime/archive/6ca793b5d862ef6c50f242d77a811f06cce9b60a.tar.gz -> tensorflow-runtime-6ca793b5d862ef6c50f242d77a811f06cce9b60a.tar.gz https://gitlab.com/libeigen/eigen/-/archive/0e187141679fdb91da33249d18cb79a011c0e2ea/eigen-0e187141679fdb91da33249d18cb79a011c0e2ea.tar.gz https://github.com/google/XNNPACK/archive/6b409ac0a3090ebe74d0cdfb494c4cd91485ad39.zip -> XNNPACK-6b409ac0a3090ebe74d0cdfb494c4cd91485ad39.zip https://github.com/Maratyszcza/pthreadpool/archive/b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip -> pthreadpool-b8374f80e42010941bda6c85b0e3f1a1bd77a1e0.zip https://github.com/Maratyszcza/FP16/archive/4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip -> FP16-4dfe081cf6bcd15db339cf2680b9281b8451eeb3.zip https://github.com/Maratyszcza/FXdiv/archive/63058eff77e11aa15bf531df5dd34395ec3017c8.zip -> FXdiv-63058eff77e11aa15bf531df5dd34395ec3017c8.zip cuda? ( https://github.com/NVIDIA/cudnn-frontend/archive/v0.6.2.zip -> cudnn-frontend-v0.6.2.zip https://github.com/NVlabs/cub/archive/1.9.9.zip -> cub-1.9.9.zip https://github.com/nvidia/nccl/archive/v2.12.12-1.tar.gz -> nvidia-nccl-v2.12.12-1.tar.gz ) python? ( https://github.com/intel/ARM_NEON_2_x86_SSE/archive/1200fe90bb174a6224a525ee60148671a786a71f.tar.gz -> ARM_NEON_2_x86_SSE-1200fe90bb174a6224a525ee60148671a786a71f.tar.gz https://storage.googleapis.com/mirror.tensorflow.org/docs.python.org/2.7/_sources/license.rst.txt -> tensorflow-1.15.0-python-license.rst.txt )" declare -- cflag="-I/usr/include/jsoncpp" declare -x enable_year2038="no" declare -- i="fma4" declare -- word=")" __eapi6_src_install () { if [[ -f Makefile || -f GNUmakefile || -f makefile ]]; then emake DESTDIR="${D}" install; fi; einstalldocs } __eapi6_src_prepare () { if ___is_indexed_array_var PATCHES; then [[ ${#PATCHES[@]} -gt 0 ]] && eapply "${PATCHES[@]}"; else if [[ -n ${PATCHES} ]]; then eapply ${PATCHES}; fi; fi; eapply_user } __eapi7_ver_compare () { local va=${1} vb=${2} a an al as ar b bn bl bs br re LC_ALL=C; re="^([0-9]+(\.[0-9]+)*)([a-z]?)((_(alpha|beta|pre|rc|p)[0-9]*)*)(-r[0-9]+)?$"; [[ ${va} =~ ${re} ]] || die "${FUNCNAME}: invalid version: ${va}"; an=${BASH_REMATCH[1]}; al=${BASH_REMATCH[3]}; as=${BASH_REMATCH[4]}; ar=${BASH_REMATCH[7]}; [[ ${vb} =~ ${re} ]] || die "${FUNCNAME}: invalid version: ${vb}"; bn=${BASH_REMATCH[1]}; bl=${BASH_REMATCH[3]}; bs=${BASH_REMATCH[4]}; br=${BASH_REMATCH[7]}; __eapi7_ver_compare_int "${an%%.*}" "${bn%%.*}" || return; while [[ ${an} == *.* && ${bn} == *.* ]]; do an=${an#*.}; bn=${bn#*.}; a=${an%%.*}; b=${bn%%.*}; if [[ ${a} == 0* || ${b} == 0* ]]; then [[ ${a} =~ 0+$ ]] && a=${a%"${BASH_REMATCH[0]}"}; [[ ${b} =~ 0+$ ]] && b=${b%"${BASH_REMATCH[0]}"}; [[ ${a} > ${b} ]] && return 3; [[ ${a} < ${b} ]] && return 1; else __eapi7_ver_compare_int "${a}" "${b}" || return; fi; done; [[ ${an} == *.* ]] && return 3; [[ ${bn} == *.* ]] && return 1; [[ ${al} > ${bl} ]] && return 3; [[ ${al} < ${bl} ]] && return 1; as=${as#_}${as:+_}; bs=${bs#_}${bs:+_}; while [[ -n ${as} && -n ${bs} ]]; do a=${as%%_*}; 
b=${bs%%_*}; if [[ ${a%%[0-9]*} == "${b%%[0-9]*}" ]]; then __eapi7_ver_compare_int "${a##*[a-z]}" "${b##*[a-z]}" || return; else [[ ${a%%[0-9]*} == p ]] && return 3; [[ ${b%%[0-9]*} == p ]] && return 1; [[ ${a} > ${b} ]] && return 3 || return 1; fi; as=${as#*_}; bs=${bs#*_}; done; if [[ -n ${as} ]]; then [[ ${as} == p[_0-9]* ]] && return 3 || return 1; else if [[ -n ${bs} ]]; then [[ ${bs} == p[_0-9]* ]] && return 1 || return 3; fi; fi; __eapi7_ver_compare_int "${ar#-r}" "${br#-r}" || return; return 2 } __eapi7_ver_compare_int () { local a=$1 b=$2 d=$(( ${#1}-${#2} )); if [[ ${d} -gt 0 ]]; then printf -v b "%0${d}d%s" 0 "${b}"; else if [[ ${d} -lt 0 ]]; then printf -v a "%0$(( -d ))d%s" 0 "${a}"; fi; fi; [[ ${a} > ${b} ]] && return 3; [[ ${a} == "${b}" ]] } __eapi7_ver_parse_range () { local range=${1}; local max=${2}; [[ ${range} == [0-9]* ]] || die "${FUNCNAME}: range must start with a number"; start=${range%-*}; [[ ${range} == *-* ]] && end=${range#*-} || end=${start}; if [[ -n ${end} ]]; then [[ ${start} -le ${end} ]] || die "${FUNCNAME}: end of range must be >= start"; [[ ${end} -le ${max} ]] || end=${max}; else end=${max}; fi } __eapi7_ver_split () { local v=${1} LC_ALL=C; comp=(); local s c; while [[ -n ${v} ]]; do s=${v%%[a-zA-Z0-9]*}; v=${v:${#s}}; [[ ${v} == [0-9]* ]] && c=${v%%[^0-9]*} || c=${v%%[^a-zA-Z]*}; v=${v:${#c}}; comp+=("${s}" "${c}"); done } __eapi8_src_prepare () { local f; if ___is_indexed_array_var PATCHES; then [[ ${#PATCHES[@]} -gt 0 ]] && eapply -- "${PATCHES[@]}"; else if [[ -n ${PATCHES} ]]; then eapply -- ${PATCHES}; fi; fi; eapply_user } _check-reqs_disk () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${2} ]] && die "Usage: ${FUNCNAME} [path] [size]"; local path=${1}; local size=${2}; local space_kbi; _check-reqs_start_phase ${size} "disk space at \"${path}\""; space_kbi=$(df -Pk "${1}" 2>/dev/null | awk 'FNR == 2 {print $4}'); if [[ $? == 0 && -n ${space_kbi} ]]; then if [[ ${space_kbi} -lt $(_check-reqs_get_kibibytes ${size}) ]]; then eend 1; _check-reqs_unsatisfied ${size} "disk space at \"${path}\""; else eend 0; fi; else eend 1; ewarn "Couldn't determine disk space, skipping..."; fi } _check-reqs_get_kibibytes () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"; local unit=${1:(-1)}; local size=${1%[GMT]}; case ${unit} in M) echo $((1024 * size)) ;; G) echo $((1024 * 1024 * size)) ;; T) echo $((1024 * 1024 * 1024 * size)) ;; *) die "${FUNCNAME}: Unknown unit: ${unit}" ;; esac } _check-reqs_get_number () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"; local size=${1%[GMT]}; [[ ${size} == ${1} ]] && die "${FUNCNAME}: Missing unit: ${1}"; echo ${size} } _check-reqs_get_unit () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"; local unit=${1:(-1)}; case ${unit} in M) echo "MiB" ;; G) echo "GiB" ;; T) echo "TiB" ;; *) die "${FUNCNAME}: Unknown unit: ${unit}" ;; esac } _check-reqs_memory () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"; local size=${1}; local actual_memory; local actual_swap; _check-reqs_start_phase ${size} "RAM"; if [[ -r /proc/meminfo ]]; then actual_memory=$(awk '/MemTotal/ { print $2 }' /proc/meminfo); actual_swap=$(awk '/SwapTotal/ { print $2 }' /proc/meminfo); else actual_memory=$(sysctl hw.physmem 2>/dev/null); [[ $? -eq 0 ]] && actual_memory=$(echo "${actual_memory}" | sed -e 's/^[^:=]*[:=][[:space:]]*//'); actual_swap=$(sysctl vm.swap_total 2>/dev/null); [[ $? 
-eq 0 ]] && actual_swap=$(echo "${actual_swap}" | sed -e 's/^[^:=]*[:=][[:space:]]*//'); fi; if [[ -n ${actual_memory} ]]; then if [[ ${actual_memory} -ge $(_check-reqs_get_kibibytes ${size}) ]]; then eend 0; else if [[ -n ${actual_swap} && $((${actual_memory} + ${actual_swap})) -ge $(_check-reqs_get_kibibytes ${size}) ]]; then ewarn "Amount of main memory is insufficient, but amount"; ewarn "of main memory combined with swap is sufficient."; ewarn "Build process may make computer very slow!"; eend 0; else eend 1; _check-reqs_unsatisfied ${size} "RAM"; fi; fi; else eend 1; ewarn "Couldn't determine amount of memory, skipping..."; fi } _check-reqs_output () { debug-print-function ${FUNCNAME} "$@"; local msg="ewarn"; [[ ${EBUILD_PHASE} == "pretend" && -z ${CHECKREQS_DONOTHING} ]] && msg="eerror"; if [[ -n ${CHECKREQS_FAILED} ]]; then ${msg}; ${msg} "Space constraints set in the ebuild were not met!"; ${msg} "The build will most probably fail, you should enhance the space"; ${msg} "as per failed tests."; ${msg}; [[ ${EBUILD_PHASE} == "pretend" && -z ${CHECKREQS_DONOTHING} ]] && die "Build requirements not met!"; fi } _check-reqs_prepare () { debug-print-function ${FUNCNAME} "$@"; if [[ -z ${CHECKREQS_MEMORY} && -z ${CHECKREQS_DISK_BUILD} && -z ${CHECKREQS_DISK_USR} && -z ${CHECKREQS_DISK_VAR} ]]; then eerror "Set some check-reqs eclass variables if you want to use it."; eerror "If you are user and see this message file a bug against the package."; die "${FUNCNAME}: check-reqs eclass called but not actually used!"; fi } _check-reqs_run () { debug-print-function ${FUNCNAME} "$@"; unset CHECKREQS_FAILED; if [[ ${MERGE_TYPE} != binary ]]; then [[ -n ${CHECKREQS_MEMORY} ]] && _check-reqs_memory ${CHECKREQS_MEMORY}; [[ -n ${CHECKREQS_DISK_BUILD} ]] && _check-reqs_disk "${T}" "${CHECKREQS_DISK_BUILD}"; fi; if [[ ${MERGE_TYPE} != buildonly ]]; then [[ -n ${CHECKREQS_DISK_USR} ]] && _check-reqs_disk "${EROOT%/}/usr" "${CHECKREQS_DISK_USR}"; [[ -n ${CHECKREQS_DISK_VAR} ]] && _check-reqs_disk "${EROOT%/}/var" "${CHECKREQS_DISK_VAR}"; fi } _check-reqs_start_phase () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"; local size=${1}; local location=${2}; local sizeunit="$(_check-reqs_get_number ${size}) $(_check-reqs_get_unit ${size})"; ebegin "Checking for at least ${sizeunit} ${location}" } _check-reqs_unsatisfied () { debug-print-function ${FUNCNAME} "$@"; [[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"; local msg="ewarn"; local size=${1}; local location=${2}; local sizeunit="$(_check-reqs_get_number ${size}) $(_check-reqs_get_unit ${size})"; [[ ${EBUILD_PHASE} == "pretend" && -z ${CHECKREQS_DONOTHING} ]] && msg="eerror"; ${msg} "There is NOT at least ${sizeunit} ${location}"; CHECKREQS_FAILED="true" } _clang_fullversion () { local ver="$1"; shift; set -- $($(tc-getCPP "$@") -E -P - <<<"__clang_major__ __clang_minor__ __clang_patchlevel__"); eval echo "$ver" } _distutils-r1_backend_to_key () { debug-print-function ${FUNCNAME} "${@}"; local backend=${1}; case ${backend} in flit_core.buildapi | flit.buildapi) echo flit ;; flit_scm:buildapi) echo flit_scm ;; hatchling.build) echo hatchling ;; jupyter_packaging.build_api) echo jupyter ;; maturin) echo maturin ;; mesonpy) echo meson-python ;; pbr.build) echo pbr ;; pdm.pep517.api) echo pdm ;; poetry.core.masonry.api | poetry.masonry.api) echo poetry ;; setuptools.build_meta | setuptools.build_meta:__legacy__) echo setuptools ;; sipbuild.api) echo sip ;; *) die "Unknown backend: ${backend}" ;; 
esac } _distutils-r1_check_all_phase_mismatch () { if has "python_${EBUILD_PHASE}" "${FUNCNAME[@]}"; then eqawarn "QA Notice: distutils-r1_python_${EBUILD_PHASE}_all called"; eqawarn "from python_${EBUILD_PHASE}. Did you mean to use"; eqawarn "python_${EBUILD_PHASE}_all()?"; [[ ${EAPI} != [67] ]] && die "distutils-r1_python_${EBUILD_PHASE}_all called from python_${EBUILD_PHASE}."; fi } _distutils-r1_check_namespace_pth () { local f pth=(); while IFS= read -r -d '' f; do pth+=("${f}"); done < <(find "${ED%/}" -name '*-nspkg.pth' -print0); if [[ -n ${pth[@]} ]]; then eerror "The following *-nspkg.pth files were found installed:"; eerror; for f in "${pth[@]}"; do eerror " ${f#${ED%/}}"; done; eerror; eerror "The presence of those files may break namespaces in Python 3.5+. Please"; eerror "read our documentation on reliable handling of namespaces and update"; eerror "the ebuild accordingly:"; eerror; eerror " https://projects.gentoo.org/python/guide/concept.html#namespace-packages"; die "Installing *-nspkg.pth files is banned"; fi } _distutils-r1_clean_egg_info () { if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; rm -rf "${BUILD_DIR}"/lib/*.egg-info || die } _distutils-r1_copy_egg_info () { if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; mkdir -p "${BUILD_DIR}" || die; find -name '*.egg-info' -type d -exec cp -R -p {} "${BUILD_DIR}"/ ';' || die } _distutils-r1_create_setup_cfg () { if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; cat > "${HOME}"/.pydistutils.cfg <<-_EOF_ || [build] build_base = ${BUILD_DIR} # using a single directory for them helps us export # ${PYTHONPATH} and ebuilds find the sources independently # of whether the package installs extensions or not # # note: due to some packages (wxpython) relying on separate # platlib & purelib dirs, we do not set --build-lib (which # can not be overridden with --build-*lib) build_platlib = %(build_base)s/lib build_purelib = %(build_base)s/lib # make the ebuild writer lives easier build_scripts = %(build_base)s/scripts # this is needed by distutils_install_for_testing since # setuptools like to create .egg files for install --home. [bdist_egg] dist_dir = ${BUILD_DIR}/dist # avoid packing up eggs in a zip as it often breaks test suites [options] zip_safe = False _EOF_ die if [[ ${EBUILD_PHASE} == install ]]; then cat >> "${HOME}"/.pydistutils.cfg <<-_EOF_ || # installation paths -- allow calling extra install targets # without the default 'install' [install] compile = True optimize = 2 root = ${D%/} _EOF_ die if [[ ! 
-n ${DISTUTILS_SINGLE_IMPL} ]]; then cat >> "${HOME}"/.pydistutils.cfg <<-_EOF_ || install_scripts = $(python_get_scriptdir) _EOF_ die; fi; fi } _distutils-r1_disable_ez_setup () { if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; local stub="def use_setuptools(*args, **kwargs): pass"; if [[ -f ez_setup.py ]]; then echo "${stub}" > ez_setup.py || die; fi; if [[ -f distribute_setup.py ]]; then echo "${stub}" > distribute_setup.py || die; fi } _distutils-r1_get_backend () { debug-print-function ${FUNCNAME} "${@}"; local build_backend legacy_fallback; if [[ -f pyproject.toml ]]; then build_backend=$(gpep517 get-backend); fi; if [[ -z ${build_backend} && ${DISTUTILS_USE_PEP517} == setuptools && -f setup.py ]]; then build_backend=setuptools.build_meta:__legacy__; legacy_fallback=1; fi; if [[ -z ${build_backend} ]]; then die "Unable to obtain build-backend from pyproject.toml"; fi; if [[ ${DISTUTILS_USE_PEP517} != standalone ]]; then local expected_value=$(_distutils-r1_backend_to_key "${build_backend}"); if [[ ${DISTUTILS_USE_PEP517} != ${expected_value} ]]; then eerror "DISTUTILS_USE_PEP517 does not match pyproject.toml!"; eerror " have: DISTUTILS_USE_PEP517=${DISTUTILS_USE_PEP517}"; eerror "expected: DISTUTILS_USE_PEP517=${expected_value}"; eerror "(backend: ${build_backend})"; die "DISTUTILS_USE_PEP517 value incorrect"; fi; local new_backend=; case ${build_backend} in flit.buildapi) new_backend=flit_core.buildapi ;; poetry.masonry.api) new_backend=poetry.core.masonry.api ;; setuptools.build_meta:__legacy__) [[ ! -n ${legacy_fallback} ]] && new_backend=setuptools.build_meta ;; esac; if [[ -n ${new_backend} ]]; then if [[ ! -f ${T}/.distutils_deprecated_backend_warned ]]; then eqawarn "${build_backend} backend is deprecated. Please see:"; eqawarn "https://projects.gentoo.org/python/guide/distutils.html#deprecated-pep-517-backends"; eqawarn "The eclass will be using ${new_backend} instead."; > "${T}"/.distutils_deprecated_backend_warned || die; fi; build_backend=${new_backend}; fi; fi; echo "${build_backend}" } _distutils-r1_handle_pyproject_toml () { if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; [[ ${DISTUTILS_USE_SETUPTOOLS} == manual ]] && return; if [[ ! -f setup.py && -f pyproject.toml ]]; then eerror "No setup.py found but pyproject.toml is present. Please migrate"; eerror "the package to use DISTUTILS_USE_PEP517. See:"; eerror " https://projects.gentoo.org/python/guide/distutils.html"; die "No setup.py found and PEP517 mode not enabled"; fi } _distutils-r1_post_python_compile () { debug-print-function ${FUNCNAME} "${@}"; local root=${BUILD_DIR}/install; if [[ -n ${DISTUTILS_USE_PEP517} && -d ${root} ]]; then local bindir=${root}${EPREFIX}/usr/bin; local rscriptdir=${root}$(python_get_scriptdir); [[ -d ${rscriptdir} ]] && die "${rscriptdir} should not exist!"; if [[ -d ${bindir} ]]; then mkdir -p "${rscriptdir}" || die; cp -a --reflink=auto "${bindir}"/. 
"${rscriptdir}"/ || die; fi; mkdir -p "${bindir}" || die; ln -s "${PYTHON}" "${bindir}/${EPYTHON}" || die; ln -s "${EPYTHON}" "${bindir}/python3" || die; ln -s "${EPYTHON}" "${bindir}/python" || die; cat > "${bindir}"/pyvenv.cfg <<-EOF || include-system-site-packages = true EOF die find "${bindir}" -type f -exec sed -i -e "1s@^#!\(${EPREFIX}/usr/bin/\(python\|pypy\)\)@#!${root}\1@" {} + || die; fi } _distutils-r1_post_python_install () { debug-print-function ${FUNCNAME} "${@}"; local sitedir=${D%/}$(python_get_sitedir); if [[ -d ${sitedir} ]]; then local forbidden_package_names=(examples test tests .pytest_cache .hypothesis _trial_temp); local p; for p in "${forbidden_package_names[@]}"; do if [[ -d ${sitedir}/${p} ]]; then die "Package installs '${p}' package which is forbidden and likely a bug in the build system."; fi; done; if [[ -n ${DISTUTILS_USE_PEP517} ]]; then if ! has_version -b ">=dev-python/gpep517-9"; then python_optimize "${sitedir}"; fi; fi; fi } _distutils-r1_post_python_test () { debug-print-function ${FUNCNAME} "${@}"; if [[ ! -n ${DISTUTILS_USE_PEP517} ]]; then _distutils-r1_clean_egg_info; fi } _distutils-r1_print_package_versions () { local packages=(); if [[ -n ${DISTUTILS_USE_PEP517} ]]; then packages+=(dev-python/gpep517 dev-python/installer); case ${DISTUTILS_USE_PEP517} in flit) packages+=(dev-python/flit_core) ;; flit_scm) packages+=(dev-python/flit_core dev-python/flit_scm dev-python/setuptools_scm) ;; hatchling) packages+=(dev-python/hatchling) ;; jupyter) packages+=(dev-python/jupyter_packaging dev-python/setuptools dev-python/setuptools_scm dev-python/wheel) ;; maturin) packages+=(dev-util/maturin) ;; no) return ;; meson-python) packages+=(dev-python/meson-python) ;; pbr) packages+=(dev-python/pbr dev-python/setuptools dev-python/wheel) ;; pdm) packages+=(dev-python/pdm-pep517 dev-python/setuptools) ;; poetry) packages+=(dev-python/poetry-core) ;; setuptools) packages+=(dev-python/setuptools dev-python/setuptools_scm dev-python/wheel) ;; sip) packages+=(dev-python/sip) ;; esac; else case ${DISTUTILS_USE_SETUPTOOLS} in manual | no) return ;; *) packages+=(dev-python/setuptools) ;; esac; fi; local pkg; einfo "Build system packages:"; for pkg in "${packages[@]}"; do local installed=$(best_version "${pkg}"); einfo " $(printf '%-30s' "${pkg}"): ${installed#${pkg}-}"; done } _distutils-r1_run_common_phase () { local DISTUTILS_ORIG_BUILD_DIR=${BUILD_DIR}; local _DISTUTILS_IN_COMMON_IMPL=1; if [[ -n ${DISTUTILS_SINGLE_IMPL} ]]; then _distutils-r1_run_foreach_impl "${@}"; else local -x EPYTHON PYTHON; local -x PATH=${PATH} PKG_CONFIG_PATH=${PKG_CONFIG_PATH}; python_setup "${DISTUTILS_ALL_SUBPHASE_IMPLS[@]}"; local MULTIBUILD_VARIANTS=("${EPYTHON/./_}"); local _DISTUTILS_INITIAL_CWD=${PWD}; multibuild_foreach_variant distutils-r1_run_phase "${@}"; fi } _distutils-r1_run_foreach_impl () { debug-print-function ${FUNCNAME} "${@}"; local _DISTUTILS_INITIAL_CWD=${PWD}; set -- distutils-r1_run_phase "${@}"; if [[ ! -n ${DISTUTILS_SINGLE_IMPL} ]]; then local _DISTUTILS_CALLING_FOREACH_IMPL=1; python_foreach_impl "${@}"; else if [[ ! 
-n ${EPYTHON} ]]; then die "EPYTHON unset, python-single-r1_pkg_setup not called?!"; fi; local BUILD_DIR=${BUILD_DIR:-${S}}; BUILD_DIR=${BUILD_DIR%%/}_${EPYTHON}; "${@}"; fi } _distutils-r1_wrap_scripts () { debug-print-function ${FUNCNAME} "${@}"; [[ ${#} -eq 1 ]] || die "usage: ${FUNCNAME} "; local bindir=${1}; local scriptdir=$(python_get_scriptdir); local f python_files=() non_python_files=(); if [[ -d ${D%/}${scriptdir} ]]; then for f in "${D%/}${scriptdir}"/*; do [[ -d ${f} ]] && die "Unexpected directory: ${f}"; debug-print "${FUNCNAME}: found executable at ${f#${D%/}/}"; local shebang; read -r shebang < "${f}"; if [[ ${shebang} == '#!'*${EPYTHON}* ]]; then debug-print "${FUNCNAME}: matching shebang: ${shebang}"; python_files+=("${f}"); else debug-print "${FUNCNAME}: non-matching shebang: ${shebang}"; non_python_files+=("${f}"); fi; mkdir -p "${D%/}${bindir}" || die; done; for f in "${python_files[@]}"; do local basename=${f##*/}; debug-print "${FUNCNAME}: installing wrapper at ${bindir}/${basename}"; local dosym=dosym; [[ ${EAPI} == [67] ]] && dosym=dosym8; "${dosym}" -r /usr/lib/python-exec/python-exec2 "${bindir#${EPREFIX}}/${basename}"; done; for f in "${non_python_files[@]}"; do local basename=${f##*/}; debug-print "${FUNCNAME}: moving ${f#${D%/}/} to ${bindir}/${basename}"; mv "${f}" "${D%/}${bindir}/${basename}" || die; done; fi } _eapply_patch () { local f=${1}; local prefix=${2}; ebegin "${prefix:-Applying }${f##*/}"; local all_opts=(-p1 -f -g0 --no-backup-if-mismatch "${patch_options[@]}"); if ${patch_cmd} "${all_opts[@]}" --dry-run -s -F0 < "${f}" &> /dev/null; then all_opts+=(-s -F0); fi; ${patch_cmd} "${all_opts[@]}" < "${f}"; failed=${?}; if ! eend "${failed}"; then __helpers_die "patch -p1 ${patch_options[*]} failed with ${f}"; fi } _filter-hardened () { local f; for f in "$@"; do case "${f}" in -fPIC | -fpic | -fPIE | -fpie | -Wl,pie | -pie) gcc-specs-pie || continue; if ! is-flagq -nopie && ! 
is-flagq -no-pie; then if test-flags -nopie > /dev/null; then append-flags -nopie; else append-flags -no-pie; fi; fi ;; -fstack-protector) gcc-specs-ssp || continue; is-flagq -fno-stack-protector || append-flags $(test-flags -fno-stack-protector) ;; -fstack-protector-all) gcc-specs-ssp-to-all || continue; is-flagq -fno-stack-protector-all || append-flags $(test-flags -fno-stack-protector-all) ;; -fno-strict-overflow) gcc-specs-nostrict || continue; is-flagq -fstrict-overflow || append-flags $(test-flags -fstrict-overflow) ;; esac; done } _filter-var () { local f x var=$1 new=(); shift; for f in ${!var}; do for x in "$@"; do [[ ${f} == ${x} ]] && continue 2; done; new+=("${f}"); done; export ${var}="${new[*]}" } _gcc-install-dir () { echo "$(LC_ALL=C $(tc-getCC) -print-search-dirs 2> /dev/null | awk '$1=="install:" {print $2}')" } _gcc-specs-directive_raw () { local cc=$(tc-getCC); local specfiles=$(LC_ALL=C ${cc} -v 2>&1 | awk '$1=="Reading" {print $NF}'); ${cc} -dumpspecs 2> /dev/null | cat - ${specfiles} | awk -v directive=$1 'BEGIN { pspec=""; spec=""; outside=1 } $1=="*"directive":" { pspec=spec; spec=""; outside=0; next } outside || NF==0 || ( substr($1,1,1)=="*" && substr($1,length($1),1)==":" ) { outside=1; next } spec=="" && substr($0,1,1)=="+" { spec=pspec " " substr($0,2); next } { spec=spec $0 } END { print spec }'; return 0 } _gcc-specs-exists () { [[ -f $(_gcc-install-dir)/$1 ]] } _gcc_fullversion () { local ver="$1"; shift; set -- $($(tc-getCPP "$@") -E -P - <<<"__GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__"); eval echo "$ver" } _is_flagq () { local x var="$1[*]"; for x in ${!var}; do [[ ${x} == $2 ]] && return 0; done; return 1 } _multibuild_create_source_copy () { einfo "${MULTIBUILD_VARIANT}: copying to ${BUILD_DIR}"; cp -p -R --reflink=auto "${_MULTIBUILD_INITIAL_BUILD_DIR}" "${BUILD_DIR}" || die } _multibuild_run () { local i=1; while [[ ${!i} == _* ]]; do (( i += 1 )); done; [[ ${i} -le ${#} ]] && einfo "${v}: running ${@:${i}}"; "${@}" } _python_check_EPYTHON () { if [[ -z ${EPYTHON} ]]; then die "EPYTHON unset, invalid call context"; fi } _python_check_locale_sanity () { local -x LC_ALL=${1}; local IFS=; local lc=({a..z}); local uc=({A..Z}); local input="${lc[*]}${uc[*]}"; local output=$(tr '[:lower:][:upper:]' '[:upper:][:lower:]' <<<"${input}"); [[ ${output} == "${uc[*]}${lc[*]}" ]] } _python_export () { debug-print-function ${FUNCNAME} "${@}"; local impl var; case "${1}" in python* | jython*) impl=${1/_/.}; shift ;; pypy | pypy3) impl=${1}; shift ;; *) impl=${EPYTHON}; if [[ -z ${impl} ]]; then die "_python_export called without a python implementation and EPYTHON is unset"; fi ;; esac; debug-print "${FUNCNAME}: implementation: ${impl}"; for var in "$@"; do case "${var}" in EPYTHON) export EPYTHON=${impl}; debug-print "${FUNCNAME}: EPYTHON = ${EPYTHON}" ;; PYTHON) export PYTHON=${EPREFIX}/usr/bin/${impl}; debug-print "${FUNCNAME}: PYTHON = ${PYTHON}" ;; PYTHON_SITEDIR) [[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"; PYTHON_SITEDIR=$( "${PYTHON}" - <<-EOF || die import sysconfig print(sysconfig.get_path("purelib")) EOF ); export PYTHON_SITEDIR; debug-print "${FUNCNAME}: PYTHON_SITEDIR = ${PYTHON_SITEDIR}" ;; PYTHON_INCLUDEDIR) [[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"; PYTHON_INCLUDEDIR=$( "${PYTHON}" - <<-EOF || die import sysconfig print(sysconfig.get_path("platinclude")) EOF ); export PYTHON_INCLUDEDIR; debug-print "${FUNCNAME}: PYTHON_INCLUDEDIR = 
${PYTHON_INCLUDEDIR}"; if [[ ! -d ${PYTHON_INCLUDEDIR} ]]; then die "${impl} does not install any header files!"; fi ;; PYTHON_LIBPATH) [[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"; PYTHON_LIBPATH=$( "${PYTHON}" - <<-EOF || die import os.path, sysconfig print( os.path.join( sysconfig.get_config_var("LIBDIR"), sysconfig.get_config_var("LDLIBRARY")) if sysconfig.get_config_var("LDLIBRARY") else "") EOF ); export PYTHON_LIBPATH; debug-print "${FUNCNAME}: PYTHON_LIBPATH = ${PYTHON_LIBPATH}"; if [[ ! -n ${PYTHON_LIBPATH} ]]; then die "${impl} lacks a (usable) dynamic library"; fi ;; PYTHON_CFLAGS) local val; case "${impl}" in python*) val=$($(tc-getPKG_CONFIG) --cflags ${impl/n/n-}) || die ;; *) die "${impl}: obtaining ${var} not supported" ;; esac; export PYTHON_CFLAGS=${val}; debug-print "${FUNCNAME}: PYTHON_CFLAGS = ${PYTHON_CFLAGS}" ;; PYTHON_LIBS) local val; case "${impl}" in python2* | python3.6 | python3.7*) val=$($(tc-getPKG_CONFIG) --libs ${impl/n/n-}) || die ;; python*) val=$($(tc-getPKG_CONFIG) --libs ${impl/n/n-}-embed) || die ;; *) die "${impl}: obtaining ${var} not supported" ;; esac; export PYTHON_LIBS=${val}; debug-print "${FUNCNAME}: PYTHON_LIBS = ${PYTHON_LIBS}" ;; PYTHON_CONFIG) local flags val; case "${impl}" in python*) [[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"; flags=$( "${PYTHON}" - <<-EOF || die import sysconfig print(sysconfig.get_config_var("ABIFLAGS") or "") EOF ); val=${PYTHON}${flags}-config ;; *) die "${impl}: obtaining ${var} not supported" ;; esac; export PYTHON_CONFIG=${val}; debug-print "${FUNCNAME}: PYTHON_CONFIG = ${PYTHON_CONFIG}" ;; PYTHON_PKG_DEP) local d; case ${impl} in python2.7) PYTHON_PKG_DEP='>=dev-lang/python-2.7.10_p15:2.7' ;; python3.8) PYTHON_PKG_DEP=">=dev-lang/python-3.8.13:3.8" ;; python3.9) PYTHON_PKG_DEP=">=dev-lang/python-3.9.12:3.9" ;; python3.10) PYTHON_PKG_DEP=">=dev-lang/python-3.10.4:3.10" ;; python3.11) PYTHON_PKG_DEP=">=dev-lang/python-3.11.0_beta4:3.11" ;; python*) PYTHON_PKG_DEP="dev-lang/python:${impl#python}" ;; pypy) PYTHON_PKG_DEP='>=dev-python/pypy-7.3.9:0=' ;; pypy3) PYTHON_PKG_DEP='>=dev-python/pypy3-7.3.9_p1:0=' ;; *) die "Invalid implementation: ${impl}" ;; esac; if [[ -n ${PYTHON_REQ_USE} ]]; then PYTHON_PKG_DEP+=[${PYTHON_REQ_USE}]; fi; export PYTHON_PKG_DEP; debug-print "${FUNCNAME}: PYTHON_PKG_DEP = ${PYTHON_PKG_DEP}" ;; PYTHON_SCRIPTDIR) local dir; export PYTHON_SCRIPTDIR=${EPREFIX}/usr/lib/python-exec/${impl}; debug-print "${FUNCNAME}: PYTHON_SCRIPTDIR = ${PYTHON_SCRIPTDIR}" ;; *) die "_python_export: unknown variable ${var}" ;; esac; done } _python_gen_usedep () { debug-print-function ${FUNCNAME} "${@}"; local impl matches=(); _python_verify_patterns "${@}"; for impl in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do if _python_impl_matches "${impl}" "${@}"; then matches+=("python_targets_${impl}(-)?"); fi; done; [[ -n ${matches[@]} ]] || die "No supported implementations match python_gen_usedep patterns: ${@}"; local out=${matches[@]}; echo "${out// /,}" } _python_impl_matches () { [[ ${#} -ge 1 ]] || die "${FUNCNAME}: takes at least 1 parameter"; [[ ${#} -eq 1 ]] && return 0; local impl=${1/./_} pattern; shift; for pattern in "$@"; do case ${pattern} in -2 | python2* | pypy) if [[ ${EAPI} != [67] ]]; then eerror; eerror "Python 2 is no longer supported in Gentoo, please remove Python 2"; eerror "${FUNCNAME[1]} calls."; die "Passing ${pattern} to ${FUNCNAME[1]} is banned in EAPI ${EAPI}"; fi ;; -3) if [[ ${EAPI} 
!= [67] ]]; then eerror; eerror "Python 2 is no longer supported in Gentoo, please remove Python 2"; eerror "${FUNCNAME[1]} calls."; die "Passing ${pattern} to ${FUNCNAME[1]} is banned in EAPI ${EAPI}"; fi; return 0 ;; 3.9) [[ ${impl} == python${pattern/./_} || ${impl} == pypy3 ]] && return 0 ;; 3.8 | 3.1[01]) [[ ${impl} == python${pattern/./_} ]] && return 0 ;; *) [[ ${impl} == ${pattern/./_} ]] && return 0 ;; esac; done; return 1 } _python_multibuild_wrapper () { debug-print-function ${FUNCNAME} "${@}"; local -x EPYTHON PYTHON; local -x PATH=${PATH} PKG_CONFIG_PATH=${PKG_CONFIG_PATH}; _python_export "${MULTIBUILD_VARIANT}" EPYTHON PYTHON; _python_wrapper_setup; "${@}" } _python_obtain_impls () { _python_validate_useflags; if [[ -n ${PYTHON_COMPAT_OVERRIDE} ]]; then MULTIBUILD_VARIANTS=(${PYTHON_COMPAT_OVERRIDE}); return; fi; MULTIBUILD_VARIANTS=(); local impl; for impl in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do has "${impl}" "${PYTHON_COMPAT[@]}" && use "python_targets_${impl}" && MULTIBUILD_VARIANTS+=("${impl}"); done } _python_run_check_deps () { debug-print-function ${FUNCNAME} "${@}"; local impl=${1}; local hasv_args=(-b); [[ ${EAPI} == 6 ]] && hasv_args=(--host-root); einfo "Checking whether ${impl} is suitable ..."; local PYTHON_PKG_DEP; _python_export "${impl}" PYTHON_PKG_DEP; ebegin " ${PYTHON_PKG_DEP}"; has_version "${hasv_args[@]}" "${PYTHON_PKG_DEP}"; eend ${?} || return 1; declare -f python_check_deps > /dev/null || return 0; local PYTHON_USEDEP="python_targets_${impl}(-)"; local PYTHON_SINGLE_USEDEP="python_single_target_${impl}(-)"; ebegin " python_check_deps"; python_check_deps; eend ${?} } _python_set_impls () { local i; if ! declare -p PYTHON_COMPAT &> /dev/null; then die 'PYTHON_COMPAT not declared.'; fi; if [[ $(declare -p PYTHON_COMPAT) != "declare -a"* ]]; then die 'PYTHON_COMPAT must be an array.'; fi; local obsolete=(); if [[ ! -n ${PYTHON_COMPAT_NO_STRICT} ]]; then for i in "${PYTHON_COMPAT[@]}"; do case ${i} in pypy3 | python2_7 | python3_[89] | python3_1[01]) ;; jython2_7 | pypy | pypy1_[89] | pypy2_0 | python2_[5-6] | python3_[1-7]) obsolete+=("${i}") ;; *) if has "${i}" "${_PYTHON_ALL_IMPLS[@]}" "${_PYTHON_HISTORICAL_IMPLS[@]}"; then die "Mis-synced patterns in _python_set_impls: missing ${i}"; else die "Invalid implementation in PYTHON_COMPAT: ${i}"; fi ;; esac; done; fi; if [[ -n ${obsolete[@]} && ${EBUILD_PHASE} == setup ]]; then if [[ $(head -n 1 "${EBUILD}" 2>/dev/null) == *2022* ]]; then eqawarn "Please clean PYTHON_COMPAT of obsolete implementations:"; eqawarn " ${obsolete[*]}"; fi; fi; local supp=() unsupp=(); for i in "${_PYTHON_ALL_IMPLS[@]}"; do if has "${i}" "${PYTHON_COMPAT[@]}"; then supp+=("${i}"); else unsupp+=("${i}"); fi; done; if [[ ! 
-n ${supp[@]} ]]; then if [[ -n ${_PYTHON_ALLOW_PY27} ]] && has python2_7 "${PYTHON_COMPAT[@]}"; then supp+=(python2_7); else die "No supported implementation in PYTHON_COMPAT."; fi; fi; if [[ -n ${_PYTHON_SUPPORTED_IMPLS[@]} ]]; then if [[ ${_PYTHON_SUPPORTED_IMPLS[@]} != ${supp[@]} ]]; then eerror "Supported impls (PYTHON_COMPAT) changed between inherits!"; eerror "Before: ${_PYTHON_SUPPORTED_IMPLS[*]}"; eerror "Now : ${supp[*]}"; die "_PYTHON_SUPPORTED_IMPLS integrity check failed"; fi; if [[ ${_PYTHON_UNSUPPORTED_IMPLS[@]} != ${unsupp[@]} ]]; then eerror "Unsupported impls changed between inherits!"; eerror "Before: ${_PYTHON_UNSUPPORTED_IMPLS[*]}"; eerror "Now : ${unsupp[*]}"; die "_PYTHON_UNSUPPORTED_IMPLS integrity check failed"; fi; else _PYTHON_SUPPORTED_IMPLS=("${supp[@]}"); _PYTHON_UNSUPPORTED_IMPLS=("${unsupp[@]}"); readonly _PYTHON_SUPPORTED_IMPLS _PYTHON_UNSUPPORTED_IMPLS; fi } _python_validate_useflags () { debug-print-function ${FUNCNAME} "${@}"; if [[ -n ${PYTHON_COMPAT_OVERRIDE} ]]; then if [[ ! -n ${_PYTHON_COMPAT_OVERRIDE_WARNED} ]]; then ewarn "WARNING: PYTHON_COMPAT_OVERRIDE in effect. The following Python"; ewarn "implementations will be enabled:"; ewarn; ewarn " ${PYTHON_COMPAT_OVERRIDE}"; ewarn; ewarn "Dependencies won't be satisfied, and PYTHON_TARGETS will be ignored."; _PYTHON_COMPAT_OVERRIDE_WARNED=1; fi; return; fi; local i; for i in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do use "python_targets_${i}" && return 0; done; eerror "No Python implementation selected for the build. Please add one"; eerror "of the following values to your PYTHON_TARGETS (in make.conf):"; eerror; eerror "${PYTHON_COMPAT[@]}"; echo; die "No supported Python implementation in PYTHON_TARGETS." } _python_verify_patterns () { debug-print-function ${FUNCNAME} "${@}"; local impl pattern; for pattern in "$@"; do case ${pattern} in -[23] | 3.[89] | 3.1[01]) continue ;; esac; for impl in "${_PYTHON_ALL_IMPLS[@]}" "${_PYTHON_HISTORICAL_IMPLS[@]}"; do [[ ${impl} == ${pattern/./_} ]] && continue 2; done; die "Invalid implementation pattern: ${pattern}"; done } _python_wrapper_setup () { debug-print-function ${FUNCNAME} "${@}"; local workdir=${1:-${T}/${EPYTHON}}; local impl=${2:-${EPYTHON}}; [[ -n ${workdir} ]] || die "${FUNCNAME}: no workdir specified."; [[ -n ${impl} ]] || die "${FUNCNAME}: no impl nor EPYTHON specified."; if [[ ! 
-x ${workdir}/bin/python ]]; then mkdir -p "${workdir}"/{bin,pkgconfig} || die; rm -f "${workdir}"/bin/python{,2,3}{,-config} || die; rm -f "${workdir}"/bin/2to3 || die; rm -f "${workdir}"/pkgconfig/python{2,3}{,-embed}.pc || die; local EPYTHON PYTHON; _python_export "${impl}" EPYTHON PYTHON; local pyver pyother; if [[ ${EPYTHON} != python2* ]]; then pyver=3; pyother=2; else pyver=2; pyother=3; fi; cat > "${workdir}/bin/python" <<-_EOF_ || #!/bin/sh exec "${PYTHON}" "\${@}" _EOF_ die cp "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die; chmod +x "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die; local nonsupp=("python${pyother}" "python${pyother}-config"); if [[ ${EPYTHON} == python* ]]; then cat > "${workdir}/bin/python-config" <<-_EOF_ || #!/bin/sh exec "${PYTHON}-config" "\${@}" _EOF_ die cp "${workdir}/bin/python-config" "${workdir}/bin/python${pyver}-config" || die; chmod +x "${workdir}/bin/python-config" "${workdir}/bin/python${pyver}-config" || die; ln -s "${PYTHON/python/2to3-}" "${workdir}"/bin/2to3 || die; ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}.pc "${workdir}"/pkgconfig/python${pyver}.pc || die; if [[ ${EPYTHON} != python[23].[67] ]]; then ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}-embed.pc "${workdir}"/pkgconfig/python${pyver}-embed.pc || die; fi; else nonsupp+=(2to3 python-config "python${pyver}-config"); fi; local x; for x in "${nonsupp[@]}"; do cat > "${workdir}"/bin/${x} <<-_EOF_ || #!/bin/sh echo "${ECLASS}: ${FUNCNAME}: ${x} is not supported by ${EPYTHON} (PYTHON_COMPAT)" >&2 exit 127 _EOF_ die chmod +x "${workdir}"/bin/${x} || die; done; fi; if [[ ${PATH##:*} != ${workdir}/bin ]]; then PATH=${workdir}/bin${PATH:+:${PATH}}; fi; if [[ ${PKG_CONFIG_PATH##:*} != ${workdir}/pkgconfig ]]; then PKG_CONFIG_PATH=${workdir}/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}}; fi; export PATH PKG_CONFIG_PATH } _setup-allowed-flags () { ALLOWED_FLAGS=(-pipe -O '-O[12sg]' '-mcpu=*' '-march=*' '-mtune=*' '-fstack-protector*' '-fstack-check*' -fno-stack-check -fstack-clash-protection '-fcf-protection=*' -fbounds-check -fbounds-checking -fno-PIE -fno-pie -nopie -no-pie '-mindirect-branch=*' -mindirect-branch-register '-mfunction-return=*' -mretpoline -fno-unit-at-a-time -fno-strict-overflow '-fsanitize*' '-fno-sanitize*' -g '-g[0-9]' -ggdb '-ggdb[0-9]' -gdwarf '-gdwarf-*' -gstabs -gstabs+ -gz -glldb -fno-diagnostics-color '-fmessage-length=*' -fno-ident -fpermissive -frecord-gcc-switches -frecord-command-line '-fdiagnostics*' '-fplugin*' '-W*' -w '-[DUILR]*' '-Wl,*' '-fuse-ld=*'); ALLOWED_FLAGS+=('-fno-stack-protector*' '-fabi-version=*' -fno-strict-aliasing -fno-bounds-check -fno-bounds-checking -fstrict-overflow -fno-omit-frame-pointer '-fno-builtin*'); ALLOWED_FLAGS+=('-mregparm=*' -mno-app-regs -mapp-regs -mno-mmx -mno-sse -mno-sse2 -mno-sse3 -mno-ssse3 -mno-sse4 -mno-sse4.1 -mno-sse4.2 -mno-avx -mno-aes -mno-pclmul -mno-sse4a -mno-3dnow -mno-popcnt -mno-abm -mips1 -mips2 -mips3 -mips4 -mips32 -mips64 -mips16 -mplt -msoft-float -mno-soft-float -mhard-float -mno-hard-float '-mfpu=*' -mieee -mieee-with-inexact '-mschedule=*' -mfloat-gprs -mspe -mno-spe -mtls-direct-seg-refs -mno-tls-direct-seg-refs -mflat -mno-flat -mno-faster-structs -mfaster-structs -m32 -m64 -mx32 '-mabi=*' -mlittle-endian -mbig-endian -EL -EB -fPIC -mlive-g0 '-mcmodel=*' -mstack-bias -mno-stack-bias -msecure-plt '-m*-toc' '-mfloat-abi=*' -mfix-r4000 -mno-fix-r4000 -mfix-r4400 -mno-fix-r4400 -mfix-rm7000 -mno-fix-rm7000 -mfix-r10000 -mno-fix-r10000 
'-mr10k-cache-barrier=*' -mthumb -marm -ffixed-x18 -mno-fma4 -mno-movbe -mno-xop -mno-lwp -mno-fsgsbase -mno-rdrnd -mno-f16c -mno-bmi -mno-tbm -mno-avx2 -mno-bmi2 -mno-fma -mno-lzcnt -mno-fxsr -mno-hle -mno-rtm -mno-xsave -mno-xsaveopt -mno-avx512cd -mno-avx512er -mno-avx512f -mno-avx512pf -mno-sha); ALLOWED_FLAGS+=(-mstackrealign) } _tc-getPROG () { local tuple=$1; local v var vars=$2; local prog=($3); var=${vars%% *}; for v in ${vars}; do if [[ -n ${!v} ]]; then export ${var}="${!v}"; echo "${!v}"; return 0; fi; done; local search=; [[ -n $4 ]] && search=$(type -p $4-${prog[0]}); [[ -z ${search} && -n ${!tuple} ]] && search=$(type -p ${!tuple}-${prog[0]}); [[ -n ${search} ]] && prog[0]=${search##*/}; export ${var}="${prog[*]}"; echo "${!var}" } _tc-has-openmp () { local base="${T}/test-tc-openmp"; cat <<-EOF > "${base}.c" #include <omp.h> int main() { int nthreads, tid, ret = 0; #pragma omp parallel private(nthreads, tid) { tid = omp_get_thread_num(); nthreads = omp_get_num_threads(); ret += tid + nthreads; } return ret; } EOF $(tc-getCC "$@") -fopenmp "${base}.c" -o "${base}" &> /dev/null; local ret=$?; rm -f "${base}"*; return ${ret} } _test-compile-PROG () { local lang=$1; local code=$2; shift 2; [[ -z "${lang}" ]] && return 1; [[ -z "${code}" ]] && return 1; local compiler filename_in filename_out args=() libs=(); case "${lang}" in c) compiler="$(tc-getCC)"; filename_in="${T}/test.c"; filename_out="${T}/test.o"; args+=(${CFLAGS[@]} -xc -c) ;; c++) compiler="$(tc-getCXX)"; filename_in="${T}/test.cc"; filename_out="${T}/test.o"; args+=(${CXXFLAGS[@]} -xc++ -c) ;; f77) compiler="$(tc-getF77)"; filename_in="${T}/test.f"; filename_out="${T}/test.o"; args+=(${FFLAGS[@]} -xf77 -c) ;; f95) compiler="$(tc-getFC)"; filename_in="${T}/test.f90"; filename_out="${T}/test.o"; args+=(${FCFLAGS[@]} -xf95 -c) ;; c+ld) compiler="$(tc-getCC)"; filename_in="${T}/test.c"; filename_out="${T}/test.exe"; args+=(${CFLAGS[@]} ${LDFLAGS[@]} -xc); libs+=(${LIBS[@]}) ;; c+++ld) compiler="$(tc-getCXX)"; filename_in="${T}/test.cc"; filename_out="${T}/test.exe"; args+=(${CXXFLAGS[@]} ${LDFLAGS[@]} -xc++); libs+=(${LIBS[@]}) ;; f77+ld) compiler="$(tc-getF77)"; filename_in="${T}/test.f"; filename_out="${T}/test.exe"; args+=(${FFLAGS[@]} ${LDFLAGS[@]} -xf77); libs+=(${LIBS[@]}) ;; f95+ld) compiler="$(tc-getFC)"; filename_in="${T}/test.f90"; filename_out="${T}/test.exe"; args+=(${FCFLAGS[@]} ${LDFLAGS[@]} -xf95); libs+=(${LIBS[@]}) ;; *) die "Unknown compiled language ${lang}" ;; esac; printf "%s\n" "${code}" > "${filename_in}" || die "Failed to create '${filename_in}'"; "${compiler}" ${args[@]} "${filename_in}" -o "${filename_out}" ${libs[@]} &> /dev/null } _test-flag-PROG () { local comp=$1; local lang=$2; shift 2; if [[ -z ${comp} ]]; then return 1; fi; if [[ -z $1 ]]; then return 1; fi; comp=($(tc-get${comp})); if !
type -p ${comp[0]} > /dev/null; then return 1; fi; local in_src in_ext cmdline_extra=(); case "${lang}" in c) in_ext='c'; in_src='int main(void) { return 0; }'; cmdline_extra+=(-xc -c) ;; c++) in_ext='cc'; in_src='int main(void) { return 0; }'; cmdline_extra+=(-xc++ -c) ;; f77) in_ext='f'; in_src=' end'; cmdline_extra+=(-xf77 -c) ;; f95) in_ext='f90'; in_src='end'; cmdline_extra+=(-xf95 -c) ;; c+ld) in_ext='c'; in_src='int main(void) { return 0; }'; if is-ldflagq -fuse-ld=*; then fuse_ld_value=$(get-flag -fuse-ld=*); cmdline_extra+=(${fuse_ld_value}); fi; cmdline_extra+=(-xc) ;; esac; local test_in=${T}/test-flag.${in_ext}; local test_out=${T}/test-flag.exe; printf "%s\n" "${in_src}" > "${test_in}" || die "Failed to create '${test_in}'"; local cmdline=("${comp[@]}" -Werror "$@" "${cmdline_extra[@]}" "${test_in}" -o "${test_out}"); "${cmdline[@]}" &> /dev/null } _test-flags-PROG () { local comp=$1; local flags=(); local x; shift; [[ -z ${comp} ]] && return 1; while (( $# )); do case "$1" in --param | -B) if test-flag-${comp} "$1" "$2"; then flags+=("$1" "$2"); fi; shift 2 ;; *) if test-flag-${comp} "$1"; then flags+=("$1"); fi; shift 1 ;; esac; done; echo "${flags[*]}"; [[ ${#flags[@]} -gt 0 ]] } all-flag-vars () { echo {ADA,C,CPP,CXX,CCAS,F,FC,LD}FLAGS } append-atomic-flags () { local code; read -r -d '' code <<-EOF int main() { return 0; } EOF _test-compile-PROG "c+ld" "${code}" || return; local bytesizes; [[ "${#}" == "0" ]] && bytesizes=("1" "2" "4" "8") || bytesizes="${@}"; for bytesize in ${bytesizes[@]}; do read -r -d '' code <<-EOF #include <stdint.h> int main() { uint$((${bytesize} * 8))_t a = 0; __atomic_add_fetch(&a, 3, __ATOMIC_RELAXED); __atomic_compare_exchange_n(&a, &a, 2, 1, __ATOMIC_RELAXED, __ATOMIC_RELAXED); return 0; } EOF _test-compile-PROG "c+ld" "${code}" && continue test-flags-CCLD "-latomic" &> /dev/null || die "-latomic is required but not supported by $(tc-getCC)"; append-libs "-latomic"; _test-compile-PROG "c+ld" "${code}" || die "libatomic does not include an implementation of ${bytesize}-byte atomics for this toolchain"; return; done } append-cflags () { [[ $# -eq 0 ]] && return 0; export CFLAGS+=" $*"; return 0 } append-cppflags () { [[ $# -eq 0 ]] && return 0; export CPPFLAGS+=" $*"; return 0 } append-cxxflags () { [[ $# -eq 0 ]] && return 0; export CXXFLAGS+=" $*"; return 0 } append-fflags () { [[ $# -eq 0 ]] && return 0; export FFLAGS+=" $*"; export FCFLAGS+=" $*"; return 0 } append-flags () { [[ $# -eq 0 ]] && return 0; case " $* " in *' '-[DIU]*) eqawarn 'Please use append-cppflags for preprocessor flags' ;; *' '-L* | *' '-Wl,*) eqawarn 'Please use append-ldflags for linker flags' ;; esac; append-cflags "$@"; append-cxxflags "$@"; append-fflags "$@"; return 0 } append-ldflags () { [[ $# -eq 0 ]] && return 0; local flag; for flag in "$@"; do [[ ${flag} == -l* ]] && eqawarn "Appending a library link instruction (${flag}); libraries to link to should not be passed through LDFLAGS"; done; export LDFLAGS="${LDFLAGS} $*"; return 0 } append-lfs-flags () { [[ $# -ne 0 ]] && die "append-lfs-flags takes no arguments"; append-cppflags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE } append-libs () { [[ $# -eq 0 ]] && return 0; local flag; for flag in "$@"; do if [[ -z "${flag// }" ]]; then eqawarn "Appending an empty argument to LIBS is invalid!
Skipping."; continue; fi; case $flag in -[lL]*) export LIBS="${LIBS} ${flag}" ;; -*) eqawarn "Appending non-library to LIBS (${flag}); Other linker flags should be passed via LDFLAGS"; export LIBS="${LIBS} ${flag}" ;; *) export LIBS="${LIBS} -l${flag}" ;; esac; done; return 0 } bazel_get_flags () { local i fs=(); for i in ${CFLAGS}; do fs+=("--conlyopt=${i}"); done; for i in ${BUILD_CFLAGS}; do fs+=("--host_conlyopt=${i}"); done; for i in ${CXXFLAGS}; do fs+=("--cxxopt=${i}"); done; for i in ${BUILD_CXXFLAGS}; do fs+=("--host_cxxopt=${i}"); done; for i in ${CPPFLAGS}; do fs+=("--conlyopt=${i}" "--cxxopt=${i}"); done; for i in ${BUILD_CPPFLAGS}; do fs+=("--host_conlyopt=${i}" "--host_cxxopt=${i}"); done; for i in ${LDFLAGS}; do fs+=("--linkopt=${i}"); done; for i in ${BUILD_LDFLAGS}; do fs+=("--host_linkopt=${i}"); done; echo "${fs[*]}" } bazel_load_distfiles () { local file=""; local rename=0; [[ -n "${@}" ]] || die "Missing args"; mkdir -p "${T}/bazel-distdir" || die; for word in ${@}; do if [[ "${word}" == "->" ]]; then rename=1; else if [[ "${word}" == ")" ]]; then continue; else if [[ "${word}" == "(" ]]; then continue; else if [[ "${word}" == ?(\!)[A-Za-z0-9]*([A-Za-z0-9+_@-])\? ]]; then continue; else if [[ ${rename} -eq 1 ]]; then if [[ "${A}" == *"${word}"* ]]; then echo "Copying ${word} to bazel distdir as ${file}"; ln -s "${DISTDIR}/${word}" "${T}/bazel-distdir/${file}" || die; fi; rename=0; file=""; else if [[ -n "${file}" && "${A}" == *"${file}"* ]]; then echo "Copying ${file} to bazel distdir"; ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die; fi; file="${word##*/}"; fi; fi; fi; fi; fi; done; if [[ -n "${file}" ]]; then echo "Copying ${file} to bazel distdir"; ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die; fi } bazel_setup_bazelrc () { if [[ -f "${T}/bazelrc" ]]; then return; fi; addpredict /proc; mkdir -p "${T}/bazel-cache" || die; mkdir -p "${T}/bazel-distdir" || die; cat > "${T}/bazelrc" <<-EOF || startup --batch # dont strip HOME, portage sets a temp per-package dir build --action_env HOME # make bazel respect MAKEOPTS build --jobs=$(makeopts_jobs) build --compilation_mode=opt --host_compilation_mode=opt # FLAGS build $(bazel_get_flags) # Use standalone strategy to deactivate the bazel sandbox, since it # conflicts with FEATURES=sandbox. build --spawn_strategy=standalone --genrule_strategy=standalone test --spawn_strategy=standalone --genrule_strategy=standalone build --strip=never build --verbose_failures --noshow_loading_progress test --verbose_test_summary --verbose_failures --noshow_loading_progress # make bazel only fetch distfiles from the cache fetch --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/" build --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/" build --define=PREFIX=${EPREFIX}/usr build --define=LIBDIR=\$(PREFIX)/$(get_libdir) build --define=INCLUDEDIR=\$(PREFIX)/include EOF die if tc-is-cross-compiler; then echo "build --distinct_host_configuration" >> "${T}/bazelrc" || die; else echo "build --nodistinct_host_configuration" >> "${T}/bazelrc" || die; fi } build_sphinx () { debug-print-function ${FUNCNAME} "${@}"; [[ ${#} -eq 1 ]] || die "${FUNCNAME} takes 1 arg: "; local dir=${1}; sed -i -e 's:^intersphinx_mapping:disabled_&:' "${dir}"/conf.py || die; local command=("${EPYTHON}" -m sphinx.cmd.build); if ! 
"${EPYTHON}" -c "import sphinx.cmd.build" 2> /dev/null; then command=(sphinx-build); fi; command+=(-b html -d "${dir}"/_build/doctrees "${dir}" "${dir}"/_build/html); echo "${command[@]}" 1>&2; "${command[@]}" || die; HTML_DOCS+=("${dir}/_build/html/.") } check-reqs_disk () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_disk "$@" } check-reqs_get_kibibytes () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_get_kibibytes "$@" } check-reqs_get_number () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_get_number "$@" } check-reqs_get_unit () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_get_unit "$@" } check-reqs_memory () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_memory "$@" } check-reqs_output () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_get_unit "$@" } check-reqs_pkg_pretend () { debug-print-function ${FUNCNAME} "$@"; check-reqs_pkg_setup "$@" } check-reqs_pkg_setup () { debug-print-function ${FUNCNAME} "$@"; _check-reqs_prepare; _check-reqs_run; _check-reqs_output } check-reqs_prepare () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_prepare "$@" } check-reqs_run () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_run "$@" } check-reqs_start_phase () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_start_phase "$@" } check-reqs_unsatisfied () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _check-reqs_unsatisfied "$@" } clang-fullversion () { _clang_fullversion '$1.$2.$3' "$@" } clang-major-version () { _clang_fullversion '$1' "$@" } clang-micro-version () { _clang_fullversion '$3' "$@" } clang-minor-version () { _clang_fullversion '$2' "$@" } clang-version () { _clang_fullversion '$1.$2' "$@" } count_impls () { num_pythons_enabled=$((${num_pythons_enabled} + 1)) } cuda_add_sandbox () { debug-print-function ${FUNCNAME} "$@"; local i; for i in /dev/nvidia*; do if [[ $1 == '-w' ]]; then addwrite $i; else addpredict $i; fi; done } cuda_cudnn_version () { debug-print-function ${FUNCNAME} "$@"; local v; v="$(best_version dev-libs/cudnn)"; v="${v##*cudnn-}"; ver_cut 1-2 "${v}" } cuda_gccdir () { debug-print-function ${FUNCNAME} "$@"; local dirs gcc_bindir ver vers="" flag; if ! tc-is-gcc; then ewarn "Currently we only support the gnu compiler suite"; return 2; fi; while [[ -n "$1" ]]; do case $1 in -f) flag="--compiler-bindir " ;; *) ;; esac; shift; done; if ! 
vers="$(cuda-config -s)"; then eerror "Could not execute cuda-config"; eerror "Make sure >=dev-util/nvidia-cuda-toolkit-4.2.9-r1 is installed"; die "cuda-config not found"; fi; if [[ -z ${vers} ]]; then die "Could not determine supported gcc versions from cuda-config"; fi; ver=$(gcc-version); if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then dirs=(${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/); gcc_bindir="${dirs[${#dirs[@]}-1]}"; fi; if [[ -z ${gcc_bindir} ]]; then ver=$(best_version "sys-devel/gcc"); ver=$(ver_cut 1-2 "${ver##*sys-devel/gcc-}"); if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then dirs=(${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/); gcc_bindir="${dirs[${#dirs[@]}-1]}"; fi; fi; for ver in ${vers}; do if has_version "=sys-devel/gcc-${ver}*"; then dirs=(${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/); gcc_bindir="${dirs[${#dirs[@]}-1]}"; fi; done; if [[ -n ${gcc_bindir} ]]; then if [[ -n ${flag} ]]; then echo "${flag}\"${gcc_bindir%/}\""; else echo "${gcc_bindir%/}"; fi; return 0; else eerror "Only gcc version(s) ${vers} are supported,"; eerror "of which none is installed"; die "Only gcc version(s) ${vers} are supported"; return 1; fi } cuda_sanitize () { debug-print-function ${FUNCNAME} "$@"; local rawldflags=$(raw-ldflags); [[ "${CUDA_VERBOSE}" == true ]] && NVCCFLAGS+=" -v"; NVCCFLAGS+=" $(cuda_gccdir -f)"; NVCCFLAGS+=" --compiler-options \"${CXXFLAGS}\" --linker-options \"${rawldflags// /,}\""; debug-print "Using ${NVCCFLAGS} for cuda"; export NVCCFLAGS } cuda_src_prepare () { debug-print-function ${FUNCNAME} "$@"; cuda_sanitize } cuda_toolkit_version () { debug-print-function ${FUNCNAME} "$@"; local v; v="$(best_version dev-util/nvidia-cuda-toolkit)"; v="${v##*cuda-toolkit-}"; ver_cut 1-2 "${v}" } distutils-r1_python_compile () { debug-print-function ${FUNCNAME} "${@}"; _python_check_EPYTHON; case ${DISTUTILS_USE_PEP517:-setuptools} in setuptools) if [[ -n ${DISTUTILS_USE_PEP517} ]]; then if [[ -d build ]]; then eqawarn "A 'build' directory exists already. Artifacts from this directory may"; eqawarn "be picked up by setuptools when building for another interpreter."; eqawarn "Please remove this directory prior to building."; fi; else _distutils-r1_copy_egg_info; fi; local jobs=$(makeopts_jobs "${MAKEOPTS} ${*}"); if [[ -n ${DISTUTILS_USE_PEP517} ]]; then if [[ 1 -ne ${jobs} && 2 -eq $( find '(' -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.cxx' -o -name '*.c++' -o -name '*.m' -o -name '*.mm' -o -name '*.pyx' ')' -printf '\n' | head -n 2 | wc -l ) ]]; then esetup.py build_ext -j "${jobs}" "${@}"; fi; else esetup.py build -j "${jobs}" "${@}"; fi ;; maturin) local -x MATURIN_PEP517_ARGS=" --jobs=$(makeopts_jobs) --skip-auditwheel $(in_iuse debug && usex debug --profile=dev '') " ;; no) return ;; esac; if [[ -n ${DISTUTILS_USE_PEP517} ]]; then distutils_pep517_install "${BUILD_DIR}/install"; fi } distutils-r1_python_install () { debug-print-function ${FUNCNAME} "${@}"; _python_check_EPYTHON; local scriptdir=${EPREFIX}/usr/bin; local merge_root=; if [[ -n ${DISTUTILS_USE_PEP517} ]]; then local root=${BUILD_DIR}/install; local reg_scriptdir=${root}/${scriptdir}; local wrapped_scriptdir=${root}$(python_get_scriptdir); rm "${reg_scriptdir}"/{"${EPYTHON}",python3,python,pyvenv.cfg} || die; if type diff &> /dev/null; then ( cd "${reg_scriptdir}" && find . -mindepth 1 ) | sort > "${T}"/.distutils-files-bin; assert "listing ${reg_scriptdir} failed"; ( if [[ -d ${wrapped_scriptdir} ]]; then cd "${wrapped_scriptdir}" && find . 
-mindepth 1; fi ) | sort > "${T}"/.distutils-files-wrapped; assert "listing ${wrapped_scriptdir} failed"; if ! diff -U 0 "${T}"/.distutils-files-{bin,wrapped}; then die "File lists for ${reg_scriptdir} and ${wrapped_scriptdir} differ (see diff above)"; fi; fi; rm -r "${reg_scriptdir}" || die; if [[ -n ${DISTUTILS_SINGLE_IMPL} ]]; then if [[ -d ${wrapped_scriptdir} ]]; then mv "${wrapped_scriptdir}" "${reg_scriptdir}" || die; fi; fi; find "${BUILD_DIR}"/install -type d -empty -delete || die; [[ -d ${BUILD_DIR}/install ]] && merge_root=1; else local root=${D%/}/_${EPYTHON}; [[ -n ${DISTUTILS_SINGLE_IMPL} ]] && root=${D%/}; local args=("${DISTUTILS_ARGS[@]}" "${mydistutilsargs[@]}" install --skip-build --root="${root}" "${args[@]}" "${@}"); local DISTUTILS_ARGS=(); local mydistutilsargs=(); local -x PYTHONDONTWRITEBYTECODE=; addpredict "${EPREFIX}/usr/lib/${EPYTHON}"; addpredict "${EPREFIX}/usr/lib/pypy3.9"; addpredict "${EPREFIX}/usr/local"; if [[ ! -n ${DISTUTILS_SINGLE_IMPL} ]]; then merge_root=1; set -- "${args[@]}"; args=(); while [[ -n ${@} ]]; do local a=${1}; shift; case ${a} in --install-scripts=*) scriptdir=${a#--install-scripts=} ;; --install-scripts) scriptdir=${1}; shift ;; *) args+=("${a}") ;; esac; done; fi; esetup.py "${args[@]}"; fi; if [[ -n ${merge_root} ]]; then multibuild_merge_root "${root}" "${D%/}"; fi; if [[ ! -n ${DISTUTILS_SINGLE_IMPL} ]]; then _distutils-r1_wrap_scripts "${scriptdir}"; fi } distutils-r1_python_install_all () { debug-print-function ${FUNCNAME} "${@}"; _distutils-r1_check_all_phase_mismatch; einstalldocs } distutils-r1_python_prepare_all () { debug-print-function ${FUNCNAME} "${@}"; _distutils-r1_check_all_phase_mismatch; if [[ ! -n ${DISTUTILS_OPTIONAL} ]]; then default; fi; if [[ ! -n ${DISTUTILS_IN_SOURCE_BUILD+1} ]]; then if declare -f python_prepare > /dev/null; then DISTUTILS_IN_SOURCE_BUILD=1; fi; fi; if [[ ! -n ${DISTUTILS_USE_PEP517} ]]; then _distutils-r1_disable_ez_setup; _distutils-r1_handle_pyproject_toml; case ${DISTUTILS_USE_SETUPTOOLS} in no) eqawarn "Non-PEP517 builds are deprecated for ebuilds using plain distutils."; eqawarn "Please migrate to DISTUTILS_USE_PEP517=setuptools."; eqawarn "Please see Python Guide for more details:"; eqawarn " https://projects.gentoo.org/python/guide/distutils.html" ;; esac; fi; if [[ -n ${DISTUTILS_IN_SOURCE_BUILD} && ! -n ${DISTUTILS_SINGLE_IMPL} ]]; then python_copy_sources; fi; python_export_utf8_locale; [[ ${EAPI} == 6 ]] && xdg_environment_reset; _distutils-r1_print_package_versions; _DISTUTILS_DEFAULT_CALLED=1 } distutils-r1_python_test () { debug-print-function ${FUNCNAME} "${@}"; if [[ -z ${_DISTUTILS_TEST_RUNNER} ]]; then die "${FUNCNAME} can be only used after calling distutils_enable_tests"; fi; _python_check_EPYTHON; if [[ -n ${_DISTUTILS_TEST_INSTALL} ]]; then distutils_install_for_testing; fi; case ${_DISTUTILS_TEST_RUNNER} in nose) "${EPYTHON}" -m nose -v "${@}" ;; pytest) epytest ;; setup.py) nonfatal esetup.py test --verbose ;; unittest) eunittest ;; *) die "Mis-synced test runner between ${FUNCNAME} and distutils_enable_testing" ;; esac; if [[ ${?} -ne 0 ]]; then die "Tests failed with ${EPYTHON}"; fi } distutils-r1_run_phase () { debug-print-function ${FUNCNAME} "${@}"; if [[ -n ${DISTUTILS_IN_SOURCE_BUILD} ]]; then [[ -n ${DISTUTILS_USE_PEP517} ]] && die "DISTUTILS_IN_SOURCE_BUILD is not supported in PEP517 mode"; if [[ ! 
-n ${DISTUTILS_SINGLE_IMPL} ]] && has "${EPYTHON/./_}" ${PYTHON_TARGETS}; then cd "${BUILD_DIR}" || die; fi; local BUILD_DIR=${BUILD_DIR}/build; fi; if [[ -n ${DISTUTILS_USE_PEP517} ]]; then local -x PATH=${BUILD_DIR}/install${EPREFIX}/usr/bin:${PATH}; else local -x PYTHONPATH="${BUILD_DIR}/lib:${PYTHONPATH}"; local -x PATH=${PATH}; local -x SETUPTOOLS_USE_DISTUTILS="${SETUPTOOLS_USE_DISTUTILS:-stdlib}"; mkdir -p "${BUILD_DIR}/lib" || die; fi; local -x AR=${AR} CC=${CC} CPP=${CPP} CXX=${CXX}; tc-export AR CC CPP CXX; local ldopts; case "${CHOST}" in *-aix*) ldopts='-shared -Wl,-berok' ;; *-darwin*) ldopts='-bundle -undefined dynamic_lookup' ;; *) ldopts='-shared' ;; esac; local -x LDSHARED="${CC} ${ldopts}" LDCXXSHARED="${CXX} ${ldopts}"; local _DISTUTILS_POST_PHASE_RM=(); "${@}"; local ret=${?}; if [[ -n ${_DISTUTILS_POST_PHASE_RM} ]]; then rm "${_DISTUTILS_POST_PHASE_RM[@]}" || die; fi; cd "${_DISTUTILS_INITIAL_CWD}" || die; if [[ ! -n ${_DISTUTILS_IN_COMMON_IMPL} ]] && declare -f "_distutils-r1_post_python_${EBUILD_PHASE}" > /dev/null; then "_distutils-r1_post_python_${EBUILD_PHASE}"; fi; return "${ret}" } distutils-r1_src_compile () { debug-print-function ${FUNCNAME} "${@}"; local ret=0; if declare -f python_compile > /dev/null; then _distutils-r1_run_foreach_impl python_compile || ret=${?}; else _distutils-r1_run_foreach_impl distutils-r1_python_compile || ret=${?}; fi; if declare -f python_compile_all > /dev/null; then _distutils-r1_run_common_phase python_compile_all || ret=${?}; fi; return ${ret} } distutils-r1_src_configure () { debug-print-function ${FUNCNAME} "${@}"; local ret=0; if declare -f python_configure > /dev/null; then _distutils-r1_run_foreach_impl python_configure || ret=${?}; fi; if declare -f python_configure_all > /dev/null; then _distutils-r1_run_common_phase python_configure_all || ret=${?}; fi; return ${ret} } distutils-r1_src_install () { debug-print-function ${FUNCNAME} "${@}"; local ret=0; if declare -f python_install > /dev/null; then _distutils-r1_run_foreach_impl python_install || ret=${?}; else _distutils-r1_run_foreach_impl distutils-r1_python_install || ret=${?}; fi; if declare -f python_install_all > /dev/null; then _distutils-r1_run_common_phase python_install_all || ret=${?}; else _distutils-r1_run_common_phase distutils-r1_python_install_all || ret=${?}; fi; _distutils-r1_check_namespace_pth; return ${ret} } distutils-r1_src_prepare () { debug-print-function ${FUNCNAME} "${@}"; local ret=0; local _DISTUTILS_DEFAULT_CALLED; if declare -f python_prepare_all > /dev/null; then python_prepare_all || ret=${?}; else distutils-r1_python_prepare_all || ret=${?}; fi; if [[ ! 
-n ${_DISTUTILS_DEFAULT_CALLED} ]]; then die "QA: python_prepare_all() didn't call distutils-r1_python_prepare_all"; fi; if declare -f python_prepare > /dev/null; then _distutils-r1_run_foreach_impl python_prepare || ret=${?}; fi; return ${ret} } distutils-r1_src_test () { debug-print-function ${FUNCNAME} "${@}"; local ret=0; if declare -f python_test > /dev/null; then _distutils-r1_run_foreach_impl python_test || ret=${?}; fi; if declare -f python_test_all > /dev/null; then _distutils-r1_run_common_phase python_test_all || ret=${?}; fi; return ${ret} } distutils_enable_sphinx () { debug-print-function ${FUNCNAME} "${@}"; [[ ${#} -ge 1 ]] || die "${FUNCNAME} takes at least one arg: "; _DISTUTILS_SPHINX_SUBDIR=${1}; shift; _DISTUTILS_SPHINX_PLUGINS=("${@}"); local deps autodoc=1 d; deps=">=dev-python/sphinx-4.5.0-r1[\${PYTHON_USEDEP}]"; for d in "$@"; do if [[ ${d} == --no-autodoc ]]; then autodoc=; else deps+=" ${d}[\${PYTHON_USEDEP}]"; if [[ ! -n ${autodoc} ]]; then die "${FUNCNAME}: do not pass --no-autodoc if external plugins are used"; fi; fi; done; if [[ -n ${autodoc} ]]; then if [[ -n ${DISTUTILS_SINGLE_IMPL} ]]; then deps="$(python_gen_cond_dep "${deps}")"; else deps="$(python_gen_any_dep "${deps}")"; fi; function python_check_deps () { use doc || return 0; local p; for p in ">=dev-python/sphinx-4.5.0-r1" "${_DISTUTILS_SPHINX_PLUGINS[@]}"; do python_has_version "${p}[${PYTHON_USEDEP}]" || return 1; done }; else deps=">=dev-python/sphinx-4.5.0-r1"; fi; function sphinx_compile_all () { use doc || return; local confpy=${_DISTUTILS_SPHINX_SUBDIR}/conf.py; [[ -f ${confpy} ]] || die "${confpy} not found, distutils_enable_sphinx call wrong"; if [[ ${_DISTUTILS_SPHINX_PLUGINS[0]} == --no-autodoc ]]; then if grep -F -q 'sphinx.ext.autodoc' "${confpy}"; then die "distutils_enable_sphinx: --no-autodoc passed but sphinx.ext.autodoc found in ${confpy}"; fi; else if [[ -z ${_DISTUTILS_SPHINX_PLUGINS[@]} ]]; then if ! grep -F -q 'sphinx.ext.autodoc' "${confpy}"; then die "distutils_enable_sphinx: sphinx.ext.autodoc not found in ${confpy}, pass --no-autodoc"; fi; fi; fi; build_sphinx "${_DISTUTILS_SPHINX_SUBDIR}" }; function python_compile_all () { sphinx_compile_all }; IUSE+=" doc"; if [[ ${EAPI} == 6 ]]; then DEPEND+=" doc? ( ${deps} )"; else BDEPEND+=" doc? ( ${deps} )"; fi; return 0 } distutils_enable_tests () { debug-print-function ${FUNCNAME} "${@}"; _DISTUTILS_TEST_INSTALL=; case ${1} in --install) if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} --install is not implemented in PEP517 mode"; fi; _DISTUTILS_TEST_INSTALL=1; shift ;; esac; [[ ${#} -eq 1 ]] || die "${FUNCNAME} takes exactly one argument: test-runner"; local test_pkg; case ${1} in nose) test_pkg=">=dev-python/nose-1.3.7_p20211111_p1-r1" ;; pytest) test_pkg=">=dev-python/pytest-7.1.3" ;; setup.py) ;; unittest) test_pkg="dev-python/unittest-or-fail" ;; *) die "${FUNCNAME}: unsupported argument: ${1}" ;; esac; _DISTUTILS_TEST_RUNNER=${1}; function python_test () { distutils-r1_python_test }; local test_deps=${RDEPEND}; if [[ -n ${test_pkg} ]]; then if [[ ! -n ${DISTUTILS_SINGLE_IMPL} ]]; then test_deps+=" ${test_pkg}[${PYTHON_USEDEP}]"; else test_deps+=" $(python_gen_cond_dep " ${test_pkg}[\${PYTHON_USEDEP}] ")"; fi; fi; if [[ -n ${test_deps} ]]; then IUSE+=" test"; RESTRICT+=" !test? ( test )"; if [[ ${EAPI} == 6 ]]; then DEPEND+=" test? ( ${test_deps} )"; else BDEPEND+=" test? 
( ${test_deps} )"; fi; fi; return 0 } distutils_install_for_testing () { debug-print-function ${FUNCNAME} "${@}"; if [[ -n ${DISTUTILS_USE_PEP517} ]]; then die "${FUNCNAME} is not implemented in PEP517 mode"; fi; local install_method=root; case ${1} in --via-home) [[ ${EAPI} == [67] ]] || die "${*} is banned in EAPI ${EAPI}"; install_method=home; shift ;; --via-root) install_method=root; shift ;; --via-venv) install_method=venv; shift ;; esac; TEST_DIR=${BUILD_DIR}/test; local add_args=(); if [[ ${install_method} == venv ]]; then mkdir -p "${TEST_DIR}"/bin || die; ln -s "${PYTHON}" "${TEST_DIR}/bin/${EPYTHON}" || die; ln -s "${EPYTHON}" "${TEST_DIR}/bin/python3" || die; ln -s "${EPYTHON}" "${TEST_DIR}/bin/python" || die; cat > "${TEST_DIR}"/pyvenv.cfg <<-EOF || include-system-site-packages = true EOF die PATH=${TEST_DIR}/bin:${PATH}; unset PYTHONPATH; add_args=(--root=/); else local bindir=${TEST_DIR}/scripts; local libdir=${TEST_DIR}/lib; PATH=${bindir}:${PATH}; PYTHONPATH=${libdir}:${PYTHONPATH}; case ${install_method} in home) add_args=(--home="${TEST_DIR}" --install-lib="${libdir}" --install-scripts="${bindir}"); mkdir -p "${libdir}" || die ;; root) add_args=(--root="${TEST_DIR}" --install-lib=lib --install-scripts=scripts) ;; esac; fi; esetup.py install "${add_args[@]}" "${@}" } distutils_pep517_install () { debug-print-function ${FUNCNAME} "${@}"; [[ ${#} -eq 1 ]] || die "${FUNCNAME} takes exactly one argument: root"; if [[ ! ${DISTUTILS_USE_PEP517:-no} != no ]]; then die "${FUNCNAME} is available only in PEP517 mode"; fi; local root=${1}; local -x WHEEL_BUILD_DIR=${BUILD_DIR}/wheel; mkdir -p "${WHEEL_BUILD_DIR}" || die; if [[ -n ${mydistutilsargs[@]} ]]; then die "mydistutilsargs are banned in PEP517 mode (use DISTUTILS_ARGS)"; fi; local config_settings=; if [[ -n ${DISTUTILS_ARGS[@]} ]]; then case ${DISTUTILS_USE_PEP517} in setuptools) config_settings=$( "${EPYTHON}" - "${DISTUTILS_ARGS[@]}" <<-EOF || die import json import sys print(json.dumps({"--global-option": sys.argv[1:]})) EOF ) ;; sip) local arg; for arg in "${DISTUTILS_ARGS[@]}"; do [[ ${arg} != -* ]] && die "Bare arguments in DISTUTILS_ARGS unsupported: ${arg}"; done; config_settings=$( "${EPYTHON}" - "${DISTUTILS_ARGS[@]}" <<-EOF || die import collections import json import sys args = collections.defaultdict(list) for arg in (x.split("=", 1) for x in sys.argv[1:]): args[arg[0]].extend( [arg[1]] if len(arg) > 1 else []) print(json.dumps(args)) EOF ) ;; *) die "DISTUTILS_ARGS are not supported by ${DISTUTILS_USE_PEP517}" ;; esac; fi; local build_backend=$(_distutils-r1_get_backend); einfo " Building the wheel for ${PWD#${WORKDIR}/} via ${build_backend}"; local config_args=(); [[ -n ${config_settings} ]] && config_args+=(--config-json "${config_settings}"); local cmd=(gpep517 build-wheel --backend "${build_backend}" --output-fd 3 --wheel-dir "${WHEEL_BUILD_DIR}" "${config_args[@]}"); printf '%s\n' "${cmd[*]}"; local wheel=$( "${cmd[@]}" 3>&1 >&2 || die "Wheel build failed" ); [[ -n ${wheel} ]] || die "No wheel name returned"; distutils_wheel_install "${root}" "${WHEEL_BUILD_DIR}/${wheel}"; if [[ ${DISTUTILS_USE_PEP517:-setuptools} == setuptools ]]; then rm -rf build || die; fi } distutils_wheel_install () { debug-print-function ${FUNCNAME} "${@}"; if [[ ${#} -ne 2 ]]; then die "${FUNCNAME} takes exactly two arguments: "; fi; if [[ -z ${PYTHON} ]]; then die "PYTHON unset, invalid call context"; fi; local root=${1}; local wheel=${2}; einfo " Installing ${wheel##*/} to ${root}"; if has_version -b ">=dev-python/gpep517-9"; then 
local cmd=(gpep517 install-wheel --destdir="${root}" --interpreter="${PYTHON}" --prefix="${EPREFIX}/usr" --optimize=all "${wheel}"); else local cmd=(gpep517 install-wheel --destdir="${root}" --interpreter="${PYTHON}" --prefix="${EPREFIX}/usr" "${wheel}"); fi; printf '%s\n' "${cmd[*]}"; "${cmd[@]}" || die "Wheel install failed"; find "${root}$(python_get_sitedir)" -depth \( -path '*.dist-info/COPYING*' -o -path '*.dist-info/LICENSE*' -o -path '*.dist-info/license_files/*' -o -path '*.dist-info/license_files' \) -delete || die } distutils_write_namespace () { debug-print-function ${FUNCNAME} "${@}"; if [[ ! ${DISTUTILS_USE_PEP517:-no} != no ]]; then die "${FUNCNAME} is available only in PEP517 mode"; fi; if [[ ${EBUILD_PHASE} != test || ! -n ${BUILD_DIR} ]]; then die "${FUNCNAME} should only be used in python_test"; fi; local namespace; for namespace in "$@"; do if [[ ${namespace} == *[./]* ]]; then die "${FUNCNAME} does not support nested namespaces at the moment"; fi; local path=${BUILD_DIR}/install$(python_get_sitedir)/${namespace}/__init__.py; if [[ -f ${path} ]]; then die "Requested namespace ${path} exists already!"; fi; cat > "${path}" <<-EOF || __path__ = __import__('pkgutil').extend_path(__path__, __name__) EOF die _DISTUTILS_POST_PHASE_RM+=("${path}"); done } do_configure () { export CC_OPT_FLAGS=" "; export TF_ENABLE_XLA=$(usex xla 1 0); export TF_NEED_OPENCL_SYCL=0; export TF_NEED_OPENCL=0; export TF_NEED_COMPUTECPP=0; export TF_NEED_ROCM=0; export TF_NEED_MPI=$(usex mpi 1 0); export TF_SET_ANDROID_WORKSPACE=0; if use python; then export PYTHON_BIN_PATH="${PYTHON}"; export PYTHON_LIB_PATH="$(python_get_sitedir)"; else export PYTHON_BIN_PATH="$(which python)"; export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"; fi; export TF_NEED_CUDA=$(usex cuda 1 0); export TF_DOWNLOAD_CLANG=0; export TF_CUDA_CLANG=0; export TF_NEED_TENSORRT=0; if use cuda; then export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"; export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"; export TF_CUDA_VERSION="$(cuda_toolkit_version)"; export TF_CUDNN_VERSION="$(cuda_cudnn_version)"; einfo "Setting CUDA version: $TF_CUDA_VERSION"; einfo "Setting CUDNN version: $TF_CUDNN_VERSION"; if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"; ewarn "version is not supported by the currently installed CUDA. 
TensorFlow will"; ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."; ewarn "If the build fails with linker errors try rebuilding the relevant"; ewarn "dependencies using the same compiler version."; fi; if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."; ewarn "These may not be optimal for your GPU."; ewarn ""; ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"; ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."; ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"; ewarn ""; ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"; ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"; fi; fi; local SYSLIBS=(absl_py astor_archive astunparse_archive boringssl com_github_googlecloudplatform_google_cloud_cpp com_github_grpc_grpc com_google_absl com_google_protobuf curl cython dill_archive double_conversion flatbuffers functools32_archive gast_archive gif hwloc icu jsoncpp_git libjpeg_turbo lmdb nasm nsync opt_einsum_archive org_sqlite pasta png pybind11 six_archive snappy tblib_archive termcolor_archive typing_extensions_archive wrapt zlib); export TF_SYSTEM_LIBS="${SYSLIBS[@]}"; export TF_IGNORE_MAX_BAZEL_VERSION=1; ./configure || die; echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die; echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die; echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die; echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die; for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags); do echo "build --copt=\"${cflag}\"" >> .bazelrc || die; echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die; done } eapply () { local failed patch_cmd=patch; local -x LC_COLLATE=POSIX; type -P gpatch > /dev/null && patch_cmd=gpatch; function _eapply_patch () { local f=${1}; local prefix=${2}; ebegin "${prefix:-Applying }${f##*/}"; local all_opts=(-p1 -f -g0 --no-backup-if-mismatch "${patch_options[@]}"); if ${patch_cmd} "${all_opts[@]}" --dry-run -s -F0 < "${f}" &> /dev/null; then all_opts+=(-s -F0); fi; ${patch_cmd} "${all_opts[@]}" < "${f}"; failed=${?}; if ! 
eend "${failed}"; then __helpers_die "patch -p1 ${patch_options[*]} failed with ${f}"; fi }; local patch_options=() files=(); local i found_doublehyphen; for ((i = 1; i <= ${#@}; ++i )) do if [[ ${@:i:1} == -- ]]; then patch_options=("${@:1:i-1}"); files=("${@:i+1}"); found_doublehyphen=1; break; fi; done; if [[ -z ${found_doublehyphen} ]]; then for ((i = 1; i <= ${#@}; ++i )) do if [[ ${@:i:1} != -* ]]; then patch_options=("${@:1:i-1}"); files=("${@:i}"); break; fi; done; for i in "${files[@]}"; do if [[ ${i} == -* ]]; then die "eapply: all options must be passed before non-options"; fi; done; fi; if [[ ${#files[@]} -eq 0 ]]; then die "eapply: no files specified"; fi; local f; for f in "${files[@]}"; do if [[ -d ${f} ]]; then function _eapply_get_files () { local LC_ALL=POSIX; local prev_shopt=$(shopt -p nullglob); shopt -s nullglob; local f; for f in "${1}"/*; do if [[ ${f} == *.diff || ${f} == *.patch ]]; then files+=("${f}"); fi; done; ${prev_shopt} }; local files=(); _eapply_get_files "${f}"; [[ ${#files[@]} -eq 0 ]] && die "No *.{patch,diff} files in directory ${f}"; einfo "Applying patches from ${f} ..."; local f2; for f2 in "${files[@]}"; do _eapply_patch "${f2}" ' '; [[ ${failed} -ne 0 ]] && return "${failed}"; done; else _eapply_patch "${f}"; [[ ${failed} -ne 0 ]] && return "${failed}"; fi; done; return 0 } eapply_user () { [[ ${EBUILD_PHASE} == prepare ]] || die "eapply_user() called during invalid phase: ${EBUILD_PHASE}"; local tagfile=${T}/.portage_user_patches_applied; [[ -f ${tagfile} ]] && return; >> "${tagfile}"; local basedir=${PORTAGE_CONFIGROOT%/}/etc/portage/patches; local columns=${COLUMNS:-0}; [[ ${columns} == 0 ]] && columns=$(set -- $( ( stty size /dev/null || echo 24 80 ) ; echo $2); (( columns > 0 )) || (( columns = 80 )); local applied d f; local -A _eapply_user_patches; local prev_shopt=$(shopt -p nullglob); shopt -s nullglob; for d in "${basedir}"/${CATEGORY}/{${P}-${PR},${P},${PN}}{:${SLOT%/*},}; do for f in "${d}"/*; do if [[ ( ${f} == *.diff || ${f} == *.patch ) && -z ${_eapply_user_patches[${f##*/}]} ]]; then _eapply_user_patches[${f##*/}]=${f}; fi; done; done; if [[ ${#_eapply_user_patches[@]} -gt 0 ]]; then while read -r -d '' f; do f=${_eapply_user_patches[${f}]}; if [[ -s ${f} ]]; then if [[ -z ${applied} ]]; then einfo "${PORTAGE_COLOR_INFO}$(for ((column = 0; column < ${columns} - 3; column++)); do echo -n =; done)${PORTAGE_COLOR_NORMAL}"; einfo "Applying user patches from ${basedir} ..."; fi; eapply "${f}"; applied=1; fi; done < <(printf -- '%s\0' "${!_eapply_user_patches[@]}" | LC_ALL=C sort -z); fi; ${prev_shopt}; if [[ -n ${applied} ]]; then einfo "User patches applied."; einfo "${PORTAGE_COLOR_INFO}$(for ((column = 0; column < ${columns} - 3; column++)); do echo -n =; done)${PORTAGE_COLOR_NORMAL}"; fi } ebazel () { bazel_setup_bazelrc; local output_base="${BUILD_DIR:-${S}}"; output_base="${output_base%/}-bazel-base"; mkdir -p "${output_base}" || die; set -- bazel --bazelrc="${T}/bazelrc" --output_base="${output_base}" ${@}; echo "${*}" 1>&2; "${@}" || die "ebazel failed" } econf_build () { local CBUILD=${CBUILD:-${CHOST}}; tc-env_build econf --build=${CBUILD} --host=${CBUILD} "$@" } einstalldocs () { ( if [[ $(declare -p DOCS 2>/dev/null) != *=* ]]; then local d; for d in README* ChangeLog AUTHORS NEWS TODO CHANGES THANKS BUGS FAQ CREDITS CHANGELOG; do [[ -f ${d} && -s ${d} ]] && docinto / && dodoc "${d}"; done; else if ___is_indexed_array_var DOCS; then [[ ${#DOCS[@]} -gt 0 ]] && docinto / && dodoc -r "${DOCS[@]}"; else [[ -n ${DOCS} ]] && 
docinto / && dodoc -r ${DOCS}; fi; fi ); ( if ___is_indexed_array_var HTML_DOCS; then [[ ${#HTML_DOCS[@]} -gt 0 ]] && docinto html && dodoc -r "${HTML_DOCS[@]}"; else [[ -n ${HTML_DOCS} ]] && docinto html && dodoc -r ${HTML_DOCS}; fi ) } eprefixify () { [[ $# -lt 1 ]] && die "at least one argument required"; einfo "Adjusting to prefix ${EPREFIX:-/}"; local x; for x in "$@"; do if [[ -e ${x} ]]; then ebegin " ${x##*/}"; sed -i -e "s|@GENTOO_PORTAGE_EPREFIX@|${EPREFIX}|g" "${x}"; eend $? || die "failed to eprefixify ${x}"; else die "${x} does not exist"; fi; done; return 0 } epytest () { debug-print-function ${FUNCNAME} "${@}"; _python_check_EPYTHON; local color; case ${NOCOLOR} in true | yes) color=no ;; *) color=yes ;; esac; local args=(-vv -ra -l -Wdefault "--color=${color}" -o console_output_style=count -p no:cov -p no:flake8 -p no:flakes -p no:pylint -p no:markdown -p no:sugar -p no:xvfb); local x; for x in "${EPYTEST_DESELECT[@]}"; do args+=(--deselect "${x}"); done; for x in "${EPYTEST_IGNORE[@]}"; do args+=(--ignore "${x}"); done; set -- "${EPYTHON}" -m pytest "${args[@]}" "${@}"; echo "${@}" 1>&2; "${@}" || die -n "pytest failed with ${EPYTHON}"; local ret=${?}; rm -rf .hypothesis .pytest_cache || die; if [[ -n ${BUILD_DIR} && -d ${BUILD_DIR} ]]; then find "${BUILD_DIR}" -name '*-pytest-*.pyc' -delete || die; fi; return ${ret} } esetup.py () { debug-print-function ${FUNCNAME} "${@}"; _python_check_EPYTHON; if [[ -n ${BUILD_DIR} && ! -n ${DISTUTILS_USE_PEP517} ]]; then _distutils-r1_create_setup_cfg; fi; local setup_py=(setup.py); if [[ ! -f setup.py ]]; then if [[ ! -f setup.cfg ]]; then die "${FUNCNAME}: setup.py nor setup.cfg not found"; fi; setup_py=(-c "from setuptools import setup; setup()"); fi; if [[ ${EAPI} != [67] && -n ${mydistutilsargs[@]} ]]; then die "mydistutilsargs is banned in EAPI ${EAPI} (use DISTUTILS_ARGS)"; fi; set -- "${EPYTHON}" "${setup_py[@]}" "${DISTUTILS_ARGS[@]}" "${mydistutilsargs[@]}" "${@}"; echo "${@}" 1>&2; "${@}" || die -n; local ret=${?}; if [[ -n ${BUILD_DIR} && ! 
-n ${DISTUTILS_USE_PEP517} ]]; then rm "${HOME}"/.pydistutils.cfg || die -n; fi; return ${ret} } eunittest () { debug-print-function ${FUNCNAME} "${@}"; _python_check_EPYTHON; set -- "${EPYTHON}" -m unittest_or_fail discover -v "${@}"; echo "${@}" 1>&2; "${@}" || die -n "Tests failed with ${EPYTHON}"; return ${?} } filter-flags () { _filter-hardened "$@"; local v; for v in $(all-flag-vars); do _filter-var ${v} "$@"; done; return 0 } filter-ldflags () { _filter-var LDFLAGS "$@"; return 0 } filter-lfs-flags () { [[ $# -ne 0 ]] && die "filter-lfs-flags takes no arguments"; filter-flags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_TIME_BITS=64 } filter-lto () { [[ $# -ne 0 ]] && die "filter-lto takes no arguments"; filter-flags '-flto*' -fwhole-program-vtables '-fsanitize=cfi*' } filter-mfpmath () { local orig_mfpmath new_math prune_math; orig_mfpmath=$(get-flag -mfpmath); new_math=$(get-flag mfpmath); new_math=${new_math/both/387,sse}; new_math=" ${new_math//[,+]/ } "; prune_math=""; for prune_math in "$@"; do new_math=${new_math/ ${prune_math} / }; done; new_math=$(echo ${new_math}); new_math=${new_math// /,}; if [[ -z ${new_math} ]]; then filter-flags ${orig_mfpmath}; else replace-flags ${orig_mfpmath} -mfpmath=${new_math}; fi; return 0 } gcc-fullversion () { _gcc_fullversion '$1.$2.$3' "$@" } gcc-major-version () { _gcc_fullversion '$1' "$@" } gcc-micro-version () { _gcc_fullversion '$3' "$@" } gcc-minor-version () { _gcc_fullversion '$2' "$@" } gcc-specs-directive () { local directive subdname subdirective; directive="$(_gcc-specs-directive_raw $1)"; while [[ ${directive} == *%\(*\)* ]]; do subdname=${directive/*%\(}; subdname=${subdname/\)*}; subdirective="$(_gcc-specs-directive_raw ${subdname})"; directive="${directive//\%(${subdname})/${subdirective}}"; done; echo "${directive}"; return 0 } gcc-specs-nostrict () { local directive; directive=$(gcc-specs-directive cc1); [[ "${directive/\{!fstrict-overflow:}" != "${directive}" ]] } gcc-specs-now () { local directive; directive=$(gcc-specs-directive link_command); [[ "${directive/\{!nonow:}" != "${directive}" ]] } gcc-specs-pie () { local directive; directive=$(gcc-specs-directive cc1); [[ "${directive/\{!nopie:}" != "${directive}" ]] } gcc-specs-relro () { local directive; directive=$(gcc-specs-directive link_command); [[ "${directive/\{!norelro:}" != "${directive}" ]] } gcc-specs-ssp () { local directive; directive=$(gcc-specs-directive cc1); [[ "${directive/\{!fno-stack-protector:}" != "${directive}" ]] } gcc-specs-ssp-to-all () { local directive; directive=$(gcc-specs-directive cc1); [[ "${directive/\{!fno-stack-protector-all:}" != "${directive}" ]] } gcc-specs-stack-check () { local directive; directive=$(gcc-specs-directive cc1); [[ "${directive/\{!fno-stack-check:}" != "${directive}" ]] } gcc-version () { _gcc_fullversion '$1.$2' "$@" } gen_usr_ldscript () { ewarn "${FUNCNAME}: Please migrate to usr-ldscript.eclass"; local lib libdir=$(get_libdir) output_format="" auto=false suffix=$(get_libname); [[ -z ${ED+set} ]] && local ED=${D%/}${EPREFIX}/; tc-is-static-only && return; if [[ $(type -t multilib_is_native_abi) == "function" ]]; then multilib_is_native_abi || return 0; fi; case ${CTARGET:-${CHOST}} in *-darwin*) ;; *-android*) return 0 ;; *linux* | *-freebsd* | *-openbsd* | *-netbsd*) use prefix && return 0 ;; *) return 0 ;; esac; dodir /usr/${libdir}; if [[ $1 == "-a" ]]; then auto=true; shift; dodir /${libdir}; fi; local flags=(${CFLAGS} ${LDFLAGS} -Wl,--verbose); if $(tc-getLD) --version | grep -q 'GNU 
gold'; then local d="${T}/bfd-linker"; mkdir -p "${d}"; ln -sf $(which ${CHOST}-ld.bfd) "${d}"/ld; flags+=(-B"${d}"); fi; output_format=$($(tc-getCC) "${flags[@]}" 2>&1 | sed -n 's/^OUTPUT_FORMAT("\([^"]*\)",.*/\1/p'); [[ -n ${output_format} ]] && output_format="OUTPUT_FORMAT ( ${output_format} )"; for lib in "$@"; do local tlib; if ${auto}; then lib="lib${lib}${suffix}"; else [[ -r ${ED}/${libdir}/${lib} ]] || continue; fi; case ${CTARGET:-${CHOST}} in *-darwin*) if ${auto}; then tlib=$(scanmacho -qF'%S#F' "${ED}"/usr/${libdir}/${lib}); else tlib=$(scanmacho -qF'%S#F' "${ED}"/${libdir}/${lib}); fi; [[ -z ${tlib} ]] && die "unable to read install_name from ${lib}"; tlib=${tlib##*/}; if ${auto}; then mv "${ED}"/usr/${libdir}/${lib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die; if [[ ${tlib} != ${lib%${suffix}}.*${suffix#.} ]]; then mv "${ED}"/usr/${libdir}/${tlib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die; fi; rm -f "${ED}"/${libdir}/${lib}; fi; if [[ ! -w "${ED}/${libdir}/${tlib}" ]]; then chmod u+w "${ED}${libdir}/${tlib}"; local nowrite=yes; fi; install_name_tool -id "${EPREFIX}"/${libdir}/${tlib} "${ED}"/${libdir}/${tlib} || die "install_name_tool failed"; [[ -n ${nowrite} ]] && chmod u-w "${ED}${libdir}/${tlib}"; pushd "${ED}/usr/${libdir}" > /dev/null; ln -snf "../../${libdir}/${tlib}" "${lib}"; popd > /dev/null ;; *) if ${auto}; then tlib=$(scanelf -qF'%S#F' "${ED}"/usr/${libdir}/${lib}); [[ -z ${tlib} ]] && die "unable to read SONAME from ${lib}"; mv "${ED}"/usr/${libdir}/${lib}* "${ED}"/${libdir}/ || die; if [[ ${tlib} != ${lib}* ]]; then mv "${ED}"/usr/${libdir}/${tlib}* "${ED}"/${libdir}/ || die; fi; rm -f "${ED}"/${libdir}/${lib}; else tlib=${lib}; fi; cat > "${ED}/usr/${libdir}/${lib}" <<-END_LDSCRIPT /* GNU ld script Since Gentoo has critical dynamic libraries in /lib, and the static versions in /usr/lib, we need to have a "fake" dynamic lib in /usr/lib, otherwise we run into linking problems. This "fake" dynamic lib is a linker script that redirects the linker to the real lib. And yes, this works in the cross- compiling scenario as the sysroot-ed linker will prepend the real path. See bug https://bugs.gentoo.org/4411 for more info. 
*/ ${output_format} GROUP ( ${EPREFIX}/${libdir}/${tlib} ) END_LDSCRIPT ;; esac fperms a+x "/usr/${libdir}/${lib}" || die "could not change perms on ${lib}"; done } get-cpu-flags () { local i f=(); for i in sse sse2 sse3 sse4_1 sse4_2 avx avx2 fma4; do use cpu_flags_x86_${i} && f+=(-m${i/_/.}); done; use cpu_flags_x86_fma3 && f+=(-mfma); echo "${f[*]}" } get-flag () { [[ $# -ne 1 ]] && die "usage: "; local f var findflag="$1"; for var in $(all-flag-vars); do for f in ${!var}; do if [ "${f/${findflag}}" != "${f}" ]; then printf "%s\n" "${f/-${findflag}=}"; return 0; fi; done; done; return 1 } get_abi_CFLAGS () { get_abi_var CFLAGS "$@" } get_abi_CHOST () { get_abi_var CHOST "$@" } get_abi_CTARGET () { get_abi_var CTARGET "$@" } get_abi_FAKE_TARGETS () { get_abi_var FAKE_TARGETS "$@" } get_abi_LDFLAGS () { get_abi_var LDFLAGS "$@" } get_abi_LIBDIR () { get_abi_var LIBDIR "$@" } get_abi_var () { local flag=$1; local abi=${2:-${ABI:-${DEFAULT_ABI:-default}}}; local var="${flag}_${abi}"; echo ${!var} } get_all_abis () { local x order="" mvar dvar; mvar="MULTILIB_ABIS"; dvar="DEFAULT_ABI"; if [[ -n $1 ]]; then mvar="$1_${mvar}"; dvar="$1_${dvar}"; fi; if [[ -z ${!mvar} ]]; then echo "default"; return 0; fi; for x in ${!mvar}; do if [[ ${x} != ${!dvar} ]]; then order="${order:+${order} }${x}"; fi; done; order="${order:+${order} }${!dvar}"; echo ${order}; return 0 } get_all_libdirs () { local libdirs abi; for abi in ${MULTILIB_ABIS}; do libdirs+=" $(get_abi_LIBDIR ${abi})"; done; [[ " ${libdirs} " != *" lib "* ]] && libdirs+=" lib"; echo "${libdirs}" } get_exeext () { case ${CHOST} in *-cygwin* | mingw* | *-mingw*) echo ".exe" ;; esac } get_install_abis () { local x order=""; if [[ -z ${MULTILIB_ABIS} ]]; then echo "default"; return 0; fi; if [[ ${EMULTILIB_PKG} == "true" ]]; then for x in ${MULTILIB_ABIS}; do if [[ ${x} != "${DEFAULT_ABI}" ]]; then has ${x} ${ABI_DENY} || order="${order} ${x}"; fi; done; has ${DEFAULT_ABI} ${ABI_DENY} || order="${order} ${DEFAULT_ABI}"; if [[ -n ${ABI_ALLOW} ]]; then local ordera=""; for x in ${order}; do if has ${x} ${ABI_ALLOW}; then ordera="${ordera} ${x}"; fi; done; order=${ordera}; fi; else order=${DEFAULT_ABI}; fi; if [[ -z ${order} ]]; then die "The ABI list is empty. Are you using a proper multilib profile? 
Perhaps your USE flags or MULTILIB_ABIS are too restrictive for this package."; fi; echo ${order}; return 0 } get_libdir () { local libdir_var="LIBDIR_${ABI}"; local libdir="lib"; [[ -n ${ABI} && -n ${!libdir_var} ]] && libdir=${!libdir_var}; echo "${libdir}" } get_libname () { local libname; local ver=$1; case ${CHOST} in *-cygwin*) libname="dll.a" ;; mingw* | *-mingw*) libname="dll" ;; *-darwin*) libname="dylib" ;; *-mint*) libname="irrelevant" ;; hppa*-hpux*) libname="sl" ;; *) libname="so" ;; esac; if [[ -z $* ]]; then echo ".${libname}"; else for ver in "$@"; do case ${CHOST} in *-cygwin*) echo ".${ver}.${libname}" ;; *-darwin*) echo ".${ver}.${libname}" ;; *-mint*) echo ".${libname}" ;; *) echo ".${libname}.${ver}" ;; esac; done; fi } get_modname () { local modname; local ver=$1; case ${CHOST} in *-darwin*) modname="bundle" ;; *) modname="so" ;; esac; echo ".${modname}" } get_nproc () { local nproc; if type -P nproc &> /dev/null; then nproc=$(nproc); fi; if [[ -z ${nproc} ]] && type -P sysctl &> /dev/null; then nproc=$(sysctl -n hw.ncpu 2>/dev/null); fi; if [[ -z ${nproc} ]] && type -P python &> /dev/null; then nproc=$(python -c 'import multiprocessing; print(multiprocessing.cpu_count());' 2>/dev/null); fi; if [[ -n ${nproc} ]]; then echo "${nproc}"; else echo "${1:-1}"; fi } has_multilib_profile () { [ -n "${MULTILIB_ABIS}" -a "${MULTILIB_ABIS}" != "${MULTILIB_ABIS/ /}" ] } hprefixify () { use prefix || return 0; local xl=() x; while [[ $# -gt 0 ]]; do case $1 in -e) local PREFIX_EXTRA_REGEX="$2"; shift ;; -w) local PREFIX_LINE_MATCH="$2"; shift ;; -q) local PREFIX_QUOTE_CHAR="${EPREFIX:+$2}"; shift ;; *) xl+=("$1") ;; esac; shift; done; local dirs="/(usr|lib(|[onx]?32|n?64)|etc|bin|sbin|var|opt|run)" eprefix="${PREFIX_QUOTE_CHAR}${EPREFIX}${PREFIX_QUOTE_CHAR}"; [[ ${#xl[@]} -lt 1 ]] && die "at least one file operand is required"; einfo "Adjusting to prefix ${EPREFIX:-/}"; for x in "${xl[@]}"; do if [[ -e ${x} ]]; then ebegin " ${x##*/}"; sed -r -e "${PREFIX_LINE_MATCH}s,([^[:alnum:]}\)\.])${dirs},\1${eprefix}/\2,g" -e "${PREFIX_LINE_MATCH}s,^${dirs},${eprefix}/\1," -e "${PREFIX_EXTRA_REGEX}" -i "${x}"; eend $? || die "failed to prefixify ${x}"; else die "${x} does not exist"; fi; done } in_iuse () { local use=${1}; if [[ -z "${use}" ]]; then echo "!!! in_iuse() called without a parameter." 1>&2; echo "!!! 
in_iuse " 1>&2; die "in_iuse() called without a parameter"; fi; local liuse=(${IUSE_EFFECTIVE}); has "${use}" "${liuse[@]#[+-]}" } is-flag () { is-flagq "$@" && echo true } is-flagq () { [[ -n $2 ]] && die "Usage: is-flag "; local var; for var in $(all-flag-vars); do _is_flagq ${var} "$1" && return 0; done; return 1 } is-ldflag () { is-ldflagq "$@" && echo true } is-ldflagq () { [[ -n $2 ]] && die "Usage: is-ldflag "; _is_flagq LDFLAGS $1 } is_final_abi () { has_multilib_profile || return 0; set -- $(get_install_abis); local LAST_ABI=$#; [[ ${!LAST_ABI} == ${ABI} ]] } makeopts_jobs () { [[ $# -eq 0 ]] && set -- "${MAKEOPTS}"; local jobs=$(echo " $* " | sed -r -n -e 's:.*[[:space:]](-[a-z]*j|--jobs[=[:space:]])[[:space:]]*([0-9]+).*:\2:p' -e "s:.*[[:space:]](-[a-z]*j|--jobs)[[:space:]].*:${2:-$(( $(get_nproc) + 1 ))}:p"); echo ${jobs:-1} } makeopts_loadavg () { [[ $# -eq 0 ]] && set -- "${MAKEOPTS}"; local lavg=$(echo " $* " | sed -r -n -e 's:.*[[:space:]](-[a-z]*l|--(load-average|max-load)[=[:space:]])[[:space:]]*([0-9]+(\.[0-9]+)?)[[:space:]].*:\3:p' -e "s:.*[[:space:]](-[a-z]*l|--(load-average|max-load))[[:space:]].*:${2:-999}:p"); echo ${lavg:-${2:-999}} } multibuild_copy_sources () { debug-print-function ${FUNCNAME} "${@}"; local _MULTIBUILD_INITIAL_BUILD_DIR=${BUILD_DIR:-${S}}; einfo "Will copy sources from ${_MULTIBUILD_INITIAL_BUILD_DIR}"; function _multibuild_create_source_copy () { einfo "${MULTIBUILD_VARIANT}: copying to ${BUILD_DIR}"; cp -p -R --reflink=auto "${_MULTIBUILD_INITIAL_BUILD_DIR}" "${BUILD_DIR}" || die }; multibuild_foreach_variant _multibuild_create_source_copy } multibuild_for_best_variant () { debug-print-function ${FUNCNAME} "${@}"; [[ -n ${MULTIBUILD_VARIANTS} ]] || die "MULTIBUILD_VARIANTS need to be set"; local MULTIBUILD_VARIANTS=("${MULTIBUILD_VARIANTS[-1]}"); multibuild_foreach_variant "${@}" } multibuild_foreach_variant () { debug-print-function ${FUNCNAME} "${@}"; [[ -n ${MULTIBUILD_VARIANTS} ]] || die "MULTIBUILD_VARIANTS need to be set"; local bdir=${BUILD_DIR:-${S}}; [[ ${bdir%%/} == ${WORKDIR%%/} ]] && bdir=${WORKDIR}/build; local prev_id=${MULTIBUILD_ID:+${MULTIBUILD_ID}-}; local ret=0 lret=0 v; debug-print "${FUNCNAME}: initial build_dir = ${bdir}"; for v in "${MULTIBUILD_VARIANTS[@]}"; do local MULTIBUILD_VARIANT=${v}; local MULTIBUILD_ID=${prev_id}${v}; local BUILD_DIR=${bdir%%/}-${v}; function _multibuild_run () { local i=1; while [[ ${!i} == _* ]]; do (( i += 1 )); done; [[ ${i} -le ${#} ]] && einfo "${v}: running ${@:${i}}"; "${@}" }; _multibuild_run "${@}" > >(exec tee -a "${T}/build-${MULTIBUILD_ID}.log") 2>&1; lret=${?}; done; [[ ${ret} -eq 0 && ${lret} -ne 0 ]] && ret=${lret}; return ${ret} } multibuild_merge_root () { debug-print-function ${FUNCNAME} "${@}"; local src=${1}; local dest=${2}; cp -a --reflink=auto "${src}"/. 
"${dest}"/ || die "${MULTIBUILD_VARIANT:-(unknown)}: merging image failed"; rm -rf "${src}" || die } multilib_env () { local CTARGET=${1:-${CTARGET}}; local cpu=${CTARGET%%*-}; if [[ ${CTARGET} = *-musl* ]]; then : ${MULTILIB_ABIS=default}; : ${DEFAULT_ABI=default}; export MULTILIB_ABIS DEFAULT_ABI; return; fi; case ${cpu} in aarch64*) export CFLAGS_arm=${CFLAGS_arm-}; case ${cpu} in aarch64*be) export CHOST_arm="armv8b-${CTARGET#*-}" ;; *) export CHOST_arm="armv8l-${CTARGET#*-}" ;; esac; CHOST_arm=${CHOST_arm/%-gnu/-gnueabi}; export CTARGET_arm=${CHOST_arm}; export LIBDIR_arm="lib"; export CFLAGS_arm64=${CFLAGS_arm64-}; export CHOST_arm64=${CTARGET}; export CTARGET_arm64=${CHOST_arm64}; export LIBDIR_arm64="lib64"; : ${MULTILIB_ABIS=arm64}; : ${DEFAULT_ABI=arm64} ;; x86_64*) export CFLAGS_x86=${CFLAGS_x86--m32}; export CHOST_x86=${CTARGET/x86_64/i686}; CHOST_x86=${CHOST_x86/%-gnux32/-gnu}; export CTARGET_x86=${CHOST_x86}; if [[ ${SYMLINK_LIB} == "yes" ]]; then export LIBDIR_x86="lib32"; else export LIBDIR_x86="lib"; fi; export CFLAGS_amd64=${CFLAGS_amd64--m64}; export CHOST_amd64=${CTARGET/%-gnux32/-gnu}; export CTARGET_amd64=${CHOST_amd64}; export LIBDIR_amd64="lib64"; export CFLAGS_x32=${CFLAGS_x32--mx32}; export CHOST_x32=${CTARGET/%-gnu/-gnux32}; export CTARGET_x32=${CHOST_x32}; export LIBDIR_x32="libx32"; case ${CTARGET} in *-gnux32) : ${MULTILIB_ABIS=x32 amd64 x86}; : ${DEFAULT_ABI=x32} ;; *) : ${MULTILIB_ABIS=amd64 x86}; : ${DEFAULT_ABI=amd64} ;; esac ;; loongarch64*) export CFLAGS_lp64d=${CFLAGS_lp64d--mabi=lp64d}; export CHOST_lp64d=${CTARGET}; export CTARGET_lp64d=${CTARGET}; export LIBDIR_lp64d=${LIBDIR_lp64d-lib64}; : ${MULTILIB_ABIS=lp64d}; : ${DEFAULT_ABI=lp64d} ;; mips64* | mipsisa64*) export CFLAGS_o32=${CFLAGS_o32--mabi=32}; export CHOST_o32=${CTARGET/mips64/mips}; export CHOST_o32=${CHOST_o32/mipsisa64/mipsisa32}; export CTARGET_o32=${CHOST_o32}; export LIBDIR_o32="lib"; export CFLAGS_n32=${CFLAGS_n32--mabi=n32}; export CHOST_n32=${CTARGET}; export CTARGET_n32=${CHOST_n32}; export LIBDIR_n32="lib32"; export CFLAGS_n64=${CFLAGS_n64--mabi=64}; export CHOST_n64=${CTARGET}; export CTARGET_n64=${CHOST_n64}; export LIBDIR_n64="lib64"; : ${MULTILIB_ABIS=n64 n32 o32}; : ${DEFAULT_ABI=n32} ;; powerpc64*) export CFLAGS_ppc=${CFLAGS_ppc--m32}; export CHOST_ppc=${CTARGET/powerpc64/powerpc}; export CTARGET_ppc=${CHOST_ppc}; export LIBDIR_ppc="lib"; export CFLAGS_ppc64=${CFLAGS_ppc64--m64}; export CHOST_ppc64=${CTARGET}; export CTARGET_ppc64=${CHOST_ppc64}; export LIBDIR_ppc64="lib64"; : ${MULTILIB_ABIS=ppc64 ppc}; : ${DEFAULT_ABI=ppc64} ;; riscv64*) : ${MULTILIB_ABIS=lp64d lp64 ilp32d ilp32}; : ${DEFAULT_ABI=lp64d}; local _libdir_riscvdefaultabi_variable="LIBDIR_${DEFAULT_ABI}"; local _libdir_riscvdefaultabi=${!_libdir_riscvdefaultabi_variable}; export ${_libdir_riscvdefaultabi_variable}=${_libdir_riscvdefaultabi:-lib64}; export CFLAGS_lp64d=${CFLAGS_lp64d--mabi=lp64d -march=rv64gc}; export CHOST_lp64d=${CTARGET}; export CTARGET_lp64d=${CTARGET}; export LIBDIR_lp64d=${LIBDIR_lp64d-lib64/lp64d}; export CFLAGS_lp64=${CFLAGS_lp64--mabi=lp64 -march=rv64imac}; export CHOST_lp64=${CTARGET}; export CTARGET_lp64=${CTARGET}; export LIBDIR_lp64=${LIBDIR_lp64-lib64/lp64}; export CFLAGS_ilp32d=${CFLAGS_ilp32d--mabi=ilp32d -march=rv32imafdc}; export CHOST_ilp32d=${CTARGET/riscv64/riscv32}; export CTARGET_ilp32d=${CTARGET/riscv64/riscv32}; export LIBDIR_ilp32d=${LIBDIR_ilp32d-lib32/ilp32d}; export CFLAGS_ilp32=${CFLAGS_ilp32--mabi=ilp32 -march=rv32imac}; export 
CHOST_ilp32=${CTARGET/riscv64/riscv32}; export CTARGET_ilp32=${CTARGET/riscv64/riscv32}; export LIBDIR_ilp32=${LIBDIR_ilp32-lib32/ilp32} ;; riscv32*) : ${MULTILIB_ABIS=ilp32d ilp32}; : ${DEFAULT_ABI=ilp32d}; local _libdir_riscvdefaultabi_variable="LIBDIR_${DEFAULT_ABI}"; local _libdir_riscvdefaultabi=${!_libdir_riscvdefaultabi_variable}; export ${_libdir_riscvdefaultabi_variable}=${_libdir_riscvdefaultabi:-lib}; export CFLAGS_ilp32d=${CFLAGS_ilp32d--mabi=ilp32d -march=rv32imafdc}; export CHOST_ilp32d=${CTARGET}; export CTARGET_ilp32d=${CTARGET}; export LIBDIR_ilp32d=${LIBDIR_ilp32d-lib32/ilp32d}; export CFLAGS_ilp32=${CFLAGS_ilp32--mabi=ilp32 -march=rv32imac}; export CHOST_ilp32=${CTARGET}; export CTARGET_ilp32=${CTARGET}; export LIBDIR_ilp32=${LIBDIR_ilp32-lib32/ilp32} ;; s390x*) export CFLAGS_s390=${CFLAGS_s390--m31}; export CHOST_s390=${CTARGET/s390x/s390}; export CTARGET_s390=${CHOST_s390}; export LIBDIR_s390="lib"; export CFLAGS_s390x=${CFLAGS_s390x--m64}; export CHOST_s390x=${CTARGET}; export CTARGET_s390x=${CHOST_s390x}; export LIBDIR_s390x="lib64"; : ${MULTILIB_ABIS=s390x s390}; : ${DEFAULT_ABI=s390x} ;; sparc64*) export CFLAGS_sparc32=${CFLAGS_sparc32--m32}; export CHOST_sparc32=${CTARGET/sparc64/sparc}; export CTARGET_sparc32=${CHOST_sparc32}; export LIBDIR_sparc32="lib"; export CFLAGS_sparc64=${CFLAGS_sparc64--m64}; export CHOST_sparc64=${CTARGET}; export CTARGET_sparc64=${CHOST_sparc64}; export LIBDIR_sparc64="lib64"; : ${MULTILIB_ABIS=sparc64 sparc32}; : ${DEFAULT_ABI=sparc64} ;; *) : ${MULTILIB_ABIS=default}; : ${DEFAULT_ABI=default} ;; esac; export MULTILIB_ABIS DEFAULT_ABI } multilib_toolchain_setup () { local v vv; export ABI=$1; local save_restore_variables=(CBUILD CHOST AR CC CXX F77 FC LD NM OBJCOPY OBJDUMP PKG_CONFIG RANLIB READELF STRINGS STRIP PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PKG_CONFIG_SYSTEM_INCLUDE_PATH PKG_CONFIG_SYSTEM_LIBRARY_PATH); if [[ ${_DEFAULT_ABI_SAVED} == "true" ]]; then for v in "${save_restore_variables[@]}"; do vv="_abi_saved_${v}"; [[ ${!vv+set} == "set" ]] && export ${v}="${!vv}" || unset ${v}; unset ${vv}; done; unset _DEFAULT_ABI_SAVED; fi; if [[ ${ABI} != ${DEFAULT_ABI} ]]; then for v in "${save_restore_variables[@]}"; do vv="_abi_saved_${v}"; [[ ${!v+set} == "set" ]] && export ${vv}="${!v}" || unset ${vv}; done; export _DEFAULT_ABI_SAVED="true"; if [[ ${CBUILD} == "${CHOST}" ]]; then export CBUILD=$(get_abi_CHOST $1); fi; export CHOST=$(get_abi_CHOST ${DEFAULT_ABI}); export AR="$(tc-getAR)"; export CC="$(tc-getCC) $(get_abi_CFLAGS)"; export CXX="$(tc-getCXX) $(get_abi_CFLAGS)"; export F77="$(tc-getF77) $(get_abi_CFLAGS)"; export FC="$(tc-getFC) $(get_abi_CFLAGS)"; export LD="$(tc-getLD) $(get_abi_LDFLAGS)"; export NM="$(tc-getNM)"; export OBJCOPY="$(tc-getOBJCOPY)"; export OBJDUMP="$(tc-getOBJDUMP)"; export PKG_CONFIG="$(tc-getPKG_CONFIG)"; export RANLIB="$(tc-getRANLIB)"; export READELF="$(tc-getREADELF)"; export STRINGS="$(tc-getSTRINGS)"; export STRIP="$(tc-getSTRIP)"; export CHOST=$(get_abi_CHOST $1); export PKG_CONFIG_LIBDIR=${EPREFIX}/usr/$(get_libdir)/pkgconfig; export PKG_CONFIG_PATH=${EPREFIX}/usr/share/pkgconfig; export PKG_CONFIG_SYSTEM_INCLUDE_PATH=${EPREFIX}/usr/include; export PKG_CONFIG_SYSTEM_LIBRARY_PATH=${EPREFIX}/$(get_libdir):${EPREFIX}/usr/$(get_libdir); fi } no-as-needed () { [[ $# -ne 0 ]] && die "no-as-needed takes no arguments"; case $($(tc-getLD) -v 2>&1 </dev/null) in *GNU*) export LDFLAGS="${LDFLAGS} -Wl,--no-as-needed" ;; esac } prefixify_ro () { if [[ -e $1 ]]; then local f=${1##*/} x="${T}/${f}"; cp "$1" "${x}" || die "failed to copy file"; if use prefix; then hprefixify "${x}" 1>&2; else hprefixify "${T}"/${f} 1>&2; fi; echo "${x}"; else die "$1 does not exist"; fi } python_copy_sources () { debug-print-function ${FUNCNAME} "${@}"; local 
MULTIBUILD_VARIANTS; _python_obtain_impls; multibuild_copy_sources } python_doexe () { debug-print-function ${FUNCNAME} "${@}"; [[ ${EBUILD_PHASE} != install ]] && die "${FUNCNAME} can only be used in src_install"; local f; for f in "$@"; do python_newexe "${f}" "${f##*/}"; done } python_doheader () { debug-print-function ${FUNCNAME} "${@}"; [[ ${EBUILD_PHASE} != install ]] && die "${FUNCNAME} can only be used in src_install"; [[ -n ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'; local includedir=$(python_get_includedir); local d=${includedir#${EPREFIX}}; ( insopts -m 0644; insinto "${d}"; doins -r "${@}" || return ${?} ) } python_domodule () { debug-print-function ${FUNCNAME} "${@}"; [[ -n ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'; local d; if [[ ${_PYTHON_MODULEROOT} == /* ]]; then d=${_PYTHON_MODULEROOT}; else local sitedir=$(python_get_sitedir); d=${sitedir#${EPREFIX}}/${_PYTHON_MODULEROOT//.//}; fi; if [[ ${EBUILD_PHASE} == install ]]; then ( insopts -m 0644; insinto "${d}"; doins -r "${@}" || return ${?} ); python_optimize "${ED%/}/${d}"; else if [[ -n ${BUILD_DIR} ]]; then local dest=${BUILD_DIR}/install${EPREFIX}/${d}; mkdir -p "${dest}" || die; cp -pR "${@}" "${dest}/" || die; ( cd "${dest}" && chmod -R a+rX "${@##*/}" ) || die; else die "${FUNCNAME} can only be used in src_install or with BUILD_DIR set"; fi; fi } python_doscript () { debug-print-function ${FUNCNAME} "${@}"; [[ ${EBUILD_PHASE} != install ]] && die "${FUNCNAME} can only be used in src_install"; local _PYTHON_REWRITE_SHEBANG=1; python_doexe "${@}" } python_export_utf8_locale () { debug-print-function ${FUNCNAME} "${@}"; type locale &> /dev/null || return 0; if [[ $(locale charmap) != UTF-8 ]]; then local lang locales="C.UTF-8 en_US.UTF-8 en_GB.UTF-8 $(locale -a)"; for lang in ${locales}; do if [[ $(LC_ALL=${lang} locale charmap 2>/dev/null) == UTF-8 ]]; then if _python_check_locale_sanity "${lang}"; then export LC_CTYPE=${lang}; if [[ -n ${LC_ALL} ]]; then export LC_NUMERIC=${LC_ALL}; export LC_TIME=${LC_ALL}; export LC_COLLATE=${LC_ALL}; export LC_MONETARY=${LC_ALL}; export LC_MESSAGES=${LC_ALL}; export LC_PAPER=${LC_ALL}; export LC_NAME=${LC_ALL}; export LC_ADDRESS=${LC_ALL}; export LC_TELEPHONE=${LC_ALL}; export LC_MEASUREMENT=${LC_ALL}; export LC_IDENTIFICATION=${LC_ALL}; export LC_ALL=; fi; return 0; fi; fi; done; ewarn "Could not find a UTF-8 locale. This may trigger build failures in"; ewarn "some python packages. Please ensure that a UTF-8 locale is listed in"; ewarn "/etc/locale.gen and run locale-gen."; return 1; fi; return 0 } python_fix_shebang () { debug-print-function ${FUNCNAME} "${@}"; [[ -n ${EPYTHON} ]] || die "${FUNCNAME}: EPYTHON unset (pkg_setup not called?)"; local PYTHON; _python_export "${EPYTHON}" PYTHON; local force quiet; while [[ -n ${@} ]]; do case "${1}" in -f | --force) force=1; shift ;; -q | --quiet) quiet=1; shift ;; --) shift; break ;; *) break ;; esac; done; [[ -n ${1} ]] || die "${FUNCNAME}: no paths given"; local path f; for path in "$@"; do local any_fixed is_recursive; [[ -d ${path} ]] && is_recursive=1; while IFS= read -r -d '' f; do local shebang i; local error= match=; IFS= read -r shebang < "${f}"; if [[ ${shebang} == '#!'* ]]; then local split_shebang=(); read -r -a split_shebang <<< ${shebang#"#!"} || die; local in_path=${split_shebang[0]}; local from='^#! 
*[^ ]*'; if [[ ${in_path} == */env ]]; then in_path=${split_shebang[1]}; from+=' *[^ ]*'; fi; case ${in_path##*/} in "${EPYTHON}") match=1 ;; python | python[23]) match=1; [[ ${in_path##*/} == python2 ]] && error=1 ;; python[23].[0-9] | python3.[1-9][0-9] | pypy | pypy3 | jython[23].[0-9]) match=1; error=1 ;; esac; fi; [[ -n ${force} ]] && error=; if [[ ! -n ${match} ]]; then [[ -n ${is_recursive} ]] && continue; error=1; fi; if [[ ! -n ${quiet} ]]; then einfo "Fixing shebang in ${f#${D%/}}."; fi; if [[ ! -n ${error} ]]; then debug-print "${FUNCNAME}: in file ${f#${D%/}}"; debug-print "${FUNCNAME}: rewriting shebang: ${shebang}"; sed -i -e "1s@${from}@#!${PYTHON}@" "${f}" || die; any_fixed=1; else eerror "The file has incompatible shebang:"; eerror " file: ${f#${D%/}}"; eerror " current shebang: ${shebang}"; eerror " requested impl: ${EPYTHON}"; die "${FUNCNAME}: conversion of incompatible shebang requested"; fi; done < <(find -H "${path}" -type f -print0 || die); if [[ ! -n ${any_fixed} ]]; then eerror "QA error: ${FUNCNAME}, ${path#${D%/}} did not match any fixable files."; eerror "There are no Python files in specified directory."; die "${FUNCNAME} did not match any fixable files"; fi; done } python_foreach_impl () { debug-print-function ${FUNCNAME} "${@}"; if [[ -n ${_DISTUTILS_R1} ]]; then if has "${EBUILD_PHASE}" prepare configure compile test install && [[ ! -n ${_DISTUTILS_CALLING_FOREACH_IMPL} && ! -n ${_DISTUTILS_FOREACH_IMPL_WARNED} ]]; then eqawarn "python_foreach_impl has been called directly while using distutils-r1."; eqawarn "Please redefine python_*() phase functions to meet your expectations"; eqawarn "instead."; _DISTUTILS_FOREACH_IMPL_WARNED=1; if ! has "${EAPI}" 6 7 8; then die "Calling python_foreach_impl from distutils-r1 is banned in EAPI ${EAPI}"; fi; fi; local _DISTUTILS_CALLING_FOREACH_IMPL=; fi; local MULTIBUILD_VARIANTS; _python_obtain_impls; multibuild_foreach_variant _python_multibuild_wrapper "${@}" } python_gen_any_dep () { debug-print-function ${FUNCNAME} "${@}"; local depstr=${1}; shift; local i PYTHON_PKG_DEP out=; _python_verify_patterns "${@}"; for i in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do if _python_impl_matches "${i}" "${@}"; then local PYTHON_USEDEP="python_targets_${i}(-)"; local PYTHON_SINGLE_USEDEP="python_single_target_${i}(-)"; _python_export "${i}" PYTHON_PKG_DEP; local i_depstr=${depstr//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}; i_depstr=${i_depstr//\$\{PYTHON_SINGLE_USEDEP\}/${PYTHON_SINGLE_USEDEP}}; out="( ${PYTHON_PKG_DEP/:0=/:0} ${i_depstr} ) ${out}"; fi; done; echo "|| ( ${out})" } python_gen_cond_dep () { debug-print-function ${FUNCNAME} "${@}"; local impl matches=(); local dep=${1}; shift; _python_verify_patterns "${@}"; for impl in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do if _python_impl_matches "${impl}" "${@}"; then if [[ ${dep} == *'${PYTHON_USEDEP}'* ]]; then local usedep=$(_python_gen_usedep "${@}"); dep=${dep//\$\{PYTHON_USEDEP\}/${usedep}}; fi; matches+=("python_targets_${impl}? ( ${dep} )"); fi; done; echo "${matches[@]}" } python_gen_impl_dep () { debug-print-function ${FUNCNAME} "${@}"; local impl matches=(); local PYTHON_REQ_USE=${1}; shift; _python_verify_patterns "${@}"; for impl in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do if _python_impl_matches "${impl}" "${@}"; then local PYTHON_PKG_DEP; _python_export "${impl}" PYTHON_PKG_DEP; matches+=("python_targets_${impl}? 
( ${PYTHON_PKG_DEP} )"); fi; done; echo "${matches[@]}" } python_gen_useflags () { debug-print-function ${FUNCNAME} "${@}"; local impl matches=(); _python_verify_patterns "${@}"; for impl in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do if _python_impl_matches "${impl}" "${@}"; then matches+=("python_targets_${impl}"); fi; done; echo "${matches[@]}" } python_get_CFLAGS () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_CFLAGS; echo "${PYTHON_CFLAGS}" } python_get_LIBS () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_LIBS; echo "${PYTHON_LIBS}" } python_get_PYTHON_CONFIG () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_CONFIG; echo "${PYTHON_CONFIG}" } python_get_includedir () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_INCLUDEDIR; echo "${PYTHON_INCLUDEDIR}" } python_get_library_path () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_LIBPATH; echo "${PYTHON_LIBPATH}" } python_get_scriptdir () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_SCRIPTDIR; echo "${PYTHON_SCRIPTDIR}" } python_get_sitedir () { debug-print-function ${FUNCNAME} "${@}"; _python_export "${@}" PYTHON_SITEDIR; echo "${PYTHON_SITEDIR}" } python_has_version () { debug-print-function ${FUNCNAME} "${@}"; local root_arg=(-b); case ${1} in -b | -d | -r) root_arg=("${1}"); shift ;; esac; if [[ ${EAPI} == 6 ]]; then if [[ ${root_arg} == -r ]]; then root_arg=(); else root_arg=(--host-root); fi; fi; local pkg; for pkg in "$@"; do ebegin " ${pkg}"; has_version "${root_arg[@]}" "${pkg}"; eend ${?} || return; done; return 0 } python_moduleinto () { debug-print-function ${FUNCNAME} "${@}"; _PYTHON_MODULEROOT=${1} } python_newexe () { debug-print-function ${FUNCNAME} "${@}"; [[ ${EBUILD_PHASE} != install ]] && die "${FUNCNAME} can only be used in src_install"; [[ -n ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'; [[ ${#} -eq 2 ]] || die "Usage: ${FUNCNAME} "; local wrapd=${_PYTHON_SCRIPTROOT:-/usr/bin}; local f=${1}; local newfn=${2}; local scriptdir=$(python_get_scriptdir); local d=${scriptdir#${EPREFIX}}; ( dodir "${wrapd}"; exeopts -m 0755; exeinto "${d}"; newexe "${f}" "${newfn}" || return ${?} ); local dosym=dosym; [[ ${EAPI} == [67] ]] && dosym=dosym8; "${dosym}" -r /usr/lib/python-exec/python-exec2 "${wrapd}/${newfn}"; if [[ -n ${_PYTHON_REWRITE_SHEBANG} ]]; then python_fix_shebang -q "${ED%/}/${d}/${newfn}"; fi } python_newscript () { debug-print-function ${FUNCNAME} "${@}"; [[ ${EBUILD_PHASE} != install ]] && die "${FUNCNAME} can only be used in src_install"; local _PYTHON_REWRITE_SHEBANG=1; python_newexe "${@}" } python_optimize () { debug-print-function ${FUNCNAME} "${@}"; [[ -n ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'; local PYTHON=${PYTHON}; [[ -n ${PYTHON} ]] || _python_export PYTHON; [[ -x ${PYTHON} ]] || die "PYTHON (${PYTHON}) is not executable"; if [[ ${#} -eq 0 ]]; then local f; while IFS= read -r -d '' f; do if [[ ${f} == /* && -d ${D%/}${f} ]]; then set -- "${D%/}${f}" "${@}"; fi; done < <( "${PYTHON}" - <<-EOF || die import sys print("".join(x + "\0" for x in sys.path)) EOF ); debug-print "${FUNCNAME}: using sys.path: ${*/%/;}"; fi; local jobs=$(makeopts_jobs); local d; for d in "$@"; do local instpath=${d#${D%/}}; instpath=/${instpath##/}; einfo "Optimize Python modules for ${instpath}"; case "${EPYTHON}" in python2.7 | python3.[34]) "${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}"; "${PYTHON}" -OO -m compileall 
-q -f -d "${instpath}" "${d}" ;; python3.[5678] | pypy3) "${PYTHON}" -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}"; "${PYTHON}" -O -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}"; "${PYTHON}" -OO -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}" ;; python*) "${PYTHON}" -m compileall -j "${jobs}" -o 0 -o 1 -o 2 --hardlink-dupes -q -f -d "${instpath}" "${d}" ;; *) "${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}" ;; esac; done } python_replicate_script () { debug-print-function ${FUNCNAME} "${@}"; function _python_replicate_script () { local _PYTHON_FIX_SHEBANG_QUIET=1; local PYTHON_SCRIPTDIR; _python_export PYTHON_SCRIPTDIR; ( exeopts -m 0755; exeinto "${PYTHON_SCRIPTDIR#${EPREFIX}}"; doexe "${files[@]}" ); python_fix_shebang -q "${files[@]/*\//${D%/}/${PYTHON_SCRIPTDIR}/}" }; local files=("${@}"); python_foreach_impl _python_replicate_script; unset -f _python_replicate_script; local f; for f in "$@"; do local dosym=dosym; [[ ${EAPI} == [67] ]] && dosym=dosym8; "${dosym}" -r /usr/lib/python-exec/python-exec2 "${f#${ED}}"; done } python_scriptinto () { debug-print-function ${FUNCNAME} "${@}"; _PYTHON_SCRIPTROOT=${1} } python_setup () { debug-print-function ${FUNCNAME} "${@}"; local has_check_deps; declare -f python_check_deps > /dev/null && has_check_deps=1; if [[ ! -n ${has_check_deps} ]]; then _python_validate_useflags; fi; local pycompat=("${PYTHON_COMPAT[@]}"); if [[ -n ${PYTHON_COMPAT_OVERRIDE} ]]; then pycompat=(${PYTHON_COMPAT_OVERRIDE}); fi; local found i; _python_verify_patterns "${@}"; for ((i = ${#_PYTHON_SUPPORTED_IMPLS[@]} - 1; i >= 0; i-- )) do local impl=${_PYTHON_SUPPORTED_IMPLS[i]}; has "${impl}" "${pycompat[@]}" || continue; if [[ ! -n ${PYTHON_COMPAT_OVERRIDE} && ! -n ${has_check_deps} ]]; then use "python_targets_${impl}" || continue; fi; _python_impl_matches "${impl}" "${@}" || continue; _python_export "${impl}" EPYTHON PYTHON; if [[ -n ${has_check_deps} ]]; then _python_run_check_deps "${impl}" || continue; fi; found=1; break; done; if [[ ! 
-n ${found} ]]; then eerror "${FUNCNAME}: none of the enabled implementation matched the patterns."; eerror " patterns: ${@-'(*)'}"; eerror "Likely a REQUIRED_USE constraint (possibly USE-conditional) is missing."; eerror " suggested: || ( \$(python_gen_useflags ${@}) )"; eerror "(remember to quote all the patterns with '')"; die "${FUNCNAME}: no enabled implementation satisfy requirements"; fi; _python_wrapper_setup; einfo "Using ${EPYTHON} in global scope" } raw-ldflags () { local x input="$@"; [[ -z ${input} ]] && input=${LDFLAGS}; set --; for x in ${input}; do case ${x} in -Wl,*) x=${x#-Wl,}; set -- "$@" ${x//,/ } ;; *) ;; esac; done; echo "$@" } replace-cpu-flags () { local newcpu="$#"; newcpu="${!newcpu}"; while [ $# -gt 1 ]; do replace-flags "-march=${1}" "-march=${newcpu}"; replace-flags "-mcpu=${1}" "-mcpu=${newcpu}"; replace-flags "-mtune=${1}" "-mtune=${newcpu}"; shift; done; return 0 } replace-flags () { [[ $# != 2 ]] && die "Usage: replace-flags <old flag> <new flag>"; local f var new; for var in $(all-flag-vars); do new=(); for f in ${!var}; do [[ ${f} == ${1} ]] && f=${2}; new+=("${f}"); done; export ${var}="${new[*]}"; done; return 0 } replace-sparc64-flags () { [[ $# -ne 0 ]] && die "replace-sparc64-flags takes no arguments"; local SPARC64_CPUS="ultrasparc3 ultrasparc v9"; if [ "${CFLAGS/mtune}" != "${CFLAGS}" ]; then for x in ${SPARC64_CPUS}; do CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8}"; done; else for x in ${SPARC64_CPUS}; do CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"; done; fi; if [ "${CXXFLAGS/mtune}" != "${CXXFLAGS}" ]; then for x in ${SPARC64_CPUS}; do CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8}"; done; else for x in ${SPARC64_CPUS}; do CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"; done; fi; export CFLAGS CXXFLAGS } run_in_build_dir () { debug-print-function ${FUNCNAME} "${@}"; local ret; [[ ${#} -ne 0 ]] || die "${FUNCNAME}: no command specified."; [[ -n ${BUILD_DIR} ]] || die "${FUNCNAME}: BUILD_DIR not set."; mkdir -p "${BUILD_DIR}" || die; pushd "${BUILD_DIR}" > /dev/null || die; "${@}"; ret=${?}; popd > /dev/null || die; return ${ret} } setup-allowed-flags () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _setup-allowed-flags "$@" } src_compile () { export JAVA_HOME=$(java-config --jre-home); export KERAS_HOME="${T}/.keras"; if use python; then python_setup; BUILD_DIR="${S}-${EPYTHON/./_}"; cd "${BUILD_DIR}" || die; fi; ebazel build -k --nobuild //tensorflow:libtensorflow_framework.so //tensorflow:libtensorflow.so //tensorflow:libtensorflow_cc.so $(usex python '//tensorflow/tools/pip_package:build_pip_package' ''); ebazel build //tensorflow:libtensorflow_framework.so //tensorflow:libtensorflow.so; ebazel build //tensorflow:libtensorflow_cc.so; ebazel build //tensorflow:install_headers; ebazel shutdown; function do_compile () { ebazel build //tensorflow/tools/pip_package:build_pip_package; ebazel shutdown }; BUILD_DIR="${S}"; cd "${BUILD_DIR}" || die; use python && python_foreach_impl run_in_build_dir do_compile } src_configure () { export JAVA_HOME=$(java-config --jre-home); export KERAS_HOME="${T}/.keras"; function do_configure () { export CC_OPT_FLAGS=" "; export TF_ENABLE_XLA=$(usex xla 1 0); export TF_NEED_OPENCL_SYCL=0; export TF_NEED_OPENCL=0; export TF_NEED_COMPUTECPP=0; export TF_NEED_ROCM=0; export TF_NEED_MPI=$(usex mpi 1 0); export TF_SET_ANDROID_WORKSPACE=0; if use python; then export PYTHON_BIN_PATH="${PYTHON}"; export PYTHON_LIB_PATH="$(python_get_sitedir)"; else export PYTHON_BIN_PATH="$(which python)"; 
export PYTHON_LIB_PATH="$(python -c 'from distutils.sysconfig import *; print(get_python_lib())')"; fi; export TF_NEED_CUDA=$(usex cuda 1 0); export TF_DOWNLOAD_CLANG=0; export TF_CUDA_CLANG=0; export TF_NEED_TENSORRT=0; if use cuda; then export TF_CUDA_PATHS="${EPREFIX}/opt/cuda"; export GCC_HOST_COMPILER_PATH="$(cuda_gccdir)/$(tc-getCC)"; export TF_CUDA_VERSION="$(cuda_toolkit_version)"; export TF_CUDNN_VERSION="$(cuda_cudnn_version)"; einfo "Setting CUDA version: $TF_CUDA_VERSION"; einfo "Setting CUDNN version: $TF_CUDNN_VERSION"; if [[ $(cuda-config -s) != *$(gcc-version)* ]]; then ewarn "TensorFlow is being built with Nvidia CUDA support. Your default compiler"; ewarn "version is not supported by the currently installed CUDA. TensorFlow will"; ewarn "instead be compiled using: ${GCC_HOST_COMPILER_PATH}."; ewarn "If the build fails with linker errors try rebuilding the relevant"; ewarn "dependencies using the same compiler version."; fi; if [[ -z "$TF_CUDA_COMPUTE_CAPABILITIES" ]]; then ewarn "WARNING: TensorFlow is being built with its default CUDA compute capabilities: 3.5 and 7.0."; ewarn "These may not be optimal for your GPU."; ewarn ""; ewarn "To configure TensorFlow with the CUDA compute capability that is optimal for your GPU,"; ewarn "set TF_CUDA_COMPUTE_CAPABILITIES in your make.conf, and re-emerge tensorflow."; ewarn "For example, to use CUDA capability 7.5 & 3.5, add: TF_CUDA_COMPUTE_CAPABILITIES=7.5,3.5"; ewarn ""; ewarn "You can look up your GPU's CUDA compute capability at https://developer.nvidia.com/cuda-gpus"; ewarn "or by running /opt/cuda/extras/demo_suite/deviceQuery | grep 'CUDA Capability'"; fi; fi; local SYSLIBS=(absl_py astor_archive astunparse_archive boringssl com_github_googlecloudplatform_google_cloud_cpp com_github_grpc_grpc com_google_absl com_google_protobuf curl cython dill_archive double_conversion flatbuffers functools32_archive gast_archive gif hwloc icu jsoncpp_git libjpeg_turbo lmdb nasm nsync opt_einsum_archive org_sqlite pasta png pybind11 six_archive snappy tblib_archive termcolor_archive typing_extensions_archive wrapt zlib); export TF_SYSTEM_LIBS="${SYSLIBS[@]}"; export TF_IGNORE_MAX_BAZEL_VERSION=1; ./configure || die; echo 'build --config=noaws --config=nohdfs --config=nonccl' >> .bazelrc || die; echo 'build --define tensorflow_mkldnn_contraction_kernel=0' >> .bazelrc || die; echo "build --action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die; echo "build --host_action_env=KERAS_HOME=\"${T}/.keras\"" >> .bazelrc || die; for cflag in $($(tc-getPKG_CONFIG) jsoncpp --cflags); do echo "build --copt=\"${cflag}\"" >> .bazelrc || die; echo "build --host_copt=\"${cflag}\"" >> .bazelrc || die; done }; if use python; then python_foreach_impl run_in_build_dir do_configure; else do_configure; fi } src_install () { local i l; export JAVA_HOME=$(java-config --jre-home); export KERAS_HOME="${T}/.keras"; function do_install () { einfo "Installing ${EPYTHON} files"; local srcdir="${T}/src-${MULTIBUILD_VARIANT}"; mkdir -p "${srcdir}" || die; bazel-bin/tensorflow/tools/pip_package/build_pip_package --src "${srcdir}" || die; cd "${srcdir}" || die; esetup.py install; rm -f "${D}/$(python_get_sitedir)"/${PN}/lib${PN}_framework.so* || die; rm -f "${D}/$(python_get_sitedir)"/${PN}_core/lib${PN}_framework.so* || die; python_optimize }; if use python; then python_foreach_impl run_in_build_dir do_install; for i in "${ED}"/usr/lib/python-exec/*/*; do n="${i##*/}"; [[ -e "${ED}/usr/bin/${n}" ]] || dosym ../lib/python-exec/python-exec2 "/usr/bin/${n}"; done; 
python_setup; local BUILD_DIR="${S}-${EPYTHON/./_}"; cd "${BUILD_DIR}" || die; fi; einfo "Installing headers"; insinto /usr/include/${PN}/; doins -r bazel-bin/tensorflow/include/*; einfo "Installing libs"; ${PN}/c/generate-pc.sh --prefix="${EPREFIX}"/usr --libdir=$(get_libdir) --version=${MY_PV} || die; insinto /usr/$(get_libdir)/pkgconfig; doins ${PN}.pc ${PN}_cc.pc; for l in libtensorflow{,_framework,_cc}.so; do dolib.so bazel-bin/tensorflow/${l}; dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1); dolib.so bazel-bin/tensorflow/${l}.$(ver_cut 1-3); done; einstalldocs; export MAKEOPTS="-j1" } src_prepare () { export JAVA_HOME=$(java-config --jre-home); append-flags $(get-cpu-flags); append-cxxflags -std=c++17; export BUILD_CXXFLAGS+=" -std=c++17"; filter-flags '-fvtable-verify=@(std|preinit)'; bazel_setup_bazelrc; eapply "${WORKDIR}"/patches/*.patch; sed -i "/^ '/s/==/>=/g" tensorflow/tools/pip_package/setup.py || die; sed -i "/config_googleapis/d" tensorflow/workspace0.bzl || die; hprefixify -w /host_compiler_prefix/ third_party/gpus/cuda_configure.bzl; default; use python && python_copy_sources; use cuda && cuda_add_sandbox } src_test () { default } src_unpack () { unpack "${P}.tar.gz"; unpack tensorflow-patches-${PVR}.tar.bz2; bazel_load_distfiles "${bazel_external_uris}" } strip-flags () { [[ $# -ne 0 ]] && die "strip-flags takes no arguments"; local x y var; local ALLOWED_FLAGS; _setup-allowed-flags; set -f; for var in $(all-flag-vars); do local new=(); for x in ${!var}; do for y in "${ALLOWED_FLAGS[@]}"; do if [[ ${x} == ${y} ]]; then new+=("${x}"); break; fi; done; done; if _is_flagq ${var} "-O*" && ! _is_flagq new "-O*"; then new+=(-O2); fi; if [[ ${!var} != "${new[*]}" ]]; then einfo "strip-flags: ${var}: changed '${!var}' to '${new[*]}'"; fi; export ${var}="${new[*]}"; done; set +f; return 0 } strip-unsupported-flags () { [[ $# -ne 0 ]] && die "strip-unsupported-flags takes no arguments"; export CFLAGS=$(test-flags-CC ${CFLAGS}); export CXXFLAGS=$(test-flags-CXX ${CXXFLAGS}); export FFLAGS=$(test-flags-F77 ${FFLAGS}); export FCFLAGS=$(test-flags-FC ${FCFLAGS}); export LDFLAGS=$(test-flags-CCLD ${LDFLAGS}) } tc-arch () { tc-ninja_magic_to_arch portage "$@" } tc-arch-kernel () { tc-ninja_magic_to_arch kern "$@" } tc-check-openmp () { if ! 
_tc-has-openmp; then eerror "Your current compiler does not support OpenMP!"; if tc-is-gcc; then eerror "Enable OpenMP support by building sys-devel/gcc with USE=\"openmp\"."; else if tc-is-clang; then eerror "OpenMP support in sys-devel/clang is provided by sys-libs/libomp."; fi; fi; die "Active compiler does not have required support for OpenMP"; fi } tc-cpp-is-true () { local CONDITION=${1}; shift; $(tc-getTARGET_CPP) "${@}" -P - <<-EOF > /dev/null 2>&1 #if ${CONDITION} true #else #error false #endif EOF } tc-detect-is-softfloat () { [[ $(tc-getTARGET_CPP) == "gcc -E" ]] && return 1; case ${CTARGET:-${CHOST}} in *-newlib | *-elf | *-eabi) return 1 ;; arm*) if tc-cpp-is-true "defined(__ARM_PCS_VFP)"; then echo "no"; else if tc-cpp-is-true "defined(__SOFTFP__)"; then echo "yes"; else echo "softfp"; fi; fi; return 0 ;; *) return 1 ;; esac } tc-enables-pie () { tc-cpp-is-true "defined(__PIE__)" ${CPPFLAGS} ${CFLAGS} } tc-enables-ssp () { tc-cpp-is-true "defined(__SSP__) || defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} } tc-enables-ssp-all () { tc-cpp-is-true "defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} } tc-enables-ssp-strong () { tc-cpp-is-true "defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} } tc-endian () { local host=$1; [[ -z ${host} ]] && host=${CTARGET:-${CHOST}}; host=${host%%-*}; case ${host} in aarch64*be) echo big ;; aarch64) echo little ;; alpha*) echo little ;; arm*b*) echo big ;; arm*) echo little ;; cris*) echo little ;; hppa*) echo big ;; i?86*) echo little ;; ia64*) echo little ;; loongarch*) echo little ;; m68*) echo big ;; mips*l*) echo little ;; mips*) echo big ;; powerpc*le) echo little ;; powerpc*) echo big ;; riscv*) echo little ;; s390*) echo big ;; sh*b*) echo big ;; sh*) echo little ;; sparc*) echo big ;; x86_64*) echo little ;; *) echo wtf ;; esac } tc-env_build () { tc-export_build_env; CFLAGS=${BUILD_CFLAGS} CXXFLAGS=${BUILD_CXXFLAGS} CPPFLAGS=${BUILD_CPPFLAGS} LDFLAGS=${BUILD_LDFLAGS} AR=$(tc-getBUILD_AR) AS=$(tc-getBUILD_AS) CC=$(tc-getBUILD_CC) CPP=$(tc-getBUILD_CPP) CXX=$(tc-getBUILD_CXX) LD=$(tc-getBUILD_LD) NM=$(tc-getBUILD_NM) PKG_CONFIG=$(tc-getBUILD_PKG_CONFIG) RANLIB=$(tc-getBUILD_RANLIB) READELF=$(tc-getBUILD_READELF) "$@" } tc-export () { local var; for var in "$@"; do [[ $(type -t "tc-get${var}") != "function" ]] && die "tc-export: invalid export variable '${var}'"; "tc-get${var}" > /dev/null; done } tc-export_build_env () { tc-export "$@"; if tc-is-cross-compiler; then : ${BUILD_CFLAGS:=-O1 -pipe}; : ${BUILD_CXXFLAGS:=-O1 -pipe}; : ${BUILD_CPPFLAGS:= }; : ${BUILD_LDFLAGS:= }; else : ${BUILD_CFLAGS:=${CFLAGS}}; : ${BUILD_CXXFLAGS:=${CXXFLAGS}}; : ${BUILD_CPPFLAGS:=${CPPFLAGS}}; : ${BUILD_LDFLAGS:=${LDFLAGS}}; fi; export BUILD_{C,CXX,CPP,LD}FLAGS; local v; for v in BUILD_{C,CXX,CPP,LD}FLAGS; do export ${v#BUILD_}_FOR_BUILD="${!v}"; done } tc-get-c-rtlib () { local res=$( $(tc-getCC) ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -print-libgcc-file-name 2>/dev/null ); case ${res} in *libclang_rt*) echo compiler-rt ;; *libgcc*) echo libgcc ;; *) return 1 ;; esac; return 0 } tc-get-compiler-type () { local code=' #if defined(__PATHSCALE__) HAVE_PATHCC #elif defined(__clang__) HAVE_CLANG #elif defined(__GNUC__) HAVE_GCC #endif '; local res=$($(tc-getCPP "$@") -E -P - <<<"${code}"); case ${res} in *HAVE_PATHCC*) echo pathcc ;; *HAVE_CLANG*) echo clang ;; *HAVE_GCC*) echo gcc ;; *) echo unknown ;; esac } tc-get-cxx-stdlib () { local code='#include <ciso646> #if defined(_LIBCPP_VERSION) HAVE_LIBCXX #elif defined(__GLIBCXX__) 
HAVE_LIBSTDCPP #endif '; local res=$( $(tc-getCXX) ${CXXFLAGS} ${CPPFLAGS} -x c++ -E -P - <<<"${code}" 2>/dev/null ); case ${res} in *HAVE_LIBCXX*) echo libc++ ;; *HAVE_LIBSTDCPP*) echo libstdc++ ;; *) return 1 ;; esac; return 0 } tc-getAR () { tc-getPROG AR ar "$@" } tc-getAS () { tc-getPROG AS as "$@" } tc-getBUILD_AR () { tc-getBUILD_PROG AR ar "$@" } tc-getBUILD_AS () { tc-getBUILD_PROG AS as "$@" } tc-getBUILD_CC () { tc-getBUILD_PROG CC gcc "$@" } tc-getBUILD_CPP () { tc-getBUILD_PROG CPP "$(tc-getBUILD_CC) -E" "$@" } tc-getBUILD_CXX () { tc-getBUILD_PROG CXX g++ "$@" } tc-getBUILD_LD () { tc-getBUILD_PROG LD ld "$@" } tc-getBUILD_NM () { tc-getBUILD_PROG NM nm "$@" } tc-getBUILD_OBJCOPY () { tc-getBUILD_PROG OBJCOPY objcopy "$@" } tc-getBUILD_PKG_CONFIG () { tc-getBUILD_PROG PKG_CONFIG pkg-config "$@" } tc-getBUILD_PROG () { local vars="BUILD_$1 $1_FOR_BUILD HOST$1"; tc-is-cross-compiler || vars+=" $1"; _tc-getPROG CBUILD "${vars}" "${@:2}" } tc-getBUILD_RANLIB () { tc-getBUILD_PROG RANLIB ranlib "$@" } tc-getBUILD_READELF () { tc-getBUILD_PROG READELF readelf "$@" } tc-getBUILD_STRINGS () { tc-getBUILD_PROG STRINGS strings "$@" } tc-getBUILD_STRIP () { tc-getBUILD_PROG STRIP strip "$@" } tc-getCC () { tc-getPROG CC gcc "$@" } tc-getCPP () { tc-getPROG CPP "${CC:-gcc} -E" "$@" } tc-getCXX () { tc-getPROG CXX g++ "$@" } tc-getDLLWRAP () { tc-getPROG DLLWRAP dllwrap "$@" } tc-getF77 () { tc-getPROG F77 gfortran "$@" } tc-getFC () { tc-getPROG FC gfortran "$@" } tc-getGCJ () { tc-getPROG GCJ gcj "$@" } tc-getGO () { tc-getPROG GO gccgo "$@" } tc-getLD () { tc-getPROG LD ld "$@" } tc-getNM () { tc-getPROG NM nm "$@" } tc-getOBJCOPY () { tc-getPROG OBJCOPY objcopy "$@" } tc-getOBJDUMP () { tc-getPROG OBJDUMP objdump "$@" } tc-getPKG_CONFIG () { tc-getPROG PKG_CONFIG pkg-config "$@" } tc-getPROG () { _tc-getPROG CHOST "$@" } tc-getRANLIB () { tc-getPROG RANLIB ranlib "$@" } tc-getRC () { tc-getPROG RC windres "$@" } tc-getREADELF () { tc-getPROG READELF readelf "$@" } tc-getSTRINGS () { tc-getPROG STRINGS strings "$@" } tc-getSTRIP () { tc-getPROG STRIP strip "$@" } tc-getTARGET_CPP () { if [[ -n ${CTARGET} ]]; then _tc-getPROG CTARGET TARGET_CPP "gcc -E" "$@"; else tc-getCPP "$@"; fi } tc-has-openmp () { _tc-has-openmp "$@" } tc-has-tls () { local base="${T}/test-tc-tls"; cat <<-EOF > "${base}.c" int foo(int *i) { static __thread int j = 0; return *i ? j : *i; } EOF local flags; case $1 in -s) flags="-S" ;; -c) flags="-c" ;; -l) ;; -*) die "Usage: tc-has-tls [-c|-l] [toolchain prefix]" ;; esac; : ${flags:=-fPIC -shared -Wl,-z,defs}; [[ $1 == -* ]] && shift; $(tc-getCC "$@") ${flags} "${base}.c" -o "${base}" &> /dev/null; local ret=$?; rm -f "${base}"*; return ${ret} } tc-is-clang () { [[ $(tc-get-compiler-type) == clang ]] } tc-is-cross-compiler () { [[ ${CBUILD:-${CHOST}} != ${CHOST} ]] } tc-is-gcc () { [[ $(tc-get-compiler-type) == gcc ]] } tc-is-softfloat () { tc-detect-is-softfloat || tc-tuple-is-softfloat } tc-is-static-only () { local host=${CTARGET:-${CHOST}}; [[ ${host} == *-mint* ]] } tc-ld-disable-gold () { tc-ld-is-gold "$@" && tc-ld-force-bfd "$@" } tc-ld-force-bfd () { if ! tc-ld-is-gold "$@" && ! 
tc-ld-is-lld "$@"; then return; fi; ewarn "Forcing usage of the BFD linker"; local ld=$(tc-getLD "$@"); local bfd_ld="${ld%% *}.bfd"; local path_ld=$(which "${bfd_ld}" 2>/dev/null); [[ -e ${path_ld} ]] && export LD=${bfd_ld}; local fallback="true"; if tc-is-gcc; then local major=$(gcc-major-version "$@"); local minor=$(gcc-minor-version "$@"); if [[ ${major} -gt 4 ]] || [[ ${major} -eq 4 && ${minor} -ge 8 ]]; then export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"; fallback="false"; fi; else if tc-is-clang; then local major=$(clang-major-version "$@"); local minor=$(clang-minor-version "$@"); if [[ ${major} -gt 3 ]] || [[ ${major} -eq 3 && ${minor} -ge 5 ]]; then export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"; fallback="false"; fi; fi; fi; if [[ ${fallback} == "true" ]]; then if [[ -e ${path_ld} ]]; then local d="${T}/bfd-linker"; mkdir -p "${d}"; ln -sf "${path_ld}" "${d}"/ld; export LDFLAGS="${LDFLAGS} -B${d}"; else die "unable to locate a BFD linker"; fi; fi } tc-ld-is-gold () { local out; local -x LC_ALL=C; out=$($(tc-getLD "$@") --version 2>&1); if [[ ${out} == *"GNU gold"* ]]; then return 0; fi; local base="${T}/test-tc-gold"; cat <<-EOF > "${base}.c" int main() { return 0; } EOF out=$($(tc-getCC "$@") ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -Wl,--version "${base}.c" -o "${base}" 2>&1); rm -f "${base}"*; if [[ ${out} == *"GNU gold"* ]]; then return 0; fi; return 1 } tc-ld-is-lld () { local out; local -x LC_ALL=C; out=$($(tc-getLD "$@") --version 2>&1); if [[ ${out} == *"LLD"* ]]; then return 0; fi; local base="${T}/test-tc-lld"; cat <<-EOF > "${base}.c" int main() { return 0; } EOF out=$($(tc-getCC "$@") ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -Wl,--version "${base}.c" -o "${base}" 2>&1); rm -f "${base}"*; if [[ ${out} == *"LLD"* ]]; then return 0; fi; return 1 } tc-ninja_magic_to_arch () { function ninj () { [[ ${type} == "kern" ]] && echo $1 || echo $2 }; local type=$1; local host=$2; [[ -z ${host} ]] && host=${CTARGET:-${CHOST}}; case ${host} in aarch64*) echo arm64 ;; alpha*) echo alpha ;; arm*) echo arm ;; avr*) ninj avr32 avr ;; bfin*) ninj blackfin bfin ;; c6x*) echo c6x ;; cris*) echo cris ;; frv*) echo frv ;; hexagon*) echo hexagon ;; hppa*) ninj parisc hppa ;; i?86*) if [[ ${type} == "kern" && ${host} == *freebsd* ]]; then echo i386; else echo x86; fi ;; ia64*) echo ia64 ;; loongarch*) ninj loongarch loong ;; m68*) echo m68k ;; metag*) echo metag ;; microblaze*) echo microblaze ;; mips*) echo mips ;; nios2*) echo nios2 ;; nios*) echo nios ;; or1k* | or32*) echo openrisc ;; powerpc*) if [[ ${type} == "kern" ]]; then echo powerpc; else if [[ ${host} == powerpc64* ]]; then echo ppc64; else echo ppc; fi; fi ;; riscv*) echo riscv ;; s390*) echo s390 ;; score*) echo score ;; sh64*) ninj sh64 sh ;; sh*) echo sh ;; sparc64*) ninj sparc64 sparc ;; sparc*) [[ ${PROFILE_ARCH} == "sparc64" ]] && ninj sparc64 sparc || echo sparc ;; tile*) echo tile ;; vax*) echo vax ;; x86_64*freebsd*) echo amd64 ;; x86_64*) if [[ ${type} == "kern" ]]; then echo x86; else echo amd64; fi ;; xtensa*) echo xtensa ;; *) echo unknown ;; esac } tc-stack-grows-down () { case ${ARCH} in hppa | metag) return 1 ;; esac; return 0 } tc-tuple-is-softfloat () { local CTARGET=${CTARGET:-${CHOST}}; case ${CTARGET//_/-} in bfin* | h8300*) echo "only" ;; *-softfloat-*) echo "yes" ;; *-softfp-*) echo "softfp" ;; arm*-hardfloat-* | arm*eabihf) echo "no" ;; *-newlib | *-elf | *-eabi) echo "no" ;; arm*) echo "yes" ;; *) echo "no" ;; esac } test-flag-CC () { _test-flag-PROG CC c "$@" } test-flag-CCLD () { _test-flag-PROG CC c+ld "$@" } test-flag-CXX () { 
_test-flag-PROG CXX c++ "$@" } test-flag-F77 () { _test-flag-PROG F77 f77 "$@" } test-flag-FC () { _test-flag-PROG FC f95 "$@" } test-flag-PROG () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _test-flag-PROG "$@" } test-flags () { test-flags-CC "$@" } test-flags-CC () { _test-flags-PROG CC "$@" } test-flags-CCLD () { _test-flags-PROG CCLD "$@" } test-flags-CXX () { _test-flags-PROG CXX "$@" } test-flags-F77 () { _test-flags-PROG F77 "$@" } test-flags-FC () { _test-flags-PROG FC "$@" } test-flags-PROG () { [[ ${EAPI} == [67] ]] || die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."; _test-flags-PROG "$@" } test_version_info () { if [[ $($(tc-getCC) --version 2>&1) == *$1* ]]; then return 0; else return 1; fi } ver_cut () { local range=${1}; local v=${2:-${PV}}; local start end; local -a comp; __eapi7_ver_split "${v}"; local max=$((${#comp[@]}/2)); __eapi7_ver_parse_range "${range}" "${max}"; local IFS=; if [[ ${start} -gt 0 ]]; then start=$(( start*2 - 1 )); fi; echo "${comp[*]:start:end*2-start}" } ver_rs () { local v; (( ${#} & 1 )) && v=${@: -1} || v=${PV}; local start end i; local -a comp; __eapi7_ver_split "${v}"; local max=$((${#comp[@]}/2 - 1)); while [[ ${#} -ge 2 ]]; do __eapi7_ver_parse_range "${1}" "${max}"; for ((i = start*2; i <= end*2; i+=2 )) do [[ ${i} -eq 0 && -z ${comp[i]} ]] && continue; comp[i]=${2}; done; shift 2; done; local IFS=; echo "${comp[*]}" } ver_test () { local va op vb; if [[ $# -eq 3 ]]; then va=${1}; shift; else va=${PVR}; fi; [[ $# -eq 2 ]] || die "${FUNCNAME}: bad number of arguments"; op=${1}; vb=${2}; case ${op} in -eq | -ne | -lt | -le | -gt | -ge) ;; *) die "${FUNCNAME}: invalid operator: ${op}" ;; esac; __eapi7_ver_compare "${va}" "${vb}"; test $? "${op}" 2 }