Remove unused helpers

Rodrigo Arias 2020-06-17 13:23:29 +02:00
parent ed829aace0
commit 86b4b016b2
51 changed files with 0 additions and 3752 deletions

View File

@@ -1,28 +0,0 @@
# `-B@out@/bin' forces gcc to use ld-wrapper.sh when calling ld.
export NIX_CFLAGS_COMPILE="-B@out@/bin/ $NIX_CFLAGS_COMPILE"
if [ -e @out@/nix-support/libc-cflags ]; then
export NIX_CFLAGS_COMPILE="$(cat @out@/nix-support/libc-cflags) $NIX_CFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/cc-cflags ]; then
export NIX_CFLAGS_COMPILE="$(cat @out@/nix-support/cc-cflags) $NIX_CFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/gnat-cflags ]; then
export NIX_GNATFLAGS_COMPILE="$(cat @out@/nix-support/gnat-cflags) $NIX_GNATFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/libc-ldflags ]; then
export NIX_LDFLAGS+=" $(cat @out@/nix-support/libc-ldflags)"
fi
if [ -e @out@/nix-support/cc-ldflags ]; then
export NIX_LDFLAGS+=" $(cat @out@/nix-support/cc-ldflags)"
fi
if [ -e @out@/nix-support/libc-ldflags-before ]; then
export NIX_LDFLAGS_BEFORE="$(cat @out@/nix-support/libc-ldflags-before) $NIX_LDFLAGS_BEFORE"
fi
export NIX_CC_WRAPPER_FLAGS_SET=1
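Illustration only (not part of the removed file): the effect of the `-B@out@/bin' flag above can be checked by asking gcc which `ld' it will run, since -B prefixes are searched first.
# Minimal sketch, assuming the wrapper is installed as $out/bin/ld:
gcc -B"$out/bin/" -print-prog-name=ld   # prints $out/bin/ld when the wrapper exists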

View File

@@ -1,157 +0,0 @@
#! @shell@ -e
if [ -n "$NIX_CC_WRAPPER_START_HOOK" ]; then
source "$NIX_CC_WRAPPER_START_HOOK"
fi
if [ -z "$NIX_CC_WRAPPER_FLAGS_SET" ]; then
source @out@/nix-support/add-flags.sh
fi
source @out@/nix-support/utils.sh
if [ -e @cc_dir@/nix-support/compiler_setup.sh ]; then
source @cc_dir@/nix-support/compiler_setup.sh
fi
# Figure out if linker flags should be passed. GCC prints annoying
# warnings when they are not needed.
dontLink=0
getVersion=0
nonFlagArgs=0
for i in "$@"; do
if [ "$i" = -c ]; then
dontLink=1
elif [ "$i" = -S ]; then
dontLink=1
elif [ "$i" = -E ]; then
dontLink=1
elif [ "$i" = -E ]; then
dontLink=1
elif [ "$i" = -M ]; then
dontLink=1
elif [ "$i" = -MM ]; then
dontLink=1
elif [ "$i" = -x ]; then
# At least for the cases c-header or c++-header we should set dontLink.
# I expect no one uses -x for anything other than making precompiled headers.
dontLink=1
elif [ "${i:0:1}" != - ]; then
nonFlagArgs=1
elif [ "$i" = -m32 ]; then
if [ -e @out@/nix-support/dynamic-linker-m32 ]; then
NIX_LDFLAGS="$NIX_LDFLAGS -dynamic-linker $(cat @out@/nix-support/dynamic-linker-m32)"
fi
fi
done
# If we pass a flag like -Wl, then gcc will call the linker unless it
# can figure out that it has to do something else (e.g., because of a
# "-c" flag). So if no non-flag arguments are given, don't pass any
# linker flags. This catches cases like "gcc" (should just print
# "gcc: no input files") and "gcc -v" (should print the version).
if [ "$nonFlagArgs" = 0 ]; then
dontLink=1
fi
# Optionally filter out paths not referring to the store.
params=("$@")
if [ "$NIX_ENFORCE_PURITY" = 1 -a -n "$NIX_STORE" ]; then
rest=()
n=0
while [ $n -lt ${#params[*]} ]; do
p=${params[n]}
p2=${params[$((n+1))]}
if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
skip $p
elif [ "$p" = -L ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
skip $p
elif [ "$p" = -I ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
elif [ "$p" = -isystem ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
else
rest=("${rest[@]}" "$p")
fi
n=$((n + 1))
done
params=("${rest[@]}")
fi
if [[ "@prog@" = *++ ]]; then
if echo "$@" | grep -qv -- -nostdlib; then
NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE ${NIX_CXXSTDLIB_COMPILE-@default_cxx_stdlib_compile@}"
NIX_CFLAGS_LINK="$NIX_CFLAGS_LINK $NIX_CXXSTDLIB_LINK"
fi
fi
# Add the flags for the C compiler proper.
extraAfter=($NIX_CFLAGS_COMPILE)
extraBefore=()
# When enforcing purity, pretend gcc can't find the current date and
# time
if [ "$NIX_ENFORCE_PURITY" = 1 ]; then
extraAfter+=('-D__DATE__="Jan 01 1970"'
'-D__TIME__="00:00:01"'
-Wno-builtin-macro-redefined)
fi
if [ "$dontLink" != 1 ]; then
# Add the flags that should only be passed to the compiler when
# linking.
extraAfter+=($NIX_CFLAGS_LINK)
# Add the flags that should be passed to the linker (and prevent
# `ld-wrapper' from adding NIX_LDFLAGS again).
for i in $NIX_LDFLAGS_BEFORE; do
extraBefore=(${extraBefore[@]} "-Wl,$i")
done
for i in $NIX_LDFLAGS; do
if [ "${i:0:3}" = -L/ ]; then
extraAfter+=("$i")
else
extraAfter+=("-Wl,$i")
fi
done
export NIX_LDFLAGS_SET=1
fi
# As a very special hack, if the arguments are just `-v', then don't
# add anything. This is to prevent `gcc -v' (which normally prints
# out the version number and returns exit code 0) from printing out
# `No input files specified' and returning exit code 1.
if [ "$*" = -v ]; then
extraAfter=()
extraBefore=()
fi
# Optionally print debug info.
if [ -n "$NIX_DEBUG" ]; then
echo "original flags to @prog@:" >&2
for i in "${params[@]}"; do
echo " $i" >&2
done
echo "extraBefore flags to @prog@:" >&2
for i in ${extraBefore[@]}; do
echo " $i" >&2
done
echo "extraAfter flags to @prog@:" >&2
for i in ${extraAfter[@]}; do
echo " $i" >&2
done
fi
if [ -n "$NIX_CC_WRAPPER_EXEC_HOOK" ]; then
source "$NIX_CC_WRAPPER_EXEC_HOOK"
fi
exec @prog@ ${extraBefore[@]} "${params[@]}" "${extraAfter[@]}"
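Illustration only (not part of the removed file): the exec line above composes the final command as extraBefore, then the caller's arguments, then extraAfter. Assuming only NIX_LDFLAGS_BEFORE='-z relro' and NIX_CFLAGS_COMPILE='-O2' are set:
# gcc -o hello hello.c
# is re-executed roughly as:
# @prog@ -Wl,-z -Wl,relro -o hello hello.c -O2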

View File

@@ -1,296 +0,0 @@
# The Nixpkgs CC is not directly usable, since it doesn't know where
# the C library and standard header files are. Therefore the compiler
# produced by that package cannot be installed directly in a user
# environment and used from the command line. So we use a wrapper
# script that sets up the right environment variables so that the
# compiler and the linker just "work".
{ name ? "", stdenv, nativeTools, nativeLibc, nativePrefix ? ""
, cc ? null, libc ? null, binutils ? null, coreutils ? null, shell ? stdenv.shell
, zlib ? null, extraPackages ? [], extraBuildCommands ? ""
, dyld ? null # TODO: should this be a setup-hook on dyld?
, isGNU ? false, isClang ? cc.isClang or false
}:
with stdenv.lib;
assert cc.isClangWithOmpss;
assert nativeTools -> nativePrefix != "";
assert !nativeTools -> cc != null && binutils != null && coreutils != null;
assert !nativeLibc -> libc != null;
# For ghdl (the vhdl language provider to gcc) we need zlib in the wrapper.
assert cc.langVhdl or false -> zlib != null;
let
ccVersion = (builtins.parseDrvName cc.name).version;
ccName = (builtins.parseDrvName cc.name).name;
in
stdenv.mkDerivation {
name = "THE-wrapper";
#name = "${cc.name}-wrapper-${cc.version}";
#name = (if name != "" then name else ccName + "-wrapper") +
# (if cc != null && ccVersion != "" then "-" + ccVersion else "");
preferLocalBuild = true;
inherit cc shell;
cc_dir = cc;
libc = if nativeLibc then null else libc;
binutils = if nativeTools then null else binutils;
# The wrapper scripts use 'cat', so we may need coreutils.
coreutils = if nativeTools then null else coreutils;
passthru = { inherit nativeTools nativeLibc nativePrefix isGNU isClang; };
buildCommand =
''
mkdir -p $out/bin $out/nix-support
wrap() {
local dst="$1"
local wrapper="$2"
export prog="$3"
substituteAll "$wrapper" "$out/bin/$dst"
chmod +x "$out/bin/$dst"
}
''
+ optionalString (!nativeLibc) (if (!stdenv.isDarwin) then ''
dynamicLinker="$libc/lib/$dynamicLinker"
echo $dynamicLinker > $out/nix-support/dynamic-linker
if [ -e $libc/lib/32/ld-linux.so.2 ]; then
echo $libc/lib/32/ld-linux.so.2 > $out/nix-support/dynamic-linker-m32
fi
# The dynamic linker is passed in `ldflagsBefore' to allow
# explicit overrides of the dynamic linker by callers to gcc/ld
# (the *last* value counts, so ours should come first).
echo "-dynamic-linker" $dynamicLinker > $out/nix-support/libc-ldflags-before
'' else ''
echo $dynamicLinker > $out/nix-support/dynamic-linker
echo "export LD_DYLD_PATH=\"$dynamicLinker\"" >> $out/nix-support/setup-hook
'')
+ optionalString (!nativeLibc) ''
# The "-B$libc/lib/" flag is a quick hack to force gcc to link
# against the crt1.o from our own glibc, rather than the one in
# /usr/lib. (This is only an issue when using an `impure'
# compiler/linker, i.e., one that searches /usr/lib and so on.)
#
# Unfortunately, setting -B appears to override the default search
# path. Thus, the gcc-specific "../includes-fixed" directory is
# no longer searched and glibc's <limits.h> header fails to
# compile, because it uses "#include_next <limits.h>" to find the
# limits.h file in ../includes-fixed. To remedy the problem,
# another -idirafter is necessary to add that directory again.
echo "-B$libc/lib/ -idirafter $libc/include -idirafter $cc/lib/gcc/*/*/include-fixed" > $out/nix-support/libc-cflags
echo "-L$libc/lib" > $out/nix-support/libc-ldflags
echo $libc > $out/nix-support/orig-libc
''
+ (if nativeTools then ''
ccPath="${if stdenv.isDarwin then cc else nativePrefix}/bin"
ldPath="${nativePrefix}/bin"
'' else ''
echo $cc > $out/nix-support/orig-cc
# GCC shows $cc/lib in `gcc -print-search-dirs', but not
# $cc/lib64 (even though it does actually search there).
# This confuses libtool. So add it to the compiler tool search
# path explicitly.
if [ -e "$cc/lib64" -a ! -L "$cc/lib64" ]; then
ccLDFlags+=" -L$cc/lib64"
ccCFlags+=" -B$cc/lib64"
fi
ccLDFlags+=" -L$cc/lib"
${optionalString cc.langVhdl or false ''
ccLDFlags+=" -L${zlib}/lib"
''}
# Find the gcc libraries path (may work only without multilib).
${optionalString cc.langAda or false ''
basePath=`echo $cc/lib/*/*/*`
ccCFlags+=" -B$basePath -I$basePath/adainclude"
gnatCFlags="-aI$basePath/adainclude -aO$basePath/adalib"
echo "$gnatCFlags" > $out/nix-support/gnat-cflags
''}
if [ -e $ccPath/clang ]; then
# Need files like crtbegin.o from gcc
# It's unclear if these will ever be provided by an LLVM project
ccCFlags="$ccCFlags -B$basePath"
ccCFlags="$ccCFlags -isystem$cc/lib/clang/$ccVersion/include"
fi
echo "$ccLDFlags" > $out/nix-support/cc-ldflags
echo "$ccCFlags" > $out/nix-support/cc-cflags
ccPath="$cc/bin"
ldPath="$binutils/bin"
# Propagate the wrapped cc so that if you install the wrapper,
# you get tools like gcov, the manpages, etc. as well (including
# for binutils and Glibc).
echo $cc $binutils $libc > $out/nix-support/propagated-user-env-packages
echo ${toString extraPackages} > $out/nix-support/propagated-native-build-inputs
''
+ optionalString (stdenv.isSunOS && nativePrefix != "") ''
# Solaris needs an additional ld wrapper.
ldPath="${nativePrefix}/bin"
ld="$out/bin/ld-solaris"
wrap ld-solaris ${./ld-solaris-wrapper.sh}
'')
+ ''
# Create a symlink to as (the assembler). This is useful when a
# cc-wrapper is installed in a user environment, as it ensures that
# the right assembler is called.
if [ -e $ldPath/as ]; then
ln -s $ldPath/as $out/bin/as
fi
wrap ld ${./ld-wrapper.sh} ''${ld:-$ldPath/ld}
if [ -e $binutils/bin/ld.gold ]; then
wrap ld.gold ${./ld-wrapper.sh} $binutils/bin/ld.gold
fi
if [ -e $binutils/bin/ld.bfd ]; then
wrap ld.bfd ${./ld-wrapper.sh} $binutils/bin/ld.bfd
fi
export real_cc=cc
export real_cxx=c++
export default_cxx_stdlib_compile="${
if stdenv.isLinux && !(cc.isGNU or false)
then "-isystem $(echo -n ${cc.gcc}/include/c++/*) -isystem $(echo -n ${cc.gcc}/include/c++/*)/$(${cc.gcc}/bin/gcc -dumpmachine)"
else ""
}"
if [ -e $ccPath/gcc ]; then
wrap gcc ${./cc-wrapper.sh} $ccPath/gcc
ln -s gcc $out/bin/cc
export real_cc=gcc
export real_cxx=g++
elif [ -a $ccPath/icc ]; then
wrap icc ${./cc-wrapper.sh} $ccPath/icc
ln -s icc $out/bin/cc
export real_cc=icc
elif [ -e $ccPath/clang ]; then
wrap clang ${./cc-wrapper.sh} $ccPath/clang
ln -s clang $out/bin/cc
fi
if [ -e $ccPath/g++ ]; then
wrap g++ ${./cc-wrapper.sh} $ccPath/g++
ln -s g++ $out/bin/c++
elif [ -e $ccPath/icpc ]; then
wrap icpc ${./cc-wrapper.sh} $ccPath/icpc
ln -s icpc $out/bin/c++
elif [ -e $ccPath/clang++ ]; then
wrap clang++ ${./cc-wrapper.sh} $ccPath/clang++
ln -s clang++ $out/bin/c++
fi
if [ -e $ccPath/cpp ]; then
wrap cpp ${./cc-wrapper.sh} $ccPath/cpp
fi
''
+ optionalString cc.langFortran or false ''
if [ -e $ccPath/gfortran ]; then
wrap gfortran ${./cc-wrapper.sh} $ccPath/gfortran
ln -sv gfortran $out/bin/g77
ln -sv gfortran $out/bin/f77
elif [ -e $ccPath/ifort ]; then
wrap ifort ${./cc-wrapper.sh} $ccPath/ifort
ln -sv ifort $out/bin/gfortran
ln -sv ifort $out/bin/g77
ln -sv ifort $out/bin/f77
fi
''
+ optionalString cc.langJava or false ''
wrap gcj ${./cc-wrapper.sh} $ccPath/gcj
''
+ optionalString cc.langGo or false ''
wrap gccgo ${./cc-wrapper.sh} $ccPath/gccgo
''
+ optionalString cc.langAda or false ''
wrap gnatgcc ${./cc-wrapper.sh} $ccPath/gnatgcc
wrap gnatmake ${./gnat-wrapper.sh} $ccPath/gnatmake
wrap gnatbind ${./gnat-wrapper.sh} $ccPath/gnatbind
wrap gnatlink ${./gnatlink-wrapper.sh} $ccPath/gnatlink
''
+ optionalString cc.langVhdl or false ''
ln -s $ccPath/ghdl $out/bin/ghdl
''
+ ''
substituteAll ${./setup-hook.sh} $out/nix-support/setup-hook.tmp
cat $out/nix-support/setup-hook.tmp >> $out/nix-support/setup-hook
rm $out/nix-support/setup-hook.tmp
substituteAll ${./add-flags} $out/nix-support/add-flags.sh
cp -p ${./utils.sh} $out/nix-support/utils.sh
''
+ extraBuildCommands;
crossAttrs = {};
# crossAttrs = {
# shell = shell.crossDrv + shell.crossDrv.shellPath;
# libc = stdenv.ccCross.libc;
# coreutils = coreutils.crossDrv;
# binutils = binutils.crossDrv;
# cc = cc.crossDrv;
# #
# # This is not the best way to do this. I think the reference should be
# # the style in the gcc-cross-wrapper, but to keep a stable stdenv now I
# # do this sufficient if/else.
# dynamicLinker =
# (if stdenv.cross.arch == "arm" then "ld-linux.so.3" else
# if stdenv.cross.arch == "mips" then "ld.so.1" else
# if stdenv.lib.hasSuffix "pc-gnu" stdenv.cross.config then "ld.so.1" else
# abort "don't know the name of the dynamic linker for this platform");
# };
# The dynamic linker has different names on different Linux platforms.
dynamicLinker =
if !nativeLibc then
(if stdenv.system == "i686-linux" then "ld-linux.so.2" else
if stdenv.system == "x86_64-linux" then "ld-linux-x86-64.so.2" else
if stdenv.system == "powerpc64-linux" then "ld64.so.1" else
# ARM with a wildcard, which can be "" or "-armhf".
if stdenv.isArm then "ld-linux*.so.3" else
if stdenv.system == "powerpc-linux" then "ld.so.1" else
if stdenv.system == "mips64el-linux" then "ld.so.1" else
if stdenv.system == "x86_64-darwin" then "/usr/lib/dyld" else
abort "Don't know the name of the dynamic linker for this platform.")
else "";
meta =
let cc_ = if cc != null then cc else {}; in
(if cc_ ? meta then removeAttrs cc.meta ["priority"] else {}) //
{ description =
stdenv.lib.attrByPath ["meta" "description"] "System C compiler" cc_
+ " (wrapper script)";
};
}

View File

@@ -1,103 +0,0 @@
#! @shell@ -e
if [ -n "$NIX_GNAT_WRAPPER_START_HOOK" ]; then
source "$NIX_GNAT_WRAPPER_START_HOOK"
fi
if [ -z "$NIX_GNAT_WRAPPER_FLAGS_SET" ]; then
source @out@/nix-support/add-flags.sh
fi
source @out@/nix-support/utils.sh
# Figure out if linker flags should be passed. GCC prints annoying
# warnings when they are not needed.
dontLink=0
getVersion=0
nonFlagArgs=0
for i in "$@"; do
if [ "$i" = -c ]; then
dontLink=1
elif [ "$i" = -M ]; then
dontLink=1
elif [ "${i:0:1}" != - ]; then
nonFlagArgs=1
elif [ "$i" = -m32 ]; then
if [ -e @out@/nix-support/dynamic-linker-m32 ]; then
NIX_LDFLAGS="$NIX_LDFLAGS -dynamic-linker $(cat @out@/nix-support/dynamic-linker-m32)"
fi
fi
done
# If we pass a flag like -Wl, then gcc will call the linker unless it
# can figure out that it has to do something else (e.g., because of a
# "-c" flag). So if no non-flag arguments are given, don't pass any
# linker flags. This catches cases like "gcc" (should just print
# "gcc: no input files") and "gcc -v" (should print the version).
if [ "$nonFlagArgs" = 0 ]; then
dontLink=1
fi
# Optionally filter out paths not referring to the store.
params=("$@")
if [ "$NIX_ENFORCE_PURITY" = 1 -a -n "$NIX_STORE" ]; then
rest=()
n=0
while [ $n -lt ${#params[*]} ]; do
p=${params[n]}
p2=${params[$((n+1))]}
if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
skip $p
elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
skip $p
elif [ "${p:0:4}" = -aI/ ] && badPath "${p:3}"; then
skip $p
elif [ "${p:0:4}" = -aO/ ] && badPath "${p:3}"; then
skip $p
else
rest=("${rest[@]}" "$p")
fi
n=$((n + 1))
done
params=("${rest[@]}")
fi
# Add the flags for the GNAT compiler proper.
extraAfter=($NIX_GNATFLAGS_COMPILE)
extraBefore=()
if [ "`basename $0`x" = "gnatmakex" ]; then
extraBefore=("--GNATBIND=@out@/bin/gnatbind --GNATLINK=@out@/bin/gnatlink ")
fi
# Add the flags that should be passed to the linker (and prevent
# `ld-wrapper' from adding NIX_LDFLAGS again).
#for i in $NIX_LDFLAGS_BEFORE; do
# extraBefore=(${extraBefore[@]} "-largs $i")
#done
# Optionally print debug info.
if [ -n "$NIX_DEBUG" ]; then
echo "original flags to @prog@:" >&2
for i in "${params[@]}"; do
echo " $i" >&2
done
echo "extraBefore flags to @prog@:" >&2
for i in ${extraBefore[@]}; do
echo " $i" >&2
done
echo "extraAfter flags to @prog@:" >&2
for i in ${extraAfter[@]}; do
echo " $i" >&2
done
fi
if [ -n "$NIX_GNAT_WRAPPER_EXEC_HOOK" ]; then
source "$NIX_GNAT_WRAPPER_EXEC_HOOK"
fi
exec @prog@ ${extraBefore[@]} "${params[@]}" ${extraAfter[@]}

View File

@@ -1,33 +0,0 @@
#! @shell@ -e
# Add the flags for the GNAT compiler proper.
extraAfter="--GCC=@out@/bin/gcc"
extraBefore=()
# Add the flags that should be passed to the linker (and prevent
# `ld-wrapper' from adding NIX_LDFLAGS again).
#for i in $NIX_LDFLAGS_BEFORE; do
# extraBefore=(${extraBefore[@]} "-largs $i")
#done
# Optionally print debug info.
if [ -n "$NIX_DEBUG" ]; then
echo "original flags to @prog@:" >&2
for i in "$@"; do
echo " $i" >&2
done
echo "extraBefore flags to @prog@:" >&2
for i in ${extraBefore[@]}; do
echo " $i" >&2
done
echo "extraAfter flags to @prog@:" >&2
for i in ${extraAfter[@]}; do
echo " $i" >&2
done
fi
if [ -n "$NIX_GNAT_WRAPPER_EXEC_HOOK" ]; then
source "$NIX_GNAT_WRAPPER_EXEC_HOOK"
fi
exec @prog@ ${extraBefore[@]} "$@" ${extraAfter[@]}

View File

@@ -1,40 +0,0 @@
#!/bin/bash
set -e
set -u
# I've also tried adding -z direct and -z lazyload, but it gave too many problems with C++ exceptions :'(
# Also made sure libgcc would not be lazy-loaded, as suggested here: https://www.illumos.org/issues/2534#note-3
# but still no success.
cmd="@prog@ -z ignore"
args=("$@");
# This loop makes sure all -L arguments are before -l arguments, or ld may complain it cannot find a library.
# GNU binutils does not have this problem:
# http://stackoverflow.com/questions/5817269/does-the-order-of-l-and-l-options-in-the-gnu-linker-matter
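# Illustration only (not part of the removed file): the two passes below turn, e.g.,
#   foo.o -lbar -L/opt/lib
# into
#   @prog@ -z ignore -L/opt/lib foo.o -lbar
# so every -L option precedes every object file and -l option.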
i=0;
while [[ $i -lt $# ]]; do
case "${args[$i]}" in
-L) cmd="$cmd ${args[$i]} ${args[($i+1)]}"; i=($i+1); ;;
-L*) cmd="$cmd ${args[$i]}" ;;
*) ;;
esac
i=($i+1);
done
i=0;
while [[ $i -lt $# ]]; do
case "${args[$i]}" in
-L) i=($i+1); ;;
-L*) ;;
*) cmd="$cmd ${args[$i]}" ;;
esac
i=($i+1);
done
# Trace:
set -x
exec $cmd
exit 0

View File

@@ -1,166 +0,0 @@
#! @shell@ -e
if [ -n "$NIX_LD_WRAPPER_START_HOOK" ]; then
source "$NIX_LD_WRAPPER_START_HOOK"
fi
if [ -z "$NIX_CC_WRAPPER_FLAGS_SET" ]; then
source @out@/nix-support/add-flags.sh
fi
source @out@/nix-support/utils.sh
# Optionally filter out paths not referring to the store.
params=("$@")
if [ "$NIX_ENFORCE_PURITY" = 1 -a -n "$NIX_STORE" \
-a \( -z "$NIX_IGNORE_LD_THROUGH_GCC" -o -z "$NIX_LDFLAGS_SET" \) ]; then
rest=()
n=0
while [ $n -lt ${#params[*]} ]; do
p=${params[n]}
p2=${params[$((n+1))]}
if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
skip $p
elif [ "$p" = -L ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
elif [ "$p" = -rpath ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
elif [ "$p" = -dynamic-linker ] && badPath "$p2"; then
n=$((n + 1)); skip $p2
elif [ "${p:0:1}" = / ] && badPath "$p"; then
# We cannot skip this; barf.
echo "impure path \`$p' used in link" >&2
exit 1
elif [ "${p:0:9}" = --sysroot ]; then
# Our ld is not built with sysroot support (Can we fix that?)
:
else
rest=("${rest[@]}" "$p")
fi
n=$((n + 1))
done
params=("${rest[@]}")
fi
extra=()
extraBefore=()
if [ -z "$NIX_LDFLAGS_SET" ]; then
extra+=($NIX_LDFLAGS)
extraBefore+=($NIX_LDFLAGS_BEFORE)
fi
extra+=($NIX_LDFLAGS_AFTER)
# Add all used dynamic libraries to the rpath.
if [ "$NIX_DONT_SET_RPATH" != 1 ]; then
libPath=""
addToLibPath() {
local path="$1"
if [ "${path:0:1}" != / ]; then return 0; fi
case "$path" in
*..*|*./*|*/.*|*//*)
local path2
if path2=$(readlink -f "$path"); then
path="$path2"
fi
;;
esac
case $libPath in
*\ $path\ *) return 0 ;;
esac
libPath="$libPath $path "
}
addToRPath() {
# If the path is not in the store, don't add it to the rpath.
# This typically happens for libraries in /tmp that are later
# copied to $out/lib. If not, we're screwed.
if [ "${1:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then return 0; fi
case $rpath in
*\ $1\ *) return 0 ;;
esac
rpath="$rpath $1 "
}
libs=""
addToLibs() {
libs="$libs $1"
}
rpath=""
# First, find all -L... switches.
allParams=("${params[@]}" ${extra[@]})
n=0
while [ $n -lt ${#allParams[*]} ]; do
p=${allParams[n]}
p2=${allParams[$((n+1))]}
if [ "${p:0:3}" = -L/ ]; then
addToLibPath ${p:2}
elif [ "$p" = -L ]; then
addToLibPath ${p2}
n=$((n + 1))
elif [ "$p" = -l ]; then
addToLibs ${p2}
n=$((n + 1))
elif [ "${p:0:2}" = -l ]; then
addToLibs ${p:2}
elif [ "$p" = -dynamic-linker ]; then
# Ignore the dynamic linker argument, or it
# will fall through to the next 'elif'. We don't want
# the dynamic linker's directory to always end up first in the rpath.
n=$((n + 1))
elif [[ "$p" =~ ^[^-].*\.so($|\.) ]]; then
# This is a direct reference to a shared library, so add
# its directory to the rpath.
path="$(dirname "$p")";
addToRPath "${path}"
fi
n=$((n + 1))
done
# Second, for each directory in the library search path (-L...),
# see if it contains a dynamic library used by a -l... flag. If
# so, add the directory to the rpath.
# It's important to add the rpath in the order of -L..., so
# the link time chosen objects will be those of runtime linking.
for i in $libPath; do
for j in $libs; do
if [ -f "$i/lib$j.so" ]; then
addToRPath $i
break
fi
done
done
# Finally, add `-rpath' switches.
for i in $rpath; do
extra=(${extra[@]} -rpath $i)
done
fi
# Optionally print debug info.
if [ -n "$NIX_DEBUG" ]; then
echo "original flags to @prog@:" >&2
for i in "${params[@]}"; do
echo " $i" >&2
done
echo "extra flags to @prog@:" >&2
for i in ${extra[@]}; do
echo " $i" >&2
done
fi
if [ -n "$NIX_LD_WRAPPER_EXEC_HOOK" ]; then
source "$NIX_LD_WRAPPER_EXEC_HOOK"
fi
exec @prog@ ${extraBefore[@]} "${params[@]}" ${extra[@]}

View File

@@ -1,47 +0,0 @@
export NIX_CC=@out@
addCVars () {
if [ -d $1/include ]; then
export NIX_CFLAGS_COMPILE+=" ${ccIncludeFlag:--isystem} $1/include"
fi
if [ -d $1/lib64 -a ! -L $1/lib64 ]; then
export NIX_LDFLAGS+=" -L$1/lib64"
fi
if [ -d $1/lib ]; then
export NIX_LDFLAGS+=" -L$1/lib"
fi
if test -d $1/Library/Frameworks; then
export NIX_CFLAGS_COMPILE="$NIX_CFLAGS_COMPILE -F$1/Library/Frameworks"
fi
}
envHooks+=(addCVars)
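# Illustration only (not part of the removed file): for a dependency providing
# include/ and lib/ directories, say a hypothetical $zlib output, the hook above
# amounts to
#   NIX_CFLAGS_COMPILE+=" -isystem $zlib/include"
#   NIX_LDFLAGS+=" -L$zlib/lib"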
# Note: these come *after* $out in the PATH (see setup.sh).
if [ -n "@cc@" ]; then
addToSearchPath PATH @cc@/bin
fi
if [ -n "@binutils@" ]; then
addToSearchPath PATH @binutils@/bin
fi
if [ -n "@libc@" ]; then
addToSearchPath PATH @libc@/bin
fi
if [ -n "@coreutils@" ]; then
addToSearchPath PATH @coreutils@/bin
fi
if [ -z "$crossConfig" ]; then
export CC=@real_cc@
export CXX=@real_cxx@
else
export BUILD_CC=@real_cc@
export BUILD_CXX=@real_cxx@
fi

View File

@@ -1,24 +0,0 @@
skip () {
if [ -n "$NIX_DEBUG" ]; then
echo "skipping impure path $1" >&2
fi
}
# Checks whether a path is impure. E.g., `/lib/foo.so' is impure, but
# `/nix/store/.../lib/foo.so' isn't.
badPath() {
local p=$1
# Relative paths are okay (since they're presumably relative to
# the temporary build directory).
if [ "${p:0:1}" != / ]; then return 1; fi
# Otherwise, the path should refer to the store or some temporary
# directory (including the build directory).
test \
"$p" != "/dev/null" -a \
"${p:0:${#NIX_STORE}}" != "$NIX_STORE" -a \
"${p:0:4}" != "/tmp" -a \
"${p:0:${#NIX_BUILD_TOP}}" != "$NIX_BUILD_TOP"
}
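# Usage sketch (illustration only, not part of the removed file; assumes a normal
# Nix build where $NIX_STORE and $NIX_BUILD_TOP are set):
#   badPath /usr/lib/libfoo.so              && echo impure   # outside the store
#   badPath "$NIX_STORE/abc-foo/libfoo.so"  || echo pure     # store paths are fine
#   badPath ./local.o                       || echo pure     # relative paths are fine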

View File

@@ -1,57 +0,0 @@
# N.B. It may be a surprise that the derivation-specific variables are exported,
# since this is just sourced by the wrapped binaries---the end consumers. This
# is because one wrapper binary may invoke another (e.g. cc invoking ld). In
# that case, it is cheaper/better to not repeat this step and let the forked
# wrapped binary just inherit the work of the forker's wrapper script.
var_templates_list=(
NIX+CFLAGS_COMPILE
NIX+CFLAGS_COMPILE_BEFORE
NIX+CFLAGS_LINK
NIX+CXXSTDLIB_COMPILE
NIX+CXXSTDLIB_LINK
NIX+GNATFLAGS_COMPILE
)
var_templates_bool=(
NIX+ENFORCE_NO_NATIVE
)
accumulateRoles
# We need to mangle names for hygiene, but also take parameters/overrides
# from the environment.
for var in "${var_templates_list[@]}"; do
mangleVarList "$var" ${role_infixes[@]+"${role_infixes[@]}"}
done
for var in "${var_templates_bool[@]}"; do
mangleVarBool "$var" ${role_infixes[@]+"${role_infixes[@]}"}
done
# `-B@out@/bin' forces cc to use ld-wrapper.sh when calling ld.
NIX_@infixSalt@_CFLAGS_COMPILE="-B@out@/bin/ $NIX_@infixSalt@_CFLAGS_COMPILE"
# Export and assign separately in order that a failing $(..) will fail
# the script.
if [ -e @out@/nix-support/libc-cflags ]; then
NIX_@infixSalt@_CFLAGS_COMPILE="$(< @out@/nix-support/libc-cflags) $NIX_@infixSalt@_CFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/cc-cflags ]; then
NIX_@infixSalt@_CFLAGS_COMPILE="$(< @out@/nix-support/cc-cflags) $NIX_@infixSalt@_CFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/gnat-cflags ]; then
NIX_@infixSalt@_GNATFLAGS_COMPILE="$(< @out@/nix-support/gnat-cflags) $NIX_@infixSalt@_GNATFLAGS_COMPILE"
fi
if [ -e @out@/nix-support/cc-ldflags ]; then
NIX_@infixSalt@_LDFLAGS+=" $(< @out@/nix-support/cc-ldflags)"
fi
if [ -e @out@/nix-support/cc-cflags-before ]; then
NIX_@infixSalt@_CFLAGS_COMPILE_BEFORE="$(< @out@/nix-support/cc-cflags-before) $NIX_@infixSalt@_CFLAGS_COMPILE_BEFORE"
fi
# That way forked processes will not extend these environment variables again.
export NIX_CC_WRAPPER_@infixSalt@_FLAGS_SET=1

View File

@@ -1,72 +0,0 @@
declare -a hardeningCFlags=()
declare -A hardeningEnableMap=()
# Intentionally word-split in case 'NIX_HARDENING_ENABLE' is defined in Nix. The
# array expansion also prevents undefined variables from causing trouble with
# `set -u`.
for flag in ${NIX_@infixSalt@_HARDENING_ENABLE-}; do
hardeningEnableMap["$flag"]=1
done
# Remove unsupported flags.
for flag in @hardening_unsupported_flags@; do
unset -v "hardeningEnableMap[$flag]"
done
if (( "${NIX_DEBUG:-0}" >= 1 )); then
declare -a allHardeningFlags=(fortify stackprotector pie pic strictoverflow format)
declare -A hardeningDisableMap=()
# Determine which flags were effectively disabled so we can report below.
for flag in "${allHardeningFlags[@]}"; do
if [[ -z "${hardeningEnableMap[$flag]-}" ]]; then
hardeningDisableMap["$flag"]=1
fi
done
printf 'HARDENING: disabled flags:' >&2
(( "${#hardeningDisableMap[@]}" )) && printf ' %q' "${!hardeningDisableMap[@]}" >&2
echo >&2
if (( "${#hardeningEnableMap[@]}" )); then
echo 'HARDENING: Is active (not completely disabled with "all" flag)' >&2;
fi
fi
for flag in "${!hardeningEnableMap[@]}"; do
case $flag in
fortify)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling fortify >&2; fi
hardeningCFlags+=('-O2' '-D_FORTIFY_SOURCE=2')
;;
stackprotector)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling stackprotector >&2; fi
hardeningCFlags+=('-fstack-protector-strong' '--param' 'ssp-buffer-size=4')
;;
pie)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling CFlags -fPIE >&2; fi
hardeningCFlags+=('-fPIE')
if [[ ! ("$*" =~ " -shared " || "$*" =~ " -static ") ]]; then
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling LDFlags -pie >&2; fi
hardeningCFlags+=('-pie')
fi
;;
pic)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling pic >&2; fi
hardeningCFlags+=('-fPIC')
;;
strictoverflow)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling strictoverflow >&2; fi
hardeningCFlags+=('-fno-strict-overflow')
;;
format)
if (( "${NIX_DEBUG:-0}" >= 1 )); then echo HARDENING: enabling format >&2; fi
hardeningCFlags+=('-Wformat' '-Wformat-security' '-Werror=format-security')
;;
*)
# Ignore unsupported. Checked in Nix that at least *some*
# tool supports each flag.
;;
esac
done
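# Illustration only (not part of the removed file): with, say,
#   NIX_@infixSalt@_HARDENING_ENABLE="fortify pic"
# and neither flag listed in @hardening_unsupported_flags@, the loop above produces
#   hardeningCFlags=(-O2 -D_FORTIFY_SOURCE=2 -fPIC)
# (element order may differ, since associative-array keys are unordered).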

View File

@@ -1,197 +0,0 @@
#! @shell@
set -eu -o pipefail +o posix
shopt -s nullglob
if (( "${NIX_DEBUG:-0}" >= 7 )); then
set -x
fi
path_backup="$PATH"
# That @-vars are substituted separately from bash evaluation makes
# shellcheck think this, and others like it, are useless conditionals.
# shellcheck disable=SC2157
if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then
PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin"
fi
source @out@/nix-support/utils.bash
# Flirting with a layer violation here.
if [ -z "${NIX_BINTOOLS_WRAPPER_@infixSalt@_FLAGS_SET:-}" ]; then
source @bintools@/nix-support/add-flags.sh
fi
# Put this one second so libc ldflags take priority.
if [ -z "${NIX_CC_WRAPPER_@infixSalt@_FLAGS_SET:-}" ]; then
source @out@/nix-support/add-flags.sh
fi
# Parse command line options and set several variables.
# For instance, figure out if linker flags should be passed.
# GCC prints annoying warnings when they are not needed.
dontLink=0
nonFlagArgs=0
cc1=0
# shellcheck disable=SC2193
[[ "@prog@" = *++ ]] && isCpp=1 || isCpp=0
cppInclude=1
expandResponseParams "$@"
declare -i n=0
nParams=${#params[@]}
while (( "$n" < "$nParams" )); do
p=${params[n]}
p2=${params[n+1]:-} # handle `p` being last one
if [ "$p" = -c ]; then
dontLink=1
elif [ "$p" = -S ]; then
dontLink=1
elif [ "$p" = -E ]; then
dontLink=1
elif [ "$p" = -E ]; then
dontLink=1
elif [ "$p" = -M ]; then
dontLink=1
elif [ "$p" = -MM ]; then
dontLink=1
elif [[ "$p" = -x && "$p2" = *-header ]]; then
dontLink=1
elif [[ "$p" = -x && "$p2" = c++* && "$isCpp" = 0 ]]; then
isCpp=1
elif [ "$p" = -nostdlib ]; then
isCpp=-1
elif [ "$p" = -nostdinc ]; then
cppInclude=0
elif [ "$p" = -nostdinc++ ]; then
cppInclude=0
elif [[ "$p" != -?* ]]; then
# A dash alone signifies standard input; it is not a flag
nonFlagArgs=1
elif [ "$p" = -cc1 ]; then
cc1=1
fi
n+=1
done
# If we pass a flag like -Wl, then gcc will call the linker unless it
# can figure out that it has to do something else (e.g., because of a
# "-c" flag). So if no non-flag arguments are given, don't pass any
# linker flags. This catches cases like "gcc" (should just print
# "gcc: no input files") and "gcc -v" (should print the version).
if [ "$nonFlagArgs" = 0 ]; then
dontLink=1
fi
# Optionally filter out paths not referring to the store.
if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then
rest=()
nParams=${#params[@]}
declare -i n=0
while (( "$n" < "$nParams" )); do
p=${params[n]}
p2=${params[n+1]:-} # handle `p` being last one
if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
skip "${p:2}"
elif [ "$p" = -L ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
skip "${p:2}"
elif [ "$p" = -I ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "$p" = -isystem ] && badPath "$p2"; then
n+=1; skip "$p2"
else
rest+=("$p")
fi
n+=1
done
# Old bash empty array hack
params=(${rest+"${rest[@]}"})
fi
# Clear march/mtune=native -- they bring impurity.
if [ "$NIX_@infixSalt@_ENFORCE_NO_NATIVE" = 1 ]; then
rest=()
# Old bash empty array hack
for p in ${params+"${params[@]}"}; do
if [[ "$p" = -m*=native ]]; then
skip "$p"
else
rest+=("$p")
fi
done
# Old bash empty array hack
params=(${rest+"${rest[@]}"})
fi
if [[ "$isCpp" = 1 ]]; then
if [[ "$cppInclude" = 1 ]]; then
NIX_@infixSalt@_CFLAGS_COMPILE+=" ${NIX_@infixSalt@_CXXSTDLIB_COMPILE:-@default_cxx_stdlib_compile@}"
fi
NIX_@infixSalt@_CFLAGS_LINK+=" $NIX_@infixSalt@_CXXSTDLIB_LINK"
fi
source @out@/nix-support/add-hardening.sh
# Add the flags for the C compiler proper.
extraAfter=($NIX_@infixSalt@_CFLAGS_COMPILE)
extraBefore=(${hardeningCFlags[@]+"${hardeningCFlags[@]}"} $NIX_@infixSalt@_CFLAGS_COMPILE_BEFORE)
if [ "$dontLink" != 1 ]; then
# Add the flags that should only be passed to the compiler when
# linking.
extraAfter+=($NIX_@infixSalt@_CFLAGS_LINK)
# Add the flags that should be passed to the linker (and prevent
# `ld-wrapper' from adding NIX_@infixSalt@_LDFLAGS again).
for i in $NIX_@infixSalt@_LDFLAGS_BEFORE; do
extraBefore+=("-Wl,$i")
done
for i in $NIX_@infixSalt@_LDFLAGS; do
if [ "${i:0:3}" = -L/ ]; then
extraAfter+=("$i")
else
extraAfter+=("-Wl,$i")
fi
done
export NIX_@infixSalt@_LDFLAGS_SET=1
fi
# As a very special hack, if the arguments are just `-v', then don't
# add anything. This is to prevent `gcc -v' (which normally prints
# out the version number and returns exit code 0) from printing out
# `No input files specified' and returning exit code 1.
if [ "$*" = -v ]; then
extraAfter=()
extraBefore=()
fi
# clang's -cc1 mode is not compatible with most options
# that we would pass. Rather than trying to pass only
# options that would work, let's just remove all of them.
if [ "$cc1" = 1 ]; then
extraAfter=()
extraBefore=()
fi
# Optionally print debug info.
if (( "${NIX_DEBUG:-0}" >= 1 )); then
# Old bash workaround, see ld-wrapper for explanation.
echo "extra flags before to @prog@:" >&2
printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2
echo "original flags to @prog@:" >&2
printf " %q\n" ${params+"${params[@]}"} >&2
echo "extra flags after to @prog@:" >&2
printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
fi
PATH="$path_backup"
# Old bash workaround, see above.
exec @prog@ \
${extraBefore+"${extraBefore[@]}"} \
${params+"${params[@]}"} \
${extraAfter+"${extraAfter[@]}"}

View File

@@ -1,420 +0,0 @@
# The Nixpkgs CC is not directly usable, since it doesn't know where
# the C library and standard header files are. Therefore the compiler
# produced by that package cannot be installed directly in a user
# environment and used from the command line. So we use a wrapper
# script that sets up the right environment variables so that the
# compiler and the linker just "work".
{ name ? ""
, stdenvNoCC
, cc ? null, libc ? null, bintools, coreutils ? null, shell ? stdenvNoCC.shell
, zlib ? null
, nativeTools, noLibc ? false, nativeLibc, nativePrefix ? ""
, propagateDoc ? cc != null && cc ? man
, extraTools ? [], extraPackages ? [], extraBuildCommands ? ""
, isGNU ? false, isClang ? cc.isClang or false, gnugrep ? null
, buildPackages ? {}
, libcxx ? null
}:
with stdenvNoCC.lib;
assert nativeTools -> !propagateDoc && nativePrefix != "";
assert !nativeTools ->
cc != null && coreutils != null && gnugrep != null;
assert !(nativeLibc && noLibc);
assert (noLibc || nativeLibc) == (libc == null);
let
stdenv = stdenvNoCC;
inherit (stdenv) hostPlatform targetPlatform;
# Prefix for binaries. Customarily ends with a dash separator.
#
# TODO(@Ericson2314) Make unconditional, or optional but always true by
# default.
targetPrefix = stdenv.lib.optionalString (targetPlatform != hostPlatform)
(targetPlatform.config + "-");
ccVersion = stdenv.lib.getVersion cc;
ccName = stdenv.lib.removePrefix targetPrefix (stdenv.lib.getName cc);
libc_bin = if libc == null then null else getBin libc;
libc_dev = if libc == null then null else getDev libc;
libc_lib = if libc == null then null else getLib libc;
cc_solib = getLib cc
+ optionalString (targetPlatform != hostPlatform) "/${targetPlatform.config}";
# The wrapper scripts use 'cat' and 'grep', so we may need coreutils.
coreutils_bin = if nativeTools then "" else getBin coreutils;
default_cxx_stdlib_compile = if (targetPlatform.isLinux && !(cc.isGNU or false) && !nativeTools && cc ? gcc) && !(targetPlatform.useLLVM or false) then
"-isystem $(echo -n ${cc.gcc}/include/c++/*) -isystem $(echo -n ${cc.gcc}/include/c++/*)/${targetPlatform.config}"
else if targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false) && !(targetPlatform.useLLVM or false) then
"-isystem ${libcxx}/include/c++/v1"
else "";
# The "infix salt" is a arbitrary string added in the middle of env vars
# defined by cc-wrapper's hooks so that multiple cc-wrappers can be used
# without interfering. For the moment, it is defined as the target triple,
# adjusted to be a valid bash identifier. This should be considered an
# unstable implementation detail, however.
infixSalt = replaceStrings ["-" "."] ["_" "_"] targetPlatform.config;
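# Illustration only (not part of the removed file): for a target triple such as
# "x86_64-unknown-linux-gnu" the salt becomes "x86_64_unknown_linux_gnu", so this
# wrapper reads variables like NIX_x86_64_unknown_linux_gnu_CFLAGS_COMPILE.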
expand-response-params = "";
# if buildPackages.stdenv.hasCC && buildPackages.stdenv.cc != "/dev/null"
# then import ../expand-response-params { inherit (buildPackages) stdenv; }
# else "";
# older compilers (for example bootstrap's GCC 5) fail with -march=too-modern-cpu
isGccArchSupported = arch:
if cc.isGNU or false then
{ skylake = versionAtLeast ccVersion "6.0";
skylake-avx512 = versionAtLeast ccVersion "6.0";
cannonlake = versionAtLeast ccVersion "8.0";
icelake-client = versionAtLeast ccVersion "8.0";
icelake-server = versionAtLeast ccVersion "8.0";
knm = versionAtLeast ccVersion "8.0";
}.${arch} or true
else if cc.isClang or false then
{ cannonlake = versionAtLeast ccVersion "5.0";
icelake-client = versionAtLeast ccVersion "7.0";
icelake-server = versionAtLeast ccVersion "7.0";
knm = versionAtLeast ccVersion "7.0";
}.${arch} or true
else
false;
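# Illustration only (not part of the removed file): with a GNU cc at version 5.x,
# isGccArchSupported "skylake" is false (it needs >= 6.0), while an arch not in
# the table, e.g. "haswell", falls through to the `or true` default.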
in
# Ensure bintools matches
assert libc_bin == bintools.libc_bin;
assert libc_dev == bintools.libc_dev;
assert libc_lib == bintools.libc_lib;
assert nativeTools == bintools.nativeTools;
assert nativeLibc == bintools.nativeLibc;
assert nativePrefix == bintools.nativePrefix;
stdenv.mkDerivation {
pname = targetPrefix
+ (if name != "" then name else "${ccName}-wrapper");
version = if cc == null then null else ccVersion;
preferLocalBuild = true;
inherit cc libc_bin libc_dev libc_lib bintools coreutils_bin;
shell = getBin shell + shell.shellPath or "";
gnugrep_bin = if nativeTools then "" else gnugrep;
inherit targetPrefix infixSalt;
outputs = [ "out" ] ++ optionals propagateDoc [ "man" "info" ];
passthru = {
# "cc" is the generic name for a C compiler, but there is no one for package
# providing the linker and related tools. The two we use now are GNU
# Binutils, and Apple's "cctools"; "bintools" as an attempt to find an
# unused middle-ground name that evokes both.
inherit bintools;
inherit libc nativeTools nativeLibc nativePrefix isGNU isClang default_cxx_stdlib_compile;
emacsBufferSetup = pkgs: ''
; We should handle propagation here too
(mapc
(lambda (arg)
(when (file-directory-p (concat arg "/include"))
(setenv "NIX_${infixSalt}_CFLAGS_COMPILE" (concat (getenv "NIX_${infixSalt}_CFLAGS_COMPILE") " -isystem " arg "/include"))))
'(${concatStringsSep " " (map (pkg: "\"${pkg}\"") pkgs)}))
'';
};
dontBuild = true;
dontConfigure = true;
unpackPhase = ''
src=$PWD
'';
wrapper = ./cc-wrapper.sh;
installPhase =
''
mkdir -p $out/bin $out/nix-support
wrap() {
local dst="$1"
local wrapper="$2"
export prog="$3"
substituteAll "$wrapper" "$out/bin/$dst"
chmod +x "$out/bin/$dst"
}
''
+ (if nativeTools then ''
echo ${if targetPlatform.isDarwin then cc else nativePrefix} > $out/nix-support/orig-cc
ccPath="${if targetPlatform.isDarwin then cc else nativePrefix}/bin"
'' else ''
echo $cc > $out/nix-support/orig-cc
ccPath="${cc}/bin"
'')
+ ''
# Create symlinks to everything in the bintools wrapper.
for bbin in $bintools/bin/*; do
mkdir -p "$out/bin"
ln -s "$bbin" "$out/bin/$(basename $bbin)"
done
# We export environment variables pointing to the wrapped nonstandard
# cmds, lest some lousy configure script use those to guess compiler
# version.
export named_cc=${targetPrefix}cc
export named_cxx=${targetPrefix}c++
export default_cxx_stdlib_compile="${default_cxx_stdlib_compile}"
if [ -e $ccPath/${targetPrefix}gcc ]; then
wrap ${targetPrefix}gcc $wrapper $ccPath/${targetPrefix}gcc
ln -s ${targetPrefix}gcc $out/bin/${targetPrefix}cc
export named_cc=${targetPrefix}gcc
export named_cxx=${targetPrefix}g++
elif [ -e $ccPath/clang ]; then
wrap ${targetPrefix}clang $wrapper $ccPath/clang
ln -s ${targetPrefix}clang $out/bin/${targetPrefix}cc
export named_cc=${targetPrefix}clang
export named_cxx=${targetPrefix}clang++
fi
if [ -e $ccPath/${targetPrefix}g++ ]; then
wrap ${targetPrefix}g++ $wrapper $ccPath/${targetPrefix}g++
ln -s ${targetPrefix}g++ $out/bin/${targetPrefix}c++
elif [ -e $ccPath/clang++ ]; then
wrap ${targetPrefix}clang++ $wrapper $ccPath/clang++
ln -s ${targetPrefix}clang++ $out/bin/${targetPrefix}c++
fi
if [ -e $ccPath/cpp ]; then
wrap ${targetPrefix}cpp $wrapper $ccPath/cpp
fi
''
+ optionalString cc.langAda or false ''
wrap ${targetPrefix}gnatmake ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatmake
wrap ${targetPrefix}gnatbind ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatbind
wrap ${targetPrefix}gnatlink ${./gnat-wrapper.sh} $ccPath/${targetPrefix}gnatlink
''
+ optionalString cc.langD or false ''
wrap ${targetPrefix}gdc $wrapper $ccPath/${targetPrefix}gdc
''
+ optionalString cc.langFortran or false ''
wrap ${targetPrefix}gfortran $wrapper $ccPath/${targetPrefix}gfortran
ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}g77
ln -sv ${targetPrefix}gfortran $out/bin/${targetPrefix}f77
''
+ optionalString cc.langJava or false ''
wrap ${targetPrefix}gcj $wrapper $ccPath/${targetPrefix}gcj
''
+ optionalString cc.langGo or false ''
wrap ${targetPrefix}gccgo $wrapper $ccPath/${targetPrefix}gccgo
'';
strictDeps = true;
propagatedBuildInputs = [ bintools ] ++ extraTools ++ optionals cc.langD or false [ zlib ];
depsTargetTargetPropagated = extraPackages;
wrapperName = "CC_WRAPPER";
setupHooks = [
../setup-hooks/role.bash
./setup-hook.sh
];
postFixup =
''
# Backwards compatibility for packages expecting this file, e.g. with
# `$NIX_CC/nix-support/dynamic-linker`.
#
# TODO(@Ericson2314): Remove this after stable release and force
# everyone to refer to bintools-wrapper directly.
if [[ -f "$bintools/nix-support/dynamic-linker" ]]; then
ln -s "$bintools/nix-support/dynamic-linker" "$out/nix-support"
fi
if [[ -f "$bintools/nix-support/dynamic-linker-m32" ]]; then
ln -s "$bintools/nix-support/dynamic-linker-m32" "$out/nix-support"
fi
''
+ optionalString (libc != null) (''
##
## General libc support
##
# The "-B${libc_lib}/lib/" flag is a quick hack to force gcc to link
# against the crt1.o from our own glibc, rather than the one in
# /usr/lib. (This is only an issue when using an `impure'
# compiler/linker, i.e., one that searches /usr/lib and so on.)
#
# Unfortunately, setting -B appears to override the default search
# path. Thus, the gcc-specific "../includes-fixed" directory is
# no longer searched and glibc's <limits.h> header fails to
# compile, because it uses "#include_next <limits.h>" to find the
# limits.h file in ../includes-fixed. To remedy the problem,
# another -idirafter is necessary to add that directory again.
echo "-B${libc_lib}${libc.libdir or "/lib/"}" >> $out/nix-support/libc-cflags
'' + optionalString (!(cc.langD or false)) ''
echo "-idirafter ${libc_dev}${libc.incdir or "/include"}" >> $out/nix-support/libc-cflags
'' + optionalString (isGNU && (!(cc.langD or false))) ''
for dir in "${cc}"/lib/gcc/*/*/include-fixed; do
echo '-idirafter' ''${dir} >> $out/nix-support/libc-cflags
done
'' + ''
echo "${libc_lib}" > $out/nix-support/orig-libc
echo "${libc_dev}" > $out/nix-support/orig-libc-dev
'')
+ optionalString (!nativeTools) ''
##
## Initial CFLAGS
##
# GCC shows ${cc_solib}/lib in `gcc -print-search-dirs', but not
# ${cc_solib}/lib64 (even though it does actually search there).
# This confuses libtool. So add it to the compiler tool search
# path explicitly.
if [ -e "${cc_solib}/lib64" -a ! -L "${cc_solib}/lib64" ]; then
ccLDFlags+=" -L${cc_solib}/lib64"
ccCFlags+=" -B${cc_solib}/lib64"
fi
ccLDFlags+=" -L${cc_solib}/lib"
ccCFlags+=" -B${cc_solib}/lib"
'' + optionalString cc.langAda or false ''
basePath=$(echo $cc/lib/*/*/*)
ccCFlags+=" -B$basePath -I$basePath/adainclude"
gnatCFlags="-I$basePath/adainclude -I$basePath/adalib"
echo "$gnatCFlags" > $out/nix-support/gnat-cflags
'' + ''
echo "$ccLDFlags" > $out/nix-support/cc-ldflags
echo "$ccCFlags" > $out/nix-support/cc-cflags
'' + optionalString (targetPlatform.isDarwin && (libcxx != null) && (cc.isClang or false)) ''
echo " -L${libcxx}/lib" >> $out/nix-support/cc-ldflags
'' + optionalString propagateDoc ''
##
## Man page and info support
##
ln -s ${cc.man} $man
ln -s ${cc.info} $info
'' + optionalString (cc.langD or false) ''
echo "-B${zlib}${zlib.libdir or "/lib/"}" >> $out/nix-support/libc-cflags
''
+ ''
##
## Hardening support
##
export hardening_unsupported_flags="${builtins.concatStringsSep " " (cc.hardeningUnsupportedFlags or [])}"
''
# Machine flags. These are necessary to target the platform's specific CPU and ABI.
# TODO: We should make a way to support miscellaneous machine
# flags and other gcc flags as well.
# Always add -march based on cpu in triple. Sometimes there is a
# discrepancy (x86_64 vs. x86-64), so we provide an "arch" arg in
# that case.
+ optionalString ((targetPlatform ? platform.gcc.arch) &&
isGccArchSupported targetPlatform.platform.gcc.arch) ''
echo "-march=${targetPlatform.platform.gcc.arch}" >> $out/nix-support/cc-cflags-before
''
# -mcpu is not very useful. You should use mtune and march
# instead. It's provided here for backwards compatibility.
+ optionalString (targetPlatform ? platform.gcc.cpu) ''
echo "-mcpu=${targetPlatform.platform.gcc.cpu}" >> $out/nix-support/cc-cflags-before
''
# -mfloat-abi only matters on arm32 but we set it here
# unconditionally just in case. If the abi specifically sets hard
# vs. soft floats we use it here.
+ optionalString (targetPlatform ? platform.gcc.float-abi) ''
echo "-mfloat-abi=${targetPlatform.platform.gcc.float-abi}" >> $out/nix-support/cc-cflags-before
''
+ optionalString (targetPlatform ? platform.gcc.fpu) ''
echo "-mfpu=${targetPlatform.platform.gcc.fpu}" >> $out/nix-support/cc-cflags-before
''
+ optionalString (targetPlatform ? platform.gcc.mode) ''
echo "-mmode=${targetPlatform.platform.gcc.mode}" >> $out/nix-support/cc-cflags-before
''
+ optionalString (targetPlatform ? platform.gcc.tune &&
isGccArchSupported targetPlatform.platform.gcc.tune) ''
echo "-mtune=${targetPlatform.platform.gcc.tune}" >> $out/nix-support/cc-cflags-before
''
# TODO: categorize these and figure out a better place for them
+ optionalString hostPlatform.isCygwin ''
hardening_unsupported_flags+=" pic"
'' + optionalString targetPlatform.isMinGW ''
hardening_unsupported_flags+=" stackprotector"
'' + optionalString targetPlatform.isAvr ''
hardening_unsupported_flags+=" stackprotector pic"
'' + optionalString (targetPlatform.libc == "newlib") ''
hardening_unsupported_flags+=" stackprotector fortify pie pic"
'' + optionalString targetPlatform.isNetBSD ''
hardening_unsupported_flags+=" stackprotector fortify"
'' + optionalString cc.langAda or false ''
hardening_unsupported_flags+=" stackprotector strictoverflow"
'' + optionalString cc.langD or false ''
hardening_unsupported_flags+=" format"
'' + optionalString targetPlatform.isWasm ''
hardening_unsupported_flags+=" stackprotector fortify pie pic"
''
+ optionalString (libc != null && targetPlatform.isAvr) ''
for isa in avr5 avr3 avr4 avr6 avr25 avr31 avr35 avr51 avrxmega2 avrxmega4 avrxmega5 avrxmega6 avrxmega7 tiny-stack; do
echo "-B${getLib libc}/avr/lib/$isa" >> $out/nix-support/libc-cflags
done
''
# There are a few tools (libstdcxx5, to name one) which do not work
# well with multi-line flags, so make the flags single-line again
+ ''
if [ -e "$out/nix-support/libc-cflags" ]; then
substituteInPlace "$out/nix-support/libc-cflags" --replace $'\n' ' '
fi
substituteAll ${./add-flags.sh} $out/nix-support/add-flags.sh
substituteAll ${./add-hardening.sh} $out/nix-support/add-hardening.sh
substituteAll ${../wrapper-common/utils.bash} $out/nix-support/utils.bash
##
## Extra custom steps
##
''
+ extraBuildCommands;
inherit expand-response-params;
# for substitution in utils.bash
expandResponseParams = "${expand-response-params}/bin/expand-response-params";
meta =
let cc_ = if cc != null then cc else {}; in
(if cc_ ? meta then removeAttrs cc.meta ["priority"] else {}) //
{ description =
stdenv.lib.attrByPath ["meta" "description"] "System C compiler" cc_
+ " (wrapper script)";
priority = 10;
};
}

View File

@@ -1,165 +0,0 @@
#! @shell@
set -eu -o pipefail +o posix
shopt -s nullglob
if (( "${NIX_DEBUG:-0}" >= 7 )); then
set -x
fi
path_backup="$PATH"
# That @-vars are substituted separately from bash evaluation makes
# shellcheck think this, and others like it, are useless conditionals.
# shellcheck disable=SC2157
if [[ -n "@coreutils_bin@" && -n "@gnugrep_bin@" ]]; then
PATH="@coreutils_bin@/bin:@gnugrep_bin@/bin"
fi
source @out@/nix-support/utils.bash
# Flirting with a layer violation here.
if [ -z "${NIX_BINTOOLS_WRAPPER_@infixSalt@_FLAGS_SET:-}" ]; then
source @bintools@/nix-support/add-flags.sh
fi
# Put this one second so libc ldflags take priority.
if [ -z "${NIX_CC_WRAPPER_@infixSalt@_FLAGS_SET:-}" ]; then
source @out@/nix-support/add-flags.sh
fi
# Parse command line options and set several variables.
# For instance, figure out if linker flags should be passed.
# GCC prints annoying warnings when they are not needed.
dontLink=0
nonFlagArgs=0
# shellcheck disable=SC2193
expandResponseParams "$@"
declare -i n=0
nParams=${#params[@]}
while (( "$n" < "$nParams" )); do
p=${params[n]}
p2=${params[n+1]:-} # handle `p` being last one
if [ "$p" = -c ]; then
dontLink=1
elif [ "$p" = -S ]; then
dontLink=1
elif [ "$p" = -E ]; then
dontLink=1
elif [ "$p" = -E ]; then
dontLink=1
elif [ "$p" = -M ]; then
dontLink=1
elif [ "$p" = -MM ]; then
dontLink=1
elif [[ "$p" = -x && "$p2" = *-header ]]; then
dontLink=1
elif [[ "$p" != -?* ]]; then
# A dash alone signifies standard input; it is not a flag
nonFlagArgs=1
fi
n+=1
done
# If we pass a flag like -Wl, then gcc will call the linker unless it
# can figure out that it has to do something else (e.g., because of a
# "-c" flag). So if no non-flag arguments are given, don't pass any
# linker flags. This catches cases like "gcc" (should just print
# "gcc: no input files") and "gcc -v" (should print the version).
if [ "$nonFlagArgs" = 0 ]; then
dontLink=1
fi
# Optionally filter out paths not referring to the store.
if [[ "${NIX_ENFORCE_PURITY:-}" = 1 && -n "$NIX_STORE" ]]; then
rest=()
nParams=${#params[@]}
declare -i n=0
while (( "$n" < "$nParams" )); do
p=${params[n]}
p2=${params[n+1]:-} # handle `p` being last one
if [ "${p:0:3}" = -L/ ] && badPath "${p:2}"; then
skip "${p:2}"
elif [ "$p" = -L ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "${p:0:3}" = -I/ ] && badPath "${p:2}"; then
skip "${p:2}"
elif [ "$p" = -I ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "${p:0:4}" = -aI/ ] && badPath "${p:3}"; then
skip "${p:3}"
elif [ "$p" = -aI ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "${p:0:4}" = -aO/ ] && badPath "${p:3}"; then
skip "${p:3}"
elif [ "$p" = -aO ] && badPath "$p2"; then
n+=1; skip "$p2"
elif [ "$p" = -isystem ] && badPath "$p2"; then
n+=1; skip "$p2"
else
rest+=("$p")
fi
n+=1
done
# Old bash empty array hack
params=(${rest+"${rest[@]}"})
fi
# Clear march/mtune=native -- they bring impurity.
if [ "$NIX_@infixSalt@_ENFORCE_NO_NATIVE" = 1 ]; then
rest=()
# Old bash empty array hack
for p in ${params+"${params[@]}"}; do
if [[ "$p" = -m*=native ]]; then
skip "$p"
else
rest+=("$p")
fi
done
# Old bash empty array hack
params=(${rest+"${rest[@]}"})
fi
if [ "$(basename $0)x" = "gnatmakex" ]; then
extraBefore=("--GNATBIND=@out@/bin/gnatbind" "--GNATLINK=@out@/bin/gnatlink")
extraAfter=($NIX_@infixSalt@_GNATFLAGS_COMPILE)
fi
if [ "$(basename $0)x" = "gnatbindx" ]; then
extraBefore=()
extraAfter=($NIX_@infixSalt@_GNATFLAGS_COMPILE)
fi
if [ "$(basename $0)x" = "gnatlinkx" ]; then
extraBefore=()
extraAfter=("--GCC=@out@/bin/gcc")
fi
# As a very special hack, if the arguments are just `-v', then don't
# add anything. This is to prevent `gcc -v' (which normally prints
# out the version number and returns exit code 0) from printing out
# `No input files specified' and returning exit code 1.
if [ "$*" = -v ]; then
extraAfter=()
extraBefore=()
fi
# Optionally print debug info.
if (( "${NIX_DEBUG:-0}" >= 1 )); then
# Old bash workaround, see ld-wrapper for explanation.
echo "extra flags before to @prog@:" >&2
printf " %q\n" ${extraBefore+"${extraBefore[@]}"} >&2
echo "original flags to @prog@:" >&2
printf " %q\n" ${params+"${params[@]}"} >&2
echo "extra flags after to @prog@:" >&2
printf " %q\n" ${extraAfter+"${extraAfter[@]}"} >&2
fi
PATH="$path_backup"
# Old bash workaround, see above.
exec @prog@ \
${extraBefore+"${extraBefore[@]}"} \
${params+"${params[@]}"} \
${extraAfter+"${extraAfter[@]}"}

View File

@@ -1,120 +0,0 @@
# CC Wrapper hygiene
#
# For at least cross compilation, we need to depend on multiple cc-wrappers at
# once---specifically up to one per sort of dependency. This follows from having
# different tools targeting different platforms, and different flags for those
# tools. For example:
#
# # Flags for compiling (whether or not linking) C code for the...
# NIX_BUILD_CFLAGS_COMPILE # ...build platform
# NIX_CFLAGS_COMPILE # ...host platform
# NIX_TARGET_CFLAGS_COMPILE # ...target platform
#
# Notice that these platforms are the 3 *relative* to the package using
# cc-wrapper, not absolute like `x86_64-pc-linux-gnu`.
#
# The simplest solution would be to have separate cc-wrappers per (3 intended
# use-cases * n absolute concrete platforms). For the use-case axis, we would
# @-splice in 'BUILD_' '' 'TARGET_' to use the right environment variables when
# building the cc-wrapper, and likewise prefix the binaries' names so they didn't
# clobber each other on the PATH. But the need for 3x cc-wrappers, along with
# non-standard name prefixes, is annoying and liable to break packages' build
# systems.
#
# Instead, we opt to have just one cc-wrapper per absolute platform. Matching
# convention, the binaries' names can just be prefixed with their target
# platform. On the other hand, that means packages will depend on not just
# multiple cc-wrappers, but the exact same cc-wrapper derivation multiple ways.
# That means the exact same cc-wrapper derivation must be able to avoid
# conflicting with itself, despite the fact that `setup-hook.sh`, the `addCVars`
# function, and `add-flags.sh` are all communicating with each other through
# environment variables. Yuck.
#
# The basic strategy is:
#
# - Everyone exclusively *adds information* to relative-platform-specific
# environment variables, like `NIX_TARGET_CFLAGS_COMPILE`, to communicate
# with the wrapped binaries.
#
# - The wrapped binaries will exclusively *read* cc-wrapper-derivation-specific
# environment variables distinguished with `infixSalt`, like
# `NIX_@infixSalt@_CFLAGS_COMPILE`.
#
# - `add-flags`, beyond its old task of reading extra flags stuck inside the
# cc-wrapper derivation, will convert the relative-platform-specific
# variables to cc-wrapper-derivation-specific variables. This conversion is
# the only time all but one of the cc-wrapper-derivation-specific variables
# are set.
#
# This ensures the flow of information is exclusive from
# relative-platform-specific variables to cc-wrapper-derivation-specific
# variables. This allows us to support the general case of a many-to-many relation
# between relative platforms and cc-wrapper derivations.
#
# For more details, read the individual files where the mechanisms used to
# accomplish this will be individually documented.
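# Illustration of the flow described above (not part of the removed file), assuming
# a target triple of x86_64-unknown-linux-gnu:
#   a package sets the relative variable:   NIX_TARGET_CFLAGS_COMPILE="-O2"
#   add-flags.sh folds it into the salted:  NIX_x86_64_unknown_linux_gnu_CFLAGS_COMPILE
#   the wrapped compiler reads only the salted variable when it finally runs.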
# Skip setup hook if we're neither a build-time dep, nor, temporarily, doing a
# native compile.
#
# TODO(@Ericson2314): No native exception
[[ -z ${strictDeps-} ]] || (( "$hostOffset" < 0 )) || return 0
# It's fine that any other cc-wrapper will redefine this. Bash functions close
# over no state, and there's no @-substitutions within, so any redefined
# function is guaranteed to be exactly the same.
ccWrapper_addCVars () {
# See ../setup-hooks/role.bash
local role_post role_pre
getHostRoleEnvHook
if [ -d "$1/include" ]; then
export NIX_${role_pre}CFLAGS_COMPILE+=" -isystem $1/include"
fi
if [ -d "$1/Library/Frameworks" ]; then
export NIX_${role_pre}CFLAGS_COMPILE+=" -iframework $1/Library/Frameworks"
fi
}
# See ../setup-hooks/role.bash
getTargetRole
getTargetRoleWrapper
# We use the `targetOffset` to choose the right env hook to accumulate the right
# sort of deps (those with that offset).
addEnvHooks "$targetOffset" ccWrapper_addCVars
# Note 1: these come *after* $out in the PATH (see setup.sh).
# Note 2: phase separation makes this look useless to shellcheck.
# shellcheck disable=SC2157
if [ -n "@cc@" ]; then
addToSearchPath _PATH @cc@/bin
fi
# shellcheck disable=SC2157
if [ -n "@libc_bin@" ]; then
addToSearchPath _PATH @libc_bin@/bin
fi
# shellcheck disable=SC2157
if [ -n "@coreutils_bin@" ]; then
addToSearchPath _PATH @coreutils_bin@/bin
fi
# Export tool environment variables so various build systems use the right ones.
export NIX_${role_pre}CC=@out@
export ${role_pre}CC=@named_cc@
export ${role_pre}CXX=@named_cxx@
export CC${role_post}=@named_cc@
export CXX${role_post}=@named_cxx@
# If unset, assume the default hardening flags.
: ${NIX_HARDENING_ENABLE="fortify stackprotector pic strictoverflow format relro bindnow"}
export NIX_HARDENING_ENABLE
# No local scope in sourced file
unset -v role_pre role_post

View File

@@ -1,37 +0,0 @@
# Ensure that we are always linking against “libblas.so.3” and
# “liblapack.so.3”.
auditBlas() {
local dir="$prefix"
[ -e "$dir" ] || return 0
local i
while IFS= read -r -d $'\0' i; do
if ! isELF "$i"; then continue; fi
if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then
echo "$i refers to a specific implementation of BLAS or LAPACK."
echo "This prevents users from switching BLAS/LAPACK implementations."
echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'."
exit 1
fi
(IFS=:
for dir in $(patchelf --print-rpath "$i"); do
if [ -f "$dir/libblas.so.3" ] || [ -f "$dir/libblas.so" ]; then
if [ "$dir" != "@blas@/lib" ]; then
echo "$dir is not allowed to contain a library named libblas.so.3"
exit 1
fi
fi
if [ -f "$dir/liblapack.so.3" ] || [ -f "$dir/liblapack.so" ]; then
if [ "$dir" != "@lapack@/lib" ]; then
echo "$dir is not allowed to contain a library named liblapack.so.3"
exit 1
fi
fi
done)
done < <(find "$dir" -type f -print0)
}
fixupOutputHooks+=(auditBlas)

View File

@ -1,41 +0,0 @@
# Check whether RPATHs or wrapper scripts contain references to
# $TMPDIR. This is a serious security bug because it allows any user
# to inject files into search paths of other users' processes.
#
# It might be better to have Nix scan build output for any occurrence
# of $TMPDIR (which would also be good for reproducibility), but at
# the moment that would produce too many spurious errors (e.g. debug
# info or assertion messages that refer to $TMPDIR).
fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi')
auditTmpdir() {
local dir="$1"
[ -e "$dir" ] || return 0
header "checking for references to $TMPDIR/ in $dir..."
local i
while IFS= read -r -d $'\0' i; do
if [[ "$i" =~ .build-id ]]; then continue; fi
if isELF "$i"; then
if { printf :; patchelf --print-rpath "$i"; } | grep -q -F ":$TMPDIR/"; then
echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"
exit 1
fi
fi
if isScript "$i"; then
if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then
if grep -q -F "$TMPDIR/" "$i"; then
echo "wrapper script $i contains a forbidden reference to $TMPDIR/"
exit 1
fi
fi
fi
done < <(find "$dir" -type f -print0)
stopNest
}

View File

@ -1,237 +0,0 @@
declare -a autoPatchelfLibs
gatherLibraries() {
autoPatchelfLibs+=("$1/lib")
}
addEnvHooks "$targetOffset" gatherLibraries
isExecutable() {
# For dynamically linked ELF files it would be enough to check just for the
# INTERP section. However, we won't catch statically linked executables as
# they only have an ELF type of EXEC but no INTERP.
#
# So what we do here is just check whether *either* the ELF type is EXEC
# *or* there is an INTERP section. This also catches position-independent
# executables, as they typically have an INTERP section but their ELF type
# is DYN.
isExeResult="$(LANG=C $READELF -h -l "$1" 2> /dev/null \
| grep '^ *Type: *EXEC\>\|^ *INTERP\>')"
# not using grep -q, because it can cause Broken pipe
[ -n "$isExeResult" ]
}
# We cache dependencies so that we don't need to search through all of them on
# every consecutive call to findDependency.
declare -a cachedDependencies
addToDepCache() {
local existing
for existing in "${cachedDependencies[@]}"; do
if [ "$existing" = "$1" ]; then return; fi
done
cachedDependencies+=("$1")
}
declare -gi depCacheInitialised=0
declare -gi doneRecursiveSearch=0
declare -g foundDependency
getDepsFromSo() {
ldd "$1" 2> /dev/null | sed -n -e 's/[^=]*=> *\(.\+\) \+([^)]*)$/\1/p'
}
populateCacheWithRecursiveDeps() {
local so found foundso
for so in "${cachedDependencies[@]}"; do
for found in $(getDepsFromSo "$so"); do
local libdir="${found%/*}"
local base="${found##*/}"
local soname="${base%.so*}"
for foundso in "${found%/*}/$soname".so*; do
addToDepCache "$foundso"
done
done
done
}
getSoArch() {
objdump -f "$1" | sed -ne 's/^architecture: *\([^,]\+\).*/\1/p'
}
# NOTE: If you want to use this function outside of the autoPatchelf function,
# keep in mind that the dependency cache is only valid inside the subshell
# spawned by the autoPatchelf function, so invoking this directly will possibly
# rebuild the dependency cache. See the autoPatchelf function below for more
# information.
findDependency() {
local filename="$1"
local arch="$2"
local lib dep
if [ $depCacheInitialised -eq 0 ]; then
for lib in "${autoPatchelfLibs[@]}"; do
for so in "$lib/"*.so*; do addToDepCache "$so"; done
done
depCacheInitialised=1
fi
for dep in "${cachedDependencies[@]}"; do
if [ "$filename" = "${dep##*/}" ]; then
if [ "$(getSoArch "$dep")" = "$arch" ]; then
foundDependency="$dep"
return 0
fi
fi
done
# Populate the dependency cache with recursive dependencies *only* if we
# didn't find the right dependency so far and afterwards run findDependency
# again, but this time with $doneRecursiveSearch set to 1 so that it won't
# recurse again (and thus infinitely).
if [ $doneRecursiveSearch -eq 0 ]; then
populateCacheWithRecursiveDeps
doneRecursiveSearch=1
findDependency "$filename" "$arch" || return 1
return 0
fi
return 1
}
autoPatchelfFile() {
local dep rpath="" toPatch="$1"
local interpreter="$(< "$NIX_CC/nix-support/dynamic-linker")"
if isExecutable "$toPatch"; then
patchelf --set-interpreter "$interpreter" "$toPatch"
if [ -n "$runtimeDependencies" ]; then
for dep in $runtimeDependencies; do
rpath="$rpath${rpath:+:}$dep/lib"
done
fi
fi
echo "searching for dependencies of $toPatch" >&2
# We're going to find all dependencies based on ldd output, so we need to
# clear the RPATH first.
patchelf --remove-rpath "$toPatch"
local missing="$(
ldd "$toPatch" 2> /dev/null | \
sed -n -e 's/^[\t ]*\([^ ]\+\) => not found.*/\1/p'
)"
# This ensures that we get the output of all missing dependencies instead
# of failing at the first one, because it's more useful when working on a
# new package where you don't yet know its dependencies.
local -i depNotFound=0
for dep in $missing; do
echo -n " $dep -> " >&2
if findDependency "$dep" "$(getSoArch "$toPatch")"; then
rpath="$rpath${rpath:+:}${foundDependency%/*}"
echo "found: $foundDependency" >&2
else
echo "not found!" >&2
depNotFound=1
fi
done
# This makes sure the builder fails if we didn't find a dependency, because
# the stdenv setup script is run with set -e. The actual error is emitted
# earlier in the previous loop.
[ $depNotFound -eq 0 ]
if [ -n "$rpath" ]; then
echo "setting RPATH to: $rpath" >&2
patchelf --set-rpath "$rpath" "$toPatch"
fi
}
# Can be used to manually add additional directories with shared object files
# to be included for the next autoPatchelf invocation.
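#
# A hypothetical usage sketch (the package names are invented):
#
#   postFixup = ''
#     addAutoPatchelfSearchPath "${libfoo}/lib"
#     addAutoPatchelfSearchPath --no-recurse -- "${libbar}/lib"
#   '';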
addAutoPatchelfSearchPath() {
local -a findOpts=()
# XXX: Somewhat similar to the one in the autoPatchelf function, maybe make
# it DRY someday...
while [ $# -gt 0 ]; do
case "$1" in
--) shift; break;;
--no-recurse) shift; findOpts+=("-maxdepth" 1);;
--*)
echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \
"argument: $1" >&2
return 1;;
*) break;;
esac
done
cachedDependencies+=(
$(find "$@" "${findOpts[@]}" \! -type d \
\( -name '*.so' -o -name '*.so.*' \))
)
}
autoPatchelf() {
local norecurse=
while [ $# -gt 0 ]; do
case "$1" in
--) shift; break;;
--no-recurse) shift; norecurse=1;;
--*)
echo "autoPatchelf: ERROR: Invalid command line" \
"argument: $1" >&2
return 1;;
*) break;;
esac
done
if [ $# -eq 0 ]; then
echo "autoPatchelf: No paths to patch specified." >&2
return 1
fi
echo "automatically fixing dependencies for ELF files" >&2
# Add all shared objects of the current output path to the start of
# cachedDependencies so that it's chosen first in findDependency.
addAutoPatchelfSearchPath ${norecurse:+--no-recurse} -- "$@"
# Here we actually have a subshell, which also means that
# $cachedDependencies is final at this point, so whenever we want to run
# findDependency outside of this, the dependency cache needs to be rebuilt
# from scratch, so keep this in mind if you want to run findDependency
# outside of this function.
while IFS= read -r -d $'\0' file; do
isELF "$file" || continue
segmentHeaders="$(LANG=C $READELF -l "$file")"
# Skip if the ELF file doesn't have segment headers (eg. object files).
# not using grep -q, because it can cause Broken pipe
[ -n "$(echo "$segmentHeaders" | grep '^Program Headers:')" ] || continue
if isExecutable "$file"; then
# Skip if the executable is statically linked.
[ -n "$(echo "$segmentHeaders" | grep "^ *INTERP\\>")" ] || continue
fi
autoPatchelfFile "$file"
done < <(find "$@" ${norecurse:+-maxdepth 1} -type f -print0)
}
# XXX: This should ultimately use fixupOutputHooks but we currently don't have
# a way to enforce the order. If we have $runtimeDependencies set, the setup
# hook of patchelf is going to ruin everything and strip out those additional
# RPATHs.
#
# So what we do here is basically run in postFixup and emulate the same
# behaviour as fixupOutputHooks because the setup hook for patchelf is run in
# fixupOutput and the postFixup hook runs later.
postFixupHooks+=('
if [ -z "${dontAutoPatchelf-}" ]; then
autoPatchelf -- $(for output in $outputs; do
[ -e "${!output}" ] || continue
echo "${!output}"
done)
fi
')

View File

@ -1,7 +0,0 @@
preConfigurePhases+=" autoreconfPhase"
autoreconfPhase() {
runHook preAutoreconf
autoreconf ${autoreconfFlags:---install --force --verbose}
runHook postAutoreconf
}

View File

@ -1,9 +0,0 @@
breakpointHook() {
local red='\033[0;31m'
local no_color='\033[0m'
echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}"
printf "To attach install cntr and run the following command as root:\n\n"
sh -c "echo ' cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done"
}
failureHooks+=(breakpointHook)

View File

@ -1,32 +0,0 @@
fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi')
compressManPages() {
local dir="$1"
if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]
then return
fi
echo "gzipping man pages under $dir/share/man/"
# Compress all uncompressed manpages. Don't follow symlinks, etc.
find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
| while IFS= read -r -d $'\0' f
do
if gzip -c -n "$f" > "$f".gz; then
rm "$f"
else
rm "$f".gz
fi
done
# Point symlinks to compressed manpages.
find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
| while IFS= read -r -d $'\0' f
do
local target
target="$(readlink -f "$f")"
if [ -f "$target".gz ]; then
ln -sf "$target".gz "$f".gz && rm "$f"
fi
done
}

View File

@ -1,21 +0,0 @@
# Exit with backtrace and error message
#
# Usage: die "Error message"
die() {
# Let us be a little sloppy with errors, because otherwise the final
# invocation of `caller` below will cause the script to exit.
set +e
# Print our error message
printf "\nBuilder called die: %b\n" "$*"
printf "Backtrace:\n"
# Print a backtrace.
local frame=0
while caller $frame; do
((frame++));
done
printf "\n"
exit 1
}

View File

@ -1,20 +0,0 @@
postPhases+=" cleanupBuildDir"
# Force GCC to build with coverage instrumentation. Also disable
# optimisation, since it may confuse things.
export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage"
# Get rid of everything that isn't a gcno file or a C source file.
# Also strip the `.tmp_' prefix from gcno files. (The Linux kernel
# creates these.)
cleanupBuildDir() {
if ! [ -e $out/.build ]; then return; fi
find $out/.build/ -type f -a ! \
\( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \
| xargs rm -f --
for i in $(find $out/.build/ -name ".tmp_*.gcno"); do
mv "$i" "$(echo $i | sed s/.tmp_//)"
done
}

View File

@ -1,22 +0,0 @@
addXMLCatalogs () {
local d i
# xml/dtd and xml/xsl are deprecated. Catalogs should be
# installed underneath share/xml.
for d in $1/share/xml $1/xml/dtd $1/xml/xsl; do
if [ -d $d ]; then
for i in $(find $d -name catalog.xml); do
XML_CATALOG_FILES+=" $i"
done
fi
done
}
if [ -z "${libxmlHookDone-}" ]; then
libxmlHookDone=1
# Set up XML_CATALOG_FILES. An empty initial value prevents
# xmllint and xsltproc from looking in /etc/xml/catalog.
export XML_CATALOG_FILES=''
if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi
addEnvHooks "$hostOffset" addXMLCatalogs
fi

View File

@ -1,40 +0,0 @@
# On macOS, binaries refer to dynamic library dependencies using
# either relative paths (e.g. "libicudata.dylib", searched relative to
# $DYLD_LIBRARY_PATH) or absolute paths
# (e.g. "/nix/store/.../lib/libicudata.dylib"). In Nix, the latter is
# preferred since it allows programs to just work. When linking
# against a library (e.g. "-licudata"), the linker uses the install
# name embedded in the dylib (which can be shown using "otool -D").
# Most packages create dylibs with absolute install names, but some do
# not. This setup hook fixes dylibs by setting their install names to
# their absolute path (using "install_name_tool -id"). It also
# rewrites references in other dylibs to absolute paths.
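#
# Purely as an illustration (paths invented): a freshly built
#
#   $out/lib/libfoo.dylib   with install name   libfoo.dylib
#
# gets its install name set to
#
#   /nix/store/<hash>-foo/lib/libfoo.dylib
#
# via "install_name_tool -id", and any sibling dylib that refers to the short
# name is rewritten with "install_name_tool -change" to that absolute path.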
fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix')
fixDarwinDylibNames() {
local flags=()
local old_id
for fn in "$@"; do
flags+=(-change "$(basename "$fn")" "$fn")
done
for fn in "$@"; do
if [ -L "$fn" ]; then continue; fi
echo "$fn: fixing dylib"
int_out=$(install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1)
result=$?
if [ "$result" -ne 0 ] &&
! grep -q "shared library stub file and can't be changed" <<< "$int_out"
then
echo "$int_out" >&2
exit "$result"
fi
done
}
fixDarwinDylibNamesIn() {
local dir="$1"
fixDarwinDylibNames $(find "$dir" -name "*.dylib")
}

View File

@ -1,11 +0,0 @@
unpackPhase="unpackGog"
unpackGog() {
runHook preUnpackGog
innoextract --silent --extract --exclude-temp "${src}"
find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \;
runHook postUnpackGog
}

View File

@ -1,165 +0,0 @@
#!/bin/bash
# Setup hook for the `installShellFiles` package.
#
# Example usage in a derivation:
#
# { …, installShellFiles, … }:
# stdenv.mkDerivation {
# …
# nativeBuildInputs = [ installShellFiles ];
# postInstall = ''
# installManPage share/doc/foobar.1
# installShellCompletion share/completions/foobar.{bash,fish,zsh}
# '';
# …
# }
#
# See comments on each function for more details.
# installManPage <path> [...<path>]
#
# Each argument is checked for its man section suffix and installed into the appropriate
# share/man<n>/ directory. The function returns an error if any paths don't have the man section
# suffix (with optional .gz compression).
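#
# For example (illustrative file names):
#
#   installManPage docs/foobar.1 docs/foobar.conf.5 docs/libfoobar.3
#
# puts the first two pages under share/man/man1 and share/man/man5 of the
# output named by $outputMan, while the section 3 page goes to the output
# named by $outputDevman, as implemented below.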
installManPage() {
local path
for path in "$@"; do
if (( "${NIX_DEBUG:-0}" >= 1 )); then
echo "installManPage: installing $path"
fi
if test -z "$path"; then
echo "installManPage: error: path cannot be empty" >&2
return 1
fi
local basename
basename=$(stripHash "$path") # use stripHash in case it's a nix store path
local trimmed=${basename%.gz} # don't get fooled by compressed manpages
local suffix=${trimmed##*.}
if test -z "$suffix" -o "$suffix" = "$trimmed"; then
echo "installManPage: error: path missing manpage section suffix: $path" >&2
return 1
fi
local outRoot
if test "$suffix" = 3; then
outRoot=${!outputDevman:?}
else
outRoot=${!outputMan:?}
fi
install -Dm644 -T "$path" "${outRoot}/share/man/man$suffix/$basename" || return
done
}
# installShellCompletion [--bash|--fish|--zsh] ([--name <name>] <path>)...
#
# Each path is installed into the appropriate directory for shell completions for the given shell.
# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell.
# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning
# will be logged and the command will return a non-zero status code after processing any remaining
# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is
# given).
#
# If the shell completion needs to be renamed before installing the optional `--name <name>` flag
# may be given. Any name provided with this flag only applies to the next path.
#
# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed
# such that `foobar.zsh` becomes `_foobar`.
#
# This command accepts multiple shell flags in conjunction with multiple paths if you wish to
# install them all in one command:
#
# installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar
#
# However it may be easier to read if each shell is split into its own invocation, especially when
# renaming is involved:
#
# installShellCompletion --bash --name foobar.bash share/completions.bash
# installShellCompletion --fish --name foobar.fish share/completions.fish
# installShellCompletion --zsh --name _foobar share/completions.zsh
#
# If any argument is `--` the remaining arguments will be treated as paths.
installShellCompletion() {
local shell='' name='' retval=0 parseArgs=1 arg
while { arg=$1; shift; }; do
# Parse arguments
if (( parseArgs )); then
case "$arg" in
--bash|--fish|--zsh)
shell=${arg#--}
continue;;
--name)
name=$1
shift || {
echo 'installShellCompletion: error: --name flag expected an argument' >&2
return 1
}
continue;;
--name=*)
# treat `--name=foo` the same as `--name foo`
name=${arg#--name=}
continue;;
--?*)
echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2
retval=2
continue;;
--)
# treat remaining args as paths
parseArgs=0
continue;;
esac
fi
if (( "${NIX_DEBUG:-0}" >= 1 )); then
echo "installShellCompletion: installing $arg${name:+ as $name}"
fi
# if we get here, this is a path
# Identify shell
local basename
basename=$(stripHash "$arg")
local curShell=$shell
if [[ -z "$curShell" ]]; then
# auto-detect the shell
case "$basename" in
?*.bash) curShell=bash;;
?*.fish) curShell=fish;;
?*.zsh) curShell=zsh;;
*)
if [[ "$basename" = _* && "$basename" != *.* ]]; then
# probably zsh
echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2
curShell=zsh
else
echo "installShellCompletion: warning: unknown shell for path: $arg" >&2
retval=2
continue
fi;;
esac
fi
# Identify output path
local outName sharePath
outName=${name:-$basename}
case "$curShell" in
bash) sharePath=bash-completion/completions;;
fish) sharePath=fish/vendor_completions.d;;
zsh)
sharePath=zsh/site-functions
# only apply automatic renaming if we didn't have a manual rename
if test -z "$name"; then
# convert a name like `foo.zsh` into `_foo`
outName=${outName%.zsh}
outName=_${outName#_}
fi;;
*)
# Our list of shells is out of sync with the flags we accept or extensions we detect.
echo 'installShellCompletion: internal error' >&2
return 1;;
esac
# Install file
install -Dm644 -T "$arg" "${!outputBin:?}/share/$sharePath/$outName" || return
# Clear the name, it only applies to one path
name=
done
if [[ -n "$name" ]]; then
echo 'installShellCompletion: error: --name flag given with no path' >&2
return 1
fi
return $retval
}

View File

@ -1,6 +0,0 @@
prePhases+=" moveBuildDir"
moveBuildDir() {
mkdir -p $out/.build
cd $out/.build
}

View File

@ -1,5 +0,0 @@
ld-is-cc-hook() {
LD=$CC
}
preConfigureHooks+=(ld-is-cc-hook)

View File

@ -1,25 +0,0 @@
postPhases+=" coverageReportPhase"
coverageReportPhase() {
lcov --directory . --capture --output-file app.info
set -o noglob
lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info
set +o noglob
mv app2.info app.info
mkdir -p $out/coverage
genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log
# Grab the overall coverage percentage so that Hydra can plot it over time.
mkdir -p $out/nix-support
lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then
echo "failed to get coverage statistics"
exit 1
fi
echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics
echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics
echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products
}

View File

@ -1,28 +0,0 @@
fixupOutputHooks+=(_makeSymlinksRelative)
# For every symlink in $output that refers to another file in $output
# ensure that the symlink is relative. This removes references to the output
# hash from the resulting store paths and thus the NAR files.
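# Illustration (hypothetical paths): a symlink $out/bin/foo whose target was
# recorded as the absolute path $out/lib/libfoo.sh is rewritten to the
# equivalent relative target ../lib/libfoo.sh.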
_makeSymlinksRelative() {
local symlinkTarget
if [ -n "${dontRewriteSymlinks-}" ]; then
return 0
fi
while IFS= read -r -d $'\0' f; do
symlinkTarget=$(readlink "$f")
if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then
# skip this symlink as it doesn't point to $prefix
continue
fi
if [ ! -e "$symlinkTarget" ]; then
echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"
fi
echo "rewriting symlink $f to be relative to $prefix"
ln -snrf "$symlinkTarget" "$f"
done < <(find $prefix -type l -print0)
}

View File

@ -1,146 +0,0 @@
# Assert that FILE exists and is executable
#
# assertExecutable FILE
assertExecutable() {
local file="$1"
[[ -f "$file" && -x "$file" ]] || \
die "Cannot wrap '$file' because it is not an executable file"
}
# construct an executable file that wraps the actual executable
# makeWrapper EXECUTABLE OUT_PATH ARGS
# ARGS:
# --argv0 NAME : set name of executed process to NAME
# (otherwise it's called …-wrapped)
# --set VAR VAL : add VAR with value VAL to the executable's
# environment
# --set-default VAR VAL : like --set, but only adds VAR if not already set in
# the environment
# --unset VAR : remove VAR from the environment
# --run COMMAND : run command before the executable
# --add-flags FLAGS : add FLAGS to invocation of executable
# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP
# --suffix
# --suffix-each ENV SEP VALS : like --suffix, but VALS is a list
# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES
# are read first and used as VALS
# --suffix-contents
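#
# A hypothetical invocation, only to show the flag syntax:
#
#   makeWrapper "$out/libexec/foo-real" "$out/bin/foo" \
#     --set FOO_DATADIR "$out/share/foo" \
#     --prefix PATH : "$coreutils/bin" \
#     --add-flags "--config $out/etc/foo.conf"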
makeWrapper() {
local original="$1"
local wrapper="$2"
local params varName value command separator n fileNames
local argv0 flagsBefore flags
assertExecutable "$original"
mkdir -p "$(dirname "$wrapper")"
echo "#! @shell@ -e" > "$wrapper"
params=("$@")
for ((n = 2; n < ${#params[*]}; n += 1)); do
p="${params[$n]}"
if [[ "$p" == "--set" ]]; then
varName="${params[$((n + 1))]}"
value="${params[$((n + 2))]}"
n=$((n + 2))
echo "export $varName=${value@Q}" >> "$wrapper"
elif [[ "$p" == "--set-default" ]]; then
varName="${params[$((n + 1))]}"
value="${params[$((n + 2))]}"
n=$((n + 2))
echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper"
elif [[ "$p" == "--unset" ]]; then
varName="${params[$((n + 1))]}"
n=$((n + 1))
echo "unset $varName" >> "$wrapper"
elif [[ "$p" == "--run" ]]; then
command="${params[$((n + 1))]}"
n=$((n + 1))
echo "$command" >> "$wrapper"
elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then
varName="${params[$((n + 1))]}"
separator="${params[$((n + 2))]}"
value="${params[$((n + 3))]}"
n=$((n + 3))
if test -n "$value"; then
if test "$p" = "--suffix"; then
echo "export $varName=\$$varName\${$varName:+${separator@Q}}${value@Q}" >> "$wrapper"
else
echo "export $varName=${value@Q}\${$varName:+${separator@Q}}\$$varName" >> "$wrapper"
fi
fi
elif [[ "$p" == "--suffix-each" ]]; then
varName="${params[$((n + 1))]}"
separator="${params[$((n + 2))]}"
values="${params[$((n + 3))]}"
n=$((n + 3))
for value in $values; do
echo "export $varName=\$$varName\${$varName:+$separator}${value@Q}" >> "$wrapper"
done
elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then
varName="${params[$((n + 1))]}"
separator="${params[$((n + 2))]}"
fileNames="${params[$((n + 3))]}"
n=$((n + 3))
for fileName in $fileNames; do
contents="$(cat "$fileName")"
if test "$p" = "--suffix-contents"; then
echo "export $varName=\$$varName\${$varName:+$separator}${contents@Q}" >> "$wrapper"
else
echo "export $varName=${contents@Q}\${$varName:+$separator}\$$varName" >> "$wrapper"
fi
done
elif [[ "$p" == "--add-flags" ]]; then
flags="${params[$((n + 1))]}"
n=$((n + 1))
flagsBefore="$flagsBefore $flags"
elif [[ "$p" == "--argv0" ]]; then
argv0="${params[$((n + 1))]}"
n=$((n + 1))
else
die "makeWrapper doesn't understand the arg $p"
fi
done
echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \
"$flagsBefore" '"$@"' >> "$wrapper"
chmod +x "$wrapper"
}
addSuffix() {
suffix="$1"
shift
for name in "$@"; do
echo "$name$suffix"
done
}
filterExisting() {
for fn in "$@"; do
if test -e "$fn"; then
echo "$fn"
fi
done
}
# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
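# For instance (hypothetical paths): `wrapProgram "$out/bin/foo" --prefix PATH : "$bar/bin"`
# moves $out/bin/foo to $out/bin/.foo-wrapped and installs a wrapper in its place.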
wrapProgram() {
local prog="$1"
local hidden
assertExecutable "$prog"
hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped
while [ -e "$hidden" ]; do
hidden="${hidden}_"
done
mv "$prog" "$hidden"
# Silence warning about unexpanded $0:
# shellcheck disable=SC2016
makeWrapper "$hidden" "$prog" --argv0 '$0' "${@:2}"
}

View File

@ -1,23 +0,0 @@
# This setup hook moves $out/{man,doc,info} to $out/share; moves
# $out/share/man to $man/share/man; and moves $out/share/doc to
# $man/share/doc.
preFixupHooks+=(_moveToShare)
_moveToShare() {
forceShare=${forceShare:=man doc info}
if [ -z "$forceShare" -o -z "$out" ]; then return; fi
for d in $forceShare; do
if [ -d "$out/$d" ]; then
if [ -d "$out/share/$d" ]; then
echo "both $d/ and share/$d/ exist!"
else
echo "moving $out/$d to $out/share/$d"
mkdir -p $out/share
mv $out/$d $out/share/
fi
fi
done
}

View File

@ -1,22 +0,0 @@
# This setup hook, for each output, moves everything in $output/lib64
# to $output/lib, and replaces $output/lib64 with a symlink to
# $output/lib. The rationale is that lib64 directories are unnecessary
# in Nix (since 32-bit and 64-bit builds of a package are in different
# store paths anyway).
# If the move would overwrite anything, it should fail on rmdir.
fixupOutputHooks+=(_moveLib64)
_moveLib64() {
if [ "${dontMoveLib64-}" = 1 ]; then return; fi
if [ ! -e "$prefix/lib64" -o -L "$prefix/lib64" ]; then return; fi
echo "moving $prefix/lib64/* to $prefix/lib"
mkdir -p $prefix/lib
shopt -s dotglob
for i in $prefix/lib64/*; do
mv --no-clobber "$i" $prefix/lib
done
shopt -u dotglob
rmdir $prefix/lib64
ln -s lib $prefix/lib64
}

View File

@ -1,19 +0,0 @@
# This setup hook, for each output, moves everything in $output/sbin
# to $output/bin, and replaces $output/sbin with a symlink to
# $output/bin.
fixupOutputHooks+=(_moveSbin)
_moveSbin() {
if [ "${dontMoveSbin-}" = 1 ]; then return; fi
if [ ! -e "$prefix/sbin" -o -L "$prefix/sbin" ]; then return; fi
echo "moving $prefix/sbin/* to $prefix/bin"
mkdir -p $prefix/bin
shopt -s dotglob
for i in $prefix/sbin/*; do
mv "$i" $prefix/bin
done
shopt -u dotglob
rmdir $prefix/sbin
ln -s bin $prefix/sbin
}

View File

@ -1,199 +0,0 @@
# The base package for automatic multiple-output splitting. Used in stdenv as well.
preConfigureHooks+=(_multioutConfig)
preFixupHooks+=(_multioutDocs)
preFixupHooks+=(_multioutDevs)
postFixupHooks+=(_multioutPropagateDev)
# Assign to the variable named $1 the first of the remaining arguments that names a nonempty variable
_assignFirst() {
local varName="$1"
local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name)
shift
while (( $# )); do
if [ -n "${!1-}" ]; then eval "${varName}"="$1"; return; fi
shift
done
echo "Error: _assignFirst found no valid variant!"
return 1 # none found
}
# Same as _assignFirst, but only if "$1" = ""
_overrideFirst() {
if [ -z "${!1-}" ]; then
_assignFirst "$@"
fi
}
# Setup chains of sane default values with easy overridability.
# The variables are global to be usable anywhere during the build.
# Typical usage in package is defining outputBin = "dev";
_overrideFirst outputDev "dev" "out"
_overrideFirst outputBin "bin" "out"
_overrideFirst outputInclude "$outputDev"
# so-libs are often among the main things to keep, and so go to $out
_overrideFirst outputLib "lib" "out"
_overrideFirst outputDoc "doc" "out"
_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers
# man and info pages are small and often useful to distribute with binaries
_overrideFirst outputMan "man" "$outputBin"
_overrideFirst outputDevman "devman" "devdoc" "$outputMan"
_overrideFirst outputInfo "info" "$outputBin"
# Add standard flags to put files into the desired outputs.
_multioutConfig() {
if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi;
# try to detect share/doc/${shareDocName}
# Note: sadly, $configureScript detection comes later in configurePhase,
# and reordering would cause more trouble than it is worth.
if [ -z "$shareDocName" ]; then
local confScript="$configureScript"
if [ -z "$confScript" ] && [ -x ./configure ]; then
confScript=./configure
fi
if [ -f "$confScript" ]; then
local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"
fi
# PACKAGE_TARNAME sometimes contains garbage.
if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then
shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"
fi
fi
configureFlags="\
--bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin \
--includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include \
--mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info \
--docdir=${!outputDoc}/share/doc/${shareDocName} \
--libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec \
--localedir=${!outputLib}/share/locale \
$configureFlags"
installFlags="\
pkgconfigdir=${!outputDev}/lib/pkgconfig \
m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal \
$installFlags"
}
# Add rpath prefixes to library paths, and avoid stdenv doing it for $out.
_addRpathPrefix "${!outputLib}"
NIX_NO_SELF_RPATH=1
# Move subpaths that match pattern $1 from under any output/ to the $2 output/
# Beware: only globbing patterns are accepted, e.g.: * ? {foo,bar}
# A special target "REMOVE" is allowed: moveToOutput foo REMOVE
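# For example (illustrative): `moveToOutput "share/man/man3" "$devman"` relocates
# section 3 manpages into a devman output, while `moveToOutput "lib/*.a" REMOVE`
# deletes static libraries outright.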
moveToOutput() {
local patt="$1"
local dstOut="$2"
local output
for output in $outputs; do
if [ "${!output}" = "$dstOut" ]; then continue; fi
local srcPath
for srcPath in "${!output}"/$patt; do
# apply to existing files/dirs, *including* broken symlinks
if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then continue; fi
if [ "$dstOut" = REMOVE ]; then
echo "Removing $srcPath"
rm -r "$srcPath"
else
local dstPath="$dstOut${srcPath#${!output}}"
echo "Moving $srcPath to $dstPath"
if [ -d "$dstPath" ] && [ -d "$srcPath" ]
then # attempt directory merge
# check the case of trying to move an empty directory
rmdir "$srcPath" --ignore-fail-on-non-empty
if [ -d "$srcPath" ]; then
mv -t "$dstPath" "$srcPath"/*
rmdir "$srcPath"
fi
else # usual move
mkdir -p "$(readlink -m "$dstPath/..")"
mv "$srcPath" "$dstPath"
fi
fi
# remove empty directories, printing iff at least one gets removed
local srcParent="$(readlink -m "$srcPath/..")"
if rmdir "$srcParent"; then
echo "Removing empty $srcParent/ and (possibly) its parents"
rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" \
2> /dev/null || true # doesn't ignore failure for some reason
fi
done
done
}
# Move documentation to the desired outputs.
_multioutDocs() {
local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo}
moveToOutput share/info "${!outputInfo}"
moveToOutput share/doc "${!outputDoc}"
moveToOutput share/gtk-doc "${!outputDevdoc}"
moveToOutput share/devhelp/books "${!outputDevdoc}"
# the default outputMan is in $bin
moveToOutput share/man "${!outputMan}"
moveToOutput share/man/man3 "${!outputDevman}"
}
# Move development-only stuff to the desired outputs.
_multioutDevs() {
if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi;
moveToOutput include "${!outputInclude}"
# these files are sometimes provided even without using the corresponding tool
moveToOutput lib/pkgconfig "${!outputDev}"
moveToOutput share/pkgconfig "${!outputDev}"
moveToOutput lib/cmake "${!outputDev}"
moveToOutput share/aclocal "${!outputDev}"
# don't move *.la, as libtool needs them in the directory of the library
for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do
echo "Patching '$f' includedir to output ${!outputInclude}"
sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"
done
}
# Make the "dev" propagate other outputs needed for development.
_multioutPropagateDev() {
if [ "$outputs" = "out" ]; then return; fi;
local outputFirst
for outputFirst in $outputs; do
break
done
local propagaterOutput="$outputDev"
if [ -z "$propagaterOutput" ]; then
propagaterOutput="$outputFirst"
fi
# Default value: propagate binaries, includes and libraries
if [ -z "${propagatedBuildOutputs+1}" ]; then
local po_dirty="$outputBin $outputInclude $outputLib"
set +o pipefail
propagatedBuildOutputs=`echo "$po_dirty" \
| tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \
| sort -u | tr '\n' ' ' `
set -o pipefail
fi
# The variable was either explicitly set to empty or resolved to empty above, so
if [ -z "$propagatedBuildOutputs" ]; then
return
fi
mkdir -p "${!propagaterOutput}"/nix-support
for output in $propagatedBuildOutputs; do
echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs
done
}

View File

@ -1,119 +0,0 @@
# This setup hook causes the fixup phase to rewrite all script
# interpreter file names (`#! /path') to paths found in $PATH. E.g.,
# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
# /usr/bin/env gets special treatment so that ".../bin/env python" is
# rewritten to /nix/store/<hash>/bin/python. Interpreters that are
# already in the store are left untouched.
# A script file must be marked as executable, otherwise it will not be
# considered.
fixupOutputHooks+=(patchShebangsAuto)
# Run patch shebangs on a directory or file.
# Can take multiple paths as arguments.
# patchShebangs [--build | --host] PATH...
# Flags:
# --build : Lookup commands available at build-time
# --host : Lookup commands available at runtime
# Example use cases,
# $ patchShebangs --host /nix/store/...-hello-1.0/bin
# $ patchShebangs --build configure
patchShebangs() {
local pathName
if [ "$1" = "--host" ]; then
pathName=HOST_PATH
shift
elif [ "$1" = "--build" ]; then
pathName=PATH
shift
fi
echo "patching script interpreter paths in $@"
local f
local oldPath
local newPath
local arg0
local args
local oldInterpreterLine
local newInterpreterLine
if [ $# -eq 0 ]; then
echo "No arguments supplied to patchShebangs" >&2
return 0
fi
local f
while IFS= read -r -d $'\0' f; do
isScript "$f" || continue
oldInterpreterLine=$(head -1 "$f" | tail -c+3)
read -r oldPath arg0 args <<< "$oldInterpreterLine"
if [ -z "$pathName" ]; then
if [ -n "$strictDeps" ] && [[ "$f" = "$NIX_STORE"* ]]; then
pathName=HOST_PATH
else
pathName=PATH
fi
fi
if echo "$oldPath" | grep -q "/bin/env$"; then
# Check for unsupported 'env' functionality:
# - options: something starting with a '-'
# - environment variables: foo=bar
if echo "$arg0" | grep -q -- "^-.*\|.*=.*"; then
echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" >&2
exit 1
fi
newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"
else
if [ "$oldPath" = "" ]; then
# If no interpreter is specified linux will use /bin/sh. Set
# oldpath="/bin/sh" so that we get /nix/store/.../sh.
oldPath="/bin/sh"
fi
newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"
args="$arg0 $args"
fi
# Strip trailing whitespace introduced when no arguments are present
newInterpreterLine="$(echo "$newPath $args" | sed 's/[[:space:]]*$//')"
if [ -n "$oldPath" -a "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then
if [ -n "$newPath" -a "$newPath" != "$oldPath" ]; then
echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
# escape the escape chars so that sed doesn't interpret them
escapedInterpreterLine=$(echo "$newInterpreterLine" | sed 's|\\|\\\\|g')
# Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
timestamp=$(mktemp)
touch -r "$f" "$timestamp"
sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
touch -r "$timestamp" "$f"
rm "$timestamp"
fi
fi
done < <(find "$@" -type f -perm -0100 -print0)
stopNest
}
patchShebangsAuto () {
if [ -z "${dontPatchShebangs-}" -a -e "$prefix" ]; then
# Dev output will end up being run on the build platform. An
# example case of this is sdl2-config. Otherwise, we can just
# use the runtime path (--host).
if [ "$output" != out ] && [ "$output" = "$outputDev" ]; then
patchShebangs --build "$prefix"
else
patchShebangs --host "$prefix"
fi
fi
}

View File

@ -1,22 +0,0 @@
# Clear dependency_libs in libtool files for shared libraries.
# Shared libraries already encode their dependencies with locations. .la
# files do not always encode those locations, and sometimes encode the
# locations in the wrong Nix output. .la files are not needed for shared
# libraries, but without dependency_libs they do not hurt either.
fixupOutputHooks+=(_pruneLibtoolFiles)
_pruneLibtoolFiles() {
if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then
return
fi
# Libtool uses "dlname" and "library_names" fields for shared libraries and
# the "old_library" field for static libraries. We are processing only
# those .la files that do not describe static libraries.
find "$prefix" -type f -name '*.la' \
-exec grep -q '^# Generated by .*libtool' {} \; \
-exec grep -q "^old_library=''" {} \; \
-exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \;
}

View File

@ -1,75 +0,0 @@
# Since the same derivation can be depended on in multiple ways, we need to
# accumulate *each* role (i.e. host and target platforms relative to the
# depending derivation) in which the derivation is used.
#
# The role is intended to be used as part of other variable names like
# - $NIX_${role_pre}_SOMETHING
# - $NIX_SOMETHING_${role_post}
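#
# As a sketch (values taken from the case arms below): a dependency used with
# offset 1 yields role_pre='TARGET_' and role_post='_FOR_TARGET', so hooks end
# up touching variables such as NIX_TARGET_CFLAGS_COMPILE or CC_FOR_TARGET.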
function getRole() {
case $1 in
-1)
role_pre='BUILD_'
role_post='_FOR_BUILD'
;;
0)
role_pre=''
role_post=''
;;
1)
role_pre='TARGET_'
role_post='_FOR_TARGET'
;;
*)
echo "@name@: used as improper sort of dependency" >&2
return 1
;;
esac
}
# `hostOffset` describes how the host platform of the package is slid relative
# to the depending package. `targetOffset` likewise describes the target
# platform of the package. Both are brought into scope of the setup hook defined
# for the dependency whose setup hook is being processed, relative to the package
# being built.
function getHostRole() {
getRole "$hostOffset"
}
function getTargetRole() {
getRole "$targetOffset"
}
# `depHostOffset` describes how the host platform of the dependencies are slid
# relative to the depending package. `depTargetOffset` likewise describes the
# target platform of the dependencies. Both are brought into scope of the
# environment hook defined for the dependency being applied relative to the
# package being built.
function getHostRoleEnvHook() {
getRole "$depHostOffset"
}
function getTargetRoleEnvHook() {
getRole "$depTargetOffset"
}
# This variant is intended specifically for code-producing tool wrapper scripts.
# `NIX_@wrapperName@_@infixSalt@_TARGET_*` tracks this (needs to be an exported
# env var so can't use fancier data structures).
function getTargetRoleWrapper() {
case $targetOffset in
-1)
export NIX_@wrapperName@_@infixSalt@_TARGET_BUILD=1
;;
0)
export NIX_@wrapperName@_@infixSalt@_TARGET_HOST=1
;;
1)
export NIX_@wrapperName@_@infixSalt@_TARGET_TARGET=1
;;
*)
echo "@name@: used as improper sort of dependency" >&2
return 1
;;
esac
}

View File

@ -1,37 +0,0 @@
export NIX_SET_BUILD_ID=1
export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
dontStrip=1
fixupOutputHooks+=(_separateDebugInfo)
_separateDebugInfo() {
[ -e "$prefix" ] || return 0
local dst="${debug:-$out}"
if [ "$prefix" = "$dst" ]; then return 0; fi
dst="$dst/lib/debug/.build-id"
# Find executables and dynamic libraries.
local i magic
while IFS= read -r -d $'\0' i; do
if ! isELF "$i"; then continue; fi
# Extract the Build ID. FIXME: there's probably a cleaner way.
local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
if [ "${#id}" != 40 ]; then
echo "could not find build ID of $i, skipping" >&2
continue
fi
# Extract the debug info.
header "separating debug info from $i (build ID $id)"
mkdir -p "$dst/${id:0:2}"
$OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug"
$STRIP --strip-debug "$i"
# Also create a symlink <original-name>.debug.
ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
done < <(find "$prefix" -type f -print0)
}

View File

@ -1,13 +0,0 @@
# This setup hook adds every JAR in the share/java subdirectories of
# the build inputs to $CLASSPATH.
export CLASSPATH
addPkgToClassPath () {
local jar
for jar in $1/share/java/*.jar; do
export CLASSPATH=''${CLASSPATH-}''${CLASSPATH:+:}''${jar}
done
}
addEnvHooks "$targetOffset" addPkgToClassPath

View File

@ -1,34 +0,0 @@
updateSourceDateEpoch() {
local path="$1"
# Get the last modification time of all regular files, sort them,
# and get the most recent. Maybe we should use
# https://github.com/0-wiz-0/findnewest here.
local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \
| sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1))
local time="${res[0]//\.[0-9]*/}" # remove the fraction part
local newestFile="${res[1]}"
# Update $SOURCE_DATE_EPOCH if the most recent file we found is newer.
if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then
echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"
export SOURCE_DATE_EPOCH="$time"
# Warn if the new timestamp is too close to the present. This
# may indicate that we were being applied to a file generated
# during the build, or that an unpacker didn't restore
# timestamps properly.
local now="$(date +%s)"
if [ "$time" -gt $((now - 60)) ]; then
echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"
fi
fi
}
postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)
_updateSourceDateEpochFromSourceRoot() {
if [ -n "$sourceRoot" ]; then
updateSourceDateEpoch "$sourceRoot"
fi
}

View File

@ -1,5 +0,0 @@
setupDebugInfoDirs () {
addToSearchPath NIX_DEBUG_INFO_DIRS $1/lib/debug
}
addEnvHooks "$targetOffset" setupDebugInfoDirs

View File

@ -1,88 +0,0 @@
# This setup hook modifies a Perl script so that any "-I" flags in its shebang
# line are rewritten into a "use lib ..." statement on the next line. This gets
# around a limitation in Darwin, which will not properly handle a script whose
# shebang line exceeds 511 characters.
#
# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from
# the shebang line, along with the single space that preceded it. These library
# paths are placed into a new line of the form
#
# use lib "/path/to/lib1", "/path/to/lib2";
#
# immediately following the shebang line. If a library appeared in the original
# list more than once, only its first occurrence will appear in the output
# list. In other words, the libraries are deduplicated, but the ordering of the
# first appearance of each one is preserved.
#
# Any flags other than "-I" in the shebang line are left as-is, and the
# interpreter is also left alone (although the script will abort if the
# interpreter does not seem to be either "perl" or else "env" with "perl" as
# its argument). Each line after the shebang line is left unchanged. Each file
# is modified in place.
#
# Usage:
# shortenPerlShebang SCRIPT...
shortenPerlShebang() {
while [ $# -gt 0 ]; do
_shortenPerlShebang "$1"
shift
done
}
_shortenPerlShebang() {
local program="$1"
echo "shortenPerlShebang: rewriting shebang line in $program"
if ! isScript "$program"; then
die "shortenPerlShebang: refusing to modify $program because it is not a script"
fi
local temp="$(mktemp)"
gawk '
(NR == 1) {
if (!($0 ~ /\/(perl|env +perl)\>/)) {
print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr"
exit 1
}
idx = 0
while (match($0, / -I ?([^ ]+)/, pieces)) {
matches[idx] = pieces[1]
idx++
$0 = gensub(/ -I ?[^ ]+/, "", 1, $0)
}
print $0
if (idx > 0) {
prefix = "use lib "
for (idx in matches) {
path = matches[idx]
if (!(path in seen)) {
printf "%s\"%s\"", prefix, path
seen[path] = 1
prefix = ", "
}
}
print ";"
}
}
(NR > 1 ) {
print
}
' "$program" > "$temp" || die
# Preserve the mode of the original file
cp --preserve=mode --attributes-only "$program" "$temp"
mv "$temp" "$program"
# Measure the new shebang line length and make sure it's okay. We subtract
# one to account for the trailing newline that "head" included in its
# output.
local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 ))
# Darwin is okay when the shebang line contains 511 characters, but not
# when it contains 512 characters.
if [ $new_length -ge 512 ]; then
die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!"
fi
}

View File

@ -1,57 +0,0 @@
# This setup hook strips libraries and executables in the fixup phase.
fixupOutputHooks+=(_doStrip)
_doStrip() {
# We don't bother to strip build platform code because it shouldn't make it
# to $out anyways---if it does, that's a bigger problem that a lack of
# stripping will help catch.
local -ra flags=(dontStripHost dontStripTarget)
local -ra stripCmds=(STRIP TARGET_STRIP)
# Optimization
if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then
dontStripTarget+=1
fi
local i
for i in ${!stripCmds[@]}; do
local -n flag="${flags[$i]}"
local -n stripCmd="${stripCmds[$i]}"
# `dontStrip` disables them all
if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null
then continue; fi
stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
if [ -n "$stripDebugList" ]; then
stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"
fi
stripAllList=${stripAllList:-}
if [ -n "$stripAllList" ]; then
stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"
fi
done
}
stripDirs() {
local cmd="$1"
local dirs="$2"
local stripFlags="$3"
local dirsNew=
local d
for d in ${dirs}; do
if [ -d "$prefix/$d" ]; then
dirsNew="${dirsNew} $prefix/$d "
fi
done
dirs=${dirsNew}
if [ -n "${dirs}" ]; then
header "stripping (with command $cmd and flags $stripFlags) in$dirs"
find $dirs -type f -print0 | xargs -0 ${xargsFlags:--r} $cmd $commonStripFlags $stripFlags 2>/dev/null || true
stopNest
fi
}

View File

@ -1,12 +0,0 @@
preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase"
updateAutotoolsGnuConfigScriptsPhase() {
if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi
for script in config.sub config.guess; do
for f in $(find . -type f -name "$script"); do
echo "Updating Autotools / GNU config script to a newer upstream version: $f"
cp -f "@gnu_config@/$script" "$f"
done
done
}

View File

@ -1 +0,0 @@
export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0"

View File

@ -1,19 +0,0 @@
# This setup hook validates each pkgconfig file in each output.
fixupOutputHooks+=(_validatePkgConfig)
_validatePkgConfig() {
local bail=0
for pc in $(find "$prefix" -name '*.pc'); do
# Do not fail immediately. It's nice to see all errors when
# there are multiple pkgconfig files.
if ! pkg-config --validate "$pc"; then
bail=1
fi
done
if [ $bail -eq 1 ]; then
exit 1
fi
}

View File

@ -1,45 +0,0 @@
fixupOutputHooks+=(_linkDLLs)
# For every *.{exe,dll} in $output/bin/ we try to find all (potential)
# transitive dependencies and symlink those DLLs into $output/bin
# so they are found on invocation.
# (DLLs are first searched in the directory of the running exe file.)
# The links are relative, so relocating whole /nix/store won't break them.
_linkDLLs() {
(
if [ ! -d "$prefix/bin" ]; then exit; fi
cd "$prefix/bin"
# Compose path list where DLLs should be located:
# prefix $PATH by currently-built outputs
local DLLPATH=""
local outName
for outName in $outputs; do
addToSearchPath DLLPATH "${!outName}/bin"
done
DLLPATH="$DLLPATH:$PATH"
echo DLLPATH="'$DLLPATH'"
linkCount=0
# Iterate over any DLL that we depend on.
local dll
for dll in $($OBJDUMP -p *.{exe,dll} | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do
if [ -e "./$dll" ]; then continue; fi
# Locate the DLL - it should be an *executable* file on $DLLPATH.
local dllPath="$(PATH="$DLLPATH" type -P "$dll")"
if [ -z "$dllPath" ]; then continue; fi
# That DLL might have its own (transitive) dependencies,
# so add also all DLLs from its directory to be sure.
local dllPath2
for dllPath2 in "$dllPath" "$(dirname $(readlink "$dllPath" || echo "$dllPath"))"/*.dll; do
if [ -e ./"$(basename "$dllPath2")" ]; then continue; fi
CYGWIN+=\ winsymlinks:nativestrict ln -sr "$dllPath2" .
linkCount=$(($linkCount+1))
done
done
echo "Created $linkCount DLL link(s) in $prefix/bin"
)
}

View File

@ -1,93 +0,0 @@
# shellcheck shell=bash
gappsWrapperArgs=()
find_gio_modules() {
if [ -d "$1/lib/gio/modules" ] && [ -n "$(ls -A "$1/lib/gio/modules")" ] ; then
gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$1/lib/gio/modules")
fi
}
addEnvHooks "${targetOffset:?}" find_gio_modules
gappsWrapperArgsHook() {
if [ -n "$GDK_PIXBUF_MODULE_FILE" ]; then
gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
fi
if [ -n "$XDG_ICON_DIRS" ]; then
gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$XDG_ICON_DIRS")
fi
if [ -n "$GSETTINGS_SCHEMAS_PATH" ]; then
gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
fi
# Check for prefix as well
if [ -d "${prefix:?}/share" ]; then
gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
fi
if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then
gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules")
fi
for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
if [ -n "${!v}" ]; then
gappsWrapperArgs+=(--prefix "$v" : "${!v}")
fi
done
}
preFixupPhases+=" gappsWrapperArgsHook"
wrapGApp() {
local program="$1"
shift 1
wrapProgram "$program" "${gappsWrapperArgs[@]}" "$@"
}
# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set.
wrapGAppsHook() {
# guard against running multiple times (e.g. due to propagation)
[ -z "$wrapGAppsHookHasRun" ] || return 0
wrapGAppsHookHasRun=1
if [[ -z "${dontWrapGApps:-}" ]]; then
targetDirsThatExist=()
targetDirsRealPath=()
# wrap binaries
targetDirs=("${prefix}/bin" "${prefix}/libexec")
for targetDir in "${targetDirs[@]}"; do
if [[ -d "${targetDir}" ]]; then
targetDirsThatExist+=("${targetDir}")
targetDirsRealPath+=("$(realpath "${targetDir}")/")
find "${targetDir}" -type f -executable -print0 |
while IFS= read -r -d '' file; do
echo "Wrapping program '${file}'"
wrapGApp "${file}"
done
fi
done
# wrap links to binaries that point outside targetDirs
# Note: links to binaries within targetDirs do not need
# to be wrapped as the binaries have already been wrapped
if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then
find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 |
while IFS= read -r -d '' linkPath; do
linkPathReal=$(realpath "${linkPath}")
for targetPath in "${targetDirsRealPath[@]}"; do
if [[ "$linkPathReal" == "$targetPath"* ]]; then
echo "Not wrapping link: '$linkPath' (already wrapped)"
continue 2
fi
done
echo "Wrapping link: '$linkPath'"
wrapGApp "${linkPath}"
done
fi
fi
}
fixupOutputHooks+=(wrapGAppsHook)

View File

@ -1,92 +0,0 @@
# Accumulate infixes for taking in the right input parameters with the `mangle*`
# functions below. See setup-hook for details.
accumulateRoles() {
declare -ga role_infixes=()
if [ "${NIX_@wrapperName@_@infixSalt@_TARGET_BUILD:-}" ]; then
role_infixes+=(_BUILD_)
fi
if [ "${NIX_@wrapperName@_@infixSalt@_TARGET_HOST:-}" ]; then
role_infixes+=(_)
fi
if [ "${NIX_@wrapperName@_@infixSalt@_TARGET_TARGET:-}" ]; then
role_infixes+=(_TARGET_)
fi
}
mangleVarList() {
local var="$1"
shift
local -a role_infixes=("$@")
local outputVar="${var/+/_@infixSalt@_}"
declare -gx ${outputVar}+=''
# For each role we serve, we accumulate the input parameters into our own
# cc-wrapper-derivation-specific environment variables.
for infix in "${role_infixes[@]}"; do
local inputVar="${var/+/${infix}}"
if [ -v "$inputVar" ]; then
export ${outputVar}+="${!outputVar:+ }${!inputVar}"
fi
done
}
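# For instance (a hypothetical call): `mangleVarList "NIX+CFLAGS_COMPILE" "${role_infixes[@]}"`
# folds NIX_BUILD_CFLAGS_COMPILE, NIX_CFLAGS_COMPILE and/or NIX_TARGET_CFLAGS_COMPILE
# (whichever roles this wrapper serves) into NIX_@infixSalt@_CFLAGS_COMPILE.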
mangleVarBool() {
local var="$1"
shift
local -a role_infixes=("$@")
local outputVar="${var/+/_@infixSalt@_}"
declare -gxi ${outputVar}+=0
for infix in "${role_infixes[@]}"; do
local inputVar="${var/+/${infix}}"
if [ -v "$inputVar" ]; then
# "1" in the end makes `let` return success error code when
# expression itself evaluates to zero.
# We don't use `|| true` because that would silence actual
# syntax errors from bad variable values.
let "${outputVar} |= ${!inputVar:-0}" "1"
fi
done
}
skip () {
if (( "${NIX_DEBUG:-0}" >= 1 )); then
echo "skipping impure path $1" >&2
fi
}
# Checks whether a path is impure. E.g., `/lib/foo.so' is impure, but
# `/nix/store/.../lib/foo.so' isn't.
badPath() {
local p=$1
# Relative paths are okay (since they're presumably relative to
# the temporary build directory).
if [ "${p:0:1}" != / ]; then return 1; fi
# Otherwise, the path should refer to the store or some temporary
# directory (including the build directory).
test \
"$p" != "/dev/null" -a \
"${p:0:${#NIX_STORE}}" != "$NIX_STORE" -a \
"${p:0:4}" != "/tmp" -a \
"${p:0:${#NIX_BUILD_TOP}}" != "$NIX_BUILD_TOP"
}
}
expandResponseParams() {
declare -ga params=("$@")
local arg
for arg in "$@"; do
if [[ "$arg" == @* ]]; then
# phase separation makes this look useless
# shellcheck disable=SC2157
if [ -x "@expandResponseParams@" ]; then
# params is used by caller
#shellcheck disable=SC2034
readarray -d '' params < <("@expandResponseParams@" "$@")
return 0
fi
fi
done
}
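# A usage illustration for expandResponseParams above (hypothetical values):
# given a response file args.rsp containing `-O2 -I/nix/store/<hash>-foo/include`,
# `expandResponseParams -c @args.rsp main.c` leaves the global `params` array
# holding the fully expanded argument list; if the bundled expander binary is
# missing, `params` simply keeps the original arguments.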