cpic: Compilation ok but fails to run
This commit is contained in:
37
bsc/setup-hooks/audit-blas.sh
Normal file
37
bsc/setup-hooks/audit-blas.sh
Normal file
@@ -0,0 +1,37 @@
|
||||
# Ensure that we are always linking against “libblas.so.3” and
|
||||
# “liblapack.so.3”.
|
||||
|
||||
# Ensure that we are always linking against "libblas.so.3" and
# "liblapack.so.3" rather than a concrete implementation such as MKL or
# OpenBLAS, so users can swap the BLAS/LAPACK provider.
auditBlas() {
    local dir="$prefix"
    [ -e "$dir" ] || return 0

    local i
    while IFS= read -r -d $'\0' i; do
        if ! isELF "$i"; then continue; fi

        # Reject binaries whose DT_NEEDED entries name a specific
        # BLAS/LAPACK implementation instead of the generic wrappers.
        if $OBJDUMP -p "$i" | grep 'NEEDED' | awk '{ print $2; }' | grep -q '\(libmkl_rt.so\|libopenblas.so.0\)'; then
            echo "$i refers to a specific implementation of BLAS or LAPACK."
            echo "This prevents users from switching BLAS/LAPACK implementations."
            echo "Add \`blas' or \`lapack' to buildInputs instead of \`mkl' or \`openblas'."
            exit 1
        fi

        # Inspect every RPATH entry: only @blas@/lib may provide
        # libblas.so.3 and only @lapack@/lib may provide liblapack.so.3.
        # Run in a subshell so the IFS change and the reuse of `dir` as
        # the loop variable do not leak into the caller.
        #
        # BUG FIX: the command substitution must be UNQUOTED so that the
        # colon-separated RPATH is split on the IFS=':' set above; the
        # previous quoted form produced a single word, so the per-entry
        # checks below never ran correctly on multi-entry RPATHs.
        (IFS=:
        for dir in $(patchelf --print-rpath "$i"); do
            if [ -f "$dir/libblas.so.3" ] || [ -f "$dir/libblas.so" ]; then
                if [ "$dir" != "@blas@/lib" ]; then
                    echo "$dir is not allowed to contain a library named libblas.so.3"
                    exit 1
                fi
            fi
            if [ -f "$dir/liblapack.so.3" ] || [ -f "$dir/liblapack.so" ]; then
                if [ "$dir" != "@lapack@/lib" ]; then
                    echo "$dir is not allowed to contain a library named liblapack.so.3"
                    exit 1
                fi
            fi
        done)
    done < <(find "$dir" -type f -print0)
}

fixupOutputHooks+=(auditBlas)
|
||||
41
bsc/setup-hooks/audit-tmpdir.sh
Normal file
41
bsc/setup-hooks/audit-tmpdir.sh
Normal file
@@ -0,0 +1,41 @@
|
||||
# Check whether RPATHs or wrapper scripts contain references to
|
||||
# $TMPDIR. This is a serious security bug because it allows any user
|
||||
# to inject files into search paths of other users' processes.
|
||||
#
|
||||
# It might be better to have Nix scan build output for any occurrence
|
||||
# of $TMPDIR (which would also be good for reproducibility), but at
|
||||
# the moment that would produce too many spurious errors (e.g. debug
|
||||
# info or assertion messages that refer to $TMPDIR).
|
||||
|
||||
# Audit every output unless explicitly disabled via $noAuditTmpdir.
fixupOutputHooks+=('if [[ -z "${noAuditTmpdir-}" && -e "$prefix" ]]; then auditTmpdir "$prefix"; fi')

# auditTmpdir DIR
#
# Fail the build if any ELF RPATH or wrapper script under DIR refers to
# $TMPDIR/ (see the security rationale in the header comment above).
auditTmpdir() {
    local dir="$1"
    [ -e "$dir" ] || return 0

    header "checking for references to $TMPDIR/ in $dir..."

    local i
    while IFS= read -r -d $'\0' i; do
        # Skip debug-info paths; note the unescaped '.' makes this match
        # any character followed by "build-id" — presumably intended as
        # a literal ".build-id" path component.
        if [[ "$i" =~ .build-id ]]; then continue; fi

        if isELF "$i"; then
            # Prepend ':' so a leading RPATH entry also matches ":$TMPDIR/".
            if { printf :; patchelf --print-rpath "$i"; } | grep -q -F ":$TMPDIR/"; then
                echo "RPATH of binary $i contains a forbidden reference to $TMPDIR/"
                exit 1
            fi
        fi

        if isScript "$i"; then
            # Only audit wrapper scripts, i.e. those with a ".<name>-wrapped" twin.
            if [ -e "$(dirname "$i")/.$(basename "$i")-wrapped" ]; then
                if grep -q -F "$TMPDIR/" "$i"; then
                    echo "wrapper script $i contains a forbidden reference to $TMPDIR/"
                    exit 1
                fi
            fi
        fi

    done < <(find "$dir" -type f -print0)

    stopNest
}
|
||||
237
bsc/setup-hooks/auto-patchelf.sh
Normal file
237
bsc/setup-hooks/auto-patchelf.sh
Normal file
@@ -0,0 +1,237 @@
|
||||
# Library search directories collected from the build's dependencies.
declare -a autoPatchelfLibs

# Env hook: record the lib/ directory of each dependency for later lookup.
gatherLibraries() {
    autoPatchelfLibs+=("$1/lib")
}

addEnvHooks "$targetOffset" gatherLibraries
|
||||
|
||||
# isExecutable FILE — succeed if FILE looks like an executable ELF file.
#
# For dynamically linked ELF files it would be enough to check just for the
# INTERP section. However, we won't catch statically linked executables as
# they only have an ELF type of EXEC but no INTERP.
#
# So what we do here is just check whether *either* the ELF type is EXEC
# *or* there is an INTERP section. This also catches position-independent
# executables, as they typically have an INTERP section but their ELF type
# is DYN.
isExecutable() {
    isExeResult="$(LANG=C $READELF -h -l "$1" 2> /dev/null \
        | grep '^ *Type: *EXEC\>\|^ *INTERP\>')"
    # not using grep -q, because it can cause Broken pipe
    [ -n "$isExeResult" ]
}
|
||||
|
||||
# We cache dependencies so that we don't need to search through all of them on
|
||||
# every consecutive call to findDependency.
|
||||
declare -a cachedDependencies
|
||||
|
||||
# Append $1 to the dependency cache, skipping exact duplicates.
addToDepCache() {
    local known
    for known in "${cachedDependencies[@]}"; do
        [ "$known" = "$1" ] && return
    done
    cachedDependencies+=("$1")
}
|
||||
|
||||
# Shared state for findDependency (see the cache notes above).
declare -gi depCacheInitialised=0
declare -gi doneRecursiveSearch=0
declare -g foundDependency

# Print the resolved paths of the direct dynamic dependencies of $1,
# one per line — i.e. the "/path" part of ldd's "name => /path (addr)" lines.
getDepsFromSo() {
    ldd "$1" 2> /dev/null | sed -n -e 's/[^=]*=> *\(.\+\) \+([^)]*)$/\1/p'
}
|
||||
|
||||
# Walk the ldd-reported dependencies of everything already in the cache
# and add all neighbouring SONAME variants to the cache as well.
populateCacheWithRecursiveDeps() {
    local so found foundso
    for so in "${cachedDependencies[@]}"; do
        for found in $(getDepsFromSo "$so"); do
            local libdir="${found%/*}"
            local base="${found##*/}"
            local soname="${base%.so*}"
            # Pull in every version of the library living next to it
            # (libfoo.so, libfoo.so.1, libfoo.so.1.2.3, ...).
            # FIX: use the already-computed $libdir instead of re-deriving
            # "${found%/*}" — the local was previously declared but unused.
            for foundso in "$libdir/$soname".so*; do
                addToDepCache "$foundso"
            done
        done
    done
}
|
||||
|
||||
# Print the ELF architecture of $1 (e.g. "i386:x86-64") as reported by
# objdump's "architecture:" header line.
getSoArch() {
    objdump -f "$1" | sed -ne 's/^architecture: *\([^,]\+\).*/\1/p'
}
|
||||
|
||||
# NOTE: If you want to use this function outside of the autoPatchelf function,
|
||||
# keep in mind that the dependency cache is only valid inside the subshell
|
||||
# spawned by the autoPatchelf function, so invoking this directly will possibly
|
||||
# rebuild the dependency cache. See the autoPatchelf function below for more
|
||||
# information.
|
||||
# findDependency LIBNAME ARCH
#
# Search the dependency cache for a library named LIBNAME whose ELF
# architecture equals ARCH. On success sets $foundDependency and returns 0.
findDependency() {
    local filename="$1"
    local arch="$2"
    local lib dep

    # Lazily seed the cache with all *.so* files from dependency lib dirs.
    if [ $depCacheInitialised -eq 0 ]; then
        for lib in "${autoPatchelfLibs[@]}"; do
            for so in "$lib/"*.so*; do addToDepCache "$so"; done
        done
        depCacheInitialised=1
    fi

    for dep in "${cachedDependencies[@]}"; do
        if [ "$filename" = "${dep##*/}" ]; then
            # Name matches; also require a matching architecture (both
            # 32- and 64-bit variants may be cached).
            if [ "$(getSoArch "$dep")" = "$arch" ]; then
                foundDependency="$dep"
                return 0
            fi
        fi
    done

    # Populate the dependency cache with recursive dependencies *only* if we
    # didn't find the right dependency so far and afterwards run findDependency
    # again, but this time with $doneRecursiveSearch set to 1 so that it won't
    # recurse again (and thus infinitely).
    if [ $doneRecursiveSearch -eq 0 ]; then
        populateCacheWithRecursiveDeps
        doneRecursiveSearch=1
        findDependency "$filename" "$arch" || return 1
        return 0
    fi
    return 1
}
|
||||
|
||||
# autoPatchelfFile FILE
#
# Set the ELF interpreter (for executables) and rebuild the RPATH of FILE
# so that every dependency ldd reports as missing resolves via the
# dependency cache. Fails (non-zero) if any dependency stays unresolved.
autoPatchelfFile() {
    local dep rpath="" toPatch="$1"

    local interpreter="$(< "$NIX_CC/nix-support/dynamic-linker")"
    if isExecutable "$toPatch"; then
        patchelf --set-interpreter "$interpreter" "$toPatch"
        # Seed the RPATH with the lib/ dirs of $runtimeDependencies (see
        # the comment at the bottom of this file for why this must survive
        # the patchelf setup hook).
        if [ -n "$runtimeDependencies" ]; then
            for dep in $runtimeDependencies; do
                rpath="$rpath${rpath:+:}$dep/lib"
            done
        fi
    fi

    echo "searching for dependencies of $toPatch" >&2

    # We're going to find all dependencies based on ldd output, so we need to
    # clear the RPATH first.
    patchelf --remove-rpath "$toPatch"

    # Library names ldd could not resolve, one per line.
    local missing="$(
        ldd "$toPatch" 2> /dev/null | \
            sed -n -e 's/^[\t ]*\([^ ]\+\) => not found.*/\1/p'
    )"

    # This ensures that we get the output of all missing dependencies instead
    # of failing at the first one, because it's more useful when working on a
    # new package where you don't yet know its dependencies.
    local -i depNotFound=0

    for dep in $missing; do
        echo -n " $dep -> " >&2
        if findDependency "$dep" "$(getSoArch "$toPatch")"; then
            # Add the directory (not the file) of the found library.
            rpath="$rpath${rpath:+:}${foundDependency%/*}"
            echo "found: $foundDependency" >&2
        else
            echo "not found!" >&2
            depNotFound=1
        fi
    done

    # This makes sure the builder fails if we didn't find a dependency, because
    # the stdenv setup script is run with set -e. The actual error is emitted
    # earlier in the previous loop.
    [ $depNotFound -eq 0 ]

    if [ -n "$rpath" ]; then
        echo "setting RPATH to: $rpath" >&2
        patchelf --set-rpath "$rpath" "$toPatch"
    fi
}
|
||||
|
||||
# Can be used to manually add additional directories with shared object files
# to be included for the next autoPatchelf invocation.
#
# Usage: addAutoPatchelfSearchPath [--no-recurse] [--] PATH...
addAutoPatchelfSearchPath() {
    local -a findOpts=()

    # XXX: Somewhat similar to the one in the autoPatchelf function, maybe make
    # it DRY someday...
    while [ $# -gt 0 ]; do
        case "$1" in
            --) shift; break;;
            --no-recurse) shift; findOpts+=("-maxdepth" 1);;
            --*)
                echo "addAutoPatchelfSearchPath: ERROR: Invalid command line" \
                     "argument: $1" >&2
                return 1;;
            *) break;;
        esac
    done

    # NOTE: word-splitting of the find output is relied upon here; store
    # paths are assumed to contain no whitespace.
    cachedDependencies+=(
        $(find "$@" "${findOpts[@]}" \! -type d \
               \( -name '*.so' -o -name '*.so.*' \))
    )
}
|
||||
|
||||
# autoPatchelf [--no-recurse] [--] PATH...
#
# Run autoPatchelfFile on every dynamically linked ELF file found below
# the given paths.
autoPatchelf() {
    local norecurse=

    while [ $# -gt 0 ]; do
        case "$1" in
            --) shift; break;;
            --no-recurse) shift; norecurse=1;;
            --*)
                echo "autoPatchelf: ERROR: Invalid command line" \
                     "argument: $1" >&2
                return 1;;
            *) break;;
        esac
    done

    if [ $# -eq 0 ]; then
        echo "autoPatchelf: No paths to patch specified." >&2
        return 1
    fi

    echo "automatically fixing dependencies for ELF files" >&2

    # Add all shared objects of the current output path to the start of
    # cachedDependencies so that it's choosen first in findDependency.
    addAutoPatchelfSearchPath ${norecurse:+--no-recurse} -- "$@"

    # Here we actually have a subshell, which also means that
    # $cachedDependencies is final at this point, so whenever we want to run
    # findDependency outside of this, the dependency cache needs to be rebuilt
    # from scratch, so keep this in mind if you want to run findDependency
    # outside of this function.
    while IFS= read -r -d $'\0' file; do
        isELF "$file" || continue
        segmentHeaders="$(LANG=C $READELF -l "$file")"
        # Skip if the ELF file doesn't have segment headers (eg. object files).
        # not using grep -q, because it can cause Broken pipe
        [ -n "$(echo "$segmentHeaders" | grep '^Program Headers:')" ] || continue
        if isExecutable "$file"; then
            # Skip if the executable is statically linked.
            [ -n "$(echo "$segmentHeaders" | grep "^ *INTERP\\>")" ] || continue
        fi
        autoPatchelfFile "$file"
    done < <(find "$@" ${norecurse:+-maxdepth 1} -type f -print0)
}
|
||||
|
||||
# XXX: This should ultimately use fixupOutputHooks but we currently don't have
|
||||
# a way to enforce the order. If we have $runtimeDependencies set, the setup
|
||||
# hook of patchelf is going to ruin everything and strip out those additional
|
||||
# RPATHs.
|
||||
#
|
||||
# So what we do here is basically run in postFixup and emulate the same
|
||||
# behaviour as fixupOutputHooks because the setup hook for patchelf is run in
|
||||
# fixupOutput and the postFixup hook runs later.
|
||||
postFixupHooks+=('
|
||||
if [ -z "${dontAutoPatchelf-}" ]; then
|
||||
autoPatchelf -- $(for output in $outputs; do
|
||||
[ -e "${!output}" ] || continue
|
||||
echo "${!output}"
|
||||
done)
|
||||
fi
|
||||
')
|
||||
7
bsc/setup-hooks/autoreconf.sh
Normal file
7
bsc/setup-hooks/autoreconf.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
preConfigurePhases+=" autoreconfPhase"

# Regenerate the autotools build system before the configure phase runs.
autoreconfPhase() {
    runHook preAutoreconf
    # Defaults to "--install --force --verbose"; overridable via
    # $autoreconfFlags (intentionally unquoted so it word-splits).
    autoreconf ${autoreconfFlags:---install --force --verbose}
    runHook postAutoreconf
}
|
||||
9
bsc/setup-hooks/breakpoint-hook.sh
Normal file
9
bsc/setup-hooks/breakpoint-hook.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
# Failure hook: report which phase failed, print instructions for
# attaching to the build sandbox with cntr, then block forever so the
# sandbox stays alive for inspection.
breakpointHook() {
    local red='\033[0;31m'
    local no_color='\033[0m'

    echo -e "${red}build failed in ${curPhase} with exit code ${exitCode}${no_color}"
    printf "To attach install cntr and run the following command as root:\n\n"
    # The endless sleep loop keeps the builder process (and thus the
    # sandbox) running indefinitely.
    sh -c "echo ' cntr attach -t command cntr-${out}'; while true; do sleep 99999999; done"
}
failureHooks+=(breakpointHook)
|
||||
32
bsc/setup-hooks/compress-man-pages.sh
Normal file
32
bsc/setup-hooks/compress-man-pages.sh
Normal file
@@ -0,0 +1,32 @@
|
||||
fixupOutputHooks+=('if [ -z "${dontGzipMan-}" ]; then compressManPages "$prefix"; fi')

# Gzip every uncompressed man page under DIR/share/man and re-point
# symlinked man pages at the compressed files.
compressManPages() {
    local dir="$1"

    # Skip when share/ or share/man is a symlink (e.g. into another
    # output) or when there are no man pages at all.
    if [ -L "$dir"/share ] || [ -L "$dir"/share/man ] || [ ! -d "$dir/share/man" ]
    then return
    fi
    echo "gzipping man pages under $dir/share/man/"

    # Compress all uncompressed manpages. Don't follow symlinks, etc.
    find "$dir"/share/man/ -type f -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
        | while IFS= read -r -d $'\0' f
    do
        # gzip -n omits name/timestamp for reproducible output; on failure
        # keep the original and discard the partial .gz.
        if gzip -c -n "$f" > "$f".gz; then
            rm "$f"
        else
            rm "$f".gz
        fi
    done

    # Point symlinks to compressed manpages.
    find "$dir"/share/man/ -type l -a '!' -regex '.*\.\(bz2\|gz\)$' -print0 \
        | while IFS= read -r -d $'\0' f
    do
        local target
        target="$(readlink -f "$f")"
        if [ -f "$target".gz ]; then
            ln -sf "$target".gz "$f".gz && rm "$f"
        fi
    done
}
|
||||
21
bsc/setup-hooks/die.sh
Normal file
21
bsc/setup-hooks/die.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
# Exit with backtrace and error message
#
# Usage: die "Error message"
die() {
    # Allow the diagnostics below to run even under `set -e`: the final
    # `caller` invocation returns non-zero once the stack is exhausted
    # and would otherwise abort the script mid-report.
    set +e

    printf "\nBuilder called die: %b\n" "$*"
    printf "Backtrace:\n"

    # Walk up the call stack, printing one frame per line.
    local depth=0
    while caller "$depth"; do
        depth=$((depth + 1))
    done
    printf "\n"

    exit 1
}
|
||||
20
bsc/setup-hooks/enable-coverage-instrumentation.sh
Normal file
20
bsc/setup-hooks/enable-coverage-instrumentation.sh
Normal file
@@ -0,0 +1,20 @@
|
||||
postPhases+=" cleanupBuildDir"

# Force GCC to build with coverage instrumentation. Also disable
# optimisation, since it may confuse things.
export NIX_CFLAGS_COMPILE="${NIX_CFLAGS_COMPILE:-} -O0 --coverage"

# Get rid of everything that isn't a gcno file or a C source file.
# Also strip the `.tmp_' prefix from gcno files. (The Linux kernel
# creates these.)
cleanupBuildDir() {
    if ! [ -e $out/.build ]; then return; fi

    # Delete everything except source files and coverage notes.
    find $out/.build/ -type f -a ! \
        \( -name "*.c" -o -name "*.cc" -o -name "*.cpp" -o -name "*.h" -o -name "*.hh" -o -name "*.y" -o -name "*.l" -o -name "*.gcno" \) \
        | xargs rm -f --

    # Rename `.tmp_foo.gcno' to `foo.gcno'.
    for i in $(find $out/.build/ -name ".tmp_*.gcno"); do
        mv "$i" "$(echo $i | sed s/.tmp_//)"
    done
}
|
||||
22
bsc/setup-hooks/find-xml-catalogs.sh
Normal file
22
bsc/setup-hooks/find-xml-catalogs.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
# Env hook: append every catalog.xml found under the given prefix to
# $XML_CATALOG_FILES.
addXMLCatalogs () {
    local d i
    # ‘xml/dtd’ and ‘xml/xsl’ are deprecated. Catalogs should be
    # installed underneath ‘share/xml’.
    for d in $1/share/xml $1/xml/dtd $1/xml/xsl; do
        if [ -d $d ]; then
            for i in $(find $d -name catalog.xml); do
                XML_CATALOG_FILES+=" $i"
            done
        fi
    done
}

# Guard so this initialisation runs only once even if the hook is
# sourced multiple times.
if [ -z "${libxmlHookDone-}" ]; then
    libxmlHookDone=1

    # Set up XML_CATALOG_FILES. An empty initial value prevents
    # xmllint and xsltproc from looking in /etc/xml/catalog.
    export XML_CATALOG_FILES=''
    if [ -z "$XML_CATALOG_FILES" ]; then XML_CATALOG_FILES=" "; fi
    addEnvHooks "$hostOffset" addXMLCatalogs
fi
|
||||
40
bsc/setup-hooks/fix-darwin-dylib-names.sh
Normal file
40
bsc/setup-hooks/fix-darwin-dylib-names.sh
Normal file
@@ -0,0 +1,40 @@
|
||||
# On macOS, binaries refer to dynamic library dependencies using
|
||||
# either relative paths (e.g. "libicudata.dylib", searched relative to
|
||||
# $DYLD_LIBRARY_PATH) or absolute paths
|
||||
# (e.g. "/nix/store/.../lib/libicudata.dylib"). In Nix, the latter is
|
||||
# preferred since it allows programs to just work. When linking
|
||||
# against a library (e.g. "-licudata"), the linker uses the install
|
||||
# name embedded in the dylib (which can be shown using "otool -D").
|
||||
# Most packages create dylibs with absolute install names, but some do
|
||||
# not. This setup hook fixes dylibs by setting their install names to
|
||||
# their absolute path (using "install_name_tool -id"). It also
|
||||
# rewrites references in other dylibs to absolute paths.
|
||||
|
||||
fixupOutputHooks+=('fixDarwinDylibNamesIn $prefix')
|
||||
|
||||
# fixDarwinDylibNames DYLIB...
#
# Set each dylib's install name to its absolute path and rewrite
# references to any of the given dylibs (by bare name) to absolute paths.
fixDarwinDylibNames() {
    local flags=()
    local fn int_out result

    # Map every bare dylib name to its absolute path so all references in
    # a file can be rewritten with a single install_name_tool call.
    for fn in "$@"; do
        flags+=(-change "$(basename "$fn")" "$fn")
    done

    for fn in "$@"; do
        if [ -L "$fn" ]; then continue; fi
        echo "$fn: fixing dylib"
        int_out=$(install_name_tool -id "$fn" "${flags[@]}" "$fn" 2>&1)
        result=$?
        # BUG FIX: the stub-file exemption previously grepped "$out" (the
        # Nix output path) instead of the captured tool output, so stub
        # files always aborted the build; also use -q so the matched line
        # isn't echoed.
        if [ "$result" -ne 0 ] &&
            ! grep -q "shared library stub file and can't be changed" <<< "$int_out"
        then
            echo "$int_out" >&2
            exit "$result"
        fi
    done
}
|
||||
|
||||
# Fix the install names of every *.dylib below DIR.
fixDarwinDylibNamesIn() {
    local dir="$1"
    # NOTE: word-splitting of the find output is relied upon here; paths
    # containing whitespace would break — assumed not to occur in store paths.
    fixDarwinDylibNames $(find "$dir" -name "*.dylib")
}
|
||||
11
bsc/setup-hooks/gog-unpack.sh
Normal file
11
bsc/setup-hooks/gog-unpack.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
# Replace the default unpack phase: GOG installers are InnoSetup
# executables that must be extracted with innoextract.
unpackPhase="unpackGog"

unpackGog() {
    runHook preUnpackGog

    innoextract --silent --extract --exclude-temp "${src}"

    # Lower-case every extracted file and directory name; -depth renames
    # children before their parent directories.
    find . -depth -print -execdir rename -f 'y/A-Z/a-z/' '{}' \;

    runHook postUnpackGog
}
|
||||
165
bsc/setup-hooks/install-shell-files.sh
Normal file
165
bsc/setup-hooks/install-shell-files.sh
Normal file
@@ -0,0 +1,165 @@
|
||||
#!/bin/bash
|
||||
# Setup hook for the `installShellFiles` package.
|
||||
#
|
||||
# Example usage in a derivation:
|
||||
#
|
||||
# { …, installShellFiles, … }:
|
||||
# stdenv.mkDerivation {
|
||||
# …
|
||||
# nativeBuildInputs = [ installShellFiles ];
|
||||
# postInstall = ''
|
||||
# installManPage share/doc/foobar.1
|
||||
# installShellCompletion share/completions/foobar.{bash,fish,zsh}
|
||||
# '';
|
||||
# …
|
||||
# }
|
||||
#
|
||||
# See comments on each function for more details.
|
||||
|
||||
# installManPage <path> [...<path>]
|
||||
#
|
||||
# Each argument is checked for its man section suffix and installed into the appropriate
|
||||
# share/man<n>/ directory. The function returns an error if any paths don't have the man section
|
||||
# suffix (with optional .gz compression).
|
||||
# installManPage <path> [...<path>]
#
# Each argument is checked for its man section suffix and installed into the
# appropriate share/man<n>/ directory of the right output. Returns an error
# if any path doesn't carry a man-section suffix (optionally .gz-compressed).
installManPage() {
    local path
    for path in "$@"; do
        if (( "${NIX_DEBUG:-0}" >= 1 )); then
            echo "installManPage: installing $path"
        fi
        if test -z "$path"; then
            echo "installManPage: error: path cannot be empty" >&2
            return 1
        fi
        local basename
        basename=$(stripHash "$path") # use stripHash in case it's a nix store path
        local trimmed=${basename%.gz} # don't get fooled by compressed manpages
        local suffix=${trimmed##*.}
        # The suffix equals the whole name when there is no '.' at all.
        # FIX: replaced the deprecated, ambiguous `test A -o B` form with
        # a [[ ]] conditional.
        if [[ -z "$suffix" || "$suffix" == "$trimmed" ]]; then
            echo "installManPage: error: path missing manpage section suffix: $path" >&2
            return 1
        fi
        local outRoot
        # Section 3 (library calls) goes into the devman output; all other
        # sections into the man output.
        if test "$suffix" = 3; then
            outRoot=${!outputDevman:?}
        else
            outRoot=${!outputMan:?}
        fi
        install -Dm644 -T "$path" "${outRoot}/share/man/man$suffix/$basename" || return
    done
}
|
||||
|
||||
# installShellCompletion [--bash|--fish|--zsh] ([--name <name>] <path>)...
|
||||
#
|
||||
# Each path is installed into the appropriate directory for shell completions for the given shell.
|
||||
# If one of `--bash`, `--fish`, or `--zsh` is given the path is assumed to belong to that shell.
|
||||
# Otherwise the file extension will be examined to pick a shell. If the shell is unknown a warning
|
||||
# will be logged and the command will return a non-zero status code after processing any remaining
|
||||
# paths. Any of the shell flags will affect all subsequent paths (unless another shell flag is
|
||||
# given).
|
||||
#
|
||||
# If the shell completion needs to be renamed before installing the optional `--name <name>` flag
|
||||
# may be given. Any name provided with this flag only applies to the next path.
|
||||
#
|
||||
# For zsh completions, if the `--name` flag is not given, the path will be automatically renamed
|
||||
# such that `foobar.zsh` becomes `_foobar`.
|
||||
#
|
||||
# This command accepts multiple shell flags in conjunction with multiple paths if you wish to
|
||||
# install them all in one command:
|
||||
#
|
||||
# installShellCompletion share/completions/foobar.{bash,fish} --zsh share/completions/_foobar
|
||||
#
|
||||
# However it may be easier to read if each shell is split into its own invocation, especially when
|
||||
# renaming is involved:
|
||||
#
|
||||
# installShellCompletion --bash --name foobar.bash share/completions.bash
|
||||
# installShellCompletion --fish --name foobar.fish share/completions.fish
|
||||
# installShellCompletion --zsh --name _foobar share/completions.zsh
|
||||
#
|
||||
# If any argument is `--` the remaining arguments will be treated as paths.
|
||||
installShellCompletion() {
    local shell='' name='' retval=0 parseArgs=1 arg
    # The loop condition both consumes an argument and terminates when
    # `shift` fails, i.e. when no arguments remain.
    while { arg=$1; shift; }; do
        # Parse arguments
        if (( parseArgs )); then
            case "$arg" in
            --bash|--fish|--zsh)
                # Shell flags are sticky: they apply to all later paths.
                shell=${arg#--}
                continue;;
            --name)
                name=$1
                shift || {
                    echo 'installShellCompletion: error: --name flag expected an argument' >&2
                    return 1
                }
                continue;;
            --name=*)
                # treat `--name=foo` the same as `--name foo`
                name=${arg#--name=}
                continue;;
            --?*)
                echo "installShellCompletion: warning: unknown flag ${arg%%=*}" >&2
                retval=2
                continue;;
            --)
                # treat remaining args as paths
                parseArgs=0
                continue;;
            esac
        fi
        if (( "${NIX_DEBUG:-0}" >= 1 )); then
            echo "installShellCompletion: installing $arg${name:+ as $name}"
        fi
        # if we get here, this is a path
        # Identify shell
        local basename
        basename=$(stripHash "$arg")
        local curShell=$shell
        if [[ -z "$curShell" ]]; then
            # auto-detect the shell from the file extension
            case "$basename" in
            ?*.bash) curShell=bash;;
            ?*.fish) curShell=fish;;
            ?*.zsh) curShell=zsh;;
            *)
                if [[ "$basename" = _* && "$basename" != *.* ]]; then
                    # probably zsh
                    echo "installShellCompletion: warning: assuming path \`$arg' is zsh; please specify with --zsh" >&2
                    curShell=zsh
                else
                    echo "installShellCompletion: warning: unknown shell for path: $arg" >&2
                    retval=2
                    continue
                fi;;
            esac
        fi
        # Identify output path
        local outName sharePath
        outName=${name:-$basename}
        case "$curShell" in
        bash) sharePath=bash-completion/completions;;
        fish) sharePath=fish/vendor_completions.d;;
        zsh)
            sharePath=zsh/site-functions
            # only apply automatic renaming if we didn't have a manual rename
            if test -z "$name"; then
                # convert a name like `foo.zsh` into `_foo`
                outName=${outName%.zsh}
                outName=_${outName#_}
            fi;;
        *)
            # Our list of shells is out of sync with the flags we accept or extensions we detect.
            echo 'installShellCompletion: internal error' >&2
            return 1;;
        esac
        # Install file
        install -Dm644 -T "$arg" "${!outputBin:?}/share/$sharePath/$outName" || return
        # Clear the name, it only applies to one path
        name=
    done
    # A trailing --name with no following path is an error.
    if [[ -n "$name" ]]; then
        echo 'installShellCompletion: error: --name flag given with no path' >&2
        return 1
    fi
    return $retval
}
|
||||
6
bsc/setup-hooks/keep-build-tree.sh
Normal file
6
bsc/setup-hooks/keep-build-tree.sh
Normal file
@@ -0,0 +1,6 @@
|
||||
prePhases+=" moveBuildDir"

# Create $out/.build and make it the working directory, so the build
# tree itself ends up inside the output path.
moveBuildDir() {
    # FIX: quote $out so paths with spaces/glob characters don't break,
    # and propagate a cd failure explicitly.
    mkdir -p "$out/.build"
    cd "$out/.build" || return
}
|
||||
5
bsc/setup-hooks/ld-is-cc-hook.sh
Normal file
5
bsc/setup-hooks/ld-is-cc-hook.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
# Point the $LD variable at the C compiler, for build systems that
# invoke $LD with compiler-style arguments.
ld-is-cc-hook() {
    LD="$CC"
}

preConfigureHooks+=(ld-is-cc-hook)
|
||||
25
bsc/setup-hooks/make-coverage-analysis-report.sh
Normal file
25
bsc/setup-hooks/make-coverage-analysis-report.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
postPhases+=" coverageReportPhase"

# Capture lcov coverage data, generate an HTML report under
# $out/coverage, and export overall percentages as Hydra metrics.
coverageReportPhase() {
    lcov --directory . --capture --output-file app.info
    # Disable globbing so the default filter "/nix/store/*" reaches lcov
    # unexpanded.
    set -o noglob
    lcov --remove app.info ${lcovFilter:-"/nix/store/*"} > app2.info
    set +o noglob
    mv app2.info app.info

    mkdir -p $out/coverage
    genhtml app.info $lcovExtraTraceFiles -o $out/coverage > log

    # Grab the overall coverage percentage so that Hydra can plot it over time.
    mkdir -p $out/nix-support
    lineCoverage="$(sed 's/.*lines\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
    functionCoverage="$(sed 's/.*functions\.*: \([0-9\.]\+\)%.*/\1/; t ; d' log)"
    if [ -z "$lineCoverage" -o -z "$functionCoverage" ]; then
        echo "failed to get coverage statistics"
        exit 1
    fi
    echo "lineCoverage $lineCoverage %" >> $out/nix-support/hydra-metrics
    echo "functionCoverage $functionCoverage %" >> $out/nix-support/hydra-metrics

    echo "report coverage $out/coverage" >> $out/nix-support/hydra-build-products
}
|
||||
28
bsc/setup-hooks/make-symlinks-relative.sh
Normal file
28
bsc/setup-hooks/make-symlinks-relative.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
fixupOutputHooks+=(_makeSymlinksRelative)

# For every symlink in $output that refers to another file in $output
# ensure that the symlink is relative. This removes references to the output
# hash from the resulting store paths and thus the NAR files.
_makeSymlinksRelative() {
    local symlinkTarget

    if [ -n "${dontRewriteSymlinks-}" ]; then
        return 0
    fi

    while IFS= read -r -d $'\0' f; do
        symlinkTarget=$(readlink "$f")
        if [[ "$symlinkTarget"/ != "$prefix"/* ]]; then
            # skip this symlink as it doesn't point to $prefix
            continue
        fi

        # NOTE(review): broken links are only reported; they are still
        # rewritten below.
        if [ ! -e "$symlinkTarget" ]; then
            echo "the symlink $f is broken, it points to $symlinkTarget (which is missing)"
        fi

        echo "rewriting symlink $f to be relative to $prefix"
        ln -snrf "$symlinkTarget" "$f"

    done < <(find $prefix -type l -print0)
}
|
||||
146
bsc/setup-hooks/make-wrapper.sh
Normal file
146
bsc/setup-hooks/make-wrapper.sh
Normal file
@@ -0,0 +1,146 @@
|
||||
# Assert that FILE exists and is executable
#
# assertExecutable FILE
assertExecutable() {
    local target="$1"
    if [[ ! -f "$target" || ! -x "$target" ]]; then
        die "Cannot wrap '$target' because it is not an executable file"
    fi
}
||||
|
||||
# construct an executable file that wraps the actual executable
|
||||
# makeWrapper EXECUTABLE OUT_PATH ARGS
|
||||
|
||||
# ARGS:
|
||||
# --argv0 NAME : set name of executed process to NAME
|
||||
# (otherwise it’s called …-wrapped)
|
||||
# --set VAR VAL : add VAR with value VAL to the executable’s
|
||||
# environment
|
||||
# --set-default VAR VAL : like --set, but only adds VAR if not already set in
|
||||
# the environment
|
||||
# --unset VAR : remove VAR from the environment
|
||||
# --run COMMAND : run command before the executable
|
||||
# --add-flags FLAGS : add FLAGS to invocation of executable
|
||||
|
||||
# --prefix ENV SEP VAL : suffix/prefix ENV with VAL, separated by SEP
|
||||
# --suffix
|
||||
# --suffix-each ENV SEP VALS : like --suffix, but VALS is a list
|
||||
# --prefix-contents ENV SEP FILES : like --suffix-each, but contents of FILES
|
||||
# are read first and used as VALS
|
||||
# --suffix-contents
|
||||
# makeWrapper EXECUTABLE OUT_PATH ARGS... — see the flag list above.
#
# Writes OUT_PATH as a shell script that sets up the requested
# environment and then execs EXECUTABLE with the configured flags plus
# the wrapper's own arguments.
makeWrapper() {
    local original="$1"
    local wrapper="$2"
    local params varName value command separator n fileNames
    local argv0 flagsBefore flags

    assertExecutable "$original"

    mkdir -p "$(dirname "$wrapper")"

    # "@shell@" is substituted with the stdenv shell when this hook is built.
    echo "#! @shell@ -e" > "$wrapper"

    # Walk the remaining arguments by index; each flag advances $n past
    # its operands. Values are emitted ${…@Q}-quoted so arbitrary strings
    # survive being written into the generated script.
    params=("$@")
    for ((n = 2; n < ${#params[*]}; n += 1)); do
        p="${params[$n]}"

        if [[ "$p" == "--set" ]]; then
            varName="${params[$((n + 1))]}"
            value="${params[$((n + 2))]}"
            n=$((n + 2))
            echo "export $varName=${value@Q}" >> "$wrapper"
        elif [[ "$p" == "--set-default" ]]; then
            varName="${params[$((n + 1))]}"
            value="${params[$((n + 2))]}"
            n=$((n + 2))
            # Only takes effect if the variable is unset at run time.
            echo "export $varName=\${$varName-${value@Q}}" >> "$wrapper"
        elif [[ "$p" == "--unset" ]]; then
            varName="${params[$((n + 1))]}"
            n=$((n + 1))
            echo "unset $varName" >> "$wrapper"
        elif [[ "$p" == "--run" ]]; then
            # Arbitrary command inserted verbatim before the exec.
            command="${params[$((n + 1))]}"
            n=$((n + 1))
            echo "$command" >> "$wrapper"
        elif [[ ("$p" == "--suffix") || ("$p" == "--prefix") ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            value="${params[$((n + 3))]}"
            n=$((n + 3))
            if test -n "$value"; then
                # The ${VAR:+sep} form only inserts the separator when the
                # variable already has a value at run time.
                if test "$p" = "--suffix"; then
                    echo "export $varName=\$$varName\${$varName:+${separator@Q}}${value@Q}" >> "$wrapper"
                else
                    echo "export $varName=${value@Q}\${$varName:+${separator@Q}}\$$varName" >> "$wrapper"
                fi
            fi
        elif [[ "$p" == "--suffix-each" ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            values="${params[$((n + 3))]}"
            n=$((n + 3))
            # VALS is intentionally word-split into individual values.
            for value in $values; do
                echo "export $varName=\$$varName\${$varName:+$separator}${value@Q}" >> "$wrapper"
            done
        elif [[ ("$p" == "--suffix-contents") || ("$p" == "--prefix-contents") ]]; then
            varName="${params[$((n + 1))]}"
            separator="${params[$((n + 2))]}"
            fileNames="${params[$((n + 3))]}"
            n=$((n + 3))
            # Like --suffix-each/--prefix-each, but each value is read
            # from a file at wrapper-creation time.
            for fileName in $fileNames; do
                contents="$(cat "$fileName")"
                if test "$p" = "--suffix-contents"; then
                    echo "export $varName=\$$varName\${$varName:+$separator}${contents@Q}" >> "$wrapper"
                else
                    echo "export $varName=${contents@Q}\${$varName:+$separator}\$$varName" >> "$wrapper"
                fi
            done
        elif [[ "$p" == "--add-flags" ]]; then
            flags="${params[$((n + 1))]}"
            n=$((n + 1))
            flagsBefore="$flagsBefore $flags"
        elif [[ "$p" == "--argv0" ]]; then
            argv0="${params[$((n + 1))]}"
            n=$((n + 1))
        else
            die "makeWrapper doesn't understand the arg $p"
        fi
    done

    # Finally exec the real program, forwarding the wrapper's arguments
    # after any --add-flags flags.
    echo exec ${argv0:+-a \"$argv0\"} \""$original"\" \
         "$flagsBefore" '"$@"' >> "$wrapper"

    chmod +x "$wrapper"
}
|
||||
|
||||
# addSuffix SUFFIX NAME...
#
# Print each NAME with SUFFIX appended, one per line.
addSuffix() {
    # FIX: declare locals so the caller's $suffix/$name are no longer
    # clobbered as a side effect.
    local suffix="$1"
    local name
    shift
    for name in "$@"; do
        echo "$name$suffix"
    done
}
|
||||
|
||||
# filterExisting PATH...
#
# Print only those of the given paths that exist on disk, one per line.
filterExisting() {
    # FIX: make the loop variable local so it no longer leaks into the
    # caller's scope.
    local fn
    for fn in "$@"; do
        if test -e "$fn"; then
            echo "$fn"
        fi
    done
}
|
||||
|
||||
# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
|
||||
# Syntax: wrapProgram <PROGRAM> <MAKE-WRAPPER FLAGS...>
#
# Replace PROGRAM in place with a wrapper that execs the original
# (moved to a hidden ".<name>-wrapped" file) via makeWrapper.
wrapProgram() {
    local prog="$1"
    local hidden

    assertExecutable "$prog"

    # Append underscores until the hidden name is free, in case the
    # program is wrapped more than once.
    hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped
    while [ -e "$hidden" ]; do
      hidden="${hidden}_"
    done
    mv "$prog" "$hidden"
    # Silence warning about unexpanded $0:
    # shellcheck disable=SC2016
    makeWrapper "$hidden" "$prog" --argv0 '$0' "${@:2}"
}
|
||||
23
bsc/setup-hooks/move-docs.sh
Normal file
23
bsc/setup-hooks/move-docs.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
# This setup hook moves $out/{man,doc,info} to $out/share; moves
|
||||
# $out/share/man to $man/share/man; and moves $out/share/doc to
|
||||
# $man/share/doc.
|
||||
|
||||
preFixupHooks+=(_moveToShare)
|
||||
|
||||
# Pre-fixup hook: move $out/{man,doc,info} (or whatever $forceShare lists)
# into $out/share/.
_moveToShare() {
    forceShare=${forceShare:=man doc info}
    # Nothing to do without a target list or without an $out to work in.
    # ("-o" inside test is deprecated/ambiguous; use two tests.)
    if [ -z "$forceShare" ] || [ -z "${out-}" ]; then return; fi

    local d
    for d in $forceShare; do  # intentional word-splitting: space-separated list
        if [ -d "$out/$d" ]; then
            if [ -d "$out/share/$d" ]; then
                # Merging could clobber files, so only warn and leave both.
                echo "both $d/ and share/$d/ exist!"
            else
                echo "moving $out/$d to $out/share/$d"
                mkdir -p "$out/share"
                mv "$out/$d" "$out/share/"
            fi
        fi
    done
}
|
||||
|
||||
22
bsc/setup-hooks/move-lib64.sh
Normal file
22
bsc/setup-hooks/move-lib64.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
# This setup hook, for each output, moves everything in $output/lib64
|
||||
# to $output/lib, and replaces $output/lib64 with a symlink to
|
||||
# $output/lib. The rationale is that lib64 directories are unnecessary
|
||||
# in Nix (since 32-bit and 64-bit builds of a package are in different
|
||||
# store paths anyway).
|
||||
# If the move would overwrite anything, it should fail on rmdir.
|
||||
|
||||
fixupOutputHooks+=(_moveLib64)
|
||||
|
||||
# Fixup hook: merge $prefix/lib64 into $prefix/lib and leave a compat symlink.
# If the move would overwrite anything, it should fail on rmdir.
_moveLib64() {
    if [ "${dontMoveLib64-}" = 1 ]; then return; fi
    # Skip when lib64 is absent or already a symlink.
    # ("-o" inside test is deprecated; use two tests.)
    if [ ! -e "$prefix/lib64" ] || [ -L "$prefix/lib64" ]; then return; fi
    echo "moving $prefix/lib64/* to $prefix/lib"

    local i
    mkdir -p "$prefix/lib"
    shopt -s dotglob
    for i in "$prefix"/lib64/*; do
        # --no-clobber: never silently overwrite; leftovers make rmdir fail.
        mv --no-clobber "$i" "$prefix/lib"
    done
    shopt -u dotglob
    rmdir "$prefix/lib64"
    ln -s lib "$prefix/lib64"
}
|
||||
19
bsc/setup-hooks/move-sbin.sh
Normal file
19
bsc/setup-hooks/move-sbin.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
# This setup hook, for each output, moves everything in $output/sbin
|
||||
# to $output/bin, and replaces $output/sbin with a symlink to
|
||||
# $output/bin.
|
||||
|
||||
fixupOutputHooks+=(_moveSbin)
|
||||
|
||||
# Fixup hook: merge $prefix/sbin into $prefix/bin and leave a compat symlink.
_moveSbin() {
    if [ "${dontMoveSbin-}" = 1 ]; then return; fi
    # Skip when sbin is absent or already a symlink.
    # ("-o" inside test is deprecated; use two tests.)
    if [ ! -e "$prefix/sbin" ] || [ -L "$prefix/sbin" ]; then return; fi
    echo "moving $prefix/sbin/* to $prefix/bin"

    local i
    mkdir -p "$prefix/bin"
    shopt -s dotglob
    for i in "$prefix"/sbin/*; do
        mv "$i" "$prefix/bin"
    done
    shopt -u dotglob
    # rmdir (not rm -rf) so we fail loudly if anything was left behind.
    rmdir "$prefix/sbin"
    ln -s bin "$prefix/sbin"
}
|
||||
199
bsc/setup-hooks/multiple-outputs.sh
Normal file
199
bsc/setup-hooks/multiple-outputs.sh
Normal file
@@ -0,0 +1,199 @@
|
||||
# The base package for automatic multiple-output splitting. Used in stdenv as well.
|
||||
preConfigureHooks+=(_multioutConfig)
|
||||
preFixupHooks+=(_multioutDocs)
|
||||
preFixupHooks+=(_multioutDevs)
|
||||
postFixupHooks+=(_multioutPropagateDev)
|
||||
|
||||
# Assign the first string containing nonempty variable to the variable named $1
_assignFirst() {
    local target="$1"
    local REMOVE=REMOVE # slightly hacky - we allow REMOVE (i.e. not a variable name)
    local candidate
    shift
    for candidate in "$@"; do
        if [ -n "${!candidate-}" ]; then
            # printf -v avoids eval while assigning to the named variable.
            printf -v "$target" '%s' "$candidate"
            return
        fi
    done
    echo "Error: _assignFirst found no valid variant!"
    return 1 # none found
}

# Same as _assignFirst, but only if "$1" = ""
_overrideFirst() {
    [ -n "${!1-}" ] || _assignFirst "$@"
}
|
||||
|
||||
|
||||
# Setup chains of sane default values with easy overridability.
# The variables are global to be usable anywhere during the build.
# Typical usage in package is defining outputBin = "dev";

# Each call picks the first candidate output that actually exists
# (i.e. whose variable is set), so e.g. outputDev falls back to "out".
_overrideFirst outputDev "dev" "out"
_overrideFirst outputBin "bin" "out"

_overrideFirst outputInclude "$outputDev"

# so-libs are often among the main things to keep, and so go to $out
_overrideFirst outputLib "lib" "out"

_overrideFirst outputDoc "doc" "out"
_overrideFirst outputDevdoc "devdoc" REMOVE # documentation for developers
# man and info pages are small and often useful to distribute with binaries
_overrideFirst outputMan "man" "$outputBin"
_overrideFirst outputDevman "devman" "devdoc" "$outputMan"
_overrideFirst outputInfo "info" "$outputBin"
|
||||
|
||||
|
||||
# Add standard flags to put files into the desired outputs.
_multioutConfig() {
    if [ "$outputs" = "out" ] || [ -z "${setOutputFlags-1}" ]; then return; fi;

    # try to detect share/doc/${shareDocName}
    # Note: sadly, $configureScript detection comes later in configurePhase,
    # and reordering would cause more trouble than worth.
    if [ -z "$shareDocName" ]; then
        local confScript="$configureScript"
        if [ -z "$confScript" ] && [ -x ./configure ]; then
            confScript=./configure
        fi
        if [ -f "$confScript" ]; then
            local shareDocName="$(sed -n "s/^PACKAGE_TARNAME='\(.*\)'$/\1/p" < "$confScript")"
        fi
        # PACKAGE_TARNAME sometimes contains garbage.
        # Fix: this test was inverted ([ -n ... ]), which threw away every
        # correctly detected name; fall back to $name only when detection
        # failed or produced garbage characters.
        if [ -z "$shareDocName" ] || echo "$shareDocName" | grep -q '[^a-zA-Z0-9_-]'; then
            shareDocName="$(echo "$name" | sed 's/-[^a-zA-Z].*//')"
        fi
    fi

    configureFlags="\
        --bindir=${!outputBin}/bin --sbindir=${!outputBin}/sbin \
        --includedir=${!outputInclude}/include --oldincludedir=${!outputInclude}/include \
        --mandir=${!outputMan}/share/man --infodir=${!outputInfo}/share/info \
        --docdir=${!outputDoc}/share/doc/${shareDocName} \
        --libdir=${!outputLib}/lib --libexecdir=${!outputLib}/libexec \
        --localedir=${!outputLib}/share/locale \
        $configureFlags"

    installFlags="\
        pkgconfigdir=${!outputDev}/lib/pkgconfig \
        m4datadir=${!outputDev}/share/aclocal aclocaldir=${!outputDev}/share/aclocal \
        $installFlags"
}
|
||||
|
||||
|
||||
# Add rpath prefixes to library paths, and avoid stdenv doing it for $out.
_addRpathPrefix "${!outputLib}"
# Tell stdenv not to add $out itself to the rpath; the lib output
# registered above is the one that carries the libraries.
NIX_NO_SELF_RPATH=1
|
||||
|
||||
|
||||
# Move subpaths that match pattern $1 from under any output/ to the $2 output/
# Beware: only globbing patterns are accepted, e.g.: * ? {foo,bar}
# A special target "REMOVE" is allowed: moveToOutput foo REMOVE
moveToOutput() {
    local patt="$1"
    local dstOut="$2"
    local output
    for output in $outputs; do
        # Skip the destination output itself; nothing to move there.
        if [ "${!output}" = "$dstOut" ]; then continue; fi
        local srcPath
        # $patt is deliberately unquoted: it is a glob to be expanded here.
        for srcPath in "${!output}"/$patt; do
            # apply to existing files/dirs, *including* broken symlinks
            if [ ! -e "$srcPath" ] && [ ! -L "$srcPath" ]; then continue; fi

            if [ "$dstOut" = REMOVE ]; then
                echo "Removing $srcPath"
                rm -r "$srcPath"
            else
                # Same subpath, re-rooted at the destination output.
                local dstPath="$dstOut${srcPath#${!output}}"
                echo "Moving $srcPath to $dstPath"

                if [ -d "$dstPath" ] && [ -d "$srcPath" ]
                then # attempt directory merge
                    # check the case of trying to move an empty directory
                    rmdir "$srcPath" --ignore-fail-on-non-empty
                    if [ -d "$srcPath" ]; then
                        mv -t "$dstPath" "$srcPath"/*
                        rmdir "$srcPath"
                    fi
                else # usual move
                    mkdir -p "$(readlink -m "$dstPath/..")"
                    mv "$srcPath" "$dstPath"
                fi
            fi

            # remove empty directories, printing iff at least one gets removed
            local srcParent="$(readlink -m "$srcPath/..")"
            if rmdir "$srcParent"; then
                echo "Removing empty $srcParent/ and (possibly) its parents"
                rmdir -p --ignore-fail-on-non-empty "$(readlink -m "$srcParent/..")" \
                    2> /dev/null || true # doesn't ignore failure for some reason
            fi
        done
    done
}
|
||||
|
||||
# Move documentation to the desired outputs.
_multioutDocs() {
    local REMOVE=REMOVE # slightly hacky - we expand ${!outputFoo}

    moveToOutput share/info "${!outputInfo}"
    moveToOutput share/doc "${!outputDoc}"
    moveToOutput share/gtk-doc "${!outputDevdoc}"
    moveToOutput share/devhelp/books "${!outputDevdoc}"

    # the default outputMan is in $bin
    moveToOutput share/man "${!outputMan}"
    # man3 (developer manpages) is re-moved after the general share/man move
    # above, so the order of these two calls matters.
    moveToOutput share/man/man3 "${!outputDevman}"
}
|
||||
|
||||
# Move development-only stuff to the desired outputs.
_multioutDevs() {
    if [ "$outputs" = "out" ] || [ -z "${moveToDev-1}" ]; then return; fi;
    moveToOutput include "${!outputInclude}"
    # these files are sometimes provided even without using the corresponding tool
    moveToOutput lib/pkgconfig "${!outputDev}"
    moveToOutput share/pkgconfig "${!outputDev}"
    moveToOutput lib/cmake "${!outputDev}"
    moveToOutput share/aclocal "${!outputDev}"
    # don't move *.la, as libtool needs them in the directory of the library

    for f in "${!outputDev}"/{lib,share}/pkgconfig/*.pc; do
        # If the glob matched nothing, $f is the literal pattern; skip it so
        # sed -i does not fail on a nonexistent file.
        [ -e "$f" ] || continue
        echo "Patching '$f' includedir to output ${!outputInclude}"
        sed -i "/^includedir=/s,=\${prefix},=${!outputInclude}," "$f"
    done
}
|
||||
|
||||
# Make the "dev" propagate other outputs needed for development.
_multioutPropagateDev() {
    if [ "$outputs" = "out" ]; then return; fi;

    # First output listed in $outputs, used as a fallback propagator
    # when no dev output exists.
    local outputFirst
    for outputFirst in $outputs; do
        break
    done
    local propagaterOutput="$outputDev"
    if [ -z "$propagaterOutput" ]; then
        propagaterOutput="$outputFirst"
    fi

    # Default value: propagate binaries, includes and libraries
    if [ -z "${propagatedBuildOutputs+1}" ]; then
        local po_dirty="$outputBin $outputInclude $outputLib"
        # grep exits non-zero when everything is filtered out, which would
        # trip an active `pipefail`; disable it around the pipeline.
        set +o pipefail
        propagatedBuildOutputs=`echo "$po_dirty" \
            | tr -s ' ' '\n' | grep -v -F "$propagaterOutput" \
            | sort -u | tr '\n' ' ' `
        set -o pipefail
    fi

    # The variable was explicitly set to empty or we resolved it so
    if [ -z "$propagatedBuildOutputs" ]; then
        return
    fi

    # Record the propagated outputs' store paths for Nix to pick up.
    mkdir -p "${!propagaterOutput}"/nix-support
    for output in $propagatedBuildOutputs; do
        echo -n " ${!output}" >> "${!propagaterOutput}"/nix-support/propagated-build-inputs
    done
}
|
||||
119
bsc/setup-hooks/patch-shebangs.sh
Normal file
119
bsc/setup-hooks/patch-shebangs.sh
Normal file
@@ -0,0 +1,119 @@
|
||||
# This setup hook causes the fixup phase to rewrite all script
|
||||
# interpreter file names (`#! /path') to paths found in $PATH. E.g.,
|
||||
# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
|
||||
# /usr/bin/env gets special treatment so that ".../bin/env python" is
|
||||
# rewritten to /nix/store/<hash>/bin/python. Interpreters that are
|
||||
# already in the store are left untouched.
|
||||
# A script file must be marked as executable, otherwise it will not be
|
||||
# considered.
|
||||
|
||||
fixupOutputHooks+=(patchShebangsAuto)
|
||||
|
||||
# Run patch shebangs on a directory or file.
|
||||
# Can take multiple paths as arguments.
|
||||
# patchShebangs [--build | --host] PATH...
|
||||
|
||||
# Flags:
|
||||
# --build : Lookup commands available at build-time
|
||||
# --host : Lookup commands available at runtime
|
||||
|
||||
# Example use cases,
|
||||
# $ patchShebangs --host /nix/store/...-hello-1.0/bin
|
||||
# $ patchShebangs --build configure
|
||||
|
||||
# Rewrite the interpreter (`#!`) line of every executable script under the
# given paths to an interpreter found on $PATH (or $HOST_PATH with --host).
# Interpreters already in the Nix store are left untouched.
patchShebangs() {
    local pathName

    if [ "$1" = "--host" ]; then
        pathName=HOST_PATH
        shift
    elif [ "$1" = "--build" ]; then
        pathName=PATH
        shift
    fi

    echo "patching script interpreter paths in $@"
    local f
    local oldPath
    local newPath
    local arg0
    local args
    local oldInterpreterLine
    local newInterpreterLine

    if [ $# -eq 0 ]; then
        echo "No arguments supplied to patchShebangs" >&2
        return 0
    fi

    local f
    while IFS= read -r -d $'\0' f; do
        isScript "$f" || continue

        # Shebang line without the leading "#!".
        oldInterpreterLine=$(head -1 "$f" | tail -c+3)
        # First word is the interpreter path, second its first argument.
        read -r oldPath arg0 args <<< "$oldInterpreterLine"

        # Neither --host nor --build given: decide per store-ness of the file.
        if [ -z "$pathName" ]; then
            if [ -n "$strictDeps" ] && [[ "$f" = "$NIX_STORE"* ]]; then
                pathName=HOST_PATH
            else
                pathName=PATH
            fi
        fi

        # NOTE(review): `if $(cmd | grep -q ...)` relies on the empty command
        # substitution propagating grep's exit status; it works but is
        # unconventional — `if cmd | grep -q ...` would be the usual form.
        if $(echo "$oldPath" | grep -q "/bin/env$"); then
            # Check for unsupported 'env' functionality:
            # - options: something starting with a '-'
            # - environment variables: foo=bar
            if $(echo "$arg0" | grep -q -- "^-.*\|.*=.*"); then
                echo "$f: unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)" >&2
                exit 1
            fi

            # env form: resolve the program env would have launched.
            newPath="$(PATH="${!pathName}" command -v "$arg0" || true)"
        else
            if [ "$oldPath" = "" ]; then
                # If no interpreter is specified linux will use /bin/sh. Set
                # oldpath="/bin/sh" so that we get /nix/store/.../sh.
                oldPath="/bin/sh"
            fi

            # Resolve the interpreter by basename on the chosen search path.
            newPath="$(PATH="${!pathName}" command -v "$(basename "$oldPath")" || true)"

            args="$arg0 $args"
        fi

        # Strip trailing whitespace introduced when no arguments are present
        newInterpreterLine="$(echo "$newPath $args" | sed 's/[[:space:]]*$//')"

        # Only rewrite interpreters that are not already in the store.
        if [ -n "$oldPath" -a "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then
            if [ -n "$newPath" -a "$newPath" != "$oldPath" ]; then
                echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
                # escape the escape chars so that sed doesn't interpret them
                escapedInterpreterLine=$(echo "$newInterpreterLine" | sed 's|\\|\\\\|g')
                # Preserve times, see: https://github.com/NixOS/nixpkgs/pull/33281
                timestamp=$(mktemp)
                touch -r "$f" "$timestamp"
                sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
                touch -r "$timestamp" "$f"
                rm "$timestamp"
            fi
        fi
    done < <(find "$@" -type f -perm -0100 -print0)

    # Legacy nesting-output helper; presumably defined by stdenv — confirm.
    stopNest
}
|
||||
|
||||
# Fixup hook: patch shebangs in $prefix unless dontPatchShebangs is set.
patchShebangsAuto () {
    # Replaced deprecated "[ a -a b ]" with two chained tests.
    if [ -z "${dontPatchShebangs-}" ] && [ -e "$prefix" ]; then

        # Dev output will end up being run on the build platform. An
        # example case of this is sdl2-config. Otherwise, we can just
        # use the runtime path (--host).
        if [ "$output" != out ] && [ "$output" = "$outputDev" ]; then
            patchShebangs --build "$prefix"
        else
            patchShebangs --host "$prefix"
        fi
    fi
}
|
||||
22
bsc/setup-hooks/prune-libtool-files.sh
Normal file
22
bsc/setup-hooks/prune-libtool-files.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
# Clear dependency_libs in libtool files for shared libraries.
|
||||
|
||||
# Shared libraries already encode their dependencies with locations. .la
|
||||
# files do not always encode those locations, and sometimes encode the
|
||||
# locations in the wrong Nix output. .la files are not needed for shared
|
||||
# libraries, but without dependency_libs they do not hurt either.
|
||||
|
||||
fixupOutputHooks+=(_pruneLibtoolFiles)
|
||||
|
||||
# Fixup hook: clear dependency_libs in .la files describing shared libraries.
_pruneLibtoolFiles() {
    if [ "${dontPruneLibtoolFiles-}" ] || [ ! -e "$prefix" ]; then
        return
    fi

    # Libtool uses "dlname" and "library_names" fields for shared libraries and
    # the "old_library" field for static libraries. We are processing only
    # those .la files that do not describe static libraries.
    # The two grep -execs act as filters: sed runs only on files for which
    # both greps matched (find chains -exec with implicit -and).
    find "$prefix" -type f -name '*.la' \
        -exec grep -q '^# Generated by .*libtool' {} \; \
        -exec grep -q "^old_library=''" {} \; \
        -exec sed -i {} -e "/^dependency_libs='[^']/ c dependency_libs='' #pruned" \;
}
|
||||
75
bsc/setup-hooks/role.bash
Normal file
75
bsc/setup-hooks/role.bash
Normal file
@@ -0,0 +1,75 @@
|
||||
# Since the same derivation can be depend on in multiple ways, we need to
|
||||
# accumulate *each* role (i.e. host and target platforms relative the depending
|
||||
# derivation) in which the derivation is used.
|
||||
#
|
||||
# The role is intened to be use as part of other variables names like
|
||||
# - $NIX_${role_pre}_SOMETHING
|
||||
# - $NIX_SOMETHING_${role_post}
|
||||
|
||||
# Map a dependency offset (-1 build, 0 host, 1 target) to the role_pre /
# role_post fragments used to compose NIX_* variable names.
# Sets globals: role_pre, role_post. Returns 1 on an invalid offset.
function getRole() {
    case $1 in
        -1)
            role_pre='BUILD_'
            role_post='_FOR_BUILD'
            ;;
        0)
            role_pre=''
            role_post=''
            ;;
        1)
            role_pre='TARGET_'
            role_post='_FOR_TARGET'
            ;;
        *)
            # Fix: ">2" redirected into a file literally named "2";
            # ">&2" sends the diagnostic to stderr as intended.
            echo "@name@: used as improper sort of dependency" >&2
            return 1
            ;;
    esac
}

# `hostOffset` describes how the host platform of the package is slid relative
# to the depending package. `targetOffset` likewise describes the target
# platform of the package. Both are brought into scope of the setup hook defined
# for dependency whose setup hook is being processed relative to the package
# being built.

function getHostRole() {
    getRole "$hostOffset"
}
function getTargetRole() {
    getRole "$targetOffset"
}

# `depHostOffset` describes how the host platform of the dependencies are slid
# relative to the depending package. `depTargetOffset` likewise describes the
# target platform of dependencies. Both are brought into scope of the
# environment hook defined for the dependency being applied relative to the
# package being built.

function getHostRoleEnvHook() {
    getRole "$depHostOffset"
}
function getTargetRoleEnvHook() {
    getRole "$depTargetOffset"
}
|
||||
|
||||
# This variant is intended specifically for code-producing tool wrapper scripts.
# `NIX_@wrapperName@_@infixSalt@_TARGET_*` tracks this (needs to be an exported
# env var so can't use fancier data structures).
# Note: the @...@ tokens are template placeholders substituted by Nix.
function getTargetRoleWrapper() {
    case $targetOffset in
        -1)
            export NIX_@wrapperName@_@infixSalt@_TARGET_BUILD=1
            ;;
        0)
            export NIX_@wrapperName@_@infixSalt@_TARGET_HOST=1
            ;;
        1)
            export NIX_@wrapperName@_@infixSalt@_TARGET_TARGET=1
            ;;
        *)
            # Fix: ">2" created a file named "2"; ">&2" writes to stderr.
            echo "@name@: used as improper sort of dependency" >&2
            return 1
            ;;
    esac
}
|
||||
37
bsc/setup-hooks/separate-debug-info.sh
Normal file
37
bsc/setup-hooks/separate-debug-info.sh
Normal file
@@ -0,0 +1,37 @@
|
||||
# Ask the toolchain to emit build IDs and compress debug sections; keep
# debug info in the binaries until _separateDebugInfo splits it out.
export NIX_SET_BUILD_ID=1
export NIX_LDFLAGS+=" --compress-debug-sections=zlib"
export NIX_CFLAGS_COMPILE+=" -ggdb -Wa,--compress-debug-sections"
# Disable the regular strip hook; stripping would discard what we separate.
dontStrip=1
|
||||
|
||||
fixupOutputHooks+=(_separateDebugInfo)
|
||||
|
||||
# Fixup hook: split debug info out of every ELF file in $prefix into
# ${debug:-$out}/lib/debug/.build-id/<id[0:2]>/<id[2:]>.debug.
_separateDebugInfo() {
    [ -e "$prefix" ] || return 0

    local dst="${debug:-$out}"
    # Don't separate into the very output we're processing.
    if [ "$prefix" = "$dst" ]; then return 0; fi

    dst="$dst/lib/debug/.build-id"

    # Find executables and dynamic libraries.
    local i magic   # NOTE(review): `magic` appears unused here — confirm
    while IFS= read -r -d $'\0' i; do
        if ! isELF "$i"; then continue; fi

        # Extract the Build ID. FIXME: there's probably a cleaner way.
        local id="$($READELF -n "$i" | sed 's/.*Build ID: \([0-9a-f]*\).*/\1/; t; d')"
        # A SHA1 build ID is 40 hex chars; anything else means no usable ID.
        if [ "${#id}" != 40 ]; then
            echo "could not find build ID of $i, skipping" >&2
            continue
        fi

        # Extract the debug info.
        header "separating debug info from $i (build ID $id)"
        mkdir -p "$dst/${id:0:2}"
        $OBJCOPY --only-keep-debug "$i" "$dst/${id:0:2}/${id:2}.debug"
        $STRIP --strip-debug "$i"

        # Also a create a symlink <original-name>.debug.
        # NOTE(review): the link name carries no ".debug" suffix despite the
        # comment above — confirm which is intended.
        ln -sfn ".build-id/${id:0:2}/${id:2}.debug" "$dst/../$(basename "$i")"
    done < <(find "$prefix" -type f -print0)
}
|
||||
13
bsc/setup-hooks/set-java-classpath.sh
Normal file
13
bsc/setup-hooks/set-java-classpath.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
# This setup hook adds every JAR in the share/java subdirectories of
|
||||
# the build inputs to $CLASSPATH.
|
||||
|
||||
export CLASSPATH
|
||||
|
||||
# Env hook: append every JAR under $1/share/java to $CLASSPATH
# (colon-separated, preserving any existing value).
addPkgToClassPath () {
    local jar
    for jar in "$1"/share/java/*.jar; do
        # Skip the literal pattern when the glob matches nothing.
        [ -e "$jar" ] || continue
        # Note: the original carried leaked Nix-template quoting (''${...});
        # plain shell expansions are what is intended here.
        export CLASSPATH="${CLASSPATH-}${CLASSPATH:+:}${jar}"
    done
}
|
||||
|
||||
addEnvHooks "$targetOffset" addPkgToClassPath
|
||||
34
bsc/setup-hooks/set-source-date-epoch-to-latest.sh
Normal file
34
bsc/setup-hooks/set-source-date-epoch-to-latest.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
# Set SOURCE_DATE_EPOCH to the newest mtime of any regular file under $1,
# if that is newer than the current value.
updateSourceDateEpoch() {
    local path="$1"

    # Get the last modification time of all regular files, sort them,
    # and get the most recent. Maybe we should use
    # https://github.com/0-wiz-0/findnewest here.
    # Files newer than $NIX_BUILD_TOP/.. are excluded: those were created
    # during this build and would make the epoch non-reproducible.
    local -a res=($(find "$path" -type f -not -newer "$NIX_BUILD_TOP/.." -printf '%T@ %p\0' \
                    | sort -n --zero-terminated | tail -n1 --zero-terminated | head -c -1))
    local time="${res[0]//\.[0-9]*/}" # remove the fraction part
    local newestFile="${res[1]}"      # NB: breaks for paths with spaces — word-split above

    # Update $SOURCE_DATE_EPOCH if the most recent file we found is newer.
    if [ "${time:-0}" -gt "$SOURCE_DATE_EPOCH" ]; then
        echo "setting SOURCE_DATE_EPOCH to timestamp $time of file $newestFile"
        export SOURCE_DATE_EPOCH="$time"

        # Warn if the new timestamp is too close to the present. This
        # may indicate that we were being applied to a file generated
        # during the build, or that an unpacker didn't restore
        # timestamps properly.
        local now="$(date +%s)"
        if [ "$time" -gt $((now - 60)) ]; then
            echo "warning: file $newestFile may be generated; SOURCE_DATE_EPOCH may be non-deterministic"
        fi
    fi
}
|
||||
|
||||
postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)
|
||||
|
||||
# Post-unpack hook: derive SOURCE_DATE_EPOCH from the unpacked sources.
_updateSourceDateEpochFromSourceRoot() {
    [ -z "$sourceRoot" ] || updateSourceDateEpoch "$sourceRoot"
}
|
||||
5
bsc/setup-hooks/setup-debug-info-dirs.sh
Normal file
5
bsc/setup-hooks/setup-debug-info-dirs.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
# Env hook: register $1/lib/debug so debuggers can find separated debug info.
setupDebugInfoDirs () {
    # Quote "$1": unquoted it would word-split/glob paths containing spaces.
    addToSearchPath NIX_DEBUG_INFO_DIRS "$1/lib/debug"
}
|
||||
|
||||
addEnvHooks "$targetOffset" setupDebugInfoDirs
|
||||
88
bsc/setup-hooks/shorten-perl-shebang.sh
Normal file
88
bsc/setup-hooks/shorten-perl-shebang.sh
Normal file
@@ -0,0 +1,88 @@
|
||||
# This setup hook modifies a Perl script so that any "-I" flags in its shebang
|
||||
# line are rewritten into a "use lib ..." statement on the next line. This gets
|
||||
# around a limitation in Darwin, which will not properly handle a script whose
|
||||
# shebang line exceeds 511 characters.
|
||||
#
|
||||
# Each occurrence of "-I /path/to/lib1" or "-I/path/to/lib2" is removed from
|
||||
# the shebang line, along with the single space that preceded it. These library
|
||||
# paths are placed into a new line of the form
|
||||
#
|
||||
# use lib "/path/to/lib1", "/path/to/lib2";
|
||||
#
|
||||
# immediately following the shebang line. If a library appeared in the original
|
||||
# list more than once, only its first occurrence will appear in the output
|
||||
# list. In other words, the libraries are deduplicated, but the ordering of the
|
||||
# first appearance of each one is preserved.
|
||||
#
|
||||
# Any flags other than "-I" in the shebang line are left as-is, and the
|
||||
# interpreter is also left alone (although the script will abort if the
|
||||
# interpreter does not seem to be either "perl" or else "env" with "perl" as
|
||||
# its argument). Each line after the shebang line is left unchanged. Each file
|
||||
# is modified in place.
|
||||
#
|
||||
# Usage:
|
||||
# shortenPerlShebang SCRIPT...
|
||||
|
||||
# Rewrite the shebang line of each given Perl script in place
# (see _shortenPerlShebang for the details).
shortenPerlShebang() {
    local script
    for script in "$@"; do
        _shortenPerlShebang "$script"
    done
}
|
||||
|
||||
# Rewrite one Perl script: strip every "-I <path>" from its shebang line and
# emit an equivalent deduplicated `use lib "...";` line right after it, so the
# shebang stays under Darwin's 511-character limit.
_shortenPerlShebang() {
    local program="$1"

    echo "shortenPerlShebang: rewriting shebang line in $program"

    if ! isScript "$program"; then
        die "shortenPerlShebang: refusing to modify $program because it is not a script"
    fi

    local temp="$(mktemp)"

    # gawk (not POSIX awk): uses match() with a capture array and gensub().
    gawk '
        (NR == 1) {
            if (!($0 ~ /\/(perl|env +perl)\>/)) {
                print "shortenPerlShebang: script does not seem to be a Perl script" > "/dev/stderr"
                exit 1
            }
            idx = 0
            while (match($0, / -I ?([^ ]+)/, pieces)) {
                matches[idx] = pieces[1]
                idx++
                $0 = gensub(/ -I ?[^ ]+/, "", 1, $0)
            }
            print $0
            if (idx > 0) {
                prefix = "use lib "
                for (idx in matches) {
                    path = matches[idx]
                    if (!(path in seen)) {
                        printf "%s\"%s\"", prefix, path
                        seen[path] = 1
                        prefix = ", "
                    }
                }
                print ";"
            }
        }
        (NR > 1 ) {
            print
        }
    ' "$program" > "$temp" || die
    # Preserve the mode of the original file
    cp --preserve=mode --attributes-only "$program" "$temp"
    mv "$temp" "$program"

    # Measure the new shebang line length and make sure it's okay. We subtract
    # one to account for the trailing newline that "head" included in its
    # output.
    local new_length=$(( $(head -n 1 "$program") | wc -c) - 1 ))
    # NOTE(review): the line above must stay exactly as in the source:
    local new_length=$(( $(head -n 1 "$program" | wc -c) - 1 ))

    # Darwin is okay when the shebang line contains 511 characters, but not
    # when it contains 512 characters.
    if [ $new_length -ge 512 ]; then
        die "shortenPerlShebang: shebang line is $new_length characters--still too long for Darwin!"
    fi
}
|
||||
57
bsc/setup-hooks/strip.sh
Normal file
57
bsc/setup-hooks/strip.sh
Normal file
@@ -0,0 +1,57 @@
|
||||
# This setup hook strips libraries and executables in the fixup phase.
|
||||
|
||||
fixupOutputHooks+=(_doStrip)
|
||||
|
||||
# Fixup hook: strip debug info (and optionally symbols) from installed files.
_doStrip() {
    # We don't bother to strip build platform code because it shouldn't make it
    # to $out anyways---if it does, that's a bigger problem that a lack of
    # stripping will help catch.
    local -ra flags=(dontStripHost dontStripTarget)
    local -ra stripCmds=(STRIP TARGET_STRIP)

    # Optimization
    # When host and target strip are the same tool, skip the target pass by
    # flagging it off (string-append makes the variable non-empty).
    if [[ "${STRIP-}" == "${TARGET_STRIP-}" ]]; then
        dontStripTarget+=1
    fi

    local i
    for i in ${!stripCmds[@]}; do
        # Namerefs (bash 4.3+): flag/stripCmd alias the i-th variable names.
        local -n flag="${flags[$i]}"
        local -n stripCmd="${stripCmds[$i]}"

        # `dontStrip` disables them all
        if [[ "${dontStrip-}" || "${flag-}" ]] || ! type -f "${stripCmd-}" 2>/dev/null
        then continue; fi

        # Directories stripped of debug info only (default -S).
        stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
        if [ -n "$stripDebugList" ]; then
            stripDirs "$stripCmd" "$stripDebugList" "${stripDebugFlags:--S}"
        fi

        # Directories stripped of everything (default -s); empty by default.
        stripAllList=${stripAllList:-}
        if [ -n "$stripAllList" ]; then
            stripDirs "$stripCmd" "$stripAllList" "${stripAllFlags:--s}"
        fi
    done
}
|
||||
|
||||
# Run strip command $1 with flags $3 over every file in the space-separated
# list of $prefix-relative directories $2 (directories that don't exist are
# silently dropped).
stripDirs() {
    local cmd="$1"
    local dirs="$2"
    local stripFlags="$3"
    local dirsNew=

    local d
    for d in ${dirs}; do   # intentional word-splitting: space-separated list
        if [ -d "$prefix/$d" ]; then
            dirsNew="${dirsNew} $prefix/$d "
        fi
    done
    dirs=${dirsNew}

    if [ -n "${dirs}" ]; then
        header "stripping (with command $cmd and flags $stripFlags) in$dirs"
        # Errors are ignored on purpose: strip fails on non-object files.
        find $dirs -type f -print0 | xargs -0 ${xargsFlags:--r} $cmd $commonStripFlags $stripFlags 2>/dev/null || true
        stopNest
    fi
}
|
||||
12
bsc/setup-hooks/update-autotools-gnu-config-scripts.sh
Normal file
12
bsc/setup-hooks/update-autotools-gnu-config-scripts.sh
Normal file
@@ -0,0 +1,12 @@
|
||||
preConfigurePhases+=" updateAutotoolsGnuConfigScriptsPhase"
|
||||
|
||||
# Pre-configure phase: replace vendored config.sub/config.guess with the
# newer copies from @gnu_config@ (template placeholder substituted by Nix).
updateAutotoolsGnuConfigScriptsPhase() {
    if [ -n "${dontUpdateAutotoolsGnuConfigScripts-}" ]; then return; fi

    local script f
    for script in config.sub config.guess; do
        # NUL-delimited read instead of word-splitting $(find ...), so paths
        # containing spaces survive.
        while IFS= read -r -d '' f; do
            echo "Updating Autotools / GNU config script to a newer upstream version: $f"
            cp -f "@gnu_config@/$script" "$f"
        done < <(find . -type f -name "$script" -print0)
    done
}
|
||||
1
bsc/setup-hooks/use-old-cxx-abi.sh
Normal file
1
bsc/setup-hooks/use-old-cxx-abi.sh
Normal file
@@ -0,0 +1 @@
|
||||
export NIX_CFLAGS_COMPILE+=" -D_GLIBCXX_USE_CXX11_ABI=0"
|
||||
19
bsc/setup-hooks/validate-pkg-config.sh
Normal file
19
bsc/setup-hooks/validate-pkg-config.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
# This setup hook validates each pkgconfig file in each output.
|
||||
|
||||
fixupOutputHooks+=(_validatePkgConfig)
|
||||
|
||||
# Fixup hook: run `pkg-config --validate` on every .pc file under $prefix.
_validatePkgConfig() {
    # Declare the failure flag once, before the loop. The original declared
    # it inside the loop body, so with zero .pc files the final test read an
    # unset variable (a test error), and each iteration reset it to 0,
    # hiding failures from earlier files.
    local bail=0
    local pc
    while IFS= read -r -d '' pc; do
        # Do not fail immediately. It's nice to see all errors when
        # there are multiple pkgconfig files.
        if ! pkg-config --validate "$pc"; then
            bail=1
        fi
    done < <(find "$prefix" -name '*.pc' -print0)

    if [ "$bail" -eq 1 ]; then
        exit 1
    fi
}
|
||||
45
bsc/setup-hooks/win-dll-link.sh
Normal file
45
bsc/setup-hooks/win-dll-link.sh
Normal file
@@ -0,0 +1,45 @@
|
||||
|
||||
fixupOutputHooks+=(_linkDLLs)
|
||||
|
||||
# For every *.{exe,dll} in $output/bin/ we try to find all (potential)
|
||||
# transitive dependencies and symlink those DLLs into $output/bin
|
||||
# so they are found on invocation.
|
||||
# (DLLs are first searched in the directory of the running exe file.)
|
||||
# The links are relative, so relocating whole /nix/store won't break them.
|
||||
# Fixup hook (Windows/Cygwin targets): symlink every DLL that the
# exe/dll files in $prefix/bin depend on into $prefix/bin itself.
_linkDLLs() {
    (   # subshell: cd and shell-option changes stay local to this hook
        if [ ! -d "$prefix/bin" ]; then exit; fi
        cd "$prefix/bin"

        # Compose path list where DLLs should be located:
        # prefix $PATH by currently-built outputs
        local DLLPATH=""
        local outName
        for outName in $outputs; do
            addToSearchPath DLLPATH "${!outName}/bin"
        done
        DLLPATH="$DLLPATH:$PATH"

        echo DLLPATH="'$DLLPATH'"

        linkCount=0
        # Iterate over any DLL that we depend on.
        local dll
        for dll in $($OBJDUMP -p *.{exe,dll} | sed -n 's/.*DLL Name: \(.*\)/\1/p' | sort -u); do
            if [ -e "./$dll" ]; then continue; fi
            # Locate the DLL - it should be an *executable* file on $DLLPATH.
            local dllPath="$(PATH="$DLLPATH" type -P "$dll")"
            if [ -z "$dllPath" ]; then continue; fi
            # That DLL might have its own (transitive) dependencies,
            # so add also all DLLs from its directory to be sure.
            local dllPath2
            for dllPath2 in "$dllPath" "$(dirname $(readlink "$dllPath" || echo "$dllPath"))"/*.dll; do
                if [ -e ./"$(basename "$dllPath2")" ]; then continue; fi
                # Cygwin: force real native symlinks so Windows loaders see them.
                CYGWIN+=\ winsymlinks:nativestrict ln -sr "$dllPath2" .
                linkCount=$(($linkCount+1))
            done
        done
        echo "Created $linkCount DLL link(s) in $prefix/bin"
    )
}
|
||||
|
||||
93
bsc/setup-hooks/wrap-gapps-hook.sh
Normal file
93
bsc/setup-hooks/wrap-gapps-hook.sh
Normal file
@@ -0,0 +1,93 @@
|
||||
# shellcheck shell=bash
# Accumulates the makeWrapper arguments shared by every wrapped GNOME app;
# filled in by the env hooks and gappsWrapperArgsHook below.
gappsWrapperArgs=()
|
||||
|
||||
# Register $1/lib/gio/modules on GIO_EXTRA_MODULES (via the shared wrapper
# argument list) when that directory exists and is non-empty.
find_gio_modules() {
    local moduleDir="$1/lib/gio/modules"
    if [[ -d "$moduleDir" && -n "$(ls -A "$moduleDir")" ]]; then
        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$moduleDir")
    fi
}
|
||||
|
||||
# Scan each dependency at the hook's target offset for GIO modules.
addEnvHooks "${targetOffset:?}" find_gio_modules
|
||||
|
||||
# Collect environment-derived wrapper arguments (pixbuf loaders, icon and
# schema data dirs, GIO modules, plus any variables listed in
# $wrapPrefixVariables) into gappsWrapperArgs.
# All lookups use ${VAR:-} so the hook is safe under `set -u` even when a
# variable is unset.
gappsWrapperArgsHook() {
    if [ -n "${GDK_PIXBUF_MODULE_FILE:-}" ]; then
        gappsWrapperArgs+=(--set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE")
    fi

    if [ -n "${XDG_ICON_DIRS:-}" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$XDG_ICON_DIRS")
    fi

    if [ -n "${GSETTINGS_SCHEMAS_PATH:-}" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$GSETTINGS_SCHEMAS_PATH")
    fi

    # Check for prefix as well
    if [ -d "${prefix:?}/share" ]; then
        gappsWrapperArgs+=(--prefix XDG_DATA_DIRS : "$prefix/share")
    fi

    if [ -d "$prefix/lib/gio/modules" ] && [ -n "$(ls -A "$prefix/lib/gio/modules")" ]; then
        gappsWrapperArgs+=(--prefix GIO_EXTRA_MODULES : "$prefix/lib/gio/modules")
    fi

    # ${!v:-} guards the indirect expansion against unset variables.
    for v in ${wrapPrefixVariables:-} GST_PLUGIN_SYSTEM_PATH_1_0 GI_TYPELIB_PATH GRL_PLUGIN_PATH; do
        if [ -n "${!v:-}" ]; then
            gappsWrapperArgs+=(--prefix "$v" : "${!v}")
        fi
    done
}
|
||||
|
||||
# Compute the wrapper argument list just before fixup wraps binaries.
preFixupPhases+=" gappsWrapperArgsHook"
|
||||
|
||||
# Wrap one program with the accumulated GNOME wrapper arguments, forwarding
# any additional makeWrapper flags supplied by the caller.
wrapGApp() {
    local target=$1
    shift
    wrapProgram "$target" "${gappsWrapperArgs[@]}" "$@"
}
|
||||
|
||||
# Note: $gappsWrapperArgs still gets defined even if ${dontWrapGApps-} is set.
|
||||
# Wrap every executable file (and every executable symlink pointing outside
# the target dirs) in $prefix/bin and $prefix/libexec with wrapGApp.
# Skipped entirely when dontWrapGApps is set.
wrapGAppsHook() {
    # guard against running multiple times (e.g. due to propagation)
    # ${...:-} keeps the first-run check safe under `set -u`.
    [ -z "${wrapGAppsHookHasRun:-}" ] || return 0
    wrapGAppsHookHasRun=1

    if [[ -z "${dontWrapGApps:-}" ]]; then
        targetDirsThatExist=()
        targetDirsRealPath=()

        # wrap binaries
        targetDirs=("${prefix}/bin" "${prefix}/libexec")
        for targetDir in "${targetDirs[@]}"; do
            if [[ -d "${targetDir}" ]]; then
                targetDirsThatExist+=("${targetDir}")
                targetDirsRealPath+=("$(realpath "${targetDir}")/")
                find "${targetDir}" -type f -executable -print0 |
                    while IFS= read -r -d '' file; do
                        echo "Wrapping program '${file}'"
                        wrapGApp "${file}"
                    done
            fi
        done

        # wrap links to binaries that point outside targetDirs
        # Note: links to binaries within targetDirs do not need
        # to be wrapped as the binaries have already been wrapped
        if [[ ${#targetDirsThatExist[@]} -ne 0 ]]; then
            find "${targetDirsThatExist[@]}" -type l -xtype f -executable -print0 |
                while IFS= read -r -d '' linkPath; do
                    linkPathReal=$(realpath "${linkPath}")
                    for targetPath in "${targetDirsRealPath[@]}"; do
                        if [[ "$linkPathReal" == "$targetPath"* ]]; then
                            echo "Not wrapping link: '$linkPath' (already wrapped)"
                            continue 2
                        fi
                    done
                    echo "Wrapping link: '$linkPath'"
                    wrapGApp "${linkPath}"
                done
        fi
    fi
}
|
||||
|
||||
# Wrap installed binaries as each output is fixed up.
fixupOutputHooks+=(wrapGAppsHook)
|
||||
Reference in New Issue
Block a user