Compare commits


1 commit

Author SHA1 Message Date
9deef256a2 Add quickShell to build devshells quickly 2025-10-28 16:49:03 +01:00
44 changed files with 43090 additions and 1221 deletions

flake.lock (generated)

@@ -2,16 +2,16 @@
"nodes": {
"nixpkgs": {
"locked": {
- "lastModified": 1764522689,
- "narHash": "sha256-SqUuBFjhl/kpDiVaKLQBoD8TLD+/cTUzzgVFoaHrkqY=",
+ "lastModified": 1752436162,
+ "narHash": "sha256-Kt1UIPi7kZqkSc5HVj6UY5YLHHEzPBkgpNUByuyxtlw=",
"owner": "NixOS",
"repo": "nixpkgs",
- "rev": "8bb5646e0bed5dbd3ab08c7a7cc15b75ab4e1d0f",
+ "rev": "dfcd5b901dbab46c9c6e80b265648481aafb01f8",
"type": "github"
},
"original": {
"owner": "NixOS",
- "ref": "nixos-25.11",
+ "ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
}


@@ -1,6 +1,6 @@
{
inputs = {
- nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11";
+ nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
};
outputs = { self, nixpkgs, ... }:


@@ -1,12 +1,12 @@
- { pkgs, ... }:
+ { pkgs, config, ... }:
{
environment.systemPackages = with pkgs; [
vim wget git htop tmux pciutils tcpdump ripgrep nix-index nixos-option
nix-diff ipmitool freeipmi ethtool lm_sensors cmake gnumake file tree
- ncdu perf ldns pv
- # From jungle overlay
- osumb nixgen
+ ncdu config.boot.kernelPackages.perf ldns pv
+ # From bsckgs overlay
+ osumb
];
programs.direnv.enable = true;


@@ -5,5 +5,5 @@
boot.kernelModules = [ "ipmi_watchdog" ];
# Enable systemd watchdog with 30 s interval
- systemd.settings.Manager.RuntimeWatchdogSec = 30;
+ systemd.watchdog.runtimeTime = "30s";
}


@@ -93,4 +93,20 @@
wantedBy = [ "multi-user.target" ];
serviceConfig.ExecStart = script;
};
+ # Only allow SSH connections from users who have a SLURM allocation
+ # See: https://slurm.schedmd.com/pam_slurm_adopt.html
+ security.pam.services.sshd.rules.account.slurm = {
+ control = "required";
+ enable = true;
+ modulePath = "${pkgs.slurm}/lib/security/pam_slurm_adopt.so";
+ args = [ "log_level=debug5" ];
+ order = 999999; # Make it last one
+ };
+ # Disable systemd session (pam_systemd.so) as it will conflict with the
+ # pam_slurm_adopt.so module. What happens is that the shell is first adopted
+ # into the slurmstepd task and then into the systemd session, which is not
+ # what we want, otherwise it will linger even if all jobs are gone.
+ security.pam.services.sshd.startSession = lib.mkForce false;
}


@@ -1,10 +1,3 @@
{
- services.nixseparatedebuginfod2 = {
- enable = true;
- substituters = [
- "local:"
- "https://cache.nixos.org"
- "http://hut/cache"
- ];
- };
+ services.nixseparatedebuginfod.enable = true;
}


@@ -1,4 +1,4 @@
- { lib, pkgs, ... }:
+ { lib, ... }:
{
imports = [
@@ -21,20 +21,4 @@
};
services.slurm.client.enable = true;
- # Only allow SSH connections from users who have a SLURM allocation
- # See: https://slurm.schedmd.com/pam_slurm_adopt.html
- security.pam.services.sshd.rules.account.slurm = {
- control = "required";
- enable = true;
- modulePath = "${pkgs.slurm}/lib/security/pam_slurm_adopt.so";
- args = [ "log_level=debug5" ];
- order = 999999; # Make it last one
- };
- # Disable systemd session (pam_systemd.so) as it will conflict with the
- # pam_slurm_adopt.so module. What happens is that the shell is first adopted
- # into the slurmstepd task and then into the systemd session, which is not
- # what we want, otherwise it will linger even if all jobs are gone.
- security.pam.services.sshd.startSession = lib.mkForce false;
}


@@ -1,6 +1,31 @@
{ config, pkgs, ... }:
- {
+ let
+ suspendProgram = pkgs.writeShellScript "suspend.sh" ''
+ exec 1>>/var/log/power_save.log 2>>/var/log/power_save.log
+ set -x
+ export "PATH=/run/current-system/sw/bin:$PATH"
+ echo "$(date) Suspend invoked $0 $*" >> /var/log/power_save.log
+ hosts=$(scontrol show hostnames $1)
+ for host in $hosts; do
+ echo Shutting down host: $host
+ ipmitool -I lanplus -H ''${host}-ipmi -P "" -U "" chassis power off
+ done
+ '';
+ resumeProgram = pkgs.writeShellScript "resume.sh" ''
+ exec 1>>/var/log/power_save.log 2>>/var/log/power_save.log
+ set -x
+ export "PATH=/run/current-system/sw/bin:$PATH"
+ echo "$(date) Suspend invoked $0 $*" >> /var/log/power_save.log
+ hosts=$(scontrol show hostnames $1)
+ for host in $hosts; do
+ echo Starting host: $host
+ ipmitool -I lanplus -H ''${host}-ipmi -P "" -U "" chassis power on
+ done
+ '';
+ in {
services.slurm = {
controlMachine = "apex";
clusterName = "jungle";
@@ -34,6 +59,16 @@
# the resources. Use the task/cgroup plugin to enable process containment.
TaskPlugin=task/affinity,task/cgroup
+ # Power off unused nodes until they are requested
+ SuspendProgram=${suspendProgram}
+ SuspendTimeout=60
+ ResumeProgram=${resumeProgram}
+ ResumeTimeout=300
+ SuspendExcNodes=fox
+ # Turn the nodes off after 1 hour of inactivity
+ SuspendTime=3600
# Reduce port range so we can allow only this range in the firewall
SrunPortRange=60000-61000


@@ -19,11 +19,8 @@ let
cudainfo = prev.callPackage ./pkgs/cudainfo/default.nix { };
#extrae = callPackage ./pkgs/extrae/default.nix { }; # Broken and outdated
gpi-2 = callPackage ./pkgs/gpi-2/default.nix { };
- intel-apt = callPackage ./pkgs/intel-oneapi/packages.nix { };
+ quickShell = import ./pkgs/quick-shell/default.nix { pkgs = final; };
intelPackages_2023 = callPackage ./pkgs/intel-oneapi/2023.nix { };
- intelPackages_2024 = final.intel-apt.hpckit_2024;
- intelPackages_2025 = final.intel-apt.hpckit_2025;
- intelPackages = final.intelPackages_2025;
jemallocNanos6 = callPackage ./pkgs/nanos6/jemalloc.nix { };
# FIXME: Extend this to all linuxPackages variants. Open problem, see:
# https://discourse.nixos.org/t/whats-the-right-way-to-make-a-custom-kernel-module-available/4636
@@ -34,21 +31,18 @@ let
amd-uprof-driver = _prev.callPackage ./pkgs/amd-uprof/driver.nix { };
});
lmbench = callPackage ./pkgs/lmbench/default.nix { };
- # Broken and unmantained
- # mcxx = callPackage ./pkgs/mcxx/default.nix { };
+ mcxx = callPackage ./pkgs/mcxx/default.nix { };
meteocat-exporter = prev.callPackage ./pkgs/meteocat-exporter/default.nix { };
mpi = final.mpich; # Set MPICH as default
mpich = callPackage ./pkgs/mpich/default.nix { mpich = prev.mpich; };
nanos6 = callPackage ./pkgs/nanos6/default.nix { };
nanos6Debug = final.nanos6.override { enableDebug = true; };
nixtools = callPackage ./pkgs/nixtools/default.nix { };
- nixgen = callPackage ./pkgs/nixgen/default.nix { };
# Broken because of pkgsStatic.libcap
# See: https://github.com/NixOS/nixpkgs/pull/268791
#nix-wrap = callPackage ./pkgs/nix-wrap/default.nix { };
nodes = callPackage ./pkgs/nodes/default.nix { };
nosv = callPackage ./pkgs/nosv/default.nix { };
- oneMath = callPackage ./pkgs/onemath/default.nix { };
openmp = callPackage ./pkgs/llvm-ompss2/openmp.nix { monorepoSrc = final.clangOmpss2Unwrapped.src; version = final.clangOmpss2Unwrapped.version; };
openmpv = final.openmp.override { enableNosv = true; enableOvni = true; };
osumb = callPackage ./pkgs/osu/default.nix { };
@@ -66,8 +60,6 @@ let
stdenvClangOmpss2NodesOmpv = final.stdenv.override { cc = final.clangOmpss2NodesOmpv; allowedRequisites = null; };
tagaspi = callPackage ./pkgs/tagaspi/default.nix { };
tampi = callPackage ./pkgs/tampi/default.nix { };
- tasycl = callPackage ./pkgs/tasycl/default.nix { };
- tasycl-acpp = callPackage ./pkgs/tasycl/default.nix { useIntel = false; };
upc-qaire-exporter = prev.callPackage ./pkgs/upc-qaire-exporter/default.nix { };
wxparaver = callPackage ./pkgs/paraver/default.nix { };
};
@@ -77,8 +69,6 @@ let
#sigsegv = callPackage ./test/reproducers/sigsegv.nix { };
hello-c = callPackage ./test/compilers/hello-c.nix { };
hello-cpp = callPackage ./test/compilers/hello-cpp.nix { };
- hello-sycl = callPackage ./test/compilers/hello-sycl.nix { };
- hello-syclompss = callPackage ./test/compilers/icpx-ompss2.nix { };
lto = callPackage ./test/compilers/lto.nix { };
asan = callPackage ./test/compilers/asan.nix { };
intel2023-icx-c = hello-c.override { stdenv = final.intelPackages_2023.stdenv; };
@@ -88,13 +78,6 @@ let
intel2023-ifort = callPackage ./test/compilers/hello-f.nix {
stdenv = final.intelPackages_2023.stdenv-ifort;
};
- intel2024-icx-c = hello-c.override { stdenv = final.intelPackages_2024.stdenv; };
- intel2025-icx-c = hello-c.override { stdenv = final.intelPackages_2025.stdenv; };
- intel2024-icx-cpp = hello-cpp.override { stdenv = final.intelPackages_2024.stdenv; };
- intel2025-icx-cpp = hello-cpp.override { stdenv = final.intelPackages_2025.stdenv; };
- # intel2023-sycl = hello-sycl.override { intelPackages = final.intelPackages_2023; }; # broken
- intel2024-sycl = hello-sycl.override { intelPackages = final.intelPackages_2024; };
- intel2025-sycl = hello-sycl.override { intelPackages = final.intelPackages_2025; };
clangOmpss2-lto = lto.override { stdenv = final.stdenvClangOmpss2Nanos6; };
clangOmpss2-asan = asan.override { stdenv = final.stdenvClangOmpss2Nanos6; };
clangOmpss2-task = callPackage ./test/compilers/ompss2.nix {
@@ -118,16 +101,14 @@ let
pkgsTopLevel = filterAttrs (_: isDerivation) bscPkgs;
# Native build in that platform doesn't imply cross build works
- canCrossCompile = platform: default: pkg:
+ canCrossCompile = platform: pkg:
(isDerivation pkg) &&
- # If meta.cross is undefined, use default
- (pkg.meta.cross or default) &&
- (meta.availableOn final.pkgsCross.${platform}.stdenv.hostPlatform pkg);
+ # Must be defined explicitly
+ (pkg.meta.cross or false) &&
+ (meta.availableOn platform pkg);
# For now only RISC-V
- crossSet = genAttrs [ "riscv64" ] (platform:
- filterAttrs (_: canCrossCompile platform true)
- final.pkgsCross.${platform}.bsc.pkgsTopLevel);
+ crossSet = { riscv64 = final.pkgsCross.riscv64.bsc.pkgsTopLevel; };
buildList = name: paths:
final.runCommandLocal name { } ''
@@ -147,7 +128,7 @@ let
# For now only RISC-V
crossList = buildList "ci-cross"
(filter
- (canCrossCompile "riscv64" false) # opt-in (pkgs with: meta.cross = true)
+ (canCrossCompile final.pkgsCross.riscv64.stdenv.hostPlatform)
(builtins.attrValues crossSet.riscv64));
in bscPkgs // {


@@ -90,7 +90,7 @@ in
meta = {
description = "Performance analysis tool-suite for x86 based applications";
homepage = "https://www.amd.com/es/developer/uprof.html";
- platforms = [ "x86_64-linux" ];
+ platforms = lib.platforms.linux;
license = lib.licenses.unfree;
maintainers = with lib.maintainers.bsc; [ rarias varcila ];
};


@@ -19,7 +19,7 @@ in stdenv.mkDerivation {
'';
hardeningDisable = [ "pic" "format" ];
nativeBuildInputs = kernel.moduleBuildDependencies;
- patches = [ ./makefile.patch ./hrtimer.patch ./remove-wr-rdmsrq.patch ];
+ patches = [ ./makefile.patch ./hrtimer.patch ];
makeFlags = [
"KERNEL_VERSION=${kernel.modDirVersion}"
"KERNEL_DIR=${kernel.dev}/lib/modules/${kernel.modDirVersion}/build"


@@ -1,20 +0,0 @@
diff --git a/inc/PwrProfAsm.h b/inc/PwrProfAsm.h
index d77770a..c93a0e9 100644
--- a/inc/PwrProfAsm.h
+++ b/inc/PwrProfAsm.h
@@ -347,6 +347,7 @@
#endif
+/*
#define rdmsrq(msr,val1,val2,val3,val4) ({ \
__asm__ __volatile__( \
"rdmsr\n" \
@@ -362,6 +363,7 @@
:"c"(msr), "a"(val1), "d"(val2), "S"(val3), "D"(val4) \
); \
})
+*/
#define rdmsrpw(msr,val1,val2,val3,val4) ({ \
__asm__ __volatile__( \


@@ -1,6 +1,5 @@
{
stdenv
- , lib
, cudatoolkit
, cudaPackages
, autoAddDriverRunpath
@@ -12,7 +11,7 @@ stdenv.mkDerivation (finalAttrs: {
src = ./.;
buildInputs = [
cudatoolkit # Required for nvcc
- (lib.getOutput "static" cudaPackages.cuda_cudart) # Required for -lcudart_static
+ cudaPackages.cuda_cudart.static # Required for -lcudart_static
autoAddDriverRunpath
];
installPhase = ''
@@ -41,9 +40,4 @@ stdenv.mkDerivation (finalAttrs: {
'';
installPhase = "touch $out";
};
- meta = {
- platforms = [ "x86_64-linux" ];
- maintainers = with lib.maintainers.bsc; [ rarias ];
- };
})


@@ -9,6 +9,7 @@
, automake
, libtool
, mpi
+ , rsync
, gfortran
}:
@@ -43,24 +44,13 @@ stdenv.mkDerivation rec {
configureFlags = [
"--with-infiniband=${rdma-core-all}"
- "--with-mpi=yes" # fixes mpi detection when cross-compiling
+ "--with-mpi=${mpiAll}"
"--with-slurm"
"CFLAGS=-fPIC"
"CXXFLAGS=-fPIC"
];
- nativeBuildInputs = [
- autoconf
- automake
- gfortran
- libtool
- ];
- buildInputs = [
- slurm
- mpiAll
- rdma-core-all
- ];
+ buildInputs = [ slurm mpiAll rdma-core-all autoconf automake libtool rsync gfortran ];
hardeningDisable = [ "all" ];
@@ -70,6 +60,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus;
- cross = false; # infiniband detection does not work
};
}
} }

View File

@@ -10,7 +10,7 @@
, zlib
, autoPatchelfHook
, libfabric
- , gcc
+ , gcc13
, wrapCCWith
}:
@@ -33,6 +33,8 @@ let
maintainers = with lib.maintainers.bsc; [ abonerib ];
};
+ gcc = gcc13;
v = {
hpckit = "2023.1.0";
compiler = "2023.1.0";
@@ -40,19 +42,45 @@ let
mpi = "2021.9.0";
};
- findMatch = name:
- let
- aptPackages = builtins.fromJSON (builtins.readFile ./packages.json);
- matches = lib.filter (x: name == x.pname) aptPackages;
- n = lib.length matches;
- match = builtins.traceVerbose (name + " -- ${builtins.toString n}") (builtins.head matches);
+ aptPackageIndex = stdenv.mkDerivation {
+ name = "intel-oneapi-packages";
+ srcs = [
+ # Run update.sh to update the package lists
+ ./amd64-packages ./all-packages
+ ];
+ phases = [ "installPhase" ];
+ installPhase = ''
+ awk -F': ' '\
+ BEGIN { print "[ {" } \
+ NR>1 && /^Package: / { print "} {"; } \
+ /: / { printf "%s = \"%s\";\n", $1, $2 } \
+ END { print "} ]" }' $srcs > $out
+ '';
+ };
+ aptPackages = import aptPackageIndex;
apthost = "https://apt.repos.intel.com/oneapi/";
+ getSum = pkgList: name:
+ let
+ matches = lib.filter (x: name == x.Package) pkgList;
+ #n = lib.length matches;
+ #match = builtins.trace (name + " -- ${builtins.toString n}") (lib.elemAt matches 0);
+ match = lib.elemAt matches 0;
in
- {
- url = apthost + match.filename;
- sha256 = match.sha256;
- };
+ match.SHA256;
+ getUrl = pkgList: name:
+ let
+ matches = lib.filter (x: name == x.Package) pkgList;
+ #match = assert lib.length matches == 1; lib.elemAt matches 0;
+ n = lib.length matches;
+ match =
+ #builtins.trace (name + " -- n=${builtins.toString n}")
+ (lib.elemAt matches 0);
+ in
+ apthost + match.Filename;
uncompressDebs = debs: name: stdenv.mkDerivation {
name = name;
@@ -72,7 +100,10 @@ let
joinDebs = name: names:
let
- debs = builtins.map (x: builtins.fetchurl (findMatch x)) names;
+ urls = builtins.map (x: getUrl aptPackages x) names;
+ sums = builtins.map (x: getSum aptPackages x) names;
+ getsrc = url: sha256: builtins.fetchurl { inherit url sha256; };
+ debs = lib.zipListsWith getsrc urls sums;
in
uncompressDebs debs "${name}-source";
@@ -441,7 +472,7 @@ let
'';
};
- ifort-wrapper = wrapIntel {
+ ifort-wrapper = wrapIntel rec {
cc = intel-compiler-fortran;
mygcc = gcc;
extraBuild = ''

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long


@@ -1,297 +0,0 @@
{
lib,
stdenv,
callPackage,
dpkg,
fetchurl,
sqlite,
elfutils,
}:
let
inherit (builtins)
attrNames
attrValues
concatMap
elem
filter
fromJSON
getAttr
groupBy
head
isNull
listToAttrs
map
mapAttrs
readFile
replaceStrings
splitVersion
;
inherit (lib)
converge
findFirst
groupBy'
hasPrefix
optional
pipe
take
toInt
toList
versionAtLeast
versionOlder
;
aptData = fromJSON (readFile ./packages.json);
# Compare versions in debian control file syntax
# See: https://www.debian.org/doc/debian-policy/ch-relationships.html#syntax-of-relationship-fields
#
# NOTE: this is not a proper version comparison
#
# A proper version solver, should aggregate dependencies with the same name
# and compute the constraint (e.g. a (>= 2) a (<< 5) -> 2 <= a << 5)
#
# But in the intel repo, there are no such "duplicated" dependencies to specify
# upper limits, which leads to issues when intel-hpckit-2021 depends on things
# like intel-basekit >= 2021.1.0-2403 and we end up installing the newest
# basekit instead of the one from 2021.
#
# To mitigate this, >= is set to take the latest version with matching major
# and minor (only revision and patch are allowed to change)
compareVersions =
got: kind: want:
let
g0 = take 2 (splitVersion got);
w0 = take 2 (splitVersion want);
in
if isNull want then
true
else if kind == "=" then
got == want
else if kind == "<<" then
versionOlder got want
else if kind == "<=" then
versionAtLeast want got
else if kind == ">>" then
versionOlder want got
else if kind == ">=" then
(g0 == w0) && versionAtLeast got want # always match major version
else
throw "unknown operation: ${kind}";
findMatching =
{
pname,
kind,
version,
}:
findFirst (x: pname == x.pname && compareVersions x.version kind version) null aptData;
isIntel = pkg: (hasPrefix "intel-" pkg.pname);
expandDeps =
pkg: (map findMatching (filter isIntel pkg.dependencies)) ++ (optional (pkg.size != 0) pkg);
# get the oldest by major version. If they have the same major version, take
# the newest. This prevents most issues with resolutions
# versionOlder b a -> true if b is older than a (b `older` a)
getNewerInMajor =
a: b:
let
va = a.version;
vb = b.version;
va0 = head (splitVersion va);
vb0 = head (splitVersion vb);
in
if isNull a then
b
else if va0 != vb0 then
if va0 > vb0 then b else a
else if versionOlder vb va then
a
else
b;
removeDups = l: attrValues (groupBy' getNewerInMajor null (getAttr "provides") l);
_resolveDeps = converge (l: removeDups (concatMap expandDeps l));
resolveDeps =
pkg:
let
deps = _resolveDeps (toList pkg);
namedDeps = (map (x: "${x.pname}-${x.version}") deps);
in
builtins.traceVerbose (builtins.deepSeq namedDeps namedDeps) deps;
blacklist = [
"intel-basekit-env"
"intel-basekit-getting-started"
"intel-hpckit-env"
"intel-hpckit-getting-started"
"intel-oneapi-advisor"
"intel-oneapi-common-licensing"
"intel-oneapi-common-oneapi-vars"
"intel-oneapi-common-vars"
"intel-oneapi-compiler-cpp-eclipse-cfg"
"intel-oneapi-compiler-dpcpp-eclipse-cfg"
"intel-oneapi-condaindex"
"intel-oneapi-dev-utilities-eclipse-cfg"
"intel-oneapi-dpcpp-ct-eclipse-cfg"
"intel-oneapi-eclipse-ide"
"intel-oneapi-hpc-toolkit-getting-started"
"intel-oneapi-icc-eclipse-plugin-cpp"
"intel-oneapi-vtune"
"intel-oneapi-vtune-eclipse-plugin-vtune"
];
isInBlacklist = pkg: elem pkg.provides blacklist;
removeBlacklist = filter (e: !(isInBlacklist e));
dpkgExtractAll =
pname: version:
{ srcs, deps }:
stdenv.mkDerivation {
inherit pname version srcs;
nativeBuildInputs = [ dpkg ];
phases = [ "installPhase" ];
passthru = { inherit deps; };
installPhase = ''
mkdir -p $out
for src in $srcs; do
echo "Unpacking $src"
dpkg -x $src $out
done
'';
};
apthost = "https://apt.repos.intel.com/oneapi/";
fetchDeb =
p:
fetchurl {
url = apthost + p.filename;
inherit (p) sha256;
};
buildIntel =
pkg:
pipe pkg [
resolveDeps
removeBlacklist
(l: {
srcs = map fetchDeb l;
deps = l;
})
(dpkgExtractAll "${pkg.provides}-extracted" pkg.version)
];
findHpcKit =
year:
findMatching {
pname = "intel-hpckit";
kind = "<<";
version = toString (year + 1);
};
years = map toInt (attrNames components);
patchIntel = callPackage ./patch_intel.nix { };
# Version information for each hpckit. This is used to normalize the paths
# so that files are in $out/{bin,lib,include...} instead of all over the place
# in $out/opt/intel/oneapi/*/*/{...}.
#
# The most important is the compiler component, which is used to build the
# stdenv for the hpckit.
#
# NOTE: this have to be manually specified, so we can avoid IFD. To add a
# new version, add a new field with an empty attrset, (e.g. "2026" = {}; ),
# build hpckit_2026.unpatched and use the values from
# result/opt/intel/oneapi/* to populate the attrset.
#
# WARN: if there are more than one version in the folders of the unpatched
# components, our dependency resolution hacks have probably failed and the
# package set may be broken.
components = {
"2025" = {
ishmem = "1.4";
pti = "0.13";
tcm = "1.4";
umf = "0.11";
ccl = "2021.16";
compiler = "2025.2";
dal = "2025.8";
debugger = "2025.2";
dev-utilities = "2025.2";
dnnl = "2025.2";
dpcpp-ct = "2025.2";
dpl = "2022.9";
ipp = "2022.2";
ippcp = "2025.2";
mkl = "2025.2";
mpi = "2021.16";
tbb = "2022.2";
};
"2024" = {
tcm = "1.1";
ccl = "2021.13";
compiler = "2024.2";
dal = "2024.6";
debugger = "2024.2";
dev-utilities = "2024.2";
diagnostics = "2024.2";
dnnl = "2024.2";
dpcpp-ct = "2024.2";
dpl = "2022.6";
ipp = "2021.12";
ippcp = "2021.12";
mkl = "2024.2";
mpi = "2021.13";
tbb = "2021.13";
extraPackages = [
sqlite
elfutils
];
};
};
replaceDots = replaceStrings [ "." ] [ "_" ];
in
lib.recurseIntoAttrs (
listToAttrs (
map (
year:
let
year_str = toString year;
in
{
name = "hpckit_${year_str}";
value = patchIntel {
unpatched = buildIntel (findHpcKit year);
components = components.${year_str};
};
}
) years
)
)
// {
apt = pipe aptData [
(groupBy (p: replaceDots p.provides))
(mapAttrs (
_: l:
listToAttrs (
map (pkg: {
name = replaceDots ("v" + pkg.version);
value = pkg;
}) l
)
))
];
inherit resolveDeps patchIntel buildIntel;
}


@@ -1,189 +0,0 @@
{
stdenv,
stdenvNoCC,
lib,
symlinkJoin,
autoPatchelfHook,
wrapCCWith,
overrideCC,
gcc,
hwloc,
libelf,
libffi_3_3,
libpsm2,
libuuid,
libxml2,
numactl,
ocl-icd,
openssl,
python3,
rdma-core,
ucx,
zlib,
}:
lib.makeOverridable (
{
unpatched,
components ? { },
extraPackages ? components.extraPackages or [ ],
}:
let
inherit (builtins)
attrValues
filter
mapAttrs
removeAttrs
;
__components = removeAttrs components [ "extraPackages" ];
_components = __components;
# _components = lib.traceSeqN 2 {
# inherit unpatched __components;
# deps = builtins.map (x: "${x.pname}-${x.version}") unpatched.deps;
# } __components;
wrapIntel =
cc:
let
targetConfig = stdenv.targetPlatform.config;
in
(wrapCCWith {
inherit cc;
nixSupport = {
cc-ldflags = [
"-L${gcc.cc}/lib/gcc/${targetConfig}/${gcc.version}"
"-L${gcc.cc.lib}/lib"
"-L${cc}/lib"
];
cc-cflags = [
"--gcc-toolchain=${gcc.cc}"
"-isystem \"${cc.original}/lib/clang/*/include\""
"-isystem ${cc}/include"
"-isystem ${cc}/include/intel64"
"-isystem ${gcc.cc}/lib/gcc/${targetConfig}/${gcc.version}/include"
];
libcxx-cxxflags = [
# "--gcc-toolchain=${gcc.cc}"
"-isystem ${gcc.cc}/include/c++/${gcc.version}"
"-isystem ${gcc.cc}/include/c++/${gcc.version}/${targetConfig}"
];
};
extraBuildCommands = ''
# FIXME: We should find a better way to modify the PATH instead of using
# this ugly hack. See https://jungle.bsc.es/git/rarias/bscpkgs/issues/9
echo 'path_backup="${gcc.cc}/bin:$path_backup"' >>$out/nix-support/cc-wrapper-hook
# Disable hardening by default
echo "" > $out/nix-support/add-hardening.sh
wrap icx $wrapper $ccPath/icx
wrap icpx $wrapper $ccPath/icpx
wrap ifx $wrapper $ccPath/ifx
ln -s $out/bin/icpx $out/bin/c++
ln -s $out/bin/icx $out/bin/cc
sed -i 's/.*isCxx=0/isCxx=1/' $out/bin/icpx
# Use this to detect when a compiler subprocess is called
# from icpx (--fsycl-host-compiler)
echo 'export NIX_CC_WRAPPER_INTEL=1' >>$out/nix-support/cc-wrapper-hook
# oneMath looks for sycl libraries in bin/../lib
ln -s ${cc}/lib $out/lib
ln -s ${cc}/include $out/include
'';
}).overrideAttrs
(old: {
installPhase = old.installPhase + ''
export named_cc="icx"
export named_cxx="icpx"
export named_fc="ifx"
'';
});
in
stdenvNoCC.mkDerivation (finalAttrs: {
pname = lib.removeSuffix "-extracted" unpatched.pname;
inherit (unpatched) version;
src = unpatched;
phases = [
"installPhase"
"fixupPhase"
];
buildInputs = [
libffi_3_3
libelf
libxml2
hwloc
numactl
libuuid
libpsm2
zlib
ocl-icd
rdma-core
ucx
openssl
python3
stdenv.cc.cc.lib
]
++ extraPackages;
autoPatchelfIgnoreMissingDeps = [
"libhwloc.so.5"
"libcuda.so.1"
"libze_loader.so.1"
];
# There are broken symlinks that go outside packages, ignore them
dontCheckForBrokenSymlinks = true;
nativeBuildInputs = [ autoPatchelfHook ];
installPhase = ''
cp -r $src/opt/intel/oneapi/ $out
'';
passthru =
let
pkgs = mapAttrs (
folder: version:
let
original = "${finalAttrs.finalPackage}/${folder}/${version}";
in
(symlinkJoin {
pname = "intel-${folder}";
inherit version;
paths = [ original ];
}).overrideAttrs
{ passthru = { inherit original; }; }
) _components;
in
pkgs
// {
inherit unpatched;
pkgs = lib.recurseIntoAttrs pkgs;
components = _components;
# This contains all packages properly symlinked into toplevel directories
# in $out.
#
# NOTE: there are clashes with packages that have symlinks outside their
# scope (libtcm and env/vars.sh)
all = symlinkJoin {
pname = finalAttrs.finalPackage + "-symlinked";
inherit (finalAttrs.finalPackage) version;
paths = filter lib.isDerivation (attrValues finalAttrs.finalPackage.pkgs);
};
stdenv = overrideCC stdenv finalAttrs.finalPackage.cc;
cc = wrapIntel finalAttrs.finalPackage.pkgs.compiler;
};
})
)


@@ -1,29 +0,0 @@
#!/usr/bin/env -S jq -f
def extract_fields: {
pname : .Package,
version : .Version,
provides : .Package | sub("[0-9.-]*$"; ""),
filename : .Filename,
size : ."Installed-Size" | tonumber,
sha256 : .SHA256,
dependencies : .Depends,
} ;
# parses dependencies into a list of [{.pname, .kind, .version}]
# some dependencies do not have a version specified, in which case, kind = version = null
#
# example dependencies:
# intel-oneapi-common-vars (>= 2023.0.0-25325), intel-oneapi-common-licensing-2023.0.0
def split_dependencies : map(try(.dependencies |= split(",\\s?"; "")) // .dependencies |= []) ;
def match_version : capture("(?<pname>[a-zA-Z0-9_\\-.]*) *(\\((?<kind>[<>=]*) *(?<version>.*)\\))?"; "") ;
def parse_dependencies : map_values(.dependencies.[] |= match_version) ;
def sort_version_decreasing : sort_by(.version | split("[-.]"; "") | map(tonumber)) | reverse ;
map(extract_fields) | split_dependencies | parse_dependencies | sort_version_decreasing
# [.[] | select(.pname == "intel-hpckit") | .version]


@@ -1,29 +0,0 @@
#!/usr/bin/env -S awk -f
BEGIN {
FS=": "
prev_empty=1
t=" "
print "[ {"
}
!NF { # empty line, update separator so next non empty line closes the dict
prev_empty=1
t="},\n{ "
next # skip line (we won't match anything else)
}
{
printf t "\"%s\" : \"%s\"\n", $1, $2
if (prev_empty) {
# we were the first after a group of empty lines, following ones have to
# have a comma
prev_empty=0
t=", "
}
}
END { print "} ]" }


@@ -1,11 +1,4 @@
#!/bin/sh
- out_64=$(mktemp intel-api.64.XXXXXX)
- out_all=$(mktemp intel-api.all.XXXXXX)
- trap 'rm -f "$out_64" "$out_all"' EXIT INT HUP
- curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-amd64/Packages -o "$out_64"
- curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-all/Packages -o "$out_all"
- # NOTE: we use `jq -r tostring` to minify the json (3.2Mb -> 2.3Mb)
- cat "$out_64" "$out_all" | ./toJson.awk | ./process.jq | jq -r tostring >packages.json
+ curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-amd64/Packages -o amd64-packages
+ curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-all/Packages -o all-packages


@@ -16,19 +16,19 @@
, useGit ? false
, gitUrl ? "ssh://git@bscpm04.bsc.es/llvm-ompss/llvm-mono.git"
, gitBranch ? "master"
- , gitCommit ? "872ba63f86edaefc9787984ef3fae9f2f94e0124" # github-release-2025.11
+ , gitCommit ? "880e2341c56bad1dc14e8c369fb3356bec19018e"
}:
let
stdenv = llvmPackages_latest.stdenv;
release = rec {
- version = "2025.11";
+ version = "2025.06";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "llvm";
rev = "refs/tags/github-release-${version}";
- hash = "sha256-UgwMTUkM9Z87dDH205swZFBeFhrcbLAxginViG40pBM=";
+ hash = "sha256-ww9PpRmtz/M9IyLiZ8rAehx2UW4VpQt+svf4XfKBzKo=";
};
};


@@ -3,7 +3,6 @@
, lib
, gcc
, clangOmpss2Unwrapped
- , writeShellScript
, openmp ? null
, wrapCCWith
, llvmPackages_latest
@@ -28,17 +27,20 @@ let
# We need to replace the lld linker from bintools with our linker just built,
# otherwise we run into incompatibility issues when mixing compiler and linker
# versions.
- bintools-unwrapped = llvmPackages_latest.bintools-unwrapped.override {
+ bintools-unwrapped = llvmPackages_latest.tools.bintools-unwrapped.override {
lld = clangOmpss2Unwrapped;
};
- bintools = llvmPackages_latest.bintools.override {
+ bintools = llvmPackages_latest.tools.bintools.override {
bintools = bintools-unwrapped;
};
targetConfig = stdenv.targetPlatform.config;
inherit gcc;
cc = clangOmpss2Unwrapped;
gccVersion = with versions; let v = gcc.version; in concatStringsSep "." [(major v) (minor v) (patch v)];
+ in wrapCCWith {
+ inherit cc bintools;
+ # extraPackages adds packages to depsTargetTargetPropagated
+ extraPackages = optional (openmp != null) openmp;
extraBuildCommands = ''
echo "-target ${targetConfig}" >> $out/nix-support/cc-cflags
echo "-B${gcc.cc}/lib/gcc/${targetConfig}/${gccVersion}" >> $out/nix-support/cc-cflags
@@ -55,9 +57,8 @@ let
echo "--gcc-toolchain=${gcc}" >> $out/nix-support/cc-cflags
wrap clang++ $wrapper $ccPath/clang++
- '';
- envExports = lib.optionalString (openmp != null) ''
+ '' + optionalString (openmp != null) ''
echo "export OPENMP_RUNTIME=${ompname}" >> $out/nix-support/cc-wrapper-hook
'' + optionalString (ompss2rt != null) ''
echo "export OMPSS2_RUNTIME=${rtname}" >> $out/nix-support/cc-wrapper-hook
@@ -65,40 +66,5 @@ let
'' + optionalString (ompss2rt != null && ompss2rt.pname == "nodes") ''
echo "export NOSV_HOME=${ompss2rt.nosv}" >> $out/nix-support/cc-wrapper-hook
'';
- }
- extraPackages = optional (openmp != null) openmp;
- wrappedCC = wrapCCWith {
- # extraPackages adds packages to depsTargetTargetPropagated
- inherit cc bintools extraPackages;
- extraBuildCommands = extraBuildCommands + envExports;
- };
- resetIntelCCFlags = let tconf = builtins.replaceStrings ["-"] ["_"] targetConfig;
- in writeShellScript "remove-intel.sh" ''
- if [ "''${NIX_CC_WRAPPER_INTEL:-0}" = 1 ]; then
- unset NIX_CFLAGS_COMPILE_${tconf}
- unset NIX_CC_WRAPPER_FLAGS_SET_${tconf}
- if (( "''${NIX_DEBUG:-0}" >= 1 )); then
- echo "ompss2: cleaned NIX_CFLAGS_COMPILE_${tconf} (invokation from intel compiler detected)"
- fi
- fi
- '';
- intelExtraBuildCommands = ''
- sed -i 's|# Flirting.*|source ${resetIntelCCFlags}\n\n&|' $out/bin/clang
- sed -i 's|# Flirting.*|source ${resetIntelCCFlags}\n\n&|' $out/bin/clang++
- '';
- wrappedCCIntel = wrapCCWith {
- inherit cc bintools extraPackages;
- # extraPackages adds packages to depsTargetTargetPropagated
- extraBuildCommands = intelExtraBuildCommands + envExports;
- };
- in wrappedCC.overrideAttrs (oldAttrs: {
- passthru = oldAttrs.passthru // {
- forIcpx = wrappedCCIntel;
- };
- })


@@ -65,7 +65,6 @@ stdenv.mkDerivation rec {
];
meta = {
- broken = true;
homepage = "https://github.com/bsc-pm/mcxx";
description = "C/C++/Fortran source-to-source compilation infrastructure aimed at fast prototyping";
maintainers = with lib.maintainers.bsc; [ rpenacob ];


@@ -1,11 +1,9 @@
{ python3Packages, lib }:
- python3Packages.buildPythonApplication {
+ python3Packages.buildPythonApplication rec {
pname = "meteocat-exporter";
version = "1.0";
- pyproject = true;
src = ./.;
doCheck = false;


@@ -6,13 +6,6 @@
, pmix
, gfortran
, symlinkJoin
- # Disabled when cross-compiling
- # To fix cross compilation, we should fill the values in:
- # https://github.com/pmodels/mpich/blob/main/maint/fcrosscompile/cross_values.txt.in
- # For each arch
- , enableFortran ? stdenv.hostPlatform == stdenv.buildPlatform
- , perl
- , targetPackages
}:
let
@@ -22,13 +15,10 @@ let
paths = [ pmix.dev pmix.out ];
};
in mpich.overrideAttrs (old: {
- buildInputs = old.buildInputs ++ [
+ buildInput = old.buildInputs ++ [
libfabric
pmixAll
];
- nativeBuildInputs = old.nativeBuildInputs ++ [
- perl
- ];
configureFlags = [
"--enable-shared"
"--enable-sharedlib"
@@ -41,21 +31,10 @@ let
] ++ lib.optionals (lib.versionAtLeast gfortran.version "10") [
"FFLAGS=-fallow-argument-mismatch" # https://github.com/pmodels/mpich/issues/4300
"FCFLAGS=-fallow-argument-mismatch"
- ] ++ lib.optionals (!enableFortran) [
- "--disable-fortran"
];
- preFixup = ''
- sed -i 's:^CC=.*:CC=${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}cc:' $out/bin/mpicc
- sed -i 's:^CXX=.*:CXX=${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}c++:' $out/bin/mpicxx
- '' + lib.optionalString enableFortran ''
- sed -i 's:^FC=.*:FC=${targetPackages.gfortran or gfortran}/bin/${targetPackages.gfortran.targetPrefix or gfortran.targetPrefix}gfortran:' $out/bin/mpifort
- '';
hardeningDisable = [ "all" ];
meta = old.meta // {
maintainers = old.meta.maintainers ++ (with lib.maintainers.bsc; [ rarias ]);
- cross = true;
};
})


@@ -1,22 +0,0 @@
{
stdenv
, lib
}:
stdenv.mkDerivation {
pname = "nixgen";
version = "0.0.1";
src = ./nixgen;
dontUnpack = true;
phases = [ "installPhase" ];
installPhase = ''
mkdir -p $out/bin
cp -a $src $out/bin/nixgen
'';
meta = {
description = "Quickly generate flake.nix from command line";
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus;
};
}


@@ -1,97 +0,0 @@
#!/bin/sh
#
# Copyright (c) 2025, Barcelona Supercomputing Center (BSC)
# SPDX-License-Identifier: GPL-3.0+
# Author: Rodrigo Arias Mallo <rodrigo.arias@bsc.es>
function usage() {
echo "USAGE: nixgen [-f] [package [...]] [-b package [...]]" >&2
echo " Generates a flake.nix file with the given packages." >&2
echo " After flake.nix is created, use 'nix develop' to enter the shell." >&2
echo "OPTIONS" >&2
echo " -f Overwrite existing flake.nix (default: no)." >&2
echo " packages... Add these packages to the shell." >&2
echo " -b packages... Add the dependencies needed to build these packages." >&2
echo "EXAMPLE" >&2
echo " $ nixgen ovni bigotes -b nosv tampi" >&2
echo " Adds the packages ovni and bigotes as well as all required dependencies" >&2
echo " to build nosv and tampi." >&2
echo "AUTHOR" >&2
echo " Rodrigo Arias Mallo <rodrigo.arias@bsc.es>" >&2
exit 1
}
mode=package
packages=
inputsFrom=
force=
if [[ $# -eq 0 ]]; then
usage
fi
while [[ $# -gt 0 ]]; do
case $1 in -b)
mode=build
shift
;;
-f)
force=1
shift
;;
-h)
usage
;;
-*|--*)
echo "error: unknown option $1" >&2
exit 1
;;
*)
if [ "$mode" == "package" ]; then
packages+="${packages:+ }$1"
else
inputsFrom+="${inputsFrom:+ }$1"
fi
shift
;;
esac
done
if [ ! "$force" -a -e flake.nix ]; then
echo "error: flake.nix exists, force overwrite with -f" >&2
exit 1
fi
cat > flake.nix <<EOF
{
inputs.jungle.url = "git+https://jungle.bsc.es/git/rarias/jungle";
outputs = { self, jungle }:
let
nixpkgs = jungle.inputs.nixpkgs;
customOverlay = (final: prev: {
# Example overlay, for now empty
});
pkgs = import nixpkgs {
system = "x86_64-linux";
overlays = [
# Apply jungle overlay to get our BSC custom packages
jungle.outputs.bscOverlay
# And on top apply our local changes to customize for cluster
customOverlay
];
};
in {
devShells.x86_64-linux.default = pkgs.mkShell {
pname = "devshell";
# Include these packages in the shell
packages = with pkgs; [
$packages
];
# The dependencies needed to build these packages will be also included
inputsFrom = with pkgs; [
$inputsFrom
];
};
};
}
EOF


@@ -14,19 +14,19 @@
, useGit ? false
, gitUrl ? "ssh://git@gitlab-internal.bsc.es/nos-v/nodes.git"
, gitBranch ? "master"
- , gitCommit ? "511489e71504a44381e0930562e7ac80ac69a848" # version-1.4
+ , gitCommit ? "6002ec9ae6eb876d962cc34366952a3b26599ba6"
}:
with lib;
let
release = rec {
- version = "1.4";
+ version = "1.3";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "nodes";
rev = "version-${version}";
- hash = "sha256-+lR/R0l3fGZO3XG7whMorFW2y2YZ0ZFnLeOHyQYrAsQ=";
+ hash = "sha256-cFb9pxcjtkMmH0CsGgUO9LTdXDNh7MCqicgGWawLrsU=";
};
};


@@ -13,19 +13,19 @@
, useGit ? false
, gitUrl ? "git@gitlab-internal.bsc.es:nos-v/nos-v.git"
, gitBranch ? "master"
- , gitCommit ? "1108e4786b58e0feb9a16fa093010b763eb2f8e8" # version 4.0.0
+ , gitCommit ? "9f47063873c3aa9d6a47482a82c5000a8c813dd8"
}:
with lib;
let
release = rec {
- version = "4.0.0";
+ version = "3.2.0";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "nos-v";
rev = "${version}";
- hash = "sha256-llaq73bd/YxLVKNlMebnUHKa4z3sdcsuDUoVwUxNuw8=";
+ hash = "sha256-yaz92426EM8trdkBJlISmAoG9KJCDTvoAW/HKrasvOw=";
};
};


@@ -1,91 +0,0 @@
{
lib,
fetchFromGitHub,
cmake,
withCFlags,
intelPackages,
mklSupport ? true,
config,
cudaSupport ? config.cudaSupport,
cudaPackages ? { },
rocmSupport ? config.rocmSupport,
hipTargets ? null, # only one target at a time supported
rocmPackages ? { },
}:
let
# rocmSupport is not enough, we need a specific target
enableHip = rocmSupport && hipTargets != null;
stdenv = withCFlags (lib.optionals cudaSupport [
"--cuda-path=${cudaPackages.cudatoolkit}"
]) intelPackages.stdenv;
in
# at least one backend has to be enabled
assert mklSupport || cudaSupport || enableHip;
stdenv.mkDerivation rec {
pname = "oneMath";
version = "0.8";
src = fetchFromGitHub {
owner = "uxlfoundation";
repo = "oneMath";
rev = "v${version}";
sha256 = "sha256-xK8lKI3oqKlx3xtvdScpMq+HXAuoYCP0BZdkEqnJP5o=";
};
cmakeFlags = [
(lib.cmakeBool "ENABLE_MKLCPU_BACKEND" mklSupport)
(lib.cmakeBool "ENABLE_MKLGPU_BACKEND" mklSupport)
(lib.cmakeBool "ENABLE_CUBLAS_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUFFT_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CURAND_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUSOLVER_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUSPARSE_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_ROCBLAS_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCFFT_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCSOLVER_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCRAND_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCSPARSE_BACKEND" enableHip)
(lib.cmakeBool "BUILD_FUNCTIONAL_TESTS" false)
(lib.cmakeBool "BUILD_EXAMPLES" false)
]
++ lib.optionals enableHip [
(lib.cmakeFeature "HIP_TARGETS" hipTargets)
];
nativeBuildInputs = [ cmake ];
buildInputs =
lib.optionals (mklSupport) [
intelPackages.mkl
intelPackages.tbb
]
++ lib.optionals (enableHip) [
rocmPackages.rocmPath
rocmPackages.rocblas
rocmPackages.rocfft
rocmPackages.rocsolver
rocmPackages.rocrand
rocmPackages.rocsparse
]
++ lib.optionals (cudaSupport) [
(lib.getDev cudaPackages.cuda_cudart)
cudaPackages.cudatoolkit
cudaPackages.libcublas
cudaPackages.libcurand
cudaPackages.libcufft
cudaPackages.libcusparse
cudaPackages.libcusolver
];
}


@@ -32,11 +32,6 @@ stdenv.mkDerivation rec {
"CXX=mpicxx"
];
- env = {
- MPICH_CC="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc";
- MPICH_CXX="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++";
- };
postInstall = ''
mkdir -p $out/bin
for f in $(find $out -executable -type f); do
@@ -49,6 +44,5 @@ stdenv.mkDerivation rec {
homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/";
maintainers = [ ];
platforms = lib.platforms.all;
- cross = true;
};
}


@@ -7,7 +7,7 @@
, useGit ? false
, gitBranch ? "master"
, gitUrl ? "ssh://git@bscpm04.bsc.es/rarias/ovni.git"
- , gitCommit ? "06432668f346c8bdc1006fabc23e94ccb81b0d8b" # version 1.13.0
+ , gitCommit ? "e4f62382076f0cf0b1d08175cf57cc0bc51abc61"
, enableDebug ? false
# Only enable MPI if the build is native (fails on cross-compilation)
, useMpi ? (stdenv.buildPlatform.canExecute stdenv.hostPlatform)
@@ -15,13 +15,13 @@
let
release = rec {
- version = "1.13.0";
+ version = "1.12.0";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "ovni";
rev = "${version}";
- hash = "sha256-0l2ryIyWNiZqeYdVlnj/WnQGS3xFCY4ICG8JedX424w=";
- } // { shortRev = "0643266"; };
+ hash = "sha256-H04JvsVKrdqr3ON7JhU0g17jjlg/jzQ7eTfx9vUNd3E=";
+ } // { shortRev = "a73afcf"; };
};
git = rec {

@@ -12,7 +12,7 @@
, paraverKernel
, openssl
, glibcLocales
- , wrapGAppsHook3
+ , wrapGAppsHook
}:
let
@@ -64,7 +64,7 @@ stdenv.mkDerivation rec {
autoconf
automake
autoreconfHook
- wrapGAppsHook3
+ wrapGAppsHook
];
buildInputs = [


@@ -0,0 +1,76 @@
{ pkgs }:
# This builds devshells using passthru to populate packages and inputsFrom
# lazily.
#
# Packages prefixed with @ are added to inputsFrom and the rest are
# added to packages. This makes it possible to obtain a devshell with combining
# inputsFrom and packages from the comand line:
#
# $ nix develop .#quickShell.@nosv.clang-tools.ovni.@tampi
#
# is equivalent to:
#
# with pkgs; mkShell {
# packages = [ clang-tools ovni ];
# inputsFrom = [ nosv tampi ];
# }
#
let
inherit (builtins) attrNames getAttr listToAttrs;
inherit (pkgs) lib;
# list of all packages' names
names = attrNames pkgs;
# generate an attribute set containing the selected packages + all the available
# packages in names ("" -> packages and "@" -> inputsFrom)
go =
selected:
let
mkAttrs =
isInputs:
(map (
name:
let
pkg = getAttr name pkgs;
in
{
name = (if isInputs then "@" else "") + name;
value = go (
if isInputs then
{
inherit (selected) packages;
inputsFrom = selected.inputsFrom ++ [ pkg ];
}
else
{
inherit (selected) inputsFrom;
packages = selected.packages ++ [ pkg ];
}
);
}
) names);
attrs = listToAttrs ((mkAttrs true) ++ (mkAttrs false));
drv = pkgs.mkShell {
inherit (selected) inputsFrom packages;
name = # build a label from inputsFrom
if (selected.inputsFrom == [ ]) then
"nix-quick-shell"
else
lib.concatStringsSep "-" (map (v: v.pname or v.name or "unknown") selected.inputsFrom);
};
in
drv.overrideAttrs {
passthru = attrs // {
_passthru = drv.passthru;
};
};
in
go {
inputsFrom = [ ];
packages = [ ];
}


@@ -35,6 +35,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.mit;
- cross = true;
};
}
} }


@@ -5,14 +5,23 @@
, automake
, autoconf
, libtool
+ , mpi
, autoreconfHook
, gpi-2
, boost
, numactl
, rdma-core
, gfortran
+ , symlinkJoin
}:
+ let
+ mpiAll = symlinkJoin {
+ name = "mpi-all";
+ paths = [ mpi.all ];
+ };
+ in
stdenv.mkDerivation rec {
pname = "tagaspi";
enableParallelBuilding = true;
@@ -26,18 +35,16 @@ stdenv.mkDerivation rec {
hash = "sha256-RGG/Re2uM293HduZfGzKUWioDtwnSYYdfeG9pVrX9EM=";
};
- nativeBuildInputs = [
+ buildInputs = [
autoreconfHook
automake
autoconf
libtool
- gfortran
- ];
- buildInputs = [
boost
numactl
rdma-core
+ gfortran
+ mpiAll
];
dontDisableStatic = true;
@@ -56,6 +63,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus;
- cross = false; # gpi-2 cannot cross
};
}


@@ -68,6 +68,5 @@ in stdenv.mkDerivation {
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus;
- cross = true;
};
}


@@ -1,87 +0,0 @@
{
lib,
stdenv,
autoconf,
automake,
autoreconfHook,
boost,
fetchFromGitHub,
gnumake,
libtool,
withCFlags,
useIntel ? true,
adaptivecpp ? null,
intelPackages ? null,
useGit ? false,
gitUrl ? "git@gitlab-internal.bsc.es:task-awareness/tasycl/tasycl.git",
gitBranch ? "main",
gitCommit ? "78f98dcf60a66e0eaa3b4ebcf55be076bec64825",
}:
assert !useIntel -> adaptivecpp != null;
assert useIntel -> intelPackages != null;
let
variant = if useIntel then "intel" else "acpp";
syclStdenv = withCFlags [ "-O3" ] (if useIntel then intelPackages.stdenv else stdenv);
release = rec {
version = "2.1.0";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "tasycl";
rev = version;
hash = "sha256-0kXnb0lHeQNHR27GTLbJ8xbiICLU8k2+FqEnnFSrzzo=";
};
};
git = rec {
version = src.shortRev;
src = builtins.fetchGit {
url = gitUrl;
ref = gitBranch;
rev = gitCommit;
};
};
source = if (useGit) then git else release;
in
syclStdenv.mkDerivation {
pname = "tasycl-${variant}";
inherit (source) src version;
enableParallelBuilding = true;
separateDebugInfo = true;
nativeBuildInputs = [
autoreconfHook
automake
autoconf
libtool
gnumake
];
buildInputs = [
boost
];
configureFlags = lib.optionals (!useIntel) [
"CXX=${lib.getExe adaptivecpp}"
];
# add symlinks so we can explicitly link with tasycl-intel / tasycl-acpp
postInstall = ''
pushd $out/lib
for i in libtasycl* ; do
ln -s "$i" "''\${i/tasycl/tasycl-${variant}}"
done
popd
'';
hardeningDisable = [ "all" ];
}


@@ -1,11 +1,9 @@
{ python3Packages, lib }:
- python3Packages.buildPythonApplication {
+ python3Packages.buildPythonApplication rec {
pname = "upc-qaire-exporter";
version = "1.0";
- pyproject = true;
src = ./.;
doCheck = false;


@@ -1,62 +0,0 @@
{
intelPackages,
writeText,
strace,
}:
let
stdenv = intelPackages.stdenv;
hello_sycl = writeText "hello.cpp" ''
#include <sycl/sycl.hpp>
class hello_world;
int main(int argc, char** argv) try {
auto device_selector = sycl::default_selector_v;
sycl::queue queue(device_selector);
std::cout << "Running on: "
<< queue.get_device().get_info<sycl::info::device::name>()
<< std::endl;
queue.submit([&] (sycl::handler& cgh) {
auto os = sycl::stream{128, 128, cgh};
cgh.single_task<hello_world>([=]() {
os << "Hello World! (on device)\n";
});
});
return 0;
} catch (sycl::exception &e) {
std::cout << "SYCL exception: " << e.what() << std::endl;
return 0; // we excpect to fail since no devices should be available;
}
'';
in
stdenv.mkDerivation {
version = "0.0.1";
name = "hello-sycl";
buildInputs = [
stdenv
strace
];
src = hello_sycl;
dontUnpack = true;
dontConfigure = true;
NIX_DEBUG = 0;
buildPhase = ''
cp $src hello.cpp
set -x
echo CXX=$CXX
command -v $CXX
$CXX -fsycl hello.cpp -o hello
./hello
set +x
'';
installPhase = ''
touch $out
'';
}


@@ -1,81 +0,0 @@
{
writeText,
intelPackages,
nodes,
nosv,
clangOmpss2Nodes,
strace,
}:
let
hello_cpp = writeText "hello.cpp" ''
#include <cstdio>
#include <sycl/sycl.hpp>
int main(int argc, char** argv) try {
sycl::queue queue;
std::cout << "Running on: "
<< queue.get_device().get_info<sycl::info::device::name>()
<< std::endl;
#pragma oss task
queue.submit([&] (sycl::handler& cgh) {
auto os = sycl::stream{128, 128, cgh};
cgh.single_task<class hello_world>([=]() {
os << "Hello World! (on device)\n";
});
}).wait();
return 0;
} catch (sycl::exception &e) {
std::cout << "SYCL exception: " << e.what() << std::endl;
return 0; // we expect to fail since no devices should be available;
}
'';
in
intelPackages.stdenv.mkDerivation {
version = "0.0.1";
name = "hello-syclompss";
src = hello_cpp;
nativeBuildInputs = [
strace
nodes
nosv
];
dontUnpack = true;
dontConfigure = true;
# NODES requires access to /sys/devices to request NUMA information
requiredSystemFeatures = [ "sys-devices" ];
env.NODES_HOME = nodes;
NIX_DEBUG = 1;
buildPhase = ''
cp $src hello.cpp
set -x
echo CXX=$CXX
echo NODES_HOME=$NODES_HOME
command -v $CXX
icpx -Wno-deprecated-declarations -fsycl \
-fsycl-host-compiler=${clangOmpss2Nodes.forIcpx}/bin/clang++ \
-fsycl-host-compiler-options='-Wno-deprecated-declarations -fompss-2=libnodes' \
-lnodes -lnosv \
$NODES_HOME/lib/nodes-main-wrapper.o \
hello.cpp -o hello
./hello
set +x
'';
installPhase = ''
touch $out
'';
}