2 Commits

SHA1         Message                                         Date
76aa169fe8   Add fetchGitMirror function                     2025-10-20 16:30:41 +02:00
             All checks were successful:
             CI / build:cross (pull_request): successful in 6s
             CI / build:all (pull_request): successful in 19s
3edf00af0c   Add nix patch to implement builtins.catchAll    2025-10-14 10:17:53 +02:00
61 changed files with 43150 additions and 1588 deletions

flake.lock (generated, 8 lines changed)

@@ -2,16 +2,16 @@
"nodes": { "nodes": {
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1767634882, "lastModified": 1752436162,
"narHash": "sha256-2GffSfQxe3sedHzK+sTKlYo/NTIAGzbFCIsNMUPAAnk=", "narHash": "sha256-Kt1UIPi7kZqkSc5HVj6UY5YLHHEzPBkgpNUByuyxtlw=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "3c9db02515ef1d9b6b709fc60ba9a540957f661c", "rev": "dfcd5b901dbab46c9c6e80b265648481aafb01f8",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-25.11", "ref": "nixos-25.05",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }


@@ -1,6 +1,6 @@
{ {
inputs = { inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.11"; nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
}; };
outputs = { self, nixpkgs, ... }: outputs = { self, nixpkgs, ... }:


@@ -57,18 +57,6 @@
}; };
}; };
services.fail2ban = {
enable = true;
maxretry = 5;
bantime-increment = {
enable = true; # Double ban time on each attack
maxtime = "7d"; # Ban up to a week
};
};
# Disable SSH login with password, allow only keypair
services.openssh.settings.PasswordAuthentication = false;
networking.firewall = { networking.firewall = {
extraCommands = '' extraCommands = ''
# Blackhole BSC vulnerability scanner (OpenVAS) as it is spamming our # Blackhole BSC vulnerability scanner (OpenVAS) as it is spamming our


@@ -24,7 +24,7 @@
address = "10.0.40.40"; address = "10.0.40.40";
prefixLength = 24; prefixLength = 24;
} ]; } ];
interfaces.ibs785.ipv4.addresses = [ { interfaces.ibp5s0.ipv4.addresses = [ {
address = "10.0.42.40"; address = "10.0.42.40";
prefixLength = 24; prefixLength = 24;
} ]; } ];


@@ -1,12 +1,12 @@
{ pkgs, ... }: { pkgs, config, ... }:
{ {
environment.systemPackages = with pkgs; [ environment.systemPackages = with pkgs; [
vim wget git htop tmux pciutils tcpdump ripgrep nix-index nixos-option vim wget git htop tmux pciutils tcpdump ripgrep nix-index nixos-option
nix-diff ipmitool freeipmi ethtool lm_sensors cmake gnumake file tree nix-diff ipmitool freeipmi ethtool lm_sensors cmake gnumake file tree
ncdu perf ldns pv ncdu config.boot.kernelPackages.perf ldns pv
# From jungle overlay # From bsckgs overlay
osumb nixgen osumb
]; ];
programs.direnv.enable = true; programs.direnv.enable = true;


@@ -139,7 +139,6 @@
openssh.authorizedKeys.keys = [ openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKGt0ESYxekBiHJQowmKpfdouw0hVm3N7tUMtAaeLejK vincent@varch" "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKGt0ESYxekBiHJQowmKpfdouw0hVm3N7tUMtAaeLejK vincent@varch"
]; ];
shell = pkgs.zsh;
}; };
pmartin1 = { pmartin1 = {
@@ -181,19 +180,6 @@
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFmMqKqPg4uocNOr3O41kLbZMOMJn3m2ZdN1JvTR96z3 bsccns@arnau-bsc" "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFmMqKqPg4uocNOr3O41kLbZMOMJn3m2ZdN1JvTR96z3 bsccns@arnau-bsc"
]; ];
}; };
aaguirre = {
uid = 9655;
isNormalUser = true;
home = "/home/Computational/aaguirre";
description = "Alejandro Aguirre";
group = "Computational";
hosts = [ "apex" "hut" ];
hashedPassword = "$6$TXRXQT6jjBvxkxU6$E.sh5KspAm1qeG5Ct7OPHpo8REmbGDwjFGvqeGgTVz3GASGOAnPL7UMZsMAsAKBoahOw.v8LNno6XGrTEPzZH1";
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOlRX7ZCnqtUJYCxKgWmgSrFCYuA2LHY96rVwqxXPl86 aaguirre@BSC-8488184117"
];
};
}; };
groups = { groups = {


@@ -5,5 +5,5 @@
boot.kernelModules = [ "ipmi_watchdog" ]; boot.kernelModules = [ "ipmi_watchdog" ];
# Enable systemd watchdog with 30 s interval # Enable systemd watchdog with 30 s interval
systemd.settings.Manager.RuntimeWatchdogSec = 30; systemd.watchdog.runtimeTime = "30s";
} }


@@ -1,6 +1,11 @@
{ pkgs, lib, ... }: { config, pkgs, lib, ... }:
{ {
# add the perf tool
environment.systemPackages = with pkgs; [
config.boot.kernelPackages.perf
];
# allow non-root users to read tracing data from the kernel # allow non-root users to read tracing data from the kernel
boot.kernel.sysctl."kernel.perf_event_paranoid" = -2; boot.kernel.sysctl."kernel.perf_event_paranoid" = -2;
boot.kernel.sysctl."kernel.kptr_restrict" = 0; boot.kernel.sysctl."kernel.kptr_restrict" = 0;


@@ -93,4 +93,20 @@
wantedBy = [ "multi-user.target" ]; wantedBy = [ "multi-user.target" ];
serviceConfig.ExecStart = script; serviceConfig.ExecStart = script;
}; };
# Only allow SSH connections from users who have a SLURM allocation
# See: https://slurm.schedmd.com/pam_slurm_adopt.html
security.pam.services.sshd.rules.account.slurm = {
control = "required";
enable = true;
modulePath = "${pkgs.slurm}/lib/security/pam_slurm_adopt.so";
args = [ "log_level=debug5" ];
order = 999999; # Make it last one
};
# Disable systemd session (pam_systemd.so) as it will conflict with the
# pam_slurm_adopt.so module. What happens is that the shell is first adopted
# into the slurmstepd task and then into the systemd session, which is not
# what we want, otherwise it will linger even if all jobs are gone.
security.pam.services.sshd.startSession = lib.mkForce false;
} }


@@ -17,7 +17,6 @@
./postgresql.nix ./postgresql.nix
./nginx.nix ./nginx.nix
./p.nix ./p.nix
./ompss2-timer.nix
#./pxe.nix #./pxe.nix
]; ];
@@ -45,7 +44,7 @@
address = "10.0.40.7"; address = "10.0.40.7";
prefixLength = 24; prefixLength = 24;
} ]; } ];
interfaces.ibs785.ipv4.addresses = [ { interfaces.ibp5s0.ipv4.addresses = [ {
address = "10.0.42.7"; address = "10.0.42.7";
prefixLength = 24; prefixLength = 24;
} ]; } ];


@@ -29,9 +29,6 @@
}; };
}; };
# Allow gitea user to send mail
users.users.gitea.extraGroups = [ "mail-robot" ];
services.gitea-actions-runner.instances = { services.gitea-actions-runner.instances = {
runrun = { runrun = {
enable = true; enable = true;


@@ -1,11 +1,8 @@
{ config, lib, ... }: { config, lib, ... }:
{ {
# Robot user that can see the password to send mail from jungle-robot
users.groups.mail-robot = {};
age.secrets.jungleRobotPassword = { age.secrets.jungleRobotPassword = {
file = ../../secrets/jungle-robot-password.age; file = ../../secrets/jungle-robot-password.age;
group = "mail-robot"; group = "gitea";
mode = "440"; mode = "440";
}; };


@@ -4,8 +4,8 @@ let
name = "jungle-web"; name = "jungle-web";
src = pkgs.fetchgit { src = pkgs.fetchgit {
url = "https://jungle.bsc.es/git/rarias/jungle-website.git"; url = "https://jungle.bsc.es/git/rarias/jungle-website.git";
rev = "52abaf4d71652a9ef77a0b098db14ca33bffff4c"; rev = "739bf0175a7f05380fe7ad7023ff1d60db1710e1";
hash = "sha256-/ul9GazbOrOkmlvSgDz/+2W+V+ir5725Y7mVLc3rb0M="; hash = "sha256-ea5DzhYTzZ9TmqD+x95rdNdLbxPnBluqlYH2NmBYmc4=";
}; };
buildInputs = [ pkgs.hugo ]; buildInputs = [ pkgs.hugo ];
buildPhase = '' buildPhase = ''


@@ -1,85 +0,0 @@
{ config, pkgs, ... }:
{
systemd.timers = {
"ompss2-closing" = {
wantedBy = [ "timers.target" ];
timerConfig = {
Unit = "ompss2-closing.service";
OnCalendar = [ "*-03-15 07:00:00" "*-09-15 07:00:00"];
};
};
"ompss2-freeze" = {
wantedBy = [ "timers.target" ];
timerConfig = {
Unit = "ompss2-freeze.service";
OnCalendar = [ "*-04-15 07:00:00" "*-10-15 07:00:00" ];
};
};
"ompss2-release" = {
wantedBy = [ "timers.target" ];
timerConfig = {
Unit = "ompss2-release.service";
OnCalendar = [ "*-05-15 07:00:00" "*-11-15 07:00:00" ];
};
};
};
systemd.services =
let
closing = pkgs.writeText "closing.txt"
''
Subject: OmpSs-2 release enters closing period
Hi,
You have one month to merge the remaining features for the next OmpSs-2
release. Please, identify what needs to be merged and discuss it in the next
OmpSs-2 meeting.
Thanks!,
Jungle robot
'';
freeze = pkgs.writeText "freeze.txt"
''
Subject: OmpSs-2 release enters freeze period
Hi,
The period to introduce new features or breaking changes is over, only bug
fixes are allowed now. During this time, please prepare the release notes
to be included in the next OmpSs-2 release.
Thanks!,
Jungle robot
'';
release = pkgs.writeText "release.txt"
''
Subject: OmpSs-2 release now
Hi,
The period to introduce bug fixes is now over. Please, proceed to do the
OmpSs-2 release.
Thanks!,
Jungle robot
'';
mkServ = name: mail: {
"ompss2-${name}" = {
script = ''
set -eu
set -o pipefail
cat ${mail} | ${config.security.wrapperDir}/sendmail star@bsc.es
'';
serviceConfig = {
Type = "oneshot";
DynamicUser = true;
Group = "mail-robot";
};
};
};
in
(mkServ "closing" closing) //
(mkServ "freeze" freeze) //
(mkServ "release" release);
}


@@ -46,7 +46,7 @@
address = "10.0.40.42"; address = "10.0.40.42";
prefixLength = 24; prefixLength = 24;
} ]; } ];
interfaces.ibs785.ipv4.addresses = [ { interfaces.ibp5s0.ipv4.addresses = [ {
address = "10.0.42.42"; address = "10.0.42.42";
prefixLength = 24; prefixLength = 24;
} ]; } ];


@@ -1,10 +1,3 @@
{ {
services.nixseparatedebuginfod2 = { services.nixseparatedebuginfod.enable = true;
enable = true;
substituters = [
"local:"
"https://cache.nixos.org"
"http://hut/cache"
];
};
} }


@@ -1,4 +1,4 @@
{ lib, pkgs, ... }: { lib, ... }:
{ {
imports = [ imports = [
@@ -21,20 +21,4 @@
}; };
services.slurm.client.enable = true; services.slurm.client.enable = true;
# Only allow SSH connections from users who have a SLURM allocation
# See: https://slurm.schedmd.com/pam_slurm_adopt.html
security.pam.services.sshd.rules.account.slurm = {
control = "required";
enable = true;
modulePath = "${pkgs.slurm}/lib/security/pam_slurm_adopt.so";
args = [ "log_level=debug5" ];
order = 999999; # Make it last one
};
# Disable systemd session (pam_systemd.so) as it will conflict with the
# pam_slurm_adopt.so module. What happens is that the shell is first adopted
# into the slurmstepd task and then into the systemd session, which is not
# what we want, otherwise it will linger even if all jobs are gone.
security.pam.services.sshd.startSession = lib.mkForce false;
} }


@@ -1,6 +1,31 @@
{ config, pkgs, ... }: { config, pkgs, ... }:
{ let
suspendProgram = pkgs.writeShellScript "suspend.sh" ''
exec 1>>/var/log/power_save.log 2>>/var/log/power_save.log
set -x
export "PATH=/run/current-system/sw/bin:$PATH"
echo "$(date) Suspend invoked $0 $*" >> /var/log/power_save.log
hosts=$(scontrol show hostnames $1)
for host in $hosts; do
echo Shutting down host: $host
ipmitool -I lanplus -H ''${host}-ipmi -P "" -U "" chassis power off
done
'';
resumeProgram = pkgs.writeShellScript "resume.sh" ''
exec 1>>/var/log/power_save.log 2>>/var/log/power_save.log
set -x
export "PATH=/run/current-system/sw/bin:$PATH"
echo "$(date) Suspend invoked $0 $*" >> /var/log/power_save.log
hosts=$(scontrol show hostnames $1)
for host in $hosts; do
echo Starting host: $host
ipmitool -I lanplus -H ''${host}-ipmi -P "" -U "" chassis power on
done
'';
in {
services.slurm = { services.slurm = {
controlMachine = "apex"; controlMachine = "apex";
clusterName = "jungle"; clusterName = "jungle";
@@ -34,6 +59,16 @@
# the resources. Use the task/cgroup plugin to enable process containment. # the resources. Use the task/cgroup plugin to enable process containment.
TaskPlugin=task/affinity,task/cgroup TaskPlugin=task/affinity,task/cgroup
# Power off unused nodes until they are requested
SuspendProgram=${suspendProgram}
SuspendTimeout=60
ResumeProgram=${resumeProgram}
ResumeTimeout=300
SuspendExcNodes=fox
# Turn the nodes off after 1 hour of inactivity
SuspendTime=3600
# Reduce port range so we can allow only this range in the firewall # Reduce port range so we can allow only this range in the firewall
SrunPortRange=60000-61000 SrunPortRange=60000-61000
@@ -51,7 +86,9 @@
# when a task runs (srun) so we can ssh early. # when a task runs (srun) so we can ssh early.
PrologFlags=Alloc,Contain,X11 PrologFlags=Alloc,Contain,X11
LaunchParameters=use_interactive_step # LaunchParameters=ulimit_pam_adopt will set RLIMIT_RSS in processes
# adopted by the external step, similar to tasks running in regular steps
# LaunchParameters=ulimit_pam_adopt
SlurmdDebug=debug5 SlurmdDebug=debug5
#DebugFlags=Protocol,Cgroup #DebugFlags=Protocol,Cgroup
''; '';


@@ -20,7 +20,7 @@
address = "10.0.40.1"; address = "10.0.40.1";
prefixLength = 24; prefixLength = 24;
} ]; } ];
interfaces.ibs785.ipv4.addresses = [ { interfaces.ibp5s0.ipv4.addresses = [ {
address = "10.0.42.1"; address = "10.0.42.1";
prefixLength = 24; prefixLength = 24;
} ]; } ];


@@ -21,7 +21,7 @@
prefixLength = 24; prefixLength = 24;
} ]; } ];
# Watch out! The OmniPath device is not in the same place here: # Watch out! The OmniPath device is not in the same place here:
interfaces.ibs801.ipv4.addresses = [ { interfaces.ibp129s0.ipv4.addresses = [ {
address = "10.0.42.2"; address = "10.0.42.2";
prefixLength = 24; prefixLength = 24;
} ]; } ];


@@ -27,7 +27,4 @@
}; };
}; };
}; };
# Allow gitea user to send mail
users.users.gitea.extraGroups = [ "mail-robot" ];
} }


@@ -4,8 +4,8 @@ let
name = "jungle-web"; name = "jungle-web";
src = pkgs.fetchgit { src = pkgs.fetchgit {
url = "https://jungle.bsc.es/git/rarias/jungle-website.git"; url = "https://jungle.bsc.es/git/rarias/jungle-website.git";
rev = "52abaf4d71652a9ef77a0b098db14ca33bffff4c"; rev = "739bf0175a7f05380fe7ad7023ff1d60db1710e1";
hash = "sha256-/ul9GazbOrOkmlvSgDz/+2W+V+ir5725Y7mVLc3rb0M="; hash = "sha256-ea5DzhYTzZ9TmqD+x95rdNdLbxPnBluqlYH2NmBYmc4=";
}; };
buildInputs = [ pkgs.hugo ]; buildInputs = [ pkgs.hugo ];
buildPhase = '' buildPhase = ''


@@ -1,9 +1,10 @@
final: /* Future last stage */ final: /* Future last stage */
prev: /* Previous stage */ prev: /* Previous stage */
with final.lib; #with final.lib;
let let
lib = prev.lib;
callPackage = final.callPackage; callPackage = final.callPackage;
bscPkgs = { bscPkgs = {
@@ -19,11 +20,7 @@ let
cudainfo = prev.callPackage ./pkgs/cudainfo/default.nix { }; cudainfo = prev.callPackage ./pkgs/cudainfo/default.nix { };
#extrae = callPackage ./pkgs/extrae/default.nix { }; # Broken and outdated #extrae = callPackage ./pkgs/extrae/default.nix { }; # Broken and outdated
gpi-2 = callPackage ./pkgs/gpi-2/default.nix { }; gpi-2 = callPackage ./pkgs/gpi-2/default.nix { };
intel-apt = callPackage ./pkgs/intel-oneapi/packages.nix { };
intelPackages_2023 = callPackage ./pkgs/intel-oneapi/2023.nix { }; intelPackages_2023 = callPackage ./pkgs/intel-oneapi/2023.nix { };
intelPackages_2024 = final.intel-apt.hpckit_2024;
intelPackages_2025 = final.intel-apt.hpckit_2025;
intelPackages = final.intelPackages_2025;
jemallocNanos6 = callPackage ./pkgs/nanos6/jemalloc.nix { }; jemallocNanos6 = callPackage ./pkgs/nanos6/jemalloc.nix { };
# FIXME: Extend this to all linuxPackages variants. Open problem, see: # FIXME: Extend this to all linuxPackages variants. Open problem, see:
# https://discourse.nixos.org/t/whats-the-right-way-to-make-a-custom-kernel-module-available/4636 # https://discourse.nixos.org/t/whats-the-right-way-to-make-a-custom-kernel-module-available/4636
@@ -34,21 +31,19 @@ let
amd-uprof-driver = _prev.callPackage ./pkgs/amd-uprof/driver.nix { }; amd-uprof-driver = _prev.callPackage ./pkgs/amd-uprof/driver.nix { };
}); });
lmbench = callPackage ./pkgs/lmbench/default.nix { }; lmbench = callPackage ./pkgs/lmbench/default.nix { };
# Broken and unmantained mcxx = callPackage ./pkgs/mcxx/default.nix { };
# mcxx = callPackage ./pkgs/mcxx/default.nix { };
meteocat-exporter = prev.callPackage ./pkgs/meteocat-exporter/default.nix { }; meteocat-exporter = prev.callPackage ./pkgs/meteocat-exporter/default.nix { };
mpi = final.mpich; # Set MPICH as default mpi = final.mpich; # Set MPICH as default
mpich = callPackage ./pkgs/mpich/default.nix { mpich = prev.mpich; }; mpich = callPackage ./pkgs/mpich/default.nix { mpich = prev.mpich; };
nanos6 = callPackage ./pkgs/nanos6/default.nix { }; nanos6 = callPackage ./pkgs/nanos6/default.nix { };
nanos6Debug = final.nanos6.override { enableDebug = true; }; nanos6Debug = final.nanos6.override { enableDebug = true; };
nix = callPackage ./pkgs/nix/default.nix { nix = prev.nix; };
nixtools = callPackage ./pkgs/nixtools/default.nix { }; nixtools = callPackage ./pkgs/nixtools/default.nix { };
nixgen = callPackage ./pkgs/nixgen/default.nix { };
# Broken because of pkgsStatic.libcap # Broken because of pkgsStatic.libcap
# See: https://github.com/NixOS/nixpkgs/pull/268791 # See: https://github.com/NixOS/nixpkgs/pull/268791
#nix-wrap = callPackage ./pkgs/nix-wrap/default.nix { }; #nix-wrap = callPackage ./pkgs/nix-wrap/default.nix { };
nodes = callPackage ./pkgs/nodes/default.nix { }; nodes = callPackage ./pkgs/nodes/default.nix { };
nosv = callPackage ./pkgs/nosv/default.nix { }; nosv = callPackage ./pkgs/nosv/default.nix { };
oneMath = callPackage ./pkgs/onemath/default.nix { };
openmp = callPackage ./pkgs/llvm-ompss2/openmp.nix { monorepoSrc = final.clangOmpss2Unwrapped.src; version = final.clangOmpss2Unwrapped.version; }; openmp = callPackage ./pkgs/llvm-ompss2/openmp.nix { monorepoSrc = final.clangOmpss2Unwrapped.src; version = final.clangOmpss2Unwrapped.version; };
openmpv = final.openmp.override { enableNosv = true; enableOvni = true; }; openmpv = final.openmp.override { enableNosv = true; enableOvni = true; };
osumb = callPackage ./pkgs/osu/default.nix { }; osumb = callPackage ./pkgs/osu/default.nix { };
@@ -58,7 +53,6 @@ let
prometheus-slurm-exporter = prev.callPackage ./pkgs/slurm-exporter/default.nix { }; prometheus-slurm-exporter = prev.callPackage ./pkgs/slurm-exporter/default.nix { };
#pscom = callPackage ./pkgs/parastation/pscom.nix { }; # Unmaintaned #pscom = callPackage ./pkgs/parastation/pscom.nix { }; # Unmaintaned
#psmpi = callPackage ./pkgs/parastation/psmpi.nix { }; # Unmaintaned #psmpi = callPackage ./pkgs/parastation/psmpi.nix { }; # Unmaintaned
slurm = import ./pkgs/slurm/default.nix { slurm = prev.slurm; };
sonar = callPackage ./pkgs/sonar/default.nix { }; sonar = callPackage ./pkgs/sonar/default.nix { };
stdenvClangOmpss2 = final.stdenv.override { cc = final.clangOmpss2; allowedRequisites = null; }; stdenvClangOmpss2 = final.stdenv.override { cc = final.clangOmpss2; allowedRequisites = null; };
stdenvClangOmpss2Nanos6 = final.stdenv.override { cc = final.clangOmpss2Nanos6; allowedRequisites = null; }; stdenvClangOmpss2Nanos6 = final.stdenv.override { cc = final.clangOmpss2Nanos6; allowedRequisites = null; };
@@ -66,8 +60,6 @@ let
stdenvClangOmpss2NodesOmpv = final.stdenv.override { cc = final.clangOmpss2NodesOmpv; allowedRequisites = null; }; stdenvClangOmpss2NodesOmpv = final.stdenv.override { cc = final.clangOmpss2NodesOmpv; allowedRequisites = null; };
tagaspi = callPackage ./pkgs/tagaspi/default.nix { }; tagaspi = callPackage ./pkgs/tagaspi/default.nix { };
tampi = callPackage ./pkgs/tampi/default.nix { }; tampi = callPackage ./pkgs/tampi/default.nix { };
tasycl = callPackage ./pkgs/tasycl/default.nix { };
tasycl-acpp = callPackage ./pkgs/tasycl/default.nix { useIntel = false; };
upc-qaire-exporter = prev.callPackage ./pkgs/upc-qaire-exporter/default.nix { }; upc-qaire-exporter = prev.callPackage ./pkgs/upc-qaire-exporter/default.nix { };
wxparaver = callPackage ./pkgs/paraver/default.nix { }; wxparaver = callPackage ./pkgs/paraver/default.nix { };
}; };
@@ -77,8 +69,6 @@ let
#sigsegv = callPackage ./test/reproducers/sigsegv.nix { }; #sigsegv = callPackage ./test/reproducers/sigsegv.nix { };
hello-c = callPackage ./test/compilers/hello-c.nix { }; hello-c = callPackage ./test/compilers/hello-c.nix { };
hello-cpp = callPackage ./test/compilers/hello-cpp.nix { }; hello-cpp = callPackage ./test/compilers/hello-cpp.nix { };
hello-sycl = callPackage ./test/compilers/hello-sycl.nix { };
hello-syclompss = callPackage ./test/compilers/icpx-ompss2.nix { };
lto = callPackage ./test/compilers/lto.nix { }; lto = callPackage ./test/compilers/lto.nix { };
asan = callPackage ./test/compilers/asan.nix { }; asan = callPackage ./test/compilers/asan.nix { };
intel2023-icx-c = hello-c.override { stdenv = final.intelPackages_2023.stdenv; }; intel2023-icx-c = hello-c.override { stdenv = final.intelPackages_2023.stdenv; };
@@ -88,13 +78,6 @@ let
intel2023-ifort = callPackage ./test/compilers/hello-f.nix { intel2023-ifort = callPackage ./test/compilers/hello-f.nix {
stdenv = final.intelPackages_2023.stdenv-ifort; stdenv = final.intelPackages_2023.stdenv-ifort;
}; };
intel2024-icx-c = hello-c.override { stdenv = final.intelPackages_2024.stdenv; };
intel2025-icx-c = hello-c.override { stdenv = final.intelPackages_2025.stdenv; };
intel2024-icx-cpp = hello-cpp.override { stdenv = final.intelPackages_2024.stdenv; };
intel2025-icx-cpp = hello-cpp.override { stdenv = final.intelPackages_2025.stdenv; };
# intel2023-sycl = hello-sycl.override { intelPackages = final.intelPackages_2023; }; # broken
intel2024-sycl = hello-sycl.override { intelPackages = final.intelPackages_2024; };
intel2025-sycl = hello-sycl.override { intelPackages = final.intelPackages_2025; };
clangOmpss2-lto = lto.override { stdenv = final.stdenvClangOmpss2Nanos6; }; clangOmpss2-lto = lto.override { stdenv = final.stdenvClangOmpss2Nanos6; };
clangOmpss2-asan = asan.override { stdenv = final.stdenvClangOmpss2Nanos6; }; clangOmpss2-asan = asan.override { stdenv = final.stdenvClangOmpss2Nanos6; };
clangOmpss2-task = callPackage ./test/compilers/ompss2.nix { clangOmpss2-task = callPackage ./test/compilers/ompss2.nix {
@@ -114,20 +97,21 @@ let
}; };
}; };
# Load our custom lib functions with import, callPackage fails.
lib' = import ./pkgs/lib.nix { lib = prev.lib; };
# For now, only build toplevel packages in CI/Hydra # For now, only build toplevel packages in CI/Hydra
pkgsTopLevel = filterAttrs (_: isDerivation) bscPkgs; pkgsTopLevel = lib.filterAttrs (_: lib.isDerivation) bscPkgs;
# Native build in that platform doesn't imply cross build works # Native build in that platform doesn't imply cross build works
canCrossCompile = platform: default: pkg: canCrossCompile = platform: pkg:
(isDerivation pkg) && (lib.isDerivation pkg) &&
# If meta.cross is undefined, use default # Must be defined explicitly
(pkg.meta.cross or default) && (pkg.meta.cross or false) &&
(meta.availableOn final.pkgsCross.${platform}.stdenv.hostPlatform pkg); (lib.meta.availableOn platform pkg);
# For now only RISC-V # For now only RISC-V
crossSet = genAttrs [ "riscv64" ] (platform: crossSet = { riscv64 = final.pkgsCross.riscv64.bsc.pkgsTopLevel; };
filterAttrs (_: canCrossCompile platform true)
final.pkgsCross.${platform}.bsc.pkgsTopLevel);
buildList = name: paths: buildList = name: paths:
final.runCommandLocal name { } '' final.runCommandLocal name { } ''
@@ -142,21 +126,17 @@ let
''; '';
pkgsList = buildList "ci-pkgs" (builtins.attrValues pkgsTopLevel); pkgsList = buildList "ci-pkgs" (builtins.attrValues pkgsTopLevel);
testsList = buildList "ci-tests" (collect isDerivation tests); testsList = buildList "ci-tests" (lib.collect lib.isDerivation tests);
allList = buildList' "ci-all" [ pkgsList testsList ]; allList = buildList' "ci-all" [ pkgsList testsList ];
# For now only RISC-V # For now only RISC-V
crossList = buildList "ci-cross" crossList = buildList "ci-cross"
(filter (lib.filter
(canCrossCompile "riscv64" false) # opt-in (pkgs with: meta.cross = true) (canCrossCompile final.pkgsCross.riscv64.stdenv.hostPlatform)
(builtins.attrValues crossSet.riscv64)); (builtins.attrValues crossSet.riscv64));
in bscPkgs // { in bscPkgs // {
lib = prev.lib // { lib = lib';
maintainers = prev.lib.maintainers // {
bsc = import ./pkgs/maintainers.nix;
};
};
# Prevent accidental usage of bsc-ci attribute # Prevent accidental usage of bsc-ci attribute
bsc-ci = throw "the bsc-ci attribute is deprecated, use bsc.ci"; bsc-ci = throw "the bsc-ci attribute is deprecated, use bsc.ci";


@@ -90,7 +90,7 @@ in
meta = { meta = {
description = "Performance analysis tool-suite for x86 based applications"; description = "Performance analysis tool-suite for x86 based applications";
homepage = "https://www.amd.com/es/developer/uprof.html"; homepage = "https://www.amd.com/es/developer/uprof.html";
platforms = [ "x86_64-linux" ]; platforms = lib.platforms.linux;
license = lib.licenses.unfree; license = lib.licenses.unfree;
maintainers = with lib.maintainers.bsc; [ rarias varcila ]; maintainers = with lib.maintainers.bsc; [ rarias varcila ];
}; };


@@ -19,7 +19,7 @@ in stdenv.mkDerivation {
''; '';
hardeningDisable = [ "pic" "format" ]; hardeningDisable = [ "pic" "format" ];
nativeBuildInputs = kernel.moduleBuildDependencies; nativeBuildInputs = kernel.moduleBuildDependencies;
patches = [ ./makefile.patch ./hrtimer.patch ./remove-wr-rdmsrq.patch ]; patches = [ ./makefile.patch ./hrtimer.patch ];
makeFlags = [ makeFlags = [
"KERNEL_VERSION=${kernel.modDirVersion}" "KERNEL_VERSION=${kernel.modDirVersion}"
"KERNEL_DIR=${kernel.dev}/lib/modules/${kernel.modDirVersion}/build" "KERNEL_DIR=${kernel.dev}/lib/modules/${kernel.modDirVersion}/build"


@@ -1,20 +0,0 @@
diff --git a/inc/PwrProfAsm.h b/inc/PwrProfAsm.h
index d77770a..c93a0e9 100644
--- a/inc/PwrProfAsm.h
+++ b/inc/PwrProfAsm.h
@@ -347,6 +347,7 @@
#endif
+/*
#define rdmsrq(msr,val1,val2,val3,val4) ({ \
__asm__ __volatile__( \
"rdmsr\n" \
@@ -362,6 +363,7 @@
:"c"(msr), "a"(val1), "d"(val2), "S"(val3), "D"(val4) \
); \
})
+*/
#define rdmsrpw(msr,val1,val2,val3,val4) ({ \
__asm__ __volatile__( \


@@ -14,16 +14,19 @@
, openblas , openblas
, ovni , ovni
, gitBranch ? "master" , gitBranch ? "master"
, gitURL ? "ssh://git@bscpm04.bsc.es/rarias/bench6.git"
, gitCommit ? "bf29a53113737c3aa74d2fe3d55f59868faea7b4" , gitCommit ? "bf29a53113737c3aa74d2fe3d55f59868faea7b4"
, gitUrls ? [
"ssh://git@bscpm04.bsc.es/rarias/bench6.git"
"https://github.com/rodarima/bench6.git"
]
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "bench6"; pname = "bench6";
version = "${src.shortRev}"; version = "${src.shortRev}";
src = builtins.fetchGit { src = lib.fetchGitMirror {
url = gitURL; urls = gitUrls;
ref = gitBranch; ref = gitBranch;
rev = gitCommit; rev = gitCommit;
}; };


@@ -1,6 +1,5 @@
{ {
stdenv stdenv
, lib
, cudatoolkit , cudatoolkit
, cudaPackages , cudaPackages
, autoAddDriverRunpath , autoAddDriverRunpath
@@ -12,7 +11,7 @@ stdenv.mkDerivation (finalAttrs: {
src = ./.; src = ./.;
buildInputs = [ buildInputs = [
cudatoolkit # Required for nvcc cudatoolkit # Required for nvcc
(lib.getOutput "static" cudaPackages.cuda_cudart) # Required for -lcudart_static cudaPackages.cuda_cudart.static # Required for -lcudart_static
autoAddDriverRunpath autoAddDriverRunpath
]; ];
installPhase = '' installPhase = ''
@@ -41,9 +40,4 @@ stdenv.mkDerivation (finalAttrs: {
''; '';
installPhase = "touch $out"; installPhase = "touch $out";
}; };
meta = {
platforms = [ "x86_64-linux" ];
maintainers = with lib.maintainers.bsc; [ rarias ];
};
}) })


@@ -9,6 +9,7 @@
, automake , automake
, libtool , libtool
, mpi , mpi
, rsync
, gfortran , gfortran
}: }:
@@ -43,24 +44,13 @@ stdenv.mkDerivation rec {
configureFlags = [ configureFlags = [
"--with-infiniband=${rdma-core-all}" "--with-infiniband=${rdma-core-all}"
"--with-mpi=yes" # fixes mpi detection when cross-compiling "--with-mpi=${mpiAll}"
"--with-slurm" "--with-slurm"
"CFLAGS=-fPIC" "CFLAGS=-fPIC"
"CXXFLAGS=-fPIC" "CXXFLAGS=-fPIC"
]; ];
nativeBuildInputs = [ buildInputs = [ slurm mpiAll rdma-core-all autoconf automake libtool rsync gfortran ];
autoconf
automake
gfortran
libtool
];
buildInputs = [
slurm
mpiAll
rdma-core-all
];
hardeningDisable = [ "all" ]; hardeningDisable = [ "all" ];
@@ -70,6 +60,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ]; maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux; platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus; license = lib.licenses.gpl3Plus;
cross = false; # infiniband detection does not work
}; };
} }


@@ -10,7 +10,7 @@
, zlib , zlib
, autoPatchelfHook , autoPatchelfHook
, libfabric , libfabric
, gcc , gcc13
, wrapCCWith , wrapCCWith
}: }:
@@ -33,6 +33,8 @@ let
maintainers = with lib.maintainers.bsc; [ abonerib ]; maintainers = with lib.maintainers.bsc; [ abonerib ];
}; };
gcc = gcc13;
v = { v = {
hpckit = "2023.1.0"; hpckit = "2023.1.0";
compiler = "2023.1.0"; compiler = "2023.1.0";
@@ -40,19 +42,45 @@ let
mpi = "2021.9.0"; mpi = "2021.9.0";
}; };
findMatch = name: aptPackageIndex = stdenv.mkDerivation {
let name = "intel-oneapi-packages";
aptPackages = builtins.fromJSON (builtins.readFile ./packages.json); srcs = [
matches = lib.filter (x: name == x.pname) aptPackages; # Run update.sh to update the package lists
n = lib.length matches; ./amd64-packages ./all-packages
match = builtins.traceVerbose (name + " -- ${builtins.toString n}") (builtins.head matches); ];
phases = [ "installPhase" ];
installPhase = ''
awk -F': ' '\
BEGIN { print "[ {" } \
NR>1 && /^Package: / { print "} {"; } \
/: / { printf "%s = \"%s\";\n", $1, $2 } \
END { print "} ]" }' $srcs > $out
'';
};
apthost = "https://apt.repos.intel.com/oneapi/"; aptPackages = import aptPackageIndex;
apthost = "https://apt.repos.intel.com/oneapi/";
getSum = pkgList: name:
let
matches = lib.filter (x: name == x.Package) pkgList;
#n = lib.length matches;
#match = builtins.trace (name + " -- ${builtins.toString n}") (lib.elemAt matches 0);
match = lib.elemAt matches 0;
in in
{ match.SHA256;
url = apthost + match.filename;
sha256 = match.sha256; getUrl = pkgList: name:
}; let
matches = lib.filter (x: name == x.Package) pkgList;
#match = assert lib.length matches == 1; lib.elemAt matches 0;
n = lib.length matches;
match =
#builtins.trace (name + " -- n=${builtins.toString n}")
(lib.elemAt matches 0);
in
apthost + match.Filename;
uncompressDebs = debs: name: stdenv.mkDerivation { uncompressDebs = debs: name: stdenv.mkDerivation {
name = name; name = name;
@@ -72,7 +100,10 @@ let
joinDebs = name: names: joinDebs = name: names:
let let
debs = builtins.map (x: builtins.fetchurl (findMatch x)) names; urls = builtins.map (x: getUrl aptPackages x) names;
sums = builtins.map (x: getSum aptPackages x) names;
getsrc = url: sha256: builtins.fetchurl { inherit url sha256; };
debs = lib.zipListsWith getsrc urls sums;
in in
uncompressDebs debs "${name}-source"; uncompressDebs debs "${name}-source";
@@ -441,7 +472,7 @@ let
''; '';
}; };
ifort-wrapper = wrapIntel { ifort-wrapper = wrapIntel rec {
cc = intel-compiler-fortran; cc = intel-compiler-fortran;
mygcc = gcc; mygcc = gcc;
extraBuild = '' extraBuild = ''

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long


@@ -1,297 +0,0 @@
{
lib,
stdenv,
callPackage,
dpkg,
fetchurl,
sqlite,
elfutils,
}:
let
inherit (builtins)
attrNames
attrValues
concatMap
elem
filter
fromJSON
getAttr
groupBy
head
isNull
listToAttrs
map
mapAttrs
readFile
replaceStrings
splitVersion
;
inherit (lib)
converge
findFirst
groupBy'
hasPrefix
optional
pipe
take
toInt
toList
versionAtLeast
versionOlder
;
aptData = fromJSON (readFile ./packages.json);
# Compare versions in debian control file syntax
# See: https://www.debian.org/doc/debian-policy/ch-relationships.html#syntax-of-relationship-fields
#
# NOTE: this is not a proper version comparison
#
# A proper version solver, should aggregate dependencies with the same name
# and compute the constraint (e.g. a (>= 2) a (<< 5) -> 2 <= a << 5)
#
# But in the intel repo, there are no such "duplicated" dependencies to specify
# upper limits, which leads to issues when intel-hpckit-2021 depends on things
# like intel-basekit >= 2021.1.0-2403 and we end up installing the newest
# basekit instead of the one from 2021.
#
# To mitigate this, >= is set to take the latest version with matching major
# and minor (only revision and patch are allowed to change)
compareVersions =
got: kind: want:
let
g0 = take 2 (splitVersion got);
w0 = take 2 (splitVersion want);
in
if isNull want then
true
else if kind == "=" then
got == want
else if kind == "<<" then
versionOlder got want
else if kind == "<=" then
versionAtLeast want got
else if kind == ">>" then
versionOlder want got
else if kind == ">=" then
(g0 == w0) && versionAtLeast got want # always match major version
else
throw "unknown operation: ${kind}";
findMatching =
{
pname,
kind,
version,
}:
findFirst (x: pname == x.pname && compareVersions x.version kind version) null aptData;
isIntel = pkg: (hasPrefix "intel-" pkg.pname);
expandDeps =
pkg: (map findMatching (filter isIntel pkg.dependencies)) ++ (optional (pkg.size != 0) pkg);
# get the oldest by major version. If they have the same major version, take
# the newest. This prevents most issues with resolutions
# versionOlder b a -> true if b is older than a (b `older` a)
getNewerInMajor =
a: b:
let
va = a.version;
vb = b.version;
va0 = head (splitVersion va);
vb0 = head (splitVersion vb);
in
if isNull a then
b
else if va0 != vb0 then
if va0 > vb0 then b else a
else if versionOlder vb va then
a
else
b;
removeDups = l: attrValues (groupBy' getNewerInMajor null (getAttr "provides") l);
_resolveDeps = converge (l: removeDups (concatMap expandDeps l));
resolveDeps =
pkg:
let
deps = _resolveDeps (toList pkg);
namedDeps = (map (x: "${x.pname}-${x.version}") deps);
in
builtins.traceVerbose (builtins.deepSeq namedDeps namedDeps) deps;
blacklist = [
"intel-basekit-env"
"intel-basekit-getting-started"
"intel-hpckit-env"
"intel-hpckit-getting-started"
"intel-oneapi-advisor"
"intel-oneapi-common-licensing"
"intel-oneapi-common-oneapi-vars"
"intel-oneapi-common-vars"
"intel-oneapi-compiler-cpp-eclipse-cfg"
"intel-oneapi-compiler-dpcpp-eclipse-cfg"
"intel-oneapi-condaindex"
"intel-oneapi-dev-utilities-eclipse-cfg"
"intel-oneapi-dpcpp-ct-eclipse-cfg"
"intel-oneapi-eclipse-ide"
"intel-oneapi-hpc-toolkit-getting-started"
"intel-oneapi-icc-eclipse-plugin-cpp"
"intel-oneapi-vtune"
"intel-oneapi-vtune-eclipse-plugin-vtune"
];
isInBlacklist = pkg: elem pkg.provides blacklist;
removeBlacklist = filter (e: !(isInBlacklist e));
dpkgExtractAll =
pname: version:
{ srcs, deps }:
stdenv.mkDerivation {
inherit pname version srcs;
nativeBuildInputs = [ dpkg ];
phases = [ "installPhase" ];
passthru = { inherit deps; };
installPhase = ''
mkdir -p $out
for src in $srcs; do
echo "Unpacking $src"
dpkg -x $src $out
done
'';
};
apthost = "https://apt.repos.intel.com/oneapi/";
fetchDeb =
p:
fetchurl {
url = apthost + p.filename;
inherit (p) sha256;
};
buildIntel =
pkg:
pipe pkg [
resolveDeps
removeBlacklist
(l: {
srcs = map fetchDeb l;
deps = l;
})
(dpkgExtractAll "${pkg.provides}-extracted" pkg.version)
];
findHpcKit =
year:
findMatching {
pname = "intel-hpckit";
kind = "<<";
version = toString (year + 1);
};
years = map toInt (attrNames components);
patchIntel = callPackage ./patch_intel.nix { };
# Version information for each hpckit. This is used to normalize the paths
# so that files are in $out/{bin,lib,include...} instead of all over the place
# in $out/opt/intel/oneapi/*/*/{...}.
#
# The most important is the compiler component, which is used to build the
# stdenv for the hpckit.
#
# NOTE: this have to be manually specified, so we can avoid IFD. To add a
# new version, add a new field with an empty attrset, (e.g. "2026" = {}; ),
# build hpckit_2026.unpatched and use the values from
# result/opt/intel/oneapi/* to populate the attrset.
#
# WARN: if there are more than one version in the folders of the unpatched
# components, our dependency resolution hacks have probably failed and the
# package set may be broken.
components = {
"2025" = {
ishmem = "1.4";
pti = "0.13";
tcm = "1.4";
umf = "0.11";
ccl = "2021.16";
compiler = "2025.2";
dal = "2025.8";
debugger = "2025.2";
dev-utilities = "2025.2";
dnnl = "2025.2";
dpcpp-ct = "2025.2";
dpl = "2022.9";
ipp = "2022.2";
ippcp = "2025.2";
mkl = "2025.2";
mpi = "2021.16";
tbb = "2022.2";
};
"2024" = {
tcm = "1.1";
ccl = "2021.13";
compiler = "2024.2";
dal = "2024.6";
debugger = "2024.2";
dev-utilities = "2024.2";
diagnostics = "2024.2";
dnnl = "2024.2";
dpcpp-ct = "2024.2";
dpl = "2022.6";
ipp = "2021.12";
ippcp = "2021.12";
mkl = "2024.2";
mpi = "2021.13";
tbb = "2021.13";
extraPackages = [
sqlite
elfutils
];
};
};
replaceDots = replaceStrings [ "." ] [ "_" ];
in
lib.recurseIntoAttrs (
listToAttrs (
map (
year:
let
year_str = toString year;
in
{
name = "hpckit_${year_str}";
value = patchIntel {
unpatched = buildIntel (findHpcKit year);
components = components.${year_str};
};
}
) years
)
)
// {
apt = pipe aptData [
(groupBy (p: replaceDots p.provides))
(mapAttrs (
_: l:
listToAttrs (
map (pkg: {
name = replaceDots ("v" + pkg.version);
value = pkg;
}) l
)
))
];
inherit resolveDeps patchIntel buildIntel;
}


@@ -1,189 +0,0 @@
{
stdenv,
stdenvNoCC,
lib,
symlinkJoin,
autoPatchelfHook,
wrapCCWith,
overrideCC,
gcc,
hwloc,
libelf,
libffi_3_3,
libpsm2,
libuuid,
libxml2,
numactl,
ocl-icd,
openssl,
python3,
rdma-core,
ucx,
zlib,
}:
lib.makeOverridable (
{
unpatched,
components ? { },
extraPackages ? components.extraPackages or [ ],
}:
let
inherit (builtins)
attrValues
filter
mapAttrs
removeAttrs
;
__components = removeAttrs components [ "extraPackages" ];
_components = __components;
# _components = lib.traceSeqN 2 {
# inherit unpatched __components;
# deps = builtins.map (x: "${x.pname}-${x.version}") unpatched.deps;
# } __components;
wrapIntel =
cc:
let
targetConfig = stdenv.targetPlatform.config;
in
(wrapCCWith {
inherit cc;
nixSupport = {
cc-ldflags = [
"-L${gcc.cc}/lib/gcc/${targetConfig}/${gcc.version}"
"-L${gcc.cc.lib}/lib"
"-L${cc}/lib"
];
cc-cflags = [
"--gcc-toolchain=${gcc.cc}"
"-isystem \"${cc.original}/lib/clang/*/include\""
"-isystem ${cc}/include"
"-isystem ${cc}/include/intel64"
"-isystem ${gcc.cc}/lib/gcc/${targetConfig}/${gcc.version}/include"
];
libcxx-cxxflags = [
# "--gcc-toolchain=${gcc.cc}"
"-isystem ${gcc.cc}/include/c++/${gcc.version}"
"-isystem ${gcc.cc}/include/c++/${gcc.version}/${targetConfig}"
];
};
extraBuildCommands = ''
# FIXME: We should find a better way to modify the PATH instead of using
# this ugly hack. See https://jungle.bsc.es/git/rarias/bscpkgs/issues/9
echo 'path_backup="${gcc.cc}/bin:$path_backup"' >>$out/nix-support/cc-wrapper-hook
# Disable hardening by default
echo "" > $out/nix-support/add-hardening.sh
wrap icx $wrapper $ccPath/icx
wrap icpx $wrapper $ccPath/icpx
wrap ifx $wrapper $ccPath/ifx
ln -s $out/bin/icpx $out/bin/c++
ln -s $out/bin/icx $out/bin/cc
sed -i 's/.*isCxx=0/isCxx=1/' $out/bin/icpx
# Use this to detect when a compiler subprocess is called
# from icpx (--fsycl-host-compiler)
echo 'export NIX_CC_WRAPPER_INTEL=1' >>$out/nix-support/cc-wrapper-hook
# oneMath looks for sycl libraries in bin/../lib
ln -s ${cc}/lib $out/lib
ln -s ${cc}/include $out/include
'';
}).overrideAttrs
(old: {
installPhase = old.installPhase + ''
export named_cc="icx"
export named_cxx="icpx"
export named_fc="ifx"
'';
});
in
stdenvNoCC.mkDerivation (finalAttrs: {
pname = lib.removeSuffix "-extracted" unpatched.pname;
inherit (unpatched) version;
src = unpatched;
phases = [
"installPhase"
"fixupPhase"
];
buildInputs = [
libffi_3_3
libelf
libxml2
hwloc
numactl
libuuid
libpsm2
zlib
ocl-icd
rdma-core
ucx
openssl
python3
stdenv.cc.cc.lib
]
++ extraPackages;
autoPatchelfIgnoreMissingDeps = [
"libhwloc.so.5"
"libcuda.so.1"
"libze_loader.so.1"
];
# There are broken symlinks that go outside packages, ignore them
dontCheckForBrokenSymlinks = true;
nativeBuildInputs = [ autoPatchelfHook ];
installPhase = ''
cp -r $src/opt/intel/oneapi/ $out
'';
passthru =
let
pkgs = mapAttrs (
folder: version:
let
original = "${finalAttrs.finalPackage}/${folder}/${version}";
in
symlinkJoin {
pname = "intel-${folder}";
inherit version;
paths = [ original ];
passthru = { inherit original; };
}
) _components;
in
pkgs
// {
inherit unpatched;
pkgs = lib.recurseIntoAttrs pkgs;
components = _components;
# This contains all packages properly symlinked into toplevel directories
# in $out.
#
# NOTE: there are clashes with packages that have symlinks outside their
# scope (libtcm and env/vars.sh)
all = symlinkJoin {
pname = finalAttrs.finalPackage.pname + "-symlinked";
inherit (finalAttrs.finalPackage) version;
paths = filter lib.isDerivation (attrValues finalAttrs.finalPackage.pkgs);
};
stdenv = overrideCC stdenv finalAttrs.finalPackage.cc;
cc = wrapIntel finalAttrs.finalPackage.pkgs.compiler;
};
})
)


@@ -1,29 +0,0 @@
#!/usr/bin/env -S jq -f
def extract_fields: {
pname : .Package,
version : .Version,
provides : .Package | sub("[0-9.-]*$"; ""),
filename : .Filename,
size : ."Installed-Size" | tonumber,
sha256 : .SHA256,
dependencies : .Depends,
} ;
# parses dependencies into a list of [{.pname, .kind, .version}]
# some dependencies do not have a version specified, in which case, kind = version = null
#
# example dependencies:
# intel-oneapi-common-vars (>= 2023.0.0-25325), intel-oneapi-common-licensing-2023.0.0
def split_dependencies : map(try(.dependencies |= split(",\\s?"; "")) // .dependencies |= []) ;
def match_version : capture("(?<pname>[a-zA-Z0-9_\\-.]*) *(\\((?<kind>[<>=]*) *(?<version>.*)\\))?"; "") ;
def parse_dependencies : map_values(.dependencies.[] |= match_version) ;
def sort_version_decreasing : sort_by(.version | split("[-.]"; "") | map(tonumber)) | reverse ;
map(extract_fields) | split_dependencies | parse_dependencies | sort_version_decreasing
# [.[] | select(.pname == "intel-hpckit") | .version]


@@ -1,29 +0,0 @@
#!/usr/bin/env -S awk -f
BEGIN {
FS=": "
prev_empty=1
t=" "
print "[ {"
}
!NF { # empty line, update separator so next non empty line closes the dict
prev_empty=1
t="},\n{ "
next # skip line (we won't match anything else)
}
{
printf t "\"%s\" : \"%s\"\n", $1, $2
if (prev_empty) {
# we were the first after a group of empty lines, following ones have to
# have a comma
prev_empty=0
t=", "
}
}
END { print "} ]" }

View File

@@ -1,11 +1,4 @@
#!/bin/sh #!/bin/sh
out_64=$(mktemp intel-api.64.XXXXXX) curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-amd64/Packages -o amd64-packages
out_all=$(mktemp intel-api.all.XXXXXX) curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-all/Packages -o all-packages
trap 'rm -f "$out_64" "$out_all"' EXIT INT HUP
curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-amd64/Packages -o "$out_64"
curl https://apt.repos.intel.com/oneapi/dists/all/main/binary-all/Packages -o "$out_all"
# NOTE: we use `jq -r tostring` to minify the json (3.2Mb -> 2.3Mb)
cat "$out_64" "$out_all" | ./toJson.awk | ./process.jq | jq -r tostring >packages.json

pkgs/lib.nix (new file, 30 lines added)

@@ -0,0 +1,30 @@
{ lib }:
let
# If not supported, fall back to tryEval, which will fail in the first case.
safeCatchAll = if (builtins ? catchAll)
then builtins.catchAll
else e: (builtins.tryEval e) // { msg = ""; };
in lib.extend (_: lib: {
# Same as fetchGit but accepts a list of mirror urls
fetchGitMirror = { urls, ... } @ args:
let
cleanArgs = lib.removeAttrs args [ "urls" ];
fetchUrl = url: builtins.fetchGit (cleanArgs // { inherit url; });
safeFetch = url: safeCatchAll (fetchUrl url);
complain = url:
let
r = safeFetch url;
in
if (r.success) then r
else lib.warn "cannot fetch ${url}, trying next
mirror:${builtins.replaceStrings ["\n" ] ["\n> "] ("\n"+r.msg)}" r;
fetchList = lib.map (url: complain url) urls;
bad = throw "cannot fetch from any mirror";
good = lib.findFirst (e: e.success) bad fetchList;
in good.value;
maintainers = lib.maintainers // {
bsc = import ./maintainers.nix;
};
})
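
For reference, a minimal usage sketch of the new lib.fetchGitMirror helper, assuming the overlay is applied so that lib already carries this extension. The URLs and revision below are hypothetical placeholders; the real call site is the bench6 expression above.

{ lib, stdenv }:

stdenv.mkDerivation {
  pname = "example";
  version = "0.1";
  # fetchGitMirror tries each mirror in order with builtins.fetchGit and
  # returns the first result that succeeds; failing mirrors only emit a warning.
  src = lib.fetchGitMirror {
    urls = [
      "ssh://git@git.internal.example/group/example.git"  # hypothetical mirror
      "https://github.com/example/example.git"            # hypothetical mirror
    ];
    ref = "master";
    rev = "0000000000000000000000000000000000000000";     # placeholder commit
  };
}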


@@ -16,19 +16,19 @@
, useGit ? false , useGit ? false
, gitUrl ? "ssh://git@bscpm04.bsc.es/llvm-ompss/llvm-mono.git" , gitUrl ? "ssh://git@bscpm04.bsc.es/llvm-ompss/llvm-mono.git"
, gitBranch ? "master" , gitBranch ? "master"
, gitCommit ? "872ba63f86edaefc9787984ef3fae9f2f94e0124" # github-release-2025.11 , gitCommit ? "880e2341c56bad1dc14e8c369fb3356bec19018e"
}: }:
let let
stdenv = llvmPackages_latest.stdenv; stdenv = llvmPackages_latest.stdenv;
release = rec { release = rec {
version = "2025.11"; version = "2025.06";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "bsc-pm"; owner = "bsc-pm";
repo = "llvm"; repo = "llvm";
rev = "refs/tags/github-release-${version}"; rev = "refs/tags/github-release-${version}";
hash = "sha256-UgwMTUkM9Z87dDH205swZFBeFhrcbLAxginViG40pBM="; hash = "sha256-ww9PpRmtz/M9IyLiZ8rAehx2UW4VpQt+svf4XfKBzKo=";
}; };
}; };


@@ -3,7 +3,6 @@
, lib , lib
, gcc , gcc
, clangOmpss2Unwrapped , clangOmpss2Unwrapped
, writeShellScript
, openmp ? null , openmp ? null
, wrapCCWith , wrapCCWith
, llvmPackages_latest , llvmPackages_latest
@@ -28,17 +27,20 @@ let
# We need to replace the lld linker from bintools with our linker just built, # We need to replace the lld linker from bintools with our linker just built,
# otherwise we run into incompatibility issues when mixing compiler and linker # otherwise we run into incompatibility issues when mixing compiler and linker
# versions. # versions.
bintools-unwrapped = llvmPackages_latest.bintools-unwrapped.override { bintools-unwrapped = llvmPackages_latest.tools.bintools-unwrapped.override {
lld = clangOmpss2Unwrapped; lld = clangOmpss2Unwrapped;
}; };
bintools = llvmPackages_latest.bintools.override { bintools = llvmPackages_latest.tools.bintools.override {
bintools = bintools-unwrapped; bintools = bintools-unwrapped;
}; };
targetConfig = stdenv.targetPlatform.config; targetConfig = stdenv.targetPlatform.config;
inherit gcc; inherit gcc;
cc = clangOmpss2Unwrapped; cc = clangOmpss2Unwrapped;
gccVersion = with versions; let v = gcc.version; in concatStringsSep "." [(major v) (minor v) (patch v)]; gccVersion = with versions; let v = gcc.version; in concatStringsSep "." [(major v) (minor v) (patch v)];
in wrapCCWith {
inherit cc bintools;
# extraPackages adds packages to depsTargetTargetPropagated
extraPackages = optional (openmp != null) openmp;
extraBuildCommands = '' extraBuildCommands = ''
echo "-target ${targetConfig}" >> $out/nix-support/cc-cflags echo "-target ${targetConfig}" >> $out/nix-support/cc-cflags
echo "-B${gcc.cc}/lib/gcc/${targetConfig}/${gccVersion}" >> $out/nix-support/cc-cflags echo "-B${gcc.cc}/lib/gcc/${targetConfig}/${gccVersion}" >> $out/nix-support/cc-cflags
@@ -55,50 +57,14 @@ let
echo "--gcc-toolchain=${gcc}" >> $out/nix-support/cc-cflags echo "--gcc-toolchain=${gcc}" >> $out/nix-support/cc-cflags
wrap clang++ $wrapper $ccPath/clang++ wrap clang++ $wrapper $ccPath/clang++
'' + optionalString (openmp != null) ''
echo "export OPENMP_RUNTIME=${ompname}" >> $out/nix-support/cc-wrapper-hook
'' + optionalString (ompss2rt != null) ''
echo "export OMPSS2_RUNTIME=${rtname}" >> $out/nix-support/cc-wrapper-hook
echo "export ${homevar}=${ompss2rt}" >> $out/nix-support/cc-wrapper-hook
'' + optionalString (ompss2rt != null && ompss2rt.pname == "nodes") ''
echo "export NOSV_HOME=${ompss2rt.nosv}" >> $out/nix-support/cc-wrapper-hook
''; '';
}
envExports = lib.optionalString (openmp != null) ''
echo "export OPENMP_RUNTIME=${ompname}" >> $out/nix-support/cc-wrapper-hook
'' + optionalString (ompss2rt != null) ''
echo "export OMPSS2_RUNTIME=${rtname}" >> $out/nix-support/cc-wrapper-hook
echo "export ${homevar}=${ompss2rt}" >> $out/nix-support/cc-wrapper-hook
'' + optionalString (ompss2rt != null && ompss2rt.pname == "nodes") ''
echo "export NOSV_HOME=${ompss2rt.nosv}" >> $out/nix-support/cc-wrapper-hook
'';
extraPackages = optional (openmp != null) openmp;
wrappedCC = wrapCCWith {
# extraPackages adds packages to depsTargetTargetPropagated
inherit cc bintools extraPackages;
extraBuildCommands = extraBuildCommands + envExports;
};
resetIntelCCFlags = let tconf = builtins.replaceStrings ["-"] ["_"] targetConfig;
in writeShellScript "remove-intel.sh" ''
if [ "''${NIX_CC_WRAPPER_INTEL:-0}" = 1 ]; then
unset NIX_CFLAGS_COMPILE_${tconf}
unset NIX_CC_WRAPPER_FLAGS_SET_${tconf}
if (( "''${NIX_DEBUG:-0}" >= 1 )); then
echo "ompss2: cleaned NIX_CFLAGS_COMPILE_${tconf} (invokation from intel compiler detected)"
fi
fi
'';
intelExtraBuildCommands = ''
sed -i 's|# Flirting.*|source ${resetIntelCCFlags}\n\n&|' $out/bin/clang
sed -i 's|# Flirting.*|source ${resetIntelCCFlags}\n\n&|' $out/bin/clang++
'';
wrappedCCIntel = wrapCCWith {
inherit cc bintools extraPackages;
# extraPackages adds packages to depsTargetTargetPropagated
extraBuildCommands = intelExtraBuildCommands + envExports;
};
in wrappedCC.overrideAttrs (oldAttrs: {
passthru = oldAttrs.passthru // {
forIcpx = wrappedCCIntel;
};
})


@@ -65,7 +65,6 @@ stdenv.mkDerivation rec {
]; ];
meta = { meta = {
broken = true;
homepage = "https://github.com/bsc-pm/mcxx"; homepage = "https://github.com/bsc-pm/mcxx";
description = "C/C++/Fortran source-to-source compilation infrastructure aimed at fast prototyping"; description = "C/C++/Fortran source-to-source compilation infrastructure aimed at fast prototyping";
maintainers = with lib.maintainers.bsc; [ rpenacob ]; maintainers = with lib.maintainers.bsc; [ rpenacob ];


@@ -1,11 +1,9 @@
{ python3Packages, lib }: { python3Packages, lib }:
python3Packages.buildPythonApplication { python3Packages.buildPythonApplication rec {
pname = "meteocat-exporter"; pname = "meteocat-exporter";
version = "1.0"; version = "1.0";
pyproject = true;
src = ./.; src = ./.;
doCheck = false; doCheck = false;


@@ -6,13 +6,6 @@
, pmix , pmix
, gfortran , gfortran
, symlinkJoin , symlinkJoin
# Disabled when cross-compiling
# To fix cross compilation, we should fill the values in:
# https://github.com/pmodels/mpich/blob/main/maint/fcrosscompile/cross_values.txt.in
# For each arch
, enableFortran ? stdenv.hostPlatform == stdenv.buildPlatform
, perl
, targetPackages
}: }:
let let
@@ -22,13 +15,10 @@ let
paths = [ pmix.dev pmix.out ]; paths = [ pmix.dev pmix.out ];
}; };
in mpich.overrideAttrs (old: { in mpich.overrideAttrs (old: {
buildInputs = old.buildInputs ++ [ buildInput = old.buildInputs ++ [
libfabric libfabric
pmixAll pmixAll
]; ];
nativeBuildInputs = old.nativeBuildInputs ++ [
perl
];
configureFlags = [ configureFlags = [
"--enable-shared" "--enable-shared"
"--enable-sharedlib" "--enable-sharedlib"
@@ -41,21 +31,10 @@ in mpich.overrideAttrs (old: {
] ++ lib.optionals (lib.versionAtLeast gfortran.version "10") [ ] ++ lib.optionals (lib.versionAtLeast gfortran.version "10") [
"FFLAGS=-fallow-argument-mismatch" # https://github.com/pmodels/mpich/issues/4300 "FFLAGS=-fallow-argument-mismatch" # https://github.com/pmodels/mpich/issues/4300
"FCFLAGS=-fallow-argument-mismatch" "FCFLAGS=-fallow-argument-mismatch"
] ++ lib.optionals (!enableFortran) [
"--disable-fortran"
]; ];
preFixup = ''
sed -i 's:^CC=.*:CC=${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}cc:' $out/bin/mpicc
sed -i 's:^CXX=.*:CXX=${targetPackages.stdenv.cc}/bin/${targetPackages.stdenv.cc.targetPrefix}c++:' $out/bin/mpicxx
'' + lib.optionalString enableFortran ''
sed -i 's:^FC=.*:FC=${targetPackages.gfortran or gfortran}/bin/${targetPackages.gfortran.targetPrefix or gfortran.targetPrefix}gfortran:' $out/bin/mpifort
'';
hardeningDisable = [ "all" ]; hardeningDisable = [ "all" ];
meta = old.meta // { meta = old.meta // {
maintainers = old.meta.maintainers ++ (with lib.maintainers.bsc; [ rarias ]); maintainers = old.meta.maintainers ++ (with lib.maintainers.bsc; [ rarias ]);
cross = true;
}; };
}) })


@@ -0,0 +1,64 @@
From 3aa73c21e3afc91522a6121b0d591af6925b4ba6 Mon Sep 17 00:00:00 2001
From: Rodrigo Arias Mallo <rodarima@gmail.com>
Date: Mon, 13 Oct 2025 16:05:30 +0200
Subject: [PATCH] Add builtins.catchAll to catch all types of errors
Allows fetching multiple Git repositories with builtin.fetchGit and
catching any errors thrown by the builtin, in opposition to the builtin
tryEval.
---
src/libexpr/primops.cc | 38 ++++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 36a67a39d..3b26f9f43 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -849,6 +849,44 @@ static RegisterPrimOp primop_tryEval({
.fun = prim_tryEval,
});
+/* Like tryEval but catch all errors. Success => {success=true; value=something;},
+ * else => {success=false; value=false;} */
+static void prim_catchAll(EvalState & state, const PosIdx pos, Value * * args, Value & v)
+{
+ auto attrs = state.buildBindings(3);
+ try {
+ state.forceValue(*args[0], pos);
+ attrs.insert(state.sValue, args[0]);
+ attrs.alloc("success").mkBool(true);
+ attrs.alloc("msg").mkNull();
+ } catch (Error & e) {
+ attrs.alloc(state.sValue).mkBool(false);
+ attrs.alloc("success").mkBool(false);
+ attrs.alloc("msg").mkString(e.msg());
+ }
+ v.mkAttrs(attrs);
+}
+
+static RegisterPrimOp primop_catchAll({
+ .name = "__catchAll",
+ .args = {"e"},
+ .doc = R"(
+ Try to shallowly evaluate *e*. Return a set containing the
+ attributes `success` (`true` if *e* evaluated successfully,
+ `false` if an error was thrown) and `value`, equalling *e* if
+ successful and `false` otherwise. In contrast with `tryEval`,
+ `catchAll` will prevent all errors from being thrown, including
+ for those created by `abort` and type errors generated by
+ builtins. Also note that this doesn't evaluate *e* deeply, so
+ `let e = { x = throw ""; }; in (builtins.catchAll e).success`
+ will be `true`. Using `builtins.deepSeq` one can get the expected
+ result: `let e = { x = throw ""; }; in
+ (builtins.catchAll (builtins.deepSeq e e)).success` will be
+ `false`.
+ )",
+ .fun = prim_catchAll,
+});
+
/* Return an environment variable. Use with care. */
static void prim_getEnv(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
--
2.51.0
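
An illustrative sketch of what the patched builtin enables, assuming evaluation with the patched nix built from pkgs/nix/default.nix above (builtins.tryEval would not recover from the abort in the first expression):

let
  # catchAll also traps abort and type errors, which tryEval cannot catch.
  r1 = builtins.catchAll (abort "mirror unreachable");
  # Shallow successful evaluation: value is the result, msg is null.
  r2 = builtins.catchAll { x = 1; };
in {
  failed = { inherit (r1) success msg; };  # success = false, msg holds the error text
  ok = r2.success && r2.value.x == 1;      # true
}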


@@ -1,219 +1,7 @@
{ lib, fetchurl, fetchFromGitHub, callPackage { nix }:
, storeDir ? "/nix/store"
, stateDir ? "/nix/var"
, confDir ? "/etc"
, boehmgc
, stdenv, llvmPackages_6
}:
let nix.overrideAttrs (old: {
patches = (old.patches or []) ++ [
common = ./add-catchAll.patch
{ lib, stdenv, fetchpatch, perl, curl, bzip2, sqlite, openssl ? null, xz ];
, bash, coreutils, gzip, gnutar })
, pkg-config, boehmgc, perlPackages, libsodium, brotli, boost, editline, nlohmann_json
, autoreconfHook, autoconf-archive, bison, flex, libxml2, libxslt, docbook5, docbook_xsl_ns
, jq, libarchive, rustc, cargo
# Used by tests
, gmock
, busybox-sandbox-shell
, storeDir
, stateDir
, confDir
, withLibseccomp ? lib.any (lib.meta.platformMatch stdenv.hostPlatform) libseccomp.meta.platforms, libseccomp
, withAWS ? stdenv.isLinux || stdenv.isDarwin, aws-sdk-cpp
, name, suffix ? "", src, crates ? null
}:
let
sh = busybox-sandbox-shell;
nix = stdenv.mkDerivation rec {
inherit name src;
version = lib.getVersion name;
is24 = lib.versionAtLeast version "2.4pre";
isExactly23 = lib.versionAtLeast version "2.3" && lib.versionOlder version "2.4";
VERSION_SUFFIX = suffix;
outputs = [ "out" "dev" "man" "doc" ];
nativeBuildInputs =
[ pkg-config ]
++ lib.optionals is24 [ autoreconfHook autoconf-archive bison flex libxml2 libxslt
docbook5 docbook_xsl_ns jq gmock ];
buildInputs =
[ curl openssl sqlite xz bzip2 nlohmann_json
brotli boost editline
]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optionals is24 [ libarchive rustc cargo ]
++ lib.optional withLibseccomp libseccomp
++ lib.optional withAWS
((aws-sdk-cpp.override {
apis = ["s3" "transfer"];
customMemoryManagement = false;
}).overrideDerivation (args: {
patches = args.patches or [] ++ [(fetchpatch {
url = "https://github.com/edolstra/aws-sdk-cpp/commit/7d58e303159b2fb343af9a1ec4512238efa147c7.patch";
sha256 = "103phn6kyvs1yc7fibyin3lgxz699qakhw671kl207484im55id1";
})];
}));
propagatedBuildInputs = [ boehmgc ];
# Seems to be required when using std::atomic with 64-bit types
NIX_LDFLAGS = lib.optionalString (stdenv.hostPlatform.system == "armv5tel-linux" || stdenv.hostPlatform.system == "armv6l-linux") "-latomic";
preConfigure =
# Copy libboost_context so we don't get all of Boost in our closure.
# https://github.com/NixOS/nixpkgs/issues/45462
''
mkdir -p $out/lib
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
rm -f $out/lib/*.a
${lib.optionalString stdenv.isLinux ''
chmod u+w $out/lib/*.so.*
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
'' +
# Unpack the Rust crates.
lib.optionalString is24 ''
tar xvf ${crates} -C nix-rust/
mv nix-rust/nix-vendored-crates* nix-rust/vendor
'' +
# For Nix-2.3, patch around an issue where the Nix configure step pulls in the
# build system's bash and other utilities when cross-compiling
lib.optionalString (stdenv.buildPlatform != stdenv.hostPlatform && isExactly23) ''
mkdir tmp/
substitute corepkgs/config.nix.in tmp/config.nix.in \
--subst-var-by bash ${bash}/bin/bash \
--subst-var-by coreutils ${coreutils}/bin \
--subst-var-by bzip2 ${bzip2}/bin/bzip2 \
--subst-var-by gzip ${gzip}/bin/gzip \
--subst-var-by xz ${xz}/bin/xz \
--subst-var-by tar ${gnutar}/bin/tar \
--subst-var-by tr ${coreutils}/bin/tr
mv tmp/config.nix.in corepkgs/config.nix.in
'';
configureFlags =
[ "--with-store-dir=${storeDir}"
"--localstatedir=${stateDir}"
"--sysconfdir=${confDir}"
"--disable-init-state"
"--enable-gc"
]
++ lib.optionals stdenv.isLinux [
"--with-sandbox-shell=${sh}/bin/busybox"
]
++ lib.optional (
stdenv.hostPlatform != stdenv.buildPlatform && stdenv.hostPlatform ? nix && stdenv.hostPlatform.nix ? system
) ''--with-system=${stdenv.hostPlatform.nix.system}''
# RISC-V support in progress https://github.com/seccomp/libseccomp/pull/50
++ lib.optional (!withLibseccomp) "--disable-seccomp-sandboxing";
makeFlags = [ "profiledir=$(out)/etc/profile.d" ];
installFlags = [ "sysconfdir=$(out)/etc" ];
doInstallCheck = false;
# socket path becomes too long otherwise
#preInstallCheck = lib.optional stdenv.isDarwin ''
# export TMPDIR=$NIX_BUILD_TOP
#'';
separateDebugInfo = stdenv.isLinux;
enableParallelBuilding = true;
meta = {
description = "Powerful package manager that makes package management reliable and reproducible";
longDescription = ''
Nix is a powerful package manager for Linux and other Unix systems that
makes package management reliable and reproducible. It provides atomic
upgrades and rollbacks, side-by-side installation of multiple versions of
a package, multi-user package management and easy setup of build
environments.
'';
homepage = "https://nixos.org/";
license = lib.licenses.lgpl2Plus;
maintainers = [ lib.maintainers.eelco ];
platforms = lib.platforms.unix;
outputsToInstall = [ "out" "man" ];
};
passthru = {
perl-bindings = stdenv.mkDerivation {
pname = "nix-perl";
inherit version;
inherit src;
postUnpack = "sourceRoot=$sourceRoot/perl";
# This is not cross-compile safe, don't have time to fix right now
# but noting for future travellers.
nativeBuildInputs =
[ perl pkg-config curl nix libsodium boost autoreconfHook autoconf-archive ];
configureFlags =
[ "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
];
preConfigure = "export NIX_STATE_DIR=$TMPDIR";
preBuild = "unset NIX_INDENT_MAKE";
};
};
};
in nix;
in rec {
nix = nixUnstable;
nixUnstable = lib.lowPrio (callPackage common rec {
name = "nix-2.4${suffix}";
suffix = "pre7534_b92f58f6";
#src = /home/Computational/rarias/nix/nix-rodarima;
src = fetchFromGitHub {
owner = "rodarima";
repo = "nix";
rev = "3a642187c33ed46d952d3a50a83b2576b704fab7";
sha256 = "0s8is2czpkcj1x1kcjqgbnsbbl03w3fwjjiclsd44zh1ij3wb90s";
};
crates = fetchurl {
url = "https://hydra.nixos.org/build/118797694/download/1/nix-vendored-crates-2.4pre7534_b92f58f6.tar.xz";
sha256 = "a4c2612bbd81732bbb899bc0c230e07b16f6b6150ffbb19c4907dedbbc2bf9fc";
};
inherit storeDir stateDir confDir boehmgc;
});
nixFlakes = lib.lowPrio (callPackage common rec {
name = "nix-2.4${suffix}";
suffix = "pre20200521_00b562c";
src = fetchFromGitHub {
owner = "NixOS";
repo = "nix";
rev = "00b562c87ec4c3bbe514f5dc1f4d1c41f66f66bf";
sha256 = "0s8is2czpkcj1x1kcjqgbnsbbl03w3fwjjiclsd44zh1ij3wb90s";
};
crates = fetchurl {
url = "https://hydra.nixos.org/build/118093786/download/1/nix-vendored-crates-2.4pre20200501_941f952.tar.xz";
sha256 = "060f4n5srdbb8vsj0m14aqch7im79a4h5g3nrs41p1xc602vhcdl";
};
inherit storeDir stateDir confDir boehmgc;
});
}
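For readability, the right-hand (new) column of the hunk above collapses the old expression into a thin wrapper around the nixpkgs nix package; reconstructed from the interleaved lines, the new file is approximately:

{ nix }:

nix.overrideAttrs (old: {
  patches = (old.patches or []) ++ [
    ./add-catchAll.patch
  ];
})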
View File
@@ -1,22 +0,0 @@
{
stdenv
, lib
}:
stdenv.mkDerivation {
pname = "nixgen";
version = "0.0.1";
src = ./nixgen;
dontUnpack = true;
phases = [ "installPhase" ];
installPhase = ''
mkdir -p $out/bin
cp -a $src $out/bin/nixgen
'';
meta = {
description = "Quickly generate flake.nix from command line";
maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus;
};
}
View File
@@ -1,97 +0,0 @@
#!/bin/sh
#
# Copyright (c) 2025, Barcelona Supercomputing Center (BSC)
# SPDX-License-Identifier: GPL-3.0+
# Author: Rodrigo Arias Mallo <rodrigo.arias@bsc.es>
function usage() {
echo "USAGE: nixgen [-f] [package [...]] [-b package [...]]" >&2
echo " Generates a flake.nix file with the given packages." >&2
echo " After flake.nix is created, use 'nix develop' to enter the shell." >&2
echo "OPTIONS" >&2
echo " -f Overwrite existing flake.nix (default: no)." >&2
echo " packages... Add these packages to the shell." >&2
echo " -b packages... Add the dependencies needed to build these packages." >&2
echo "EXAMPLE" >&2
echo " $ nixgen ovni bigotes -b nosv tampi" >&2
echo " Adds the packages ovni and bigotes as well as all required dependencies" >&2
echo " to build nosv and tampi." >&2
echo "AUTHOR" >&2
echo " Rodrigo Arias Mallo <rodrigo.arias@bsc.es>" >&2
exit 1
}
mode=package
packages=
inputsFrom=
force=
if [[ $# -eq 0 ]]; then
usage
fi
while [[ $# -gt 0 ]]; do
case $1 in -b)
mode=build
shift
;;
-f)
force=1
shift
;;
-h)
usage
;;
-*|--*)
echo "error: unknown option $1" >&2
exit 1
;;
*)
if [ "$mode" == "package" ]; then
packages+="${packages:+ }$1"
else
inputsFrom+="${inputsFrom:+ }$1"
fi
shift
;;
esac
done
if [ ! "$force" -a -e flake.nix ]; then
echo "error: flake.nix exists, force overwrite with -f" >&2
exit 1
fi
cat > flake.nix <<EOF
{
inputs.jungle.url = "git+https://jungle.bsc.es/git/rarias/jungle";
outputs = { self, jungle }:
let
nixpkgs = jungle.inputs.nixpkgs;
customOverlay = (final: prev: {
# Example overlay, for now empty
});
pkgs = import nixpkgs {
system = "x86_64-linux";
overlays = [
# Apply jungle overlay to get our BSC custom packages
jungle.outputs.bscOverlay
# And on top apply our local changes to customize for cluster
customOverlay
];
};
in {
devShells.x86_64-linux.default = pkgs.mkShell {
pname = "devshell";
# Include these packages in the shell
packages = with pkgs; [
$packages
];
# The dependencies needed to build these packages will also be included
inputsFrom = with pkgs; [
$inputsFrom
];
};
};
}
EOF
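A typical invocation, taken from the EXAMPLE section of the help text above (the package names are only illustrative):

$ nixgen -f ovni bigotes -b nosv tampi   # write flake.nix, overwriting an existing one
$ nix develop                            # enter the generated development shell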
View File
@@ -3,6 +3,7 @@
, lib , lib
, fetchFromGitHub , fetchFromGitHub
, pkg-config , pkg-config
, perl
, numactl , numactl
, hwloc , hwloc
, boost , boost
@@ -10,23 +11,22 @@
, ovni , ovni
, nosv , nosv
, clangOmpss2 , clangOmpss2
, which
, useGit ? false , useGit ? false
, gitUrl ? "ssh://git@gitlab-internal.bsc.es/nos-v/nodes.git" , gitUrl ? "ssh://git@gitlab-internal.bsc.es/nos-v/nodes.git"
, gitBranch ? "master" , gitBranch ? "master"
, gitCommit ? "511489e71504a44381e0930562e7ac80ac69a848" # version-1.4 , gitCommit ? "6002ec9ae6eb876d962cc34366952a3b26599ba6"
}: }:
with lib; with lib;
let let
release = rec { release = rec {
version = "1.4"; version = "1.3";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "bsc-pm"; owner = "bsc-pm";
repo = "nodes"; repo = "nodes";
rev = "version-${version}"; rev = "version-${version}";
hash = "sha256-+lR/R0l3fGZO3XG7whMorFW2y2YZ0ZFnLeOHyQYrAsQ="; hash = "sha256-cFb9pxcjtkMmH0CsGgUO9LTdXDNh7MCqicgGWawLrsU=";
}; };
}; };
@@ -59,7 +59,6 @@ in
doCheck = false; doCheck = false;
nativeCheckInputs = [ nativeCheckInputs = [
clangOmpss2 clangOmpss2
which
]; ];
# The "bindnow" flags are incompatible with ifunc resolution mechanism. We # The "bindnow" flags are incompatible with ifunc resolution mechanism. We
View File
@@ -13,19 +13,19 @@
, useGit ? false , useGit ? false
, gitUrl ? "git@gitlab-internal.bsc.es:nos-v/nos-v.git" , gitUrl ? "git@gitlab-internal.bsc.es:nos-v/nos-v.git"
, gitBranch ? "master" , gitBranch ? "master"
, gitCommit ? "1108e4786b58e0feb9a16fa093010b763eb2f8e8" # version 4.0.0 , gitCommit ? "9f47063873c3aa9d6a47482a82c5000a8c813dd8"
}: }:
with lib; with lib;
let let
release = rec { release = rec {
version = "4.0.0"; version = "3.2.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "bsc-pm"; owner = "bsc-pm";
repo = "nos-v"; repo = "nos-v";
rev = "${version}"; rev = "${version}";
hash = "sha256-llaq73bd/YxLVKNlMebnUHKa4z3sdcsuDUoVwUxNuw8="; hash = "sha256-yaz92426EM8trdkBJlISmAoG9KJCDTvoAW/HKrasvOw=";
}; };
}; };
View File
@@ -1,91 +0,0 @@
{
lib,
fetchFromGitHub,
cmake,
withCFlags,
intelPackages,
mklSupport ? true,
config,
cudaSupport ? config.cudaSupport,
cudaPackages ? { },
rocmSupport ? config.rocmSupport,
hipTargets ? null, # only one target at a time supported
rocmPackages ? { },
}:
let
# rocmSupport is not enough, we need a specific target
enableHip = rocmSupport && hipTargets != null;
stdenv = withCFlags (lib.optionals cudaSupport [
"--cuda-path=${cudaPackages.cudatoolkit}"
]) intelPackages.stdenv;
in
# at least one backend has to be enabled
assert mklSupport || cudaSupport || enableHip;
stdenv.mkDerivation rec {
pname = "oneMath";
version = "0.8";
src = fetchFromGitHub {
owner = "uxlfoundation";
repo = "oneMath";
rev = "v${version}";
sha256 = "sha256-xK8lKI3oqKlx3xtvdScpMq+HXAuoYCP0BZdkEqnJP5o=";
};
cmakeFlags = [
(lib.cmakeBool "ENABLE_MKLCPU_BACKEND" mklSupport)
(lib.cmakeBool "ENABLE_MKLGPU_BACKEND" mklSupport)
(lib.cmakeBool "ENABLE_CUBLAS_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUFFT_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CURAND_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUSOLVER_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_CUSPARSE_BACKEND" cudaSupport)
(lib.cmakeBool "ENABLE_ROCBLAS_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCFFT_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCSOLVER_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCRAND_BACKEND" enableHip)
(lib.cmakeBool "ENABLE_ROCSPARSE_BACKEND" enableHip)
(lib.cmakeBool "BUILD_FUNCTIONAL_TESTS" false)
(lib.cmakeBool "BUILD_EXAMPLES" false)
]
++ lib.optionals enableHip [
(lib.cmakeFeature "HIP_TARGETS" hipTargets)
];
nativeBuildInputs = [ cmake ];
buildInputs =
lib.optionals (mklSupport) [
intelPackages.mkl
intelPackages.tbb
]
++ lib.optionals (enableHip) [
rocmPackages.rocmPath
rocmPackages.rocblas
rocmPackages.rocfft
rocmPackages.rocsolver
rocmPackages.rocrand
rocmPackages.rocsparse
]
++ lib.optionals (cudaSupport) [
(lib.getDev cudaPackages.cuda_cudart)
cudaPackages.cudatoolkit
cudaPackages.libcublas
cudaPackages.libcurand
cudaPackages.libcufft
cudaPackages.libcusparse
cudaPackages.libcusolver
];
}
View File
@@ -32,11 +32,6 @@ stdenv.mkDerivation rec {
"CXX=mpicxx" "CXX=mpicxx"
]; ];
env = {
MPICH_CC="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}cc";
MPICH_CXX="${stdenv.cc}/bin/${stdenv.cc.targetPrefix}c++";
};
postInstall = '' postInstall = ''
mkdir -p $out/bin mkdir -p $out/bin
for f in $(find $out -executable -type f); do for f in $(find $out -executable -type f); do
@@ -49,6 +44,5 @@ stdenv.mkDerivation rec {
homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/"; homepage = "http://mvapich.cse.ohio-state.edu/benchmarks/";
maintainers = [ ]; maintainers = [ ];
platforms = lib.platforms.all; platforms = lib.platforms.all;
cross = true;
}; };
} }
View File
@@ -7,7 +7,7 @@
, useGit ? false , useGit ? false
, gitBranch ? "master" , gitBranch ? "master"
, gitUrl ? "ssh://git@bscpm04.bsc.es/rarias/ovni.git" , gitUrl ? "ssh://git@bscpm04.bsc.es/rarias/ovni.git"
, gitCommit ? "06432668f346c8bdc1006fabc23e94ccb81b0d8b" # version 1.13.0 , gitCommit ? "e4f62382076f0cf0b1d08175cf57cc0bc51abc61"
, enableDebug ? false , enableDebug ? false
# Only enable MPI if the build is native (fails on cross-compilation) # Only enable MPI if the build is native (fails on cross-compilation)
, useMpi ? (stdenv.buildPlatform.canExecute stdenv.hostPlatform) , useMpi ? (stdenv.buildPlatform.canExecute stdenv.hostPlatform)
@@ -15,13 +15,13 @@
let let
release = rec { release = rec {
version = "1.13.0"; version = "1.12.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "bsc-pm"; owner = "bsc-pm";
repo = "ovni"; repo = "ovni";
rev = "${version}"; rev = "${version}";
hash = "sha256-0l2ryIyWNiZqeYdVlnj/WnQGS3xFCY4ICG8JedX424w="; hash = "sha256-H04JvsVKrdqr3ON7JhU0g17jjlg/jzQ7eTfx9vUNd3E=";
} // { shortRev = "0643266"; }; } // { shortRev = "a73afcf"; };
}; };
git = rec { git = rec {
View File
@@ -12,7 +12,7 @@
, paraverKernel , paraverKernel
, openssl , openssl
, glibcLocales , glibcLocales
, wrapGAppsHook3 , wrapGAppsHook
}: }:
let let
@@ -64,7 +64,7 @@ stdenv.mkDerivation rec {
autoconf autoconf
automake automake
autoreconfHook autoreconfHook
wrapGAppsHook3 wrapGAppsHook
]; ];
buildInputs = [ buildInputs = [
View File
@@ -35,6 +35,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ]; maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux; platforms = lib.platforms.linux;
license = lib.licenses.mit; license = lib.licenses.mit;
cross = true;
}; };
} }
View File
@@ -5,14 +5,23 @@
, automake , automake
, autoconf , autoconf
, libtool , libtool
, mpi
, autoreconfHook , autoreconfHook
, gpi-2 , gpi-2
, boost , boost
, numactl , numactl
, rdma-core , rdma-core
, gfortran , gfortran
, symlinkJoin
}: }:
let
mpiAll = symlinkJoin {
name = "mpi-all";
paths = [ mpi.all ];
};
in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "tagaspi"; pname = "tagaspi";
enableParallelBuilding = true; enableParallelBuilding = true;
@@ -26,18 +35,16 @@ stdenv.mkDerivation rec {
hash = "sha256-RGG/Re2uM293HduZfGzKUWioDtwnSYYdfeG9pVrX9EM="; hash = "sha256-RGG/Re2uM293HduZfGzKUWioDtwnSYYdfeG9pVrX9EM=";
}; };
nativeBuildInputs = [ buildInputs = [
autoreconfHook autoreconfHook
automake automake
autoconf autoconf
libtool libtool
gfortran
];
buildInputs = [
boost boost
numactl numactl
rdma-core rdma-core
gfortran
mpiAll
]; ];
dontDisableStatic = true; dontDisableStatic = true;
@@ -56,6 +63,5 @@ stdenv.mkDerivation rec {
maintainers = with lib.maintainers.bsc; [ rarias ]; maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux; platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus; license = lib.licenses.gpl3Plus;
cross = false; # gpi-2 cannot cross
}; };
} }
View File
@@ -68,6 +68,5 @@ in stdenv.mkDerivation {
maintainers = with lib.maintainers.bsc; [ rarias ]; maintainers = with lib.maintainers.bsc; [ rarias ];
platforms = lib.platforms.linux; platforms = lib.platforms.linux;
license = lib.licenses.gpl3Plus; license = lib.licenses.gpl3Plus;
cross = true;
}; };
} }
View File
@@ -1,87 +0,0 @@
{
lib,
stdenv,
autoconf,
automake,
autoreconfHook,
boost,
fetchFromGitHub,
gnumake,
libtool,
withCFlags,
useIntel ? true,
adaptivecpp ? null,
intelPackages ? null,
useGit ? false,
gitUrl ? "git@gitlab-internal.bsc.es:task-awareness/tasycl/tasycl.git",
gitBranch ? "main",
gitCommit ? "78f98dcf60a66e0eaa3b4ebcf55be076bec64825",
}:
assert !useIntel -> adaptivecpp != null;
assert useIntel -> intelPackages != null;
let
variant = if useIntel then "intel" else "acpp";
syclStdenv = withCFlags [ "-O3" ] (if useIntel then intelPackages.stdenv else stdenv);
release = rec {
version = "2.1.0";
src = fetchFromGitHub {
owner = "bsc-pm";
repo = "tasycl";
rev = version;
hash = "sha256-0kXnb0lHeQNHR27GTLbJ8xbiICLU8k2+FqEnnFSrzzo=";
};
};
git = rec {
version = src.shortRev;
src = builtins.fetchGit {
url = gitUrl;
ref = gitBranch;
rev = gitCommit;
};
};
source = if (useGit) then git else release;
in
syclStdenv.mkDerivation {
pname = "tasycl-${variant}";
inherit (source) src version;
enableParallelBuilding = true;
separateDebugInfo = true;
nativeBuildInputs = [
autoreconfHook
automake
autoconf
libtool
gnumake
];
buildInputs = [
boost
];
configureFlags = lib.optionals (!useIntel) [
"CXX=${lib.getExe adaptivecpp}"
];
# add symlinks so we can explicitly link with tasycl-intel / tasycl-acpp
postInstall = ''
pushd $out/lib
for i in libtasycl* ; do
ln -s "$i" "''\${i/tasycl/tasycl-${variant}}"
done
popd
'';
hardeningDisable = [ "all" ];
}
View File
@@ -1,11 +1,9 @@
{ python3Packages, lib }: { python3Packages, lib }:
python3Packages.buildPythonApplication { python3Packages.buildPythonApplication rec {
pname = "upc-qaire-exporter"; pname = "upc-qaire-exporter";
version = "1.0"; version = "1.0";
pyproject = true;
src = ./.; src = ./.;
doCheck = false; doCheck = false;
View File
@@ -1,62 +0,0 @@
{
intelPackages,
writeText,
strace,
}:
let
stdenv = intelPackages.stdenv;
hello_sycl = writeText "hello.cpp" ''
#include <sycl/sycl.hpp>
class hello_world;
int main(int argc, char** argv) try {
auto device_selector = sycl::default_selector_v;
sycl::queue queue(device_selector);
std::cout << "Running on: "
<< queue.get_device().get_info<sycl::info::device::name>()
<< std::endl;
queue.submit([&] (sycl::handler& cgh) {
auto os = sycl::stream{128, 128, cgh};
cgh.single_task<hello_world>([=]() {
os << "Hello World! (on device)\n";
});
});
return 0;
} catch (sycl::exception &e) {
std::cout << "SYCL exception: " << e.what() << std::endl;
return 0; // we expect to fail since no devices should be available;
}
'';
in
stdenv.mkDerivation {
version = "0.0.1";
name = "hello-sycl";
buildInputs = [
stdenv
strace
];
src = hello_sycl;
dontUnpack = true;
dontConfigure = true;
NIX_DEBUG = 0;
buildPhase = ''
cp $src hello.cpp
set -x
echo CXX=$CXX
command -v $CXX
$CXX -fsycl hello.cpp -o hello
./hello
set +x
'';
installPhase = ''
touch $out
'';
View File

@@ -1,81 +0,0 @@
{
writeText,
intelPackages,
nodes,
nosv,
clangOmpss2Nodes,
strace,
}:
let
hello_cpp = writeText "hello.cpp" ''
#include <cstdio>
#include <sycl/sycl.hpp>
int main(int argc, char** argv) try {
sycl::queue queue;
std::cout << "Running on: "
<< queue.get_device().get_info<sycl::info::device::name>()
<< std::endl;
#pragma oss task
queue.submit([&] (sycl::handler& cgh) {
auto os = sycl::stream{128, 128, cgh};
cgh.single_task<class hello_world>([=]() {
os << "Hello World! (on device)\n";
});
}).wait();
return 0;
} catch (sycl::exception &e) {
std::cout << "SYCL exception: " << e.what() << std::endl;
return 0; // we expect to fail since no devices should be available;
}
'';
in
intelPackages.stdenv.mkDerivation {
version = "0.0.1";
name = "hello-syclompss";
src = hello_cpp;
nativeBuildInputs = [
strace
nodes
nosv
];
dontUnpack = true;
dontConfigure = true;
# NODES requires access to /sys/devices to request NUMA information
requiredSystemFeatures = [ "sys-devices" ];
env.NODES_HOME = nodes;
NIX_DEBUG = 1;
buildPhase = ''
cp $src hello.cpp
set -x
echo CXX=$CXX
echo NODES_HOME=$NODES_HOME
command -v $CXX
icpx -Wno-deprecated-declarations -fsycl \
-fsycl-host-compiler=${clangOmpss2Nodes.forIcpx}/bin/clang++ \
-fsycl-host-compiler-options='-Wno-deprecated-declarations -fompss-2=libnodes' \
-lnodes -lnosv \
$NODES_HOME/lib/nodes-main-wrapper.o \
hello.cpp -o hello
./hello
set +x
'';
installPhase = ''
touch $out
'';
}