Remove old experiments

Rodrigo Arias 2020-10-09 16:32:28 +02:00
parent e6e42dcec9
commit 9d2ce2a1c2
6 changed files with 6 additions and 605 deletions

@@ -1,178 +0,0 @@
{
stdenv
, nixpkgs
, pkgs
, genApp
, genConfigs
, runWrappers
}:
with stdenv.lib;
let
# Set variable configuration for the experiment
varConfig = {
cc = [ pkgs.bsc.icc ];
blocksize = [ 1024 ];
};
# Common configuration
common = {
# Compile time nbody config
gitBranch = "garlic/mpi+send";
mpi = pkgs.bsc.impi;
# nbody runtime options
particles = 1024*64;
timesteps = 10;
# Resources
ntasksPerNode = "48";
nodes = "1";
# Stage configuration
enableSbatch = true;
enableControl = true;
enableExtrae = false;
enablePerf = false;
# MN4 path
nixPrefix = "/gpfs/projects/bsc15/nix";
};
# Compute the cartesian product of all configurations
configs = map (conf: conf // common) (genConfigs varConfig);
stageProgram = stage:
if stage ? programPath
then "${stage}${stage.programPath}" else "${stage}";
w = runWrappers;
sbatch = {stage, conf, ...}: with conf; w.sbatch {
program = stageProgram stage;
exclusive = true;
time = "02:00:00";
qos = "debug";
jobName = "nbody-bs";
inherit nixPrefix nodes ntasksPerNode;
};
control = {stage, conf, ...}: with conf; w.control {
program = stageProgram stage;
};
srun = {stage, conf, ...}: with conf; w.srun {
program = stageProgram stage;
srunOptions = "--cpu-bind=verbose,rank";
inherit nixPrefix;
};
statspy = {stage, conf, ...}: with conf; w.statspy {
program = stageProgram stage;
};
perf = {stage, conf, ...}: with conf; w.perf {
program = stageProgram stage;
perfArgs = "sched record -a";
};
nixsetup = {stage, conf, ...}: with conf; w.nixsetup {
program = stageProgram stage;
nixsetup = "${nixPrefix}/bin/nix-setup";
};
extrae = {stage, conf, ...}:
let
# We set the mpi implementation to the one specified in the conf, so all
# packages in bsc will use that one.
customPkgs = genPkgs (self: super: {
bsc = super.bsc // { mpi = conf.mpi; };
});
extrae = customPkgs.bsc.extrae;
in
w.extrae {
program = stageProgram stage;
extrae = extrae;
traceLib = "mpi"; # mpi -> libtracempi.so
configFile = ./extrae.xml;
};
argv = {stage, conf, ...}: w.argv {
program = stageProgram stage;
env = ''
set -e
export I_MPI_THREAD_SPLIT=1
'';
argv = ''( -t ${toString conf.timesteps}
-p ${toString conf.particles} )'';
};
bscOverlay = import ../../../overlay.nix;
genPkgs = newOverlay: nixpkgs {
overlays = [
bscOverlay
newOverlay
];
};
# Print the environment to ensure we don't get anything nasty
envRecord = {stage, conf, ...}: w.envRecord {
program = stageProgram stage;
};
broom = {stage, conf, ...}: w.broom {
program = stageProgram stage;
};
# We might be able to apply the overlays by invoking the fix function
# directly, but that requires having the definitions of both the bsc and
# the garlic packages expressed as overlays.
nbodyFn = {stage, conf, ...}: with conf;
let
# We set the mpi implementation to the one specified in the conf, so all
# packages in bsc will use that one.
customPkgs = genPkgs (self: super: {
bsc = super.bsc // { mpi = conf.mpi; };
});
in
customPkgs.bsc.garlic.nbody.override {
inherit cc blocksize mpi gitBranch;
};
stages = with common; []
# Cleans ALL environment variables
++ [ broom ]
# Use sbatch to request resources first
++ optionals enableSbatch [ sbatch nixsetup ]
# Record the current env vars set by SLURM to verify we don't get anything
# nasty (like a sourced .bashrc). See issue #26.
++ [ envRecord ]
# Repeats the next stages N=30 times
++ optional enableControl control
# Executes srun to launch the program on the requested nodes, then
# immediately re-enters the nix environment, as slurmstepd launches the
# next stages from outside the namespace.
++ [ srun nixsetup ]
# Instrumentation with Extrae
++ optional enableExtrae extrae
# Optionally profile the next stages with perf
++ optional enablePerf perf
# Execute the nbody app with the argv and env vars
++ [ argv nbodyFn ];
# List of actual programs to be executed
jobs = map (conf: w.stagen { inherit conf stages; }) configs;
in
# We simply run each program one after another
w.launch jobs
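
For reference, genConfigs in the listing above expands varConfig into the cartesian product of its lists, and each resulting attribute set is then merged with common. A minimal, self-contained sketch of that behaviour, assuming a hypothetical cartesian helper and illustrative placeholder values (the real genConfigs lives in the garlic tree):

let
  # Hypothetical helper: fold over the attribute names, extending every
  # partial configuration with each value of the current list.
  cartesian = attrs:
    builtins.foldl'
      (acc: name:
        builtins.concatLists
          (map (conf: map (value: conf // { ${name} = value; }) attrs.${name}) acc))
      [ { } ]
      (builtins.attrNames attrs);

  # Placeholder values; the listing uses pkgs.bsc.icc and a single blocksize.
  varConfig = {
    cc = [ "icc" ];
    blocksize = [ 1024 2048 ];
  };

  common = { particles = 1024 * 64; timesteps = 10; };
in
  # Evaluates to two configurations, one per blocksize, both merged with common:
  # [ { blocksize = 1024; cc = "icc"; particles = 65536; timesteps = 10; } ... ]
  map (conf: conf // common) (cartesian varConfig)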

@@ -1,74 +0,0 @@
{
stdenv
, nixpkgs
, pkgs
, stages
, machineConf
}:
with stdenv.lib;
let
bsc = pkgs.bsc;
w = stages;
in
{
/* Returns the path of the executable of a stage */
stageProgram = stage:
if stage ? programPath
then "${stage}${stage.programPath}"
else "${stage}";
/* Takes a list of units and builds an experiment wrapped in the isolate
and trebuchet stages. Returns the trebuchet stage. */
buildExperiment = {units, conf, ...}: stages.trebuchet {
inherit (machineConf) nixPrefix;
nextStage = stages.isolate {
inherit (machineConf) nixPrefix;
nextStage = stages.experiment {
inherit units;
};
};
};
sbatch = {nextStage, conf, ...}: with conf; w.sbatch (
# Allow a user to define a custom reservation for the job in MareNostrum4
# by setting the garlic.sbatch.reservation attribute in the
# ~/.config/nixpkgs/config.nix file. If the attribute is not set, no
# reservation is used. The experiment may override the user reservation if
# it sets one of its own, just as it does with nodes or ntasksPerNode.
optionalAttrs (pkgs.config ? garlic.sbatch.reservation) {
inherit (pkgs.config.garlic.sbatch) reservation;
} // {
exclusive = true;
time = "02:00:00";
qos = "debug";
jobName = "nbody-tampi";
inherit nextStage nixPrefix nodes ntasksPerNode;
}
);
control = {nextStage, conf, ...}: stages.control {
inherit (conf) loops;
inherit nextStage;
};
srun = {nextStage, conf, ...}: stages.srun {
inherit (conf) nixPrefix cpuBind;
inherit nextStage;
};
isolate = {nextStage, conf, ...}: stages.isolate {
clusterName = machineConf.name;
inherit (conf) nixPrefix;
inherit nextStage;
};
stdStages = [
sbatch
isolate
control
srun
isolate
];
}
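
The sbatch wrapper above only adds a reservation when the user declares one in ~/.config/nixpkgs/config.nix. A minimal sketch of such a file, with a placeholder reservation name:

{
  # Picked up by the `pkgs.config ? garlic.sbatch.reservation` check in the
  # listing above; the reservation name is a placeholder.
  garlic.sbatch.reservation = "my-reservation";
}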

@@ -1,88 +0,0 @@
{
bsc
, nbody
, genApp
, genConfigs
# Wrappers
, launchWrapper
, sbatchWrapper
, srunWrapper
, argvWrapper
, controlWrapper
, nixsetupWrapper
}:
let
# Set the configuration for the experiment
config = {
cc = [ bsc.icc ];
blocksize = [ 2048 ];
mpi = [ bsc.impi bsc.openmpi bsc.mpich ];
};
extraConfig = {
particles = 32*1024;
timesteps = 10;
ntasksPerNode = 2;
nodes = 1;
time = "00:10:00";
qos = "debug";
#mpi = bsc.impi;
#mpi = bsc.openmpi;
gitBranch = "garlic/mpi+send";
gitURL = "ssh://git@bscpm02.bsc.es/garlic/apps/nbody.git";
};
# Compute the cartesian product of all configurations
configs = map (conf: conf // extraConfig) (genConfigs config);
sbatch = conf: app: sbatchWrapper {
app = app;
nixPrefix = "/gpfs/projects/bsc15/nix";
exclusive = false;
ntasksPerNode = "${toString conf.ntasksPerNode}";
nodes = "${toString conf.nodes}";
time = conf.time;
qos = conf.qos;
chdirPrefix = "/home/bsc15/bsc15557/bsc-nixpkgs/out";
};
srun = app: srunWrapper {
app = app;
nixPrefix = "/gpfs/projects/bsc15/nix";
};
argv = conf: app:
with conf;
argvWrapper {
app = app;
argv = ''(-t ${toString timesteps} -p ${toString particles})'';
env = ''
export I_MPI_THREAD_SPLIT=1
'';
};
nbodyFn = conf:
with conf;
nbody.override { inherit cc mpi blocksize gitBranch gitURL; };
pipeline = conf:
sbatch conf (
srun (
nixsetupWrapper (
argv conf (
nbodyFn conf
)
)
)
)
;
# Ideally it should look like this:
#pipeline = sbatch nixsetup control argv nbodyFn;
jobs = map pipeline configs;
in
launchWrapper jobs
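
The comment above wishes the pipeline could be spelled as a flat list of wrappers instead of hand-nested calls. A sketch of how that flattening could work, assuming single-argument toy stand-ins for the wrappers (the real sbatch and argv wrappers also take a conf argument); compose and reverse are hypothetical helpers:

let
  reverse = xs:
    let n = builtins.length xs;
    in builtins.genList (i: builtins.elemAt xs (n - 1 - i)) n;

  # Wrap an innermost stage with a list of wrappers; the first element of
  # the list becomes the outermost wrapper.
  compose = wrappers: innermost:
    builtins.foldl' (acc: wrap: wrap acc) innermost (reverse wrappers);

  # Toy stand-ins for sbatchWrapper, srunWrapper, nixsetupWrapper and the
  # argv wrapper from the listing above.
  sbatch = next: "sbatch(${next})";
  srun = next: "srun(${next})";
  nixsetup = next: "nix-setup(${next})";
  argv = next: "argv(${next})";
in
  # Evaluates to "sbatch(srun(nix-setup(argv(nbody))))", the same nesting
  # the pipeline function above builds by hand.
  compose [ sbatch srun nixsetup argv ] "nbody"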

@@ -1,218 +0,0 @@
{
stdenv
, nixpkgs
, pkgs
, genApp
, genConfigs
, runWrappers
}:
with stdenv.lib;
let
bsc = pkgs.bsc;
# Set variable configuration for the experiment
varConfig = {
cc = [ bsc.icc ];
mpi = [ bsc.impi ];
#mpi = [ bsc.mpichDebug ];
blocksize = [ 1024 2048 ];
};
# Common configuration
common = {
# Compile time nbody config
gitBranch = "garlic/mpi+send";
#gitBranch = "garlic/tampi+send+oss+task";
# nbody runtime options
particles = 1024*4;
timesteps = 10;
# Resources
ntasksPerNode = "2";
nodes = "1";
# Stage configuration
enableTrebuchet = true;
enableSbatch = true;
enableControl = true;
enableExtrae = false;
enablePerf = false;
enableCtf = false;
enableStrace = true;
# MN4 path
nixPrefix = "/gpfs/projects/bsc15/nix";
};
# Compute the cartesian product of all configurations
configs = map (conf: conf // common) (genConfigs varConfig);
stageProgram = stage:
if stage ? programPath
then "${stage}${stage.programPath}" else "${stage}";
w = runWrappers;
sbatch = {stage, conf, ...}: with conf; w.sbatch (
# Allow a user to define a custom reservation for the job in MareNostrum4
# by setting the garlic.sbatch.reservation attribute in the
# ~/.config/nixpkgs/config.nix file. If the attribute is not set, no
# reservation is used. The experiment may override the user reservation if
# it sets one of its own, just as it does with nodes or ntasksPerNode.
optionalAttrs (pkgs.config ? garlic.sbatch.reservation) {
inherit (pkgs.config.garlic.sbatch) reservation;
} // {
program = stageProgram stage;
exclusive = true;
time = "02:00:00";
qos = "debug";
jobName = "nbody-tampi";
inherit nixPrefix nodes ntasksPerNode;
}
);
control = {stage, conf, ...}: with conf; w.control {
program = stageProgram stage;
};
srun = {stage, conf, ...}: with conf; w.srun {
program = stageProgram stage;
srunOptions = "--cpu-bind=verbose,socket";
inherit nixPrefix;
};
statspy = {stage, conf, ...}: with conf; w.statspy {
program = stageProgram stage;
};
perf = {stage, conf, ...}: with conf; w.perf {
program = stageProgram stage;
perfArgs = "sched record -a";
};
nixsetup = {stage, conf, ...}: with conf; w.nixsetup {
program = stageProgram stage;
nixsetup = "${nixPrefix}/bin/nix-setup";
};
isolate = {stage, conf, ...}: with conf; w.isolate {
program = stageProgram stage;
clusterName = "mn4";
inherit stage nixPrefix;
};
extrae = {stage, conf, ...}: w.extrae {
program = stageProgram stage;
traceLib = "mpi"; # mpi -> libtracempi.so
configFile = ./extrae.xml;
};
ctf = {stage, conf, ...}: w.argv {
program = stageProgram stage;
env = ''
export NANOS6=ctf
export NANOS6_CTF2PRV=0
'';
};
strace = {stage, conf, ...}: w.strace {
program = stageProgram stage;
};
argv = {stage, conf, ...}: w.argv {
program = stageProgram stage;
#env = ''
# #export I_MPI_PMI_LIBRARY=${bsc.slurm17-libpmi2}/lib/libpmi2.so
# export I_MPI_DEBUG=+1000
# #export I_MPI_FABRICS=shm
# export MPICH_DBG_OUTPUT=VERBOSE
# export MPICH_DBG_CLASS=ALL
# export MPICH_DBG_OUTPUT=stdout
# export FI_LOG_LEVEL=Info
#'';
argv = ''( -t ${toString conf.timesteps}
-p ${toString conf.particles} )'';
};
bscOverlay = import ../../../overlay.nix;
genPkgs = newOverlay: nixpkgs {
overlays = [
bscOverlay
newOverlay
];
};
# We might be able to apply the overlays by invoking the fix function
# directly, but that requires having the definitions of both the bsc and
# the garlic packages expressed as overlays.
nbodyFn = {stage, conf, ...}: with conf;
let
# We set the mpi implementation to the one specified in the conf, so all
# packages in bsc will use that one.
customPkgs = genPkgs (self: super: {
bsc = super.bsc // { mpi = conf.mpi; };
});
in
customPkgs.bsc.garlic.nbody.override {
inherit cc blocksize mpi gitBranch;
};
experimentFn = w.experiment.override {
nixPrefix = common.nixPrefix;
};
stdStages = [
sbatch
isolate
control
srun
isolate
];
debugStages = with common; []
# Instrumentation with Extrae
++ optional enableExtrae extrae
# Optionally profile the next stages with perf
++ optional enablePerf perf
# Optionally profile Nanos6 with the new CTF instrumentation
++ optional enableCtf ctf
# Optionally run the program with strace
++ optional enableStrace strace
;
stages = stdStages ++ debugStages ++ [ argv nbodyFn ];
# List of actual programs to be executed
units = map (conf: w.unit { inherit conf stages; }) configs;
experiment = experimentFn units;
trebuchet = stage: w.trebuchet {
program = stageProgram stage;
nixPrefix = common.nixPrefix;
#experiment = experiment;
inherit stage;
};
isolatedRun = stage: isolate {
inherit stage;
conf = common;
};
final = trebuchet (isolatedRun experiment);
in
# We simply run each program one after another
#launch jobs
final
#jobs
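
The nbodyFn pattern above (repeated in several of these removed files) builds a per-configuration package set by stacking an extra overlay that pins bsc.mpi, so every package that depends on MPI is rebuilt against the implementation chosen by the configuration. A condensed sketch of that mechanism as a stand-alone function; paths and attribute names follow the listing and are not guaranteed to match the final tree:

{ nixpkgs, conf }:
let
  bscOverlay = import ../../../overlay.nix;

  # Extra overlay: every package that reads bsc.mpi now sees conf.mpi.
  mpiOverlay = self: super: {
    bsc = super.bsc // { mpi = conf.mpi; };
  };

  customPkgs = nixpkgs {
    overlays = [ bscOverlay mpiOverlay ];
  };
in
  # Rebuild nbody against the selected compiler, MPI and block size.
  customPkgs.bsc.garlic.nbody.override {
    inherit (conf) cc blocksize mpi gitBranch;
  }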

@@ -1,7 +1,7 @@
{
stdenv
, stdexp
, pkgs
, bsc
, targetMachine
, stages
}:
@@ -9,8 +9,6 @@
with stdenv.lib;
let
bsc = pkgs.bsc;
# Configurations for each unit (using the cartesian product)
confUnit = with bsc; {
blocksize = [ 1024 2048 ];

@@ -4,7 +4,7 @@ super: /* Previous stage */
let
inherit (self.lib) callPackageWith;
inherit (self.lib) callPackagesWith;
callPackage = callPackageWith (self // self.bsc);
callPackage = callPackageWith (self // self.bsc // self.garlic);
# --------------------------------------------------------- #
# BSC Packages
@@ -236,57 +236,18 @@ let
hist = callPackage ./garlic/postprocess/hist { };
exp = {
#noise = callPackage ./garlic/exp/noise.nix { };
nbody = {
bs = callPackage ./garlic/exp/nbody/bs.nix {
pkgs = self // self.bsc.garlic;
nixpkgs = import <nixpkgs>;
genApp = self.bsc.garlic.genApp;
genConfigs = self.bsc.garlic.genConfigs;
stages = self.bsc.garlic.stages;
};
tampi = callPackage ./garlic/exp/nbody/tampi.nix {
pkgs = self // self.bsc.garlic;
nixpkgs = import <nixpkgs>;
genApp = self.bsc.garlic.genApp;
genConfigs = self.bsc.garlic.genConfigs;
stages = self.bsc.garlic.stages;
};
test = callPackage ./garlic/exp/nbody/test.nix {
pkgs = self // self.bsc.garlic;
inherit (self.garlic) stdexp targetMachine stages;
};
# mpi = callPackage ./bsc/garlic/exp/nbody/mpi.nix { };
test = callPackage ./garlic/exp/nbody/test.nix { };
};
saiph = {
numcomm = callPackage ./garlic/exp/saiph/numcomm.nix {
pkgs = self // self.bsc.garlic;
nixpkgs = import <nixpkgs>;
genApp = self.bsc.garlic.genApp;
genConfigs = self.bsc.garlic.genConfigs;
stages = self.bsc.garlic.stages;
};
numcomm = callPackage ./garlic/exp/saiph/numcomm.nix { };
};
creams = {
ss = {
pure = callPackage ./garlic/exp/creams/ss+pure.nix {
pkgs = self // self.bsc.garlic;
nixpkgs = import <nixpkgs>;
genApp = self.bsc.garlic.genApp;
genConfigs = self.bsc.garlic.genConfigs;
stages = self.bsc.garlic.stages;
};
hybrid = callPackage ./garlic/exp/creams/ss+hybrid.nix {
pkgs = self // self.bsc.garlic;
nixpkgs = import <nixpkgs>;
genApp = self.bsc.garlic.genApp;
genConfigs = self.bsc.garlic.genConfigs;
stages = self.bsc.garlic.stages;
};
pure = callPackage ./garlic/exp/creams/ss+pure.nix { };
hybrid = callPackage ./garlic/exp/creams/ss+hybrid.nix { };
};
};
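
The one-line change to callPackage above is what lets the surviving experiment entries drop their hand-written argument lists. A sketch of the before and after call sites under that enlarged scope; attribute names follow the diff, and the surrounding overlay (which binds self) is abbreviated:

let
  inherit (self.lib) callPackageWith;

  # After this commit the garlic attributes are part of the callPackage
  # scope, so callPackageWith can resolve stdexp, targetMachine and stages
  # by itself.
  callPackage = callPackageWith (self // self.bsc // self.garlic);
in
{
  # Before: every garlic dependency passed explicitly.
  testBefore = callPackage ./garlic/exp/nbody/test.nix {
    inherit (self.garlic) stdexp targetMachine stages;
  };

  # After: the same arguments come from the enlarged scope.
  testAfter = callPackage ./garlic/exp/nbody/test.nix { };
}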