osu: update experiments using stdexp
commit 0c9e89dcc0
parent ebcbf91fbe
@@ -94,4 +94,8 @@
   lulesh = {
     test = callPackage ./lulesh/test.nix { };
   };
+
+  osu = {
+    latency = callPackage ./osu/latency.nix { };
+  };
 }
@@ -1,86 +1,52 @@
 {
-  bsc
-, genApp
-, genConfigs
-
-# Wrappers
-, launchWrapper
-, sbatchWrapper
-, srunWrapper
-, argvWrapper
-, controlWrapper
-, nixsetupWrapper
+  stdenv
+, stdexp
+, bsc
+, targetMachine
+, stages
 
 # Should we test the network (true) or the shared memory (false)?
 , interNode ? true
-
-# Enable multiple threads?
-, multiThread ? false
 }:
 
 let
-  # Set the configuration for the experiment
-  config = {
-    mpi = [ bsc.impi bsc.openmpi bsc.mpich ];
+  # Initial variable configuration
+  varConf = with bsc; {
+    mpi = [ impi bsc.openmpi mpich ];
   };
 
-  extraConfig = {
+  # Generate the complete configuration for each unit
+  genConf = with bsc; c: targetMachine.config // rec {
     nodes = if interNode then 2 else 1;
     ntasksPerNode = if interNode then 1 else 2;
+    cpusPerTask = 1;
     time = "00:10:00";
     qos = "debug";
+    loops = 30;
+    expName = "osu-latency-${mpi.name}";
+    unitName = expName;
+    jobName = expName;
+    inherit (c) mpi;
   };
 
-  # Compute the cartesian product of all configurations
-  configs = map (conf: conf // extraConfig) (genConfigs config);
-
-  sbatch = conf: app: sbatchWrapper {
-    app = app;
-    nixPrefix = "/gpfs/projects/bsc15/nix";
-    exclusive = true;
-    ntasksPerNode = "${toString conf.ntasksPerNode}";
-    nodes = "${toString conf.nodes}";
-    time = conf.time;
-    qos = conf.qos;
-    chdirPrefix = "/home/bsc15/bsc15557/bsc-nixpkgs/out";
+  # Compute the array of configurations
+  configs = stdexp.buildConfigs {
+    inherit varConf genConf;
   };
 
-  srun = app: srunWrapper {
-    app = app;
-    nixPrefix = "/gpfs/projects/bsc15/nix";
+  exec = {nextStage, conf, ...}: with conf; stages.exec {
+    inherit nextStage;
+    # We simply run the osu_latency test
+    program = "${nextStage}/bin/osu_latency";
   };
 
-  argv = app:
-    argvWrapper {
-      app = app;
-      program = "bin/osu_latency";
-      argv = "()";
-      env = ''
-        export I_MPI_THREAD_SPLIT=1
-      '';
-    };
-
-  osumbFn = conf:
-    with conf;
-    bsc.osumb.override { inherit mpi; };
-
-  pipeline = conf:
-    sbatch conf (
-      nixsetupWrapper (
-        controlWrapper (
-          srun (
-            nixsetupWrapper (
-              argv (
-                osumbFn conf))))));
-
-  #pipeline = conf: sbatch conf (srun (nixsetupWrapper (argv (osumbFn conf))));
-  #pipeline = conf: sbatch conf (srun (nixsetupWrapper (argv bsc.osumb)));
-
-  # Ideally it should look like this:
-  #pipeline = sbatch nixsetup control argv nbodyFn;
-
-  jobs = map pipeline configs;
-
+  program = {nextStage, conf, ...}: bsc.osumb.override {
+    # Use the specified MPI implementation
+    inherit (conf) mpi;
+  };
+
+  pipeline = stdexp.stdPipeline ++ [ exec program ];
+
 in
 
-launchWrapper jobs
+stdexp.genExperiment { inherit configs pipeline; }
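
The old file computed the unit list by hand (its comment read "Compute the cartesian product of all configurations"); the new one delegates that to stdexp.buildConfigs with varConf and genConf. Below is a minimal sketch, in plain Nix, of what such a helper could do, assuming it expands every combination of the lists in varConf and passes each combination through genConf. The { lib }: argument and the name cartesian are illustrative, not the actual stdexp implementation.

{ lib }:

rec {
  # Illustrative sketch only, not the actual stdexp code.
  # Cartesian product of an attrset of lists:
  # { mpi = [ a b ]; } -> [ { mpi = a; } { mpi = b; } ]
  cartesian = attrs:
    lib.foldl'
      (acc: name:
        lib.concatMap
          (partial: map (value: partial // { ${name} = value; }) attrs.${name})
          acc)
      [ { } ]
      (lib.attrNames attrs);

  # Each combination is then completed into a full unit configuration,
  # here by mapping genConf over the product.
  buildConfigs = { varConf, genConf }: map genConf (cartesian varConf);
}

With varConf = { mpi = [ impi openmpi mpich ]; } this yields one configuration per MPI implementation, which is what the experiment iterates over.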
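
The hand-built chain of wrappers (sbatch, nixsetup, control, srun, argv) is likewise replaced by a list of stages, stdexp.stdPipeline ++ [ exec program ], where each stage is a function of { nextStage, conf, ... }. The following is a hedged sketch of how such a list could be folded back into the nested call chain the old file wrote out explicitly; stagePipeline is a hypothetical name, not part of stdexp.

{ lib }:

{
  # Illustrative sketch only, not the actual stdexp code.
  # Fold the stage list right-to-left so the first stage ends up outermost,
  # mirroring the old sbatch (nixsetup (control (srun (... (program))))).
  # The innermost stage receives nextStage = null and, like the new
  # program stage in this commit, simply ignores it.
  stagePipeline = stages: conf:
    lib.foldr
      (stage: next: stage { nextStage = next; inherit conf; })
      null
      stages;
}

Folding right-to-left keeps the submission stage outermost, which is the same ordering the old file expressed by nesting the wrapper calls by hand.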