Remove garlic from bscpkgs

parent 7b72b38023
commit 4533c94b4f
@ -1,54 +0,0 @@
{
  stdenv
, lib
, cc
, nanos6 ? null
, mcxx ? null
, mpi
, gitBranch
}:

with lib;
stdenv.mkDerivation rec {
  name = "bigsort";

  src = builtins.fetchGit {
    url = "ssh://git@bscpm03.bsc.es/dalvare1/bigsort.git";
    ref = "${gitBranch}";
  };

  #sourceRoot = "./BigSort";

  preBuild = ''
    cd BigSort
    export I_MPI_CXX=${cc.CXX}
  '';

  buildInputs = [
    cc
    mpi
  ]
  ++ optional (mcxx != null) mcxx
  ++ optional (nanos6 != null) nanos6;

  makeFlags = [
    "CC=${cc.CC}"
    "CXX=${cc.CXX}"
    "CPP_BIN=mpicxx"
    "CLUSTER=MareNostrum4"
    "OPENMP=yes"
    "Debug=no"
    "OPENMP_FLAGS=-qopenmp"
  ];

  enableParallelBuilding = true;

  installPhase = ''
    mkdir -p $out/bin
    cp bigsort $out/bin/BigSort
  '';

  programPath = "/bin/BigSort";

  hardeningDisable = [ "all" ];
}
@ -1,46 +0,0 @@
{
  stdenv
, lib
, cc
, mpi
}:

with lib;
stdenv.mkDerivation rec {
  name = "genseq";

  src = builtins.fetchGit {
    url = "ssh://git@bscpm03.bsc.es/dalvare1/bigsort.git";
    ref = "garlic/mpi+send+omp+task";
  };

  postUnpack = "sourceRoot=$sourceRoot/GenSeq";

  # FIXME: Remove the ../commons/Makefile, as it is not useful here; we only
  # need the CPP_SRC and OBJ variables.
  postPatch = ''
    sed -i '1cCPP_SRC = $(wildcard *.cpp)' Makefile
    sed -i '2cOBJ = $(CPP_SRC:.cpp=.o)' Makefile
  '';

  buildInputs = [
    cc
    mpi
  ];

  makeFlags = [
    "I_MPI_CXX=${cc.CXX}"
    "CPP_BIN=mpicxx"
  ];

  enableParallelBuilding = true;

  installPhase = ''
    mkdir -p $out/bin
    cp genseq $out/bin/genseq
  '';

  programPath = "/bin/genseq";

  hardeningDisable = [ "all" ];
}
@ -1,46 +0,0 @@
{
  stdenv
, lib
, cc
, mpi
}:

with lib;
stdenv.mkDerivation rec {
  name = "shuffle";

  src = builtins.fetchGit {
    url = "ssh://git@bscpm03.bsc.es/dalvare1/bigsort.git";
    ref = "garlic/mpi+send+omp+task";
  };

  postUnpack = "sourceRoot=$sourceRoot/ShuffleSeq";

  # FIXME: Remove the ../commons/Makefile, as it is not useful here; we only
  # need the CPP_SRC and OBJ variables.
  postPatch = ''
    sed -i '1cCPP_SRC = $(wildcard *.cpp)' Makefile
    sed -i '2cOBJ = $(CPP_SRC:.cpp=.o)' Makefile
  '';

  buildInputs = [
    cc
    mpi
  ];

  makeFlags = [
    "I_MPI_CXX=${cc.CXX}"
    "CPP_BIN=mpicxx"
  ];

  enableParallelBuilding = true;

  installPhase = ''
    mkdir -p $out/bin
    cp shuffle $out/bin/shuffle
  '';

  programPath = "/bin/shuffle";

  hardeningDisable = [ "all" ];
}
@ -1,43 +0,0 @@
{
  stdenv
, libconfig
, nanos6
, mpi
, uthash
, fftw
, tampi
, hdf5
}:

stdenv.mkDerivation rec {
  name = "cpic";

  # Use my current cpic version, so I can test changes without commits
  #src = /home/Computational/rarias/cpic;

  src = builtins.fetchGit {
    url = "https://github.com/rodarima/cpic";
    # rev = "73bd70448587f0925b89e24c8f17e412ea3958e6";
    ref = "simd";
  };

  enableParallelBuilding = true;
  dontStrip = true;

  buildInputs = [
    libconfig
    nanos6
    mpi
    uthash
    fftw
    tampi
    hdf5
  ];

  installPhase = ''
    mkdir -p $out/bin
    cp cpic $out/bin/cpic
  '';

  hardeningDisable = [ "all" ];
}
@ -1,57 +0,0 @@
{
  stdenv
, nanos6
, mpi
, openmpi
, impi
, tampi
, mcxx
, gnuDef
, intelDef
, cc
, gitBranch ? "garlic/mpi+send+seq"
, gitCommit ? null
, garlicTools
}:

assert (mpi == impi || mpi == openmpi);

let
  # FIXME: We should find a better way to specify the MPI implementation
  # and the compiler.
  mpiName = if mpi == openmpi then "OpenMPI" else "IntelMPI";
  compName = if cc == intelDef then "Intel" else "GNU";

  gitSource = garlicTools.fetchGarlicApp {
    appName = "creams";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "creams";

  inherit (gitSource) src gitBranch gitCommit;

  programPath = "/bin/creams.exe";

  buildInputs = [ nanos6 mpi cc tampi mcxx ];

  configurePhase = ''
    export TAMPI_HOME=${tampi}

    . etc/bashrc

    export FORTRAN_COMPILER=${compName}
    export MPI_LIB=${mpiName}

    CREAMS_UPDATE_ENVIRONMENT
  '';

  installPhase = ''
    mkdir -p $out/bin
    cp -a build/* $out/bin
  '';

  hardeningDisable = [ "all" ];
}
@ -1,49 +0,0 @@
#!/usr/bin/env python3

import argparse

parser = argparse.ArgumentParser(description="Generate a grid.dat input file for CREAMS")

parser.add_argument('--npx', type=int, help='number of processes in X', default=1)
parser.add_argument('--npy', type=int, help='number of processes in Y', default=1)
parser.add_argument('--npz', type=int, help='number of processes in Z', default=32)
parser.add_argument('--grain', type=int, help='granularity', default=9)
parser.add_argument('--nx', type=int, help='number of points in X', default=20)
parser.add_argument('--ny', type=int, help='number of points in Y', default=20)
parser.add_argument('--nz', type=int, help='number of points in Z', default=7000)
parser.add_argument('--dx', type=float, help='grid spacing in X', default=0.0025062657)
parser.add_argument('--dy', type=float, help='grid spacing in Y', default=0.0025062657)
parser.add_argument('--dz', type=float, help='grid spacing in Z', default=0.0025062657)

args = parser.parse_args()

grain_str = "%d %d" % (args.grain, args.grain)
boundary = "extrapolation"

# Print
print(' %-49d number of processes in x-direction (0 if automatic)' % args.npx)
print(' %-49d number of processes in y-direction (0 if automatic)' % args.npy)
print(' %-49d number of processes in z-direction (0 if automatic)' % args.npz)
print(' ')
print(' %-49s subdomain granularity' % grain_str)
print(' ')
print(' %-49s -x boundary' % boundary)
print(' %-49s +x boundary' % boundary)
print(' %-49s -y boundary' % boundary)
print(' %-49s +y boundary' % boundary)
print(' %-49s -z boundary' % boundary)
print(' %-49s +z boundary' % boundary)
print(' ')
print(' x-direction')
for i in range(args.nx):
    print("%.9e" % (i * args.dx))
print(' ')
print(' y-direction')
for i in range(args.ny):
    print("%.9e" % (i * args.dy))
print(' ')
print(' z-direction')
for i in range(args.nz):
    print("%.9e" % (i * args.dz))
print(' ')
print(' END')
@ -1,12 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-03-31 for repo:
  # ssh://git@bscpm03.bsc.es/pmartin1/creams-simplified.git

  "garlic/mpi+isend+omp+task" = "e6aa540820ee12d3d45d0eef8e7eeb2f0f1daea2";
  "garlic/mpi+isend+oss+task" = "016f33b8bec996a4546e8f08b1b6b1709f00499b";
  "garlic/mpi+send+omp+fork" = "e56e059264ad1bfe5e0c96a8b9303d21dd7fa20a";
  "garlic/mpi+send+omp+task" = "919580213de34bc5b6ba60c768c5dde5e501a1f6";
  "garlic/mpi+send+oss+task" = "adab8b66f27317d51445648302e7b133edf4837d";
  "garlic/mpi+send+seq" = "956125f9334493d31ceee3fa7024efa65bee9ca5";
  "garlic/tampi+isend+oss+task" = "14a121627679a251909d4b8103d260e27eac1d29";
}
@ -1,48 +0,0 @@
{
  stdenv
, python3
, granul ? 9
, nprocx ? 1
, nprocy ? 1
, nprocz ? 1
, nx ? 20
, ny ? 20
, nz ? 7000
, gitBranch ? "garlic/mpi+send+seq"
, gitCommit ? null
, garlicTools
}:

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "creams";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };

  gen = ./gen_grid.py;
in
stdenv.mkDerivation rec {
  name = "creams-input";

  buildInputs = [ python3 ];

  inherit (gitSource) src gitBranch gitCommit;

  phases = [ "unpackPhase" "installPhase" ];

  installPhase = ''
    mkdir -p $out
    cp -a SodTubeBenchmark $out/

    python3 ${gen} \
      --npx ${toString nprocx} \
      --npy ${toString nprocy} \
      --npz ${toString nprocz} \
      --grain ${toString granul} \
      --nx ${toString nx} \
      --ny ${toString ny} \
      --nz ${toString nz} \
      > $out/SodTubeBenchmark/grid.dat
  '';
}
@ -1,77 +0,0 @@
{
  stdenv
, lib
, mpi ? null
, tampi ? null
, mcxx ? null
, cc
, gitBranch ? "garlic/tampi+send+oss+task"
, gitCommit ? null
, fwiParams
, garlicTools
}:

with lib;

assert !(tampi != null && mcxx == null);

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "fwi";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "fwi";

  inherit (gitSource) src gitBranch gitCommit;

  enableParallelBuilding = false;

  buildInputs = [
    cc
  ]
  ++ optional (mpi != null) mpi
  ++ optional (tampi != null) tampi
  ++ optional (mcxx != null) mcxx;

  # FIXME: Correct this in the Makefile so we can just type "make fwi"
  # FIXME: Allow multiple MPI implementations
  postPatch = ''
    sed -i 's/= OPENMPI$/= INTEL/g' Makefile
    sed -i 's/USE_O_DIRECT ?= NO/USE_O_DIRECT ?= YES/g' Makefile || true
  '';

  # FIXME: This is an ugly hack.
  # When using _GNU_SOURCE or any other definition used in features.h, we need
  # to define them before mcc includes nanos6.h from the command line, so the
  # only option is to set them on the command line with -D. The DEFINES
  # variable below reaches the command line of the preprocessing stage with gcc.
  preConfigure = ''
    export DEFINES=-D_GNU_SOURCE

    make depend

    cp ${fwiParams}/generated_model_params.h src/
  '';

  # We compile the ModelGenerator using gcc *only*, as otherwise it will
  # be compiled with nanos6, which requires access to /sys to determine
  # hardware capabilities. So it will fail in the nix-build environment,
  # as there is no /sys mounted.
  makeFlags = [
    #"COMPILER=GNU"
    #"CC=${cc.cc.CC}"
    "fwi"
  ];

  installPhase = ''
    mkdir -p $out/bin
    cp fwi $out/bin
  '';

  programPath = "/bin/fwi";

  hardeningDisable = [ "all" ];
}
@ -1,15 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-19 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/fwi.git

  "garlic/mpi+send+omp+fork" = "ea1ed53f20858dc082f9cbbe0e7e8fb28a6fe58a";
  "garlic/mpi+send+omp+task" = "aa8881056fb3fa98832d203899beacfb8fa702f6";
  "garlic/mpi+send+oss+task" = "c184484af8498fd939761575b34bb46ba3be0dde";
  "garlic/mpi+send+oss+task+NOREUSE" = "0062093ef744c694d69673d1881719958bbed353";
  "garlic/mpi+send+seq" = "cc184ad77f143481e2506933d0cdc038c349f071";
  "garlic/omp+task" = "1fe23690d74ae89ace5383ab165a6ce6346c2afd";
  "garlic/oss+task" = "68e6e8f17ee03addb460745acea2a38241ca89ee";
  "garlic/seq" = "aa6b6c5857125796c65fbf23018d557e4693f1ae";
  "garlic/tampi+isend+oss+task" = "7c98194e13786c4e8ecfa8a144587e5a95e09205";
  "garlic/tampi+send+oss+task" = "e08d66f7453c4034a363bb2d22c5248fe86ed740";
}
@ -1,65 +0,0 @@
{
  stdenv
, lib
, nz ? 200
, nx ? 200
, ny ? 500
, gitBranch ? "garlic/seq"
, gitCommit ? null
, garlicTools
}:

with lib;
with builtins;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "fwi";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "fwi-params";

  inherit (gitSource) src gitBranch gitCommit;

  enableParallelBuilding = false;

  # Set the input size with the weird order (nz,nx,ny).
  postPatch = ''
    sed -i 1c${toString nz} SetupParams/fwi_params.txt
    sed -i 2c${toString nx} SetupParams/fwi_params.txt
    sed -i 3c${toString ny} SetupParams/fwi_params.txt
  '';

  # FIXME: This is an ugly hack.
  # When using _GNU_SOURCE or any other definition used in features.h, we need
  # to define them before mcc includes nanos6.h from the command line, so the
  # only option is to set them on the command line with -D. The DEFINES
  # variable below reaches the command line of the preprocessing stage with gcc.
  preConfigure = ''
    export DEFINES=-D_GNU_SOURCE
  '';

  # We compile the ModelGenerator using gcc *only*, as otherwise it will
  # be compiled with nanos6, which requires access to /sys to determine
  # hardware capabilities. So it will fail in the nix-build environment,
  # as there is no /sys mounted.
  # Also, we need to compile it with the builder platform as target, as it is
  # going to be executed during the build to generate the
  # src/generated_model_params.h header.
  makeFlags = [ "COMPILER=GNU" "params" ];

  installPhase = ''
    mkdir -p $out/
    cp src/generated_model_params.h $out/
    cp SetupParams/fwi_params.txt $out/
    cp SetupParams/fwi_frequencies.txt $out/

    mkdir -p $out/bin
    cp ModelGenerator $out/bin/
  '';

  hardeningDisable = [ "all" ];
}
@ -1,39 +0,0 @@
{
  stdenv
, mpi
, tampi
, mcxx
, gitBranch ? "garlic/mpi+send+seq"
, gitCommit ? null
, garlicTools
}:

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "heat";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {

  name = "heat";

  inherit (gitSource) src gitBranch gitCommit;

  patches = [ ./print-times.patch ];

  buildInputs = [ mpi mcxx tampi ];

  programPath = "/bin/${name}";

  installPhase = ''
    mkdir -p $out/bin
    cp ${name} $out/bin/

    mkdir -p $out/etc
    cp heat.conf $out/etc/
  '';

  hardeningDisable = [ "all" ];
}
@ -1,9 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-01 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/heat.git

  "garlic/mpi+send+oss+task" = "947c80070d4c53e441df54b8bfac8928b10c5fb2";
  "garlic/mpi+send+seq" = "f41e1433808d0cbecd88a869b451c927747e5d42";
  "garlic/tampi+isend+oss+task" = "b1273f9b4db32ba6e15e3d41343e67407ce2f54f";
  "garlic/tampi+send+oss+task" = "554bec249f9aa23dd92edcfa2ada1e03e05e121d";
}
@ -1,13 +0,0 @@
diff --git a/src/mpi/main.c b/src/mpi/main.c
index 44f4a99..08a1f5c 100644
--- a/src/mpi/main.c
+++ b/src/mpi/main.c
@@ -83,6 +83,8 @@ int main(int argc, char **argv)
 			conf.rows, conf.cols, conf.rows/nranks, totalElements, totalElements/nranks,
 			conf.rbs, conf.cbs, nranks, threads, conf.timesteps, end-start, throughput);
 		printf("time %e\n", end - start);
+		printf("start_time %.9f\n", start);
+		printf("end_time %.9f\n", end);
 	}
 
 	if (conf.generateImage) {
@ -1,55 +0,0 @@
{
  stdenv
, lib
, icc
, mpi ? null
, tampi ? null
, mcxx ? null
, gitBranch ? "garlic/mpi+isend+seq"
, gitCommit ? null
, garlicTools
}:

assert !(tampi != null && mcxx == null);

with lib;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "hpccg";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "hpccg";

  inherit (gitSource) src gitBranch gitCommit;

  programPath = "/bin/test_HPCCG-mpi.exe";

  buildInputs = [
    icc
  ]
  ++ optional (mpi != null) mpi
  ++ optional (tampi != null) tampi
  ++ optional (mcxx != null) mcxx;

  # The hpccg app fails to compile in parallel. Makefile must be fixed before.
  enableParallelBuilding = false;

  makeFlags = [
    "USE_MPI=-DUSING_MPI"
  ]
  ++ optional (tampi != null) "TAMPI_HOME=${tampi}";

  dontPatchShebangs = true;

  installPhase = ''
    echo ${tampi}
    mkdir -p $out/bin
    cp test_HPCCG-mpi.exe $out/bin
  '';

  hardeningDisable = [ "all" ];
}
@ -1,9 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-20 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/hpccg.git

  "garlic/mpi+isend+omp+fork" = "c84af0480d231961201f2904ee4e3fced9d5f9be";
  "garlic/mpi+isend+seq" = "d1b47cd459440700de1b68233ec4fe794343dbd4";
  "garlic/tampi+isend+oss+task" = "7238e9be2e4a7b028abc05d40b476462eaa3de6a";
  "garlic/tampi+isend+oss+taskfor" = "02ec60f43b8d68d74575ea0563a9029fd441f1f1";
}
@ -1,44 +0,0 @@
{
  stdenv
, cc
, nanos6
, mcxx
, mpi
, tampi
, gitBranch ? "garlic/seq"
, gitCommit ? null
, garlicTools
}:

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "hpcg";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "hpcg";

  inherit (gitSource) src gitBranch gitCommit;

  buildInputs = [
    cc nanos6 mcxx mpi tampi
  ];

  makeFlags = [
    "CC=${cc.CC}"
    "CXX=${cc.CXX}"
  ];

  enableParallelBuilding = true;

  installPhase = ''
    mkdir -p $out/bin
    cp bin/* $out/bin/
  '';

  programPath = "/bin/xhpcg";

  hardeningDisable = [ "all" ];
}
@ -1,18 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-20 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/hpcg.git

  "garlic/mpi" = "8c94ccfd97518ed947bd6be3386260b72fdcdff2";
  "garlic/mpi+SAVEMAT" = "5dd2ad9eba13dba67086f46c6e8519804d837383";
  "garlic/mpi+omp" = "d24c372dd9fda584e711efb612f172e5c3602804";
  "garlic/mpi+oss" = "519a867bb3a3e07440df05e60a62abad764524e5";
  "garlic/mpi+send+omp+fork" = "a08d31aedbc108e1c0081cdc5021827ac9022688";
  "garlic/omp" = "dcc8a40831cda884b9240af47e883ac997150ed3";
  "garlic/omp+SAVEMAT" = "40dbac86c905e192ecc8146e0e65e4c3a3c6dbf8";
  "garlic/omp+fork" = "042752b3dbcd9b0f4db524b6cdc911278ee1a51b";
  "garlic/omp+initsplit" = "5370e7ee26fb72ef100a79624f73ed2baa6bcc79";
  "garlic/oss" = "7e6e2d969b7904572f2475bf471e637651337761";
  "garlic/oss+task" = "034940756ccab88876609c3cba4dea0a0f5c944b";
  "garlic/seq" = "dee225571ab2572d7aa51df9846b01237ee941a1";
  "garlic/tampi+isend+oss+task" = "449a3980a767f91ca65d429490080961dcfba498";
}
@ -1,13 +0,0 @@
--- a/setup/Make.MPI_ICPC_OSS	2020-07-13 16:02:33.272257865 +0200
+++ b/setup/Make.MPI_ICPC_OSS	2020-07-13 16:04:34.344413390 +0200
@@ -91,8 +91,8 @@
 # - HPCG includes / libraries / specifics -------------------------------
 # ----------------------------------------------------------------------
 #
-HPCG_INCLUDES = -I$(INCdir) -I$(INCdir)/$(arch) $(MPinc) -I{TAMPI_HOME}/include
-HPCG_LIBS = ${TAMPI_HOME}/lib/libtampi.a
+HPCG_INCLUDES = -I$(INCdir) -I$(INCdir)/$(arch) $(MPinc) -I$(TAMPI_HOME)/include
+HPCG_LIBS = -l:libtampi.a
 #
 # - Compile time options -----------------------------------------------
 #
@ -1,60 +0,0 @@
{
  stdenv
, lib
, mpi
, gfortran
, tampi
, nanos6
, mcxx
, gitBranch ? "garlic/mpi+isend+seq"
, gitCommit ? null
, garlicTools
}:

with lib;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "ifsker";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "ifsker";

  inherit (gitSource) src gitBranch gitCommit;

  buildInputs = [ tampi mpi nanos6 mcxx gfortran ];

  # Mercurium seems to fail when building fortran in parallel
  enableParallelBuilding = false;

  # FIXME: Patch mcxx to use a directory other than $HOME for the lock
  # files.
  preConfigure = ''
    export TAMPI_HOME=${tampi}

    # mcxx requires $HOME for the lock files when compiling fortran,
    # so we use $TMPDIR to store them.
    export HOME=$TMPDIR
  '';

  makeFlags = [
    "-f" "Makefile.gcc"
  ];


  installPhase = ''
    mkdir -p $out/bin
    cp ${name} $out/bin/

    mkdir -p $out/etc
    cp -r data $out/etc/
    cp nanos6.toml $out/etc
  '';

  programPath = "/bin/${name}";

  hardeningDisable = [ "all" ];
}
@ -1,10 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-20 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/ifsker.git

  "garlic/mpi+isend+oss+fork" = "a6a20303101cb140571ddc1166e66843fbe83017";
  "garlic/mpi+isend+oss+task" = "a7bdb6c9b57aafbc50cdc790eb42e5bdd850f213";
  "garlic/mpi+isend+seq" = "bc97cf30835bbf6a825209485bb96fc8314e5bcb";
  "garlic/tampi+isend+oss+task" = "33408215bc231b70b60733fddea3d1b7431bb0d1";
  "garlic/tampi+send+oss+task" = "19dae956b4ef69916c0e8ad15bb6ced0085275cd";
}
@ -1,55 +0,0 @@
{
  super
, self
, bsc
, garlic
, callPackage
}:

{
  nbody = callPackage ./nbody/default.nix { };

  saiph = callPackage ./saiph/default.nix {
    cc = bsc.clangOmpss2;
    L3SizeKB = garlic.targetMachine.config.hw.cacheSizeKB.L3;
    cachelineBytes = garlic.targetMachine.config.hw.cachelineBytes;
  };

  creams = callPackage ./creams/default.nix {
    gnuDef = self.gfortran10;   # Default GNU compiler version
    intelDef = bsc.icc;         # Default Intel compiler version
  };

  creamsInput = callPackage ./creams/input.nix { };

  hpcg = callPackage ./hpcg/default.nix { };

  bigsort = {
    sort = callPackage ./bigsort/default.nix {
      gitBranch = "garlic/mpi+send+omp+task";
    };

    genseq = callPackage ./bigsort/genseq.nix { };

    shuffle = callPackage ./bigsort/shuffle.nix { };
  };

  heat = callPackage ./heat/default.nix { };

  miniamr = callPackage ./miniamr/default.nix {
    variant = "ompss-2";
  };

  ifsker = callPackage ./ifsker/default.nix { };

  lulesh = callPackage ./lulesh/default.nix { };

  hpccg = callPackage ./hpccg/default.nix { };

  fwi = rec {
    params = callPackage ./fwi/params.nix { };
    solver = callPackage ./fwi/default.nix {
      fwiParams = params;
    };
  };
}
@ -1,48 +0,0 @@
{
  stdenv
, lib
, impi
, mcxx
, icc
, tampi ? null
, gitBranch ? "garlic/mpi+isend+seq"
, gitCommit ? null
, garlicTools
}:

with lib;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "lulesh";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "lulesh";

  inherit (gitSource) src gitBranch gitCommit;

  dontConfigure = true;

  preBuild = optionalString (tampi != null) "export TAMPI_HOME=${tampi}";

  #TODO: Allow multiple MPI implementations and compilers
  buildInputs = [
    impi
    icc
    mcxx
  ];

  enableParallelBuilding = true;

  #TODO: Can we build an executable named "lulesh" in all branches?
  installPhase = ''
    mkdir -p $out/bin
    find . -name 'lulesh*' -type f -executable -exec cp \{\} $out/bin/${name} \;
  '';
  programPath = "/bin/${name}";

  hardeningDisable = [ "all" ];
}
@ -1,12 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-20 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/lulesh.git

  "garlic/mpi+isend+omp+fork" = "6cc85c55cb4840d6cde12bb285f5ab1ae7878618";
  "garlic/mpi+isend+oss+task" = "0a9e2cd1d64ab4fcf1860ace02866278ad289637";
  "garlic/mpi+isend+seq" = "9df5475c7dd2b345559fae5bd07ceea38f2e7b91";
  "garlic/tampi+isend+oss+task" = "28ce0cd69f9b4e65eff8141ec455d5f60e9b98b3";
  "garlic/tampi+isend+oss+taskfor" = "928f315ea426585a32231d950da651399e48d762";
  "garlic/tampi+isend+oss+taskloop" = "7957c1a2c84ae80edddcec9eafe7efdeefa68d58";
  "garlic/tampi+isend+oss+taskloopfor" = "7efa0535130a6726f5a46669cf171412d21adc9b";
}
@ -1,48 +0,0 @@
{
  stdenv
, lib
, tampi
, clangOmpss2
, mpi
, nanos6
, mcxx
, variant
}:

with lib;

assert (assertOneOf "variant" variant [ "openmp" "openmp-tasks" "ompss-2" ]);

let
  cc = mcxx;
in
stdenv.mkDerivation rec {
  name = "miniamr";

  src = builtins.fetchGit {
    url = "ssh://git@bscpm03.bsc.es/ksala/miniamr.git";
    ref = "master";
  };

  postUnpack = ''
    sourceRoot=$sourceRoot/${variant}
  '';

  buildInputs = [ tampi clangOmpss2 mpi nanos6 mcxx ];

  makeFlags = [
    "CC=${cc.CC}"
    "CXX=${cc.CXX}"
  ];

  enableParallelBuilding = true;

  installPhase = ''
    mkdir -p $out/bin
    cp miniAMR.x $out/bin/
  '';

  programPath = "/bin/miniAMR.x";

  hardeningDisable = [ "all" ];
}
@ -1,59 +0,0 @@
{
  stdenv
, lib
, cc
, mpi ? null
, tampi ? null
, mcxx ? null
, cflags ? null
, gitBranch ? "garlic/seq"
, gitCommit ? null
, blocksize ? 2048
, garlicTools
}:

assert !(tampi != null && mcxx == null);

with lib;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "nbody";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "nbody";

  inherit (gitSource) src gitBranch gitCommit;

  programPath = "/bin/nbody";

  buildInputs = [
    cc
  ]
  ++ optional (mpi != null) mpi
  ++ optional (tampi != null) tampi
  ++ optional (mcxx != null) mcxx;

  preBuild = (if cflags != null then ''
    makeFlagsArray+=(CFLAGS="${cflags}")
  '' else "");

  makeFlags = [
    "CC=${cc.CC}"
    "BS=${toString blocksize}"
  ]
  ++ optional (tampi != null) "TAMPI_HOME=${tampi}";

  dontPatchShebangs = true;

  installPhase = ''
    echo ${tampi}
    mkdir -p $out/bin
    cp nbody* $out/bin/${name}
  '';

  hardeningDisable = [ "all" ];
}
@ -1,13 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-20 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/nbody.git

  "garlic/mpi+send+oss+task" = "20aa856baa3268d99262588807911ad0b3318d09";
  "garlic/mpi+send+seq" = "3be64af0f949db5fd60fcd0334cf2cd8c9fa25c3";
  "garlic/omp+fork" = "9c869272df7c775f467a2220211a414e33321c00";
  "garlic/oss+task" = "13ab26fbad8662a1052cc94410386080bbf6a2ba";
  "garlic/seq" = "9dfea29189d14477bd75e6f741f0518e7e4e5e72";
  "garlic/seq+BLOCK" = "99408705628b374df4308dcf1cdbe2d21d1451c2";
  "garlic/tampi+isend+oss+task" = "653d26e4a0913d36ea18d4e72e65a04838bb138a";
  "garlic/tampi+send+oss+task" = "b1440ebc5f79165e5dfaa6a4ce7916eda410ec9a";
}
@ -1,35 +0,0 @@
{
  stdenv
, mpi
, fetchurl
}:

stdenv.mkDerivation {
  name = "ppong";

  src = fetchurl {
    url = "http://www.csl.mtu.edu/cs4331/common/PPong.c";
    sha256 = "0d1w72gq9627448cb7ykknhgp2wszwd117dlbalbrpf7d0la8yc0";
  };

  unpackCmd = ''
    mkdir src
    cp $src src/ppong.c
  '';

  dontConfigure = true;

  buildPhase = ''
    echo mpicc -include stdlib.h ppong.c -o ppong
    mpicc -include stdlib.h ppong.c -o ppong
  '';

  installPhase = ''
    mkdir -p $out/bin
    cp ppong $out/bin/ppong
    ln -s $out/bin/ppong $out/bin/run
  '';

  buildInputs = [ mpi ];
  hardeningDisable = [ "all" ];
}
@ -1,106 +0,0 @@
{
  stdenv
, lib
, nanos6
, mpi
, tampi
, cc
, vtk
, boost
, gitBranch ? "master"
, gitCommit ? null
, enableManualDist ? false
, nbgx ? null
, nbgy ? null
, nbgz ? null
, nblx ? null
, nbly ? null
, nblz ? null
, nsteps ? null
, numComm ? null
, enableVectFlags ? false
, enableDebugFlags ? false
, enableAsanFlags ? false
, cachelineBytes ? null
, L3SizeKB ? null
# Problem size:
, sizex ? 3
, sizey ? 4
, sizez ? 4
, garlicTools
}:

assert enableManualDist -> (nbgx != null);
assert enableManualDist -> (nbgy != null);
assert enableManualDist -> (nbgz != null);

with lib;
with lib.versions;

let
  gitSource = garlicTools.fetchGarlicApp {
    appName = "saiph";
    inherit gitCommit gitBranch;
    gitTable = import ./git-table.nix;
  };
in
stdenv.mkDerivation rec {
  name = "saiph";

  inherit (gitSource) src gitBranch gitCommit;

  programPath = "/bin/Heat3D_vect";

  enableParallelBuilding = true;
  dontStrip = true;
  enableDebugging = true;

  buildInputs = [
    nanos6
    mpi
    tampi
    cc
    vtk
    boost
  ];

  preBuild = ''
    cd saiphv2/cpp/src
    export VTK_VERSION=${majorMinor (getVersion vtk.name)}
    export VTK_HOME=${vtk}
    make clean

    sed -i '/SIZEX =/s/3/${toString sizex}/g' testApp/Heat3D_vect.cpp
    sed -i '/SIZEY =/s/4/${toString sizey}/g' testApp/Heat3D_vect.cpp
    sed -i '/SIZEZ =/s/4/${toString sizez}/g' testApp/Heat3D_vect.cpp
  '';

  makeFlags = [
    "-f" "Makefile.${cc.CC}"
    "apps"
    "APP=Heat3D_vect"
  ] ++ optional (cachelineBytes != null) "ROW_ALIGNMENT=${toString cachelineBytes}"
    ++ optional (L3SizeKB != null) "L3_SIZE_K=${toString L3SizeKB}"
    ++ optional (enableManualDist) "DIST_SET=1"
    ++ optional (enableManualDist) "NBG_X=${toString nbgx}"
    ++ optional (enableManualDist) "NBG_Y=${toString nbgy}"
    ++ optional (enableManualDist) "NBG_Z=${toString nbgz}"
    ++ optional (nblx != null) "NBL_X=${toString nblx}"
    ++ optional (nbly != null) "NBL_Y=${toString nbly}"
    ++ optional (nblz != null) "NBL_Z=${toString nblz}"
    ++ optional (nsteps != null) "NSTEPS=${toString nsteps}"
    ++ optional (numComm != null) "NUM_COMM=${toString numComm}"
    ++ optional (enableVectFlags) "VECT_CHECKS=1"
    ++ optional (enableDebugFlags) "DEBUG_CHECKS=1"
    ++ optional (enableAsanFlags) "SANITIZE_CHECKS=1"
  ;

  installPhase = ''
    mkdir -p $out/lib
    mkdir -p $out/bin
    cp obj/libsaiphv2.so $out/lib/
    cp bin/Heat3D_vect $out/bin/
  '';

  hardeningDisable = [ "all" ];
}
@ -1,10 +0,0 @@
{
  # Auto-generated with garlic-git-table on 2021-04-19 for repo:
  # ssh://git@bscpm03.bsc.es/garlic/apps/saiph.git

  "garlic/mpi+isend+omp+fork+simd" = "96823846b327b6860f05d428f0cd5ed8ca537a0e";
  "garlic/mpi+isend+omp+task+simd" = "de0346a559120f561bff554aa86b34d01214b714";
  "garlic/mpi+isend+seq+simd" = "1411dad765231f5d3cec9f621526583974232d42";
  "garlic/tampi+isend+omp+task+simd" = "587a7651df8eb69cae4a79bdfc5cb7f50723f3ce";
  "garlic/tampi+isend+oss+task+simd" = "3731197d3e35df248fa6bdb7e4cb05c5dd4f2597";
}
@ -1,31 +0,0 @@
{
  stdenv
, fig
}:

stdenv.mkDerivation {
  name = "report.tar.gz";
  src = ./report;
  buildPhase = ''
    pwd
    ls -l
    grep -o '@[^ @]*@' report.tex | sed 's/@//g' | sort -u > list

    echo "fig:" > fun.nix
    echo "'''" >> fun.nix
    for line in $(cat list); do
      localPath=$(echo $line | tr '.' '/')
      echo "mkdir -p $localPath" >> fun.nix
      echo "cp -r \''${$line}/* $localPath" >> fun.nix
      echo "sed -i 's;@$line@;$localPath;g' report.tex" >> fun.nix
    done
    echo "'''" >> fun.nix

    echo " ---------- this is the fun.nix -------------"
    cat fun.nix
    echo " --------------------------------------------"
  '';
  installPhase = ''
    cp fun.nix $out
  '';
}
@ -1 +0,0 @@
import ../default.nix
@ -1,58 +0,0 @@
{
  stdenv
, bash
, bashInteractive
, busybox
, extraInputs ? []
}:

let
  inputrc = ./inputrc;
in
stdenv.mkDerivation {
  name = "develop";
  preferLocalBuild = true;
  phases = [ "installPhase" ];
  buildInputs = extraInputs ++ [ busybox ];
  installPhase = ''
    cat > $out <<EOF
    #!${bash}/bin/bash

    # This program loads an environment with the given programs available.
    # Requires /nix to be available.

    curdir="\$(pwd)"
    export "buildInputs=$buildInputs"
    # ${stdenv}
    export "PATH=$PATH"
    export "out=/fake-output-directory"
    export NIX_BUILD_TOP=.
    export NIX_STORE=/nix/store
    export PS1='\[\033[1;32m\]develop\$\[\033[0m\] '

    export TMUX_TMPDIR=/tmp
    export TMPDIR=/tmp
    export TEMPDIR=/tmp
    export TMP=/tmp
    export TEMP=/tmp

    export LANG=en_US.UTF-8

    source ${stdenv}/setup

    # Access to bin and nix tools for srun, as it keeps the PATH
    export "PATH=\$PATH:/bin"
    export "PATH=$PATH:/gpfs/projects/bsc15/nix/bin"
    export "SHELL=${bashInteractive}/bin/bash"
    export HISTFILE="\$curdir/.histfile"
    export INPUTRC=${inputrc}

    if [[ -z "\$@" ]]; then
      exec ${bashInteractive}/bin/bash
    else
      exec "\$@"
    fi
    EOF
    chmod +x $out
  '';
}
@ -1,37 +0,0 @@
# inputrc borrowed from CentOS (RHEL).

set bell-style none

set meta-flag on
set input-meta on
set convert-meta off
set output-meta on
set colored-stats on

#set mark-symlinked-directories on

$if mode=emacs

# for linux console and RH/Debian xterm
"\e[1~": beginning-of-line
"\e[4~": end-of-line
"\e[5~": beginning-of-history
"\e[6~": end-of-history
"\e[3~": delete-char
"\e[2~": quoted-insert
"\e[5C": forward-word
"\e[5D": backward-word
"\e[1;5C": forward-word
"\e[1;5D": backward-word

# for rxvt
"\e[8~": end-of-line

# for non RH/Debian xterm, can't hurt for RH/DEbian xterm
"\eOH": beginning-of-line
"\eOF": end-of-line

# for freebsd console
"\e[H": beginning-of-line
"\e[F": end-of-line
$endif
garlic/doc/.gitignore (vendored)
@ -1,6 +0,0 @@
*.utf8
*.ascii
*.html
*.pdf
grohtml*
doc.tar.gz
@ -1,42 +0,0 @@
all: ug.pdf ug.html

TTYOPT=-rPO=4m -rLL=72m
PDFOPT=-dpaper=a4 -rPO=4c -rLL=13c
#MMOPT=-dpaper=a4 -rpo=5c -rll=13c
PREPROC=-k -t -p -R
POSTPROC=
REGISTERS=-dcurdate="`date '+%Y-%m-%d'`"
REGISTERS+=-dgitcommit="`git rev-parse HEAD`"

PREPROC+=$(REGISTERS)
HTML_OPT=$(PREPROC) -P-Dimg -P-i120 -Thtml
# Embed fonts?
#POSTPROC+=-P -e

blackbox.pdf: blackbox.ms Makefile
	REFER=ref.i groff -ms $(PREPROC) -dpaper=a4 -rPO=2c -rLL=17c -Tpdf $< > $@

%.html: %.ms Makefile
	mkdir -p img
	REFER=ref.i groff -ms -mwww $(HTML_OPT) $< > $@
	echo $(HTML_OPT)
	sed -i '/<\/head>/i<link rel="stylesheet" href="s.css">' $@
	sed -i 's/^<a name="\([^"]*\)"><\/a>/<a name="\1" href="#\1">\§<\/a>/g' $@
	#sed -i '/<h1 /,/<hr>/s/^<a href="#[0-9]\+\.[0-9]\+\.[0-9]\+.*//' $@
	sed -i '/<h1 /,/<hr>/s/^<a href="#[0-9]\+\.[0-9]\+.*//' $@

%.pdf: %.ms Makefile
	REFER=ref.i groff -ms -mwww $(PREPROC) $(PDFOPT) -Tpdf $< > $@
	-killall -HUP mupdf

%.utf8: %.ms
	REFER=ref.i groff -ms -mwww $(PREPROC) $(TTYOPT) -Tutf8 $^ > $@

%.ascii: %.ms
	REFER=ref.i groff -ms -mwww -c $(PREPROC) $(TTYOPT) -Tascii $^ > $@

doc.tar.gz: ug.pdf ug.html s.css
	tar czf $@ $^ img s.css

clean:
	rm -rf img ug.pdf ug.html doc.tar.gz
@ -1,40 +0,0 @@
.\" Use helvetica family
.fam H
.PS
moveht=0.1
boxwid=1
sht=boxht + 0.1
hspace = boxwid + 0.2
right
G: [
	boxwid=1
	extrawid=1.8
	right
	A: box "nix"; arrow;
	B1: box wid extrawid "App source code" "PM branch, defines...";
	move to (A.sw.x, A.y - boxht - moveht)
	A: box "garlic/nix"; arrow;
	B2: box wid extrawid "App run config" "Input size, algorithm...";
	move to (A.sw.x, A.y - boxht - moveht)
	A: box "garlic/nix"; arrow;
	B3: box wid extrawid "Build config" "MPI impl, O3, CC version...";
	move to (A.sw.x, A.y - boxht - moveht)
	A: box "garlic/nix"; arrow;
	B4: box wid extrawid "Run config" "Nodes, affinity";
	move to (A.sw.x, A.y - boxht - moveht)
	A: box "MN4"; arrow;
	B5: box wid extrawid "Hardware" "Cache size, intercomm...";
]
movewid=1
move
circlerad=0.4
E: circle "Execution"
arrow
box "Result"
rspline = 0.5
arrow from G.B1.e to E chop 0 chop circlerad
arrow from G.B2.e to E chop 0 chop circlerad
arrow from G.B3.e to E chop 0 chop circlerad
arrow from G.B4.e to E chop 0 chop circlerad
arrow from G.B5.e to E chop 0 chop circlerad
.PE
@ -1,22 +0,0 @@
.\".fam CMU
.\".TL
.\"Garlic: git branch name
.\".AU
.\"Rodrigo Arias Mallo
.\".AI
.\"Barcelona Supercomputing Center
.\"#####################################################################
.nr GROWPS 3
.nr PSINCR 1.5p
.\".nr PD 0.5m
.nr PI 2m
\".2C
.\"#####################################################################
\".NH 1
\"Instructions
\".LP
\"To name a branch of a program, please follow the flowchart below.
.ps -2
.PS 5/25.4
copy "gitbranch.pic"
.PE
@ -1,254 +0,0 @@
.TL
Garlic: the execution pipeline
.AU
Rodrigo Arias Mallo
.AI
Barcelona Supercomputing Center
.AB
.LP
This document covers the execution of experiments in the Garlic
benchmark, which are performed under strict conditions. The several
stages of the execution are documented so the experimenter can have a
global overview of how the benchmark runs under the hood.
The results of the experiment are stored in a known path to be used in
subsequent processing steps.
.AE
.\"#####################################################################
.nr GROWPS 3
.nr PSINCR 1.5p
.\".nr PD 0.5m
.nr PI 2m
\".2C
.\"#####################################################################
.NH 1
Introduction
.LP
Every experiment in the Garlic
benchmark is controlled by a single
.I nix
file placed in the
.CW garlic/exp
subdirectory.
Experiments are formed by several
.I "experimental units"
or simply
.I units .
A unit is the result of one unique configuration of the experiment
(typically the cartesian product of all factors) and
consists of several shell scripts executed sequentially to set up the
.I "execution environment" ,
which finally launch the actual program being analyzed.
The scripts that prepare the environment and the program itself are
called the
.I stages
of the execution and altogether form the
.I "execution pipeline"
or simply the
.I pipeline .
The experimenter must know all the stages involved in the pipeline in
good detail, as they have a large impact on the execution.
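.PP
As an illustration only (the file below is a hypothetical sketch, not an
actual garlic experiment and not its real API), a list of units can be
thought of as one instantiation of the application per configuration of
a factor, here the
.CW blocksize
parameter of the nbody app:
.DS L
\fC
# Hypothetical sketch: one unit per configuration of the blocksize
# factor; the real experiments build units through the garlic stages
# instead of calling the derivation directly.
{ callPackage }:
let
  configs = [ { blocksize = 1024; } { blocksize = 2048; } ];
in map (conf: callPackage ./nbody/default.nix {
  inherit (conf) blocksize;
}) configs
\fP
.DE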
.PP
Additionally, the execution time is affected by the target machine on
which the experiments run. The software used for the benchmark is
carefully configured and tuned for the hardware used in the execution;
in particular, the experiments are designed to run on the MareNostrum 4
cluster with the SLURM workload manager and the Omni-Path
interconnection network. In the future we plan to add
support for other clusters in order to execute the experiments on other
machines.
.\"#####################################################################
.NH 1
Isolation
.LP
The benchmark is designed so that both the compilation of every software
package and the execution of the experiment are performed under strict
conditions. We can ensure that two executions of the same experiment are
actually running the same program in the same software environment.
.PP
All the software used by an experiment is included in the
.I "nix store"
which is, by convention, located at the
.CW /nix
directory. Unfortunately, it is common for libraries to try to load
software from other paths like
.CW /usr
or
.CW /lib .
It is also common that configuration files are loaded from
.CW /etc
and from the home directory of the user that runs the experiment.
Additionally, some environment variables are recognized by the libraries
used in the experiment, which change their behavior. As we cannot
control the software and configuration files in those directories, we
could not guarantee that the execution behaves as intended.
.PP
In order to avoid this problem, we create a
.I sandbox
where only the files in the nix store are available (with some other
exceptions). Therefore, even if the libraries try to access any path
outside the nix store, they will find that the files are not there
anymore. Additionally, the environment variables are cleared before
entering the environment (with some exceptions as well).
.\"#####################################################################
.NH 1
Execution pipeline
.LP
Several predefined stages form the
.I standard
execution pipeline and are defined in the
.I stdPipeline
array. The standard pipeline prepares the resources and the environment
to run a program (usually in parallel) in the compute nodes. It is
divided into two main parts:
connecting to the target machine to submit a job and executing the job.
Finally, the complete execution pipeline ends by running the actual
program, which is not part of the standard pipeline, as it must be
defined differently for each program.
.NH 2
Job submission
.LP
Some stages are involved in the job submission: the
.I trebuchet
stage connects via
.I ssh
to the target machine and executes the next stage there. Once on the
target machine, the
.I runexp
stage computes the output path to store the experiment results, using
the user name on the target machine, and changes the working directory
there.
In MareNostrum 4 the output path is at
.CW /gpfs/projects/bsc15/garlic/$user/out .
Then the
.I isolate
stage is executed to enter the sandbox and the
.I experiment
stage begins, which creates a directory to store the experiment output,
and launches several
.I unit
stages.
.PP
Each unit executes an
.I sbatch
stage which runs the
.I sbatch(1)
program with a job script that simply calls the next stage. The
sbatch program internally reads the
.CW /etc/slurm/slurm.conf
file from outside the sandbox, so we must explicitly allow this file to
be available, as well as the
.I munge
socket used for authentication by the SLURM daemon. Once the jobs are
submitted to SLURM, the experiment stage ends and the trebuchet finishes
the execution. The jobs will be queued for execution without any other
intervention from the user.
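.PP
As a rough sketch only (the resource directives shown are generic
sbatch options and the stage name is hypothetical, not the exact script
garlic generates), such a job script is little more than a launcher for
the next stage:
.DS L
\fC
#!/bin/sh
#SBATCH --nodes=2
#SBATCH --time=01:00:00
# "next-stage" is a placeholder: the real script calls the following
# stage of the pipeline (isolate, then control, ...).
exec ./next-stage
\fP
.DE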
.PP
The rationale behind running sbatch from the sandbox is that the
options provided in environment variables override the options from the
job script. Therefore, we avoid this problem by running sbatch from the
sandbox, where the interfering environment variables are removed. The
sbatch program is also provided in the
.I "nix store" ,
with a version compatible with the SLURM daemon running in the target
machine.
.NH 2
Job execution
.LP
Once a unit job has been selected for execution, SLURM
allocates the resources (usually several nodes) and then selects one of
the nodes to run the job script: it is not executed in parallel yet.
The job script runs from a child process forked from one of the SLURM
daemon processes, which are outside the sandbox. Therefore, we first run
the
.I isolate
stage
to enter the sandbox again.
.PP
The next stage is called
.I control
and determines if enough data has been generated by the experiment unit
or if it should continue repeating the execution. At the current time,
it is only implemented as a simple loop that runs the next stage a fixed
number of times (by default, it is repeated 30 times).
.PP
The following stage is
.I srun
which launches several copies of the next stage to run in
parallel (when using more than one task). It runs one copy per task,
effectively creating one process per task. The CPU affinity is
configured by the
.I --cpu-bind
parameter, and it is important to set it correctly (see more details in
the
.I srun(1)
manual). Appending the
.I verbose
value to the cpu bind option causes srun to print the assigned affinity
of each task, which is very valuable when examining the execution log.
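.PP
For example, a binding such as the following (the binding type is just
an illustration; the appropriate value depends on the experiment) makes
srun report the affinity of every task in the log:
.DS L
\fC
srun --cpu-bind=cores,verbose ./program
\fP
.DE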
.PP
The mechanism by which srun executes multiple processes is the same used
by sbatch: it forks from a SLURM daemon running on the compute nodes.
Therefore, the execution begins outside the sandbox. The next stage is
.I isolate
which enters the sandbox again in every task. All remaining stages now
run in parallel.
.\" ###################################################################
.NH 2
The program
.LP
At this point in the execution, the standard pipeline has been
completely executed, and we are ready to run the actual program that is
the subject of the experiment. Usually, programs require some arguments
to be passed on the command line. The
.I exec
stage sets the arguments (and optionally some environment variables) and
executes the last stage, the
.I program .
.PP
The experimenters are required to define these last stages, as they
define the specific way in which the program must be executed.
Additional stages may be included before or after the program run, so
they can perform additional steps.
.\" ###################################################################
.NH 2
Stage overview
.LP
The complete execution pipeline using the standard pipeline is shown in
Table 1, together with some properties of the execution stages.
.KF
.TS
center;
lB cB cB cB cB cB
l c c c c c.
_
Stage	Target	Safe	Copies	User	Std
_
trebuchet	xeon	no	no	yes	yes
runexp	login	no	no	yes	yes
isolate	login	no	no	no	yes
experiment	login	yes	no	no	yes
unit	login	yes	no	no	yes
sbatch	login	yes	no	no	yes
_
isolate	comp	no	no	no	yes
control	comp	yes	no	no	yes
srun	comp	yes	no	no	yes
isolate	comp	no	yes	no	yes
_
exec	comp	yes	yes	no	no
program	comp	yes	yes	no	no
_
.TE
.QS
.B "Table 1" :
The stages of a complete execution pipeline. The
.B target
column determines where the stage is running,
.B safe
states if the stage begins the execution inside the sandbox,
.B user
if it can be executed directly by the user,
.B copies
if there are several instances running in parallel and
.B std
if it is part of the standard execution pipeline.
.QE
.KE
@ -1,152 +0,0 @@
#.ps -3
#.fam CMU
#.PS 4.5/25.4 # Scale drawing to 20/25.4 in =
# = 20/25.4[in]/25.4[mm/in] = 20 mm
# FLOWCHART - Basic flow chart blocks.
scale=25.4 #Scale units from inches to mm
csize=2.0 #Cell size in mm
pstricks=0
dx=0; dy=2;
define process
{[
	box $1;
]}
# decision(): rhomboid -> if block
define decision {[
	boxwid=boxwid*1.2
	boxht=boxht*1.2
	B: box invis $1;
	line from B.n to B.e to B.s to B.w to B.n;
]}

#--- END OF MACROS ---
boxwid=30
fillval=1
circlerad=10
down
START: circle "Start"
arrow
D_MPIB: decision("MPI-based?")
arrow " Yes" ljust
D_TAMPI: decision("TAMPI?")
arrow " Yes" ljust
TAMPI: process("\fB+tampi\fP")
right
move to D_TAMPI.e
arrow "No" above
D_MPI: decision("MPI?")
down
move to D_MPI.s
arrow " Yes" ljust
MPI: process("\fB+mpi\fP")
move to TAMPI.s
A_TAMPI: arrow linewid
line from MPI.s to MPI.c - (0,boxht) \
	to A_TAMPI.c
circle at A_TAMPI.c rad 0.7 filled
move at A_TAMPI
D_ISEND: decision("MPI_Isend()?")
arrow " Yes" ljust
ISEND: process("\fB+isend\fP")
A_ISEND: arrow
right
move to D_ISEND.e
arrow "No" above
D_SEND: decision("MPI_Send()?")
down
move to D_SEND.s
arrow " Yes" ljust
SEND: process("\fB+send\fP")
right
move to D_SEND.e
arrow "No" above
D_RMA: decision("MPI_Get()?")
down
move to D_RMA.s
arrow " Yes" ljust
RMA: process("\fB+rma\fP")
line "No" above from D_MPIB.w to D_MPIB.w - (boxwid,0)
line to (D_MPIB.w.x-boxwid, A_ISEND.c.y) \
	to A_ISEND.c
line from SEND.s to SEND.c - (0,boxht) \
	to A_ISEND.c
line from RMA.s to RMA.c - (0,boxht) \
	to SEND.c - (0,boxht)
circle at A_ISEND.c rad 0.7 filled
move at A_ISEND
D_MT: decision("multithread?")
arrow " Yes" ljust
D_OMP: decision("OpenMP?")
arrow " Yes" ljust
OMP: process("\fB+omp\fP")
right
move to D_OMP.e
arrow "No" above
D_OSS: decision("OmpSs-2?")
down
move to D_OSS.s
arrow " Yes" ljust
OSS: process("\fB+oss\fP")
down
move to OMP.s
A_OMP: arrow
circle at A_OMP.c rad 0.7 filled
line from OSS.s to OSS.c - (0,boxht) \
	to A_OMP.c
move to A_OMP.s
D_FJ: decision("fork-join?")
arrow " Yes" ljust
FJ: process("\fB+fork\fP")
right
move to D_FJ.e
arrow "No" above
D_TASKFOR: decision("task for?")
arrow "No" above
down
move to D_TASKFOR.s
arrow " Yes" ljust
TASKFOR: process("\fB+taskfor\fP")
right
move to D_TASKFOR.e
arrow "No" above
D_TASK: decision("task model?")
down
move to D_TASK.s
arrow " Yes" ljust
TASK: process("\fB+task\fP")
move to FJ.s
A_FJ: arrow
circle at A_FJ.c rad 0.7 filled
line from TASKFOR.s to TASKFOR.c - (0,boxht) \
	to A_FJ.c
line from TASK.s to TASK.c - (0,boxht) \
	to TASKFOR.c - (0,boxht)
left
move to OMP.c - (boxwid,0)
SEQ: process("\fB+seq\fP")
line "No" above from D_MT.w to (SEQ.x, D_MT.w.y)
arrow to SEQ.n
line from SEQ.s to (SEQ.s.x, A_FJ.c.y) to A_FJ.c
down
move to A_FJ.s
D_SIMD: decision("SIMD opt.?")
move to D_SIMD.e
right
arrow "Yes" above
SIMD: process("\fB+simd\fP")
down
move to D_SIMD.s
arrow " No" ljust
END: circle "End"
circle radius circlerad*0.9 at END
arrow from SIMD.s to (SIMD.x, END.y) to END.e

# Error lines
ERR: circle "Error" at (TASK.x+boxwid, END.y)
circle radius circlerad*0.9 at ERR
line "No" above from D_TASK.e to (ERR.n.x,D_TASK.e.y)
line "No" above from D_OSS.e to (ERR.n.x,D_OSS.e.y)
line "No" above from D_RMA.e to (ERR.n.x,D_RMA.e.y)
line "No" above from D_MPI.e to (ERR.n.x,D_MPI.e.y)
arrow to ERR.n
#.PE
garlic/doc/pp.ms
@ -1,256 +0,0 @@
|
||||
.TL
|
||||
Garlic: the postprocess pipeline
|
||||
.AU
|
||||
Rodrigo Arias Mallo
|
||||
.AI
|
||||
Barcelona Supercomputing Center
|
||||
.AB
|
||||
.LP
|
||||
This document covers the format used to store the results of the
|
||||
execution of experiments and the postprocess steps used to generate a
|
||||
set of figures from the results to present the data. The several stages
|
||||
of the postprocess pipeline are documented to provide a general picture.
|
||||
.AE
|
||||
.\"#####################################################################
|
||||
.nr GROWPS 3
|
||||
.nr PSINCR 1.5p
|
||||
.\".nr PD 0.5m
|
||||
.nr PI 2m
|
||||
.\".2C
|
||||
.R1
|
||||
bracket-label " [" ] ", "
|
||||
accumulate
|
||||
move-punctuation
|
||||
.R2
|
||||
.\"#####################################################################
|
||||
.NH 1
|
||||
Introduction
|
||||
.LP
|
||||
After the correct execution of an experiment the results are stored for
|
||||
further investigation. Typically, the execution time or other
|
||||
quantities are measured and presented later in a figure (generally a
|
||||
plot or a table). The
|
||||
.I "postprocess pipeline"
|
||||
consists of all the steps required to create a set of figures from the
|
||||
results. Similarly to the execution pipeline where several stages run
|
||||
sequentially,
|
||||
.[
|
||||
garlic execution
|
||||
.]
|
||||
the postprocess pipeline is also formed by multiple stages executed
|
||||
in order.
|
||||
.PP
|
||||
The rationale behind dividing execution and postprocess is
|
||||
that usually the experiments are costly to run (they take a long time to
|
||||
complete) while generating a figure requires less time. Refining the
|
||||
figures multiple times reusing the same experimental results doesn't
|
||||
require the execution of the complete experiment, so the experimenter
|
||||
can try multiple ways to present the data without a long wait.
|
||||
.NH 1
|
||||
Results
|
||||
.LP
|
||||
The results are generated on the same
|
||||
.I "target"
|
||||
machine where the experiment is executed and are stored in the garlic
|
||||
\fCout\fP
|
||||
directory, organized into a tree structure following the experiment
|
||||
name, the unit name and the run number (governed by the
|
||||
.I control
|
||||
stage):
|
||||
.DS L
|
||||
\fC
|
||||
|-- 6lp88vlj7m8hvvhpfz25p5mvvg7ycflb-experiment
|
||||
| |-- 8lpmmfix52a8v7kfzkzih655awchl9f1-unit
|
||||
| | |-- 1
|
||||
| | | |-- stderr.log
|
||||
| | | |-- stdout.log
|
||||
| | | |-- ...
|
||||
| | |-- 2
|
||||
\&...
|
||||
\fP
|
||||
.DE
|
||||
In order to provide easier access to the results, an index is also
|
||||
created by taking the
|
||||
.I expName
|
||||
and
|
||||
.I unitName
|
||||
attributes (defined in the experiment configuration) and linking them to
|
||||
the appropriate experiment and unit directories. These links are
|
||||
overwritten by the last experiment with the same names so they are only
|
||||
valid for the last execution. The out and index directories are
|
||||
placed into a per-user directory, as we cannot guarantee the complete
|
||||
execution of each unit when multiple users share units.
|
||||
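.PP
As an illustrative sketch (the
.I expName
and
.I unitName
values below are hypothetical; only the hash prefixes are taken from the
tree above), an index entry is a link of the form:
.DS L
\fC
index/nbody/blocksize1024 -> out/6lp88vl...-experiment/8lpmmfi...-unit
\fP
.DE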
.PP
|
||||
The messages printed to
|
||||
.I stdout
|
||||
and
|
||||
.I stderr
|
||||
are stored in the log files with the same name inside each run
|
||||
directory. Additional data is sometimes generated by the experiments,
|
||||
and is found in each run directory. As the generated data can be very
|
||||
large, it is ignored by default when fetching the results.
|
||||
.NH 1
|
||||
Fetching the results
|
||||
.LP
|
||||
Consider a program of interest for which an experiment has been designed to
|
||||
measure some properties that the experimenter wants to present in a
|
||||
visual plot. When the experiment is launched, the execution
|
||||
pipeline (EP) is completely executed and it will generate some
|
||||
results. In this scenario, the execution pipeline depends on the
|
||||
program\[em]any changes in the program will cause nix to build the
|
||||
pipeline again
|
||||
using the updated program. The results in turn depend on the
|
||||
execution pipeline, the postprocess pipeline (PP) depends on the results,
|
||||
and the plot on the PP output. This chain of dependencies is shown in the
|
||||
following dependency graph:
|
||||
.ie t \{\
|
||||
.PS
|
||||
circlerad=0.22;
|
||||
linewid=0.3;
|
||||
right
|
||||
circle "Prog"
|
||||
arrow
|
||||
circle "EP"
|
||||
arrow
|
||||
circle "Result"
|
||||
arrow
|
||||
circle "PP"
|
||||
arrow
|
||||
circle "Plot"
|
||||
.PE
|
||||
.\}
|
||||
.el \{\
|
||||
.nf
|
||||
|
||||
Prog ---> EP ---> Result ---> PP ---> Plot
|
||||
|
||||
.fi
|
||||
.\}
|
||||
Ideally, the dependencies should be handled by nix, so it can detect any
|
||||
change and rebuild the necessary parts automatically. Unfortunately, nix
|
||||
is not able to build the result as a derivation directly, as it requires
|
||||
access to the
|
||||
.I "target"
|
||||
machine with several user accounts. In order to let several users reuse
|
||||
the same results from a shared cache, we would like to use the
|
||||
.I "nix store" .
|
||||
.PP
|
||||
To generate the results from the
|
||||
experiment, we add some extra steps that must be executed manually:
|
||||
.PS
|
||||
circle "Prog"
|
||||
arrow
|
||||
diag=linewid + circlerad;
|
||||
far=circlerad*3 + linewid*4
|
||||
E: circle "EP"
|
||||
R: circle "Result" at E + (far,0)
|
||||
RUN: circle "Run" at E + (diag,-diag) dashed
|
||||
FETCH: circle "Fetch" at R + (-diag,-diag) dashed
|
||||
move to R.e
|
||||
arrow
|
||||
P: circle "PP"
|
||||
arrow
|
||||
circle "Plot"
|
||||
arrow dashed from E to RUN chop
|
||||
arrow dashed from RUN to FETCH chop
|
||||
arrow dashed from FETCH to R chop
|
||||
arrow from E to R chop
|
||||
.PE
|
||||
The run and fetch steps are provided by the helper tool
|
||||
.I "garlic(1)" ,
|
||||
which launches the experiment using the user credentials at the
|
||||
.I "target"
|
||||
machine and then fetches the results, placing them in a directory known
|
||||
by nix. When the result derivation needs to be built, nix will look in
|
||||
this directory for the results of the execution. If the directory is not
|
||||
found, a message is printed suggesting that the user launch the experiment
|
||||
and the build process is stopped. When the result is successfully built
|
||||
by any user, it is stored in the
|
||||
.I "nix store"
|
||||
and won't need to be rebuilt until the experiment changes, as
|
||||
the hash only depends on the experiment and not on the contents of the
|
||||
results.
|
||||
.PP
|
||||
Notice that this mechanism violates the deterministic nature of the nix
|
||||
store, as from a given input (the experiment) we can generate different
|
||||
outputs (each result from different executions). We knowingly relaxed
|
||||
this restriction by providing a guarantee that the results are
|
||||
equivalent and there is no need to execute an experiment more than once.
|
||||
.PP
|
||||
To force the execution of an experiment you can use the
|
||||
.I rev
|
||||
attribute which is a number assigned to each experiment
|
||||
and can be incremented to create copies that only differ in that
|
||||
number. The experiment hash will change but the experiment will be the
|
||||
same, as long as the revision number is ignored throughout the execution
|
||||
stages.
|
||||
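.PP
For example (a minimal sketch, where every attribute except
.I rev
is illustrative), forcing a new execution amounts to bumping the number in
the experiment configuration:
.DS L
\fC
{
  expName = "nbody";
  rev = 2;    # increment to force a new execution of the same experiment
}
\fP
.DE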
.NH 1
|
||||
Postprocess stages
|
||||
.LP
|
||||
Once the results are completely generated in the
|
||||
.I "target"
|
||||
machine, several stages are required to build a set of figures:
|
||||
.PP
|
||||
.I fetch \[em]
|
||||
waits until all the experiment units are completed and then executes the
|
||||
next stage. This stage is performed by the
|
||||
.I garlic(1)
|
||||
tool using the
|
||||
.I -F
|
||||
option and also reports the current state of the execution.
|
||||
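.PP
A fetch invocation is sketched below; the way the experiment is referred to
on the command line is illustrative, only the
.I -F
flag is taken from this description:
.DS L
\fC
garlic -F <experiment>
\fP
.DE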
.PP
|
||||
.I store \[em]
|
||||
copies from the
|
||||
.I target
|
||||
machine into the nix store all log files generated by the experiment,
|
||||
keeping the same directory structure. It tracks the execution state of
|
||||
each unit and only copies the results once the experiment is complete.
|
||||
Other files are ignored as they are often very large and not required
|
||||
for the subsequent stages.
|
||||
.PP
|
||||
.I timetable \[em]
|
||||
converts the results of the experiment into an NDJSON file with one
|
||||
line per run for each unit. Each line is a valid JSON object, containing
|
||||
the
|
||||
.I exp ,
|
||||
.I unit
|
||||
and
|
||||
.I run
|
||||
keys and the unit configuration (as a JSON object) in the
|
||||
.I config
|
||||
key. The execution time is captured from the standard output and is
|
||||
added in the
|
||||
.I time
|
||||
key.
|
||||
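.PP
An illustrative line of such a dataset is shown below, wrapped to fit the
page; the values are hypothetical and only the key names are taken from
this description:
.DS L
\fC
{"exp":"nbody", "unit":"nbody.bs1024", "run":1,
 "config":{"blocksize":1024}, "time":12.34}
\fP
.DE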
.PP
|
||||
.I merge \[em]
|
||||
one or more timetable datasets are joined, by simply concatenating them.
|
||||
This step allows building one dataset to compare multiple experiments in
|
||||
the same figure.
|
||||
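.PP
Conceptually, the merge is equivalent to the following sketch (the file
names are illustrative):
.DS L
\fC
cat experiment-a.ndjson experiment-b.ndjson > merged.ndjson
\fP
.DE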
.PP
|
||||
.I rPlot \[em]
|
||||
one or more figures are generated by a single R script
|
||||
.[
|
||||
r cookbook
|
||||
.]
|
||||
which takes as input the previously generated dataset.
|
||||
The path of the dataset is recorded in the figure as well, which
|
||||
contains enough information to determine all the stages in the execution
|
||||
and postprocess pipelines.
|
||||
.SH 1
|
||||
Appendix A: Current setup
|
||||
.LP
|
||||
As of this moment, the
|
||||
.I build
|
||||
machine which contains the nix store is
|
||||
.I xeon07
|
||||
and the
|
||||
.I "target"
|
||||
machine used to run the experiments is MareNostrum 4, with the
|
||||
.I output
|
||||
directory placed at
|
||||
.CW /gpfs/projects/bsc15/garlic .
|
||||
By default, the experiment results are never deleted from the
|
||||
.I target
|
||||
so you may want to remove the ones already stored in the nix store to
|
||||
free space.
|
@ -1,9 +0,0 @@
|
||||
%A Rodrigo Arias Mallo
|
||||
%D 2020
|
||||
%T Garlic: the execution pipeline
|
||||
|
||||
%A Winston Chang
|
||||
%T R Graphics Cookbook: Practical Recipes for Visualizing Data
|
||||
%D 2020
|
||||
%I O'Reilly Media
|
||||
%O 2nd edition
|
@ -1,19 +0,0 @@
|
||||
html {
|
||||
line-height: 1.6;
|
||||
margin-bottom: 50px;
|
||||
padding-bottom: 80px;
|
||||
}
|
||||
|
||||
body {
|
||||
max-width: 700px;
|
||||
text-align: justify;
|
||||
margin:0 auto;
|
||||
}
|
||||
|
||||
pre {
|
||||
overflow: auto;
|
||||
display: block;
|
||||
padding: 3px 3px;
|
||||
line-height: 1.4;
|
||||
background-color: #eeeeee;
|
||||
}
|
File diff suppressed because it is too large
@ -1,181 +0,0 @@
|
||||
.\"usage: NS title
|
||||
.EQ
|
||||
delim $$
|
||||
.EN
|
||||
.de NS \" New Slide
|
||||
.SK
|
||||
.ev gp-top
|
||||
.fam H
|
||||
.vs 1.5m
|
||||
.ll \\n[@ll]u
|
||||
.lt \\n[@ll]u
|
||||
.rs
|
||||
.sp 2v
|
||||
.ps +5
|
||||
\\$*
|
||||
.ps -5
|
||||
.sp 1.5v
|
||||
.br
|
||||
.ev
|
||||
..
|
||||
.\" Remove headers
|
||||
.de TP
|
||||
..
|
||||
.\" Bigger page number in footer
|
||||
.de EOP
|
||||
.fam H
|
||||
.ps +2
|
||||
. ie o .tl \\*[pg*odd-footer]
|
||||
. el .tl \\*[pg*even-footer]
|
||||
. ds hd*format \\g[P]
|
||||
. af P 0
|
||||
. ie (\\n[P]=1)&(\\n[N]=1) .tl \\*[pg*header]
|
||||
. el .tl \\*[pg*footer]
|
||||
. af P \\*[hd*format]
|
||||
. tl ''\\*[Pg_type!\\n[@copy_type]]''
|
||||
..
|
||||
.\" Remove top and bottom margin
|
||||
.VM 0 0
|
||||
.\"
|
||||
.\"
|
||||
.\" Set virtual page dimensions for a physical size of 16x12 cm
|
||||
.PGFORM 14c 12c 1c 1
|
||||
.ND "January 14, 2021"
|
||||
.\" .vs 1.5m
|
||||
.S C 1.5m
|
||||
.fam H
|
||||
.\".PH "'cosas'''"
|
||||
.COVER ms
|
||||
.de cov@print-date
|
||||
.DS C
|
||||
.fam H
|
||||
.B
|
||||
\\*[cov*new-date]
|
||||
.DE
|
||||
..
|
||||
.TL
|
||||
.ps 20
|
||||
.fam H
|
||||
Garlic experiments
|
||||
.AF "Barcelona Supercomputing Center"
|
||||
.AU "Rodrigo Arias Mallo"
|
||||
.COVEND
|
||||
.PF "'''%'"
|
||||
.\" Turn off justification
|
||||
.SA 0
|
||||
.\".PF '''%'
|
||||
.\"==================================================================
|
||||
.NS "Approach 1"
|
||||
This was the approach proposed for hybrid PMs:
|
||||
.BL
|
||||
.LI
|
||||
Perform a granularity experiment with a \fIreasonable\fP problem size.
|
||||
.LI
|
||||
Take the best blocksize.
|
||||
.LI
|
||||
Analyze strong and weak scaling with that blocksize.
|
||||
.LI
|
||||
Plot speedup and efficiency comparing multiple PMs.
|
||||
.LE 1
|
||||
The main problem is that it may lead to \fBbogus comparisons\fP.
|
||||
Additionally, there is no guarantee that the best blocksize is the one
|
||||
that performs better with more resources.
|
||||
.\"==================================================================
|
||||
.NS "Approach 2"
|
||||
We want to measure scalability of the application \fBonly\fP, not mixed
|
||||
with runtime overhead or lack of parallelism.
|
||||
.P
|
||||
We define \fBsaturation\fP as the state of an execution that allows a
|
||||
program to potentially use all the resources (the name comes from the
|
||||
transistor state, when current flows freely).
|
||||
.P
|
||||
Design a new experiment which tests multiple blocksizes and multiple
|
||||
input sizes to find these states: \fBthe saturation experiment\fP.
|
||||
.P
|
||||
Begin with small problems and increase the size, so you get to the
|
||||
answer quickly.
|
||||
.\"==================================================================
|
||||
.NS "Saturation experiment"
|
||||
.2C
|
||||
\X'pdf: pdfpic sat.png.tk.pdf -R 7c'
|
||||
.NCOL
|
||||
.S -1 -3
|
||||
.BL 1m
|
||||
.LI
|
||||
The objective is to find the minimum input size that allows us to get
|
||||
meaningful scalability results.
|
||||
.LI
|
||||
More precisely, a unit is in \fBsaturation state\fP if the median time
|
||||
is below the \fBsaturation time limit\fP, currently set to 110% of the minimum
|
||||
median time (red dashed lines).
|
||||
.LI
|
||||
An input size is in \fBsaturation zone\fP if it allows at least K=3
|
||||
consecutive points in the saturation state.
|
||||
.LI
|
||||
With fewer than 512 particles/CPU (green line) we cannot be sure that the
|
||||
performance is not impacted by the runtime overhead or lack of
|
||||
parallelism.
|
||||
.LE
|
||||
.S P P
|
||||
.1C
|
||||
.\"==================================================================
|
||||
.NS "Experiment space"
|
||||
.2C
|
||||
\X'pdf: pdfpic scaling-region.svg.tk.pdf -L 7c'
|
||||
.NCOL
|
||||
.S -1 -3
|
||||
.BL 1m
|
||||
.LI
|
||||
\fBSaturation limit\fP: small tasks cannot be solved without overhead
|
||||
from the runtime, no matter the blocksize.
|
||||
.LI
|
||||
Different limits for OmpSs-2 and OpenMP.
|
||||
.LI
|
||||
Experiment A will show the scaling of the app while in the saturation
|
||||
zone.
|
||||
.LI
|
||||
Experiment B will show that OpenMP scales badly in the last 2 points.
|
||||
.LI
|
||||
Experiment C will show that at some point both OpenMP and OmpSs-2 scale
|
||||
badly.
|
||||
.LE
|
||||
.S P P
|
||||
.1C
|
||||
.\"==================================================================
|
||||
.NS "Experiment space: experiment C"
|
||||
.2C
|
||||
\X'pdf: pdfpic scalability.svg.tk.pdf -L 7c'
|
||||
.NCOL
|
||||
.BL 1m
|
||||
.LI
|
||||
Experiment C will show a difference in performance when getting close
|
||||
to the saturation limit.
|
||||
.LI
|
||||
We could say that OmpSs-2 introduces less overhead and therefore allows
|
||||
better scalability.
|
||||
.LE
|
||||
.1C
|
||||
.\"==================================================================
|
||||
.NS "Reproducibility"
|
||||
How easily can we obtain the same results? Three properties R0 < R1 < R2 (no common nomenclature yet!):
|
||||
.BL 1m
|
||||
.LI
|
||||
R0: \fBSame\fP humans on the \fBsame\fP machine obtain the same result
|
||||
.LI
|
||||
R1: \fBDifferent\fP humans on the \fBsame\fP machine obtain the same result
|
||||
.LI
|
||||
R2: \fBDifferent\fP humans on a \fBdifferent\fP machine obtain the same result
|
||||
.LE
|
||||
.P
|
||||
Garlic provides two types of properties: one for software and one for experimental
|
||||
results:
|
||||
.BL 1m
|
||||
.LI
|
||||
Software is R2: anyone can get the exact same software, on any
|
||||
machine
|
||||
.LI
|
||||
Experimental results are R1: you cannot change the machine MN4 (yet)
|
||||
.LE
|
||||
.P
|
||||
Same experimental result means that the mean of your results is in the confidence
|
||||
interval of our results \fBand the relative standard deviation is < 1%\fP.
|
Binary file not shown.
@ -1,29 +0,0 @@
|
||||
GENFIG=scaling-region.svg.tk.pdf \
|
||||
sat.png.tk.pdf \
|
||||
scalability.svg.tk.pdf
|
||||
GENPDF=2.pdf 3.pdf
|
||||
|
||||
all: $(GENPDF)
|
||||
|
||||
keep_figs: $(GENFIG)
|
||||
|
||||
%.svg.pdf: %.svg Makefile
|
||||
inkscape $< --export-pdf=$@
|
||||
|
||||
%.png.pdf: %.png Makefile
|
||||
gm convert $< -density 30 $@
|
||||
|
||||
%.tk.pdf: %.pdf Makefile
|
||||
pdftk $< output $@
|
||||
|
||||
%.pdf: %.mm $(GENFIG)
|
||||
groff -Tpdf -e -t -p -P-p12c,16c -mm $< > $@
|
||||
-killall -HUP mupdf
|
||||
|
||||
watch:
|
||||
while [ 1 ]; do inotifywait -e modify *; make; done
|
||||
|
||||
.PRECIOUS: *.svg.pdf *.tk.pdf
|
||||
|
||||
clean:
|
||||
rm -f $(GENFIG) $(GENPDF)
|
@ -1,479 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="1280"
|
||||
height="720"
|
||||
viewBox="0 0 338.66667 190.5"
|
||||
version="1.1"
|
||||
id="svg8"
|
||||
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
|
||||
sodipodi:docname="overview.svg">
|
||||
<defs
|
||||
id="defs2">
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect1043"
|
||||
is_visible="true"
|
||||
lpeversion="1" />
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect1015"
|
||||
is_visible="true"
|
||||
lpeversion="1" />
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect942"
|
||||
is_visible="true"
|
||||
lpeversion="0" />
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect946"
|
||||
is_visible="true"
|
||||
lpeversion="0" />
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect950"
|
||||
is_visible="true"
|
||||
lpeversion="0" />
|
||||
<inkscape:path-effect
|
||||
effect="spiro"
|
||||
id="path-effect954"
|
||||
is_visible="true"
|
||||
lpeversion="0" />
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="0.98994949"
|
||||
inkscape:cx="650.6739"
|
||||
inkscape:cy="347.57627"
|
||||
inkscape:document-units="mm"
|
||||
inkscape:current-layer="layer1"
|
||||
inkscape:document-rotation="0"
|
||||
showgrid="false"
|
||||
units="px"
|
||||
inkscape:window-width="1914"
|
||||
inkscape:window-height="1025"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="24"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:snap-bbox="true"
|
||||
inkscape:bbox-nodes="true">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid1071" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:11.2889px;line-height:1.25;font-family:'Linux Libertine';-inkscape-font-specification:'Linux Libertine, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="119.66053"
|
||||
y="23.383175"
|
||||
id="text12"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan10"
|
||||
x="119.66053"
|
||||
y="23.383175"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:11.2889px;font-family:sans-serif;-inkscape-font-specification:sans-serif;fill:#000000;stroke-width:0.264583">Garlic overview</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:'Linux Libertine';-inkscape-font-specification:'Linux Libertine, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="234.71022"
|
||||
y="112.26602"
|
||||
id="text1019"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1017"
|
||||
x="234.71022"
|
||||
y="112.26602"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:8.46667px;font-family:sans-serif;-inkscape-font-specification:sans-serif;stroke-width:0.264583">bscpkgs</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:7.05556px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.14818"
|
||||
y="99.37706"
|
||||
id="text1023"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1021"
|
||||
x="235.14818"
|
||||
y="99.37706"
|
||||
style="stroke-width:0.264583" /></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="22.981543"
|
||||
y="40.828518"
|
||||
id="text1027"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1025"
|
||||
x="22.981543"
|
||||
y="40.828518"
|
||||
style="font-size:8.46667px;stroke-width:0.264583">Isolation</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.41302"
|
||||
y="141.11386"
|
||||
id="text1031"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1029"
|
||||
x="235.41302"
|
||||
y="141.11386"
|
||||
style="stroke-width:0.264583">Apps</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="22.981543"
|
||||
y="98.77639"
|
||||
id="text1035"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1033"
|
||||
x="22.981543"
|
||||
y="98.77639"
|
||||
style="stroke-width:0.264583">Postprocess</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="22.981543"
|
||||
y="128.14102"
|
||||
id="text1039"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1037"
|
||||
x="22.981543"
|
||||
y="128.14102"
|
||||
style="stroke-width:0.264583">Replicable</tspan></text>
|
||||
<g
|
||||
id="g939"
|
||||
transform="matrix(0.83871913,0,0,0.83871913,126.82493,-113.88842)">
|
||||
<path
|
||||
style="opacity:1;fill:#71c837;fill-opacity:1;stroke:none;stroke-width:0.252538;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 35.855789,282.06376 c -4.85096,-1.86589 -9.321157,-5.5573 -12.052686,-9.95291 -1.42608,-2.29485 -2.550183,-5.13416 -3.142218,-7.93675 -0.468005,-2.21545 -0.584044,-5.74948 -0.261382,-7.96055 0.751234,-5.14793 3.001156,-9.88538 6.376496,-13.42641 1.064876,-1.11715 1.141312,-1.23333 0.967644,-1.47084 -0.180189,-0.24642 -0.218712,-0.22848 -0.867441,0.40409 -2.541844,2.4785 -4.781497,6.13785 -6.015517,9.8287 -2.151697,6.43554 -1.660797,12.92798 1.44394,19.09701 2.373529,4.71614 5.881378,8.24463 10.798715,10.86225 l 1.469982,0.78252 -0.947613,-0.075 c -1.18859,-0.0941 -3.165274,-0.59068 -4.535339,-1.13928 -2.14739,-0.85986 -4.899448,-2.77408 -7.344667,-5.10867 -4.3499,-4.1531 -7.205196,-9.05097 -8.004667,-13.7309 -0.27279,-1.59685 -0.278638,-4.42186 -0.0123,-5.94906 0.477293,-2.73701 1.939155,-6.11115 3.97591,-9.17685 2.546688,-3.83325 4.620429,-5.89727 10.387406,-10.3387 3.493318,-2.69038 4.736076,-3.72889 6.15496,-5.14339 3.568843,-3.55782 6.467152,-8.6731 7.339454,-12.95356 0.148482,-0.72861 0.269968,-1.50206 0.269968,-1.71878 0,-0.39331 0.0019,-0.39417 0.968851,-0.46738 1.365353,-0.10337 3.052225,-0.70711 4.744035,-1.69791 l 0.701582,-0.41089 0.0026,1.5061 c 0.0045,2.60243 0.476784,5.34744 1.339734,7.78693 1.52088,4.2994 4.18253,7.41948 11.62023,13.62161 4.276695,3.56624 7.746677,6.95204 9.117036,8.89585 2.259996,3.20572 3.348103,6.20096 3.568194,9.82215 0.595117,9.79157 -3.768748,17.911 -12.44142,23.14856 -1.296532,0.78299 -3.637803,1.90233 -3.985585,1.90546 -0.110249,10e-4 0.150921,-0.21006 0.580377,-0.46902 5.436653,-3.27826 9.475507,-9.15074 10.803453,-15.70819 0.267552,-1.32118 0.309211,-1.92299 0.309211,-4.46686 0,-2.53757 -0.042,-3.14727 -0.306856,-4.45455 -1.060807,-5.23597 -3.854233,-10.15023 -7.65502,-13.4669 -0.618908,-0.54008 -1.155922,-0.98329 -1.193366,-0.98492 -0.03745,-0.002 -0.139946,0.11212 -0.227777,0.25276 -0.141502,0.22658 -0.07607,0.32023 0.574336,0.82202 2.259362,1.74308 4.915725,5.31474 6.310574,8.48498 2.018451,4.58759 2.502477,9.49237 1.415167,14.34033 -1.629156,7.26389 -6.611462,13.30354 -13.416428,16.26369 -1.085205,0.47206 -1.217885,0.49706 -2.973134,0.56011 l -1.837235,0.066 1.006237,-2.07559 c 2.368455,-4.8854 3.538316,-10.024 3.535808,-15.5308 -0.0027,-5.88707 -1.061958,-10.50484 -5.157827,-22.48487 -0.6345,-1.85585 -1.191602,-3.37335 -1.237999,-3.37223 -0.0464,0.001 -0.17008,0.0563 -0.274844,0.1226 -0.156832,0.0993 0.130482,1.08447 1.626465,5.57721 2.644622,7.94233 3.529431,11.22265 4.132003,15.31894 0.372002,2.52887 0.400976,6.90859 0.06118,9.24748 -0.627637,4.32015 -1.762573,7.98369 -3.564128,11.5049 -0.508662,0.9942 -0.719381,1.53437 -0.616966,1.58158 0.08555,0.0394 -0.686359,0.0277 -1.715347,-0.0261 -1.028986,-0.0538 -2.81859,-0.0876 -3.976896,-0.0751 -1.158304,0.0125 -2.076599,-0.0249 -2.04065,-0.0831 0.03595,-0.0582 -0.513096,-0.57953 -1.220099,-1.15859 -5.871245,-4.80873 -8.903676,-12.46261 -8.334531,-21.0364 0.514302,-7.74762 3.570269,-16.48358 8.451646,-24.16033 0.756092,-1.18908 0.878686,-1.62601 0.456232,-1.62601 -0.269476,0 -2.187459,3.11792 -3.581049,5.82143 -6.527942,12.66396 -7.778054,24.87321 -3.460077,33.79294 1.354494,2.79801 3.875849,5.96086 6.265727,7.85989 l 0.882295,0.70108 -1.131244,0.087 c -2.28504,0.17566 -3.206655,0.12456 -4.125092,-0.22871 z"
|
||||
id="path1121"
|
||||
inkscape:connector-curvature="0" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1.6;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 48.867489,212.94095 c 0,0 -0.86438,7.76605 2.713226,13.63584 6.494115,10.65492 22.215215,15.10271 22.945555,29.57717 0.73034,14.47445 -8.97919,21.92729 -15.29676,24.84412 -6.317568,2.91683 -10.522303,1.607 -14.936018,1.58965 -4.413716,-0.0174 -11.542077,1.89608 -18.275069,-2.46922 -6.732992,-4.36531 -17.7526266,-15.9095 -10.997271,-29.49573 6.755356,-13.58623 16.532136,-14.09272 22.847212,-24.4691 3.894154,-6.39853 3.425017,-10.20633 3.425017,-10.20633"
|
||||
id="path115"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="cszzzzzsc" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1.6;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 41.293381,215.94735 c 0,0 1.628798,0.38316 4.106055,-0.68681 2.477257,-1.06997 3.458673,-2.01071 3.468053,-2.31959 0.01165,-0.38377 -2.218113,-0.51799 -4.760631,0.84257 -2.542518,1.36055 -2.815487,1.75415 -2.813477,2.16383 z"
|
||||
id="path932"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="szszs" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 96.572915,115.66592 c 3.453405,-6.0297 5.055035,-13.10355 4.535715,-20.032736 -0.39271,-5.239899 -1.955101,-10.314508 -3.590774,-15.308037 -0.858872,-2.622043 -1.740862,-5.236513 -2.645833,-7.843004"
|
||||
id="path940"
|
||||
inkscape:connector-curvature="0"
|
||||
inkscape:path-effect="#path-effect942"
|
||||
inkscape:original-d="m 96.572915,115.66592 c 1.512171,-6.67784 3.024074,-13.35542 4.535715,-20.032736 1.51164,-6.677316 -2.393584,-10.205622 -3.590774,-15.308037 -1.197189,-5.102413 -1.763625,-5.228934 -2.645833,-7.843004"
|
||||
transform="translate(-47.04174,166.51122)" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 88.162945,67.662944 c -3.064224,4.547552 -5.489214,9.525165 -7.181547,14.741074 -1.328054,4.09317 -2.208726,8.359616 -2.235731,12.662759 -0.02701,4.303143 0.823506,8.651583 2.802696,12.472653 1.648228,3.18211 4.067739,5.96128 6.992559,8.032"
|
||||
id="path944"
|
||||
inkscape:connector-curvature="0"
|
||||
inkscape:path-effect="#path-effect946"
|
||||
inkscape:original-d="m 88.162945,67.662944 c -2.015609,4.976424 -5.165409,9.764121 -7.181547,14.741074 -2.016138,4.97695 0.378241,16.756679 0.566965,25.135412 0.188722,8.37874 4.661969,5.3544 6.992559,8.032"
|
||||
sodipodi:nodetypes="cssc"
|
||||
transform="translate(-47.04174,166.51122)" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 74.744792,74.844492 c -5.08443,4.722113 -8.015278,11.678462 -7.843007,18.615327 0.124172,5.000037 1.820316,9.949651 4.788964,13.974931 2.968648,4.02529 7.196284,7.10792 11.936482,8.70364"
|
||||
id="path948"
|
||||
inkscape:connector-curvature="0"
|
||||
inkscape:path-effect="#path-effect950"
|
||||
inkscape:original-d="m 74.744792,74.844492 c -2.897553,5.417394 -4.944925,13.197404 -7.843007,18.615327 -2.898081,5.417923 11.402546,13.732871 16.725446,22.678571"
|
||||
sodipodi:nodetypes="csc"
|
||||
transform="translate(-47.04174,166.51122)" />
|
||||
<path
|
||||
style="opacity:1;fill:none;stroke:#000000;stroke-width:1;stroke-linecap:round;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 106.91469,75.255709 c 5.52653,4.277036 8.97564,11.122946 9.124,18.109618 0.1032,4.860277 -1.34637,9.741783 -4.08548,13.758033 -2.73912,4.01624 -6.75454,7.14782 -11.31705,8.82604"
|
||||
id="path952"
|
||||
inkscape:connector-curvature="0"
|
||||
inkscape:path-effect="#path-effect954"
|
||||
inkscape:original-d="m 106.91469,75.255709 c 2.5516,4.944922 6.57292,13.164164 9.124,18.109618 2.55107,4.945451 -10.26809,15.055783 -15.40253,22.584073"
|
||||
sodipodi:nodetypes="ccc"
|
||||
transform="translate(-47.04174,166.51122)" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="m 46.507366,283.1457 c -0.0079,0.37985 0.887358,0.72396 1.120981,1.53188 0.191658,0.66279 -0.270534,1.84628 -0.270534,1.84628 0,0 0.282724,-1.19113 0.01191,-1.83123 -0.270812,-0.64011 -1.689183,-1.6178 -1.689183,-1.6178"
|
||||
id="path1082"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="csczc" />
|
||||
<path
|
||||
sodipodi:nodetypes="csczc"
|
||||
inkscape:connector-curvature="0"
|
||||
id="path1084"
|
||||
d="m 43.483556,283.02758 c -0.0079,0.37985 -0.459182,0.51135 -0.225559,1.31927 0.191658,0.66279 0.887018,1.42106 0.887018,1.42106 0,0 -0.874828,-0.76591 -1.145642,-1.40601 -0.270812,-0.64011 -0.01191,-1.54693 -0.01191,-1.54693"
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<path
|
||||
sodipodi:nodetypes="csczc"
|
||||
inkscape:connector-curvature="0"
|
||||
id="path1086"
|
||||
d="m 41.404687,283.12207 c -0.0079,0.37985 -0.424683,0.81322 -0.509041,1.65 -0.09182,0.91084 -1.211218,1.96222 -1.211218,1.96222 0,0 0.998985,-1.05902 0.952594,-1.94717 -0.04639,-0.88815 0.318816,-1.6178 0.318816,-1.6178"
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="m 50.003646,283.07483 c -0.0079,0.37985 0.252593,0.74066 0.486216,1.54858 0.191658,0.66279 0.364231,1.82958 0.364231,1.82958 0,0 -0.235111,-1.0575 -0.505925,-1.6976 -0.270812,-0.64011 -1.171348,-1.75143 -1.171348,-1.75143"
|
||||
id="path1088"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="csczc" />
|
||||
<path
|
||||
sodipodi:nodetypes="cscsc"
|
||||
inkscape:connector-curvature="0"
|
||||
id="path1090"
|
||||
d="m 38.971466,282.67323 c -0.0079,0.37985 0.246274,1.23214 -0.438171,1.72087 -0.564294,0.40293 -2.160414,0.6651 -2.160414,0.6651 0,0 1.532556,-0.55416 2.019908,-0.90991 0.451361,-0.32948 0.05896,-1.68867 0.05896,-1.68867"
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<path
|
||||
style="fill:#000000;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="m 36.490995,283.38903 c 4.989684,-0.34688 10.337396,-1.14471 14.741072,-0.16534 0,0 -3.573827,0.52887 -5.976752,0.50791 -2.402925,-0.021 -8.76432,-0.34255 -8.76432,-0.34255 z"
|
||||
id="path1092"
|
||||
inkscape:connector-curvature="0"
|
||||
sodipodi:nodetypes="cczcc" />
|
||||
<path
|
||||
sodipodi:nodetypes="csczc"
|
||||
inkscape:connector-curvature="0"
|
||||
id="path1094"
|
||||
d="m 44.524621,282.45677 c -0.0079,0.37985 0.252593,0.74066 0.486216,1.54858 0.191658,0.66279 -0.387464,1.06118 -0.387464,1.06118 0,0 0.516584,-0.2891 0.24577,-0.9292 -0.270812,-0.64011 -1.171348,-1.75143 -1.171348,-1.75143"
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
|
||||
<path
|
||||
style="opacity:1;fill:#44aa00;fill-opacity:1;stroke:none;stroke-width:0.0568211;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 42.235134,215.49137 c 0,-0.0278 0.523283,-0.36314 1.010613,-0.64767 1.006039,-0.5874 1.720562,-0.94274 2.372019,-1.17967 0.784749,-0.28539 1.669558,-0.47173 2.23003,-0.46962 l 0.175395,5.3e-4 -0.05846,0.0509 c -0.108667,0.0946 -0.491339,0.36075 -0.734991,0.51123 -0.328242,0.20274 -0.628185,0.3686 -1.031687,0.57048 -1.382274,0.69159 -2.424417,1.03259 -3.501443,1.14569 -0.235215,0.0247 -0.461471,0.0335 -0.461471,0.018 z"
|
||||
id="path1125"
|
||||
inkscape:connector-curvature="0" />
|
||||
</g>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="23.397022"
|
||||
y="48.195503"
|
||||
id="text1047"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1045"
|
||||
x="23.397022"
|
||||
y="48.195503"
|
||||
style="stroke-width:0.264583">Filesystem isolation ensures the apps</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.397022"
|
||||
y="53.487164"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1176">always use the same software libraries</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="23.622332"
|
||||
y="106.40382"
|
||||
id="text1051"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1049"
|
||||
x="23.622332"
|
||||
y="106.40382"
|
||||
style="stroke-width:0.264583">Zero human intervention, all data</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.622332"
|
||||
y="111.69549"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1053">postprocessing scripts are included</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="23.779432"
|
||||
y="135.50798"
|
||||
id="text1063"><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.779432"
|
||||
y="135.50798"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1127">Different team, same results on</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.779432"
|
||||
y="140.79964"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1131">the same system (ACM)</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.24559"
|
||||
y="119.633"
|
||||
id="text1069"><tspan
|
||||
sodipodi:role="line"
|
||||
x="235.24559"
|
||||
y="119.633"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1093">Repository with BSC and custom</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="235.24559"
|
||||
y="124.92467"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1111">packages </tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="234.71022"
|
||||
y="54.057686"
|
||||
id="text1075"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1073"
|
||||
x="234.71022"
|
||||
y="54.057686"
|
||||
style="stroke-width:0.264583">nix</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="22.981543"
|
||||
y="69.932686"
|
||||
id="text1079"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1077"
|
||||
x="22.981543"
|
||||
y="69.932686"
|
||||
style="stroke-width:0.264583">Backwards tracking</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="23.824903"
|
||||
y="77.299644"
|
||||
id="text1083"><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.824903"
|
||||
y="77.299644"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1121">Every result is hashed by the software</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.824903"
|
||||
y="82.591309"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1125">used to generate it (datasets, plots ...)</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.06369"
|
||||
y="61.424671"
|
||||
id="text1087"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1085"
|
||||
x="235.06369"
|
||||
y="61.424671"
|
||||
style="stroke-width:0.264583">Reproducible builds from source using</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="235.06369"
|
||||
y="66.716331"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1107">the nix package manager</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.01408"
|
||||
y="148.7413"
|
||||
id="text1091"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1089"
|
||||
x="235.01408"
|
||||
y="148.7413"
|
||||
style="stroke-width:0.264583">Several benchmark programs with</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="235.01408"
|
||||
y="154.03296"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1105">multiple programming models</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:'Linux Libertine';-inkscape-font-specification:'Linux Libertine, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="234.71022"
|
||||
y="83.16185"
|
||||
id="text1097"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1095"
|
||||
x="234.71022"
|
||||
y="83.16185"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:8.46667px;font-family:sans-serif;-inkscape-font-specification:sans-serif;stroke-width:0.264583">nixpkgs</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="235.0947"
|
||||
y="90.528831"
|
||||
id="text1101"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1099"
|
||||
x="235.0947"
|
||||
y="90.528831"
|
||||
style="stroke-width:0.264583">Large repository of common packages,</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="235.0947"
|
||||
y="95.820496"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1103">maintaned by the community</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:8.46667px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;fill:#008000;stroke-width:0.264583"
|
||||
x="22.981543"
|
||||
y="157.24518"
|
||||
id="text1039-8"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1037-1"
|
||||
x="22.981543"
|
||||
y="157.24518"
|
||||
style="fill:#008000;stroke-width:0.264583">Reproducible</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;fill:#008000;stroke-width:0.264583"
|
||||
x="23.779427"
|
||||
y="164.61215"
|
||||
id="text1063-0"><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.779427"
|
||||
y="164.61215"
|
||||
style="fill:#008000;stroke-width:0.264583"
|
||||
id="tspan1131-0">Different team, same results on</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="23.779427"
|
||||
y="169.90381"
|
||||
style="fill:#008000;stroke-width:0.264583"
|
||||
id="tspan1162">a different system (ACM).</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:italic;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.58611px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif Italic';text-align:center;letter-spacing:0px;word-spacing:0px;text-anchor:middle;stroke-width:0.264583"
|
||||
x="163.37598"
|
||||
y="138.77432"
|
||||
id="text1166"><tspan
|
||||
sodipodi:role="line"
|
||||
x="163.37598"
|
||||
y="138.77432"
|
||||
style="font-style:italic;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.58611px;font-family:sans-serif;-inkscape-font-specification:'sans-serif Italic';text-align:center;text-anchor:middle;stroke-width:0.264583"
|
||||
id="tspan1168">Is a complete benchmark framework</tspan><tspan
|
||||
sodipodi:role="line"
|
||||
x="163.37598"
|
||||
y="144.50696"
|
||||
style="font-style:italic;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:4.58611px;font-family:sans-serif;-inkscape-font-specification:'sans-serif Italic';text-align:center;text-anchor:middle;stroke-width:0.264583"
|
||||
id="tspan1172">(<tspan
|
||||
style="fill:#008000"
|
||||
id="tspan1174">green</tspan> = not mature yet)</tspan></text>
|
||||
</g>
|
||||
</svg>
|
Binary file not shown.
@ -1,265 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
width="500"
|
||||
height="500"
|
||||
viewBox="0 0 132.29167 132.29167"
|
||||
version="1.1"
|
||||
id="svg8"
|
||||
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
|
||||
sodipodi:docname="scalability.svg">
|
||||
<defs
|
||||
id="defs2">
|
||||
<marker
|
||||
style="overflow:visible;"
|
||||
id="marker2584"
|
||||
refX="0.0"
|
||||
refY="0.0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Mend"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
transform="scale(0.4) rotate(180) translate(10,0)"
|
||||
style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
|
||||
d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
|
||||
id="path2582" />
|
||||
</marker>
|
||||
<marker
|
||||
style="overflow:visible;"
|
||||
id="marker1521"
|
||||
refX="0.0"
|
||||
refY="0.0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Mend"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
transform="scale(0.4) rotate(180) translate(10,0)"
|
||||
style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
|
||||
d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
|
||||
id="path1519" />
|
||||
</marker>
|
||||
<marker
|
||||
style="overflow:visible"
|
||||
id="marker1313"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lend"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
transform="matrix(-0.8,0,0,-0.8,-10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path1311" />
|
||||
</marker>
|
||||
<marker
|
||||
style="overflow:visible"
|
||||
id="Arrow1Lend"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lend"
|
||||
inkscape:isstock="true"
|
||||
inkscape:collect="always">
|
||||
<path
|
||||
transform="matrix(-0.8,0,0,-0.8,-10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path842" />
|
||||
</marker>
|
||||
<marker
|
||||
style="overflow:visible"
|
||||
id="marker1129"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path1127" />
|
||||
</marker>
|
||||
<marker
|
||||
style="overflow:visible"
|
||||
id="Arrow1Lstart"
|
||||
refX="0"
|
||||
refY="0"
|
||||
orient="auto"
|
||||
inkscape:stockid="Arrow1Lstart"
|
||||
inkscape:isstock="true">
|
||||
<path
|
||||
transform="matrix(0.8,0,0,0.8,10,0)"
|
||||
style="fill:#000000;fill-opacity:1;fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1"
|
||||
d="M 0,0 5,-5 -12.5,0 5,5 Z"
|
||||
id="path839" />
|
||||
</marker>
|
||||
</defs>
|
||||
<sodipodi:namedview
|
||||
id="base"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:zoom="1.4"
|
||||
inkscape:cx="225.60325"
|
||||
inkscape:cy="274.75145"
|
||||
inkscape:document-units="mm"
|
||||
inkscape:current-layer="layer1"
|
||||
inkscape:document-rotation="0"
|
||||
showgrid="true"
|
||||
units="px"
|
||||
inkscape:snap-bbox="true"
|
||||
inkscape:snap-bbox-midpoints="true"
|
||||
inkscape:window-width="1914"
|
||||
inkscape:window-height="1025"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="24"
|
||||
inkscape:window-maximized="1">
|
||||
<inkscape:grid
|
||||
type="xygrid"
|
||||
id="grid10" />
|
||||
</sodipodi:namedview>
|
||||
<metadata
|
||||
id="metadata5">
|
||||
<rdf:RDF>
|
||||
<cc:Work
|
||||
rdf:about="">
|
||||
<dc:format>image/svg+xml</dc:format>
|
||||
<dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
||||
<dc:title></dc:title>
|
||||
</cc:Work>
|
||||
</rdf:RDF>
|
||||
</metadata>
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1">
|
||||
<path
|
||||
style="fill:#c5ffc5;fill-opacity:1;stroke:none;stroke-width:0.189286;stroke-linecap:round;stroke-miterlimit:4;stroke-dasharray:0.189286, 0.567857;stroke-dashoffset:0"
|
||||
d="m 36.122769,118.97047 -20.300019,-0.0177 0.107096,-11.93674 c 0.0589,-6.56521 0.08242,-27.953609 0.05225,-47.529788 l -0.05485,-35.593051 27.686723,-0.04266 27.686722,-0.04266 0.07339,47.624942 0.07339,47.624937 -7.512341,-0.0348 c -4.131786,-0.0191 -16.647348,-0.0428 -27.812359,-0.0525 z"
|
||||
id="path3092" />
|
||||
<path
|
||||
style="fill:#f98f84;fill-opacity:1;stroke:none;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="m 87.3125,119.0625 h 15.875 V 23.812501 h -15.875 z"
|
||||
id="path2031"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<path
|
||||
style="fill:#dc8add;stroke:none;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="m 71.4375,119.0625 h 15.875 V 23.812501 h -15.875 z"
|
||||
id="path2027"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:2.12, 1.06;stroke-dashoffset:0;stroke-opacity:1"
|
||||
d="M 71.4375,119.0625 V 23.812501"
|
||||
id="path1305"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="8.1803885"
|
||||
y="11.751222"
|
||||
id="text1213"><tspan
|
||||
sodipodi:role="line"
|
||||
x="8.1803885"
|
||||
y="11.751222"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1263">Efficiency</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="95.932426"
|
||||
y="126.27518"
|
||||
id="text1217"><tspan
|
||||
sodipodi:role="line"
|
||||
x="95.932426"
|
||||
y="126.27518"
|
||||
style="stroke-width:0.264583"
|
||||
id="tspan1219">Nodes: log(n)</tspan></text>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:2.12, 1.06;stroke-dashoffset:0;stroke-opacity:1"
|
||||
d="M 87.3125,119.0625 V 23.812501"
|
||||
id="path1307"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<path
|
||||
style="fill:#cccccc;stroke:none;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1"
|
||||
d="M 103.1875,119.0625 V 23.812501 h 13.22917 V 119.0625 Z"
|
||||
id="path1415"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';text-align:end;letter-spacing:0px;word-spacing:0px;text-anchor:end;stroke-width:0.264583"
|
||||
x="-80.841827"
|
||||
y="110.98573"
|
||||
id="text1419"
|
||||
transform="rotate(-90)"><tspan
|
||||
sodipodi:role="line"
|
||||
x="-80.841827"
|
||||
y="110.98573"
|
||||
style="font-size:3.175px;text-align:end;text-anchor:end;stroke-width:0.264583"
|
||||
id="tspan2029">Too small</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;fill:#000000;stroke-width:0.264583"
|
||||
x="-96.356133"
|
||||
y="96.125916"
|
||||
id="text1601"
|
||||
transform="rotate(-90)"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1599"
|
||||
x="-96.356133"
|
||||
y="96.125916"
|
||||
style="font-size:3.175px;fill:#000000;stroke-width:0.264583">OmpSs-2 scale bad</tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:3.175px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;fill:#000000;stroke-width:0.264583"
|
||||
x="-95.687462"
|
||||
y="80.250916"
|
||||
id="text1605"
|
||||
transform="rotate(-90)"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan1603"
|
||||
x="-95.687462"
|
||||
y="80.250916"
|
||||
style="font-size:3.175px;fill:#000000;stroke-width:0.264583">OpenMP scale bad</tspan></text>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1;marker-start:url(#marker1129);marker-end:url(#Arrow1Lend)"
|
||||
d="M 15.874997,15.875001 V 119.0625 H 124.35417"
|
||||
id="path14"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:2.12, 1.06;stroke-dashoffset:0;stroke-opacity:1"
|
||||
d="M 103.1875,119.0625 V 23.812501"
|
||||
id="path2545"
|
||||
sodipodi:nodetypes="cc" />
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.865;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="M 96.572917,116.41667 85.989584,71.437501 70.114584,46.302085 15.875,34.395834 l 68.791667,13.229167 15.875003,19.84375 9.26042,48.947919"
|
||||
id="path2820"
|
||||
sodipodi:nodetypes="ccccccc" />
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-size:4.23333px;line-height:1.25;font-family:sans-serif;-inkscape-font-specification:'sans-serif, Normal';letter-spacing:0px;word-spacing:0px;stroke-width:0.264583"
|
||||
x="51.457352"
|
||||
y="32.917889"
|
||||
id="text3711"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan3709"
|
||||
x="51.457352"
|
||||
y="32.917889"
|
||||
style="stroke-width:0.264583">We can explain this difference</tspan></text>
|
||||
<path
|
||||
style="fill:none;stroke:#000000;stroke-width:0.264583px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;marker-end:url(#marker1313)"
|
||||
d="m 80.697916,35.718751 -1.322917,9.260416"
|
||||
id="path3713" />
|
||||
</g>
|
||||
</svg>
|
@ -1,806 +0,0 @@
[Deleted SVG figure scaling-region.svg (500 x 500 px, 32 KiB): a diagram of scaling regions with axes "Problem size: log(N)" and "Nodes: log(n)", strong and weak scaling lines, a dotted "Constant work per CPU" diagonal, a "Saturation zone" bounded by the "OpenMP sat. limit", "OmpSs-2 sat. limit" and "1 task/CPU limit" markers, and regions labelled "Too small", "ENOMEM", "ENOTIME", "OpenMP scale bad" and "OmpSs-2 scale bad".]
garlic/doc/ug.ms (1691 lines): file diff suppressed because it is too large.
@ -1,83 +0,0 @@
import json, re, sys, os, glob
from os import path


def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, flush=True, **kwargs)


def process_run(tree, runPath):

    ctf_mode = {}

    with open(".garlic/time_mode_runtime.csv", "r") as f:
        ctf_mode['runtime'] = float(f.readline())

    with open(".garlic/time_mode_dead.csv", "r") as f:
        ctf_mode['dead'] = float(f.readline())

    with open(".garlic/time_mode_task.csv", "r") as f:
        ctf_mode['task'] = float(f.readline())

    tree['ctf_mode'] = ctf_mode

    with open("stdout.log", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    time_line = None
    for line in lines:
        if re.match(r'^ ?time .*', line):
            time_line = line
            break

    if time_line is None:
        eprint("missing time line, aborting")
        eprint("stdout file = {}/stdout.log".format(runPath))
        exit(1)

    time_str = time_line.split()[1]

    tree['time'] = float(time_str)

    print(json.dumps(tree))


def process_result_tree(resultTree):

    eprint("processing resultTree: " + resultTree)

    os.chdir(resultTree)

    experiments = glob.glob(resultTree + "/*-experiment")

    for exp in glob.glob("*-experiment"):
        eprint("found experiment: " + exp)
        expPath = path.join(resultTree, exp)
        os.chdir(expPath)

        for unit in glob.glob("*-unit"):
            eprint("found unit: " + unit)
            unitPath = path.join(resultTree, exp, unit)
            os.chdir(unitPath)

            with open('garlic_config.json') as json_file:
                garlic_conf = json.load(json_file)

            tree = {"exp": exp, "unit": unit, "config": garlic_conf}

            for i in range(garlic_conf['loops']):
                run = str(i + 1)
                runPath = path.join(resultTree, exp, unit, run)
                if not path.isdir(runPath):
                    eprint("missing run {}, aborting".format(run))
                    exit(1)

                tree["run"] = run
                os.chdir(runPath)

                process_run(tree, runPath)


if len(sys.argv) != 2:
    eprint("usage: python {} <resultTree>".format(sys.argv[0]))
    exit(1)

process_result_tree(sys.argv[1])
@ -1,18 +0,0 @@
{
  super
, self
, bsc
, garlic
, callPackage
}:

rec {

  py = callPackage ./py.nix {};

  std.timetable = py { script = ./std-timetable.py; compress = false; };
  osu.latency = py { script = ./osu-latency.py; };
  osu.bw = py { script = ./osu-bw.py; };
  perf.stat = py { script = ./perf-stat.py; };
  ctf.mode = py { script = ./ctf-mode.py; };
}
@ -1,64 +0,0 @@
import json, re, sys, os, glob
from os import path


def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, flush=True, **kwargs)


def process_run(tree, runPath):
    with open("stdout.log", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    for line in lines:
        if not re.match(r'^[0-9]+ *[0-9\.]+$', line):
            continue

        slices = line.split()
        size = slices[0]
        bw = slices[1]

        tree['size'] = int(size)
        tree['bw'] = float(bw)
        print(json.dumps(tree))


def process_result_tree(resultTree):

    eprint("processing resultTree: " + resultTree)

    os.chdir(resultTree)

    experiments = glob.glob(resultTree + "/*-experiment")

    for exp in glob.glob("*-experiment"):
        eprint("found experiment: " + exp)
        expPath = path.join(resultTree, exp)
        os.chdir(expPath)

        for unit in glob.glob("*-unit"):
            eprint("found unit: " + unit)
            unitPath = path.join(resultTree, exp, unit)
            os.chdir(unitPath)

            with open('garlic_config.json') as json_file:
                garlic_conf = json.load(json_file)

            tree = {"exp": exp, "unit": unit, "config": garlic_conf}

            for i in range(garlic_conf['loops']):
                run = str(i + 1)
                runPath = path.join(resultTree, exp, unit, run)
                if not path.isdir(runPath):
                    eprint("missing run {}, aborting".format(run))
                    exit(1)

                tree["run"] = run
                os.chdir(runPath)

                process_run(tree, runPath)


if len(sys.argv) != 2:
    eprint("usage: python {} <resultTree>".format(sys.argv[0]))
    exit(1)

process_result_tree(sys.argv[1])
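The filter above keeps only plain "size value" rows, so any header lines in the benchmark output are skipped. A minimal sketch of that filtering, assuming an OSU-style output fragment (the header text below is illustrative, not taken from a real run):

import re, json

# Hypothetical stdout fragment; only the numeric rows pass the filter.
stdout = """# OSU MPI Bandwidth Test
# Size      Bandwidth (MB/s)
1                       3.02
1048576             12161.31
"""

for line in stdout.splitlines():
    line = line.strip()
    if not re.match(r'^[0-9]+ *[0-9\.]+$', line):
        continue
    size, bw = line.split()
    print(json.dumps({"size": int(size), "bw": float(bw)}))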
@ -1,64 +0,0 @@
import json, re, sys, os, glob
from os import path


def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, flush=True, **kwargs)


def process_run(tree, runPath):
    with open("stdout.log", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    for line in lines:
        if not re.match(r'^[0-9]+ *[0-9\.]+$', line):
            continue

        slices = line.split()
        size = slices[0]
        latency = slices[1]

        tree['size'] = int(size)
        tree['latency'] = float(latency)
        print(json.dumps(tree))


def process_result_tree(resultTree):

    eprint("processing resultTree: " + resultTree)

    os.chdir(resultTree)

    experiments = glob.glob(resultTree + "/*-experiment")

    for exp in glob.glob("*-experiment"):
        eprint("found experiment: " + exp)
        expPath = path.join(resultTree, exp)
        os.chdir(expPath)

        for unit in glob.glob("*-unit"):
            eprint("found unit: " + unit)
            unitPath = path.join(resultTree, exp, unit)
            os.chdir(unitPath)

            with open('garlic_config.json') as json_file:
                garlic_conf = json.load(json_file)

            tree = {"exp": exp, "unit": unit, "config": garlic_conf}

            for i in range(garlic_conf['loops']):
                run = str(i + 1)
                runPath = path.join(resultTree, exp, unit, run)
                if not path.isdir(runPath):
                    eprint("missing run {}, aborting".format(run))
                    exit(1)

                tree["run"] = run
                os.chdir(runPath)

                process_run(tree, runPath)


if len(sys.argv) != 2:
    eprint("usage: python {} <resultTree>".format(sys.argv[0]))
    exit(1)

process_result_tree(sys.argv[1])
@ -1,90 +0,0 @@
import json, re, sys, os, glob
from os import path


def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, flush=True, **kwargs)


def process_run(tree, runPath):
    with open(".garlic/perf.csv", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    perf_data = {}

    for line in lines:
        if len(line) == 0: continue
        if line[0] == '#': continue

        slices = line.split(',')
        if len(slices) != 7:
            eprint("error: mismatched columns")
            exit(1)

        name = slices[2].replace("-", "_")
        value = float(slices[0])

        perf_data[name] = value

    tree['perf'] = perf_data

    with open("stdout.log", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    time_line = None
    for line in lines:
        if re.match(r'^ ?time .*', line):
            time_line = line
            break

    if time_line is None:
        eprint("missing time line, aborting")
        eprint("stdout file = {}/stdout.log".format(runPath))
        exit(1)

    time_str = time_line.split()[1]

    tree['time'] = float(time_str)

    print(json.dumps(tree))


def process_result_tree(resultTree):

    eprint("processing resultTree: " + resultTree)

    os.chdir(resultTree)

    experiments = glob.glob(resultTree + "/*-experiment")

    for exp in glob.glob("*-experiment"):
        eprint("found experiment: " + exp)
        expPath = path.join(resultTree, exp)
        os.chdir(expPath)

        for unit in glob.glob("*-unit"):
            eprint("found unit: " + unit)
            unitPath = path.join(resultTree, exp, unit)
            os.chdir(unitPath)

            with open('garlic_config.json') as json_file:
                garlic_conf = json.load(json_file)

            tree = {"exp": exp, "unit": unit, "config": garlic_conf}

            for i in range(garlic_conf['loops']):
                run = str(i + 1)
                runPath = path.join(resultTree, exp, unit, run)
                if not path.isdir(runPath):
                    eprint("missing run {}, aborting".format(run))
                    exit(1)

                tree["run"] = run
                os.chdir(runPath)

                process_run(tree, runPath)


if len(sys.argv) != 2:
    eprint("usage: python {} <resultTree>".format(sys.argv[0]))
    exit(1)

process_result_tree(sys.argv[1])
@ -1,34 +0,0 @@
{
  stdenv
, python3
, gzip
}:

{
  script,
  compress ? true
}:

tree:

stdenv.mkDerivation {
  name = "dataset";
  preferLocalBuild = true;
  phases = [ "installPhase" ];
  buildInputs = [ python3 gzip ];
  installPhase = ''
    mkdir -p $out
    ln -s ${tree} $out/tree
    ln -s ${script} $out/script

    COMPRESS_DATASET=${toString compress}

    if [ $COMPRESS_DATASET ]; then
      python $out/script $out/tree | gzip > $out/dataset.json.gz
      ln -s dataset.json.gz $out/dataset
    else
      python $out/script $out/tree > $out/dataset.json
      ln -s dataset.json $out/dataset
    fi
  '';
}
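The derivation leaves a dataset symlink pointing at either dataset.json.gz or dataset.json, with one JSON object per line as printed by the post-processing scripts above. A minimal sketch of reading such a file back for analysis, assuming a placeholder path under result/ (a dataframe library could be used instead of the standard library):

import gzip, json

def load_dataset(path):
    # Open transparently whether the dataset was gzip-compressed or not.
    opener = gzip.open if path.endswith(".gz") else open
    with opener(path, "rt") as f:
        return [json.loads(line) for line in f if line.strip()]

# Placeholder path: one dict per emitted record (exp, unit, run, config, ...).
records = load_dataset("result/dataset.json.gz")
print(len(records), "records")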
@ -1,77 +0,0 @@
import json, re, sys, os, glob
from os import path


def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, flush=True, **kwargs)


def process_run(tree, runPath):

    with open(".garlic/total_time_start", "r") as f:
        total_time_start = float(f.readline().strip())

    with open(".garlic/total_time_end", "r") as f:
        total_time_end = float(f.readline().strip())

    with open("stdout.log", "r") as f:
        lines = [line.strip() for line in f.readlines()]

    time_line = None
    for line in lines:
        if re.match(r'^ ?time .*', line):
            time_line = line
            break

    if time_line is None:
        eprint("missing time line, aborting")
        eprint("stdout file = {}/stdout.log".format(runPath))
        exit(1)

    time_str = time_line.split()[1]

    tree['time'] = float(time_str)
    tree['total_time'] = total_time_end - total_time_start

    print(json.dumps(tree))


def process_result_tree(resultTree):

    eprint("processing resultTree: " + resultTree)

    os.chdir(resultTree)

    experiments = glob.glob(resultTree + "/*-experiment")

    for exp in glob.glob("*-experiment"):
        eprint("found experiment: " + exp)
        expPath = path.join(resultTree, exp)
        os.chdir(expPath)

        for unit in glob.glob("*-unit"):
            eprint("found unit: " + unit)
            unitPath = path.join(resultTree, exp, unit)
            os.chdir(unitPath)

            with open('garlic_config.json') as json_file:
                garlic_conf = json.load(json_file)

            tree = {"exp": exp, "unit": unit, "config": garlic_conf}

            for i in range(garlic_conf['loops']):
                run = str(i + 1)
                runPath = path.join(resultTree, exp, unit, run)
                if not path.isdir(runPath):
                    eprint("missing run {}, aborting".format(run))
                    exit(1)

                tree["run"] = run
                os.chdir(runPath)

                process_run(tree, runPath)


if len(sys.argv) != 2:
    eprint("usage: python {} <resultTree>".format(sys.argv[0]))
    exit(1)

process_result_tree(sys.argv[1])
@ -1,73 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, n # must be a string
, dram # must be a string
, strace
}:

with lib;

# Ensure the arguments are strings, to avoid problems with large numbers
assert (isString n);
assert (isString dram);

let
  # Initial variable configuration
  varConf = with bsc; { };

  inherit (targetMachine) fs;

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    expName = "genseq";
    unitName = "${expName}.n${n}.dram${dram}";
    inherit (targetMachine.config) hw;
    inherit n dram;

    # Don't repeat
    loops = 1;

    # Resources
    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "01:00:00";
    cpusPerTask = hw.cpusPerNode;
    jobName = unitName;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf;
    let
      #FIXME: We need a better mechanism to get the output paths
      outDir = "${fs.shared.fast}/out/$GARLIC_USER/$GARLIC_UNIT/$GARLIC_RUN";
      outFile = "${outDir}/seq.dat";
    in
    stages.exec {
      inherit nextStage;
      pre = ''
        mkdir -p "${outDir}"
      '';
      argv = [ n dram outFile ];
      post = ''
        # Link the output here
        ln -s "${outFile}" seq.dat
      '';
    };

  program = {...}: bsc.apps.bigsort.genseq;

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
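The experiments in this directory all follow the same pattern: varConf lists the attributes that vary between units (empty here, so genseq yields a single unit) and genConf completes each combination into a full unit configuration. A rough Python sketch of the presumed semantics of stdexp.buildConfigs, for illustration only; the actual implementation lives elsewhere in the garlic code and may differ:

from itertools import product

def build_configs(var_conf, gen_conf):
    # Cross product of all varying attributes; each combination is then
    # completed by gen_conf into a full unit configuration.
    keys = list(var_conf)
    if not keys:
        return [gen_conf({})]
    combos = product(*(var_conf[k] for k in keys))
    return [gen_conf(dict(zip(keys, c))) for c in combos]

# Illustration with a nodes list like the timediff experiment further down.
var_conf = {"nodes": [1, 2, 4, 8]}
gen_conf = lambda c: {"expName": "example", **c}
for cfg in build_configs(var_conf, gen_conf):
    print(cfg)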
@ -1,102 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, inputTre
, n
, dram
, garlicTools
, resultFromTrebuchet
}:

with lib;
with garlicTools;

let
  # Initial variable configuration
  varConf = with bsc; { };

  inherit (targetMachine) fs;

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    expName = "shuffle";
    unitName = "${expName}.n${n}.dram${dram}";
    inherit (targetMachine.config) hw;
    inherit n dram;

    # Don't repeat
    loops = 1;

    # Resources
    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "01:00:00";
    cpusPerTask = hw.cpusPerNode;
    jobName = unitName;

    # We need access to a fast shared filesystem to store the shuffled input
    # dataset
    extraMounts = [ fs.shared.fast ];
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf;
    let
      inputExp = inputTre.experiment;
      inputUnit = elemAt inputExp.units 0;
      unitName = baseNameOf (toString inputUnit);

      # We also need the result. This is only used to ensure that we have the
      # results, so it has been executed.
      inputRes = resultFromTrebuchet inputTre;

      #FIXME: We need a better mechanism to get the output paths
      inFile = "${fs.shared.fast}/out/$GARLIC_USER/${unitName}/1/seq.dat";
      outDir = "${fs.shared.fast}/out/$GARLIC_USER/$GARLIC_UNIT/$GARLIC_RUN";
      outFile = "${outDir}/shuffled.dat";

    in
    stages.exec {
      inherit nextStage;
      pre = ''
        # This line ensures that the previous results are complete:
        # ${inputRes}

        # Exit on error
        set -e

        # Ensure the input file exists
        if [ ! -f "${inFile}" ]; then
          echo "input file not found: ${inFile}"
          exit 1
        fi

        mkdir -p "${outDir}"

        # Copy the input as we are going to overwrite it
        cp "${inFile}" "${outFile}"
      '';
      argv = [ n dram outFile 16 64 ];
      post = ''
        # Link the output here
        ln -s "${outFile}" shuffled.dat
      '';
    };

  program = {...}:
    bsc.apps.bigsort.shuffle;

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,126 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, removeOutput ? true
, resultFromTrebuchet
, inputTre
}:

with lib;

let
  varConf = { }; # Not used

  inherit (targetMachine) fs;

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    expName = "bigsort";
    unitName = "${expName}.bs${toString bs}";
    inherit (targetMachine.config) hw;

    # bigsort options
    n = 1024 * 1024 * 1024 / 8; # In longs (?)
    bs = n; # In bytes
    pageSize = bs / 2; # In bytes (?)
    cc = bsc.icc;
    mpi = bsc.impi;
    gitBranch = "garlic/mpi+send+omp+task";

    # Execute each unit only once
    loops = 1;

    # Resources
    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "01:00:00";
    # All CPUs of the socket to each task
    cpusPerTask = hw.cpusPerSocket;
    jobName = "bigsort-${toString n}-${toString bs}-${gitBranch}";

    # Load the dataset from the same fs where it was stored in the shuffle
    # step. Also we use a local temp fs to store intermediate results.
    extraMounts = [ fs.shared.fast fs.local.temp ];

    rev = 1;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf;
    let
      inputExp = inputTre.experiment;
      unit = elemAt inputExp.units 0;
      expName = baseNameOf (toString inputExp);
      unitName = baseNameOf (toString unit);

      # We also need the result. This is only used to ensure that we have the
      # results, so it has been executed.
      inputRes = resultFromTrebuchet inputTre;

      #FIXME: We need a better mechanism to get the output paths
      inFile = "${fs.shared.fast}/out/$GARLIC_USER/${unitName}/1/shuffled.dat";
      outDir = "${fs.shared.fast}/out/$GARLIC_USER/$GARLIC_UNIT/$GARLIC_RUN";
      outFile = "${outDir}/sorted.dat";
      tmpDir = fs.local.temp;
    in
    stages.exec {
      inherit nextStage;
      pre = ''
        # This line ensures that the shuffled results are complete: nix needs to
        # compute the hash of the execution log to write the path here.
        # ${inputRes}

        # Exit on error
        set -e

        # Ensure the input file exists
        if [ ! -f "${inFile}" ]; then
          echo "input file not found: ${inFile}"
          exit 1
        fi

        # Create the output path
        mkdir -p ${outDir}

        # Verbose args:
        echo "INPUT = ${inFile}"
        echo "OUTPUT = ${outFile}"
        echo "TMPDIR = ${tmpDir}"
      '';

      argv = [ n bs inFile outFile tmpDir pageSize ];

      # Optionally remove the potentially large output dataset
      post = ''
        # Link the output here
        ln -s "${outFile}" sorted.dat
      '' + optionalString (removeOutput) ''
        # Remove the sorted output
        stat "${outFile}" > "${outFile}.stat"
        echo "file removed to save space" > "${outFile}"
      '';
    };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
    customPkgs.apps.bigsort.sort.override {
      inherit cc mpi gitBranch;
    };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

#{ inherit configs pipeline; }
stdexp.genExperiment { inherit configs pipeline; }
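The units of n, bs and pageSize are marked with "(?)" in the comments above, so the following is only one reading of the magnitudes, assuming 8-byte longs:

# Interpreting the bigsort parameters above (units are an assumption).
n = 1024 * 1024 * 1024 // 8      # 134_217_728 longs -> 1 GiB at 8 B per long
bs = n                           # 128 MiB if taken as bytes
page_size = bs // 2              # 64 MiB under the same reading

print(n, "longs =", n * 8 / 2**30, "GiB of data")
print(bs / 2**20, "MiB block,", page_size / 2**20, "MiB page")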
@ -1,164 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, pkgs
, targetMachine
, stages
, garlicTools
, writeText
, enableHWC ? false
}:

with lib;
with garlicTools;

let
  # Initial variable configuration
  varConf = {
    nodes = [ 2 ];
  };

  machineConfig = targetMachine.config;

  genConf = c: targetMachine.config // rec {
    expName = "cn6-nbody";
    unitName = expName + "-nodes${toString nodes}";

    inherit (machineConfig) hw;

    # Parameters for nbody
    particles = 4 * 512 * hw.cpusPerSocket;
    timesteps = 2;
    blocksize = 512;
    gitBranch = "garlic/tampi+isend+oss+task";

    loops = 1;

    # Resources
    cpusPerTask = hw.cpusPerSocket;
    ntasksPerNode = hw.socketsPerNode;
    nodes = c.nodes;

    qos = "debug";
    time = "02:00:00";

    jobName = unitName;
  };

  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  # Custom BSC packages
  bsc' = bsc.extend (self: super: {

    # For nanos6 we use my fork for distributed instrumentation at the
    # latest commit
    nanos6 = (super.nanos6Git.override {
      enableJemalloc = true;
    }).overrideAttrs (old: rec {

      src = builtins.fetchGit {
        url = "git@bscpm03.bsc.es:nanos6/forks/nanos6-fixes.git";
        ref = "distributed-instrumentation-fixes";
        rev = "80058512527961fbde9bd81474b0a29141d7982c";
      };

      dontStrip = false;
      version = src.shortRev;

      # Disable all unused instrumentations for faster builds
      configureFlags = old.configureFlags ++ [
        "--disable-extrae-instrumentation"
        "--disable-lint-instrumentation"
        "--disable-graph-instrumentation"
        "--disable-stats-instrumentation"
        "--with-babeltrace2=${super.babeltrace2}"
      ];
    });

    # Use clang from master
    clangOmpss2Unwrapped = super.clangOmpss2Unwrapped.overrideAttrs (old: rec {
      version = src.shortRev;
      src = builtins.fetchGit {
        url = "ssh://git@bscpm03.bsc.es/llvm-ompss/llvm-mono.git";
        ref = "master";
        rev = "ce47d99d2b2b968c87187cc7818cc5040b082d6c";
      };
    });

    # Use mcxx from master
    mcxx = super.mcxxGit;

    # We also need the instrumented version of TAMPI
    tampi = super.tampiGit.overrideAttrs (old: rec {
      version = src.shortRev;
      #dontStrip = true;
      #NIX_CFLAGS = "-O0 -g";
      src = builtins.fetchGit {
        url = "ssh://git@bscpm03.bsc.es/interoperability/tampi.git";
        ref = "master";
        rev = "f1e77e6f439a0e964e98b5e0a4738b2e95e4fd3d";
      };
    });
  });

  ctf = {nextStage, conf, ...}: let
    # Create the nanos6 configuration file
    nanos6ConfigFile = writeText "nanos6.toml" ''
      version.instrument = "ctf"
      turbo.enabled = false
      instrument.ctf.converter.enabled = true
      instrument.ctf.converter.fast = false
    '';

  in stages.exec {
    inherit nextStage;

    # And use it
    env = ''
      export NANOS6_CONFIG=${nanos6ConfigFile}

      # Add nanos6 and babeltrace2 binaries to the PATH
      export PATH="$PATH:${bsc'.nanos6}/bin:${bsc'.babeltrace2}/bin"

      # Also add the babeltrace2 python module to python search path
      export PYTHONPATH="$PYTHONPATH:${bsc'.babeltrace2}/lib/python3.8/site-packages"
    '';

    post = ''
      rank=$SLURM_PROCID
      tracedir=trace_nbody

      # Merge on rank 0 only
      if [ $rank != 0 ]; then
        exit 0;
      fi

      # Wait a bit for all ranks to finish the conversion
      sleep 5

      # Run the merger
      nanos6-mergeprv "$tracedir"
    '';
  };

  exec = {nextStage, conf, ...}: stages.exec {
    inherit nextStage;
    argv = with conf; [
      "-t" timesteps
      "-p" particles
    ];
  };

  program = {nextStage, conf, ...}: bsc'.garlic.apps.nbody.override {
    inherit (conf) blocksize gitBranch;
  };

  pipeline = stdexp.stdPipeline ++ [ ctf exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
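The nbody problem size scales with the socket width of the target machine. As a rough sanity check of the magnitudes, a sketch assuming 24 CPUs per socket (an assumed value; hw.cpusPerSocket comes from the machine configuration, not from this file):

# Hypothetical hw value; cpusPerSocket is not defined in this file.
cpus_per_socket = 24

particles = 4 * 512 * cpus_per_socket   # as in genConf above
blocksize = 512
blocks = particles // blocksize

print(particles, "particles in", blocks, "blocks of", blocksize)
# With 24 CPUs per socket: 49152 particles in 96 blocks.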
@ -1,171 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, pkgs
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, writeText
|
||||
, enableHWC ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
nodes = [ 1 2 4 8 ];
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "timediff";
|
||||
unitName = expName + "-nodes${toString nodes}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
|
||||
loops = 1;
|
||||
|
||||
# Resources
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = hw.socketsPerNode;
|
||||
nodes = c.nodes;
|
||||
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
configs = stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
# Custom BSC packages
|
||||
bsc' = bsc.extend (self: super: {
|
||||
|
||||
# For nanos6 we use my fork for distributed instrumentation at the
|
||||
# latest commit
|
||||
nanos6 = (super.nanos6Git.override {
|
||||
enableJemalloc = true;
|
||||
}).overrideAttrs (old: rec {
|
||||
|
||||
src = builtins.fetchGit {
|
||||
url = "git@bscpm03.bsc.es:rarias/nanos6.git";
|
||||
ref = "rodrigo";
|
||||
rev = "5cbeabb4e0446c2c293cc3005f76e6139465caee";
|
||||
};
|
||||
|
||||
dontStrip = false;
|
||||
version = src.shortRev;
|
||||
|
||||
# Disable all unused instrumentations for faster builds
|
||||
configureFlags = old.configureFlags ++ [
|
||||
"--disable-extrae-instrumentation"
|
||||
"--disable-lint-instrumentation"
|
||||
"--disable-graph-instrumentation"
|
||||
"--disable-stats-instrumentation"
|
||||
];
|
||||
});
|
||||
|
||||
# Use clang from master
|
||||
clangOmpss2Unwrapped = super.clangOmpss2Unwrapped.overrideAttrs (old: rec {
|
||||
version = src.shortRev;
|
||||
src = builtins.fetchGit {
|
||||
url = "ssh://git@bscpm03.bsc.es/llvm-ompss/llvm-mono.git";
|
||||
ref = "master";
|
||||
rev = "ce47d99d2b2b968c87187cc7818cc5040b082d6c";
|
||||
};
|
||||
});
|
||||
|
||||
# Use mcxx from master
|
||||
mcxx = super.mcxxGit;
|
||||
|
||||
# We also need the instrumented version of TAMPI
|
||||
tampi = super.tampiGit.overrideAttrs (old: rec {
|
||||
version = src.shortRev;
|
||||
dontStrip = true;
|
||||
NIX_CFLAGS = "-O0 -g";
|
||||
src = builtins.fetchGit {
|
||||
url = "ssh://git@bscpm03.bsc.es/rarias/tampi.git";
|
||||
ref = "instrument";
|
||||
rev = "6e4294299bf761a1cc31f4181d9479cefa1c7f3e";
|
||||
};
|
||||
});
|
||||
|
||||
# We use the latest commit in master as src for cn6
|
||||
cn6Git = ((super.cn6.overrideAttrs (old: rec {
|
||||
version = src.shortRev;
|
||||
src = builtins.fetchGit {
|
||||
url = "ssh://git@bscpm03.bsc.es/rarias/cn6.git";
|
||||
ref = "master";
|
||||
rev = "1d23d01d60164b8641746d5a204128a9d31b9650";
|
||||
};
|
||||
})).override { enableTest = true; });
|
||||
|
||||
cn6 = self.cn6Git;
|
||||
});
|
||||
|
||||
|
||||
ctf = {nextStage, conf, ...}: let
|
||||
# Create the nanos6 configuration file
|
||||
nanos6ConfigFile = writeText "nanos6.toml" ''
|
||||
version.instrument = "ctf"
|
||||
turbo.enabled = false
|
||||
instrument.ctf.converter.enabled = false
|
||||
'' + optionalString (enableHWC) ''
|
||||
hardware_counters.papi.enabled = true
|
||||
hardware_counters.papi.counters = [
|
||||
"PAPI_TOT_INS", "PAPI_TOT_CYC",
|
||||
"PAPI_L1_TCM", "PAPI_L2_TCM", "PAPI_L3_TCM"
|
||||
]
|
||||
'';
|
||||
|
||||
in stages.exec {
|
||||
inherit nextStage;
|
||||
|
||||
# And use it
|
||||
env = ''
|
||||
export NANOS6_CONFIG=${nanos6ConfigFile}
|
||||
'';
|
||||
|
||||
post = ''
|
||||
rank=$SLURM_PROCID
|
||||
tracedir=trace_timediff_mpi
|
||||
|
||||
# Convert CTF trace to PRV
|
||||
${bsc'.cn6}/bin/cn6 $tracedir/$rank
|
||||
|
||||
# Merge on rank 0 only
|
||||
if [ $rank != 0 ]; then
|
||||
exit 0;
|
||||
fi
|
||||
|
||||
# Wait a bit for all ranks to finish the conversion
|
||||
sleep 5
|
||||
|
||||
# Run the merger
|
||||
${bsc'.cn6}/bin/merge-prv $tracedir
|
||||
|
||||
# We need some tools in the PATH
|
||||
export PATH="$PATH:${bsc'.babeltrace2}/bin:${pkgs.ministat}/bin"
|
||||
|
||||
${bsc'.cn6}/bin/sync-err.sh $tracedir
|
||||
'';
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
program = "${bsc'.cn6}/bin/timediff_mpi";
|
||||
argv = [ conf.cpusPerTask ];
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipeline ++ [ ctf exec ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,126 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
granul = range2 4 128;
|
||||
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
"garlic/mpi+isend+omp+task"
|
||||
] ++ optionals (enableExtended) [
|
||||
#"garlic/mpi+send+omp+fork" # Don't use fork for granularity
|
||||
"garlic/mpi+send+seq"
|
||||
"garlic/mpi+send+omp+task"
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+isend+oss+task"
|
||||
];
|
||||
|
||||
# Max. number of iterations
|
||||
iterations = [ 20 ] ++ optionals (enableExtended) [ 10 ];
|
||||
|
||||
nodes = [ 1 ] ++ optionals (enableExtended) (range2 2 16);
|
||||
};
|
||||
|
||||
# We use these auxiliary functions to assign different configurations
|
||||
# depending on the git branch.
|
||||
getGranul = branch: oldGranul:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 999999 else oldGranul;
|
||||
|
||||
getCpusPerTask = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 1 else hw.cpusPerSocket;
|
||||
|
||||
getNtasksPerNode = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then hw.cpusPerNode else hw.socketsPerNode;
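# A hedged illustration, not part of the original file: for the pure MPI
# branch these helpers select one CPU per rank, one rank per CPU, and an
# effectively unlimited granularity. The hw numbers below are hypothetical,
# used only to make the example concrete.
exampleSeqResources = let hw = { cpusPerSocket = 24; cpusPerNode = 48; socketsPerNode = 2; }; in {
  cpusPerTask = getCpusPerTask "garlic/mpi+send+seq" hw;     # = 1
  ntasksPerNode = getNtasksPerNode "garlic/mpi+send+seq" hw; # = 48 (hw.cpusPerNode)
  granul = getGranul "garlic/mpi+send+seq" 64;               # = 999999
};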
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
|
||||
expName = "creams-gran";
|
||||
unitName = "${expName}"+
|
||||
"-nodes${toString nodes}"+
|
||||
"-granul${toString granul}"+
|
||||
"-${gitBranch}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# Options for creams
|
||||
inherit (c) gitBranch nodes iterations;
|
||||
granul = getGranul gitBranch c.granul;
|
||||
nprocz = ntasksPerNode * nodes;
|
||||
|
||||
# Repeat the execution of each unit 10 times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
ntasksPerNode = getNtasksPerNode gitBranch hw;
|
||||
cpusPerTask = getCpusPerTask gitBranch hw;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = unique (stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
});
|
||||
|
||||
# Custom srun stage to copy the creams input dataset
|
||||
customSrun = {nextStage, conf, ...}:
|
||||
let
|
||||
input = bsc.garlic.apps.creamsInput.override {
|
||||
inherit (conf) gitBranch granul nprocz;
|
||||
};
|
||||
in
|
||||
stdexp.stdStages.srun {
|
||||
inherit nextStage conf;
|
||||
# Now we add some commands to execute before calling srun. These will
|
||||
# only run in one rank (the first in the list of allocated nodes)
|
||||
preSrun = ''
|
||||
cp -r ${input}/SodTubeBenchmark/* .
|
||||
chmod +w -R .
|
||||
sed -i '/maximum number of iterations/s/50/${toString conf.iterations}/' input.dat
|
||||
rm -f nanos6.toml
|
||||
'';
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
env = ''
|
||||
export NANOS6_CONFIG_OVERRIDE="version.dependencies=regions"
|
||||
'';
|
||||
|
||||
# Remove the restarts directory, as it is not needed and is huge
|
||||
post = ''
|
||||
rm -rf restarts || true
|
||||
'';
|
||||
};
|
||||
|
||||
# Creams program
|
||||
creams = {nextStage, conf, ...}: bsc.apps.creams.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipelineOverride {
|
||||
# Replace the standard srun stage with our own
|
||||
overrides = { srun = customSrun; };
|
||||
} ++ [ exec creams ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,132 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
|
||||
#nodes = range2 1 16;
|
||||
nodes = [ 16 ];
|
||||
sizeFactor = [ 1 2 4 8 16 32 ];
|
||||
granul = [ 1 2 4 8 16 ];
|
||||
|
||||
# Max. number of iterations
|
||||
iterations = [ 20 ] ++ optionals (enableExtended) [ 10 ];
|
||||
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
#"garlic/mpi+send+omp+fork"
|
||||
#"garlic/mpi+send+omp+task"
|
||||
#"garlic/mpi+send+seq"
|
||||
] ++ optionals (enableExtended) [
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+isend+omp+task"
|
||||
"garlic/mpi+isend+oss+task"
|
||||
];
|
||||
};
|
||||
|
||||
# We use these auxiliary functions to assign different configurations
|
||||
# depending on the git branch.
|
||||
getGranul = branch: oldGranul:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 999999 else oldGranul;
|
||||
|
||||
getCpusPerTask = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 1 else hw.cpusPerSocket;
|
||||
|
||||
getNtasksPerNode = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then hw.cpusPerNode else hw.socketsPerNode;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
|
||||
expName = "creams-granularity16";
|
||||
unitName = "${expName}"
|
||||
+ "-granul.${toString granul}"
|
||||
+ "-sf.${toString sizeFactor}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# Options for creams
|
||||
inherit (c) iterations gitBranch nodes sizeFactor;
|
||||
granul = getGranul gitBranch c.granul;
|
||||
nprocz = ntasksPerNode * nodes;
|
||||
baseSizePerCpu = 2;
|
||||
baseSize = baseSizePerCpu * cpusPerTask * ntasksPerNode * nodes;
|
||||
|
||||
nz = baseSize * sizeFactor;
|
||||
|
||||
# Repeat the execution of each unit 10 times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
ntasksPerNode = getNtasksPerNode gitBranch hw;
|
||||
cpusPerTask = getCpusPerTask gitBranch hw;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = unique (
|
||||
filter (c: !(c.granul == 1 && c.sizeFactor >= 32)) (stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
}));
|
||||
|
||||
# Custom srun stage to copy the creams input dataset
|
||||
customSrun = {nextStage, conf, ...}:
|
||||
let
|
||||
input = bsc.garlic.apps.creamsInput.override {
|
||||
inherit (conf) gitBranch granul nprocz nz;
|
||||
};
|
||||
in
|
||||
stdexp.stdStages.srun {
|
||||
inherit nextStage conf;
|
||||
# Now we add some commands to execute before calling srun. These will
|
||||
# only run in one rank (the first in the list of allocated nodes)
|
||||
preSrun = ''
|
||||
cp -r ${input}/SodTubeBenchmark/* .
|
||||
chmod +w -R .
|
||||
sed -i '/maximum number of iterations/s/50/${toString conf.iterations}/' input.dat
|
||||
rm -f nanos6.toml
|
||||
'';
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
env = ''
|
||||
export NANOS6_CONFIG_OVERRIDE="version.dependencies=regions"
|
||||
'';
|
||||
|
||||
# Remove the restarts directory, as it is not needed and is huge
|
||||
post = ''
|
||||
rm -rf restarts || true
|
||||
'';
|
||||
};
|
||||
|
||||
# Creams program
|
||||
creams = {nextStage, conf, ...}: bsc.apps.creams.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipelineOverride {
|
||||
# Replace the standard srun stage with our own
|
||||
overrides = { srun = customSrun; };
|
||||
} ++ [ exec creams ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,131 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
|
||||
#nodes = range2 1 16;
|
||||
nodes = [ 16 ];
|
||||
sizeFactor = range2 1 32;
|
||||
baseGranul = [ 1 ] ++ optionals (enableExtended) [ 2 4 8 ];
|
||||
|
||||
# Max. number of iterations
|
||||
iterations = [ 20 ] ++ optionals (enableExtended) [ 10 ];
|
||||
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
"garlic/mpi+send+omp+fork"
|
||||
#"garlic/mpi+send+omp+task"
|
||||
#"garlic/mpi+send+seq"
|
||||
] ++ (optionals (enableExtended) [
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+isend+omp+task"
|
||||
"garlic/mpi+isend+oss+task"
|
||||
]);
|
||||
};
|
||||
|
||||
# We use these auxiliary functions to assign different configurations
|
||||
# depending on the git branch.
|
||||
getGranul = branch: oldGranul:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 999999 else oldGranul;
|
||||
|
||||
getCpusPerTask = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 1 else hw.cpusPerSocket;
|
||||
|
||||
getNtasksPerNode = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then hw.cpusPerNode else hw.socketsPerNode;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
|
||||
expName = "creams-size";
|
||||
unitName = "${expName}"
|
||||
+ "-granul.${toString granul}"
|
||||
+ "-sf.${toString sizeFactor}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# Options for creams
|
||||
inherit (c) iterations gitBranch nodes sizeFactor baseGranul;
|
||||
granul = getGranul gitBranch (max 2 (baseGranul * sizeFactor));
|
||||
nprocz = ntasksPerNode * nodes;
|
||||
baseSizePerCpu = 2;
|
||||
baseSize = baseSizePerCpu * cpusPerTask * ntasksPerNode * nodes;
|
||||
|
||||
nz = baseSize * sizeFactor;
|
||||
|
||||
# Repeat the execution of each unit 10 times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
ntasksPerNode = getNtasksPerNode gitBranch hw;
|
||||
cpusPerTask = getCpusPerTask gitBranch hw;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = unique (stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
});
|
||||
|
||||
# Custom srun stage to copy the creams input dataset
|
||||
customSrun = {nextStage, conf, ...}:
|
||||
let
|
||||
input = bsc.garlic.apps.creamsInput.override {
|
||||
inherit (conf) gitBranch granul nprocz nz;
|
||||
};
|
||||
in
|
||||
stdexp.stdStages.srun {
|
||||
inherit nextStage conf;
|
||||
# Now we add some commands to execute before calling srun. These will
|
||||
# only run in one rank (the first in the list of allocated nodes)
|
||||
preSrun = ''
|
||||
cp -r ${input}/SodTubeBenchmark/* .
|
||||
chmod +w -R .
|
||||
sed -i '/maximum number of iterations/s/50/${toString conf.iterations}/' input.dat
|
||||
rm -f nanos6.toml
|
||||
'';
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
env = ''
|
||||
export NANOS6_CONFIG_OVERRIDE="version.dependencies=regions"
|
||||
'';
|
||||
|
||||
# Remove the restarts directory, as it is not needed and is huge
|
||||
post = ''
|
||||
rm -rf restarts || true
|
||||
'';
|
||||
};
|
||||
|
||||
# Creams program
|
||||
creams = {nextStage, conf, ...}: bsc.apps.creams.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipelineOverride {
|
||||
# Replace the standard srun stage with our own
|
||||
overrides = { srun = customSrun; };
|
||||
} ++ [ exec creams ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,126 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
|
||||
nodes = range2 1 16;
|
||||
granul = [ 16 ] ++ optionals (enableExtended) [ 8 32 ];
|
||||
|
||||
# Max. number of iterations
|
||||
iterations = [ 20 ] ++ optionals (enableExtended) [ 10 ];
|
||||
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
"garlic/mpi+send+omp+task"
|
||||
"garlic/mpi+send+seq"
|
||||
] ++ optionals (enableExtended) [
|
||||
"garlic/mpi+send+omp+fork"
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+isend+omp+task"
|
||||
"garlic/mpi+isend+oss+task"
|
||||
];
|
||||
};
|
||||
|
||||
# We use these auxiliary functions to assign different configurations
|
||||
# depending on the git branch.
|
||||
getGranul = branch: oldGranul:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 999999 else oldGranul;
|
||||
|
||||
getCpusPerTask = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then 1 else hw.cpusPerSocket;
|
||||
|
||||
getNtasksPerNode = branch: hw:
|
||||
if (branch == "garlic/mpi+send+seq")
|
||||
then hw.cpusPerNode else hw.socketsPerNode;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
|
||||
expName = "creams-ss";
|
||||
unitName = "${expName}"+
|
||||
"-nodes${toString nodes}"+
|
||||
"-granul${toString granul}"+
|
||||
"-${gitBranch}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# Options for creams
|
||||
inherit (c) iterations gitBranch nodes;
|
||||
granul = getGranul gitBranch c.granul;
|
||||
nprocz = ntasksPerNode * nodes;
|
||||
|
||||
# Repeat the execution of each unit 10 times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
ntasksPerNode = getNtasksPerNode gitBranch hw;
|
||||
cpusPerTask = getCpusPerTask gitBranch hw;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = unique (stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
});
|
||||
|
||||
# Custom srun stage to copy the creams input dataset
|
||||
customSrun = {nextStage, conf, ...}:
|
||||
let
|
||||
input = bsc.garlic.apps.creamsInput.override {
|
||||
inherit (conf) gitBranch granul nprocz;
|
||||
};
|
||||
in
|
||||
stdexp.stdStages.srun {
|
||||
inherit nextStage conf;
|
||||
# Now we add some commands to execute before calling srun. These will
|
||||
# only run in one rank (the first in the list of allocated nodes)
|
||||
preSrun = ''
|
||||
cp -r ${input}/SodTubeBenchmark/* .
|
||||
chmod +w -R .
|
||||
sed -i '/maximum number of iterations/s/50/${toString conf.iterations}/' input.dat
|
||||
rm -f nanos6.toml
|
||||
'';
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
env = ''
|
||||
export NANOS6_CONFIG_OVERRIDE="version.dependencies=regions"
|
||||
'';
|
||||
|
||||
# Remove the restarts directory, as it is not needed and is huge
|
||||
post = ''
|
||||
rm -rf restarts || true
|
||||
'';
|
||||
};
|
||||
|
||||
# Creams program
|
||||
creams = {nextStage, conf, ...}: bsc.apps.creams.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipelineOverride {
|
||||
# Replace the standard srun stage with our own
|
||||
overrides = { srun = customSrun; };
|
||||
} ++ [ exec creams ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,189 +0,0 @@
|
||||
# This file defines an experiment. It is designed as a function that takes
|
||||
# several parameters and returns a derivation. This derivation, when built, will
|
||||
# create several scripts that can be executed and launch the experiment.
|
||||
|
||||
# These are the inputs to this function: an attribute set which must contain the
|
||||
# following keys:
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
}:
|
||||
|
||||
# We import into the scope the content of the `lib` attribute, which
# contains useful functions like `toString`, which will be used later. This is
# handy to avoid writing `lib.toString`.
|
||||
|
||||
with lib;
|
||||
|
||||
# We also have some functions specific to the garlic benchmark which we import
|
||||
# as well. Take a look at the garlic/tools.nix file for more details.
|
||||
with garlicTools;
|
||||
|
||||
# The `let` keyword allows us to define some local variables which will be used
|
||||
# later. It works like the local variable concept in the C language.
|
||||
let
|
||||
|
||||
# Initial variable configuration: every attribute in this set contains lists
|
||||
# of options which will be used to compute the configuration of the units. The
|
||||
# cartesian product of all the values will be computed.
|
||||
varConf = {
|
||||
# In this case we will vary the columns and rows of the blocksize. This
|
||||
# configuration will create 3 x 2 = 6 units.
|
||||
cbs = [ 256 1024 4096 ];
|
||||
rbs = [ 512 1024 ];
|
||||
};
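# A minimal sketch, not part of the original file: the cartesian product that
# stdexp.buildConfigs is expected to perform over varConf can be written by
# hand with plain list functions (the binding name is hypothetical):
exampleProduct = concatMap (cbs: map (rbs: { inherit cbs rbs; }) [ 512 1024 ]) [ 256 1024 4096 ];
# exampleProduct contains the 6 combinations, from { cbs = 256; rbs = 512; }
# up to { cbs = 4096; rbs = 1024; }.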
|
||||
|
||||
# Generate the complete configuration for each unit: genConf is a function
|
||||
# that accepts the argument `c` and returns an attribute set. The attribute set
|
||||
# is formed by joining the configuration of the machine (which includes
|
||||
# details like the number of nodes or the architecture) and the configuration
|
||||
# that we define for our units.
|
||||
#
|
||||
# Notice the use of the `rec` keyword, which allows us to access the elements
|
||||
# of the set while it is being defined.
|
||||
genConf = c: targetMachine.config // rec {
|
||||
|
||||
# These attributes are user defined, and thus the user will need to handle
|
||||
# them manually. They are not read by the standard pipeline:
|
||||
|
||||
# Here we load the `hw` attribute from the machine configuration, so we can
# access, for example, the number of CPUs per socket as hw.cpusPerSocket.
|
||||
hw = targetMachine.config.hw;
|
||||
|
||||
# These options will be used by the heat app, but we write them here so they
|
||||
# are stored in the unit configuration.
|
||||
timesteps = 10;
|
||||
cols = 1024 * 16; # Columns
|
||||
rows = 1024 * 16; # Rows
|
||||
|
||||
# The blocksize is set to the values passed in the `c` parameter, which will
# be set to one of the configurations of the cartesian product, for
# example: cbs = 256 and rbs = 512.
# We can also write `inherit (c) cbs rbs`, which is a shorthand notation.
|
||||
cbs = c.cbs;
|
||||
rbs = c.rbs;
|
||||
|
||||
# The git branch is specified here as well, as it will be used when we
# specify the heat app.
|
||||
gitBranch = "garlic/tampi+isend+oss+task";
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
# These attributes are part of the standard pipeline, and are required for
|
||||
# each experiment. They are automatically recognized by the standard
|
||||
# execution pipeline.
|
||||
|
||||
# The experiment name:
|
||||
expName = "example-granularity-heat";
|
||||
|
||||
# The experimental unit name. It will be used to create a symlink in the
|
||||
# index (at /gpfs/projects/bsc15/garlic/$USER/index/) so you can easily find
|
||||
# the unit. Notice that the symlink is overwritten each time you run a unit
# with the same name.
|
||||
#
|
||||
# We use the toString function to convert the numeric value of cbs and rbs
|
||||
# to a string like: "example-granularity-heat.cbs-256.rbs-512"
|
||||
unitName = expName +
|
||||
".cbs-${toString cbs}" +
|
||||
".rbs-${toString rbs}";
|
||||
|
||||
# Repeat the execution of each unit a few times: this option is
|
||||
# automatically taken by the experiment, which will repeat the execution of
|
||||
# the program that many times. It is recommended to run the app at least 30
|
||||
# times, but we only used 10 here for demonstration purposes (as it will be
|
||||
# faster to run)
|
||||
loops = 10;
|
||||
|
||||
# Resources: here we configure the resources in the machine. The queue to be
|
||||
# used is `debug`, as it is the fastest for small jobs.
|
||||
qos = "debug";
|
||||
|
||||
# Then the number of MPI processes or tasks per node:
|
||||
ntasksPerNode = 1;
|
||||
|
||||
# And the number of nodes:
|
||||
nodes = 1;
|
||||
|
||||
# We use all the CPUs available in one socket to each MPI process or task.
|
||||
# Notice that the number of CPUs per socket is not specified directly, but
|
||||
# loaded from the configuration of the machine that will be used to run our
|
||||
# experiment. The affinity mask is set accordingly.
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
|
||||
# The time will limit the execution of the program in case of a deadlock
|
||||
time = "02:00:00";
|
||||
|
||||
# The job name will appear in the `squeue` and helps to identify what is
|
||||
# running. Currently it is set to the name of the unit.
|
||||
jobName = unitName;
|
||||
};
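# A hedged sketch, not part of the original file: applying genConf to one
# element of the cartesian product yields the complete unit configuration,
# for example (the binding name is hypothetical):
exampleUnitConf = genConf { cbs = 256; rbs = 512; };
# exampleUnitConf.unitName evaluates to "example-granularity-heat.cbs-256.rbs-512"
# and the set also carries the machine configuration merged in from
# targetMachine.config.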
|
||||
|
||||
# Using the `varConf` and our function `genConf` we compute a list of the
|
||||
# complete configuration of every unit.
|
||||
configs = stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
# Now that we have the list of configs, we need to write how that information
|
||||
# is used to run our program. In our case we will use some params such as the
|
||||
# number of rows and columns of the input problem or the blocksize as argv
|
||||
# values.
|
||||
|
||||
# The exec stage is used to run a program with some arguments.
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
# All stages require the nextStage attribute, which is passed as parameter.
|
||||
inherit nextStage;
|
||||
|
||||
# Then, we fill the argv array with the elements that will be used when
|
||||
# running our program. Notice that we load the attributes from the
|
||||
# configuration which is passed as argument as well.
|
||||
argv = [
|
||||
"--rows" conf.rows
|
||||
"--cols" conf.cols
|
||||
"--rbs" conf.rbs
|
||||
"--cbs" conf.cbs
|
||||
"--timesteps" conf.timesteps
|
||||
];
|
||||
|
||||
# This program requires a file called `heat.conf` in the current directory.
# To provide it, we run this small script in the `pre` hook, which simply runs
|
||||
# some commands before running the program. Notice that this command is
|
||||
# executed in every MPI task.
|
||||
pre = ''
|
||||
ln -sf ${nextStage}/etc/heat.conf heat.conf || true
|
||||
'';
|
||||
};
|
||||
|
||||
# The program stage is only used to specify which program we should run.
|
||||
# We use this stage to specify build-time parameters such as the gitBranch,
|
||||
# which will be used to fetch the source code. We use the `override` function
|
||||
# of the `bsc.garlic.apps.heat` derivation to change the input parameters.
|
||||
program = {nextStage, conf, ...}: bsc.garlic.apps.heat.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
# Other stages may be defined here, in case we want to do something
# additional, like running the program under `perf stat` or setting some
# environment variables.
|
||||
|
||||
# Once all the stages are defined, we build the pipeline array. The
|
||||
# `stdexp.stdPipeline` contains the standard pipeline stages, so we don't need
|
||||
# to specify them. We only specify how we run our program, and what program
|
||||
# exactly, by adding our `exec` and `program` stages:
|
||||
pipeline = stdexp.stdPipeline ++ [ exec program ];
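# An illustrative sketch, not the actual stdexp implementation: conceptually
# every stage receives the stage that follows it through `nextStage`, so the
# pipeline behaves like a right fold over the stage list. The helper name and
# the null terminal value are assumptions made only for this sketch.
foldPipelineSketch = conf:
  foldr (stage: next: stage { nextStage = next; inherit conf; }) null pipeline;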
|
||||
|
||||
# Then, we use the `configs` and the `pipeline` just defined inside the `in`
|
||||
# part, to build the complete experiment:
|
||||
in
|
||||
|
||||
# The `stdexp.genExperiment` function generates an experiment by calling every
|
||||
# stage of the pipeline with the different configs, and thus creating
|
||||
# different units. The result is the top level derivation, called the
# `trebuchet`: the script that, when executed, launches the complete
# experiment.
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,128 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, stages
|
||||
}:
|
||||
|
||||
with lib;
|
||||
|
||||
# Common definitions used by fwi experiments
|
||||
rec {
|
||||
|
||||
branchesWithoutBlocksize = [
|
||||
"garlic/mpi+send+omp+fork"
|
||||
"garlic/mpi+send+seq"
|
||||
];
|
||||
|
||||
# Returns true if the given config is not in the branchesWithoutBlocksize list
|
||||
needsBlocksize = c: ! any (e: c.gitBranch == e) branchesWithoutBlocksize;
|
||||
|
||||
# Set the blocksize to null for the branches that do not use it
|
||||
fixBlocksize = c: if (needsBlocksize c) then c
|
||||
else (c // { blocksize = null; });
|
||||
|
||||
# Generate the configs by filtering the unneeded blocksizes
|
||||
getConfigs = {varConf, genConf}:
|
||||
let
|
||||
allConfigs = stdexp.buildConfigs { inherit varConf genConf; };
|
||||
in
|
||||
# The unique function ensures that we only run one config for the fork
|
||||
# join branch, even if we have multiple blocksizes.
|
||||
unique (map fixBlocksize allConfigs);
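# A hedged example, not part of the original file: for a branch that takes no
# blocksize argument, fixBlocksize nullifies the blocksize, so configs that
# differ only in that field collapse under `unique`. The values are made up.
exampleFixedConfig = fixBlocksize { gitBranch = "garlic/mpi+send+seq"; blocksize = 8; };
# exampleFixedConfig == { gitBranch = "garlic/mpi+send+seq"; blocksize = null; }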
|
||||
|
||||
getResources = {gitBranch, hw}:
|
||||
if (gitBranch == "garlic/mpi+send+seq") then {
|
||||
cpusPerTask = 1;
|
||||
ntasksPerNode = hw.cpusPerNode;
|
||||
} else {
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = hw.socketsPerNode;
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}:
|
||||
let
|
||||
fwiParams = bsc.apps.fwi.params.override {
|
||||
inherit (conf) nx ny nz;
|
||||
};
|
||||
|
||||
ioFreq = if (conf.enableIO) then (conf.ioFreq or "-1") else "9999";
|
||||
|
||||
in stages.exec {
|
||||
inherit nextStage;
|
||||
|
||||
# FIXME: FWI should allow the I/O directory to be specified as a
|
||||
# parameter
|
||||
pre = ''
|
||||
FWI_SRUNDIR=$(pwd)
|
||||
FWI_EXECDIR="${conf.tempDir}/out/$GARLIC_USER/$GARLIC_UNIT/$GARLIC_RUN"
|
||||
FWI_PARAMS="${fwiParams}/fwi_params.txt"
|
||||
FWI_FREQ="${fwiParams}/fwi_frequencies.txt"
|
||||
|
||||
# Run fwi in a directory with fast local storage
|
||||
mkdir -p "$FWI_EXECDIR"
|
||||
cd "$FWI_EXECDIR"
|
||||
|
||||
# Only generate the input if we have CPU 0 (once per node)
|
||||
if grep -o 'Cpus_allowed_list:[[:space:]]0' \
|
||||
/proc/self/status > /dev/null;
|
||||
then
|
||||
FWI_CAPTAIN=1
|
||||
fi
|
||||
|
||||
if [ $FWI_CAPTAIN ]; then
|
||||
>&2 echo "generating the input dataset"
|
||||
${fwiParams}/bin/ModelGenerator -m "$FWI_PARAMS" "$FWI_FREQ"
|
||||
fi
|
||||
|
||||
echo >&2 "Current dir: $(pwd)"
|
||||
echo >&2 "Using PARAMS=$FWI_PARAMS and FREQ=$FWI_FREQ"
|
||||
'' + optionalString (conf.enableCTF) ''
|
||||
export NANOS6_CONFIG_OVERRIDE="version.instrument=ctf"
|
||||
'';
|
||||
|
||||
argv = [
|
||||
''"$FWI_PARAMS"''
|
||||
''"$FWI_FREQ"''
|
||||
] ++ optional (needsBlocksize conf) conf.blocksize ++ [
|
||||
"-1" # Fordward steps
|
||||
"-1" # Backward steps
|
||||
ioFreq # Write/read frequency
|
||||
];
|
||||
|
||||
post = ''
|
||||
# Go back to the garlic out directory
|
||||
cd "$FWI_SRUNDIR"
|
||||
|
||||
if [ $FWI_CAPTAIN ]; then
|
||||
'' + optionalString (conf.enableCTF) ''
|
||||
# FIXME: We should specify the path in the nanos6 config, so we
|
||||
# can avoid the race condition while they are generating the
|
||||
# traces
|
||||
sleep 3
|
||||
|
||||
# Save the traces
|
||||
mv "$FWI_EXECDIR"/trace_* .
|
||||
'' + ''
|
||||
rm -rf "$FWI_EXECDIR"
|
||||
fi
|
||||
'';
|
||||
};
|
||||
|
||||
apps = bsc.garlic.apps;
|
||||
|
||||
# FWI program
|
||||
program = {nextStage, conf, ...}:
|
||||
let
|
||||
fwiParams = bsc.apps.fwi.params.override {
|
||||
inherit (conf) nx ny nz;
|
||||
};
|
||||
in
|
||||
apps.fwi.solver.override {
|
||||
inherit (conf) gitBranch;
|
||||
inherit fwiParams;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipeline ++ [ exec program ];
|
||||
}
|
@ -1,71 +0,0 @@
|
||||
# Regular granularity test for FWI
|
||||
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
|
||||
inherit (targetMachine) fs;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
gitBranch = [ "garlic/tampi+isend+oss+task" ];
|
||||
blocksize = range2 1 256;
|
||||
n = [ {nx=100; nz=100; ny=8000; ntpn=2; nodes=1;} ];
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "fwi-granularity";
|
||||
unitName = "${expName}"
|
||||
+ "-bs${toString blocksize}"
|
||||
+ "-${toString gitBranch}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
inherit (c) gitBranch blocksize;
|
||||
inherit (c.n) nx ny nz ntpn nodes;
|
||||
|
||||
# Other FWI parameters
|
||||
enableIO = true;
|
||||
enableCTF = false;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = ntpn;
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
jobName = unitName;
|
||||
|
||||
# Enable permissions to write in the local storage
|
||||
extraMounts = [ fs.local.temp ];
|
||||
tempDir = fs.local.temp;
|
||||
|
||||
};
|
||||
|
||||
common = callPackage ./common.nix {};
|
||||
|
||||
inherit (common) getConfigs pipeline;
|
||||
|
||||
configs = getConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,75 +0,0 @@
|
||||
# Test FWI variants based on tasks with and without I/O.
|
||||
# This experiment solves a computationally expensive input which brings the
|
||||
# storage devices to saturation when I/O is enabled. The same input runs
|
||||
# without I/O for comparison purposes. Also, a range of block sizes
|
||||
# deemed as efficient according to the granularity experiment are
|
||||
# explored.
|
||||
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, callPackage
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
common = callPackage ./common.nix {};
|
||||
inherit (common) getConfigs getResources pipeline;
|
||||
|
||||
inherit (targetMachine) fs;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
gitBranch = [ "garlic/tampi+send+oss+task" ];
|
||||
blocksize = [ 1 2 4 8 ];
|
||||
n = [ {nx=500; nz=500; ny=16000;} ];
|
||||
nodes = if (enableExtended) then range2 1 16 else [ 4 ];
|
||||
enableIO = [ false true ];
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "fwi-io";
|
||||
unitName = "${expName}"
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-bs${toString blocksize}"
|
||||
+ (if (enableIO) then "-io1" else "-io0")
|
||||
+ "-${toString gitBranch}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
inherit (c) gitBranch blocksize enableIO nodes;
|
||||
inherit (c.n) nx ny nz;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
inherit (getResources { inherit gitBranch hw; })
|
||||
cpusPerTask ntasksPerNode;
|
||||
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
jobName = unitName;
|
||||
|
||||
enableCTF = false;
|
||||
|
||||
# Enable permissions to write in the local storage
|
||||
extraMounts = [ fs.local.temp ];
|
||||
tempDir = fs.local.temp;
|
||||
};
|
||||
|
||||
configs = getConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,91 +0,0 @@
|
||||
# This test compares a FWI version using poor data locality (+NOREUSE) versus
# the optimized version (used for all other experiments). A pseudocode
# snippet follows, illustrating the fundamental difference between versions.
#
# NOREUSE
# ----------------------
# for (y) for (x) for (z)
#   computA(v[y][x][z]);
# for (y) for (x) for (z)
#   computB(v[y][x][z]);
# for (y) for (x) for (z)
#   computC(v[y][x][z]);
#
# Optimized version
# ----------------------
# for (y) for (x) for (z)
#   computA(v[y][x][z]);
#   computB(v[y][x][z]);
#   computC(v[y][x][z]);
|
||||
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
|
||||
inherit (targetMachine) fs;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
gitBranch = [
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+send+oss+task+NOREUSE"
|
||||
];
|
||||
|
||||
blocksize = [ 1 2 4 8 ];
|
||||
|
||||
n = [ {nx=300; ny=2000; nz=300;} ]; # per half node
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "fwi-reuse";
|
||||
unitName = "${expName}"
|
||||
+ "-bs${toString blocksize}"
|
||||
+ "-${toString gitBranch}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
inherit (c) gitBranch blocksize;
|
||||
inherit (c.n) nx ny nz;
|
||||
|
||||
enableCTF = false;
|
||||
enableIO = true;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = 1;
|
||||
nodes = 1;
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
jobName = unitName;
|
||||
|
||||
# Enable permissions to write in the local storage
|
||||
extraMounts = [ fs.local.temp ];
|
||||
tempDir = fs.local.temp;
|
||||
};
|
||||
|
||||
common = callPackage ./common.nix {};
|
||||
|
||||
inherit (common) getConfigs pipeline;
|
||||
|
||||
configs = getConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,90 +0,0 @@
|
||||
# Strong scaling test for FWI variants based on tasks. This
|
||||
# experiment explores a range of block sizes deemed as efficient
|
||||
# according to the granularity experiment.
|
||||
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
common = callPackage ./common.nix {};
|
||||
inherit (common) getConfigs getResources pipeline;
|
||||
|
||||
inherit (targetMachine) fs;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
] ++ optionals (enableExtended) [
|
||||
"garlic/tampi+send+oss+task"
|
||||
"garlic/mpi+send+omp+task"
|
||||
"garlic/mpi+send+oss+task"
|
||||
"garlic/mpi+send+omp+fork"
|
||||
# FIXME: the pure MPI version has additional constraints on the
# number of planes in Y. For now it is disabled.
|
||||
#"garlic/mpi+send+seq"
|
||||
];
|
||||
|
||||
blocksize = if (enableExtended)
|
||||
then range2 1 16
|
||||
else [ 2 ];
|
||||
|
||||
n = [ { nx=100; ny=8000; nz=100; } ];
|
||||
|
||||
nodes = range2 1 16;
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: machineConfig // rec {
|
||||
expName = "fwi-ss";
|
||||
unitName = "${expName}"
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-bs${toString blocksize}"
|
||||
+ "-${toString gitBranch}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
inherit (c) gitBranch blocksize;
|
||||
inherit (c.n) nx ny nz;
|
||||
|
||||
# Other FWI parameters
|
||||
enableIO = true;
|
||||
enableCTF = false;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
inherit (getResources { inherit gitBranch hw; })
|
||||
cpusPerTask ntasksPerNode;
|
||||
|
||||
nodes = c.nodes;
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
jobName = unitName;
|
||||
|
||||
# Enable permissions to write in the local storage
|
||||
extraMounts = [ fs.local.temp ];
|
||||
tempDir = fs.local.temp;
|
||||
};
|
||||
|
||||
configs = getConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,175 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, writeText
|
||||
, enablePerf ? false
|
||||
, enableCTF ? false
|
||||
, enableHWC ? false
|
||||
, enableExtended ? false
|
||||
}:
|
||||
|
||||
# TODO: Finish HWC first
|
||||
assert (enableHWC == false);
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
# Initial variable configuration
|
||||
varConf = with bsc; {
|
||||
cbs = range2 32 4096;
|
||||
rbs = range2 32 4096;
|
||||
};
|
||||
|
||||
machineConfig = targetMachine.config;
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = with bsc; c: targetMachine.config // rec {
|
||||
expName = "heat";
|
||||
unitName = expName +
|
||||
".cbs-${toString cbs}" +
|
||||
".rbs-${toString rbs}";
|
||||
|
||||
inherit (machineConfig) hw;
|
||||
|
||||
# heat options
|
||||
timesteps = 10;
|
||||
cols = 1024 * 16; # Columns
|
||||
rows = 1024 * 16; # Rows
|
||||
inherit (c) cbs rbs;
|
||||
gitBranch = "garlic/tampi+isend+oss+task";
|
||||
|
||||
# Repeat the execution of each unit 10 times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
ntasksPerNode = 1;
|
||||
nodes = 1;
|
||||
time = "02:00:00";
|
||||
# Assign one socket to each task (only one process)
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
filterConfigs = c: let
|
||||
# Too small sizes lead to huge overheads
|
||||
goodSize = (c.cbs * c.rbs >= 1024);
|
||||
# When the extended units are not enabled, we only select those in
|
||||
# the diagonal.
|
||||
extended = if (enableExtended) then true
|
||||
else c.cbs == c.rbs;
|
||||
in
|
||||
goodSize && extended;
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = filter (filterConfigs) (stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
});
|
||||
|
||||
perf = {nextStage, conf, ...}: stages.perf {
|
||||
inherit nextStage;
|
||||
perfOptions = "stat -o .garlic/perf.csv -x , " +
|
||||
"-e cycles,instructions,cache-references,cache-misses";
|
||||
};
|
||||
|
||||
ctf = {nextStage, conf, ...}: let
|
||||
# Create the nanos6 configuration file
|
||||
nanos6ConfigFile = writeText "nanos6.toml" ''
|
||||
version.instrument = "ctf"
|
||||
turbo.enabled = false
|
||||
instrument.ctf.converter.enabled = false
|
||||
'' + optionalString (enableHWC) ''
|
||||
hardware_counters.papi.enabled = true
|
||||
hardware_counters.papi.counters = [
|
||||
"PAPI_TOT_INS", "PAPI_TOT_CYC",
|
||||
"PAPI_L1_TCM", "PAPI_L2_TCM", "PAPI_L3_TCM"
|
||||
]
|
||||
'';
|
||||
|
||||
in stages.exec {
|
||||
inherit nextStage;
|
||||
|
||||
# And use it
|
||||
env = ''
|
||||
export NANOS6_CONFIG=${nanos6ConfigFile}
|
||||
'';
|
||||
|
||||
# FIXME: We should run a hook *after* srun has ended, so we can
|
||||
# execute it in one process only (not in N ranks). This hack works
|
||||
# with one process only. Or be able to compute the name of the trace
|
||||
# directory so we can begin the conversion in parallel
|
||||
post = assert (conf.nodes * conf.ntasksPerNode == 1); ''
|
||||
tracedir=$(ls -d trace_* | head -1)
|
||||
echo "using tracedir=$tracedir"
|
||||
|
||||
offset=$(grep 'offset =' $tracedir/ctf/ust/uid/1000/64-bit/metadata | \
|
||||
grep -o '[0-9]*')
|
||||
echo "offset = $offset"
|
||||
|
||||
start_time=$(awk '/^start_time / {print $2}' stdout.log)
|
||||
end_time=$(awk '/^end_time / {print $2}' stdout.log)
|
||||
|
||||
begin=$(awk "BEGIN{print $start_time*1e9 - $offset}")
|
||||
end=$(awk "BEGIN{print $end_time*1e9 - $offset}")
|
||||
|
||||
echo "only events between $begin and $end"
|
||||
|
||||
${bsc.cn6}/bin/cn6 -s $tracedir
|
||||
|
||||
${bsc.cn6}/bin/cut $begin $end < $tracedir/prv/trace.prv |\
|
||||
${bsc.cn6}/bin/hcut 1 ${toString conf.cpusPerTask} \
|
||||
> $tracedir/prv/trace-cut.prv
|
||||
|
||||
${bsc.cn6}/bin/dur 6400025 0 < $tracedir/prv/trace-cut.prv |\
|
||||
awk '{s+=$1} END {print s}' >> .garlic/time_mode_dead.csv &
|
||||
|
||||
${bsc.cn6}/bin/dur 6400025 1 < $tracedir/prv/trace-cut.prv |\
|
||||
awk '{s+=$1} END {print s}' >> .garlic/time_mode_runtime.csv &
|
||||
|
||||
${bsc.cn6}/bin/dur 6400025 3 < $tracedir/prv/trace-cut.prv |\
|
||||
awk '{s+=$1} END {print s}' >> .garlic/time_mode_task.csv &
|
||||
|
||||
wait
|
||||
|
||||
# Remove the traces at the end, as they are huge
|
||||
rm -rf $tracedir
|
||||
'';
|
||||
# TODO: To enable HWC we need to first add a taskwait before the
|
||||
# first get_time() measurement, otherwise we get the HWC of the
|
||||
# main task, which will be huge.
|
||||
};
|
||||
|
||||
exec = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
argv = [
|
||||
"--rows" conf.rows
|
||||
"--cols" conf.cols
|
||||
"--rbs" conf.rbs
|
||||
"--cbs" conf.cbs
|
||||
"--timesteps" conf.timesteps
|
||||
];
|
||||
|
||||
# The next stage is the program
|
||||
env = ''
|
||||
ln -sf ${nextStage}/etc/heat.conf heat.conf || true
|
||||
'';
|
||||
};
|
||||
|
||||
program = {nextStage, conf, ...}: bsc.garlic.apps.heat.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipeline ++
|
||||
(optional enablePerf perf) ++
|
||||
(optional enableCTF ctf) ++
|
||||
[ exec program ];
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,73 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, stages
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
|
||||
rec {
|
||||
|
||||
checkInput = {nextStage, conf, ...}: stages.exec {
|
||||
inherit nextStage;
|
||||
pre = optionalString (! (conf.enableGen or false)) (
|
||||
let
|
||||
gen = callPackage ./gen.nix { };
|
||||
inputTre = gen.getInputTre conf;
|
||||
exp = inputTre.experiment;
|
||||
unit = elemAt exp.units 0;
|
||||
expName = baseNameOf (toString exp);
|
||||
unitName = baseNameOf (toString unit);
|
||||
inputPath = "$GARLIC_OUT/${expName}/${unitName}/1";
|
||||
in
|
||||
''
|
||||
# Force the generation of the input resultTree as a dependency:
|
||||
# ${toString inputTre.result}
|
||||
|
||||
# Ensure the input dataset is still available
|
||||
export HPCG_INPUT_PATH="${toString inputPath}"
|
||||
|
||||
if [ ! -e "$HPCG_INPUT_PATH" ]; then
|
||||
>&2 echo "Missing input dataset: $HPCG_INPUT_PATH"
|
||||
exit 1
|
||||
fi
|
||||
''
|
||||
);
|
||||
};
|
||||
|
||||
getSizePerTask = cpusPerTask: sizePerCpu:
|
||||
mapAttrs (name: val: val * cpusPerTask) sizePerCpu;
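# A small sketch, not part of the original file: getSizePerTask scales every
# axis of the per-CPU size by the number of CPUs per task. The numbers below
# are hypothetical:
exampleSizePerTask = getSizePerTask 24 { x = 2; y = 2; z = 16; };
# = { x = 48; y = 48; z = 384; }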
|
||||
|
||||
exec = {nextStage, conf, ...}: let
|
||||
actionArg = if (conf.enableGen or false)
|
||||
then "--store=."
|
||||
else "--load=\"$HPCG_INPUT_PATH\"";
|
||||
|
||||
in stages.exec {
|
||||
inherit nextStage;
|
||||
argv = [
|
||||
"--nx=${toString conf.sizePerTask.x}"
|
||||
"--ny=${toString conf.sizePerTask.y}"
|
||||
"--nz=${toString conf.sizePerTask.z}"
|
||||
"--npx=${toString conf.nprocs.x}"
|
||||
"--npy=${toString conf.nprocs.y}"
|
||||
"--npz=${toString conf.nprocs.z}"
|
||||
"--nblocks=${toString conf.nblocks}"
|
||||
"--ncomms=${toString conf.ncomms}"
|
||||
# The input symlink is generated by the input stage, which is generated by
|
||||
# the genInput function.
|
||||
actionArg
|
||||
] ++ optional (conf.disableAspectRatio or false) "--no-ar=1";
|
||||
};
|
||||
|
||||
program = {nextStage, conf, ...}: bsc.apps.hpcg.override {
|
||||
inherit (conf) gitBranch;
|
||||
};
|
||||
|
||||
pipeline = stdexp.stdPipeline ++ [
|
||||
checkInput
|
||||
exec program ];
|
||||
}
|
@ -1,52 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
rec {
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "hpcg-gen";
|
||||
unitName = expName
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-spt.z${toString sizePerTask.z}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# Inherit options from the current conf
|
||||
inherit (c) sizePerTask nprocs disableAspectRatio gitBranch
|
||||
cpusPerTask ntasksPerNode nodes;
|
||||
|
||||
# The nblocks and ncomms values from c are ignored
|
||||
ncomms = 1;
|
||||
nblocks = 1;
|
||||
|
||||
# We only need one run
|
||||
loops = 1;
|
||||
|
||||
# Generate the input
|
||||
enableGen = true;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
common = callPackage ./common.nix {};
|
||||
|
||||
getInputTre = conf: stdexp.genExperiment {
|
||||
configs = [ (genConf conf) ];
|
||||
pipeline = common.pipeline;
|
||||
};
|
||||
}
|
@ -1,76 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
common = callPackage ./common.nix { };
|
||||
|
||||
inherit (common) pipeline getSizePerTask;
|
||||
|
||||
maxNodes = 16;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
blocksPerCpu = range2 0.5 256;
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
];
|
||||
};
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "hpcg-granularity";
|
||||
unitName = "${expName}"
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-bpc${toString blocksPerCpu}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
inherit maxNodes;
|
||||
sizeFactor = maxNodes / nodes;
|
||||
|
||||
# hpcg options
|
||||
inherit (c) blocksPerCpu gitBranch;
|
||||
baseSizeZ = 16;
|
||||
nodes = 1;
|
||||
totalTasks = ntasksPerNode * nodes;
|
||||
sizePerCpu = {
|
||||
x = 2;
|
||||
y = 2;
|
||||
z = baseSizeZ * sizeFactor;
|
||||
};
|
||||
sizePerTask = getSizePerTask cpusPerTask sizePerCpu;
|
||||
nprocs = { x=1; y=1; z=totalTasks; };
|
||||
nblocks = floatTruncate (blocksPerCpu * cpusPerTask);
|
||||
ncomms = 1;
|
||||
disableAspectRatio = true;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 3;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = hw.socketsPerNode;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,78 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
, enableExtended ? false
|
||||
, enableStrong ? true
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
common = callPackage ./common.nix { };
|
||||
|
||||
inherit (common) pipeline getSizePerTask;
|
||||
|
||||
maxNodes = 16;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
nodes = range2 1 maxNodes;
|
||||
baseSizeZ = if (enableExtended) then [ 8 16 ] else [ 16 ];
|
||||
blocksPerCpu = if (enableExtended) then range2 1 8 else [ 4 ];
|
||||
gitBranch = [
|
||||
"garlic/tampi+isend+oss+task"
|
||||
];
|
||||
};
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = if (enableStrong) then "hpcg-ss" else "hpcg-ws";
|
||||
unitName = "${expName}"
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-bpc${toString blocksPerCpu}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
inherit maxNodes;
|
||||
sizeFactor = if (enableStrong) then maxNodes / nodes else 1;
|
||||
|
||||
# hpcg options
|
||||
inherit (c) nodes blocksPerCpu gitBranch;
|
||||
totalTasks = ntasksPerNode * nodes;
|
||||
sizePerCpu = {
|
||||
x = 2;
|
||||
y = 2;
|
||||
z = c.baseSizeZ * sizeFactor;
|
||||
};
|
||||
sizePerTask = getSizePerTask cpusPerTask sizePerCpu;
|
||||
nprocs = { x=1; y=1; z=totalTasks; };
|
||||
nblocks = blocksPerCpu * cpusPerTask;
|
||||
ncomms = 1;
|
||||
disableAspectRatio = true;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 10;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = hw.socketsPerNode;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,65 +0,0 @@
|
||||
{
|
||||
stdenv
|
||||
, lib
|
||||
, stdexp
|
||||
, bsc
|
||||
, targetMachine
|
||||
, stages
|
||||
, garlicTools
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
with lib;
|
||||
with garlicTools;
|
||||
|
||||
let
|
||||
common = callPackage ./common.nix { };
|
||||
|
||||
inherit (common) pipeline getSizePerTask;
|
||||
|
||||
# Initial variable configuration
|
||||
varConf = {
|
||||
sizeFactor = [ 1 2 4 8 16 32 ];
|
||||
};
|
||||
|
||||
# Generate the complete configuration for each unit
|
||||
genConf = c: targetMachine.config // rec {
|
||||
expName = "hpcg-size";
|
||||
unitName = "${expName}"
|
||||
+ "-nodes${toString nodes}"
|
||||
+ "-sf${toString sizeFactor}";
|
||||
|
||||
inherit (targetMachine.config) hw;
|
||||
|
||||
# hpcg options
|
||||
inherit (c) sizeFactor;
|
||||
gitBranch = "garlic/tampi+isend+oss+task";
|
||||
nodes = 16;
|
||||
totalTasks = ntasksPerNode * nodes;
|
||||
sizePerCpu = { x = 2; y = 2; z = 4 * sizeFactor; };
|
||||
sizePerTask = getSizePerTask cpusPerTask sizePerCpu;
|
||||
nprocs = { x=1; y=1; z=totalTasks; };
|
||||
blocksPerCpu = 4;
|
||||
nblocks = blocksPerCpu * cpusPerTask;
|
||||
ncomms = 1;
|
||||
disableAspectRatio = true;
|
||||
|
||||
# Repeat the execution of each unit several times
|
||||
loops = 5;
|
||||
|
||||
# Resources
|
||||
qos = "debug";
|
||||
time = "02:00:00";
|
||||
cpusPerTask = hw.cpusPerSocket;
|
||||
ntasksPerNode = hw.socketsPerNode;
|
||||
jobName = unitName;
|
||||
};
|
||||
|
||||
# Compute the array of configurations
|
||||
configs = stdexp.buildConfigs {
|
||||
inherit varConf genConf;
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
stdexp.genExperiment { inherit configs pipeline; }
|
@ -1,108 +0,0 @@
|
||||
{
|
||||
super
|
||||
, self
|
||||
, bsc
|
||||
, garlic
|
||||
, callPackage
|
||||
}:
|
||||
|
||||
{
|
||||
nbody = rec {
|
||||
granularity = callPackage ./nbody/granularity.nix { };
|
||||
ss = callPackage ./nbody/ss.nix { };
|
||||
numa = callPackage ./nbody/numa.nix { };
|
||||
};
|
||||
|
||||
saiph = {
|
||||
granularity = callPackage ./saiph/granularity.nix { };
|
||||
ss = callPackage ./saiph/ss.nix { };
|
||||
};
|
||||
|
||||
creams = rec {
|
||||
ss = callPackage ./creams/ss.nix { };
|
||||
granularity = callPackage ./creams/granularity.nix { };
|
||||
size = callPackage ./creams/size.nix { };
|
||||
granularity16 = callPackage ./creams/granularity16.nix { };
|
||||
|
||||
# These experiments are the extended versions of the previous
|
||||
# ones. We split them so we can keep a reasonable execution time
|
||||
big.granularity = granularity.override { enableExtended = true; };
|
||||
big.ss = ss.override { enableExtended = true; };
|
||||
};
|
||||
|
||||
hpcg = rec {
|
||||
granularity = callPackage ./hpcg/granularity.nix { };
|
||||
ss = callPackage ./hpcg/scaling.nix { };
|
||||
ws = ss.override { enableStrong=false; };
|
||||
size = callPackage ./hpcg/size.nix { };
|
||||
|
||||
big.ss = ss.override { enableExtended = true; };
|
||||
};
|
||||
|
||||
heat = rec {
|
||||
granularity = callPackage ./heat/granularity.nix { };
|
||||
cache = granularity.override { enablePerf = true; };
|
||||
ctf = granularity.override { enableCTF = true; };
|
||||
};
|
||||
|
||||
bigsort = rec {
|
||||
genseq = callPackage ./bigsort/genseq.nix {
|
||||
n = toString (1024 * 1024 * 1024 / 8); # 1 GB input size
|
||||
dram = toString (1024 * 1024 * 1024); # 1 GB chunk
|
||||
};
|
||||
|
||||
shuffle = callPackage ./bigsort/shuffle.nix {
|
||||
inputTre = genseq;
|
||||
n = toString (1024 * 1024 * 1024 / 8); # 1 GB input size
|
||||
dram = toString (1024 * 1024 * 1024); # 1 GB chunk
|
||||
inherit (bsc.garlic.pp) resultFromTrebuchet;
|
||||
};
|
||||
|
||||
sort = callPackage ./bigsort/sort.nix {
|
||||
inputTre = shuffle;
|
||||
inherit (bsc.garlic.pp) resultFromTrebuchet;
|
||||
removeOutput = false;
|
||||
};
|
||||
};
|
||||
|
||||
slurm = {
|
||||
cpu = callPackage ./slurm/cpu.nix { };
|
||||
sigsegv = callPackage ./slurm/sigsegv.nix { };
|
||||
exit1 = callPackage ./slurm/exit1.nix { };
|
||||
};
|
||||
|
||||
lulesh = {
|
||||
test = callPackage ./lulesh/test.nix { };
|
||||
};
|
||||
|
||||
fwi = rec {
|
||||
granularity = callPackage ./fwi/granularity.nix { };
|
||||
ss = callPackage ./fwi/ss.nix { };
|
||||
reuse = callPackage ./fwi/reuse.nix { };
|
||||
io = callPackage ./fwi/io.nix { };
|
||||
|
||||
# Extended experiments
|
||||
big.io = io.override { enableExtended = true; };
|
||||
};
|
||||
|
||||
osu = rec {
|
||||
latency = callPackage ./osu/latency.nix { };
|
||||
latencyShm = latency.override { interNode = false; };
|
||||
latencyMt = latency.override { enableMultithread = true; };
|
||||
latencyMtShm = latency.override { enableMultithread = true; interNode = true; };
|
||||
bw = callPackage ./osu/bw.nix { };
|
||||
impi = callPackage ./osu/impi.nix { };
|
||||
bwShm = bw.override { interNode = false; };
|
||||
mtu = callPackage ./osu/mtu.nix { };
|
||||
eager = callPackage ./osu/eager.nix { };
|
||||
};
|
||||
|
||||
examples = {
|
||||
granularity = callPackage ./examples/granularity.nix { };
|
||||
};
|
||||
|
||||
cn6 = {
|
||||
timediff = callPackage ./cn6/timediff.nix { };
|
||||
nbody = callPackage ./cn6/nbody.nix { };
|
||||
};
|
||||
}
|
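The index above relies throughout on the `.override` attribute that `callPackage` attaches to each experiment (the `big.*` and `*Shm` variants). The following is a minimal sketch, not part of the tree, of how that mechanism behaves; the `makeOverridable` stand-in and the `mkLatency` function are simplified placeholders, not the real bscpkgs code.

# override-sketch.nix -- illustrative only; evaluate with: nix-instantiate --eval --strict override-sketch.nix
let
  # Simplified stand-in for lib.makeOverridable, the mechanism callPackage
  # uses to attach `.override` to a result.
  makeOverridable = f: args:
    f args // { override = newArgs: makeOverridable f (args // newArgs); };

  # Stand-in for an experiment such as ./osu/latency.nix, taking the same
  # kind of boolean flags used in the index above.
  mkLatency = { interNode ? true, enableMultithread ? false }:
    { name = "osu-latency"
        + (if enableMultithread then "-mt" else "")
        + (if interNode then "" else "-shm");
    };

  latency = makeOverridable mkLatency { };
in
  {
    base = latency.name;                                  # "osu-latency"
    shm = (latency.override { interNode = false; }).name; # "osu-latency-shm"
  }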
@ -1,85 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with lib;

let

  # Initial variable configuration
  varConf = with bsc; {
    gitBranch = [
      "garlic/mpi+isend+seq"
      "garlic/tampi+isend+oss+taskloop"
      "garlic/tampi+isend+oss+taskfor"
      "garlic/tampi+isend+oss+task"
      "garlic/mpi+isend+seq"
      "garlic/mpi+isend+oss+task"
      "garlic/mpi+isend+omp+fork"
      "garlic/tampi+isend+oss+taskloopfor"
    ];
  };

  machineConfig = targetMachine.config;

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    expName = "lulesh";
    unitName = "${expName}-test";
    inherit (machineConfig) hw;

    # options
    iterations = 10;
    size = 30;
    gitBranch = c.gitBranch;

    # Repeat the execution of each unit several times
    loops = 10;

    # Resources
    qos = "debug";
    cpusPerTask = hw.cpusPerSocket;
    ntasksPerNode = 1;
    nodes = 1;
    time = "02:00:00";
    jobName = unitName;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  /* Lulesh options:
     -q              : quiet mode - suppress all stdout
     -i <iterations> : number of cycles to run
     -s <size>       : length of cube mesh along side
     -r <numregions> : Number of distinct regions (def: 11)
     -b <balance>    : Load balance between regions of a domain (def: 1)
     -c <cost>       : Extra cost of more expensive regions (def: 1)
     -f <numfiles>   : Number of files to split viz dump into (def: (np+10)/9)
     -p              : Print out progress
     -v              : Output viz file (requires compiling with -DVIZ_MESH
     -h              : This message
  */
  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-i" iterations "-s" size ];
  };

  apps = bsc.garlic.apps;

  program = {nextStage, conf, ...}: apps.lulesh.override {
    inherit (conf) gitBranch;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
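The exec stage above only passes the iteration count and mesh size to LULESH, leaving the remaining options at their defaults. A minimal sketch, not part of the tree, of the command line one unit ends up with; the binary name is a placeholder, since the actual program path comes from the previous pipeline stage.

# argv-sketch.nix -- illustrative only.
let
  conf = { iterations = 10; size = 30; };            # values from genConf
  argv = [ "-i" conf.iterations "-s" conf.size ];
in
  # Rendered as a shell command line (program name is hypothetical):
  "lulesh2.0 " + builtins.concatStringsSep " " (map toString argv)
  # "lulesh2.0 -i 10 -s 30"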
@ -1,37 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, stages
, numactl
, garlicTools
}:

with lib;
with garlicTools;

rec {
  getConfigs = {varConf, genConf}: stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: stages.exec
  (
    {
      inherit nextStage;
      argv = with conf; [ "-t" timesteps "-p" particles ];
    }
    # Wrap the program with numactl to apply the interleave memory policy
    # when requested (default is false)
    // optionalAttrs (conf.interleaveMem or false) {
      program = "${numactl}/bin/numactl --interleave=all ${stageProgram nextStage}";
    }
  );

  program = {nextStage, conf, ...}: bsc.garlic.apps.nbody.override {
    inherit (conf) blocksize gitBranch;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];
}
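The conditional wrapping above is plain attribute-set merging: `// optionalAttrs cond { ... }` replaces the `program` attribute only when the condition holds. A small self-contained sketch, not part of the tree, with a placeholder store path:

# numa-wrap-sketch.nix -- illustrative only.
let
  optionalAttrs = cond: attrs: if cond then attrs else { };

  mkExec = conf:
    { program = "/nix/store/...-nbody/bin/nbody"; }   # placeholder path
    // optionalAttrs (conf.interleaveMem or false) {
      program = "numactl --interleave=all /nix/store/...-nbody/bin/nbody";
    };
in
  {
    plain = (mkExec { }).program;
    interleaved = (mkExec { interleaveMem = true; }).program;
  }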
@ -1,60 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
, callPackage
}:

with lib;
with garlicTools;

let
  # Initial variable configuration
  varConf = {
    blocksize = range2 64 2048;
    gitBranch = [
      # "garlic/mpi+send+oss+task"
      # "garlic/tampi+send+oss+task"
      "garlic/tampi+isend+oss+task"
    ];
  };

  # Generate the complete configuration for each unit
  genConf = c: targetMachine.config // rec {
    hw = targetMachine.config.hw;
    particles = 8 * 1024 * hw.cpusPerSocket;
    timesteps = 10;
    blocksize = c.blocksize;
    gitBranch = c.gitBranch;

    expName = "nbody-granularity";
    unitName = expName +
      "-${toString gitBranch}" +
      "-bs${toString blocksize}";

    loops = 10;

    qos = "debug";
    cpusPerTask = hw.cpusPerSocket;
    ntasksPerNode = hw.socketsPerNode;
    nodes = 1;
    time = "02:00:00";
    jobName = unitName;
  };

  common = callPackage ./common.nix {};

  inherit (common) getConfigs pipeline;

  configs = getConfigs {
    inherit varConf genConf;
  };

in

stdexp.genExperiment { inherit configs pipeline; }
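The blocksize sweep above comes from `range2 64 2048`, a garlicTools helper whose definition is not part of this diff; from its use here it behaves as a power-of-two range. The stand-in below is a sketch under that assumption, not the real helper.

# range2-sketch.nix -- illustrative only.
let
  pow = b: e: if e == 0 then 1 else b * pow b (e - 1);
  # Assumed behaviour: powers of two between start and stop, inclusive.
  range2 = start: stop:
    builtins.filter (x: x >= start && x <= stop)
      (builtins.genList (n: pow 2 n) 32);
in
  range2 64 2048   # [ 64 128 256 512 1024 2048 ]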
@ -1,64 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
, numactl
, callPackage
}:

with lib;
with garlicTools;

let
  # Initial variable configuration
  varConf = {
    blocksize = range2 256 1024;
    gitBranch = [ "garlic/tampi+send+oss+task" ];
    attachToSocket = [ true false ];
    interleaveMem = [ true false ];
  };

  # Generate the complete configuration for each unit
  genConf = c: targetMachine.config // rec {
    hw = targetMachine.config.hw;
    particles = 4 * 1024 * hw.cpusPerSocket;
    timesteps = 10;

    inherit (c) attachToSocket interleaveMem gitBranch blocksize;

    expName = "nbody-numa";
    unitName = expName +
      "-${toString gitBranch}" +
      "-bs.${toString blocksize}" +
      "-tpn.${toString ntasksPerNode}" +
      "-interleave.${if (interleaveMem) then "yes" else "no"}";

    loops = 10;

    qos = "debug";
    cpusPerTask = if (attachToSocket)
      then hw.cpusPerSocket
      else hw.cpusPerNode;
    ntasksPerNode = if (attachToSocket)
      then hw.socketsPerNode
      else 1;
    nodes = 4;
    time = "02:00:00";
    jobName = unitName;
  };

  common = callPackage ./common.nix {};

  inherit (common) getConfigs pipeline;

  configs = getConfigs {
    inherit varConf genConf;
  };

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,60 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
}:

with lib;
with garlicTools;

let

  # Initial variable configuration
  varConf = {
    blocksize = [ 128 256 512 1024 2048 4096 ];
  };

  # Generate the complete configuration for each unit
  genConf = c: targetMachine.config // rec {
    hw = targetMachine.config.hw;
    particles = 4096 * hw.cpusPerSocket;
    timesteps = 10;
    blocksize = c.blocksize;

    gitBranch = "garlic/oss+task";
    expName = "nbody-granularity";
    unitName = expName + "-bs${toString blocksize}";

    loops = 30;

    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "02:00:00";
    cpusPerTask = hw.cpusPerSocket;
    jobName = unitName;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: stages.exec {
    inherit nextStage;
    argv = [ "-t" conf.timesteps "-p" conf.particles ];
  };

  program = {nextStage, conf, ...}: with conf; bsc.garlic.apps.nbody.override {
    inherit (conf) blocksize gitBranch;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,104 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools

# Options for the experiment
, enableCTF ? false
# Number of cases tested
, steps ? 7
# nbody iterations
, timesteps ? 10
# nbody total number of particles
, particles ? null
, gitBranch ? "garlic/tampi+send+oss+task"
, loops ? 10
, nblocks0 ? null
}:

with lib;
with garlicTools;

let

  defaultOpt = var: def: if (var != null) then var else def;

  machineConfig = targetMachine.config;
  inherit (machineConfig) hw;

  # Initial variable configuration
  varConf = with bsc; {
    # Create a list with values 2^n with n from 0 to (steps - 1) inclusive
    i = expRange 2 0 (steps - 1);
  };

  # Generate the complete configuration for each unit
  genConf = var: fix (self: var // targetMachine.config // {
    expName = "nbody-nblocks";
    unitName = "${self.expName}${toString self.nblocks}";

    inherit (machineConfig) hw;

    # nbody options
    particles = defaultOpt particles (4096 * self.hw.cpusPerSocket);
    nblocks0 = defaultOpt nblocks0 (self.hw.cpusPerSocket / 2);
    # The number of blocks is then computed from the multiplier "i" and
    # the initial number of blocks "nblocks0"
    nblocks = self.i * self.nblocks0;

    totalTasks = self.ntasksPerNode * self.nodes;
    particlesPerTask = self.particles / self.totalTasks;
    blocksize = self.particlesPerTask / self.nblocks;
    cc = bsc.icc;
    mpi = bsc.impi;
    cflags = "-g";
    inherit timesteps gitBranch enableCTF loops;

    # Resources
    qos = "debug";
    cpusPerTask = self.hw.cpusPerSocket;
    ntasksPerNode = self.hw.socketsPerNode;
    nodes = 1;
    jobName = self.unitName;
  });

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  perf = {nextStage, conf, ...}: with conf; stages.perf {
    inherit nextStage;
    perfOptions = "record --call-graph dwarf -o \\$\\$.perf";
  };

  ctf = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    env = optionalString (conf.enableCTF) ''
      export NANOS6_CONFIG_OVERRIDE="version.instrument=ctf,\
      instrument.ctf.converter.enabled=false"
    '';
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-t" timesteps "-p" particles ];
  };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
      customPkgs.apps.nbody.override ({
        inherit cc blocksize mpi gitBranch cflags;
      });

  pipeline = stdexp.stdPipeline ++ [ ctf exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
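In the unit generator above, the per-task blocksize follows from three steps: the multiplier `i` sweeps 2^n values, `nblocks = i * nblocks0`, and `blocksize = particles / totalTasks / nblocks`. The sketch below, not part of the tree, reproduces that derivation for an assumed 2-socket, 24-CPUs-per-socket node; `expRange` is a garlicTools helper, so a stand-in generating the same 2^n multipliers is used.

# nblocks-sketch.nix -- illustrative only; evaluate with nix-instantiate --eval --strict.
let
  hw = { cpusPerSocket = 24; socketsPerNode = 2; };   # assumed node layout
  steps = 7;
  pow = b: e: if e == 0 then 1 else b * pow b (e - 1);
  expRange = base: lo: hi: builtins.genList (n: pow base (lo + n)) (hi - lo + 1);

  particles = 4096 * hw.cpusPerSocket;        # 98304
  nblocks0 = hw.cpusPerSocket / 2;            # 12
  totalTasks = hw.socketsPerNode * 1;         # 1 node, one task per socket
  particlesPerTask = particles / totalTasks;  # 49152

  blocksizeFor = i: particlesPerTask / (i * nblocks0);
in
  map blocksizeFor (expRange 2 0 (steps - 1))
  # [ 4096 2048 1024 512 256 128 64 ]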
@ -1,110 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools

# Options for the experiment
, enableCTF ? false
# Number of cases tested
, steps ? 6
# nbody iterations
, timesteps ? 10
# nbody total number of particles
, particles ? null
, loops ? 10
, nblocks0 ? null
}:

with lib;
with garlicTools;

let

  defaultOpt = var: def: if (var != null) then var else def;

  machineConfig = targetMachine.config;
  inherit (machineConfig) hw;

  # Initial variable configuration
  varConf = with bsc; {
    # Create a list with values 2^n with n from 0 to (steps - 1) inclusive
    i = expRange 2 0 (steps - 1);
    nodes = [ 1 2 4 8 16 ];
    gitBranch = [
      "garlic/tampi+send+oss+task"
      "garlic/tampi+isend+oss+task"
      "garlic/mpi+send+oss+task"
    ];
  };

  # Generate the complete configuration for each unit
  genConf = var: fix (self: var // targetMachine.config // {
    expName = "nbody-scaling";
    unitName = self.expName +
      "-nb${toString self.nblocks}" +
      "-nodes${toString self.nodes}";

    inherit (machineConfig) hw;

    # nbody options
    particles = defaultOpt particles (4096 * self.hw.cpusPerSocket);
    nblocks0 = defaultOpt nblocks0 (self.hw.cpusPerSocket / 2);
    # The number of blocks is then computed from the multiplier "i" and
    # the initial number of blocks "nblocks0"
    nblocks = self.i * self.nblocks0;

    totalTasks = self.ntasksPerNode * self.nodes;
    particlesPerTask = self.particles / self.totalTasks;
    blocksize = self.particlesPerTask / self.nblocks;
    cc = bsc.icc;
    mpi = bsc.impi;
    cflags = "-g";
    inherit timesteps enableCTF loops;

    # Resources
    qos = "debug";
    cpusPerTask = self.hw.cpusPerSocket;
    ntasksPerNode = self.hw.socketsPerNode;
    jobName = self.unitName;
  });

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  perf = {nextStage, conf, ...}: with conf; stages.perf {
    inherit nextStage;
    perfOptions = "record --call-graph dwarf -o \\$\\$.perf";
  };

  ctf = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    env = optionalString (conf.enableCTF) ''
      export NANOS6_CONFIG_OVERRIDE="version.instrument=ctf,\
      instrument.ctf.converter.enabled=false"
    '';
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-t" timesteps "-p" particles ];
  };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
      customPkgs.apps.nbody.override ({
        inherit (conf) cc blocksize mpi gitBranch cflags;
      });

  pipeline = stdexp.stdPipeline ++ [ ctf exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,72 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with lib;

let
  # Initial variable configuration
  varConf = with bsc; {
    numProcsAndParticles = [ 1 2 4 8 16 32 48 ];
    input = [
      { numParticles=1 ; cpuMask="0x1"; }
      { numParticles=2 ; cpuMask="0x3"; }
      { numParticles=4 ; cpuMask="0xf"; }
      { numParticles=8 ; cpuMask="0xff"; }
      { numParticles=16; cpuMask="0xffff"; }
      { numParticles=32; cpuMask="0xffffffff"; }
      { numParticles=48; cpuMask="0xffffffffffff"; }
    ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    # nbody options
    inherit (c.input) numParticles cpuMask;
    particles = 1024 * numParticles * 2;
    timesteps = 10;
    blocksize = 1024;
    cc = icc;
    mpi = impi;
    gitBranch = "garlic/oss+task";

    # Repeat the execution of each unit 30 times
    loops = 30;

    # Resources
    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "02:00:00";
    cpuBind = "verbose,mask_cpu:${cpuMask}";
    jobName = "nbody-bs-${toString numParticles}-${gitBranch}";
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-t" timesteps "-p" particles ];
  };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
      customPkgs.apps.nbody.override {
        inherit cc blocksize mpi gitBranch;
      };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,63 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with lib;

let
  # Initial variable configuration
  varConf = with bsc; {
    numProcs = [ 1 2 4 8 16 32 48 ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    # nbody options
    particles = 1024 * 64;
    timesteps = 10;
    blocksize = 1024;
    inherit (c) numProcs;
    cc = icc;
    mpi = impi;
    gitBranch = "garlic/mpi+send";

    # Repeat the execution of each unit 30 times
    loops = 30;

    # Resources
    qos = "debug";
    ntasksPerNode = numProcs;
    nodes = 1;
    time = "02:00:00";
    cpuBind = "sockets,verbose";
    jobName = "nbody-bs-${toString numProcs}-${gitBranch}";
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-t" timesteps "-p" particles ];
  };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
      customPkgs.apps.nbody.override {
        inherit cc blocksize mpi gitBranch;
      };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,63 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with lib;

let
  # Initial variable configuration
  varConf = with bsc; {
    cpuMask = [ "0x1" "0x3" "0xf" "0xff" "0xffff" "0xffffffff" "0xffffffffffff" ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    # nbody options
    particles = 1024 * 64;
    timesteps = 10;
    blocksize = 1024;
    inherit (c) cpuMask;
    cc = icc;
    mpi = impi;
    gitBranch = "garlic/oss+task";

    # Repeat the execution of each unit 30 times
    loops = 30;

    # Resources
    qos = "debug";
    ntasksPerNode = 1;
    nodes = 1;
    time = "02:00:00";
    cpuBind = "verbose,mask_cpu:${cpuMask}";
    jobName = "nbody-bs-${cpuMask}-${gitBranch}";
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    argv = [ "-t" timesteps "-p" particles ];
  };

  program = {nextStage, conf, ...}: with conf;
    let
      customPkgs = stdexp.replaceMpi conf.mpi;
    in
      customPkgs.apps.nbody.override {
        inherit cc blocksize mpi gitBranch;
      };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,59 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
, callPackage
}:

with lib;
with garlicTools;

let
  # Initial variable configuration
  varConf = {
    blocksize = [ 128 ];
    nodes = range2 1 16;
    gitBranch = [
      # "garlic/mpi+send+oss+task"
      # "garlic/tampi+send+oss+task"
      "garlic/tampi+isend+oss+task"
    ];
  };

  # Generate the complete configuration for each unit
  genConf = c: targetMachine.config // rec {
    hw = targetMachine.config.hw;
    particles = 8 * 1024 * hw.cpusPerSocket;
    timesteps = 10;

    inherit (c) blocksize nodes gitBranch;

    expName = "nbody-scaling";
    unitName = expName +
      "-${toString gitBranch}" +
      "-nodes${toString nodes}";

    loops = 5;

    qos = "debug";
    ntasksPerNode = hw.socketsPerNode;
    time = "02:00:00";
    cpusPerTask = hw.cpusPerSocket;
    jobName = unitName;
  };

  common = callPackage ./common.nix {};

  inherit (common) getConfigs pipeline;

  configs = getConfigs {
    inherit varConf genConf;
  };

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,60 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages

# Should we test the network (true) or the shared memory (false)?
, interNode ? true
}:

with builtins;
with lib;

let

  machineConfig = targetMachine.config;

  # Initial variable configuration
  varConf = with bsc; {
    mpi = [ impi bsc.openmpi mpich ]; #psmpi ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    inherit (machineConfig) hw;
    nodes = if interNode then 2 else 1;
    ntasksPerNode = if interNode then 1 else 2;
    cpusPerTask = 1;
    time = "00:10:00";
    qos = "debug";
    loops = 30;
    expName = "osu-bw-${mpi.name}";
    unitName = expName;
    jobName = expName;
    inherit (c) mpi;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;

    program = "${nextStage}/bin/osu_bw";
  };

  program = {nextStage, conf, ...}: bsc.osumb.override {
    # Use the specified MPI implementation
    inherit (conf) mpi;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,84 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with builtins;
with lib;

let

  machineConfig = targetMachine.config;

  # Initial variable configuration
  varConf = with bsc; {
    sizeKB = range 5 25;
    mpi = [ impi ];
    #mpi = [ impi bsc.openmpi mpich ]; #psmpi ];
    PSM2_MQ_EAGER_SDMA_SZ_KB = [ 16 20 24 ];
    PSM2_MTU_KB = [ 10 ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    inherit (machineConfig) hw;
    nodes = 2;
    ntasksPerNode = 1;
    cpusPerTask = 1;
    time = "00:30:00";
    qos = "debug";
    loops = 10;
    iterations = 50000;
    # FIXME: Notice the switchover is 16000 and the MTU is 10240
    PSM2_MQ_EAGER_SDMA_SZ = PSM2_MQ_EAGER_SDMA_SZ_KB * 1000;
    PSM2_MTU = PSM2_MTU_KB * 1024;
    expName = "osu-bw";
    unitName = expName +
      "-size.${toString sizeKB}K" +
      "-mtu.${toString PSM2_MTU_KB}K" +
      "-sdma.${toString PSM2_MQ_EAGER_SDMA_SZ_KB}K";
    jobName = expName;
    inherit (c) mpi sizeKB
      PSM2_MQ_EAGER_SDMA_SZ_KB
      PSM2_MTU_KB;

    size = sizeKB * 1024;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;

    program = "${nextStage}/bin/osu_bw";

    env = ''
      export PSM2_MQ_EAGER_SDMA_SZ=${toString PSM2_MQ_EAGER_SDMA_SZ}
      export PSM2_MTU=${toString PSM2_MTU}
      export PSM2_TRACEMASK=0x101
      export PSM2_MQ_PRINT_STATS=-1
    '';

    argv = [
      "-m" "${toString size}:${toString size}"
      "-i" iterations
    ];
  };

  program = {nextStage, conf, ...}: bsc.osumb.override {
    # Use the specified MPI implementation
    inherit (conf) mpi;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
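Note the mixed scaling in the unit generator above: the SDMA eager threshold is expressed in decimal kilobytes (x1000) while the MTU is expressed in binary kilobytes (x1024), which is exactly why the FIXME mentions 16000 and 10240. A small sketch, not part of the tree, showing the environment one unit exports:

# psm2-env-sketch.nix -- illustrative only.
let
  PSM2_MQ_EAGER_SDMA_SZ_KB = 16;
  PSM2_MTU_KB = 10;
  PSM2_MQ_EAGER_SDMA_SZ = PSM2_MQ_EAGER_SDMA_SZ_KB * 1000;  # 16000
  PSM2_MTU = PSM2_MTU_KB * 1024;                            # 10240
in
  ''
    export PSM2_MQ_EAGER_SDMA_SZ=${toString PSM2_MQ_EAGER_SDMA_SZ}
    export PSM2_MTU=${toString PSM2_MTU}
  ''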
@ -1,69 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages

# Should we test the network (true) or the shared memory (false)?
, interNode ? true
}:

with builtins;
with lib;

let

  machineConfig = targetMachine.config;

  # Initial variable configuration
  varConf = with bsc; {
    threshold = [ 8000 16000 32000 64000 ];
    #threshold = [ 4096 8192 10240 ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    inherit (machineConfig) hw;
    nodes = if interNode then 2 else 1;
    ntasksPerNode = if interNode then 1 else 2;
    mpi = impi;
    cpusPerTask = 1;
    time = "00:10:00";
    qos = "debug";
    loops = 10;
    expName = "osu-impi-rndv";
    unitName = expName + "-${toString threshold}";
    jobName = expName;
    inherit (c) threshold;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: stages.exec {
    inherit nextStage;
    env = ''
      export PSM2_MQ_RNDV_SHM_THRESH=${toString conf.threshold}
      export PSM2_MQ_RNDV_HFI_THRESH=${toString conf.threshold}
      export PSM2_MQ_EAGER_SDMA_SZ=${toString conf.threshold}
      #export PSM2_MTU=${toString conf.threshold}
      export PSM2_TRACEMASK=0x101
    '';

    program = "${nextStage}/bin/osu_bw";
  };

  program = {nextStage, conf, ...}: bsc.osumb.override {
    # Use the specified MPI implementation
    inherit (conf) mpi;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,69 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages

# Should we test the network (true) or the shared memory (false)?
, interNode ? true
, enableMultithread ? false
}:

with builtins;
with lib;

let

  machineConfig = targetMachine.config;

  # Initial variable configuration
  varConf = with bsc; {
    mpi = [ impi bsc.openmpi mpich ]; #psmpi ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    inherit (machineConfig) hw;
    nodes = if interNode then 2 else 1;
    ntasksPerNode = if interNode then 1 else 2;
    cpusPerTask = if (enableMultithread) then hw.cpusPerSocket else 1;
    time = "00:10:00";
    qos = "debug";
    loops = 30;
    expName = "osu-latency-${mpi.name}";
    unitName = expName;
    jobName = expName;
    inherit (c) mpi;
    inherit enableMultithread;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;

    program = if (enableMultithread) then
      "${nextStage}/bin/osu_latency_mt"
    else
      "${nextStage}/bin/osu_latency";

    argv = optionals (enableMultithread) [
      "-t" "${toString conf.cpusPerTask}:${toString conf.cpusPerTask}"
    ];
  };

  program = {nextStage, conf, ...}: bsc.osumb.override {
    # Use the specified MPI implementation
    inherit (conf) mpi;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
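When `enableMultithread` is set, the exec stage above switches to the multithreaded latency benchmark and passes "-t N:N" so both sender and receiver use `cpusPerTask` threads. A sketch, not part of the tree, of the resulting invocation with a placeholder store path and an assumed 24-CPU socket:

# osu-latency-argv-sketch.nix -- illustrative only.
let
  conf = { enableMultithread = true; cpusPerTask = 24; };
  bin = "/nix/store/...-osu-micro-benchmarks/bin";   # placeholder
  optionals = cond: xs: if cond then xs else [ ];

  program = if conf.enableMultithread
    then "${bin}/osu_latency_mt"
    else "${bin}/osu_latency";
  argv = optionals conf.enableMultithread
    [ "-t" "${toString conf.cpusPerTask}:${toString conf.cpusPerTask}" ];
in
  program + " " + builtins.concatStringsSep " " argv
  # ".../bin/osu_latency_mt -t 24:24"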
@ -1,84 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
}:

with builtins;
with lib;

let

  machineConfig = targetMachine.config;

  # Initial variable configuration
  varConf = with bsc; {
    sizeKB = range 5 25;
    mpi = [ impi ];
    #mpi = [ impi bsc.openmpi mpich ]; #psmpi ];
    PSM2_MQ_EAGER_SDMA_SZ_KB = [ 16 ];
    PSM2_MTU_KB = [ 8 10 ];
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    inherit (machineConfig) hw;
    nodes = 2;
    ntasksPerNode = 1;
    cpusPerTask = 1;
    time = "00:30:00";
    qos = "debug";
    loops = 10;
    iterations = 50000;
    # FIXME: Notice the switchover is 16000 and the MTU is 10240
    PSM2_MQ_EAGER_SDMA_SZ = PSM2_MQ_EAGER_SDMA_SZ_KB * 1000;
    PSM2_MTU = PSM2_MTU_KB * 1024;
    expName = "osu-bw";
    unitName = expName +
      "-size.${toString sizeKB}K" +
      "-mtu.${toString PSM2_MTU_KB}K" +
      "-sdma.${toString PSM2_MQ_EAGER_SDMA_SZ_KB}K";
    jobName = expName;
    inherit (c) mpi sizeKB
      PSM2_MQ_EAGER_SDMA_SZ_KB
      PSM2_MTU_KB;

    size = sizeKB * 1024;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;

    program = "${nextStage}/bin/osu_bw";

    env = ''
      export PSM2_MQ_EAGER_SDMA_SZ=${toString PSM2_MQ_EAGER_SDMA_SZ}
      export PSM2_MTU=${toString PSM2_MTU}
      export PSM2_TRACEMASK=0x101
      export PSM2_MQ_PRINT_STATS=-1
    '';

    argv = [
      "-m" "${toString size}:${toString size}"
      "-i" iterations
    ];
  };

  program = {nextStage, conf, ...}: bsc.osumb.override {
    # Use the specified MPI implementation
    inherit (conf) mpi;
  };

  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,132 +0,0 @@
#######################################################################
# Saiph, granularity experiment:
#
# App: Heat 3D - garlic/tampi+isend+oss+task+simd branch
# App details:
#   3D mesh of ~400*400*400 points
#   nbgx = global blocks in the X dimension
#   nbgy = global blocks in the Y dimension
#   nbgz = global blocks in the Z dimension
#     --> nbgx*nbgy*nbgz = global distributed blocks
#   nbly = local blocks in the Y dimension
#   nblz = local blocks in the Z dimension
#     --> nbly*nblz = local blocks (#tasks)
#
# Granularity experiment configuration:
#   Single-core run
#   MPI bound to sockets: MPI procs = 2
#   Mesh distributed across the third dimension to ensure contiguous
#   communications
#     --> nbgx = 1, nbgy = 1
#   First dimension cannot be locally blocked (simd reasons)
#   Second and third dimension local blocking limited by local mesh size
#
#######################################################################

# Common packages, tools and options
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
}:

with lib;
with garlicTools;

let

  # Variable configurations
  varConf = with targetMachine.config; {
    # Local blocks per dimension
    nblx = [ 1 ]; # SIMD
    nbly = range2 1 (hw.cpusPerNode * 8);
    nblz = [ 8 ];
    sizex = [ 3 ];
    gitBranch = [ "garlic/tampi+isend+oss+task+simd" ];
  };

  # Generate the complete configuration for each unit
  genConf = c: targetMachine.config // rec {

    # Experiment, units and job names
    expName = "saiph-granularity";
    unitName = "${expName}"
      + "-N${toString nodes}"
      + "-nbg.x${toString nbgx}.y${toString nbgy}.z${toString nbgz}"
      + "-nbl.x${toString nblx}.y${toString nbly}.z${toString nblz}";

    jobName = unitName;

    # saiph options
    totalTasks = ntasksPerNode * nodes;
    nodes = 1;
    enableManualDist = true; # allows manually setting nbg{x,y,z}
    nbgx = 1;
    nbgy = 1;
    nbgz = totalTasks; # force the distribution along the last dimension

    inherit (c) nblx nbly nblz gitBranch sizex;

    blocksPerTask = nblx * nbly * nblz * 1.0;
    blocksPerCpu = blocksPerTask / cpusPerTask;

    # Pin a specific commit
    gitCommit = "8052494d7dc62bef95ebaca9938e82fb029686f6";

    # Repeat the execution of each unit 10 times
    loops = 10;

    # Resources
    inherit (targetMachine.config) hw;
    qos = "debug";
    ntasksPerNode = hw.socketsPerNode; # MPI bound to sockets
    cpusPerTask = hw.cpusPerSocket; # Using the 24 CPUs of each socket
  };

  #*** Compute the final set of configurations ***
  # Compute the array of configurations: cartesian product of all
  # factors
  allConfigs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  # Filter out undesired configurations:
  #   --> blocksPerCpu < 0.5
  #   --> nblz > 2 * cpusPerTask
  isGoodConfig = c:
    let
      maxNblz = c.cpusPerTask * 2;
    in
      ! (c.blocksPerCpu < 0.5 || c.nblz > maxNblz);

  configs = filter (isGoodConfig) allConfigs;

  #*** Sets the env/argv of the program ***
  exec = {nextStage, conf, ...}: stages.exec {
    inherit nextStage;
    env = ''
      export OMP_NUM_THREADS=${toString conf.cpusPerTask}
    '';
  };

  #*** Configure the program according to the app ***
  program = {nextStage, conf, ...}: bsc.apps.saiph.override {
    inherit (conf) enableManualDist
      nbgx nbgy nbgz nblx nbly nblz
      sizex
      gitBranch gitCommit;

    L3SizeKB = conf.hw.cacheSizeKB.L3;
    cachelineBytes = conf.hw.cachelineBytes;
  };

  #*** Add stages to the pipeline ***
  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
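The filter above keeps only the units with at least half a block per CPU and a reasonable nblz. A self-contained sketch, not part of the tree, reproducing the blocksPerCpu computation and the filter for an assumed 2-socket, 24-CPUs-per-socket node; the nbly candidates are a power-of-two sweep similar to range2 1 (hw.cpusPerNode * 8).

# saiph-filter-sketch.nix -- illustrative only.
let
  cpusPerTask = 24;
  mkUnit = nbly: rec {
    nblx = 1; nblz = 8;
    inherit nbly cpusPerTask;
    blocksPerTask = nblx * nbly * nblz * 1.0;
    blocksPerCpu = blocksPerTask / cpusPerTask;
  };
  isGoodConfig = c:
    let maxNblz = c.cpusPerTask * 2;
    in ! (c.blocksPerCpu < 0.5 || c.nblz > maxNblz);

  candidates = map mkUnit [ 1 2 4 8 16 32 64 128 256 ];
in
  map (c: c.nbly) (builtins.filter isGoodConfig candidates)
  # nbly = 1 gives blocksPerCpu = 8/24 < 0.5 and is dropped; the rest pass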
@ -1,122 +0,0 @@
######################################################################################
# Saiph, scalability experiment:
#
# App: Heat 3D - garlic/tampi+isend+oss+task+simd branch
# App details:
#   3D mesh of ~400*400*400 points
#   nbgx = global blocks in the X dimension
#   nbgy = global blocks in the Y dimension
#   nbgz = global blocks in the Z dimension
#     --> nbgx*nbgy*nbgz = global distributed blocks
#   nbly = local blocks in the Y dimension
#   nblz = local blocks in the Z dimension
#     --> nbly*nblz = local blocks (#tasks)
#
# Scalability experiment configuration:
#   From a single-core granularity experiment, use a suited local blocking set:
#     --> nbly*nblz >= 48 (at least 3 tasks/proc)
#   MPI bound to sockets: MPI procs = 2*nodes
#   Mesh distributed across the third dimension to ensure contiguous communications
#     --> nbgx = 1, nbgy = 1
#   Global distribution limited by global mesh size
#   First dimension cannot be locally blocked (simd reasons)
#   Second and third dimension local blocking limited by local mesh size
#
######################################################################################

# Common packages, tools and options
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
}:

with lib;
with garlicTools;

let

  #*** Variable configurations ***
  varConf = with targetMachine.config; {
    # FIXME: None of the selected nbl* values and problem sizes gives good
    # efficiency when testing strong scaling. We should find better values.
    # Local blocks per dimension
    nblx = [ 1 ]; # SIMD
    nbly = [ 32 ];
    nblz = [ 8 ];
    sizex = [ 3 6 ];
    gitBranch = [ "garlic/tampi+isend+oss+task+simd" ];
    nodes = range2 1 8;
  };

  #*** Generate the complete configuration for each unit ***
  genConf = c: targetMachine.config // rec {

    # Experiment, units and job names
    expName = "saiph-ss";
    unitName = "${expName}"
      + "-N${toString nodes}"
      + "-nbg.x${toString nbgx}.y${toString nbgy}.z${toString nbgz}"
      + "-nbl.x${toString nblx}.y${toString nbly}.z${toString nblz}";
    jobName = unitName;

    # saiph options
    enableManualDist = true; # allows manually setting nbg{x,y,z}
    nbgx = 1;
    nbgy = 1;
    nbgz = nodes * ntasksPerNode; # force the distribution along the last dimension

    inherit (c) nblx nbly nblz nodes sizex;

    gitBranch = "garlic/tampi+isend+oss+task+simd";
    gitCommit = "8052494d7dc62bef95ebaca9938e82fb029686f6"; # pin a specific commit

    blocksPerTask = nblx * nbly * nblz * 1.0;
    blocksPerCpu = blocksPerTask / cpusPerTask;

    # Repeat the execution of each unit 10 times
    loops = 10;

    # Resources
    inherit (targetMachine.config) hw;

    qos = "debug";
    ntasksPerNode = hw.socketsPerNode; # MPI bound to sockets
    cpusPerTask = hw.cpusPerSocket; # Using the 24 CPUs of each socket
  };

  #*** Compute the final set of configurations ***
  # Compute the array of configurations: cartesian product of all factors
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  #*** Sets the env/argv of the program ***
  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    env = ''
      export OMP_NUM_THREADS=${toString hw.cpusPerSocket}
    '';
  };

  #*** Configure the program according to the app ***
  program = {nextStage, conf, ...}: bsc.apps.saiph.override {
    inherit (conf) enableManualDist
      nbgx nbgy nbgz nblx nbly nblz
      sizex
      gitBranch gitCommit;

    L3SizeKB = conf.hw.cacheSizeKB.L3;
    cachelineBytes = conf.hw.cachelineBytes;
  };

  #*** Add stages to the pipeline ***
  pipeline = stdexp.stdPipeline ++ [ exec program ];

in

stdexp.genExperiment { inherit configs pipeline; }
@ -1,63 +0,0 @@
{
  stdenv
, lib
, stdexp
, bsc
, targetMachine
, stages
, garlicTools
}:

with lib;
with garlicTools;

let

  machineConfig = targetMachine.config;

  inherit (machineConfig) hw;

  # Initial variable configuration
  varConf = with bsc; {
    # Create a list of cpus per task by computing the divisors of the number of
    # cpus per socket, example: divisors 24 = [ 1 2 3 4 6 8 12 24 ]
    cpusPerTask = divisors hw.cpusPerSocket;
  };

  # Generate the complete configuration for each unit
  genConf = with bsc; c: targetMachine.config // rec {
    expName = "cpu";
    unitName = "${expName}.${toString cpusPerTask}";

    inherit (machineConfig) hw;

    # Run each unit only once
    loops = 1;

    # Resources
    qos = "debug";
    inherit (c) cpusPerTask;
    # As cpusPerTask is a divisor of the cpusPerSocket and thus cpusPerNode, we
    # know the remainder is zero:
    ntasksPerNode = hw.cpusPerNode / cpusPerTask;
    nodes = 1;
    jobName = unitName;
  };

  # Compute the array of configurations
  configs = stdexp.buildConfigs {
    inherit varConf genConf;
  };

  exec = {nextStage, conf, ...}: with conf; stages.exec {
    inherit nextStage;
    env = "date";
  };

  program = {nextStage, conf, ...}: bsc.dummy;

  pipeline = stdexp.stdPipeline ++ [ program ];

in

stdexp.genExperiment { inherit configs pipeline; }
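The comment above already gives the key example: divisors 24 = [ 1 2 3 4 6 8 12 24 ], so every cpusPerTask value divides cpusPerNode evenly. The sketch below, not part of the tree, reproduces that behaviour with a stand-in for the garlicTools `divisors` helper (whose real definition is not shown in this diff) and derives ntasksPerNode for an assumed 48-CPU node.

# divisors-sketch.nix -- illustrative only; evaluate with nix-instantiate --eval --strict.
let
  hw = { cpusPerSocket = 24; cpusPerNode = 48; };   # assumed node layout
  divisors = n:
    builtins.filter (d: n - (n / d) * d == 0)       # d divides n exactly
      (builtins.genList (i: i + 1) n);
in
  map (cpusPerTask: {
    inherit cpusPerTask;
    ntasksPerNode = hw.cpusPerNode / cpusPerTask;   # exact: cpusPerTask divides 48
  }) (divisors hw.cpusPerSocket)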