nbody: update granularity tests

This commit is contained in:
Antoni Navarro 2021-03-24 10:12:05 +01:00 committed by Rodrigo Arias Mallo
parent 3e197da8a3
commit ea66d7e4e0
5 changed files with 143 additions and 65 deletions

View File

@ -9,6 +9,8 @@
{
nbody = rec {
baseline = callPackage ./nbody/nblocks.nix { };
granularity = callPackage ./nbody/granularity-mpi.nix { };
scaling = callPackage ./nbody/scaling.nix { };
# Experiment variants
small = baseline.override {
@ -25,10 +27,6 @@
};
};
scaling = callPackage ./nbody/scaling.nix {
particles = 12 * 4096;
};
};
saiph = {

View File

@ -4,48 +4,42 @@
, bsc
, targetMachine
, stages
, garlicTools
}:
with stdenv.lib;
with garlicTools;
let
# Initial variable configuration
varConf = with bsc; {
blocksize = [ 128 256 512 1024 2048 4096 ];
gitBranch = [
"garlic/mpi+send+oss+task"
"garlic/tampi+send+oss+task"
"garlic/tampi+isend+oss+task"
];
};
machineConfig = targetMachine.config;
# Generate the complete configuration for each unit
genConf = with bsc; c: targetMachine.config // rec {
expName = "nbody.test";
unitName = "${expName}.nb-${toString nblocks}";
inherit (machineConfig) hw;
# nbody options
particles = 1024 * 64;
genConf = c: targetMachine.config // rec {
hw = targetMachine.config.hw;
particles = 4096 * hw.cpusPerSocket;
timesteps = 10;
inherit (c) blocksize;
totalTasks = ntasksPerNode * nodes;
particlesPerTask = particles / totalTasks;
cc = icc;
mpi = impi;
gitBranch = "garlic/mpi+send";
blocksize = c.blocksize;
gitBranch = c.gitBranch;
# Repeat the execution of each unit 30 times
loops = 10;
expName = "nbody-granularity";
unitName = expName + "-${toString gitBranch}" + "-bs${toString blocksize}";
# Resources
qos = "debug";
ntasksPerNode = 48;
loops = 30;
qos = "bsc_cs";
ntasksPerNode = 1;
nodes = 1;
time = "02:00:00";
cpuBind = "sockets,verbose";
jobName = "bs-${toString blocksize}-${gitBranch}-nbody";
# Experiment revision: this allows a user to run again a experiment already
# executed
rev = 0;
time = "04:00:00";
cpusPerTask = hw.cpusPerSocket;
jobName = unitName;
};
# Compute the array of configurations
@ -53,18 +47,13 @@ let
inherit varConf genConf;
};
exec = {nextStage, conf, ...}: with conf; stages.exec {
exec = {nextStage, conf, ...}: stages.exec {
inherit nextStage;
argv = [ "-t" timesteps "-p" particles ];
argv = [ "-t" conf.timesteps "-p" conf.particles ];
};
program = {nextStage, conf, ...}: with conf;
# FIXME: This is becoming very slow:
#let
# customPkgs = stdexp.replaceMpi conf.mpi;
#in
bsc.garlic.apps.nbody.override {
inherit cc blocksize mpi gitBranch;
program = {nextStage, conf, ...}: with conf; bsc.garlic.apps.nbody.override {
inherit (conf) blocksize gitBranch;
};
pipeline = stdexp.stdPipeline ++ [ exec program ];

View File

@ -4,36 +4,38 @@
, bsc
, targetMachine
, stages
, garlicTools
}:
with stdenv.lib;
with garlicTools;
let
# Initial variable configuration
varConf = with bsc; {
varConf = {
blocksize = [ 128 256 512 1024 2048 4096 ];
};
# Generate the complete configuration for each unit
genConf = with bsc; c: targetMachine.config // rec {
# nbody options
particles = 1024 * 64;
genConf = c: targetMachine.config // rec {
hw = targetMachine.config.hw;
particles = 4096 * hw.cpusPerSocket;
timesteps = 10;
inherit (c) blocksize;
cc = icc;
mpi = impi;
gitBranch = "garlic/oss+task";
blocksize = c.blocksize;
gitBranch = "garlic/oss+task";
expName = "nbody-granularity";
unitName = expName + "-bs${toString blocksize}";
# Repeat the execution of each unit 30 times
loops = 30;
# Resources
qos = "debug";
ntasksPerNode = 1;
nodes = 1;
time = "02:00:00";
cpuBind = "sockets,verbose";
jobName = "nbody-bs-${toString blocksize}-${gitBranch}";
cpusPerTask = hw.cpusPerSocket;
jobName = unitName;
};
# Compute the array of configurations
@ -41,18 +43,14 @@ let
inherit varConf genConf;
};
exec = {nextStage, conf, ...}: with conf; stages.exec {
exec = {nextStage, conf, ...}: stages.exec {
inherit nextStage;
argv = [ "-t" timesteps "-p" particles ];
argv = [ "-t" conf.timesteps "-p" conf.particles ];
};
program = {nextStage, conf, ...}: with conf;
let
customPkgs = stdexp.replaceMpi conf.mpi;
in
customPkgs.apps.nbody.override {
inherit cc blocksize mpi gitBranch;
};
program = {nextStage, conf, ...}: with conf; bsc.garlic.apps.nbody.override {
inherit (conf) blocksize gitBranch;
};
pipeline = stdexp.stdPipeline ++ [ exec program ];

View File

@ -30,11 +30,12 @@ let
in
{
nbody = with exp.nbody; {
baseline = stdPlot ./nbody/baseline.R [ baseline ];
small = stdPlot ./nbody/baseline.R [ small ];
jemalloc = stdPlot ./nbody/jemalloc.R [ baseline jemalloc ];
ctf = stdPlot ./nbody/baseline.R [ ctf ];
scaling = stdPlot ./nbody/baseline.R [ scaling ];
baseline = stdPlot ./nbody/baseline.R [ baseline ];
small = stdPlot ./nbody/baseline.R [ small ];
jemalloc = stdPlot ./nbody/jemalloc.R [ baseline jemalloc ];
ctf = stdPlot ./nbody/baseline.R [ ctf ];
scaling = stdPlot ./nbody/baseline.R [ scaling ];
granularity = stdPlot ./nbody/granularity.R [ granularity ];
};
hpcg = with exp.hpcg; {

View File

@ -0,0 +1,92 @@
# Granularity plots for the NBody experiment: read a JSON-lines results
# file and render median/normalized/raw execution-time plots plus a
# blocksize-vs-branch heatmap, each saved as PNG and PDF.
library(ggplot2)
library(dplyr, warn.conflicts = FALSE)
library(scales)
library(jsonlite)
library(viridis, warn.conflicts = FALSE)

# First CLI argument selects the results file; default to "input".
args = commandArgs(trailingOnly=TRUE)
input_file = if (length(args) > 0) args[1] else "input"

# Build the working data frame: keep only the fields we plot, turn the
# experiment dimensions into factors, and derive per-unit time statistics.
# NOTE: log.median.time is kept for parity even though no plot below uses it.
df = jsonlite::stream_in(file(input_file), verbose=FALSE) %>%
  jsonlite::flatten() %>%
  select(config.blocksize, config.gitBranch, unit, time) %>%
  rename(blocksize=config.blocksize, branch=config.gitBranch) %>%
  mutate(blocksize = as.factor(blocksize),
         branch = as.factor(branch),
         unit = as.factor(unit)) %>%
  group_by(unit) %>%
  mutate(median.time = median(time),
         normalized.time = time / median.time - 1,
         log.median.time = log(median.time)) %>%
  ungroup()

# Output geometry shared by every figure.
dpi = 300
h = 5
w = 8

# Shared look and feel for every plot.
common_theme = theme_bw() +
  theme(plot.subtitle=element_text(size=5),
        legend.position="bottom",
        legend.text=element_text(size=7))

# Write one plot as both PNG and PDF under the given basename.
save_plot = function(basename, plot) {
  ggsave(paste0(basename, ".png"), plot=plot, width=w, height=h, dpi=dpi)
  ggsave(paste0(basename, ".pdf"), plot=plot, width=w, height=h, dpi=dpi)
}

# ---------------------------------------------------------------------
# Median time per blocksize, one line per branch.
plt = ggplot(df, aes(x=blocksize, y=median.time, color=branch)) +
  geom_point() +
  geom_line(aes(group=branch)) +
  labs(x="Blocksize", y="Median time (s)",
       title="NBody Granularity: Median Time",
       subtitle=input_file) +
  common_theme
save_plot("median.time", plt)

# ---------------------------------------------------------------------
# Spread of each unit's runs around its median, faceted by branch; the
# dashed red lines mark the +/-1% band.
plt = ggplot(df, aes(x=blocksize, y=normalized.time, color=branch)) +
  geom_boxplot() +
  geom_hline(yintercept=c(-0.01, 0.01), linetype="dashed", color="red") +
  facet_wrap(~ branch) +
  labs(x="Blocksize", y="Normalized Time",
       title="NBody Granularity: Normalized Time",
       subtitle=input_file) +
  common_theme
save_plot("normalized.time", plt)

# ---------------------------------------------------------------------
# Raw per-run times.
plt = ggplot(df, aes(x=blocksize, y=time, color=branch)) +
  geom_point(shape=21, size=3) +
  labs(x="Blocksize", y="Time (s)",
       title="NBody Granularity: Time",
       subtitle=input_file) +
  common_theme
save_plot("time", plt)

# ---------------------------------------------------------------------
# Heatmap of median time over the blocksize x branch grid.
plt = ggplot(df, aes(x=blocksize, y=branch, fill=median.time)) +
  geom_raster() +
  scale_fill_viridis(option="plasma") +
  coord_fixed() +
  labs(x="Blocksize", y="Branch",
       title="NBody Granularity: Time",
       subtitle=input_file) +
  common_theme
save_plot("time.heatmap", plt)