Compare commits: 4a79f92c9b...add-fox-ma

11 commits:

- b58370cf83
- 581efb4312
- c32c1bd03b
- 1ddc5b7248
- 8968deb4db
- 5a21baf2be
- f4534e1e5a
- d6ed4b4521
- 049ad4d062
- 07ab4018d8
- a1135306ed
keys.nix (5 changed lines)
@@ -9,12 +9,11 @@ rec {
koro = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIImiTFDbxyUYPumvm8C4mEnHfuvtBY1H8undtd6oDd67 koro";
bay = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICvGBzpRQKuQYHdlUQeAk6jmdbkrhmdLwTBqf3el7IgU bay";
lake2 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAINo66//S1yatpQHE/BuYD/Gfq64TY7ZN5XOGXmNchiO0 lake2";
fox = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDwItIk5uOJcQEVPoy/CVGRzfmE1ojrdDcI06FrU4NFT fox";
fox = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDa9lId4rB/EKGkkCCVOy0cuId2SYLs+8W8kx0kmpO1y fox";
};

hostGroup = with hosts; rec {
untrusted = [ fox ];
compute = [ owl1 owl2 ];
compute = [ owl1 owl2 fox ];
playground = [ eudy koro ];
storage = [ bay lake2 ];
monitor = [ hut ];
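The two `fox` entries above are the old and new host keys, and `fox` moves from the `untrusted` group into `compute`. As an optional sanity check (a sketch, assuming `fox` resolves from the cluster network), the key a rebuilt `fox` actually presents can be compared against the new entry:

```sh
# Print the ed25519 host key currently advertised by fox
ssh-keyscan -t ed25519 fox 2>/dev/null
```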
@@ -23,7 +23,6 @@
trusted-users = [ "@wheel" ];
flake-registry = pkgs.writeText "global-registry.json"
''{"flakes":[],"version":2}'';
keep-outputs = true;
};

gc = {

@@ -10,7 +10,7 @@ in

# Connect to intranet git hosts via proxy
programs.ssh.extraConfig = ''
Host bscpm02.bsc.es bscpm03.bsc.es bscpm04.bsc.es gitlab-internal.bsc.es alya.gitlab.bsc.es
Host bscpm02.bsc.es bscpm03.bsc.es gitlab-internal.bsc.es alya.gitlab.bsc.es
User git
ProxyCommand nc -X connect -x hut:23080 %h %p
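This proxy path can also be exercised by hand (a sketch; only the `hut:23080` CONNECT proxy and the `git` user come from the configuration above):

```sh
# Open an SSH connection to an intranet git host through the hut CONNECT proxy
ssh -o ProxyCommand='nc -X connect -x hut:23080 %h %p' -T git@bscpm03.bsc.es
```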
@@ -22,7 +22,6 @@ in
programs.ssh.knownHosts = hostsKeys // {
"gitlab-internal.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIF9arsAOSRB06hdy71oTvJHG2Mg8zfebADxpvc37lZo3";
"bscpm03.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM2NuSUPsEhqz1j5b4Gqd+MWFnRqyqY57+xMvBUqHYUS";
"bscpm04.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPx4mC0etyyjYUT2Ztc/bs4ZXSbVMrogs1ZTP924PDgT";
"glogin1.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFsHsZGCrzpd4QDVn5xoDOtrNBkb0ylxKGlyBt6l9qCz";
"glogin2.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFsHsZGCrzpd4QDVn5xoDOtrNBkb0ylxKGlyBt6l9qCz";
};

@@ -81,7 +81,7 @@
home = "/home/Computational/abonerib";
description = "Aleix Boné";
group = "Computational";
hosts = [ "owl1" "owl2" "hut" "raccoon" "fox" ];
hosts = [ "owl1" "owl2" "hut" "raccoon" ];
hashedPassword = "$6$V1EQWJr474whv7XJ$OfJ0wueM2l.dgiJiiah0Tip9ITcJ7S7qDvtSycsiQ43QBFyP4lU0e0HaXWps85nqB4TypttYR4hNLoz3bz662/";
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIIFiqXqt88VuUfyANkZyLJNiuroIITaGlOOTMhVDKjf abonerib@bsc"

@@ -126,19 +126,6 @@
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGEfy6F4rF80r4Cpo2H5xaWqhuUZzUsVsILSKGJzt5jF dalvare1@ssfhead"
];
};

varcila = {
uid = 5650;
isNormalUser = true;
home = "/home/Computational/varcila";
description = "Vincent Arcila";
group = "Computational";
hosts = [ "hut" "fox" ];
hashedPassword = "$6$oB0Tcn99DcM4Ch$Vn1A0ulLTn/8B2oFPi9wWl/NOsJzaFAWjqekwcuC9sMC7cgxEVb.Nk5XSzQ2xzYcNe5MLtmzkVYnRS1CqP39Y0";
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKGt0ESYxekBiHJQowmKpfdouw0hVm3N7tUMtAaeLejK vincent@varch"
];
};
};

groups = {

@@ -11,7 +11,7 @@

proxy = {
default = "http://hut:23080/";
noProxy = "127.0.0.1,localhost,internal.domain,10.0.40.40,hut";
noProxy = "127.0.0.1,localhost,internal.domain,10.0.40.40";
# Don't set all_proxy as go complains and breaks the gitlab runner, see:
# https://github.com/golang/go/issues/16715
allProxy = null;
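A quick manual check of these proxy settings (a sketch; the variable values come from the hunk above, the target URL is only an example):

```sh
# External requests go through hut; addresses listed in no_proxy are contacted directly
https_proxy=http://hut:23080 no_proxy=127.0.0.1,localhost,internal.domain,10.0.40.40 \
  curl -I https://nixos.org
```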
@@ -2,9 +2,11 @@

{
imports = [
../common/base.nix
../common/xeon/console.nix
../common/xeon.nix
../module/ceph.nix
../module/emulation.nix
../module/slurm-client.nix
../module/slurm-firewall.nix
];

# Select this using the ID to avoid mismatches

@@ -20,51 +22,11 @@
hardware.cpu.intel.updateMicrocode = lib.mkForce false;

networking = {
defaultGateway = "147.83.30.130";
nameservers = [ "8.8.8.8" ];
hostName = "fox";
interfaces.enp1s0f0np0.ipv4.addresses = [
{
# BSC network (to be removed)
address = "10.0.40.26";
prefixLength = 24;
}
{
# UPC network
# Public IP configuration:
# - Hostname: fox.ac.upc.edu
# - IP: 147.83.30.141
# - Gateway: 147.83.30.130
# - NetMask: 255.255.255.192
# Private IP configuration for BMC:
# - Hostname: fox-ipmi.ac.upc.edu
# - IP: 147.83.35.27
# - Gateway: 147.83.35.2
# - NetMask: 255.255.255.0
address = "147.83.30.141";
prefixLength = 26; # 255.255.255.192
}
];
extraHosts = ''
# Fox UPC
147.83.30.141 fox.ac.upc.edu
147.83.35.27 fox-ipmi.ac.upc.edu
# Fox BSC
10.0.40.26 fox
10.0.40.126 fox-ipmi
'' +
# To be removed:
''
# Hut BSC
10.0.40.7 hut
'';

# To be removed:
proxy = {
default = "http://hut:23080/";
noProxy = "127.0.0.1,localhost,internal.domain,10.0.40.40,hut";
allProxy = null;
};
interfaces.enp1s0f0np0.ipv4.addresses = [ {
address = "10.0.40.26";
prefixLength = 24;
} ];
};
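Once the host is rebuilt with the UPC address above, two optional checks (a sketch; the interface name and addresses are taken from the hunk, nothing else is assumed):

```sh
# The public UPC address should be configured on the enp1s0f0np0 interface
ip -4 addr show dev enp1s0f0np0

# And the public name should answer from outside the BSC network
ping -c 1 fox.ac.upc.edu
```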
# Configure Nvidia driver to use with CUDA

@@ -94,4 +56,20 @@
wantedBy = [ "multi-user.target" ];
serviceConfig.ExecStart = script;
};

# Only allow SSH connections from users who have a SLURM allocation
# See: https://slurm.schedmd.com/pam_slurm_adopt.html
security.pam.services.sshd.rules.account.slurm = {
control = "required";
enable = true;
modulePath = "${pkgs.slurm}/lib/security/pam_slurm_adopt.so";
args = [ "log_level=debug5" ];
order = 999999; # Make it the last one
};

# Disable systemd session (pam_systemd.so) as it will conflict with the
# pam_slurm_adopt.so module. What happens is that the shell is first adopted
# into the slurmstepd task and then into the systemd session, which is not
# what we want, otherwise it will linger even if all jobs are gone.
security.pam.services.sshd.startSession = lib.mkForce false;
}
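With `pam_slurm_adopt` in place, SSH logins are only accepted for users that already have a job on the node. A hypothetical session illustrating the intended flow (the `fox` partition comes from the SLURM hunks further down; none of this is part of the changeset):

```sh
# Request an allocation on fox first...
salloc -p fox -N 1

# ...then SSH in: the shell is adopted into the job instead of a systemd session.
# Without an allocation, the account check above rejects the connection.
ssh fox
```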
@@ -56,11 +56,6 @@
iptables -A nixos-fw -p tcp -s 10.0.40.30 --dport 23080 -j nixos-fw-log-refuse
iptables -A nixos-fw -p tcp -s 10.0.40.0/24 --dport 23080 -j nixos-fw-accept
'';
# Flush all rules and chains on stop so it won't break on start
extraStopCommands = ''
iptables -F
iptables -X
'';
};
};

@@ -22,8 +22,8 @@
"--docker-network-mode host"
];
environmentVariables = {
https_proxy = "http://hut:23080";
http_proxy = "http://hut:23080";
https_proxy = "http://localhost:23080";
http_proxy = "http://localhost:23080";
};
};
in {

@@ -38,13 +38,14 @@
gitlab-bsc-docker = {
# gitlab.bsc.es still uses the old token mechanism
registrationConfigFile = config.age.secrets.gitlab-bsc-docker.path;
tagList = [ "docker" "hut" ];
environmentVariables = {
# We cannot access the hut local interface from docker, so we connect
# to hut directly via the ethernet one.
https_proxy = "http://hut:23080";
http_proxy = "http://hut:23080";
https_proxy = "http://localhost:23080";
http_proxy = "http://localhost:23080";
};
# FIXME
registrationFlags = [
"--docker-network-mode host"
];
executor = "docker";
dockerImage = "alpine";
dockerVolumes = [

@@ -52,15 +53,7 @@
"/nix/var/nix/db:/nix/var/nix/db:ro"
"/nix/var/nix/daemon-socket:/nix/var/nix/daemon-socket:ro"
];
dockerExtraHosts = [
# Required to pass the proxy via hut
"hut:10.0.40.7"
];
dockerDisableCache = true;
registrationFlags = [
# Increase build log length to 64 MiB
"--output-limit 65536"
];
preBuildScript = pkgs.writeScript "setup-container" ''
mkdir -p -m 0755 /nix/var/log/nix/drvs
mkdir -p -m 0755 /nix/var/nix/gcroots

@@ -73,39 +66,32 @@
mkdir -p -m 0700 "$HOME/.nix-defexpr"
mkdir -p -m 0700 "$HOME/.ssh"
cat > "$HOME/.ssh/config" << EOF
Host bscpm04.bsc.es gitlab-internal.bsc.es
Host bscpm03.bsc.es gitlab-internal.bsc.es
User git
ProxyCommand nc -X connect -x hut:23080 %h %p
Host amdlogin1.bsc.es armlogin1.bsc.es hualogin1.bsc.es glogin1.bsc.es glogin2.bsc.es fpgalogin1.bsc.es
ProxyCommand nc -X connect -x hut:23080 %h %p
EOF
cat >> "$HOME/.ssh/known_hosts" << EOF
bscpm04.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPx4mC0etyyjYUT2Ztc/bs4ZXSbVMrogs1ZTP924PDgT
bscpm03.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM2NuSUPsEhqz1j5b4Gqd+MWFnRqyqY57+xMvBUqHYUS
gitlab-internal.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIF9arsAOSRB06hdy71oTvJHG2Mg8zfebADxpvc37lZo3
EOF
. ${pkgs.nix}/etc/profile.d/nix-daemon.sh
# Required to load SSL certificate paths
. ${pkgs.cacert}/nix-support/setup-hook
${pkgs.nix}/bin/nix-channel --add https://nixos.org/channels/nixos-24.11 nixpkgs
${pkgs.nix}/bin/nix-channel --update nixpkgs
${pkgs.nix}/bin/nix-env -i ${lib.concatStringsSep " " (with pkgs; [ nix cacert git openssh netcat curl ])}
'';
environmentVariables = {
ENV = "/etc/profile";
USER = "root";
NIX_REMOTE = "daemon";
PATH = "${config.system.path}/bin:/bin:/sbin:/usr/bin:/usr/sbin";
PATH = "/nix/var/nix/profiles/default/bin:/nix/var/nix/profiles/default/sbin:/bin:/sbin:/usr/bin:/usr/sbin";
NIX_SSL_CERT_FILE = "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt";
};
};
};
};

# DOCKER* chains are useless, override at FORWARD and nixos-fw
networking.firewall.extraCommands = ''
# Don't forward any traffic from docker
iptables -I FORWARD 1 -p all -i docker0 -j nixos-fw-log-refuse

# Allow incoming traffic from docker to 23080
iptables -A nixos-fw -p tcp -i docker0 -d hut --dport 23080 -j ACCEPT
'';

#systemd.services.gitlab-runner.serviceConfig.Shell = "${pkgs.bash}/bin/bash";
systemd.services.gitlab-runner.serviceConfig.DynamicUser = lib.mkForce false;
systemd.services.gitlab-runner.serviceConfig.User = "gitlab-runner";
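The runner hunks above switch the proxy endpoint from `hut` to `localhost`. Either endpoint can be checked by hand from the machine running the runner (a sketch; only the port and the GitLab host come from the configuration):

```sh
# Confirm the CONNECT proxy answers and can reach GitLab
curl -x http://localhost:23080 -I https://gitlab.bsc.es
```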
@@ -3,10 +3,7 @@
{
imports = [
../module/slurm-exporter.nix
../module/meteocat-exporter.nix
../module/upc-qaire-exporter.nix
./gpfs-probe.nix
./nix-daemon-exporter.nix
];

age.secrets.grafanaJungleRobotPassword = {

@@ -49,7 +46,7 @@
services.prometheus = {
enable = true;
port = 9001;
retentionTime = "5y";
retentionTime = "1y";
listenAddress = "127.0.0.1";
};

@@ -79,7 +76,7 @@
group = "root";
user = "root";
configFile = config.age.secrets.ipmiYml.path;
# extraFlags = [ "--log.level=debug" ];
extraFlags = [ "--log.level=debug" ];
listenAddress = "127.0.0.1";
};
node = {

@@ -111,9 +108,6 @@
"127.0.0.1:${toString config.services.prometheus.exporters.smartctl.port}"
"127.0.0.1:9341" # Slurm exporter
"127.0.0.1:9966" # GPFS custom exporter
"127.0.0.1:9999" # Nix-daemon custom exporter
"127.0.0.1:9929" # Meteocat custom exporter
"127.0.0.1:9928" # UPC Qaire custom exporter
"127.0.0.1:${toString config.services.prometheus.exporters.blackbox.port}"
];
}];

@@ -169,7 +163,6 @@
"8.8.8.8"
"ssfhead"
"anella-bsc.cesca.cat"
"fox.ac.upc.edu"
];
}];
relabel_configs = [

@@ -257,14 +250,6 @@
module = [ "raccoon" ];
};
}
{
job_name = "raccoon";
static_configs = [
{
targets = [ "127.0.0.1:19002" ]; # Node exporter
}
];
}
{
job_name = "ipmi-fox";
metrics_path = "/ipmi";
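After these scrape-config changes are deployed, the active jobs can be listed through the Prometheus HTTP API (a sketch; the listen address and port come from the `services.prometheus` hunk above, and `jq` is assumed to be available on `hut`):

```sh
# List the job names Prometheus is currently scraping
curl -s http://127.0.0.1:9001/api/v1/targets |
  jq -r '.data.activeTargets[].labels.job' | sort -u
```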
@@ -12,19 +12,16 @@ let
installPhase = ''
cp -r public $out
'';
# Don't mess doc/
dontFixup = true;
};
in
{
networking.firewall.allowedTCPPorts = [ 80 ];
services.nginx = {
enable = true;
virtualHosts."jungle.bsc.es" = {
root = "${website}";
listen = [
{
addr = "0.0.0.0";
addr = "127.0.0.1";
port = 80;
}
];

@@ -41,7 +38,7 @@ in
proxy_redirect http:// $scheme://;
}
location /cache {
rewrite ^/cache/(.*) /$1 break;
rewrite ^/cache(.*) /$1 break;
proxy_pass http://127.0.0.1:5000;
proxy_redirect http:// $scheme://;
}
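With the listen address moved to `127.0.0.1`, the vhost can still be checked locally on the web host by forcing the `Host` header (a sketch; the `/cache` location is the one proxied above):

```sh
# Expect an HTTP 200 from the binary cache behind /cache
curl -sI -H 'Host: jungle.bsc.es' http://127.0.0.1/cache/nix-cache-info | head -n 1
```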
@@ -1,26 +0,0 @@
#!/bin/sh

# Locate nix daemon pid
nd=$(pgrep -o nix-daemon)

# Locate children of nix-daemon
pids1=$(tr ' ' '\n' < "/proc/$nd/task/$nd/children")

# For each child, locate 2nd level children
pids2=$(echo "$pids1" | xargs -I @ /bin/sh -c 'cat /proc/@/task/*/children' | tr ' ' '\n')

cat <<EOF
HTTP/1.1 200 OK
Content-Type: text/plain; version=0.0.4; charset=utf-8; escaping=values

# HELP nix_daemon_build Nix daemon derivation build state.
# TYPE nix_daemon_build gauge
EOF

for pid in $pids2; do
name=$(cat /proc/$pid/environ 2>/dev/null | tr '\0' '\n' | rg "^name=(.+)" - --replace '$1' | tr -dc ' [:alnum:]_\-\.')
user=$(ps -o uname= -p "$pid")
if [ -n "$name" -a -n "$user" ]; then
printf 'nix_daemon_build{user="%s",name="%s"} 1\n' "$user" "$name"
fi
done

@@ -1,23 +0,0 @@
{ pkgs, config, lib, ... }:
let
script = pkgs.runCommand "nix-daemon-exporter.sh" { }
''
cp ${./nix-daemon-builds.sh} $out;
chmod +x $out
''
;
in
{
systemd.services.nix-daemon-exporter = {
description = "Daemon to export nix-daemon metrics";
path = [ pkgs.procps pkgs.ripgrep ];
wantedBy = [ "default.target" ];
serviceConfig = {
Type = "simple";
ExecStart = "${pkgs.socat}/bin/socat TCP4-LISTEN:9999,fork EXEC:${script}";
# Needed root to read the environment, potentially unsafe
User = "root";
Group = "root";
};
};
}

@@ -1,10 +0,0 @@
{ config, ... }:
{
nix.settings =
# Don't add hut as a cache to itself
assert config.networking.hostName != "hut";
{
substituters = [ "http://hut/cache" ];
trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
};
}

@@ -1,17 +0,0 @@
{ config, lib, pkgs, ... }:

with lib;

{
systemd.services."prometheus-meteocat-exporter" = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
Restart = mkDefault "always";
PrivateTmp = mkDefault true;
WorkingDirectory = mkDefault "/tmp";
DynamicUser = mkDefault true;
ExecStart = "${pkgs.meteocat-exporter}/bin/meteocat-exporter";
};
};
}

@@ -27,6 +27,22 @@ let
done
'';

prolog = pkgs.writeScript "prolog.sh" ''
#!/usr/bin/env bash

echo "hello from the prolog"

exit 0
'';

epilog = pkgs.writeScript "epilog.sh" ''
#!/usr/bin/env bash

echo "hello from the epilog"

exit 0
'';

in {
systemd.services.slurmd.serviceConfig = {
# Kill all processes in the control group on stop/restart. This will kill

@@ -43,11 +59,14 @@
clusterName = "jungle";
nodeName = [
"owl[1,2] Sockets=2 CoresPerSocket=14 ThreadsPerCore=2 Feature=owl"
"fox Sockets=2 CoresPerSocket=96 ThreadsPerCore=2 Feature=fox"
"hut Sockets=2 CoresPerSocket=14 ThreadsPerCore=2"
];

partitionName = [
"owl Nodes=owl[1-2] Default=YES DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
"fox Nodes=fox Default=NO DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
"all Nodes=owl[1-2],hut Default=NO DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
];

# See slurm.conf(5) for more details about these options.

@@ -75,7 +94,7 @@
SuspendTimeout=60
ResumeProgram=${resumeProgram}
ResumeTimeout=300
SuspendExcNodes=hut
SuspendExcNodes=hut,fox

# Turn the nodes off after 1 hour of inactivity
SuspendTime=3600
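With `fox` registered as a node and as its own non-default partition, jobs have to select it explicitly. A hypothetical smoke test once slurmctld has reloaded the configuration (not part of the changeset):

```sh
# Check that the new node and partition are visible...
sinfo -p fox

# ...and run a trivial job on it
srun -p fox -N 1 hostname
```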
@@ -1,17 +0,0 @@
{ config, lib, pkgs, ... }:

with lib;

{
systemd.services."prometheus-upc-qaire-exporter" = {
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
serviceConfig = {
Restart = mkDefault "always";
PrivateTmp = mkDefault true;
WorkingDirectory = mkDefault "/tmp";
DynamicUser = mkDefault true;
ExecStart = "${pkgs.upc-qaire-exporter}/bin/upc-qaire-exporter";
};
};
}

@@ -8,7 +8,6 @@
../module/slurm-client.nix
../module/slurm-firewall.nix
../module/debuginfod.nix
../module/hut-substituter.nix
];

# Select this using the ID to avoid mismatches

@@ -8,7 +8,6 @@
../module/slurm-client.nix
../module/slurm-firewall.nix
../module/debuginfod.nix
../module/hut-substituter.nix
];

# Select this using the ID to avoid mismatches
@@ -25,11 +25,6 @@
} ];
};

nix.settings = {
substituters = [ "https://jungle.bsc.es/cache" ];
trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
};

# Configure Nvidia driver to use with CUDA
hardware.nvidia.package = config.boot.kernelPackages.nvidiaPackages.production;
hardware.graphics.enable = true;

@@ -1,25 +0,0 @@
{ python3Packages, lib }:

python3Packages.buildPythonApplication rec {
pname = "meteocat-exporter";
version = "1.0";

src = ./.;

doCheck = false;

build-system = with python3Packages; [
setuptools
];

dependencies = with python3Packages; [
beautifulsoup4
lxml
prometheus-client
];

meta = with lib; {
description = "MeteoCat Prometheus Exporter";
platforms = platforms.linux;
};
}

@@ -1,54 +0,0 @@
#!/usr/bin/env python3

import time
from prometheus_client import start_http_server, Gauge
from bs4 import BeautifulSoup
from urllib import request

# Configuration -------------------------------------------
meteo_station = "X8" # Barcelona - Zona Universitària
listening_port = 9929
update_period = 60 * 5 # Each 5 min
# ---------------------------------------------------------

metric_tmin = Gauge('meteocat_temp_min', 'Min temperature')
metric_tmax = Gauge('meteocat_temp_max', 'Max temperature')
metric_tavg = Gauge('meteocat_temp_avg', 'Average temperature')
metric_srad = Gauge('meteocat_solar_radiation', 'Solar radiation')

def update(st):
    url = 'https://www.meteo.cat/observacions/xema/dades?codi=' + st
    response = request.urlopen(url)
    data = response.read()
    soup = BeautifulSoup(data, 'lxml')
    table = soup.find("table", {"class" : "tblperiode"})
    rows = table.find_all('tr')
    row = rows[-1] # Take the last row
    row_data = []
    header = row.find('th')
    header_text = header.text.strip()
    row_data.append(header_text)
    for col in row.find_all('td'):
        row_data.append(col.text)
    try:
        # Sometimes it will return '(s/d)' and fail to parse
        metric_tavg.set(float(row_data[1]))
        metric_tmax.set(float(row_data[2]))
        metric_tmin.set(float(row_data[3]))
        metric_srad.set(float(row_data[10]))
        #print("ok: temp_avg={}".format(float(row_data[1])))
    except:
        print("cannot parse row: {}".format(row))
        metric_tavg.set(float("nan"))
        metric_tmax.set(float("nan"))
        metric_tmin.set(float("nan"))
        metric_srad.set(float("nan"))

if __name__ == '__main__':
    start_http_server(port=listening_port, addr="localhost")
    while True:
        try:
            update(meteo_station)
        except:
            print("update failed")
        time.sleep(update_period)

@@ -1,11 +0,0 @@
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='meteocat-exporter',
version='1.0',
# Modules to import from other scripts:
packages=find_packages(),
# Executables
scripts=["meteocat-exporter"],
)

@@ -54,6 +54,4 @@ final: prev:
});

prometheus-slurm-exporter = prev.callPackage ./slurm-exporter.nix { };
meteocat-exporter = prev.callPackage ./meteocat-exporter/default.nix { };
upc-qaire-exporter = prev.callPackage ./upc-qaire-exporter/default.nix { };
}

@@ -1,24 +0,0 @@
{ python3Packages, lib }:

python3Packages.buildPythonApplication rec {
pname = "upc-qaire-exporter";
version = "1.0";

src = ./.;

doCheck = false;

build-system = with python3Packages; [
setuptools
];

dependencies = with python3Packages; [
prometheus-client
requests
];

meta = with lib; {
description = "UPC Qaire Prometheus Exporter";
platforms = platforms.linux;
};
}

@@ -1,11 +0,0 @@
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='upc-qaire-exporter',
version='1.0',
# Modules to import from other scripts:
packages=find_packages(),
# Executables
scripts=["upc-qaire-exporter"],
)

@@ -1,74 +0,0 @@
#!/usr/bin/env python3

import time
from prometheus_client import start_http_server, Gauge
import requests, json
from datetime import datetime, timedelta

# Configuration -------------------------------------------
listening_port = 9928
update_period = 60 * 5 # Each 5 min
# ---------------------------------------------------------

metric_temp = Gauge('upc_c6_s302_temp', 'UPC C6 S302 temperature sensor')

def genparams():
    d = {}
    d['topic'] = 'TEMPERATURE'
    d['shift_dates_to'] = ''
    d['datapoints'] = 301
    d['devicesAndColors'] = '1148418@@@#40ACB6'

    now = datetime.now()

    d['fromDate'] = now.strftime('%d/%m/%Y')
    d['toDate'] = now.strftime('%d/%m/%Y')
    d['serviceFrequency'] = 'NONE'

    # WTF!
    for i in range(7):
        for j in range(48):
            key = 'week.days[{}].hours[{}].value'.format(i, j)
            d[key] = 'OPEN'

    return d

def measure():
    # First we need to load session
    s = requests.Session()
    r = s.get("https://upc.edu/sirena")
    if r.status_code != 200:
        print("bad HTTP status code on new session: {}".format(r.status_code))
        return

    if s.cookies.get("JSESSIONID") is None:
        print("cannot get JSESSIONID")
        return

    # Now we can pull the data
    url = "https://upcsirena.app.dexma.com/l_12535/analysis/by_datapoints/data.json"
    r = s.post(url, data=genparams())

    if r.status_code != 200:
        print("bad HTTP status code on data: {}".format(r.status_code))
        return

    #print(r.text)
    j = json.loads(r.content)

    # Just take the last one
    last = j['data']['chartElementList'][-1]
    temp = last['values']['1148418-Temperatura']

    return temp

if __name__ == '__main__':
    start_http_server(port=listening_port, addr="localhost")
    while True:
        try:
            metric_temp.set(measure())
        except:
            print("measure failed")
            metric_temp.set(float("nan"))

        time.sleep(update_period)
Binary file not shown.
Binary file not shown.
@@ -1,10 +1,9 @@
age-encryption.org/v1
-> ssh-ed25519 HY2yRg XPOFoZqY+AnKC77jrgNqAm1ADphurfuhO4NRrfiuUDc
iCfMMpGHyaYHGy6ci8sqjUtcPeteLlyvLGEF79VPOEc
-> ssh-ed25519 CAWG4Q 6OsGrnM+/c5lTN81Rvp166K+ygmSIFeSYzXxYg25KGE
Av1zTw2zK4Gufzti9kQaye7C362GCiDRRHzCqBLR33g
-> ssh-ed25519 MSF3dg 8CHqJ7mEDvjvqbmF+eE6Em1Wi6eHAzEUpiExC1gm7S0
bdwzYHw3RAbdHq+RsiFUP++sQ586VUlSnAzAOhiQUjI
--- gA5XSUfjUBol938sC5DbUf8PvQUIr2pNkS2nL95OF9c
(binary payload not shown)
-> ssh-ed25519 HY2yRg 4Xns3jybBuv8flzd+h3DArVBa/AlKjt1J9jAyJsasCE
uyVjJxh5i8aGgAgCpPl6zTYeIkf9mIwURof51IKWvwE
-> ssh-ed25519 CAWG4Q T2r6r1tyNgq1XlYXVtLJFfOfUnm6pSVlPwUqC1pkyRo
9yDoKU0EC34QMUXYnsJvhPCLm6oD9w7NlTi2sheoBqQ
-> ssh-ed25519 MSF3dg Bh9DekFTq+QMUEAonwcaIAJX4Js1O7cHjDniCD0gtm8
t/Ro0URLeDUWcvb7rlkG2s03PZ+9Rr3N4TIX03tXpVc
--- E5+/D4aK2ihKRR4YC5XOTmUbKgOqBR0Nk0gYvFOzXOI
(binary payload not shown)

@@ -1,9 +1,10 @@
age-encryption.org/v1
-> ssh-ed25519 HY2yRg pXNTB/ailRwSEJG1pXvrzzpz5HqkDZdWVWnOH7JGeQ4
NzA+2fxfkNRy/u+Zq96A02K1Vxy0ETYZjMkDVTKyCY8
-> ssh-ed25519 CAWG4Q 7CLJWn+EAxoWDduXaOSrHaBFHQ4GIpYP/62FFTj3ZTI
vSYV1pQg2qI2ngCzM0nCZAnqdz1tbT4hM5m+/TyGU2c
-> ssh-ed25519 MSF3dg Akmp4NcZcDuaYHta/Vej6zulNSrAOCd5lmSV+OiBGC4
qTxqVzTyywur+GjtUQdbaIUdH1fqCqPe6qPf8iHRa4w
--- uCKNqD1TmZZThOzlpsecBKx/k+noIWhCVMr/pzNwBr8
(binary payload not shown)
-> ssh-ed25519 HY2yRg GdmdkW+BqqwBgu30b846jv3J7jtCM+a3rgOERuA050A
FeGqM75jG9egesR+yyVKHm0/M+uBBp5Hclg4+qN0BR8
-> ssh-ed25519 CAWG4Q a0wTWHgulQUYDAMZmXf3dOf6PdYgCqNtSylzWVVRNVM
Bx+WSYaiY4ZwlSZJo2a1XPMQmbKOU7F0tKAqVRLBOPo
-> ssh-ed25519 MSF3dg KccUvZZUbxbCrRWUWrX8KcHF6vQ5FV/BqUqI59G7dj4
CFr7GXpZ9rPgy7HBfOyiYF9FnZUw6KcZwq9f7/0KaU8
--- E0Rp6RR/8+o0jvB1lRdhnlabxvI6uu/IgL2ZpPXzTc8
(binary payload not shown)

@@ -1,9 +1,9 @@
age-encryption.org/v1
-> ssh-ed25519 HY2yRg s6iI9f25xulF4KXt+XY07kXXPKxXo7f2Ql/OTHN55Hk
WO4Fd2H9c+HL3+XhUF3BmEZVILlcchGxSrSmL2OEdGw
-> ssh-ed25519 CAWG4Q TBkdpx8k8K1NvW3wcvaF7omKFwEJ2DxWJp3tIOTjwCA
LcYgWRix23AQnw0OQ7f8+8S3J84CHUElX1vKZSETiLE
-> ssh-ed25519 MSF3dg WzrF8kjTP7BXXDjmUp7kPCKguthAW12RPo6Vy2RMmh4
8C3mT9ktudCTANDxhyNszUkbeDG6X4wOJdx825++dYM
--- /w3YQ2UeTi67H1JR0GsdPz2KoLN2Y7BIZfFY+//AWjY
(binary payload not shown)
-> ssh-ed25519 HY2yRg xWRxJGWSzA5aplRYCYLB6aBwrUrQQJ2MtDYaD75V5nI
J07XF3NQiaYKKKNRcNWi9MloJD2wXHd+2K7bo6lF+QU
-> ssh-ed25519 CAWG4Q jNWymbyCczcm8RcaIEbFQBlOMALsuxTl4+pLUi0aR20
z5NixlrRD+Y7Z/aFPs6hiDW4/lp8CBQCeJYpbuG9yYM
-> ssh-ed25519 MSF3dg QsUQloEKN3k1G49FQnNR/Do6ILgGpjFcw3zu5kk1Ako
IHwyFWUEWqCStNcFprnpBa8L5J6zKIsn+7HcgGRv3sM
--- oUia0fsL6opeYWACyXtHAu/Ld+bUIt/7S1VszYTvwgU
(binary payload not shown)
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -13,115 +13,6 @@ which is available at `hut` or `xeon07`. It runs the following services:
- Grafana: to plot the data in the web browser.
- Slurmctld: to manage the SLURM nodes.
- Gitlab runner: to run CI jobs from Gitlab.
- Nix binary cache: to serve cached nix builds.

This node is prone to interruptions from all the services it runs, so it is not
a good candidate for low noise executions.

# Binary cache

We provide a binary cache in `hut`, with the aim of avoiding unnecessary
recompilation of packages.

The cache should contain common packages from bscpkgs, but we don't provide
any guarantee of what will be available in the cache, or for how long.
We recommend following the latest version of the `jungle` flake to avoid cache
misses.

## Usage

### From NixOS

In NixOS, we can add the cache through the `nix.settings` option, which will
enable it for all builds in the system.

```nix
{ ... }: {
  nix.settings = {
    substituters = [ "https://jungle.bsc.es/cache" ];
    trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
  };
}
```

### Interactively

The cache can also be specified on a per-command basis through the flags
`--substituters` and `--trusted-public-keys`:

```sh
nix build --substituters "https://jungle.bsc.es/cache" --trusted-public-keys "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" <...>
```

Note: you'll have to be a trusted user.

### Nix configuration file (non-nixos)

If using nix outside of NixOS, you'll have to update `/etc/nix/nix.conf`:

```
# echo "substituters = https://jungle.bsc.es/cache" >> /etc/nix/nix.conf
# echo "trusted-public-keys = jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" >> /etc/nix/nix.conf
```

### Hint in flakes

By adding the configuration below to a `flake.nix`, when someone uses the flake,
`nix` will interactively ask to trust and use the provided binary cache:

```nix
{
  nixConfig = {
    extra-substituters = [
      "https://jungle.bsc.es/cache"
    ];
    extra-trusted-public-keys = [
      "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0="
    ];
  };
  outputs = { ... }: {
    ...
  };
}
```

### Querying the cache

Check if the cache is available:
```sh
$ curl https://jungle.bsc.es/cache/nix-cache-info
StoreDir: /nix/store
WantMassQuery: 1
Priority: 30
```

Prevent nix from building locally:
```bash
nix build --max-jobs 0 <...>
```

Check if a package is in the cache:
```bash
# Do a raw eval on the <package>.outPath (this should not build the package)
$ nix eval --raw jungle#openmp.outPath
/nix/store/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-openmp-2024.11
# Take the hash (everything from / to - in the basename) and curl <hash>.narinfo
# if it exists in the cache, it will return HTTP 200 and some information
# if not, it will return 404
$ curl https://jungle.bsc.es/cache/dwnn4dgm1m4184l4xbi0qfrprji9wjmi.narinfo
StorePath: /nix/store/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-openmp-2024.11
URL: nar/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-17imkdfqzmnb013d14dx234bx17bnvws8baf3ii1xra5qi2y1wiz.nar
Compression: none
NarHash: sha256:17imkdfqzmnb013d14dx234bx17bnvws8baf3ii1xra5qi2y1wiz
NarSize: 1519328
References: 4gk773fqcsv4fh2rfkhs9bgfih86fdq8-gcc-13.3.0-lib nqb2ns2d1lahnd5ncwmn6k84qfd7vx2k-glibc-2.40-36
Deriver: vcn0x8hikc4mvxdkvrdxp61bwa5r7lr6-openmp-2024.11.drv
Sig: jungle.bsc.es:GDTOUEs1jl91wpLbb+gcKsAZjpKdARO9j5IQqb3micBeqzX2M/NDtKvgCS1YyiudOUdcjwa3j+hyzV2njokcCA==
# In one line:
$ curl "https://jungle.bsc.es/cache/$(nix eval --raw jungle#<package>.outPath | cut -d '/' -f4 | cut -d '-' -f1).narinfo"
```

#### References

- https://nix.dev/guides/recipes/add-binary-cache.html
- https://nixos.wiki/wiki/Binary_Cache