Compare commits

add-fox-ma...7a2f37aaa2 (30 commits)

| SHA1 |
|---|
| 7a2f37aaa2 |
| aae6585f66 |
| 1c15e77c83 |
| 82fc3209de |
| abeab18270 |
| 1985b58619 |
| 44bd061823 |
| e8c309f584 |
| 71ae7fb585 |
| 8834d561d2 |
| 29daa3c364 |
| 9c503fbefb |
| 51b6a8b612 |
| 52213d388d |
| edf744db8d |
| b82894eaec |
| 1c47199891 |
| 8738bd4eeb |
| 7699783aac |
| fee1d4da7e |
| b77ce7fb56 |
| b4a12625c5 |
| 302106ea9a |
| 96877de8d9 |
| 8878985be6 |
| 737578db34 |
| 88555e3f8c |
| feb2060be7 |
| 00999434c2 |
| 29d58cc62d |
@@ -23,6 +23,7 @@
trusted-users = [ "@wheel" ];
flake-registry = pkgs.writeText "global-registry.json"
''{"flakes":[],"version":2}'';
keep-outputs = true;
};

gc = {
@@ -10,7 +10,7 @@ in

# Connect to intranet git hosts via proxy
programs.ssh.extraConfig = ''
-Host bscpm02.bsc.es bscpm03.bsc.es gitlab-internal.bsc.es alya.gitlab.bsc.es
+Host bscpm02.bsc.es bscpm03.bsc.es bscpm04.bsc.es gitlab-internal.bsc.es alya.gitlab.bsc.es
User git
ProxyCommand nc -X connect -x hut:23080 %h %p
@@ -22,6 +22,7 @@ in
programs.ssh.knownHosts = hostsKeys // {
"gitlab-internal.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIF9arsAOSRB06hdy71oTvJHG2Mg8zfebADxpvc37lZo3";
"bscpm03.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM2NuSUPsEhqz1j5b4Gqd+MWFnRqyqY57+xMvBUqHYUS";
+"bscpm04.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPx4mC0etyyjYUT2Ztc/bs4ZXSbVMrogs1ZTP924PDgT";
"glogin1.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFsHsZGCrzpd4QDVn5xoDOtrNBkb0ylxKGlyBt6l9qCz";
"glogin2.bsc.es".publicKey = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFsHsZGCrzpd4QDVn5xoDOtrNBkb0ylxKGlyBt6l9qCz";
};
@@ -81,7 +81,7 @@
home = "/home/Computational/abonerib";
description = "Aleix Boné";
group = "Computational";
-hosts = [ "owl1" "owl2" "hut" "raccoon" ];
+hosts = [ "owl1" "owl2" "hut" "raccoon" "fox" ];
hashedPassword = "$6$V1EQWJr474whv7XJ$OfJ0wueM2l.dgiJiiah0Tip9ITcJ7S7qDvtSycsiQ43QBFyP4lU0e0HaXWps85nqB4TypttYR4hNLoz3bz662/";
openssh.authorizedKeys.keys = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIIFiqXqt88VuUfyANkZyLJNiuroIITaGlOOTMhVDKjf abonerib@bsc"
@@ -126,6 +126,19 @@
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGEfy6F4rF80r4Cpo2H5xaWqhuUZzUsVsILSKGJzt5jF dalvare1@ssfhead"
];
};
+
+varcila = {
+uid = 5650;
+isNormalUser = true;
+home = "/home/Computational/varcila";
+description = "Vincent Arcila";
+group = "Computational";
+hosts = [ "hut" "fox" ];
+hashedPassword = "$6$oB0Tcn99DcM4Ch$Vn1A0ulLTn/8B2oFPi9wWl/NOsJzaFAWjqekwcuC9sMC7cgxEVb.Nk5XSzQ2xzYcNe5MLtmzkVYnRS1CqP39Y0";
+openssh.authorizedKeys.keys = [
+"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKGt0ESYxekBiHJQowmKpfdouw0hVm3N7tUMtAaeLejK vincent@varch"
+];
+};
};

groups = {
@@ -11,7 +11,7 @@

proxy = {
default = "http://hut:23080/";
-noProxy = "127.0.0.1,localhost,internal.domain,10.0.40.40";
+noProxy = "127.0.0.1,localhost,internal.domain,10.0.40.40,hut";
# Don't set all_proxy as go complains and breaks the gitlab runner, see:
# https://github.com/golang/go/issues/16715
allProxy = null;
@@ -56,6 +56,11 @@
iptables -A nixos-fw -p tcp -s 10.0.40.30 --dport 23080 -j nixos-fw-log-refuse
iptables -A nixos-fw -p tcp -s 10.0.40.0/24 --dport 23080 -j nixos-fw-accept
'';
+# Flush all rules and chains on stop so it won't break on start
+extraStopCommands = ''
+iptables -F
+iptables -X
+'';
};
};
@@ -22,8 +22,8 @@
"--docker-network-mode host"
];
environmentVariables = {
-https_proxy = "http://localhost:23080";
-http_proxy = "http://localhost:23080";
+https_proxy = "http://hut:23080";
+http_proxy = "http://hut:23080";
};
};
in {
@@ -38,14 +38,13 @@
gitlab-bsc-docker = {
# gitlab.bsc.es still uses the old token mechanism
registrationConfigFile = config.age.secrets.gitlab-bsc-docker.path;
tagList = [ "docker" "hut" ];
environmentVariables = {
-https_proxy = "http://localhost:23080";
-http_proxy = "http://localhost:23080";
+# We cannot access the hut local interface from docker, so we connect
+# to hut directly via the ethernet one.
+https_proxy = "http://hut:23080";
+http_proxy = "http://hut:23080";
};
# FIXME
registrationFlags = [
"--docker-network-mode host"
];
executor = "docker";
dockerImage = "alpine";
dockerVolumes = [
@@ -53,7 +52,15 @@
"/nix/var/nix/db:/nix/var/nix/db:ro"
"/nix/var/nix/daemon-socket:/nix/var/nix/daemon-socket:ro"
];
+dockerExtraHosts = [
+# Required to pass the proxy via hut
+"hut:10.0.40.7"
+];
dockerDisableCache = true;
+registrationFlags = [
+# Increase build log length to 64 MiB
+"--output-limit 65536"
+];
preBuildScript = pkgs.writeScript "setup-container" ''
mkdir -p -m 0755 /nix/var/log/nix/drvs
mkdir -p -m 0755 /nix/var/nix/gcroots
@@ -66,32 +73,39 @@
mkdir -p -m 0700 "$HOME/.nix-defexpr"
mkdir -p -m 0700 "$HOME/.ssh"
cat > "$HOME/.ssh/config" << EOF
-Host bscpm03.bsc.es gitlab-internal.bsc.es
+Host bscpm04.bsc.es gitlab-internal.bsc.es
User git
ProxyCommand nc -X connect -x hut:23080 %h %p
Host amdlogin1.bsc.es armlogin1.bsc.es hualogin1.bsc.es glogin1.bsc.es glogin2.bsc.es fpgalogin1.bsc.es
ProxyCommand nc -X connect -x hut:23080 %h %p
EOF
cat >> "$HOME/.ssh/known_hosts" << EOF
bscpm03.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIM2NuSUPsEhqz1j5b4Gqd+MWFnRqyqY57+xMvBUqHYUS
bscpm04.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPx4mC0etyyjYUT2Ztc/bs4ZXSbVMrogs1ZTP924PDgT
gitlab-internal.bsc.es ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIF9arsAOSRB06hdy71oTvJHG2Mg8zfebADxpvc37lZo3
EOF
. ${pkgs.nix}/etc/profile.d/nix-daemon.sh
${pkgs.nix}/bin/nix-channel --add https://nixos.org/channels/nixos-24.11 nixpkgs
${pkgs.nix}/bin/nix-channel --update nixpkgs
${pkgs.nix}/bin/nix-env -i ${lib.concatStringsSep " " (with pkgs; [ nix cacert git openssh netcat curl ])}
# Required to load SSL certificate paths
. ${pkgs.cacert}/nix-support/setup-hook
'';
environmentVariables = {
ENV = "/etc/profile";
USER = "root";
NIX_REMOTE = "daemon";
PATH = "/nix/var/nix/profiles/default/bin:/nix/var/nix/profiles/default/sbin:/bin:/sbin:/usr/bin:/usr/sbin";
NIX_SSL_CERT_FILE = "/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt";
PATH = "${config.system.path}/bin:/bin:/sbin:/usr/bin:/usr/sbin";
};
};
};
};

# DOCKER* chains are useless, override at FORWARD and nixos-fw
networking.firewall.extraCommands = ''
# Don't forward any traffic from docker
iptables -I FORWARD 1 -p all -i docker0 -j nixos-fw-log-refuse

# Allow incoming traffic from docker to 23080
iptables -A nixos-fw -p tcp -i docker0 -d hut --dport 23080 -j ACCEPT
'';

#systemd.services.gitlab-runner.serviceConfig.Shell = "${pkgs.bash}/bin/bash";
systemd.services.gitlab-runner.serviceConfig.DynamicUser = lib.mkForce false;
systemd.services.gitlab-runner.serviceConfig.User = "gitlab-runner";
@@ -3,7 +3,10 @@
{
imports = [
../module/slurm-exporter.nix
+../module/meteocat-exporter.nix
+../module/upc-qaire-exporter.nix
./gpfs-probe.nix
+./nix-daemon-exporter.nix
];

age.secrets.grafanaJungleRobotPassword = {
@@ -46,7 +49,7 @@
services.prometheus = {
enable = true;
port = 9001;
-retentionTime = "1y";
+retentionTime = "5y";
listenAddress = "127.0.0.1";
};
@@ -76,7 +79,7 @@
group = "root";
user = "root";
configFile = config.age.secrets.ipmiYml.path;
-extraFlags = [ "--log.level=debug" ];
+# extraFlags = [ "--log.level=debug" ];
listenAddress = "127.0.0.1";
};
node = {
@@ -108,6 +111,9 @@
"127.0.0.1:${toString config.services.prometheus.exporters.smartctl.port}"
"127.0.0.1:9341" # Slurm exporter
"127.0.0.1:9966" # GPFS custom exporter
+"127.0.0.1:9999" # Nix-daemon custom exporter
+"127.0.0.1:9929" # Meteocat custom exporter
+"127.0.0.1:9928" # UPC Qaire custom exporter
"127.0.0.1:${toString config.services.prometheus.exporters.blackbox.port}"
];
}];
@@ -250,6 +256,14 @@
module = [ "raccoon" ];
};
}
+{
+job_name = "raccoon";
+static_configs = [
+{
+targets = [ "127.0.0.1:19002" ]; # Node exporter
+}
+];
+}
{
job_name = "ipmi-fox";
metrics_path = "/ipmi";
@@ -12,16 +12,19 @@ let
installPhase = ''
cp -r public $out
'';
# Don't mess doc/
dontFixup = true;
};
in
{
networking.firewall.allowedTCPPorts = [ 80 ];
services.nginx = {
enable = true;
virtualHosts."jungle.bsc.es" = {
root = "${website}";
listen = [
{
-addr = "127.0.0.1";
+addr = "0.0.0.0";
port = 80;
}
];
@@ -38,7 +41,7 @@ in
proxy_redirect http:// $scheme://;
}
location /cache {
-rewrite ^/cache(.*) /$1 break;
+rewrite ^/cache/(.*) /$1 break;
proxy_pass http://127.0.0.1:5000;
proxy_redirect http:// $scheme://;
}
m/hut/nix-daemon-builds.sh (new executable file, 26 lines)
@@ -0,0 +1,26 @@
#!/bin/sh

# Locate nix daemon pid
nd=$(pgrep -o nix-daemon)

# Locate children of nix-daemon
pids1=$(tr ' ' '\n' < "/proc/$nd/task/$nd/children")

# For each child, locate 2nd level children
pids2=$(echo "$pids1" | xargs -I @ /bin/sh -c 'cat /proc/@/task/*/children' | tr ' ' '\n')

cat <<EOF
HTTP/1.1 200 OK
Content-Type: text/plain; version=0.0.4; charset=utf-8; escaping=values

# HELP nix_daemon_build Nix daemon derivation build state.
# TYPE nix_daemon_build gauge
EOF

for pid in $pids2; do
	name=$(cat /proc/$pid/environ 2>/dev/null | tr '\0' '\n' | rg "^name=(.+)" - --replace '$1' | tr -dc ' [:alnum:]_\-\.')
	user=$(ps -o uname= -p "$pid")
	if [ -n "$name" -a -n "$user" ]; then
		printf 'nix_daemon_build{user="%s",name="%s"} 1\n' "$user" "$name"
	fi
done
m/hut/nix-daemon-exporter.nix (new file, 23 lines)
@@ -0,0 +1,23 @@
{ pkgs, config, lib, ... }:
let
  script = pkgs.runCommand "nix-daemon-exporter.sh" { }
    ''
      cp ${./nix-daemon-builds.sh} $out;
      chmod +x $out
    ''
  ;
in
{
  systemd.services.nix-daemon-exporter = {
    description = "Daemon to export nix-daemon metrics";
    path = [ pkgs.procps pkgs.ripgrep ];
    wantedBy = [ "default.target" ];
    serviceConfig = {
      Type = "simple";
      ExecStart = "${pkgs.socat}/bin/socat TCP4-LISTEN:9999,fork EXEC:${script}";
      # Needed root to read the environment, potentially unsafe
      User = "root";
      Group = "root";
    };
  };
}
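As a quick check (not part of the change set), the endpoint can be probed with curl once the unit is running on hut; the metric name comes from the script above, while the label values below are only illustrative:

```sh
# Probe the nix-daemon exporter on hut (port 9999, as configured above)
curl -s http://127.0.0.1:9999
# Expected shape of the reply:
#   # HELP nix_daemon_build Nix daemon derivation build state.
#   # TYPE nix_daemon_build gauge
#   nix_daemon_build{user="nixbld1",name="hello-2.12.1"} 1
```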
m/module/hut-substituter.nix (new file, 10 lines)
@@ -0,0 +1,10 @@
{ config, ... }:
{
  nix.settings =
    # Don't add hut as a cache to itself
    assert config.networking.hostName != "hut";
    {
      substituters = [ "http://hut/cache" ];
      trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
    };
}
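For context, a host opts into the cache by importing this module from its configuration, which is exactly what the `../module/hut-substituter.nix` lines added in the import hunks further down do; a minimal sketch (the host path is hypothetical):

```nix
# m/<some-host>/configuration.nix (hypothetical path)
{
  imports = [
    ../module/hut-substituter.nix
  ];
}
```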
m/module/meteocat-exporter.nix (new file, 17 lines)
@@ -0,0 +1,17 @@
{ config, lib, pkgs, ... }:

with lib;

{
  systemd.services."prometheus-meteocat-exporter" = {
    wantedBy = [ "multi-user.target" ];
    after = [ "network.target" ];
    serviceConfig = {
      Restart = mkDefault "always";
      PrivateTmp = mkDefault true;
      WorkingDirectory = mkDefault "/tmp";
      DynamicUser = mkDefault true;
      ExecStart = "${pkgs.meteocat-exporter}/bin/meteocat-exporter";
    };
  };
}
@@ -27,22 +27,6 @@ let
done
'';

-prolog = pkgs.writeScript "prolog.sh" ''
-#!/usr/bin/env bash
-
-echo "hello from the prolog"
-
-exit 0
-'';
-
-epilog = pkgs.writeScript "epilog.sh" ''
-#!/usr/bin/env bash
-
-echo "hello from the epilog"
-
-exit 0
-'';
-
in {
systemd.services.slurmd.serviceConfig = {
# Kill all processes in the control group on stop/restart. This will kill
@@ -59,14 +43,13 @@ in {
clusterName = "jungle";
nodeName = [
"owl[1,2] Sockets=2 CoresPerSocket=14 ThreadsPerCore=2 Feature=owl"
-"fox Sockets=2 CoresPerSocket=96 ThreadsPerCore=2 Feature=fox"
+"fox Sockets=2 CoresPerSocket=96 ThreadsPerCore=1 Feature=fox"
"hut Sockets=2 CoresPerSocket=14 ThreadsPerCore=2"
];

partitionName = [
"owl Nodes=owl[1-2] Default=YES DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
"fox Nodes=fox Default=NO DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
"all Nodes=owl[1-2],hut Default=NO DefaultTime=01:00:00 MaxTime=INFINITE State=UP"
];

# See slurm.conf(5) for more details about these options.
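Once this configuration is deployed, the `fox` partition defined above is addressable from the usual SLURM tools; a quick smoke test could look like this (standard SLURM commands, not part of the change set):

```sh
# List the partitions and check that fox shows up as UP
sinfo
# Run a trivial job on the fox partition
srun -p fox -N 1 hostname
```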
m/module/upc-qaire-exporter.nix (new file, 17 lines)
@@ -0,0 +1,17 @@
{ config, lib, pkgs, ... }:

with lib;

{
  systemd.services."prometheus-upc-qaire-exporter" = {
    wantedBy = [ "multi-user.target" ];
    after = [ "network.target" ];
    serviceConfig = {
      Restart = mkDefault "always";
      PrivateTmp = mkDefault true;
      WorkingDirectory = mkDefault "/tmp";
      DynamicUser = mkDefault true;
      ExecStart = "${pkgs.upc-qaire-exporter}/bin/upc-qaire-exporter";
    };
  };
}
@@ -8,6 +8,7 @@
../module/slurm-client.nix
../module/slurm-firewall.nix
../module/debuginfod.nix
+../module/hut-substituter.nix
];

# Select this using the ID to avoid mismatches
@@ -8,6 +8,7 @@
../module/slurm-client.nix
../module/slurm-firewall.nix
../module/debuginfod.nix
+../module/hut-substituter.nix
];

# Select this using the ID to avoid mismatches
@@ -25,6 +25,11 @@
} ];
};

+nix.settings = {
+substituters = [ "https://jungle.bsc.es/cache" ];
+trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
+};
+
# Configure Nvidia driver to use with CUDA
hardware.nvidia.package = config.boot.kernelPackages.nvidiaPackages.production;
hardware.graphics.enable = true;
pkgs/meteocat-exporter/default.nix (new file, 25 lines)
@@ -0,0 +1,25 @@
{ python3Packages, lib }:

python3Packages.buildPythonApplication rec {
  pname = "meteocat-exporter";
  version = "1.0";

  src = ./.;

  doCheck = false;

  build-system = with python3Packages; [
    setuptools
  ];

  dependencies = with python3Packages; [
    beautifulsoup4
    lxml
    prometheus-client
  ];

  meta = with lib; {
    description = "MeteoCat Prometheus Exporter";
    platforms = platforms.linux;
  };
}
pkgs/meteocat-exporter/meteocat-exporter (new file, 54 lines)
@@ -0,0 +1,54 @@
#!/usr/bin/env python3

import time
from prometheus_client import start_http_server, Gauge
from bs4 import BeautifulSoup
from urllib import request

# Configuration -------------------------------------------
meteo_station = "X8"  # Barcelona - Zona Universitària
listening_port = 9929
update_period = 60 * 5  # Each 5 min
# ---------------------------------------------------------

metric_tmin = Gauge('meteocat_temp_min', 'Min temperature')
metric_tmax = Gauge('meteocat_temp_max', 'Max temperature')
metric_tavg = Gauge('meteocat_temp_avg', 'Average temperature')
metric_srad = Gauge('meteocat_solar_radiation', 'Solar radiation')

def update(st):
    url = 'https://www.meteo.cat/observacions/xema/dades?codi=' + st
    response = request.urlopen(url)
    data = response.read()
    soup = BeautifulSoup(data, 'lxml')
    table = soup.find("table", {"class" : "tblperiode"})
    rows = table.find_all('tr')
    row = rows[-1]  # Take the last row
    row_data = []
    header = row.find('th')
    header_text = header.text.strip()
    row_data.append(header_text)
    for col in row.find_all('td'):
        row_data.append(col.text)
    try:
        # Sometimes it will return '(s/d)' and fail to parse
        metric_tavg.set(float(row_data[1]))
        metric_tmax.set(float(row_data[2]))
        metric_tmin.set(float(row_data[3]))
        metric_srad.set(float(row_data[10]))
        #print("ok: temp_avg={}".format(float(row_data[1])))
    except:
        print("cannot parse row: {}".format(row))
        metric_tavg.set(float("nan"))
        metric_tmax.set(float("nan"))
        metric_tmin.set(float("nan"))
        metric_srad.set(float("nan"))

if __name__ == '__main__':
    start_http_server(port=listening_port, addr="localhost")
    while True:
        try:
            update(meteo_station)
        except:
            print("update failed")
        time.sleep(update_period)
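A quick way to verify the exporter (outside of the commit itself) is to curl its metrics endpoint on hut; the metric names come from the Gauges defined above, and the values shown are only placeholders:

```sh
# The exporter binds to localhost:9929 (see listening_port above)
curl -s http://127.0.0.1:9929/metrics | grep meteocat
# meteocat_temp_min 12.3
# meteocat_temp_max 18.7
# meteocat_temp_avg 15.1
# meteocat_solar_radiation 250.0
```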
pkgs/meteocat-exporter/setup.py (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='meteocat-exporter',
      version='1.0',
      # Modules to import from other scripts:
      packages=find_packages(),
      # Executables
      scripts=["meteocat-exporter"],
)
@@ -54,4 +54,6 @@ final: prev:
});

prometheus-slurm-exporter = prev.callPackage ./slurm-exporter.nix { };
+meteocat-exporter = prev.callPackage ./meteocat-exporter/default.nix { };
+upc-qaire-exporter = prev.callPackage ./upc-qaire-exporter/default.nix { };
}
pkgs/upc-qaire-exporter/default.nix (new file, 24 lines)
@@ -0,0 +1,24 @@
{ python3Packages, lib }:

python3Packages.buildPythonApplication rec {
  pname = "upc-qaire-exporter";
  version = "1.0";

  src = ./.;

  doCheck = false;

  build-system = with python3Packages; [
    setuptools
  ];

  dependencies = with python3Packages; [
    prometheus-client
    requests
  ];

  meta = with lib; {
    description = "UPC Qaire Prometheus Exporter";
    platforms = platforms.linux;
  };
}
pkgs/upc-qaire-exporter/setup.py (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='upc-qaire-exporter',
      version='1.0',
      # Modules to import from other scripts:
      packages=find_packages(),
      # Executables
      scripts=["upc-qaire-exporter"],
)
pkgs/upc-qaire-exporter/upc-qaire-exporter (new file, 74 lines)
@@ -0,0 +1,74 @@
#!/usr/bin/env python3

import time
from prometheus_client import start_http_server, Gauge
import requests, json
from datetime import datetime, timedelta

# Configuration -------------------------------------------
listening_port = 9928
update_period = 60 * 5  # Each 5 min
# ---------------------------------------------------------

metric_temp = Gauge('upc_c6_s302_temp', 'UPC C6 S302 temperature sensor')

def genparams():
    d = {}
    d['topic'] = 'TEMPERATURE'
    d['shift_dates_to'] = ''
    d['datapoints'] = 301
    d['devicesAndColors'] = '1148418@@@#40ACB6'

    now = datetime.now()

    d['fromDate'] = now.strftime('%d/%m/%Y')
    d['toDate'] = now.strftime('%d/%m/%Y')
    d['serviceFrequency'] = 'NONE'

    # WTF!
    for i in range(7):
        for j in range(48):
            key = 'week.days[{}].hours[{}].value'.format(i, j)
            d[key] = 'OPEN'

    return d

def measure():
    # First we need to load session
    s = requests.Session()
    r = s.get("https://upc.edu/sirena")
    if r.status_code != 200:
        print("bad HTTP status code on new session: {}".format(r.status_code))
        return

    if s.cookies.get("JSESSIONID") is None:
        print("cannot get JSESSIONID")
        return

    # Now we can pull the data
    url = "https://upcsirena.app.dexma.com/l_12535/analysis/by_datapoints/data.json"
    r = s.post(url, data=genparams())

    if r.status_code != 200:
        print("bad HTTP status code on data: {}".format(r.status_code))
        return

    #print(r.text)
    j = json.loads(r.content)

    # Just take the last one
    last = j['data']['chartElementList'][-1]
    temp = last['values']['1148418-Temperatura']

    return temp

if __name__ == '__main__':
    start_http_server(port=listening_port, addr="localhost")
    while True:
        try:
            metric_temp.set(measure())
        except:
            print("measure failed")
            metric_temp.set(float("nan"))

        time.sleep(update_period)
(age-encrypted secret, re-keyed; the binary payload is not reproducible here)
@@ -1,9 +1,11 @@
age-encryption.org/v1
-> ssh-ed25519 HY2yRg 4Xns3jybBuv8flzd+h3DArVBa/AlKjt1J9jAyJsasCE
uyVjJxh5i8aGgAgCpPl6zTYeIkf9mIwURof51IKWvwE
-> ssh-ed25519 CAWG4Q T2r6r1tyNgq1XlYXVtLJFfOfUnm6pSVlPwUqC1pkyRo
9yDoKU0EC34QMUXYnsJvhPCLm6oD9w7NlTi2sheoBqQ
-> ssh-ed25519 MSF3dg Bh9DekFTq+QMUEAonwcaIAJX4Js1O7cHjDniCD0gtm8
t/Ro0URLeDUWcvb7rlkG2s03PZ+9Rr3N4TIX03tXpVc
--- E5+/D4aK2ihKRR4YC5XOTmUbKgOqBR0Nk0gYvFOzXOI
[binary payload omitted]
-> ssh-ed25519 HY2yRg WSdjyQPzBJ4JbzQpGeq1AAYpWKoXmLI1ZtmNmM5QOzs
qGDlDT31DQF1DdHen0+5+52DdsQlabJdA2pOB5O1I6g
-> ssh-ed25519 CAWG4Q wioWMDxQjN+d4JdIbCwZg0DLQu1OH2mV6gukRprjuAs
670fE61hidOEh20hHiQAhP0+CjDF0WMBNzgwkGT8Yqg
-> ssh-ed25519 MSF3dg DN19uvAEtqq4708P6HpuX9i/o/qAvHX6dj69dCF2H1o
4Lu9GnjiFLMeXJ2C7aVPJsCHCQVlhylNWJi896Av92s
--- 7cKBwOYNOUZ2h3/kAY09aSMASZSxX7hZIT4kvlIiT6w
[binary payload omitted]
@@ -13,6 +13,115 @@ which is available at `hut` or `xeon07`. It runs the following services:
- Grafana: to plot the data in the web browser.
- Slurmctld: to manage the SLURM nodes.
- Gitlab runner: to run CI jobs from Gitlab.
- Nix binary cache: to serve cached nix builds.

This node is prone to interruptions from all the services it runs, so it is not
a good candidate for low noise executions.

# Binary cache

We provide a binary cache in `hut`, with the aim of avoiding unnecessary
recompilation of packages.

The cache should contain common packages from bscpkgs, but we don't provide
any guarantee of what will be available in the cache, or for how long.
We recommend following the latest version of the `jungle` flake to avoid cache
misses.
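For example, if your own project pins `jungle` as a flake input (the input name is an assumption here), staying current is just a matter of refreshing the lock file:

```sh
# Refresh flake inputs (including jungle) to their latest revisions
nix flake update
```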
## Usage

### From NixOS

In NixOS, we can add the cache through the `nix.settings` option, which will
enable it for all builds in the system.

```nix
{ ... }: {
  nix.settings = {
    substituters = [ "https://jungle.bsc.es/cache" ];
    trusted-public-keys = [ "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" ];
  };
}
```

### Interactively

The cache can also be specified on a per-command basis through the flags
`--substituters` and `--trusted-public-keys`:

```sh
nix build --substituters "https://jungle.bsc.es/cache" --trusted-public-keys "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" <...>
```

Note: you'll have to be a trusted user.
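If nix rejects those flags, the account is not trusted by the daemon. On the jungle hosts the `wheel` group is already trusted (see the `trusted-users = [ "@wheel" ]` line in the first hunk of this diff); on another machine an admin would typically add something along these lines (a sketch, adjust the user list to your setup):

```
# /etc/nix/nix.conf (restart nix-daemon afterwards)
trusted-users = root @wheel
```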
### Nix configuration file (non-NixOS)

If using nix outside of NixOS, you'll have to update `/etc/nix/nix.conf`:

```
# echo "substituters = https://jungle.bsc.es/cache" >> /etc/nix/nix.conf
# echo "trusted-public-keys = jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0=" >> /etc/nix/nix.conf
```

### Hint in flakes

By adding the configuration below to a `flake.nix`, when someone uses the flake,
`nix` will interactively ask to trust and use the provided binary cache:

```nix
{
  nixConfig = {
    extra-substituters = [
      "https://jungle.bsc.es/cache"
    ];
    extra-trusted-public-keys = [
      "jungle.bsc.es:pEc7MlAT0HEwLQYPtpkPLwRsGf80ZI26aj29zMw/HH0="
    ];
  };
  outputs = { ... }: {
    ...
  };
}
```

### Querying the cache

Check if the cache is available:
```sh
$ curl https://jungle.bsc.es/cache/nix-cache-info
StoreDir: /nix/store
WantMassQuery: 1
Priority: 30
```

Prevent nix from building locally:
```bash
nix build --max-jobs 0 <...>
```

Check if a package is in cache:
```bash
# Do a raw eval on the <package>.outPath (this should not build the package)
$ nix eval --raw jungle#openmp.outPath
/nix/store/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-openmp-2024.11
# Take the hash (everything from / to - in the basename) and curl <hash>.narinfo
# if it exists in the cache, it will return HTTP 200 and some information
# if not, it will return 404
$ curl https://jungle.bsc.es/cache/dwnn4dgm1m4184l4xbi0qfrprji9wjmi.narinfo
StorePath: /nix/store/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-openmp-2024.11
URL: nar/dwnn4dgm1m4184l4xbi0qfrprji9wjmi-17imkdfqzmnb013d14dx234bx17bnvws8baf3ii1xra5qi2y1wiz.nar
Compression: none
NarHash: sha256:17imkdfqzmnb013d14dx234bx17bnvws8baf3ii1xra5qi2y1wiz
NarSize: 1519328
References: 4gk773fqcsv4fh2rfkhs9bgfih86fdq8-gcc-13.3.0-lib nqb2ns2d1lahnd5ncwmn6k84qfd7vx2k-glibc-2.40-36
Deriver: vcn0x8hikc4mvxdkvrdxp61bwa5r7lr6-openmp-2024.11.drv
Sig: jungle.bsc.es:GDTOUEs1jl91wpLbb+gcKsAZjpKdARO9j5IQqb3micBeqzX2M/NDtKvgCS1YyiudOUdcjwa3j+hyzV2njokcCA==
# In one line:
$ curl "https://jungle.bsc.es/cache/$(nix eval --raw jungle#<package>.outPath | cut -d '/' -f4 | cut -d '-' -f1).narinfo"
```

#### References

- https://nix.dev/guides/recipes/add-binary-cache.html
- https://nixos.wiki/wiki/Binary_Cache