# Prometheus scraper (agent mode) feeding a local Mimir instance.
# Scrape targets are derived automatically from the monitoring metadata
# (`bagel.meta.monitoring`) advertised by every node in the deployment.
{ config, lib, nodes, ... }:

let
  cfg = config.bagel.services.prometheus;
  inherit (lib) mkEnableOption mkIf;

  # One metadata record per deployed node: its hostname, the address it
  # can be scraped at (null when not advertised), and its exporter list.
  monitoringMetas = lib.mapAttrsToList (_: node: {
    name = node.config.networking.hostName;
    address = node.config.bagel.meta.monitoring.address or null;
    exporters = node.config.bagel.meta.monitoring.exporters or [ ];
  }) nodes;

  # A node is scrapable only when it advertises both an address and at
  # least one exporter.
  scrapableMetas = builtins.filter
    (meta: meta.address != null && meta.exporters != [ ])
    monitoringMetas;

  # Build one Prometheus scrape job per node, with one target per exporter.
  mkJobConfig = meta: {
    job_name = meta.name;
    static_configs = [
      { targets = map (exporter: "${meta.address}:${toString exporter.port}") meta.exporters; }
    ];
  };

  jobConfigs = map mkJobConfig scrapableMetas;
in
{
  options.bagel.services.prometheus.enable = mkEnableOption "Prometheus scraper";

  config = mkIf cfg.enable {
    age.secrets.mimir-environment.file = ../../../secrets/mimir-environment.age;

    # Prometheus runs in agent mode: it only scrapes and remote-writes,
    # delegating storage and querying to Mimir below.
    services.prometheus = {
      enable = true;
      enableAgentMode = true;
      listenAddress = "127.0.0.1";
      port = 9001;
      globalConfig.scrape_interval = "15s";
      scrapeConfigs = jobConfigs;
      remoteWrite = [
        { url = "http://localhost:9009/api/v1/push"; }
      ];
    };

    services.mimir = {
      enable = true;
      # Required so the ${...} placeholders below are expanded from the
      # environment at startup.
      extraFlags = [ "--config.expand-env=true" ];
      configuration = {
        multitenancy_enabled = false;
        common.storage = {
          backend = "s3";
          s3 = {
            endpoint = "s3.delroth.net";
            bucket_name = "bagel-mimir";
            # Secrets injected via the systemd EnvironmentFile configured
            # below; never stored in the Nix store.
            secret_access_key = "\${S3_KEY}";
            access_key_id = "\${S3_KEY_ID}";
          };
        };
        server = {
          http_listen_port = 9009;
          grpc_server_max_recv_msg_size = 104857600;
          grpc_server_max_send_msg_size = 104857600;
          grpc_server_max_concurrent_streams = 1000;
        };
        # Single-node deployment: no replication.
        ingester.ring.replication_factor = 1;
        blocks_storage.backend = "s3";
        ruler_storage = {
          backend = "local";
          local.directory = ./alerts;
        };
      };
    };

    systemd.services.mimir.serviceConfig.EnvironmentFile = [
      config.age.secrets.mimir-environment.path
    ];
  };
}