Raito Bezarius
8c0c7b517f
This helps us get rid of useless traffic from crawlers. It is enabled for gerrit01, which is suffering the most from this.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
32 lines
883 B
Nix
{ pkgs, config, lib, ... }:
let
  inherit (lib) mkEnableOption mkIf mkOption types concatStringsSep mkDefault splitString;

  cfg = config.bagel.services.nginx.crawler-blocker;

  # Render a robots.txt that denies everything to the given user agents.
  mkRobotsFile = blockedUAs: pkgs.writeText "robots.txt" ''
    ${concatStringsSep "\n" (map (ua: "User-agent: ${ua}") blockedUAs)}
    Disallow: /
  '';
in
{
  options = {
    bagel.services.nginx.crawler-blocker = {
      enable = mkEnableOption "the crawler blocker";

      # Crawler user agents to block, read one per line from blocked-ua.txt.
      userAgents = mkOption {
        type = types.listOf types.str;
        default = splitString "\n" (builtins.readFile ./blocked-ua.txt);
      };
    };

    # Extend every nginx virtual host with a /robots.txt location serving the
    # generated file; mkDefault lets individual hosts override it.
    services.nginx.virtualHosts = mkOption {
      type = types.attrsOf (types.submodule {
        config = {
          locations."= /robots.txt" = mkIf cfg.enable (mkDefault {
            alias = mkRobotsFile cfg.userAgents;
          });
        };
      });
    };
  };
}
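
For illustration, with a hypothetical blocked-ua.txt listing GPTBot and CCBot, mkRobotsFile would render roughly this single group, which disallows the whole site to both agents:

User-agent: GPTBot
User-agent: CCBot
Disallow: /

Because the location is wrapped in mkDefault, any virtual host that declares its own "= /robots.txt" location takes precedence over the blocker.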
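A minimal sketch of how a host such as gerrit01 might opt in; the option path comes from the module above, while the surrounding host configuration file is an assumption:

{ ... }:
{
  bagel.services.nginx.crawler-blocker.enable = true;
}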