raito
6f56566032
This helps us get rid of useless crawler traffic. It is enabled for gerrit01, which suffers the most from it. Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
33 lines · 874 B · Nix
{ pkgs, config, lib, ... }:

let
  inherit (lib) mkEnableOption mkIf mkOption types concatStringsSep mkDefault;

  cfg = config.bagel.services.nginx.crawler-blocker;

  # Render a robots.txt that lists every blocked user agent as one group and
  # disallows the whole site for it.
  mkRobotsFile = blockedUAs: pkgs.writeText "robots.txt" ''
    ${concatStringsSep "\n" (map (ua: "User-Agent: ${ua}") blockedUAs)}
    Disallow: /
  '';
in
{
  options = {
    bagel.services.nginx.crawler-blocker = {
      enable = mkEnableOption "the crawler blocker";

      userAgents = mkOption {
        type = types.listOf types.str;
        # builtins.split interleaves regex match lists with the string pieces
        # and leaves a trailing empty string for the final newline, so filter
        # the result down to non-empty strings to satisfy listOf str.
        default = builtins.filter (ua: builtins.isString ua && ua != "")
          (builtins.split "\n" (builtins.readFile ./blocked-ua.txt));
      };
    };

    # Re-declaring services.nginx.virtualHosts with a submodule type merges
    # with the upstream option and injects a default "= /robots.txt" location
    # into every virtual host; mkDefault keeps it overridable per host.
    services.nginx.virtualHosts = mkOption {
      type = types.attrsOf (types.submodule {
        config = {
          locations."= /robots.txt" = mkIf cfg.enable (mkDefault {
            alias = mkRobotsFile cfg.userAgents;
          });
        };
      });
    };
  };
}
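For context, a minimal sketch of how a host configuration might enable this module; the import path and file name are assumptions for illustration, not taken from this repository:

{ ... }:
{
  # Hypothetical import path; adjust to wherever this module lives.
  imports = [ ./crawler-blocker.nix ];

  # Every nginx virtual host on this machine then serves the generated
  # robots.txt, unless it overrides the "= /robots.txt" location itself.
  bagel.services.nginx.crawler-blocker.enable = true;
}

With a blocked-ua.txt listing, say, GPTBot and Amazonbot (example user agents, not the repository's actual list), the generated robots.txt would read:

User-Agent: GPTBot
User-Agent: Amazonbot
Disallow: /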