* Generate robots.txt in a more Catalystic way.

Eelco Dolstra 2009-03-31 14:14:45 +00:00
parent d6e996d01c
commit 156f40130e


@@ -246,18 +246,35 @@ sub get_builds : Chained('/') PathPart('') CaptureArgs(0) {
 sub robots_txt : Path('robots.txt') {
     my ($self, $c) = @_;
 
+    sub uri_for {
+        my ($controller, $action, @args) = @_;
+        return $c->uri_for($c->controller($controller)->action_for($action), @args)->path;
+    }
+
+    sub channelUris {
+        my ($controller, $bindings) = @_;
+        return
+            ( "Disallow: " . uri_for($controller, 'closure', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'manifest', $bindings)
+            , "Disallow: " . uri_for($controller, 'nar', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'pkg', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'nixexprs', $bindings)
+            );
+    }
+
     # Put actions that are expensive or not useful for indexing in
     # robots.txt. Note: wildcards are not universally supported in
     # robots.txt, but apparently Google supports them.
     my @rules =
         ( "User-agent: *"
-        , "Disallow: /*/nix/closure/*"
-        , "Disallow: /*/channel/*/MANIFEST.bz2"
-        , "Disallow: /*/nar/*"
-        , "Disallow: /*.nixpkg"
-        , "Disallow: /build/*/buildtime-deps"
-        , "Disallow: /build/*/runtime-deps"
-        , "Disallow: /build/*/nixlog/*/tail"
+        , "Disallow: " . uri_for('Build', 'buildtimedeps', ["*"])
+        , "Disallow: " . uri_for('Build', 'runtimedeps', ["*"])
+        , "Disallow: " . uri_for('Build', 'view_nixlog', ["*"], "*/tail")
+        , channelUris('Root', ["*"])
+        , channelUris('Project', ["*", "*"])
+        , channelUris('Jobset', ["*", "*", "*"])
+        , channelUris('Job', ["*", "*", "*", "*"])
+        , channelUris('Build', ["*"])
         );
     $c->stash->{'plain'} = { data => join("\n", @rules) };
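
For context, a minimal sketch (not Hydra code) of the Catalyst idiom the commit adopts, assuming a context object $c is in scope and using the 'Build' controller's 'buildtimedeps' action from the diff above:

sub robots_rule_for {
    my ($c) = @_;

    # action_for() looks up the action object by method name on the
    # controller; uri_for() renders its URI with "*" substituted for the
    # capture argument; ->path drops scheme and host, leaving a
    # site-relative path suitable for a robots.txt Disallow line.
    my $action = $c->controller('Build')->action_for('buildtimedeps');
    return "Disallow: " . $c->uri_for($action, ["*"])->path;
}

With the old hardcoded list, renaming an action or changing its PathPart would silently leave a stale rule behind; deriving each Disallow line from the dispatcher keeps robots.txt in sync with the actual routes.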