Generate robots.txt in a more Catalystic way.

Eelco Dolstra 2009-03-31 14:14:45 +00:00
parent d6e996d01c
commit 156f40130e

@@ -246,18 +246,35 @@ sub get_builds : Chained('/') PathPart('') CaptureArgs(0) {
 sub robots_txt : Path('robots.txt') {
     my ($self, $c) = @_;
 
+    sub uri_for {
+        my ($controller, $action, @args) = @_;
+        return $c->uri_for($c->controller($controller)->action_for($action), @args)->path;
+    }
+
+    sub channelUris {
+        my ($controller, $bindings) = @_;
+        return
+            ( "Disallow: " . uri_for($controller, 'closure', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'manifest', $bindings)
+            , "Disallow: " . uri_for($controller, 'nar', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'pkg', $bindings, "*")
+            , "Disallow: " . uri_for($controller, 'nixexprs', $bindings)
+            );
+    }
+
     # Put actions that are expensive or not useful for indexing in
     # robots.txt. Note: wildcards are not universally supported in
     # robots.txt, but apparently Google supports them.
     my @rules =
         ( "User-agent: *"
-        , "Disallow: /*/nix/closure/*"
-        , "Disallow: /*/channel/*/MANIFEST.bz2"
-        , "Disallow: /*/nar/*"
-        , "Disallow: /*.nixpkg"
-        , "Disallow: /build/*/buildtime-deps"
-        , "Disallow: /build/*/runtime-deps"
-        , "Disallow: /build/*/nixlog/*/tail"
+        , "Disallow: " . uri_for('Build', 'buildtimedeps', ["*"])
+        , "Disallow: " . uri_for('Build', 'runtimedeps', ["*"])
+        , "Disallow: " . uri_for('Build', 'view_nixlog', ["*"], "*/tail")
+        , channelUris('Root', ["*"])
+        , channelUris('Project', ["*", "*"])
+        , channelUris('Jobset', ["*", "*", "*"])
+        , channelUris('Job', ["*", "*", "*", "*"])
+        , channelUris('Build', ["*"])
         );
     $c->stash->{'plain'} = { data => join("\n", @rules) };
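
Each Disallow line is now derived from Catalyst's dispatch table rather than hardcoded: the uri_for() helper resolves a controller action via action_for() and asks $c->uri_for() to render its path, with "*" passed for the capture arguments so that literal wildcards end up in the result. Assuming the Build actions still dispatch to the same paths as the old hardcoded entries they replace, the generated robots.txt would begin roughly as follows (each channelUris() call then expands to five further Disallow lines, one per channel action: closure, manifest, nar, pkg and nixexprs):

    User-agent: *
    Disallow: /build/*/buildtime-deps
    Disallow: /build/*/runtime-deps
    Disallow: /build/*/nixlog/*/tail
    ...

The point of going through the dispatcher is that the rules track the route definitions: if an action's path ever changes, robots.txt follows automatically instead of silently going stale.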