diff options
author | Andreas Enge <andreas@enge.fr> | 2013-12-08 22:18:58 +0100 |
---|---|---|
committer | Andreas Enge <andreas@enge.fr> | 2013-12-08 22:18:58 +0100 |
commit | a3eb919fb4621b94d56522a661b6564d391196e8 (patch) | |
tree | 3f4ef7729891be5dcf1e9ad589362985f6e1e263 /gnu/packages | |
parent | e7444ee273977d045ec9b61c9e98bc33405b3427 (diff) | |
download | guix-a3eb919fb4621b94d56522a661b6564d391196e8.tar.gz guix-a3eb919fb4621b94d56522a661b6564d391196e8.zip |
gnu: Add perl-www-robotrules.
* gnu/packages/web.scm (perl-www-robotrules): New variable.
Diffstat (limited to 'gnu/packages')
-rw-r--r-- | gnu/packages/web.scm | 25 |
1 file changed, 25 insertions(+), 0 deletions(-)
diff --git a/gnu/packages/web.scm b/gnu/packages/web.scm
index 013efc1790..d9821d87ec 100644
--- a/gnu/packages/web.scm
+++ b/gnu/packages/web.scm
@@ -295,3 +295,28 @@ the selection of a preferred content representation based upon attributes
 of the negotiable variants and the value of the various Accept* header
 fields in the request.")
    (home-page "http://search.cpan.org/~gaas/HTTP-Negotiate/")))
+
+(define-public perl-www-robotrules
+  (package
+    (name "perl-www-robotrules")
+    (version "6.02")
+    (source (origin
+             (method url-fetch)
+             (uri (string-append
+                   "mirror://cpan/authors/id/G/GA/GAAS/WWW-RobotRules-"
+                   version ".tar.gz"))
+             (sha256
+              (base32
+               "07m50dp5n5jxv3m93i55qvnd67a6g7cvbvlik115kmc8lbkh5da6"))))
+    (build-system perl-build-system)
+    (inputs
+     `(("perl-uri" ,perl-uri)))
+    (license (package-license perl))
+    (synopsis "Perl database of robots.txt-derived permissions")
+    (description
+     "The WWW::RobotRules module parses /robots.txt files as specified in
+\"A Standard for Robot Exclusion\", at
+<http://www.robotstxt.org/wc/norobots.html>. Webmasters can use the
+/robots.txt file to forbid conforming robots from accessing parts of
+their web site.")
+    (home-page "http://search.cpan.org/~gaas/WWW-RobotRules/")))