aboutsummaryrefslogtreecommitdiff
path: root/perl
diff options
context:
space:
mode:
authorLukenShiro <lukenshiro@ngi.it>2011-12-14 13:05:27 -0600
committerNiels Horn <niels.horn@slackbuilds.org>2011-12-18 00:07:49 -0200
commitd2894d596903d61f8cf80b155968a23e45581a23 (patch)
tree3bb44beae444a83d1d12afda5e642e864f2d6c9d /perl
parentca407505737e5d4712f1675a0a44a404de6debc8 (diff)
perl/perl-www-robotrules: Added (DB of robots.txt-derived perms)
Signed-off-by: Robby Workman <rworkman@slackbuilds.org>
Diffstat (limited to 'perl')
-rw-r--r--perl/perl-www-robotrules/README10
-rw-r--r--perl/perl-www-robotrules/perl-www-robotrules.SlackBuild90
-rw-r--r--perl/perl-www-robotrules/perl-www-robotrules.info10
-rw-r--r--perl/perl-www-robotrules/slack-desc18
4 files changed, 128 insertions, 0 deletions
diff --git a/perl/perl-www-robotrules/README b/perl/perl-www-robotrules/README
new file mode 100644
index 0000000000000..54915b3f0b47b
--- /dev/null
+++ b/perl/perl-www-robotrules/README
@@ -0,0 +1,10 @@
+This module parses /robots.txt files as specified in "A Standard for
+Robot Exclusion", at <http://www.robotstxt.org/wc/norobots.html>.
+Webmasters can use the /robots.txt file to forbid conforming robots
+from accessing parts of their web site.
+The parsed files are kept in a WWW::RobotRules object, and this
+object provides methods to check if access to a given URL is
+prohibited. The same WWW::RobotRules object can be used for one
+or more parsed /robots.txt files on any number of hosts.
+
+This requires perl-uri-escape.
diff --git a/perl/perl-www-robotrules/perl-www-robotrules.SlackBuild b/perl/perl-www-robotrules/perl-www-robotrules.SlackBuild
new file mode 100644
index 0000000000000..afa3c392f1a6b
--- /dev/null
+++ b/perl/perl-www-robotrules/perl-www-robotrules.SlackBuild
@@ -0,0 +1,90 @@
+#!/bin/sh
+
+# Slackware build script for perl-www-robotrules
+
+# Copyright 2011 LukenShiro, Italy
+# All rights reserved.
+#
+# Redistribution and use of this script, with or without modification, is
+# permitted provided that the following conditions are met:
+#
+# 1. Redistributions of this script must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+PRGNAM=perl-www-robotrules
+VERSION=${VERSION:-6.01}
+BUILD=${BUILD:-1}
+TAG=${TAG:-_SBo}
+
+# Automatically determine the architecture we're building on:
+if [ -z "$ARCH" ]; then
+ case "$( uname -m )" in
+ i?86) ARCH=i486 ;;
+ arm*) ARCH=arm ;;
+ *) ARCH=$( uname -m ) ;;
+ esac
+fi
+
+CWD=$(pwd)
+TMP=${TMP:-/tmp/SBo}
+PKG=$TMP/package-$PRGNAM
+OUTPUT=${OUTPUT:-/tmp}
+
+SRCNAM=WWW-RobotRules
+DOCFILES="Changes README"
+
+set -e # Exit on most errors
+
+rm -rf $PKG
+mkdir -p $TMP $PKG $OUTPUT
+cd $TMP
+rm -rf $SRCNAM-$VERSION
+tar xvf $CWD/$SRCNAM-$VERSION.tar.gz
+cd $SRCNAM-$VERSION
+chown -R root:root .
+find . \
+ \( -perm 777 -o -perm 775 -o -perm 711 -o -perm 555 -o -perm 511 \) \
+ -exec chmod 755 {} \; -o \
+ \( -perm 666 -o -perm 664 -o -perm 600 -o -perm 444 -o -perm 440 -o -perm 400 \) \
+ -exec chmod 644 {} \;
+
+echo "y" | CFLAGS="$SLKCFLAGS" perl Makefile.PL \
+ PREFIX=/usr \
+ INSTALLDIRS=vendor \
+ INSTALLVENDORMAN3DIR=/usr/man/man3
+make
+make test
+make install DESTDIR=$PKG
+
+# Remove perllocal.pod and other special files that don't need to be installed
+find $PKG -name perllocal.pod -o -name ".packlist" -o -name "*.bs" | xargs rm -f || true
+
+# Remove empty directories
+find $PKG -depth -type d -empty -exec rm -rf {} \;
+
+find $PKG -print0 | xargs -0 file | grep -e "executable" -e "shared object" | grep ELF \
+ | cut -f 1 -d : | xargs strip --strip-unneeded 2> /dev/null || true
+
+find $PKG/usr/man -type f -exec gzip -9 {} \;
+for i in $( find . -type l ) ; do ln -s $( readlink $i ).gz $i.gz ; rm $i ; done
+
+mkdir -p $PKG/usr/doc/$PRGNAM-$VERSION
+cp -a $DOCFILES $PKG/usr/doc/$PRGNAM-$VERSION
+cat $CWD/$PRGNAM.SlackBuild > $PKG/usr/doc/$PRGNAM-$VERSION/$PRGNAM.SlackBuild
+
+mkdir -p $PKG/install
+cat $CWD/slack-desc > $PKG/install/slack-desc
+
+cd $PKG
+/sbin/makepkg -l y -c n $OUTPUT/$PRGNAM-$VERSION-$ARCH-$BUILD$TAG.${PKGTYPE:-tgz}
diff --git a/perl/perl-www-robotrules/perl-www-robotrules.info b/perl/perl-www-robotrules/perl-www-robotrules.info
new file mode 100644
index 0000000000000..bee067f755a15
--- /dev/null
+++ b/perl/perl-www-robotrules/perl-www-robotrules.info
@@ -0,0 +1,10 @@
+PRGNAM="perl-www-robotrules"
+VERSION="6.01"
+HOMEPAGE="http://search.cpan.org/dist/WWW-RobotRules/"
+DOWNLOAD="http://search.cpan.org/CPAN/authors/id/G/GA/GAAS/WWW-RobotRules-6.01.tar.gz"
+MD5SUM="b1252da49727320a1b5e20b3521d2499"
+DOWNLOAD_x86_64=""
+MD5SUM_x86_64=""
+MAINTAINER="LukenShiro"
+EMAIL="lukenshiro@ngi.it"
+APPROVED="rworkman"
diff --git a/perl/perl-www-robotrules/slack-desc b/perl/perl-www-robotrules/slack-desc
new file mode 100644
index 0000000000000..a9900b5749f0d
--- /dev/null
+++ b/perl/perl-www-robotrules/slack-desc
@@ -0,0 +1,18 @@
+# HOW TO EDIT THIS FILE:
+# The handy ruler below makes it easier to edit a package description. Line
+# up the first '|' above the ':' following the base package name, and the '|' on
+# the right side marks the last column you can put a character in. You must make
+# exactly 11 lines for the formatting to be correct. It's also customary to
+# leave one space after the ':'.
+# |----------------------------------------------------------|
+perl-www-robotrules: perl-www-robotrules (DB of robots.txt-derived perms)
+perl-www-robotrules:
+perl-www-robotrules: This perl module parses /robots.txt files as specified
+perl-www-robotrules: in "A Standard for Robot Exclusion". Webmasters can use
+perl-www-robotrules: the /robots.txt file to forbid conforming robots from
+perl-www-robotrules: accessing parts of their web site.
+perl-www-robotrules:
+perl-www-robotrules: http://search.cpan.org/dist/WWW-RobotRules/
+perl-www-robotrules:
+perl-www-robotrules:
+perl-www-robotrules: