Diffstat (limited to 'www/py-robot-detection')
-rw-r--r--  www/py-robot-detection/Makefile      18
-rw-r--r--  www/py-robot-detection/distinfo       3
-rw-r--r--  www/py-robot-detection/pkg-descr      2
-rw-r--r--  www/py-robot-detection/pkg-message   14
4 files changed, 37 insertions, 0 deletions
diff --git a/www/py-robot-detection/Makefile b/www/py-robot-detection/Makefile
new file mode 100644
index 000000000000..1d225251a46d
--- /dev/null
+++ b/www/py-robot-detection/Makefile
@@ -0,0 +1,18 @@
+PORTNAME= robot-detection
+DISTVERSION= 0.4
+CATEGORIES= www python
+MASTER_SITES= PYPI
+PKGNAMEPREFIX= ${PYTHON_PKGNAMEPREFIX}
+
+MAINTAINER= einar@isnic.is
+COMMENT= Detect if an HTTP User-Agent header is likely to be a bot
+WWW= https://github.com/amandasaurus/robot-detection
+
+LICENSE= GPLv3+
+
+USES= python
+USE_PYTHON= autoplist distutils
+
+RUN_DEPENDS= ${PYTHON_PKGNAMEPREFIX}six>0:devel/py-six@${PY_FLAVOR}
+
+.include <bsd.port.mk>
diff --git a/www/py-robot-detection/distinfo b/www/py-robot-detection/distinfo
new file mode 100644
index 000000000000..7699a29a34e2
--- /dev/null
+++ b/www/py-robot-detection/distinfo
@@ -0,0 +1,3 @@
+TIMESTAMP = 1725370739
+SHA256 (robot-detection-0.4.tar.gz) = 3d8fb72ca47164b8ce55e33bdda93742f62c348def7d3cc3b42b0ceb4795a2f5
+SIZE (robot-detection-0.4.tar.gz) = 6387
diff --git a/www/py-robot-detection/pkg-descr b/www/py-robot-detection/pkg-descr
new file mode 100644
index 000000000000..fd974991a39b
--- /dev/null
+++ b/www/py-robot-detection/pkg-descr
@@ -0,0 +1,2 @@
+Library for detecting if an HTTP User-Agent header is likely to be a bot.
+It uses the list of registered robots from https://www.robotstxt.org/.
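
For context, the library itself is used as a single predicate call, per the
upstream README. A minimal sketch; the User-Agent strings below are
illustrative examples, not taken from the port:

    # robot_detection.is_robot() returns True when the given User-Agent
    # string matches a known robot from the robotstxt.org database.
    import robot_detection

    ua = "Googlebot/2.1 (+http://www.google.com/bot.html)"
    print(robot_detection.is_robot(ua))                 # expected True
    print(robot_detection.is_robot("NotARealBot/1.0"))  # expected False
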
diff --git a/www/py-robot-detection/pkg-message b/www/py-robot-detection/pkg-message
new file mode 100644
index 000000000000..e3e1aed11a1a
--- /dev/null
+++ b/www/py-robot-detection/pkg-message
@@ -0,0 +1,14 @@
+[
+{ type: install
+ message: <<EOM
+You can download a new version of the Robot Database from https://www.robotstxt.org/db/all.txt.
+
+Download the database dump, then run robot_detection.py with the dump file as its first argument:
+
+ $ fetch https://www.robotstxt.org/db/all.txt
+ $ python robot_detection.py all.txt
+
+If the database has changed, it will print the new version of the robot_useragents variable, which you then need to put into the source code.
+EOM
+}
+]