-rw-r--r--   www/py-urlgrabber/Makefile                            |  24
-rw-r--r--   www/py-urlgrabber/files/patch-setup.py                |   2
-rw-r--r--   www/py-urlgrabber/files/patch-urlgrabber-grabber.py   |  47
-rw-r--r--   www/py-urlgrabber/files/patch-urlgrabber__grabber.py  |  54
-rw-r--r--   www/py-urlgrabber/pkg-descr                           |   3
5 files changed, 69 insertions(+), 61 deletions(-)
diff --git a/www/py-urlgrabber/Makefile b/www/py-urlgrabber/Makefile
index 90a0b9169798..d1c67669c452 100644
--- a/www/py-urlgrabber/Makefile
+++ b/www/py-urlgrabber/Makefile
@@ -1,20 +1,17 @@
-# New ports collection makefile for:	py-urlgrabber
-# Date created:	2007-09-22
-# Whom:	Nicola Vitale <nivit@FreeBSD.org>
-#
+# Created by: Nicola Vitale <nivit@FreeBSD.org>
 # $FreeBSD$
-#
 
 PORTNAME=	urlgrabber
 PORTVERSION=	3.9.1
-PORTREVISION=	1
+PORTREVISION=	2
 CATEGORIES=	www python
-MASTER_SITES=	http://urlgrabber.baseurl.org/download/ \
-		http://nivi.interfree.it/distfiles/${PORTNAME}/
+MASTER_SITES=	http://urlgrabber.baseurl.org/download/
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
 
 MAINTAINER=	nivit@FreeBSD.org
-COMMENT=	A high-level cross-protocol url-grabber
+COMMENT=	High-level cross-protocol url-grabber
+
+LICENSE=	LGPL21
 
 BUILD_DEPENDS=	${PKGNAMEPREFIX}curl>=7.19.0_1:${PORTSDIR}/ftp/py-curl
 RUN_DEPENDS=	${PKGNAMEPREFIX}curl>=7.19.0_1:${PORTSDIR}/ftp/py-curl
@@ -25,8 +22,13 @@ USE_PYDISTUTILS=	yes
 REINPLACE_ARGS=	-i.bak -e 's,%%DOCSDIR%%,${DOCSDIR},'
 DOCSDIR=	${PREFIX}/share/doc/${PYTHON_PKGNAMEPREFIX}${PORTNAME}
 
-.if defined(NOPORTDOCS)
-MAKE_ENV=	NOPORTDOCS=${NOPORTDOCS}
+OPTIONSFILE?=	${PORT_DBDIR}/py-${PORTNAME}/options
+OPTIONS_DEFINE=	DOCS
+
+.include <bsd.port.options.mk>
+
+.if ${PORT_OPTIONS:MDOCS}
+MAKE_ENV=	INSTALL_DOCS=yes
 .endif
 
 post-patch:
diff --git a/www/py-urlgrabber/files/patch-setup.py b/www/py-urlgrabber/files/patch-setup.py
index 4f9a3f0132a2..5d0e779dc6e4 100644
--- a/www/py-urlgrabber/files/patch-setup.py
+++ b/www/py-urlgrabber/files/patch-setup.py
@@ -19,7 +19,7 @@
     if k.startswith('_'): del config[k]
 
 +import os
-+if not os.environ.has_key('NOPORTDOCS'):
++if os.environ.has_key('INSTALL_DOCS'):
 +    config.setdefault('data_files', [('%%DOCSDIR%%', ['README','LICENSE', 'TODO', 'ChangeLog'])])
 +
  from distutils.core import setup
diff --git a/www/py-urlgrabber/files/patch-urlgrabber-grabber.py b/www/py-urlgrabber/files/patch-urlgrabber-grabber.py
deleted file mode 100644
index 95f5b2b539b4..000000000000
--- a/www/py-urlgrabber/files/patch-urlgrabber-grabber.py
+++ /dev/null
@@ -1,47 +0,0 @@
-From: Seth Vidal <skvidal@fedoraproject.org>
-Date: Fri, 25 Sep 2009 20:16:08 +0000 (-0400)
-Subject: - fileobject size = 0 not None
-X-Git-Url: http://yum.baseurl.org/gitweb?p=urlgrabber.git;a=commitdiff_plain;h=f4e57ece7ded0f7ad83c8a40fe8423fab7812264;hp=926062a18852bc73686a5ef60307526841df8a32
-
-- fileobject size = 0 not None
-- if the filesize is small enough we could receive the whole thing in on chunk
-  and our max size message would never get out - so we make sure
-- make sure we multiply correctly b/c python is anal
----
-
-diff --git a/urlgrabber/grabber.py b/urlgrabber/grabber.py
-index e090e90..c4916d5 100644
---- urlgrabber/grabber.py
-+++ urlgrabber/grabber.py
-@@ -1052,7 +1052,7 @@ class PyCurlFileObject():
-         self._reget_length = 0
-         self._prog_running = False
-         self._error = (None, None)
--        self.size = None
-+        self.size = 0
-         self._do_open()
-
-
-@@ -1299,6 +1299,12 @@ class PyCurlFileObject():
-             err.code = code
-             err.exception = e
-             raise err
-+        else:
-+            if self._error[1]:
-+                msg = self._error[1]
-+                err = URLGRabError(14, msg)
-+                err.url = self.url
-+                raise err
-
-     def _do_open(self):
-         self.curl_obj = _curl_cache
-@@ -1536,7 +1542,8 @@ class PyCurlFileObject():
-         if self.opts.size: # if we set an opts size use that, no matter what
-             max_size = self.opts.size
-         if not max_size: return False # if we have None for all of the Max then this is dumb
--        if cur > max_size + max_size*.10:
-+
-+        if cur > int(float(max_size) * 1.10):
-
-             msg = _("Downloaded more than max size for %s: %s > %s") \
-                   % (self.url, cur, max_size)
diff --git a/www/py-urlgrabber/files/patch-urlgrabber__grabber.py b/www/py-urlgrabber/files/patch-urlgrabber__grabber.py
new file mode 100644
index 000000000000..1fcc6d6e0e1f
--- /dev/null
+++ b/www/py-urlgrabber/files/patch-urlgrabber__grabber.py
@@ -0,0 +1,54 @@
+--- ./urlgrabber/grabber.py.orig	2009-09-25 18:19:50.000000000 +0200
++++ ./urlgrabber/grabber.py	2013-08-15 17:18:28.000000000 +0200
+@@ -1052,7 +1052,7 @@
+         self._reget_length = 0
+         self._prog_running = False
+         self._error = (None, None)
+-        self.size = None
++        self.size = 0
+         self._do_open()
+
+
+@@ -1127,6 +1127,9 @@
+         if not opts:
+             opts = self.opts
+
++        # keepalives
++        if not opts.keepalive:
++            self.curl_obj.setopt(pycurl.FORBID_REUSE, 1)
+
+         # defaults we're always going to set
+         self.curl_obj.setopt(pycurl.NOPROGRESS, False)
+@@ -1158,7 +1161,8 @@
+             self.curl_obj.setopt(pycurl.CAPATH, opts.ssl_ca_cert)
+             self.curl_obj.setopt(pycurl.CAINFO, opts.ssl_ca_cert)
+         self.curl_obj.setopt(pycurl.SSL_VERIFYPEER, opts.ssl_verify_peer)
+-        self.curl_obj.setopt(pycurl.SSL_VERIFYHOST, opts.ssl_verify_host)
++        if opts.ssl_verify_host: # 1 is meaningless to curl
++            self.curl_obj.setopt(pycurl.SSL_VERIFYHOST, 2)
+         if opts.ssl_key:
+             self.curl_obj.setopt(pycurl.SSLKEY, opts.ssl_key)
+         if opts.ssl_key_type:
+@@ -1299,6 +1303,12 @@
+             err.code = code
+             err.exception = e
+             raise err
++        else:
++            if self._error[1]:
++                msg = self._error[1]
++                err = URLGRabError(14, msg)
++                err.url = self.url
++                raise err
+
+     def _do_open(self):
+         self.curl_obj = _curl_cache
+@@ -1536,7 +1546,8 @@
+         if self.opts.size: # if we set an opts size use that, no matter what
+             max_size = self.opts.size
+         if not max_size: return False # if we have None for all of the Max then this is dumb
+-        if cur > max_size + max_size*.10:
++
++        if cur > int(float(max_size) * 1.10):
+
+             msg = _("Downloaded more than max size for %s: %s > %s") \
+                   % (self.url, cur, max_size)
diff --git a/www/py-urlgrabber/pkg-descr b/www/py-urlgrabber/pkg-descr
index 2cdd89c74320..93428a2c22fa 100644
--- a/www/py-urlgrabber/pkg-descr
+++ b/www/py-urlgrabber/pkg-descr
@@ -6,5 +6,4 @@ a clean interface to protocol-independant file-access. Best of all,
 urlgrabber takes care of all those pesky file-fetching details, and lets
 you focus on whatever it is that your program is written to do!
 
-Author: Michael D. Stenner, Ryan Tomayko
-WWW: http://urlgrabber.baseurl.org/
+WWW: http://urlgrabber.baseurl.org/
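
Note on the shared hunk: both the removed patch-urlgrabber-grabber.py and its replacement patch-urlgrabber__grabber.py rewrite the size guard in PyCurlFileObject._over_max_size so that a transfer may run at most 10% past the expected size, computed as int(float(max_size) * 1.10) instead of the old max_size + max_size*.10. A minimal standalone Python sketch of that guard follows; the function name and arguments are illustrative stand-ins, not urlgrabber's actual method signature:

# Illustrative sketch only: a hypothetical standalone version of the
# size check that the patch applies to PyCurlFileObject._over_max_size.
def over_max_size(cur, max_size=None, opts_size=None):
    """Return True when 'cur' bytes exceed the allowed maximum by more than 10%."""
    if opts_size:          # an explicitly configured size always wins
        max_size = opts_size
    if not max_size:       # no limit known at all, so nothing to enforce
        return False
    # mirror the patched expression: allow 10% slack, compare as integers
    return cur > int(float(max_size) * 1.10)

assert over_max_size(1100, max_size=1000) is False  # exactly 10% over is still accepted
assert over_max_size(1101, max_size=1000) is True   # beyond 10% trips the guard

The other hunks are applied as shown above: FORBID_REUSE when keepalive is disabled, SSL_VERIFYHOST forced to 2 only when host verification is requested (a value of 1 is meaningless to curl), and a deferred error recorded in self._error raised as error 14 after the transfer.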