Date:      Sun, 28 Apr 2013 14:22:52 +0200
From:      Ralf van der Enden <tremere@cainites.net>
To:        FreeBSD-gnats-submit@freebsd.org
Cc:        william88@gmail.com
Subject:   ports/178219: [PATCH] multimedia/py27-subliminal: fix compatibility with py-requests 1.1 (now default in portstree)
Message-ID:  <E1UWQd2-000Nja-CH@cainites.net>
Resent-Message-ID: <201304281230.r3SCU0o3049846@freefall.freebsd.org>


>Number:         178219
>Category:       ports
>Synopsis:       [PATCH] multimedia/py27-subliminal: fix compatibility with py-requests 1.1 (now default in the ports tree)
>Confidential:   no
>Severity:       non-critical
>Priority:       high
>Responsible:    freebsd-ports-bugs
>State:          open
>Quarter:        
>Keywords:       
>Date-Required:
>Class:          change-request
>Submitter-Id:   current-users
>Arrival-Date:   Sun Apr 28 12:30:00 UTC 2013
>Closed-Date:
>Last-Modified:
>Originator:     Ralf van der Enden
>Release:        FreeBSD 9.1-RELEASE-p2 amd64
>Organization:
>Environment:
System: FreeBSD lan.cainites.net 9.1-RELEASE-p2 FreeBSD 9.1-RELEASE-p2 #0: Thu Apr  4 03:16:08 CEST
>Description:
Add a patch to fix compatibility with py-requests 1.1, which is now the default in the ports tree (the patch is based on the subliminal 0.7-dev sources).
Add the correct minimum versions to the RUN_DEPENDS lines.
Add devel/py-lxml (an optional requirement of subliminal) so that the bierdopje service works.
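
For context, a minimal sketch of the requests API change the patch adapts the port to; the User-Agent string and URL below are illustrative only, not taken from the subliminal sources:

import requests

TIMEOUT = 10  # seconds; the subliminal services keep this value on self.timeout

# requests < 1.0 (what the unpatched 0.6.3 sources call):
#   session = requests.session(timeout=TIMEOUT,
#                              headers={'User-Agent': 'subliminal-example'})
#   r = session.get('http://example.org/shows.php')

# requests >= 1.1 (what the patched sources call): the Session object is
# created without arguments, default headers are set on the session, and
# the timeout is passed to every individual request.
session = requests.Session()
session.headers.update({'User-Agent': 'subliminal-example'})
r = session.get('http://example.org/shows.php', timeout=TIMEOUT)
print(r.status_code)

The rest of the patch simply threads timeout=self.timeout into every session.get()/session.post() call in the individual service modules.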

Port maintainer (william88@gmail.com) is cc'd.

Generated with FreeBSD Port Tools 0.99_7 (mode: change, diff: SVN)
>How-To-Repeat:
>Fix:

--- py27-subliminal-0.6.3_1.patch begins here ---
Index: Makefile
===================================================================
--- Makefile	(revision 316712)
+++ Makefile	(working copy)
@@ -3,6 +3,7 @@
 
 PORTNAME=	subliminal
 PORTVERSION=	0.6.3
+PORTREVISION=	1
 CATEGORIES=	multimedia python
 MASTER_SITES=	CHEESESHOP
 PKGNAMEPREFIX=	${PYTHON_PKGNAMEPREFIX}
@@ -13,10 +14,11 @@
 LICENSE=	LGPL3
 
 RUN_DEPENDS=	${PYTHON_PKGNAMEPREFIX}beautifulsoup>=4.0:${PORTSDIR}/www/py-beautifulsoup \
-		${PYTHON_PKGNAMEPREFIX}enzyme>0:${PORTSDIR}/multimedia/py-enzyme \
-		${PYTHON_PKGNAMEPREFIX}guessit>0:${PORTSDIR}/multimedia/py-guessit \
-		${PYTHON_PKGNAMEPREFIX}requests<1:${PORTSDIR}/www/py-requests \
-		${PYTHON_PKGNAMEPREFIX}html5lib>0:${PORTSDIR}/www/py-html5lib
+		${PYTHON_PKGNAMEPREFIX}enzyme>=0.1:${PORTSDIR}/multimedia/py-enzyme \
+		${PYTHON_PKGNAMEPREFIX}guessit>=0.4.1:${PORTSDIR}/multimedia/py-guessit \
+		${PYTHON_PKGNAMEPREFIX}requests>=1.1:${PORTSDIR}/www/py-requests \
+		${PYTHON_PKGNAMEPREFIX}html5lib>0:${PORTSDIR}/www/py-html5lib \
+		${PYTHON_PKGNAMEPREFIX}lxml>0:${PORTSDIR}/devel/py-lxml
 
 USE_PYTHON=	-2.7
 USE_PYDISTUTILS=	easy_install
Index: files/patch-requests-1_1
===================================================================
--- files/patch-requests-1_1	(revision 0)
+++ files/patch-requests-1_1	(working copy)
@@ -0,0 +1,143 @@
+--- setup.py.orig	2013-01-17 21:00:33.000000000 +0100
++++ setup.py	2013-04-27 23:26:09.000000000 +0200
+@@ -24,7 +24,7 @@
+ def read(fname):
+     return open(os.path.join(os.path.dirname(__file__), fname)).read()
+ 
+-required = ['beautifulsoup4 >= 4.0', 'guessit >= 0.4.1', 'requests < 1.0', 'enzyme >= 0.1', 'html5lib']
++required = ['beautifulsoup4 >= 4.0', 'guessit >= 0.4.1', 'requests >= 1.1', 'enzyme >= 0.1', 'html5lib']
+ if sys.hexversion < 0x20700f0:
+     required.append('argparse >= 1.1')
+ 
+--- subliminal/services/__init__.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/__init__.py	2013-04-27 23:33:06.000000000 +0200
+@@ -81,7 +81,8 @@
+     def init(self):
+         """Initialize connection"""
+         logger.debug(u'Initializing %s' % self.__class__.__name__)
+-        self.session = requests.session(timeout=10, headers={'User-Agent': self.user_agent})
++        self.session = requests.Session()
++        self.session.headers.update({'User-Agent': self.user_agent})
+ 
+     def init_cache(self):
+         """Initialize cache, make sure it is loaded from disk"""
+--- subliminal/services/addic7ed.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/addic7ed.py	2013-04-27 23:40:43.000000000 +0200
+@@ -48,7 +48,7 @@
+     @cachedmethod
+     def get_series_id(self, name):
+         """Get the show page and cache every show found in it"""
+-        r = self.session.get('%s/shows.php' % self.server_url)
++        r = self.session.get('%s/shows.php' % self.server_url, timeout=self.timeout)
+         soup = BeautifulSoup(r.content, self.required_features)
+         for html_series in soup.select('h3 > a'):
+             series_name = html_series.text.lower()
+@@ -70,7 +70,7 @@
+         except KeyError:
+             logger.debug(u'Could not find series id for %s' % series)
+             return []
+-        r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season))
++        r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season), timeout=self.timeout)
+         soup = BeautifulSoup(r.content, self.required_features)
+         subtitles = []
+         for row in soup('tr', {'class': 'epeven completed'}):
+@@ -102,7 +102,7 @@
+     def download(self, subtitle):
+         logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
+         try:
+-            r = self.session.get(subtitle.link, headers={'Referer': subtitle.link, 'User-Agent': self.user_agent})
++            r = self.session.get(subtitle.link, timeout=self.timeout, headers={'Referer': subtitle.link})
+             soup = BeautifulSoup(r.content, self.required_features)
+             if soup.title is not None and u'Addic7ed.com' in soup.title.text.strip():
+                 raise DownloadFailedError('Download limit exceeded')
+--- subliminal/services/bierdopje.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/bierdopje.py	2013-04-27 23:42:59.000000000 +0200
+@@ -45,7 +45,7 @@
+ 
+     @cachedmethod
+     def get_show_id(self, series):
+-        r = self.session.get('%sGetShowByName/%s' % (self.server_url, urllib.quote(series.lower())))
++        r = self.session.get('%sGetShowByName/%s' % (self.server_url, urllib.quote(series.lower())), timeout=self.timeout)
+         if r.status_code != 200:
+             logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
+             return None
+@@ -78,7 +78,7 @@
+         subtitles = []
+         for language in languages:
+             logger.debug(u'Getting subtitles for %s %d season %d episode %d with language %s' % (request_source, request_id, season, episode, language.alpha2))
+-            r = self.session.get('%sGetAllSubsFor/%s/%s/%s/%s/%s' % (self.server_url, request_id, season, episode, language.alpha2, request_is_tvdbid))
++            r = self.session.get('%sGetAllSubsFor/%s/%s/%s/%s/%s' % (self.server_url, request_id, season, episode, language.alpha2, request_is_tvdbid), timeout=self.timeout)
+             if r.status_code != 200:
+                 logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
+                 return []
+--- subliminal/services/subswiki.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/subswiki.py	2013-04-27 23:44:34.000000000 +0200
+@@ -55,7 +55,7 @@
+             if isinstance(request_series, unicode):
+                 request_series = request_series.encode('utf-8')
+             logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
+-            r = self.session.get('%s/serie/%s/%s/%s/' % (self.server_url, urllib.quote(request_series), season, episode))
++            r = self.session.get('%s/serie/%s/%s/%s/' % (self.server_url, urllib.quote(request_series), season, episode), timeout=self.timeout)
+             if r.status_code == 404:
+                 logger.debug(u'Could not find subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
+                 return []
+@@ -64,7 +64,7 @@
+             if isinstance(request_movie, unicode):
+                 request_movie = request_movie.encode('utf-8')
+             logger.debug(u'Getting subtitles for %s (%d) with languages %r' % (movie, year, languages))
+-            r = self.session.get('%s/film/%s_(%d)' % (self.server_url, urllib.quote(request_movie), year))
++            r = self.session.get('%s/film/%s_(%d)' % (self.server_url, urllib.quote(request_movie), year), timeout=self.timeout)
+             if r.status_code == 404:
+                 logger.debug(u'Could not find subtitles for %s (%d) with languages %r' % (movie, year, languages))
+                 return []
+--- subliminal/services/subtitulos.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/subtitulos.py	2013-04-27 23:45:04.000000000 +0200
+@@ -54,7 +54,7 @@
+         if isinstance(request_series, unicode):
+             request_series = unicodedata.normalize('NFKD', request_series).encode('ascii', 'ignore')
+         logger.debug(u'Getting subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
+-        r = self.session.get('%s/%s/%sx%.2d' % (self.server_url, urllib.quote(request_series), season, episode))
++        r = self.session.get('%s/%s/%sx%.2d' % (self.server_url, urllib.quote(request_series), season, episode), timeout=self.timeout)
+         if r.status_code == 404:
+             logger.debug(u'Could not find subtitles for %s season %d episode %d with languages %r' % (series, season, episode, languages))
+             return []
+--- subliminal/services/thesubdb.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/thesubdb.py	2013-04-27 23:47:24.000000000 +0200
+@@ -40,7 +40,7 @@
+         return self.query(video.path, video.hashes['TheSubDB'], languages)
+ 
+     def query(self, filepath, moviehash, languages):
+-        r = self.session.get(self.server_url, params={'action': 'search', 'hash': moviehash})
++        r = self.session.get(self.server_url, timeout=self.timeout, params={'action': 'search', 'hash': moviehash})
+         if r.status_code == 404:
+             logger.debug(u'Could not find subtitles for hash %s' % moviehash)
+             return []
+--- subliminal/services/tvsubtitles.py.orig	2013-01-17 20:47:10.000000000 +0100
++++ subliminal/services/tvsubtitles.py	2013-04-27 23:48:50.000000000 +0200
+@@ -52,7 +52,7 @@
+ 
+     @cachedmethod
+     def get_likely_series_id(self, name):
+-        r = self.session.post('%s/search.php' % self.server_url, data={'q': name})
++        r = self.session.post('%s/search.php' % self.server_url, data={'q': name}, timeout=self.timeout)
+         soup = BeautifulSoup(r.content, self.required_features)
+         maindiv = soup.find('div', 'left')
+         results = []
+@@ -70,7 +70,7 @@
+         could be found."""
+         # download the page of the season, contains ids for all episodes
+         episode_id = None
+-        r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season))
++        r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season), timeout=self.timeout)
+         soup = BeautifulSoup(r.content, self.required_features)
+         table = soup.find('table', id='table5')
+         for row in table.find_all('tr'):
+@@ -93,7 +93,7 @@
+     # subtitles
+     def get_sub_ids(self, episode_id):
+         subids = []
+-        r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id))
++        r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id), timeout=self.timeout)
+         epsoup = BeautifulSoup(r.content, self.required_features)
+         for subdiv in epsoup.find_all('a'):
+             if 'href' not in subdiv.attrs or not subdiv['href'].startswith('/subtitle'):
--- py27-subliminal-0.6.3_1.patch ends here ---

>Release-Note:
>Audit-Trail:
>Unformatted:


