Updated addon to 2.4.2; fixed rare but dangerous website favs problem; fixed login problem.
tknorris committed Aug 5, 2014
1 parent 7670445 commit 2a29f65
Showing 4 changed files with 14 additions and 8 deletions.
addon.xml (1 addition & 1 deletion)
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--suppress ALL -->
-<addon id="plugin.video.1channel" version="2.4.1" name="1Channel" provider-name="Bstrdsmkr">
+<addon id="plugin.video.1channel" version="2.4.2" name="1Channel" provider-name="Bstrdsmkr">
 <requires>
 <import addon="xbmc.python" version="2.1.0"/>
 <import addon="script.module.urlresolver" version="2.2.0"/>
changelog.txt (5 additions & 0 deletions)
@@ -1,3 +1,8 @@
+[B]Version 2.4.2[/B]
+[B]Minor[/B]
+Fixed problem with website favorites hanging XBMC when not logged in
+Worked around infinite redirect problem when logging into site
+
 [B]Version 2.4.1[/B]
 [B]Minor[/B]
 Added retry on http requests to recover more gracefully from transient server errors (e.g. HTTP Error 503)
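The retry behaviour noted under 2.4.1 lives in the __http_get_with_retry_1/__http_get_with_retry_2 helpers touched in pw_scraper.py below. A minimal sketch of the pattern, assuming Python 2 and urllib2 as the addon uses, with a hypothetical MAX_RETRIES value (the real helpers also share a cookie jar and log timings):

    import time
    import urllib2

    MAX_RETRIES = 2  # hypothetical value; pw_scraper.py defines its own constant

    def http_get_with_retry(url):
        """Fetch url, retrying transient server errors such as HTTP 503."""
        retries = 0
        html = None
        while retries <= MAX_RETRIES:
            try:
                html = urllib2.urlopen(url).read()
                break  # success; stop retrying
            except urllib2.HTTPError as e:
                if e.code == 503 and retries < MAX_RETRIES:
                    retries += 1
                    time.sleep(1)  # brief pause before the next attempt
                else:
                    raise  # permanent error, or retries exhausted
        return html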
help.faq (1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-[B][COLOR blue]2.4.1 Release Notes[/COLOR][/B]
+[B][COLOR blue]2.4.2 Release Notes[/COLOR][/B]
 - The attempt in V2.3.0 to use XBMC's native resume functionality ultimately failed due to XBMC bugs that couldn't be worked around. The new approach is purely custom, but should work better and more consistently. One benefit of this approach is that resume points can be consistent within the addon and library.
 - Under the covers major changes in the code base have been made to make the whole addon easier to maintain.
 - The advanced search function has been rewritten to make it far more usable.
pw_scraper.py (7 additions & 6 deletions)
@@ -100,18 +100,17 @@ def get_favorities(self, section, page=None, paginate=False):

     def __get_fav_gen(self, html, url, page, paginate):
         if not page: page=1
-        pattern = '''<div class="index_item"> <a href="(.+?)"><img src="(.+?(\d{1,4})?\.jpg)" width="150" border="0">.+?<td align="center"><a href=".+?">(.+?)</a></td>.+?class="favs_deleted"><a href=\'(.+?)\' ref=\'delete_fav\''''
+        pattern = '''<div class="index_item"> <a href="(.+?)"><img src="(.+?(\d{1,4})?\.jpg)" width="150" border="0">.+?<td align="center"><a href=".+?">(.+?)</a>'''
         regex = re.compile(pattern, re.IGNORECASE | re.DOTALL)
         while True:
             fav={}
             for item in regex.finditer(html):
-                link, img, year, title, delete = item.groups()
+                link, img, year, title = item.groups()
                 if not year or len(year) != 4: year = ''
                 fav['url']=link
                 fav['img']=img
                 fav['year']=year
                 fav['title']=title
-                fav['delete']=delete
                 yield fav
 
             # if we're not paginating, then keep yielding until we run out of pages or hit the max
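This hunk is the website-favorites fix from the changelog. The 2.4.1 pattern's tail required the class="favs_deleted" delete link, which the site appears to render only for logged-in users, so an anonymous favorites page produced zero matches and the pagination loop (in the truncated part of this hunk) had nothing to yield and nowhere to stop. A quick check against a hypothetical logged-out row, with both patterns copied verbatim from the diff:

    import re

    # Hypothetical markup for a favorites row seen while logged out:
    # no "favs_deleted" delete link (assumed to be rendered only after login).
    ROW = ('<div class="index_item"> <a href="/watch-12345-Example.html">'
           '<img src="/images/12345-2014.jpg" width="150" border="0"></a>'
           '<td align="center"><a href="/watch-12345-Example.html">Example</a></td>')

    OLD = '''<div class="index_item"> <a href="(.+?)"><img src="(.+?(\d{1,4})?\.jpg)" width="150" border="0">.+?<td align="center"><a href=".+?">(.+?)</a></td>.+?class="favs_deleted"><a href=\'(.+?)\' ref=\'delete_fav\''''
    NEW = '''<div class="index_item"> <a href="(.+?)"><img src="(.+?(\d{1,4})?\.jpg)" width="150" border="0">.+?<td align="center"><a href=".+?">(.+?)</a>'''

    for version, pattern in (('2.4.1', OLD), ('2.4.2', NEW)):
        found = re.compile(pattern, re.IGNORECASE | re.DOTALL).search(ROW)
        print('%s matches: %s' % (version, bool(found)))
    # 2.4.1 matches: False  (the generator starves and the loop never ends)
    # 2.4.2 matches: True   (matches with or without the delete link)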
@@ -325,11 +324,12 @@ def __season_gen(self, html):
             yield (season_label,season_html)
 
     def __get_url(self,url, headers={}, login=False):
-        _1CH.log('Fetching URL: %s' % url)
         before = time.time()
         html = self.__http_get_with_retry_1(url, headers)
 
         if login and not '<a href="/logout.php">[ Logout ]</a>' in html:
-            if self.__login(url):
+            _1CH.log('Logging in for url %s' % url)
+            if self.__login(self.base_url):
                 html = self.__http_get_with_retry_1(url, headers)
             else:
                 html=None
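This is the login workaround from the changelog. 2.4.1 passed the page being fetched into __login as the post-login redirect, which could bounce the site into an infinite redirect loop; 2.4.2 always logs in against self.base_url and then re-requests the original URL itself. A sketch of the resulting flow, with hypothetical scraper.http_get/scraper.login stand-ins for the private methods:

    def get_url_logged_in(scraper, url):
        # Sketch of __get_url's 2.4.2 behaviour, not the addon's exact code.
        html = scraper.http_get(url)
        if '<a href="/logout.php">[ Logout ]</a>' not in html:
            # 2.4.1 called login(url); redirecting back into a deep URL
            # could loop forever. 2.4.2 logs in at the stable base URL...
            if scraper.login(scraper.base_url):
                html = scraper.http_get(url)  # ...then re-fetches the page itself
            else:
                html = None
        return html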
@@ -427,7 +427,6 @@ def __get_cached_url(self, url, cache_limit=8):
         return body
 
     def __login(self,redirect):
-        _1CH.log('Logging in for url %s' % redirect)
         url = self.base_url + '/login.php'
         net = Net()
         cookiejar = _1CH.get_profile()
@@ -443,6 +442,7 @@
         return False
 
     def __http_get_with_retry_1(self, url, headers):
+        _1CH.log('Fetching URL: %s' % url)
         net = Net()
         cookiejar = _1CH.get_profile()
         cookiejar = os.path.join(cookiejar, 'cookies')
@@ -469,6 +469,7 @@ def __http_get_with_retry_1(self, url, headers):
         return html
 
     def __http_get_with_retry_2(self, url, request):
+        _1CH.log('Fetching URL: %s' % request.get_full_url())
         retries=0
         html=None
         while retries<=MAX_RETRIES:
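A smaller change runs through the last three hunks: the 'Fetching URL' log line moves out of __get_url (and the 'Logging in' line out of __login) into the two retry helpers, so every actual HTTP round trip is logged, including retries and the re-fetch after a login. __http_get_with_retry_2 receives a prepared request object rather than a URL string, hence get_full_url(), a standard urllib2.Request method:

    import urllib2

    req = urllib2.Request('http://www.example.com/index.php?sort=date&page=2')
    print(req.get_full_url())  # http://www.example.com/index.php?sort=date&page=2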
