Added retry mechanism for URL fetch

This commit is contained in:
Laszlo Zeke 2017-01-27 23:11:29 +01:00
parent 34de0cb7e5
commit cd31a1a8bf
1 changed file with 10 additions and 7 deletions

View File

@ -112,12 +112,15 @@ class RSSVoDServer(webapp2.RequestHandler):
'Client-ID': TWITCH_CLIENT_ID 'Client-ID': TWITCH_CLIENT_ID
} }
request = urllib2.Request(url, headers=headers) request = urllib2.Request(url, headers=headers)
retries = 0
while retries < 3:
try: try:
result = urllib2.urlopen(request) result = urllib2.urlopen(request, timeout=3)
logging.debug('Fetch from twitch for %s with code %s' % (id, result.getcode())) logging.debug('Fetch from twitch for %s with code %s' % (id, result.getcode()))
return result.read() return result.read()
except urllib2.URLError as e: except BaseException as e:
logging.warning("Fetch exception caught: %s" % e) logging.warning("Fetch exception caught: %s" % e)
retries += 1
return '' return ''
def extract_userid(self, user_info): def extract_userid(self, user_info):