switch to using requests with a timeout for RSS. cope with slow IMAP servers.

workspace
Brendan Howell 8 years ago
parent 95ef24b788
commit 166705d798

@@ -1,5 +1,6 @@
 # publications office for reading and writing
 from datetime import datetime
+import io
 import json
 import os
 import random
@@ -11,6 +12,7 @@ import feedparser
 import lxml.html
 import PIL
 from readability import readability
+import requests
 
 
 from bureau import Bureau, add_command, add_api
@@ -98,8 +100,10 @@ class Publications(Bureau):
         news = self._get_news()
         # TODO: get weather
         # TODO: get finance
-        inbox = self.send("PO", "unread")
         print("news", news[0])
+        inbox = self.send("PO", "unread")
+        if inbox is None:
+            inbox = []  # if IMAP times out just move on...
         self.print_full("news.html", news=news, inbox=inbox)
 
     @add_command("r", "Print a web page for reading")
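
Note on the inbox guard: self.send("PO", "unread") asks the Post Office bureau for unread mail, and the new `if inbox is None` branch assumes that call comes back with None when the IMAP server stalls. A minimal sketch of that failure mode, using the stdlib imaplib with a socket timeout; the fetch_unread helper and its arguments are hypothetical, not the bureau's actual code:

    import imaplib
    import socket

    def fetch_unread(host, user, password, timeout=20.0):
        """Return unread message ids, or None if the server stalls."""
        try:
            # imaplib accepts a socket-level timeout since Python 3.9
            conn = imaplib.IMAP4_SSL(host, timeout=timeout)
            conn.login(user, password)
            conn.select("INBOX")
            status, data = conn.search(None, "UNSEEN")
            if status != "OK":
                return None
            return data[0].split()
        except (socket.timeout, imaplib.IMAP4.error):
            # slow or broken server: hand back None so the caller
            # can print the paper without the inbox section
            return None

With a helper like that, a caller can fall back to an empty inbox instead of blocking the whole print job, which is exactly what the guard above does.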
@@ -164,7 +168,16 @@ class Publications(Bureau):
         for source in feeds:
             url = source[0]
             num_entries = source[1]
-            feed = feedparser.parse(url)
+
+            # get feed data with requests using a timeout
+            try:
+                resp = requests.get(url, timeout=20.0)
+            except requests.ReadTimeout:
+                self.log("Timeout reading RSS feed %s", url)
+                continue
+
+            feed_data = io.BytesIO(resp.content)
+            feed = feedparser.parse(feed_data)
 
             # work around if we don't have enough news
             if num_entries > len(feed.entries):
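
Standalone, the fetch-then-parse pattern this last hunk introduces looks like the sketch below. The 20-second timeout mirrors the diff; catching the broader requests.RequestException is an assumption on top of it, since requests.get can also raise ConnectTimeout or ConnectionError in addition to the ReadTimeout the commit handles. The fetch_feed helper and the feed URL are illustrative only:

    import io

    import feedparser
    import requests

    def fetch_feed(url, timeout=20.0):
        """Fetch a feed over HTTP with a hard timeout, then parse the bytes."""
        try:
            resp = requests.get(url, timeout=timeout)
            resp.raise_for_status()
        except requests.RequestException:
            # covers ReadTimeout, ConnectTimeout, ConnectionError, HTTP errors
            return None
        # feedparser.parse accepts a file-like object as well as a URL,
        # so parsing the already-downloaded bytes avoids a second fetch
        return feedparser.parse(io.BytesIO(resp.content))

    feed = fetch_feed("https://example.org/rss.xml")  # hypothetical feed URL
    if feed is not None:
        for entry in feed.entries[:3]:
            print(entry.title)

The key design point is that feedparser's own URL fetching has no timeout knob, so downloading with requests first puts a hard upper bound on how long one slow feed can delay the rest.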
