From 166705d798e0cdd4e673516700c765718111e5b0 Mon Sep 17 00:00:00 2001
From: Brendan Howell
Date: Sat, 6 May 2017 00:32:52 +0200
Subject: [PATCH] switch to using requests with timeout for rss. cope with
 slow imap servers.

---
 screenless/bureau/publications/publications.py | 17 +++++++++++++++--
 1 file changed, 15 insertions(+), 2 deletions(-)

diff --git a/screenless/bureau/publications/publications.py b/screenless/bureau/publications/publications.py
index b280709..878ab0d 100644
--- a/screenless/bureau/publications/publications.py
+++ b/screenless/bureau/publications/publications.py
@@ -1,5 +1,6 @@
 # publications office for reading and writing
 from datetime import datetime
+import io
 import json
 import os
 import random
@@ -11,6 +12,7 @@ import feedparser
 import lxml.html
 import PIL
 from readability import readability
+import requests
 
 from bureau import Bureau, add_command, add_api
 
@@ -98,8 +100,10 @@ class Publications(Bureau):
         news = self._get_news()
         # TODO: get weather
         # TODO: get finance
-        inbox = self.send("PO", "unread")
         print("news", news[0])
+        inbox = self.send("PO", "unread")
+        if inbox is None:
+            inbox = []  # if IMAP times out just move on...
         self.print_full("news.html", news=news, inbox=inbox)
 
     @add_command("r", "Print a web page for reading")
@@ -164,7 +168,16 @@ class Publications(Bureau):
         for source in feeds:
             url = source[0]
             num_entries = source[1]
-            feed = feedparser.parse(url)
+
+            # get feed data with requests using a timeout
+            try:
+                resp = requests.get(url, timeout=20.0)
+            except requests.ReadTimeout:
+                self.log("Timeout reading RSS feed %s", url)
+                continue
+
+            feed_data = io.BytesIO(resp.content)
+            feed = feedparser.parse(feed_data)
 
             # work around if we don't have enough news
             if num_entries > len(feed.entries):