@@ -1,5 +1,6 @@
 # publications office for reading and writing
 from datetime import datetime
+import io
 import json
 import os
 import random
@@ -11,6 +12,7 @@ import feedparser
 import lxml.html
 import PIL
 from readability import readability
+import requests
 
 from bureau import Bureau, add_command, add_api
 
@@ -98,8 +100,11 @@ class Publications(Bureau):
         news = self._get_news()
         # TODO: get weather
         # TODO: get finance
-        inbox = self.send("PO", "unread")
         print("news", news[0])
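+        # ask the PO (post office) bureau for unread mail; may return None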
+        inbox = self.send("PO", "unread")
+        if inbox is None:
+            inbox = []  # if IMAP times out just move on...
         self.print_full("news.html", news=news, inbox=inbox)
 
     @add_command("r", "Print a web page for reading")
@@ -164,7 +168,18 @@ class Publications(Bureau):
         for source in feeds:
             url = source[0]
             num_entries = source[1]
-            feed = feedparser.parse(url)
 
+            # get feed data with requests using a timeout
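+            # (feedparser.parse() itself offers no timeout parameter)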
+            try:
+                resp = requests.get(url, timeout=20.0)
+            except requests.Timeout:
+                self.log("Timeout reading RSS feed %s", url)
+                continue
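+
+            # feedparser can parse a file-like object as well as a URL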
+            feed_data = io.BytesIO(resp.content)
+            feed = feedparser.parse(feed_data)
+
             # work around if we don't have enough news
             if num_entries > len(feed.entries):