2020-05-01 09:46:21 +02:00
|
|
|
# pylint: disable=C0111,R0903
|
|
|
|
|
|
|
|
"""RSS news ticker
|
|
|
|
|
|
|
|
Fetches rss news items and shows these as a news ticker.
|
|
|
|
Left-clicking will open the full story in a browser.
|
|
|
|
New stories are highlighted.
|
|
|
|
|
|
|
|
Parameters:
|
|
|
|
* rss.feeds : Space-separated list of RSS URLs
|
|
|
|
* rss.length : Maximum length of the module, default is 60
|
2020-05-08 20:58:35 +02:00
|
|
|
|
|
|
|
contributed by `lonesomebyte537 <https://github.com/lonesomebyte537>`_ - many thanks!
|
2020-05-01 09:46:21 +02:00
|
|
|
"""
|
|
|
|
|
2020-05-01 09:54:21 +02:00
|
|
|
import feedparser
|
2020-05-01 09:46:21 +02:00
|
|
|
|
|
|
|
import webbrowser
|
|
|
|
import time
|
|
|
|
import os
|
|
|
|
import tempfile
|
|
|
|
import logging
|
|
|
|
import random
|
|
|
|
import re
|
|
|
|
import json
|
|
|
|
|
2020-05-01 09:54:21 +02:00
|
|
|
import core.module
|
|
|
|
import core.widget
|
|
|
|
import core.input
|
2020-05-01 09:46:21 +02:00
|
|
|
|
|
|
|
# pylint: disable=too-many-instance-attributes
|
2020-05-01 09:54:21 +02:00
|
|
|
class Module(core.module.Module):
    """RSS news ticker.

    Scrolls headlines from the configured feeds through the bar widget.
    Left click opens the current story in a browser; right click renders
    every known item into a "newspaper" HTML page and opens that instead.
    Items not seen before (per the on-disk history) are flagged as new.
    """

    # Seconds between full feed refreshes
    REFRESH_DELAY = 600
    # Characters advanced per ticker scroll step
    SCROLL_SPEED = 3
    # Items per column for each of the four random newspaper layouts
    LAYOUT_STYLES_ITEMS = [[1, 1, 1], [3, 3, 2], [2, 3, 3], [3, 2, 3]]
    # Relative path — resolved against the process working directory
    # (normally $HOME when launched from i3)
    HISTORY_FILENAME = ".config/i3/rss.hist"

    def __init__(self, config, theme):
        super().__init__(config, theme, core.widget.Widget(self.ticker_update))

        # rss.feeds: space-separated list of RSS URLs
        self._feeds = self.parameter(
            "feeds", "https://www.espn.com/espn/rss/news"
        ).split(" ")
        self._feeds_to_update = []
        self._response = ""

        # rss.length: maximum length of the ticker text
        self._max_title_length = int(self.parameter("length", 60))

        self._items = []
        self._current_item = None

        # Scroll state for the currently displayed title
        self._ticker_offset = 0
        self._pre_delay = 0
        self._post_delay = 0

        self._state = []

        # Kept open for the lifetime of the module; reused on every
        # newspaper rendering and deleted automatically on exit
        self._newspaper_file = tempfile.NamedTemporaryFile(mode="w", suffix=".html")

        self._last_refresh = 0
        self._last_update = 0

        core.input.register(self, button=core.input.LEFT_MOUSE, cmd=self._open)
        core.input.register(
            self, button=core.input.RIGHT_MOUSE, cmd=self._create_newspaper
        )

        self._history = {"ticker": {}, "newspaper": {}}
        self._load_history()

    def _load_history(self):
        """Load the seen-item history from disk, if a history file exists."""
        if os.path.isfile(self.HISTORY_FILENAME):
            # context manager so the handle is closed deterministically
            with open(self.HISTORY_FILENAME, "r") as history_file:
                self._history = json.loads(history_file.read())

    def _update_history(self, group):
        """Snapshot the titles of all current items, per source, under *group*."""
        sources = set([i["source"] for i in self._items])
        self._history[group] = dict(
            [
                [s, [i["title"] for i in self._items if i["source"] == s]]
                for s in sources
            ]
        )

    def _save_history(self):
        """Write the history to HISTORY_FILENAME, creating its directory if needed."""
        if not os.path.exists(os.path.dirname(self.HISTORY_FILENAME)):
            os.makedirs(os.path.dirname(self.HISTORY_FILENAME))
        # context manager so the handle is closed (and flushed) deterministically
        with open(self.HISTORY_FILENAME, "w") as history_file:
            history_file.write(json.dumps(self._history))

    def _check_history(self, items, group):
        """Mark each item 'new' unless its title is already recorded for *group*."""
        for i in items:
            i["new"] = not (
                i["source"] in self._history[group]
                and i["title"] in self._history[group][i["source"]]
            )

    def _open(self, _):
        """Open the currently displayed story in the default browser (left click)."""
        if self._current_item:
            webbrowser.open(self._current_item["link"])

    def _check_for_image(self, entry):
        """Best-effort extraction of an image URL from a feedparser entry.

        Tries, in order: enclosure links, the largest media_content image,
        and finally the first <img> tag embedded in the summary HTML.
        Returns '' when nothing is found.
        """
        image = next(
            iter([l["href"] for l in entry["links"] if l["rel"] == "enclosure"]), None
        )
        if not image and "media_content" in entry:
            try:
                media = sorted(
                    entry["media_content"],
                    key=lambda i: i["height"] if "height" in i else 0,
                    reverse=True,
                )
                image = next(
                    iter([i["url"] for i in media if i["medium"] == "image"]), None
                )
            except Exception:
                # media_content is feed-dependent and often malformed;
                # deliberately best-effort, fall through to the regex below
                pass
        if not image:
            # fixed: the quote classes must accept double quotes too,
            # otherwise src="..." attributes are never matched
            match = re.search(
                r"<img[^>]*src\s*=[\"']*([^\s^>^\"^']*)[\"']*", entry["summary"]
            )
            if match:
                image = match.group(1)
        return image if image else ""

    def _remove_tags(self, txt):
        """Strip anything that looks like an HTML tag from *txt*."""
        return re.sub(r"<[^>]*>", "", txt)

    def _create_item(self, entry, url, feed):
        """Normalize a feedparser entry into the internal item dict."""
        return {
            "title": self._remove_tags(entry["title"].replace("\n", " ")),
            "link": entry["link"],
            "new": True,
            "source": url,
            "summary": self._remove_tags(entry["summary"]),
            "feed": feed,
            "image": self._check_for_image(entry),
            # 0 sorts undated entries last (list is sorted newest-first)
            "published": time.mktime(entry.published_parsed)
            if hasattr(entry, "published_parsed")
            else 0,
        }

    def _update_items_from_feed(self, url):
        """Fetch *url*, replace its previous items and re-sort by publish date."""
        parser = feedparser.parse(url)
        new_items = [
            self._create_item(entry, url, parser["feed"]["title"])
            for entry in parser["entries"]
        ]
        # Check history
        self._check_history(new_items, "ticker")
        # Remove the previous items
        self._items = [i for i in self._items if i["source"] != url]
        # Add the new items
        self._items.extend(new_items)
        # Sort the items on publish date
        self._items.sort(key=lambda i: i["published"], reverse=True)

    def _check_for_refresh(self):
        """Refresh one pending feed per cycle, or schedule a new refresh round."""
        if self._feeds_to_update:
            # Update one feed at a time to not overload this update cycle
            url = self._feeds_to_update.pop()
            self._update_items_from_feed(url)

            # Last feed of the round: persist what we have seen
            if not self._feeds_to_update:
                self._update_history("ticker")
                self._save_history()

            if not self._current_item:
                self._next_item()
        elif time.time() - self._last_refresh >= self.REFRESH_DELAY:
            # Populate the list with feeds to update
            self._feeds_to_update = self._feeds[:]
            # Update the refresh time
            self._last_refresh = time.time()

    def _next_item(self):
        """Advance to the next story (new stories first) and reset scroll state."""
        self._ticker_offset = 0
        self._pre_delay = 2
        self._post_delay = 4

        if not self._items:
            return

        # Index of the current element
        idx = (
            self._items.index(self._current_item)
            if self._current_item in self._items
            else -1
        )

        # First show new items, else show next
        new_items = [i for i in self._items if i["new"]]
        self._current_item = next(
            iter(new_items), self._items[(idx + 1) % len(self._items)]
        )

    def _check_scroll_done(self):
        """Scroll one step; once the full title has been shown, move on."""
        # Check if the complete title has been shown
        if self._ticker_offset + self._max_title_length > len(
            self._current_item["title"]
        ):
            # Do not immediately show next item after scroll
            self._post_delay -= 1
            if self._post_delay == 0:
                # Mark the previous item as 'old'
                self._current_item["new"] = False
                self._next_item()
        else:
            # Increase scroll position
            self._ticker_offset += self.SCROLL_SPEED

    def ticker_update(self, _):
        """Widget callback: return the current ticker text (fixed width)."""
        # Only update the ticker once a second
        now = time.time()
        if now - self._last_update < 1:
            return self._response

        self._last_update = now

        self._check_for_refresh()

        # If no items were retrieved, return an empty string
        if not self._current_item:
            return " " * self._max_title_length

        # Prepare a substring of the item title
        self._response = self._current_item["title"][
            self._ticker_offset : self._ticker_offset + self._max_title_length
        ]
        # Add spaces if too short
        self._response = self._response.ljust(self._max_title_length)

        # Do not immediately scroll
        if self._pre_delay > 0:
            # Change state during pre_delay for new items
            if self._current_item["new"]:
                self._state = ["warning"]
            self._pre_delay -= 1
            return self._response

        self._state = []
        self._check_scroll_done()

        return self._response

    def state(self, _):
        """Widget state: ['warning'] while highlighting a new item, else []."""
        return self._state

    def _create_news_element(self, item, overlay_title):
        """Return the HTML fragment for a single news item.

        *overlay_title* draws the title on top of the image instead of
        below it (used for the wide layout cells).
        """
        try:
            timestr = (
                "" if item["published"] == 0 else str(time.ctime(item["published"]))
            )
        except Exception as exc:
            logging.error(str(exc))
            # fixed: was `raise e`, which is a NameError (`e` is undefined);
            # re-raise the active exception instead
            raise
        element = "<div class='item' onclick=window.open('" + item["link"] + "')>"
        element += "<div class='titlecontainer'>"
        element += (
            " <img "
            + ("" if item["image"] else "class='noimg' ")
            + "src='"
            + item["image"]
            + "'>"
        )
        element += (
            " <div class='title"
            + (" overlay" if overlay_title else "")
            + "'>"
            + ("<span class='star'>★</span>" if item["new"] else "")
            + item["title"]
            + "</div>"
        )
        element += "</div>"
        element += "<div class='summary'>" + item["summary"] + "</div>"
        element += (
            "<div class='info'><span class='author'>"
            + item["feed"]
            + "</span><span class='published'>"
            + timestr
            + "</span></div>"
        )
        element += "</div>"
        return element

    def _create_news_section(self, newspaper_items):
        """Consume items from *newspaper_items* into one randomly-styled row."""
        style = random.randint(0, 3)
        section = "<table><tr class='style" + str(style) + "'>"
        for i in range(0, 3):
            section += "<td><div class='itemcontainer'>"
            for _ in range(0, self.LAYOUT_STYLES_ITEMS[style][i]):
                if newspaper_items:
                    # cells holding 3 items get inline (non-overlay) titles
                    section += self._create_news_element(
                        newspaper_items[0], self.LAYOUT_STYLES_ITEMS[style][i] != 3
                    )
                    del newspaper_items[0]
            section += "</div></td>"
        section += "</tr></table>"
        return section

    def _create_newspaper(self, _):
        """Render all items as an HTML 'newspaper' and open it (right click)."""
        content = ""
        newspaper_items = self._items[:]
        self._check_history(newspaper_items, "newspaper")

        # Make sure new items are always listed first, independent of publish date
        newspaper_items.sort(
            key=lambda i: i["published"] + (10000000 if i["new"] else 0), reverse=True
        )

        while newspaper_items:
            content += self._create_news_section(newspaper_items)
        # fixed: rewind and truncate before writing — the file object is
        # reused, so without this every rendering after the first would be
        # appended after the previous document
        self._newspaper_file.seek(0)
        self._newspaper_file.truncate()
        self._newspaper_file.write(
            HTML_TEMPLATE.replace("[[CONTENT]]", content)
        )
        self._newspaper_file.flush()
        webbrowser.open("file://" + self._newspaper_file.name)
        self._update_history("newspaper")
        self._save_history()
|
|
|
|
|
2020-05-03 11:15:52 +02:00
|
|
|
|
2020-05-01 09:46:21 +02:00
|
|
|
HTML_TEMPLATE = """<!DOCTYPE html>
|
|
|
|
<html>
|
|
|
|
<head>
|
|
|
|
<script>
|
|
|
|
window.onload = function() {
|
|
|
|
var images = document.getElementsByTagName('img');
|
|
|
|
// Remove very small images
|
|
|
|
for(var i = 0; i < images.length; i++) {
|
|
|
|
if (images[i].naturalWidth<50 || images[i].naturalHeight<50) {
|
|
|
|
images[i].src = ''
|
|
|
|
images[i].className+=' noimg'
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
</script>
|
|
|
|
</head>
|
|
|
|
<style>
|
|
|
|
body {background: #eee; font-family: Helvetica neue;}
|
|
|
|
td {background: #fff; height: 100%;}
|
|
|
|
tr.style0 td {width: 33%;}
|
|
|
|
tr.style1 td {width: 20%;}
|
|
|
|
tr.style1 td:last-child {width: 60%;}
|
|
|
|
tr.style2 td {width: 20%;}
|
|
|
|
tr.style2 td:first-child {width: 60%;}
|
|
|
|
tr.style3 td {width: 20%;}
|
|
|
|
tr.style3 td:nth-child(2) {width: 60%;}
|
|
|
|
img {width: 100%; display: block; }
|
|
|
|
img.noimg {min-height:250px; background: #1299c8;}
|
|
|
|
#content {width: 1500px; margin: auto; background: #eee; padding: 1px;}
|
|
|
|
#newspapertitle {text-align: center; font-size: 60px; font-family: Arial Black; background: #1299c8; font-style: Italic; padding: 10px; color: #fff; }
|
|
|
|
.star {color: #ffa515; font-size: 24px;}
|
|
|
|
.section {display: flex;}
|
|
|
|
.column {display: flex;}
|
|
|
|
.itemcontainer {width: 100%; height: 100%; position: relative; display: inline-table;}
|
|
|
|
.item {cursor: pointer; }
|
|
|
|
.titlecontainer {position: relative;}
|
|
|
|
.title.overlay {font-family: Arial; position: absolute; bottom: 10px; color: #fff; font-weight: bold; text-align: right; max-width: 75%; right: 10px; font-size: 23px; text-shadow: 1px 0 0 #000, 0 -1px 0 #000, 0 1px 0 #000, -1px 0 0 #000;}
|
|
|
|
.title:not(.overlay) {font-weight: bold; padding: 0px 10px;}
|
|
|
|
.summary {color: #444; padding: 10px 10px 0px 10px; font-family: Times new roman; font-size: 18px; flex: 1;max-height: 105px; overflow: hidden;}
|
|
|
|
.info {color: #aaa; font-family: arial; font-size: 13px; padding: 10px;}
|
|
|
|
.published {float: right;}
|
|
|
|
</style>
|
|
|
|
<body>
|
|
|
|
<div id='content'>
|
|
|
|
<div id='newspapertitle'>Bumblebee Daily</div>
|
|
|
|
[[CONTENT]]
|
|
|
|
</div>
|
|
|
|
</body>
|
|
|
|
</html>"""
|
|
|
|
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|