Skip to content

Instantly share code, notes, and snippets.

@ryukinix
Last active June 6, 2020 13:49
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save ryukinix/4bdd0eafb26c3073d0b5111c42ea8ec5 to your computer and use it in GitHub Desktop.
Save ryukinix/4bdd0eafb26c3073d0b5111c42ea8ec5 to your computer and use it in GitHub Desktop.
An alternative RSS parser, because conky doesn't support ATOM yet
# Conky configuration (legacy pre-1.10 syntax) that drives the
# rss_parser.py script below through ${execi} calls in the TEXT section.
# — SETTINGS — #
update_interval 20
total_run_times 0
net_avg_samples 1
cpu_avg_samples 1
imlib_cache_size 0
double_buffer yes
no_buffers yes
use_xft yes
xftfont Zekton:size=9
override_utf8_locale yes
# Large buffer so multi-line feed output from execi is not truncated.
text_buffer_size 2048
# — WINDOW — #
own_window yes
own_window_type desktop
own_window_transparent no
own_window_hints undecorated,below,sticky,skip_taskbar,skip_pager
own_window_argb_visual yes
own_window_argb_value 200
own_window_colour 000000
# — SIZE — #
minimum_size 280 280
default_bar_size 30 8
# — ALIGNMENT — #
alignment top_right
gap_x 340
gap_y 40
# — GRAPHIC — #
draw_shades no
default_color ffffff
default_shade_color 000000
color0 00d9ff
color1 ffffff
color2 ffffff
# Everything after TEXT is rendered; each ${execi 60 ...} runs the
# parser script at most once a minute per feed.
TEXT
${font Hack:size=9}${color0}RSS Feed ${hr 3}$color
${font Hack:size=9}HackerNews ${hr 3}
${font Monospace:size=9}${execi 60 python rss_parser.py -u "http://hackernews.demos.monkeylearn.com/feed.xml?" -w 60 -l 5 -r HackerNews}
${font Hack:size=9}Programming Reddit ${hr 3}
${font Monospace:size=9}${execi 60 python rss_parser.py -u "https://www.reddit.com/user/ryukinix/m/programming/.rss" -w 60 -l 5 -r Programming}
${font Hack:size=9}Ryukinix Github ${hr 3}
${font Monospace:size=9}${execi 60 python rss_parser.py -u "https://github.com/ryukinix.private.atom?token=AHSe_nopvy6_F1ZmsDbo_GHQCQIwVHUaks61Q17BwA==" -w 60 -l 5 -r Github}
# i have some troubles with this feed
${font Hack:size=9}Google News ${hr 3}
${font Monospace:size=9}${execi 60 python rss_parser.py -u "https://news.google.com.br/news?cf=all&hl=pt-BR&pz=1&ned=pt-BR_br&output=rss" -w 60 -l 5 -i "Últimas notícias: " -r "Google News" --disable-notifications}
${font Hack:size=9}Anime Reddit ${hr 3}
${font Monospace:size=9}${execi 60 python rss_parser.py -u "https://www.reddit.com/user/ryukinix/m/anime/.rss" -w 60 -l 10 -r Anime}
#!/usr/bin/env python
# coding=utf-8
#
# Python Script
#
# Copyleft © Manoel Vilela
#
#
from argparse import ArgumentParser
from string import ascii_letters as alphabet
from json.decoder import JSONDecodeError
# pip install feedparser
import feedparser
import os
import json
import sys
import webbrowser
import gi
gi.require_version('Notify', '0.7')
from gi.repository import GObject # noqa
from gi.repository import Notify # noqa
from gi.repository import GLib
# JSON file (relative to the working directory) persisting already-seen entries.
CACHE_FILE = '.cache.json'
# Per-feed upper bound before the oldest cached entries are dropped.
MAX_CACHE_SIZE = 100 # ITEMS
# Command-line interface; each conky ${execi} call invokes this script
# with a different combination of these flags.
parser = ArgumentParser()
parser.add_argument(
    '-u', '--url',
    default="http://hackernews.demos.monkeylearn.com/feed.xml?",
    dest='url',
    type=str,
    help='The url to be parsed'
)
parser.add_argument(
    '-l', '--lines',
    default=5,
    dest='lines',
    type=int,
    # help= was missing here, unlike every sibling argument.
    help='Maximum number of feed entries to print'
)
parser.add_argument(
    '-w', '--width',
    default=80,
    dest='width',
    type=int,
    help='The horizontal limit'
)
parser.add_argument(
    '-p', '--prefix',
    default='- ',
    dest='prefix',
    type=str,
    help='A prefix attached each feed'
)
parser.add_argument(
    '-i', '--ignore',
    default='',
    dest='ignore',
    type=str,
    help='Useless string to remove'
)
parser.add_argument(
    '-n', '--disable-notifications',
    default=True,
    dest='notifications',
    # store_false: passing the flag turns notifications OFF.
    action='store_false',
    help='Disable notifications (default True)'
)
parser.add_argument(
    '-r', '--rss-label',
    default='RSS',
    dest='rss_label',
    type=str,
    help='A simple label for what is fetching'
)
class RssNotifier(GObject.Object):
    """Dispatch libnotify desktop notifications for new RSS entries.

    A GLib main loop runs until the user has closed every pending
    notification; clicking a notification's action opens the entry's
    URL in the default web browser.
    """

    loop = GLib.MainLoop()
    Notify.init("rss_conky")
    # Class-level on purpose: exit_when_empty() polls this shared list so
    # the loop quits only after every notification was closed.
    notifications = []

    def __init__(self, label):
        # label: title shown on every notification (the feed's name).
        self.label = label
        super(RssNotifier, self).__init__()
        # Poll every 100 ms for pending notifications.
        # GLib.timeout_add replaces the deprecated GObject.timeout_add
        # alias (same fix as the earlier GObject.MainLoop deprecation).
        GLib.timeout_add(100, self.exit_when_empty)
    # lets initialise with the application name

    def send_notification(self, title, text, url, file_path_to_icon=""):
        """Show one notification and track it until it is closed."""
        n = Notify.Notification.new(title, text, file_path_to_icon)
        self.notifications.append(n)
        # The URL doubles as the action id passed back to the callback.
        n.add_action(url, 'open', self.open_webbrowser)
        n.connect('closed', self.close_notification, n)
        n.show()

    def send_rss(self, rss, url):
        """Notify a single feed entry; 'rss' is used as the icon name."""
        self.send_notification(self.label, rss, url, 'rss')

    def open_webbrowser(self, n, url):
        """Action callback: open the entry's URL in the default browser."""
        webbrowser.open(url)

    def close_notification(self, n, arg):
        """'closed' signal handler: forget the notification."""
        self.notifications.remove(n)

    def exit_when_empty(self):
        """Timeout callback: stop the loop once no notifications remain.

        Returning False removes the timeout source; True keeps polling.
        """
        if not any(RssNotifier.notifications):
            self.loop.quit()
            return False
        return True
def get_label(entry):
    """Return ``'category: '`` built from the entry's last tag.

    Pops the tag off the entry's tag list; returns '' when the entry
    carries no tags.
    """
    tags = entry.get('tags')
    if not tags:
        return ''
    return '{}: '.format(tags.pop()['term'])
def long_title_clean(title):
    """Hard-wrap *title* at ``options.width`` characters.

    Continuation lines are indented by the prefix width so wrapped
    titles line up under the first character after the prefix.
    Relies on the module-global ``options`` set in main().
    """
    pad = ' ' * len(options.prefix)
    pieces = []
    rest = title
    while len(rest) > options.width:
        pieces.append(rest[:options.width])
        rest = rest[options.width:].strip()
    pieces.append(rest)
    return ('\n' + pad).join(pieces)
def translate_name(url):
    """Derive a hidden per-feed cache key from *url*.

    Keeps only ASCII letters, e.g. 'http://ab.com' -> '.httpabcom.cache'.
    """
    letters = (ch for ch in url if ch in alphabet)
    return '.{}.cache'.format(''.join(letters))
def save_cache(new_cache, key):
    """Append *new_cache* entries under *key* and rewrite the JSON cache.

    The per-key list is trimmed from the front (oldest first) so it never
    holds more than MAX_CACHE_SIZE - 1 items, matching the original
    ``while len >= MAX: pop(0)`` bound but in a single O(n) deletion
    instead of repeated O(n) pops.
    """
    cache_file = get_cache_file()
    if cache_file.get(key):
        cache_file[key].extend(new_cache)
    else:
        cache_file[key] = new_cache
    if len(cache_file[key]) >= MAX_CACHE_SIZE:
        # Drop just enough of the oldest entries to reach MAX_CACHE_SIZE - 1.
        del cache_file[key][:len(cache_file[key]) - (MAX_CACHE_SIZE - 1)]
    with open(CACHE_FILE, 'w') as f:
        json.dump(cache_file, f)
def get_cache(key):
    """Return the cached entry list for *key*, or [] when absent/empty."""
    return get_cache_file().get(key) or []
def get_cache_text(key):
    """Join the cached titles for *key* into printable lines (URLs dropped)."""
    titles = (title for title, _ in get_cache(key))
    return '\n'.join(titles)
def get_cache_file():
    """Load and return the whole JSON cache as a dict.

    A missing file yields {}. Corrupt JSON truncates the file (so the
    next run starts clean) and also yields {}.
    """
    try:
        with open(CACHE_FILE, 'r') as f:
            return json.load(f)
    except FileNotFoundError:
        pass
    except JSONDecodeError:
        # Reset the corrupt cache file to empty.
        open(CACHE_FILE, 'w').close()
    return {}
def notify(new_rss):
    """Show one desktop notification per (text, url) pair.

    Blocks in the GLib main loop until the user has dismissed every
    notification. Uses the global ``options`` for the feed label.
    """
    sender = RssNotifier(options.rss_label)
    for text, link in new_rss:
        sender.send_rss(text, link)
    sender.loop.run()
def ignore_pattern(title):
    """Strip the user-supplied --ignore substring from *title*."""
    unwanted = options.ignore
    return title.replace(unwanted, '')
def parallel_notifications(new_rss):
    """Show notifications for *new_rss* in a detached child process.

    The parent returns immediately so conky's ${execi} is never blocked
    by the GLib main loop, which only exits once the user dismisses all
    notifications. Skipped entirely when there is nothing new or the
    user passed --disable-notifications.
    """
    if any(new_rss) and options.notifications:
        if os.fork() == 0:
            # Child: detach from the controlling session, then block on
            # the notification loop.
            os.setsid()
            notify(new_rss)
            # Exit explicitly so the child can never fall through into
            # the caller's remaining control flow after notify() returns.
            os._exit(0)
def parse_print_rss(feed):
    """Build up to ``options.lines`` formatted [title, link] pairs.

    Each title is '<prefix><tag label><entry title>' cleaned by
    ignore_pattern() and wrapped by long_title_clean().

    Bug fixed: the original duplicate check was ``title not in rss``,
    which compares a title *string* against [title, link] *pairs* and is
    therefore always true — duplicates were never filtered. A separate
    set of seen titles makes the dedup actually work.
    """
    rss = []
    seen_titles = set()
    for entry in feed['entries']:
        if len(rss) >= options.lines:
            break
        label = get_label(entry)
        output = '{}{}{!s}'.format(options.prefix, label, entry.title)
        title = long_title_clean(ignore_pattern(output))
        if title not in seen_titles:
            seen_titles.add(title)
            rss.append([title, entry['link']])
    return rss
def send_rss(rss):
    """Print up to ``options.lines`` titles to stdout (conky reads this).

    *rss* is a list of [title, link] pairs; only the titles are printed.
    Fixed: the loop variable previously shadowed the ``rss`` parameter,
    which worked but made the function needlessly confusing.
    """
    for index, (title, _) in enumerate(rss):
        if index >= options.lines:
            break
        print(title)
def main():
    """CLI entry point: fetch the feed, print titles, cache and notify.

    Sets the module-global ``options`` used by most helpers.
    """
    global options
    options = parser.parse_args()
    feed = feedparser.parse(options.url)
    keyname = translate_name(options.url)
    if any(feed['entries']):
        new_cache = parse_print_rss(feed)
    else:
        # Network down or empty feed: reuse whatever was cached before.
        new_cache = get_cache(keyname)
    old_cache = get_cache(keyname)
    # Entries never seen before are the only ones worth notifying about.
    new_rss = [entry for entry in new_cache if entry not in old_cache]
    send_rss(new_cache)
    save_cache(new_rss, keyname)
    parallel_notifications(new_rss)
# Run only when executed as a script (conky invokes this via ${execi}).
if __name__ == '__main__':
    main()
@Strykar
Copy link

Strykar commented Sep 25, 2019

Throws a warning - ./rss_parser.py:86: PyGIDeprecationWarning: GObject.MainLoop is deprecated; use GLib.MainLoop instead

This is exactly what I was looking for as the available perl/bash readers don't parse XML.
@ryukinix Is that a personal token you're using for Github? When I use a personal token, there's no output. Other feeds work fine.

@ryukinix
Copy link
Author

ryukinix commented Jun 6, 2020

I tried to fix the warning and pushed the change to this repository @Strykar. Give it a new try. I also removed the daemon import, which was a pretty useless dependency.

@ryukinix Is that a personal token you're using for Github? When I use a personal token, there's no output. Other feeds work fine.

That GitHub RSS feed doesn't work anymore for me; I'm not sure whether something changed in GitHub's Atom feeds or my token simply expired. I found some useful information here: https://stackoverflow.com/a/10707384 (but it seems a little outdated)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment