import configparser
from datetime import date
import feedparser
from mdutils import MdUtils
from os import path
from shutil import copy

#################
# Configuration #
#################

markdown = True
pdf = False
html = True
animations = True
html_stylesheet = "styles/simple.css"
title_animation = "fade-down"
heading_animation = "fade-right"
list_animation = "fade-left"
ConvertToHTML = True
title = date.today().strftime('%d %B, %Y')
og_title = title
feed_configuration = ['feeds.ini','covid.ini']
archive_configuration = {
    "feeds.ini": {
        "id": "daily-dose-list",
        "folder": "archive-daily-dose",
        "title": "Daily-Dose",
        "hide_overflow": True,
    },
    "covid.ini": {
        "id": "covid-19-list",
        "folder": "archive-covid-19",
        "title": "Covid-19 Sentry",
        "hide_overflow": False,
    },
}
update_archive = True
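
# For reference, a sketch of what a section in one of the INI files above
# might look like (the section name and URL are hypothetical; the keys are
# the ones GetPosts() reads below):
#
#   [Hacker News]
#   URL = https://news.ycombinator.com/rss
#   IGNORE = ["hiring", "ask hn"]
#   SUMMARY = True
#   ToRead = 5
#   LINK = True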

#############
# Functions #
#############

def GetPosts(feed):
    errored = False
    Posts = {}
    ToIgnore = []
    ReadSummary = True
    URL = None
    ShowLink = False
    ToRead = 5
    # URL is mandatory; IGNORE, SUMMARY, ToRead and LINK are optional.
    # eval() lets the INI values be Python literals (lists, booleans,
    # integers); ast.literal_eval would be a safer drop-in here.
    try:
        URL = feeds[feed]["URL"]
    except KeyError:
        print("Improper configuration for %s. No URL specified." % feed)
        exit(1)
    try:
        ToIgnore = eval(feeds[feed]["IGNORE"])
    except KeyError:
        pass
    try:
        ReadSummary = eval(feeds[feed]["SUMMARY"])
    except KeyError:
        ReadSummary = True
    try:
        ToRead = eval(feeds[feed]["ToRead"])
    except Exception:
        pass
    try:
        ShowLink = eval(feeds[feed]["LINK"])
    except KeyError:
        pass

    try:
        rss = feedparser.parse(URL)
    except Exception:
        print("Could not fetch data for feed: %s" % feed)
        errored = True

    if errored:
        return None

    maximum = len(rss.entries)
    count = 0
    if ToRead == -1 or ToRead > maximum:
        ToRead = maximum

    posts = []
    summaries = []
    links = []
    added = 0
    while count != maximum and added != ToRead:
        Skip = False
        Title = str(rss.entries[count].title)
        Summary = rss.entries[count].summary
        for words in ToIgnore:
            if (words.lower() in Title.lower()) or (words.lower() in Summary.lower()):
                Skip = True
                break
        if not Skip:
            added += 1
            posts.append(Title)
            if ReadSummary:
                summaries.append(Summary)
            if ShowLink:
                links.append(rss.entries[count].link)
        count += 1

    for idx in range(len(posts)):
        Posts[idx] = {}
        Posts[idx]["post"] = {}
        Posts[idx]["post"]["title"] = posts[idx].encode('utf-8')
        if ReadSummary:
            Posts[idx]["post"]["summary"] = summaries[idx].encode('utf-8')
        else:
            Posts[idx]["post"]["summary"] = None
        if ShowLink:
            Posts[idx]["post"]["link"] = links[idx].encode('utf-8')
        else:
            Posts[idx]["post"]["link"] = None

    Posts["NoOfPosts"] = len(posts)
    Posts["Summary"] = ReadSummary
    Posts["ShowLink"] = ShowLink
    return Posts

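# A sketch of the structure GetPosts() returns (values illustrative):
#
#   {
#       0: {"post": {"title": b"...", "summary": b"..." or None, "link": b"..." or None}},
#       1: {...},
#       "NoOfPosts": 2,
#       "Summary": True,
#       "ShowLink": False,
#   }
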
def add_today(HomeFile,TagID,folder):
    title = og_title
    # Today's page must exist either locally or already in the archive;
    # copy it into the archive folder if it is only local.
    archived = str("./%s/" % folder) + title + ".html"
    if path.exists(archived):
        pass
    elif path.exists(title + ".html"):
        copy(title + ".html", folder)
    else:
        print("File does not exist!")
        exit(1)

    TagToFind = 'ul'

    soup = None
    with open(HomeFile) as fp:
        soup = BeautifulSoup(fp, 'html.parser')

    # The list on the home page that holds the links to archived days.
    ToUpdate = soup.find(TagToFind, {"id": TagID})
    FilePath = str("./%s/" % folder) + str(title + ".html")

    NewTag = soup.new_tag('li')
    anchor = soup.new_tag('a', href=FilePath)
    anchor.string = title
    NewTag.append(anchor)
    ToUpdate.insert(0, NewTag)

    with open(HomeFile, 'w') as fp:
        fp.write(str(soup))

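# add_today() assumes the home page already contains a list element like
#   <ul id="daily-dose-list"></ul>
# where the ids come from archive_configuration above.
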
################
# Main Program #
################

if (pdf and not markdown) or (html and not markdown):
    print("Markdown should be True to convert to pdf/html")
    exit(1)

if (update_archive and not html):
    print("HTML is required to update archive")
    exit(1)

for config in feed_configuration:

    title = og_title
    feeds = configparser.ConfigParser()

    # feeds.read() returns the list of files it managed to parse.
    if feeds.read(config) == []:
        print("%s does not exist!" % config)
        exit(1)
    else:
        print("Reading %s" % config)

    # configparser always exposes a DEFAULT section first; drop it so only
    # the actual feed sections remain.
    rss_feeds = [x for x in feeds.keys()]
    rss_feeds.pop(0)
    print("Read %s feeds from the configuration file" % str(len(rss_feeds)))
    print(rss_feeds)

    posts = {}
    # Iterate over a copy: removing a failed feed from rss_feeds while
    # looping over it directly would skip the entry that follows it.
    for feed in list(rss_feeds):
        sauce = GetPosts(feed)
        if sauce is None:
            rss_feeds.remove(feed)
        else:
            posts[feed] = sauce

    #print(posts)

    if markdown:
        mdfile = MdUtils(file_name=title, title=archive_configuration[config]['title'])
        for feed in posts:
            mdfile.new_header(level=1, title="From %s" % feed)
            for idx in range(posts[feed]["NoOfPosts"]):
                mdfile.write("* **" + posts[feed][idx]["post"]["title"].decode('utf-8') + "**")
                if posts[feed]["Summary"]:
                    mdfile.write(" - ")
                    mdfile.write(posts[feed][idx]["post"]["summary"].decode('utf-8'))
                    if posts[feed]["ShowLink"]:
                        mdfile.write(" - [link](%s)" % posts[feed][idx]["post"]["link"].decode('utf-8'))
                    mdfile.write("\n\n")
                elif posts[feed]["ShowLink"]:
                    # Emit the link even when summaries are turned off.
                    mdfile.write(" - [link](%s)" % posts[feed][idx]["post"]["link"].decode('utf-8'))
                    mdfile.write("\n\n")
                else:
                    mdfile.write("\n")
            mdfile.new_line("\n")

        mdfile.new_table_of_contents(table_title='Contents', depth=2)
        mdfile.create_md_file()

    # Deferred import: pypandoc wraps the pandoc binary and is only needed
    # once the markdown file exists.
    import pypandoc

    if pdf:
        ifname = str(title.strip()) + ".md"
        ofname = str(title.strip()) + ".pdf"
        args = ['-V', 'geometry:margin=1.5cm']
        convert = pypandoc.convert_file(ifname, 'pdf', outputfile=ofname, extra_args=args)

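    # Assumption worth noting: pandoc needs a LaTeX engine (e.g. pdflatex)
    # available on PATH to produce PDF output.
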
    if html:
        from bs4 import BeautifulSoup

        ifname = str(title.strip()) + ".md"
        ofname = str(title.strip()) + ".html"

        convert = pypandoc.convert_file(ifname, 'html', outputfile=ofname, extra_args=['-s'])
        # pypandoc returns an empty string when writing to an output file.
        assert convert == ''

        soup = None
        with open(ofname) as fp:
            soup = BeautifulSoup(fp, 'html5lib')

        # Use a distinct name so the global `title` string is not shadowed
        # by a bs4 Tag object.
        title_tag = soup.new_tag('title')
        title_tag.string = archive_configuration[config]['title']
        soup.head.append(title_tag)

        viewport = soup.new_tag("meta", content="width=device-width, initial-scale=1.0")
        viewport.attrs["name"] = "viewport"
        soup.head.append(viewport)

        custom_css = soup.new_tag('link', href=html_stylesheet, rel='stylesheet')
        soup.head.append(custom_css)

        # Add the stylesheet a second time with a parent-relative path so the
        # copy that ends up inside the archive folder still finds it.
        custom_css = soup.new_tag('link', href=str("../" + html_stylesheet), rel='stylesheet')
        soup.head.append(custom_css)

        if archive_configuration[config]["hide_overflow"]:
            a = soup.new_tag('style')
            a.string = "*{overflow-x:hidden;}"
            soup.head.append(a)

        if animations:
            aos_css = soup.new_tag('link', href='https://unpkg.com/aos@2.3.1/dist/aos.css', rel='stylesheet')
            soup.head.append(aos_css)

            aos_js = soup.new_tag('script', src="https://unpkg.com/aos@2.3.1/dist/aos.js")
            soup.head.append(aos_js)

            aos_script = soup.new_tag('script')
            aos_script.string = "AOS.init();"
            soup.body.append(aos_script)

            for feed in rss_feeds:
                # Pandoc slugifies the "From <feed>" headings into ids like
                # "from-<feed-name>"; rebuild that id to find each heading.
                ToFindID = str("from-" + str(feed.strip().replace(":", "").replace(" ", "-").lower()))
                ToEdit = soup.find("h1", {"id": ToFindID})
                ToEdit['data-aos'] = 'fade-right'

            soup.find_all("h1")[0]['data-aos'] = title_animation
            soup.find("h1", {"id": "contents"})['data-aos'] = heading_animation
            soup.find("h1", {"id": "contents"})['data-aos-anchor-placement'] = "top-bottom"

            paragraphs = soup.find_all("p")
            for paras in paragraphs:
                paras['data-aos'] = list_animation
                paras['data-aos-anchor-placement'] = "bottom-bottom"

            lis = soup.find_all("li")
            for li in lis:
                # Only animate list items without links; table-of-contents
                # entries contain anchors and are left as-is.
                if li.a is None:
                    li['data-aos'] = list_animation
                    li['data-aos-anchor-placement'] = "bottom-bottom"

        with open(ofname, "w") as outf:
            outf.write(str(soup))

    if update_archive:
        add_today('index.html', archive_configuration[config]["id"], folder=archive_configuration[config]["folder"])