refactored code
This commit is contained in:
parent f20a851d9f
commit d69f0c27a4
main.py (260 lines changed)
@@ -2,7 +2,8 @@ import configparser
from datetime import date
import feedparser
from mdutils import MdUtils

from os import path
from shutil import copy

#################
# Configuration #
@@ -18,25 +19,14 @@ heading_animation = "fade-right"
list_animation = "fade-left"
ConvertToHTML = True
title = date.today().strftime('%d %B, %Y')
feeds = configparser.ConfigParser()
og_title = title
feed_configuration = ['feeds.ini','covid.ini']
archive_configuration = {"feeds.ini":{"id":"daily-dose-list","folder":"archive-daily-dose",'title':"Daily-Dose"},"covid.ini":{"id":"covid-19-list","folder":"archive-covid-19","title":"Covid-19 Sentry"}}
update_archive = True

################
# Main Program #
################

if (pdf and not markdown) or (html and not markdown):
    print("Markdown should be True to convert to pdf/html")

if feeds.read("feeds.ini") == []:
    print("feeds.ini does not exist!")
    exit(1)
else:
    print("Reading feeds.ini")
    feeds.read("feeds.ini")

rss_feeds = [x for x in feeds.keys()]
rss_feeds.pop(0)
print("Read %s feeds from the configuration file" % str(len(rss_feeds)))

#############
# Functions #
#############

def GetPosts(feed):
    Posts = {}
@@ -115,106 +105,172 @@ def GetPosts(feed):

    return Posts


posts = {}
for feed in rss_feeds:
    sauce = GetPosts(feed)
    posts[feed] = sauce

#print(posts)

if markdown:
    mdfile = MdUtils(file_name=title,title='Daily Dose')
    for feed in posts:
        mdfile.new_header(level=1,title="From %s" % feed)
        for idx in range(posts[feed]["NoOfPosts"]):
            mdfile.write(str("* **"+posts[feed][idx]["post"]["title"].decode('utf-8'))+"**")
            if posts[feed]["Summary"]:
                mdfile.write(" - ")
                mdfile.write(posts[feed][idx]["post"]["summary"].decode('utf-8'))
            if posts[feed]["ShowLink"]:
                mdfile.write(" - [link](%s)"%posts[feed][idx]["post"]["link"].decode('utf-8'))
                mdfile.write("\n\n")
            elif posts[feed]["Summary"]:
                mdfile.write("\n\n")
            else:
                mdfile.write("\n")
        mdfile.new_line("\n")
    mdfile.new_table_of_contents(table_title='Contents', depth=2)
    mdfile.create_md_file()
def add_today(HomeFile,TagID,folder):
    title = og_title
    if path.exists(title+".html") or path.exists(str("./%s/"%folder)+title+".html"):
        if path.exists(str("./%s/"%folder)+title+".html"):
            None
        else:
            copy(str(title+".html"),folder)
    else:
        print("File does not exist!")
        exit(1)


import pypandoc

if pdf:
    ifname = str(title.strip()) + ".md"
    ofname = str(title.strip()) + ".pdf"
    args = ['-V', 'geometry:margin=1.5cm']

    convert = pypandoc.convert_file(ifname, 'pdf', outputfile=ofname,extra_args=args)

if html:
    from bs4 import BeautifulSoup


    ifname = str(title.strip()) + ".md"
    ofname = str(title.strip()) + ".html"

    convert = pypandoc.convert_file(ifname,'html',outputfile=ofname,extra_args=['-s'])
    assert(convert) == ''
    TagToFind = 'ul'
    TagID = TagID

    soup = None

    with open(ofname) as fp:
        soup = BeautifulSoup(fp,'html5lib')

    title = soup.new_tag('title')
    title.string = "DailyDose"
    with open(HomeFile) as fp:
        soup = BeautifulSoup(fp,'html.parser')
    ToUpdate = soup.find(TagToFind,{"id":TagID})
    FilePath = str("./%s/"%folder) + str(str(title)+".html")

    soup.head.append(title)
    viewport = soup.new_tag("meta",content="width=device-width, initial-scale=1.0")
    viewport.attrs["name"] = "viewport"
    soup.head.append(viewport)
    NewTag = soup.new_tag('li')
    anchor = soup.new_tag('a',href=str('./%s/'%folder+title+".html"))
    anchor.string = title
    NewTag.append(anchor)

    custom_css = soup.new_tag('link',href=html_stylesheet,rel='stylesheet')
    soup.head.append(custom_css)
    ToUpdate.insert(0,NewTag)

    custom_css = soup.new_tag('link',href=str("../"+html_stylesheet),rel='stylesheet')
    soup.head.append(custom_css)
    with open(HomeFile,'w') as fp:
        fp.write(str(soup))

    if animations:
        aos_css = soup.new_tag('link',href='https://unpkg.com/aos@2.3.1/dist/aos.css',rel='stylesheet')
        soup.head.append(aos_css)
################
# Main Program #
################

        aos_js = soup.new_tag('script',src="https://unpkg.com/aos@2.3.1/dist/aos.js")
        soup.head.append(aos_js)
if (pdf and not markdown) or (html and not markdown):
    print("Markdown should be True to convert to pdf/html")
    exit(1)

        aos_script = soup.new_tag('script')
        aos_script.string = "AOS.init();"
        soup.body.append(aos_script)
if (update_archive and not html):
    print("HTML is required to update archive")
    exit(1)

        for feed in rss_feeds:
            ToFindID = str("from-"+str(feed.strip().replace(":","").replace(" ","-").lower()))
            ToEdit = soup.find("h1", {"id": ToFindID})
            ToEdit['data-aos'] = 'fade-right'
for config in feed_configuration:

    title = og_title
    feeds = configparser.ConfigParser()

        soup.find("h1",{"id":"daily-dose"})['data-aos'] = title_animation
        soup.find("h1",{"id":"contents"})['data-aos'] = heading_animation
        soup.find("h1",{"id":"contents"})['data-aos-anchor-placement'] = "top-bottom"
    if feeds.read(config) == []:
        print("%s does not exist!"%config)
        exit(1)
    else:
        print("Reading %s"%config)
        feeds.read(config)

        paragraphs = soup.find_all("p")
        for paras in paragraphs:
            paras['data-aos'] = list_animation
            paras['data-aos-anchor-placement'] = "bottom-bottom"
    rss_feeds = [x for x in feeds.keys()]
    rss_feeds.pop(0)
    print("Read %s feeds from the configuration file" % str(len(rss_feeds)))
    print(rss_feeds)


        lis = soup.find_all("li")
        for li in lis:
            if li.a == None:
                li['data-aos'] = list_animation
                li['data-aos-anchor-placement'] = "bottom-bottom"
    posts = {}
    for feed in rss_feeds:
        sauce = GetPosts(feed)
        posts[feed] = sauce

    with open(ofname, "w") as outf:
        outf.write(str(soup))
    #print(posts)

    if markdown:
        mdfile = None
        mdfile = MdUtils(file_name=title,title=archive_configuration[config]['title'])
        for feed in posts:
            mdfile.new_header(level=1,title="From %s" % feed)
            for idx in range(posts[feed]["NoOfPosts"]):
                mdfile.write(str("* **"+posts[feed][idx]["post"]["title"].decode('utf-8'))+"**")
                if posts[feed]["Summary"]:
                    mdfile.write(" - ")
                    mdfile.write(posts[feed][idx]["post"]["summary"].decode('utf-8'))
                if posts[feed]["ShowLink"]:
                    mdfile.write(" - [link](%s)"%posts[feed][idx]["post"]["link"].decode('utf-8'))
                    mdfile.write("\n\n")
                elif posts[feed]["Summary"]:
                    mdfile.write("\n\n")
                else:
                    mdfile.write("\n")
            mdfile.new_line("\n")
        mdfile.new_table_of_contents(table_title='Contents', depth=2)
        mdfile.create_md_file()


    import pypandoc

    if pdf:
        ifname = str(title.strip()) + ".md"
        ofname = str(title.strip()) + ".pdf"
        args = ['-V', 'geometry:margin=1.5cm']

        convert = pypandoc.convert_file(ifname, 'pdf', outputfile=ofname,extra_args=args)

    if html:
        from bs4 import BeautifulSoup


        ifname = str(title.strip()) + ".md"
        ofname = str(title.strip()) + ".html"

        convert = pypandoc.convert_file(ifname,'html',outputfile=ofname,extra_args=['-s'])
        assert(convert) == ''

        soup = None

        with open(ofname) as fp:
            soup = BeautifulSoup(fp,'html5lib')

        title = soup.new_tag('title')
        title.string = archive_configuration[config]['title']

        soup.head.append(title)
        viewport = soup.new_tag("meta",content="width=device-width, initial-scale=1.0")
        viewport.attrs["name"] = "viewport"
        soup.head.append(viewport)

        custom_css = soup.new_tag('link',href=html_stylesheet,rel='stylesheet')
        soup.head.append(custom_css)

        custom_css = soup.new_tag('link',href=str("../"+html_stylesheet),rel='stylesheet')
        soup.head.append(custom_css)

        if animations:
            aos_css = soup.new_tag('link',href='https://unpkg.com/aos@2.3.1/dist/aos.css',rel='stylesheet')
            soup.head.append(aos_css)

            aos_js = soup.new_tag('script',src="https://unpkg.com/aos@2.3.1/dist/aos.js")
            soup.head.append(aos_js)

            aos_script = soup.new_tag('script')
            aos_script.string = "AOS.init();"
            soup.body.append(aos_script)

            for feed in rss_feeds:
                ToFindID = str("from-"+str(feed.strip().replace(":","").replace(" ","-").lower()))
                ToEdit = soup.find("h1", {"id": ToFindID})
                ToEdit['data-aos'] = 'fade-right'

            soup.find_all("h1")[0]['data-aos'] = title_animation
            soup.find("h1",{"id":"contents"})['data-aos'] = heading_animation
            soup.find("h1",{"id":"contents"})['data-aos-anchor-placement'] = "top-bottom"

            paragraphs = soup.find_all("p")
            for paras in paragraphs:
                paras['data-aos'] = list_animation
                paras['data-aos-anchor-placement'] = "bottom-bottom"

            lis = soup.find_all("li")
            for li in lis:
                if li.a == None:
                    li['data-aos'] = list_animation
                    li['data-aos-anchor-placement'] = "bottom-bottom"

        with open(ofname, "w") as outf:
            outf.write(str(soup))

    if update_archive:
        add_today('index.html',archive_configuration[config]["id"],folder=archive_configuration[config]["folder"])
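
For orientation, a minimal standalone sketch of the per-configuration driver loop this refactor introduces: each .ini file in feed_configuration is read with configparser and mapped to its archive settings in archive_configuration. The get_posts stub and the printed messages below are illustrative placeholders, not the repository's actual helpers.

import configparser

# Illustrative stand-ins for the values defined in main.py's configuration block.
feed_configuration = ['feeds.ini', 'covid.ini']
archive_configuration = {
    "feeds.ini": {"id": "daily-dose-list", "folder": "archive-daily-dose", "title": "Daily-Dose"},
    "covid.ini": {"id": "covid-19-list", "folder": "archive-covid-19", "title": "Covid-19 Sentry"},
}

def get_posts(feed):
    # Placeholder for the real GetPosts(), which fetches and parses the RSS feed.
    return {}

for config in feed_configuration:
    feeds = configparser.ConfigParser()
    if feeds.read(config) == []:
        # ConfigParser.read() returns the list of files it managed to parse,
        # so an empty list means this .ini file is missing.
        print("%s does not exist!" % config)
        raise SystemExit(1)

    # The first key is the implicit DEFAULT section; the rest are feed names.
    rss_feeds = list(feeds.keys())[1:]
    posts = {feed: get_posts(feed) for feed in rss_feeds}
    print("Read %d feeds for %s" % (len(rss_feeds), archive_configuration[config]["title"]))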