Added a check for skipped articles.
Previously the loop would break even when the count threshold had not been reached, because skipped articles still advanced the loop counter.
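In essence, the fix separates "entries examined" from "entries kept". A minimal, self-contained sketch (not the repo's actual GetPosts signature; should_skip stands in for the banned-word check, while count, added, ToRead, and maximum mirror the diff below):

def collect(entries, ToRead, maximum, should_skip):
    # `count` walks the feed (bounded by `maximum` entries), while
    # `added` only counts entries that survive the skip check. The old
    # condition `while count != ToRead` stopped after examining ToRead
    # entries even if some of them had been skipped.
    posts, count, added = [], 0, 0
    while count != maximum and added != ToRead:
        entry = entries[count]
        if not should_skip(entry):
            added += 1
            posts.append(entry)
        count += 1
    return posts

# Skipped entries no longer eat into the ToRead quota:
entries = ["keep 1", "skip me", "keep 2", "skip me", "keep 3"]
print(collect(entries, ToRead=3, maximum=len(entries),
              should_skip=lambda e: e.startswith("skip")))
# -> ['keep 1', 'keep 2', 'keep 3']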
This commit is contained in:
parent 57ccecf3fa
commit 9c7b9bdfaa

main.py: 30 lines changed
@@ -4,6 +4,10 @@ import feedparser
 from mdutils import MdUtils
 
 
+#################
+# Configuration #
+#################
+
 markdown = True
 pdf = False
 html = True
@@ -11,16 +15,17 @@ html_stylesheet = "styles/simple.css"
 title_animation = "fade-down"
 heading_animation = "fade-right"
 list_animation = "fade-left"
+debug = False
+ConvertToHTML = True
+title = date.today().strftime('%d %B, %Y')
+feeds = configparser.ConfigParser()
 
+################
+# Main Program #
+###############
+
 if (pdf and not markdown) or (html and not markdown):
     print("Markdown should be True to convert to pdf/html")
-debug = False
-ConvertToHTML = True
-
-title = date.today().strftime('%d %B, %Y')
-
-feeds = configparser.ConfigParser()
-
 
 if feeds.read("feeds.ini") == []:
     print("feeds.ini does not exist!")
@@ -69,7 +74,8 @@ def GetPosts(feed):
 
     posts = []
     summaries = []
-    while count != ToRead:
+    added = 0
+    while count != maximum and added != ToRead:
        Skip = False
        Title = str(rss.entries[count].title)
        Summary = rss.entries[count].summary
@@ -78,6 +84,7 @@ def GetPosts(feed):
                Skip = True
                break
        if not Skip:
+           added += 1
            posts.append(Title)
            if ReadSummary:
                summaries.append(Summary)
@@ -138,7 +145,7 @@ if html:
    ifname = str(title.strip()) + ".md"
    ofname = str(title.strip()) + ".html"
 
-   convert = pypandoc.convert_file(ifname,'html5',outputfile=ofname,extra_args=['-s'])
+   convert = pypandoc.convert_file(ifname,'html',outputfile=ofname,extra_args=['-s'])
    assert(convert) == ''
 
    with open(ofname) as fp:
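A note on the pypandoc line above, assuming current pypandoc behaviour: convert_file returns an empty string whenever outputfile is given, which is what the assert is checking, and in pandoc 2.x the 'html' writer is already an alias for 'html5', so dropping the "5" should not change the output. A minimal sketch of the call pattern ('post.md' is a stand-in filename):

import pypandoc

# Writes post.html to disk; the return value is '' because outputfile is set.
result = pypandoc.convert_file('post.md', 'html',
                               outputfile='post.html', extra_args=['-s'])
assert result == ''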
@@ -159,6 +166,10 @@ if html:
 
    aos_script = soup.new_tag('script')
    aos_script.string = "AOS.init();"
+   # <meta name="viewport" content="width=device-width, initial-scale=1.0">
+   viewport = soup.new_tag("meta",content="width=device-width, initial-scale=1.0")
+   viewport.attrs["name"] = "viewport"
+   soup.head.append(viewport)
 
    soup.body.append(aos_script)
 
@@ -169,15 +180,18 @@ if html:
 
    soup.find("h1",{"id":"daily-dose"})['data-aos'] = title_animation
    soup.find("h1",{"id":"contents"})['data-aos'] = heading_animation
+   soup.find("h1",{"id":"contents"})['data-aos-anchor-placement'] = "top-bottom"
 
    paragraphs = soup.find_all("p")
    for paras in paragraphs:
        paras['data-aos'] = list_animation
+       paras['data-aos-anchor-placement'] = "bottom-bottom"
 
    lis = soup.find_all("li")
    for li in lis:
        if li.a == None:
            li['data-aos'] = list_animation
+           li['data-aos-anchor-placement'] = "bottom-bottom"
 
    with open(ofname, "w") as outf:
        outf.write(str(soup))
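For context on the attributes set in the last two hunks: data-aos and data-aos-anchor-placement are standard AOS (Animate On Scroll) hooks; "top-bottom" fires the animation when the element's top reaches the bottom of the viewport, while "bottom-bottom" waits for the element's bottom. A minimal sketch of the same BeautifulSoup pattern on a toy document (the markup here is invented for illustration):

from bs4 import BeautifulSoup

soup = BeautifulSoup("<html><head></head><body><p>hello</p></body></html>",
                     "html.parser")

# Same pattern as the diff: build a tag, set its attributes, append it.
viewport = soup.new_tag("meta", content="width=device-width, initial-scale=1.0")
viewport.attrs["name"] = "viewport"
soup.head.append(viewport)

# Tag attributes can be assigned like dictionary entries.
for p in soup.find_all("p"):
    p["data-aos"] = "fade-left"
    p["data-aos-anchor-placement"] = "bottom-bottom"

print(soup)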