"""Fetch configured RSS feeds, render them to a dated Markdown digest,
optionally convert that digest to HTML/PDF via pandoc, and link the day's
HTML page into an archive index page."""

import ast
import configparser
import sys
from datetime import date
from os import path
from shutil import copy

import feedparser
import pypandoc
from bs4 import BeautifulSoup
from mdutils import MdUtils

#################
# Configuration #
#################
markdown = True
pdf = False
html = True
animations = True
html_stylesheet = "styles/simple.css"
title_animation = "fade-down"
heading_animation = "fade-right"
list_animation = "fade-left"
ConvertToHTML = True
# Today's date doubles as the output file name and page title.
title = date.today().strftime('%d %B, %Y')
og_title = title
feed_configuration = ['feeds.ini', 'covid.ini']
archive_configuration = {
    "feeds.ini": {
        "id": "daily-dose-list",
        "folder": "archive-daily-dose",
        "title": "Daily-Dose",
        "hide_overflow": True,
    },
    "covid.ini": {
        "id": "covid-19-list",
        "folder": "archive-covid-19",
        "title": "Covid-19 Sentry",
        "hide_overflow": False,
    },
}
update_archive = True

#############
# Functions #
#############


def GetPosts(feed):
    """Fetch one feed section and return its filtered entries.

    Reads the module-level ``feeds`` ConfigParser for the section named
    *feed*.  Recognised per-feed keys (all stored as Python literals in the
    .ini file): URL (required), IGNORE (list of words), SUMMARY (bool),
    ToRead (int, -1 = all), LINK (bool).

    Returns a dict mapping 0..N-1 -> {"post": {"title", "summary", "link"}}
    plus the meta keys "NoOfPosts", "Summary" and "ShowLink", or None when
    the feed could not be fetched.  Exits the program on a section with no
    URL.
    """
    try:
        URL = feeds[feed]["URL"]
    except KeyError:
        # Bug fix: original format string had no % operand.
        print("Improper configuration for %s. No URL Specified" % feed)
        sys.exit(1)

    # Optional settings.  ast.literal_eval (never eval!) safely parses the
    # literals the config stores, e.g. "['word']", "True", "5".
    section = feeds[feed]
    try:
        ToIgnore = ast.literal_eval(section["IGNORE"])
    except KeyError:
        ToIgnore = []
    try:
        ReadSummary = ast.literal_eval(section["SUMMARY"])
    except KeyError:
        ReadSummary = True
    try:
        ToRead = ast.literal_eval(section["ToRead"])
    except (KeyError, ValueError, SyntaxError):
        ToRead = 5
    try:
        ShowLink = ast.literal_eval(section["LINK"])
    except KeyError:
        ShowLink = False

    try:
        rss = feedparser.parse(URL)
    except Exception:  # feedparser rarely raises, but keep the guard
        print("Could not fetch data for feed: %s" % feed)
        return None

    maximum = len(rss.entries)
    if ToRead == -1 or ToRead > maximum:
        ToRead = maximum

    posts = []
    summaries = []
    links = []
    count = 0
    # Walk entries in order until we have collected ToRead non-ignored posts.
    while count != maximum and len(posts) != ToRead:
        entry = rss.entries[count]
        entry_title = str(entry.title)
        entry_summary = entry.summary
        skip = any(
            word.lower() in entry_title.lower()
            or word.lower() in entry_summary.lower()
            for word in ToIgnore
        )
        if not skip:
            posts.append(entry_title)
            if ReadSummary:
                summaries.append(entry_summary)
            if ShowLink:
                links.append(entry.link)
        count += 1

    Posts = {}
    for idx, post_title in enumerate(posts):
        post = {
            "title": post_title.encode('utf-8'),
            # Consistently store missing fields under "post" (the original
            # put a disabled summary at the wrong nesting level).
            "summary": summaries[idx].encode('utf-8') if ReadSummary else None,
            "link": links[idx].encode('utf-8') if ShowLink else None,
        }
        Posts[idx] = {"post": post}
    Posts["NoOfPosts"] = len(posts)
    Posts["Summary"] = ReadSummary
    Posts["ShowLink"] = ShowLink
    return Posts


def add_today(HomeFile, TagID, folder):
    """Archive today's HTML page and link it from the archive index.

    Copies "<today>.html" into *folder* (unless already archived), then
    prepends <li><a href="./folder/<today>.html"><today></a></li> to the
    <ul id=TagID> list inside *HomeFile*.  Exits if today's page does not
    exist anywhere.
    """
    page = og_title + ".html"
    archived = str("./%s/" % folder) + page
    if not (path.exists(page) or path.exists(archived)):
        print("File does not exist!")
        sys.exit(1)
    if not path.exists(archived):
        copy(page, folder)

    with open(HomeFile) as fp:
        soup = BeautifulSoup(fp, 'html.parser')
    ToUpdate = soup.find('ul', {"id": TagID})
    NewTag = soup.new_tag('li')
    anchor = soup.new_tag('a', href=str('./%s/' % folder + og_title + ".html"))
    anchor.string = og_title
    NewTag.append(anchor)
    ToUpdate.insert(0, NewTag)  # newest entry first
    with open(HomeFile, 'w') as fp:
        fp.write(str(soup))


################
# Main Program #
################
if (pdf and not markdown) or (html and not markdown):
    print("Markdown should be True to convert to pdf/html")
    sys.exit(1)
if update_archive and not html:
    print("HTML is required to update archive")
    sys.exit(1)

for config in feed_configuration:
    title = og_title
    feeds = configparser.ConfigParser()
    # read() returns the list of files it parsed; empty means missing file.
    if not feeds.read(config):
        print("%s does not exist!" % config)
        sys.exit(1)
    print("Reading %s" % config)
    rss_feeds = list(feeds.keys())[1:]  # drop the implicit DEFAULT section
    print("Read %s feeds from the configuration file" % len(rss_feeds))
    print(rss_feeds)

    posts = {}
    for feed in rss_feeds:
        sauce = GetPosts(feed)
        if sauce is not None:
            posts[feed] = sauce
    # Keep only feeds that fetched successfully.  (The original removed
    # items from rss_feeds while iterating it, which skips elements.)
    rss_feeds = list(posts.keys())

    if markdown:
        mdfile = MdUtils(file_name=title,
                         title=archive_configuration[config]['title'])
        for feed in posts:
            mdfile.new_header(level=1, title="From %s" % feed)
            for idx in range(posts[feed]["NoOfPosts"]):
                entry = posts[feed][idx]["post"]
                mdfile.write("* **" + entry["title"].decode('utf-8') + "**")
                if posts[feed]["Summary"]:
                    mdfile.write(" - ")
                    mdfile.write(entry["summary"].decode('utf-8'))
                if posts[feed]["ShowLink"]:
                    mdfile.write(" - [link](%s)" % entry["link"].decode('utf-8'))
                    mdfile.write("\n\n")
                elif posts[feed]["Summary"]:
                    mdfile.write("\n\n")
                else:
                    mdfile.write("\n")
            mdfile.new_line("\n")
        mdfile.new_table_of_contents(table_title='Contents', depth=2)
        mdfile.create_md_file()

    if pdf:
        ifname = title.strip() + ".md"
        ofname = title.strip() + ".pdf"
        pypandoc.convert_file(ifname, 'pdf', outputfile=ofname,
                              extra_args=['-V', 'geometry:margin=1.5cm'])

    if html:
        ifname = title.strip() + ".md"
        ofname = title.strip() + ".html"
        convert = pypandoc.convert_file(ifname, 'html', outputfile=ofname,
                                        extra_args=['-s'])
        # pandoc returns '' when writing to a file; anything else is an error.
        # (Replaces a bare assert, which -O would strip.)
        if convert != '':
            raise RuntimeError("pandoc conversion failed for %s" % ifname)

        with open(ofname) as fp:
            soup = BeautifulSoup(fp, 'html5lib')

        # Don't clobber the global `title` with a bs4 Tag (original bug).
        title_tag = soup.new_tag('title')
        title_tag.string = archive_configuration[config]['title']
        soup.head.append(title_tag)

        viewport = soup.new_tag("meta",
                                content="width=device-width, initial-scale=1.0")
        viewport.attrs["name"] = "viewport"
        soup.head.append(viewport)

        # Stylesheet is linked both relative to the site root and from
        # inside the archive folder, so the copied page also renders.
        for href in (html_stylesheet, "../" + html_stylesheet):
            soup.head.append(soup.new_tag('link', href=href, rel='stylesheet'))

        if archive_configuration[config]["hide_overflow"]:
            style = soup.new_tag('style')
            style.string = "*{overflow-x:hidden;}"
            soup.head.append(style)

        if animations:
            # AOS (animate-on-scroll) assets plus per-element attributes.
            aos_css = soup.new_tag(
                'link', href='https://unpkg.com/aos@2.3.1/dist/aos.css',
                rel='stylesheet')
            soup.head.append(aos_css)
            aos_js = soup.new_tag(
                'script', src="https://unpkg.com/aos@2.3.1/dist/aos.js")
            soup.head.append(aos_js)
            aos_script = soup.new_tag('script')
            aos_script.string = "AOS.init();"
            soup.body.append(aos_script)

            for feed in rss_feeds:
                # pandoc slugifies "From <feed>" headings like this.
                ToFindID = "from-" + feed.strip().replace(":", "").replace(" ", "-").lower()
                ToEdit = soup.find("h1", {"id": ToFindID})
                ToEdit['data-aos'] = 'fade-right'
            soup.find_all("h1")[0]['data-aos'] = title_animation
            contents = soup.find("h1", {"id": "contents"})
            contents['data-aos'] = heading_animation
            contents['data-aos-anchor-placement'] = "top-bottom"
            for paras in soup.find_all("p"):
                paras['data-aos'] = list_animation
                paras['data-aos-anchor-placement'] = "bottom-bottom"
            for li in soup.find_all("li"):
                if li.a is None:  # skip TOC entries, which contain links
                    li['data-aos'] = list_animation
                    li['data-aos-anchor-placement'] = "bottom-bottom"

        with open(ofname, "w") as outf:
            outf.write(str(soup))

    if update_archive:
        add_today('index.html',
                  archive_configuration[config]["id"],
                  folder=archive_configuration[config]["folder"])