#!/usr/bin/env python3
from unbiasedObjects import *
from unbiasedFunctions import *
from parser import *
import time
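# unbiasedObjects, unbiasedFunctions, and parser are this project's own modules;
# together they supply the NewsSource class and the build/output helpers used below.
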
def main():
    # Re-scrape all sources and rebuild the page every ten minutes
    while True:
        print('-----------------------')
        run()
        print('-----------------------')
        time.sleep(600)


def run():
    sourceList = []
    # Statically-configured sources: each NewsSource bundles a name, a URL,
    # and the string markers used when scraping that page
    sourceList.append(NewsSource('BBC US',
                                 'http://www.bbc.com/news/world/us_and_canada',
                                 ['buzzard-item', '', ''],
                                 [],
                                 '', '',
                                 '', '',
                                 None, None))
    sourceList.append(NewsSource('The Blaze',
                                 'http://theblaze.com',
                                 ['', 'href="'],
                                 [],
                                 '', '',
                                 '', '',
                                 None, None))
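
    # Sources whose scraping details live in dedicated builder functions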
    wkl = buildWeeklyStandard()
    sourceList.append(wkl)
    nyt = buildNYT()
    sourceList.append(nyt)
    fox = buildFoxNews()
    sourceList.append(fox)

    # scrape all URLs and build the data structure
    newsSourceArr = buildNewsSourceArr(sourceList)

    # build the output page's HTML
    outputHTML = buildOutput(newsSourceArr)

    # write the finished HTML out to the web root
    printOutputHTML(outputHTML, '/var/www/html/index.html')


if __name__ == "__main__":
    main()