-rwxr-xr-x  unbiased/main.py               15
-rw-r--r--  unbiased/unbiasedFunctions.py   8
-rw-r--r--  unbiased/unbiasedObjects.py     2
3 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/unbiased/main.py b/unbiased/main.py
index df2b209..f81321e 100755
--- a/unbiased/main.py
+++ b/unbiased/main.py
@@ -9,7 +9,9 @@ from unbiased.unbiasedObjects import *
from unbiased.unbiasedFunctions import *
from unbiased.parser import *
-logging.config.dictConfig({
+logger = logging.getLogger('unbiased')
+
+logging_config = {
'version': 1,
'formatters': {
'console': {
@@ -29,7 +31,7 @@ logging.config.dictConfig({
'class': 'logging.handlers.RotatingFileHandler',
'level': 'DEBUG',
'formatter': 'file',
- 'filename': '/opt/unbiased/logs/unbiased.debug.log',
+ 'filename': '',
'maxBytes': 1024 * 1024,
'backupCount': 3,
},
@@ -42,14 +44,17 @@ logging.config.dictConfig({
'root': {
'level': 'DEBUG',
}
-})
-logger = logging.getLogger('unbiased')
+}
def main():
parser = argparse.ArgumentParser()
- parser.add_argument('-w', '--webroot', default='/var/www/ubiased', help='location to write the output html')
+ parser.add_argument('-w', '--webroot', help='location of config file')
+ parser.add_argument('-l', '--log-dir', help='location to write logs')
args = parser.parse_args()
+ logging_config['handlers']['file']['filename'] = os.path.join(args.log_dir, 'unbiased.debug.log')
+ logging.config.dictConfig(logging_config)
+
crawl_frequency = 600
while True:
logger.info('Starting crawl')
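
Taken together, the main.py hunks defer logging.config.dictConfig() until after argument parsing, so the rotating-file handler's path can be built from the new --log-dir option instead of a hard-coded path. A minimal sketch of that pattern follows; the 'unbiased' logger name, the --log-dir flag, and the filename filled in inside main() come from the diff, while the trimmed-down config dict and the default values are illustrative assumptions.

import argparse
import logging
import logging.config
import os

logger = logging.getLogger('unbiased')

# Build the config as a plain dict first; the file handler's path is
# left empty and filled in once the command line has been parsed.
logging_config = {
    'version': 1,
    'formatters': {
        'console': {'format': '%(levelname)s %(message)s'},
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'level': 'INFO',
            'formatter': 'console',
        },
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'level': 'DEBUG',
            'formatter': 'console',
            'filename': '',          # set from --log-dir in main()
            'maxBytes': 1024 * 1024,
            'backupCount': 3,
        },
    },
    'root': {
        'level': 'DEBUG',
        'handlers': ['console', 'file'],
    },
}

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-l', '--log-dir', default='.', help='location to write logs')
    args = parser.parse_args()

    # Only now is the log directory known, so configure logging here.
    logging_config['handlers']['file']['filename'] = os.path.join(args.log_dir, 'unbiased.debug.log')
    logging.config.dictConfig(logging_config)
    logger.info('logging configured')

if __name__ == '__main__':
    main()
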
diff --git a/unbiased/unbiasedFunctions.py b/unbiased/unbiasedFunctions.py
index 2053ba5..46dae19 100644
--- a/unbiased/unbiasedFunctions.py
+++ b/unbiased/unbiasedFunctions.py
@@ -30,7 +30,7 @@ def buildArticle(url, sourceName, encoding=None):#, titleDelStart, titleDelEnd,
try:
res = requests.get(url)
except Exception as ex:
- logger.error("""ARTICLE DOWNLOADING ERROR
+ logger.debug("""ARTICLE DOWNLOADING ERROR
SOURCE:\t{}
URL:\t{}""".format(sourceName, url))
return None
@@ -38,7 +38,7 @@ def buildArticle(url, sourceName, encoding=None):#, titleDelStart, titleDelEnd,
if res.status_code == 200:
content = res.text
else:
- logger.error("""ARTICLE DOWNLOADING ERROR
+ logger.debug("""ARTICLE DOWNLOADING ERROR
SOURCE:\t{}
URL:\t{}""".format(sourceName, url))
return None
@@ -135,7 +135,7 @@ def buildArticle(url, sourceName, encoding=None):#, titleDelStart, titleDelEnd,
return a
except Exception:
- logger.error("""ARTICLE PARSING ERROR
+ logger.debug("""ARTICLE PARSING ERROR
SOURCE:\t{}
URL:\t{}""".format(sourceName, url))
return None
@@ -243,7 +243,7 @@ def pullImage(url, index, webroot, target_width=350, target_height=200):
if res.status_code == 200:
content = res.content
else:
- logger.error('Image not found: url={}'.format(url))
+ logger.debug('Image not found: url={}'.format(url))
return ''
img = Image.open(io.BytesIO(content))
# crop to aspect ratio
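
In unbiasedFunctions.py the per-article failure messages are downgraded from logger.error to logger.debug: a single unreachable article or image is routine during a crawl, so it is recorded in the debug log and signalled to the caller by returning None rather than being reported as an error. A rough sketch of that fetch-and-skip pattern follows; the requests usage and the DEBUG-then-return-None shape mirror the diff, while the fetch_article helper name, the example URLs, and the caller loop are illustrative assumptions.

import logging
import requests

logger = logging.getLogger('unbiased')

def fetch_article(url, source_name):
    """Return the page body, or None if the article could not be fetched."""
    try:
        res = requests.get(url)
    except Exception:
        # Routine failure: note it at DEBUG and let the caller skip this article.
        logger.debug('ARTICLE DOWNLOADING ERROR\nSOURCE:\t%s\nURL:\t%s', source_name, url)
        return None
    if res.status_code != 200:
        logger.debug('ARTICLE DOWNLOADING ERROR\nSOURCE:\t%s\nURL:\t%s', source_name, url)
        return None
    return res.text

# Callers simply skip articles that came back as None.
for url in ['https://example.com/story-1', 'https://example.com/story-2']:
    content = fetch_article(url, 'Example Source')
    if content is None:
        continue
    # ... parse and store the article ...
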
diff --git a/unbiased/unbiasedObjects.py b/unbiased/unbiasedObjects.py
index 9372d3a..7908fbb 100644
--- a/unbiased/unbiasedObjects.py
+++ b/unbiased/unbiasedObjects.py
@@ -90,5 +90,5 @@ class NewsSource():
elif level==3:
self.h3Arr.append(article)
else:
- logger.error("Invalid level in NewsSource.addArtlce: " + level)
+ logger.debug("Invalid level in NewsSource.addArtlce: " + level)