Add logging

parent 60a944cf5e
commit 916fcf1bab

3 changed files with 39 additions and 6 deletions
@@ -3,6 +3,10 @@ A module containing all console script functions.
 """
 import argparse
 import asyncio
+import logging
+import logging.config
+from logging import Logger
+from typing import Tuple
 import yaml
 
 from chweb.collector import Collector
@@ -10,7 +14,7 @@ from chweb.consumer import Consumer
 from chweb.models import Config
 
 
-def configure() -> Config:
+def configure(name) -> Tuple[Config, Logger]:
     """
     Gets the configuration and creates a Pydantic model from the parsed YAML.
     """
@@ -23,7 +27,9 @@ def configure() -> Config:
     args = parser.parse_args()
     with open(args.config, 'r') as conf_file:
         config = yaml.load(conf_file, Loader=yaml.FullLoader)
-        return Config(**config)
+    logging.config.dictConfig(config['logging'])
+    logger = logging.getLogger("chweb.{}".format(name))
+    return (Config(**config), logger)
 
 
 def run(Service):
@@ -32,8 +38,11 @@ def run(Service):
     """
     loop = asyncio.get_event_loop()
    queue = asyncio.Queue()
-    config = configure()
-    service = Service(config, loop, queue)
+    config, logger = configure(Service.__name__)
+    logger.info(("Starting service on kafka [cluster]/topic: "
+                 "{}/{}").format(config.kafka.servers,
+                                 config.kafka.topic))
+    service = Service(config, logger, loop, queue)
     service.run()
 
 
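Note: the new configure() hands the parsed config['logging'] mapping straight to logging.config.dictConfig, so the YAML file is expected to contain a standard dictConfig section. That section is not part of this diff, so the snippet below is only a minimal sketch of an assumed shape; the LOGGING_SKETCH name and the "default"/"console" entries are illustrative, not taken from the project.

# A minimal, assumed dictConfig-compatible structure, i.e. what
# config['logging'] could look like after the YAML is parsed. Only the
# dictConfig schema itself is standard; every concrete value is a guess.
import logging
import logging.config

LOGGING_SKETCH = {
    "version": 1,                       # required by the dictConfig schema
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "format": "%(asctime)s %(name)s %(levelname)s %(message)s",
        },
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "default",
        },
    },
    "loggers": {
        # configure() names loggers "chweb.<ServiceName>", so a "chweb"
        # parent logger covers every service logger.
        "chweb": {
            "handlers": ["console"],
            "level": "INFO",
        },
    },
}

logging.config.dictConfig(LOGGING_SKETCH)
logging.getLogger("chweb.Collector").info("logging configured")

In the YAML file this would sit under a top-level logging: key with the same structure, since yaml.load turns it into exactly such a dict.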
@@ -56,7 +56,12 @@ class Collector:
         :param site: A site object from the config.
         """
         while True:
-            data = await self.check(site.url, site.regex)
+            try:
+                data = await self.check(site.url, site.regex)
+            except Exception as exc:
+                errmsg = "{}; {}".format(site.url, exc)
+                self.logger.error(errmsg)
+                break  # Break the loop and destroy the Task.
             self.queue.put_nowait(data)
             await asyncio.sleep(site.check_interval)
 
@@ -77,6 +82,7 @@ class Collector:
                 msg = bytes(check.json().encode("utf-8"))
                 await producer.send_and_wait(self.config.kafka.topic, msg)
         finally:
+            self.logger.warning("Kafka producer destroyed!")
             await producer.stop()
 
     def run(self):
@@ -88,3 +94,4 @@ class Collector:
         tasks = list(map(create_task, self.config.sites))
         tasks.append(self.loop.create_task(self.produce()))
         self.loop.run_until_complete(asyncio.gather(*tasks))
+        self.logger.info("Checker stopped ...")
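Note: the point of wrapping self.check() in try/except is that a failing site only logs an error and ends its own task, while the other site tasks and the Kafka producer keep running under asyncio.gather(). The sketch below reproduces that pattern outside the project; the check/check_site names and the bounded loop are illustrative stand-ins, not the collector's actual code.

# Self-contained illustration of the "log and break" pattern from the diff:
# one task fails and stops itself; the other finishes its checks normally.
import asyncio
import logging

logging.basicConfig(level=logging.INFO,
                    format="%(asctime)s %(name)s %(levelname)s %(message)s")
logger = logging.getLogger("chweb.Collector")


async def check(url: str) -> str:
    # Stand-in for Collector.check(); fails for one URL to show the error path.
    if "bad" in url:
        raise RuntimeError("connection refused")
    return "{} OK".format(url)


async def check_site(url: str, interval: float, checks: int = 3) -> None:
    # The real collector loops forever (while True); a bounded loop keeps the
    # demo terminating.
    for _ in range(checks):
        try:
            data = await check(url)
        except Exception as exc:
            logger.error("%s; %s", url, exc)
            break  # stop only this task; the others are unaffected
        logger.info(data)
        await asyncio.sleep(interval)


async def main() -> None:
    await asyncio.gather(
        check_site("https://example.com", 0.1),
        check_site("https://bad.example", 0.1),
    )


asyncio.run(main())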