Refactor once again

- Split collector and producer.
- Rename Application to Service.
- Use __call__() instead of run().

parent cc4b49a34e
commit fff8e6e4cc

4 changed files with 58 additions and 61 deletions
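
As a reader aid, here is a minimal, self-contained sketch of the pattern the message above describes: each service keeps its shared state on a common Service base class and exposes its work through __call__(), which returns an awaitable, so an entry point can compose several services with asyncio.gather() instead of calling a blocking run(). The EchoProducer/EchoConsumer classes and the trimmed-down constructor are illustrative stand-ins, not code from this repository.

import asyncio


class Service:
    """Base class for services; the real one also carries config, logger and loop."""

    def __init__(self, queue):
        self.queue = queue


class EchoProducer(Service):
    async def produce(self):
        # Push a few items into the shared queue.
        for i in range(3):
            self.queue.put_nowait(i)
            await asyncio.sleep(0)

    def __call__(self):
        # Return the coroutine; the caller decides how to schedule it.
        return self.produce()


class EchoConsumer(Service):
    async def consume(self):
        # Read the same number of items back out.
        for _ in range(3):
            print(await self.queue.get())

    def __call__(self):
        return self.consume()


async def main():
    queue = asyncio.Queue()
    producer = EchoProducer(queue)
    consumer = EchoConsumer(queue)
    # Entry points now gather the callables instead of calling .run().
    await asyncio.gather(producer(), consumer())


if __name__ == "__main__":
    asyncio.run(main())

(The actual entry points in the diff below use loop.run_until_complete(asyncio.gather(collector(), producer())); asyncio.run() is used here only to keep the sketch short.)
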
|
@@ -7,7 +7,7 @@ import logging
 from chweb.models import Config


-class Application:
+class Service:
     """
     A base class for applications / services.
     """
@@ -19,6 +19,3 @@ class Application:
         self.logger = logger
         self.loop = event_loop
         self.queue = queue
-
-    def run(self):
-        raise NotImplementedError()

@@ -9,7 +9,7 @@ from logging import Logger
 from typing import Tuple
 import yaml

-from chweb.collector import Collector
+from chweb.collector import Collector, Producer
 from chweb.consumer import Consumer
 from chweb.models import Config

@@ -20,7 +20,7 @@ def configure(name) -> Tuple[Config, Logger]:
     """
     parser = argparse.ArgumentParser(
         description='Website availibility checker.')
-    parser.add_argument('--config', type=str,
+    parser.add_argument('-c', '--config', type=str,
                         default="/etc/checker.yaml",
                         help=('The yaml config file. '
                               'Defaults to /etc/checker.yaml'))
@@ -32,29 +32,31 @@ def configure(name) -> Tuple[Config, Logger]:
     return (Config(**config), logger)


-def run(Service):
-    """
-    Runs a service in an event loop.
-    """
-    loop = asyncio.get_event_loop()
-    queue = asyncio.Queue()
-    config, logger = configure(Service.__name__)
-    logger.info(("Starting service on kafka [cluster]/topic: "
-                 "{}/{}").format(config.kafka.servers,
-                                 config.kafka.topic))
-    service = Service(config, logger, loop, queue)
-    service.run()
-
-
 def collect():
     """
     Main producer event loop.
     """
-    run(Collector)
+    loop = asyncio.get_event_loop()
+    queue = asyncio.Queue()
+
+    config, logger = configure("collector")
+    logger.info("Starting collector for %d sites.", len(config.sites))
+    collector = Collector(config, logger, loop, queue)
+    logger.info(("Starting kafka producer on kafka [cluster]/topic: "
+                 "%s/%s"), config.kafka.servers, config.kafka.topic)
+    producer = Producer(config, logger, loop, queue)
+    loop.run_until_complete(asyncio.gather(collector(), producer()))


 def consume():
     """
     Main consumer event loop.
     """
-    run(Consumer)
+    loop = asyncio.get_event_loop()
+    queue = asyncio.Queue()
+
+    config, logger = configure("consumer")
+    logger.info(("Starting kafka consumer on kafka [cluster]/topic: "
+                 "%s/%s"), config.kafka.servers, config.kafka.topic)
+    consumer = Consumer(config, logger, loop, queue)
+    loop.run_until_complete(consumer())

@@ -8,12 +8,13 @@ from urllib.parse import urlparse

 import aiokafka  # type: ignore
 import requests
+from requests import ConnectionError

-from chweb.base import Application
+from chweb.base import Service
 from chweb.models import Check, SiteConfig


-class Collector(Application):
+class Collector(Service):
     """
     A class that contains all methods needed to check the statuses of all
     websites present in the config.
@@ -44,10 +45,10 @@ class Collector(Application):

     async def check_forever(self, site: SiteConfig):
         """
-        A void function that checks a site and sends the result to an
-        :class:`asyncio.Queue` for further processing, i.e. sends to a Kafka
-        topic when it's read from the queue in
-        :meth:`chweb.collector.Collector.produce` (as in produce data for the
+        A void function that checks the status of a site and sends the result
+        in an :class:`asyncio.Queue` for further processing, i.e. the check
+        info is sent to a Kafka topic in
+        :meth:`chweb.collector.Producer.produce` (as in produce data for the
         Kafka consumers, defined in :class:`chweb.consumer.Consumer.consume`.

         :param site: A :py:class:`chweb.models.SiteConfig` object from the
@@ -56,13 +57,27 @@ class Collector(Application):
         while True:
             try:
                 data = await self.check(site.url, site.regex)
-            except Exception as exc:
+            except ConnectionError as exc:
                 errmsg = "{}; {}".format(site.url, exc)
                 self.logger.error(errmsg)
                 break  # Break the loop and destroy the Task.
             self.queue.put_nowait(data)
             await asyncio.sleep(site.check_interval)

+    def __call__(self) -> asyncio.Future:
+        def create_task(site) -> asyncio.Task:
+            return self.loop.create_task(self.check_forever(site))
+        tasks = map(create_task, self.config.sites)
+        return asyncio.gather(*tasks)
+
+
+class Producer(Service):
+    """
+    Kafka producer.
+
+    Reads from the queue that :class:`chweb.collector.Collector` writes in and
+    sends all messages in a kafka topic.
+    """
     async def produce(self):
         """
         Creates and starts an ``aiokafka.AIOKafkaProducer`` and runs a loop
@@ -83,13 +98,5 @@ class Collector(Application):
             self.logger.warning("Kafka producer destroyed!")
             await producer.stop()

-    def run(self):
-        """
-        Runs all tasks in the event loop.
-        """
-        def create_task(site) -> asyncio.Task:
-            return self.loop.create_task(self.check_forever(site))
-        tasks = list(map(create_task, self.config.sites))
-        tasks.append(self.loop.create_task(self.produce()))
-        self.loop.run_until_complete(asyncio.gather(*tasks))
-        self.logger.info("Checker stopped ...")
+    def __call__(self) -> asyncio.Future:
+        return self.produce()

@@ -9,14 +9,14 @@ from typing import Any, Dict
 import aiokafka  # type: ignore
 import asyncpg  # type: ignore

-from chweb.base import Application
+from chweb.base import Service
 from chweb.models import Check


-class Consumer(Application):
+class Consumer(Service):
     async def consume(self):
         """
-        Consumes messages from a Kafka topic.
+        Consumes messages from a kafka topic and writes them in the database.
         """
         consumer = aiokafka.AIOKafkaConsumer(
             self.config.kafka.topic,
@@ -25,33 +25,24 @@ class Consumer(Application):

         await consumer.start()
         try:
-            # Consume messages
+            # Consume messages from the kafka topic.
             async for msg in consumer:
-                self.queue.put_nowait(Check(**json.loads(msg.value)))
+                check_info = Check(**json.loads(msg.value))
+                self.queue.put_nowait(check_info)
+                self.logger.info(check_info)
         finally:
             # Will leave consumer group; perform autocommit if enabled.
             await consumer.stop()

-    async def save(self, pool, data):
-        async with pool.acquire() as conn:
-            async with conn.cursor() as cur:
-                await cur.execute("SELECT 1")
-
-    async def write(self):
+    def __call__(self) -> asyncio.Future:
+        return self.consume()
+
+class Db:
+    async def consume_and_save(self):
         try:
             while True:
                 status = await self.queue.get()
-                print(status)
+                yield status
         finally:
-            print("EXITED!")
-
-    def run(self):
-        """
-        Runs all tasks in the event loop.
-        """
-        tasks = [
-            self.loop.create_task(self.consume()),
-            self.loop.create_task(self.write()),
-        ]
-        self.loop.run_until_complete(asyncio.gather(*tasks))
+            self.logger.info("Queue reader stopped.")
