Add unit tests with 68% coverage, refactor a bit

commit 2b9435dddd (parent 7967b1d024)
Vladan Popovic, 2020-09-06 00:23:34 +02:00
9 changed files with 213 additions and 39 deletions

conf.py (Sphinx docs configuration)

@@ -15,9 +15,6 @@ import sys
 
 import sphinx_typlog_theme
 
-sys.path.insert(0, os.path.join(os.path.abspath('.'),
-                                "..", "..", "src"))
-
 # -- Project information -----------------------------------------------------
 
 project = 'chweb'

src/chweb/__init__.py (new, empty file)

src/chweb/cmd.py

@@ -6,7 +6,7 @@ import asyncio
 import logging
 import logging.config
 from logging import Logger
-from typing import Tuple
+from typing import Any, Dict, Tuple
 
 import os
 import yaml
@@ -27,6 +27,15 @@ def configure(name) -> Tuple[Config, Logger]:
                                  'Defaults to /etc/checker.yaml'))
     args = parser.parse_args()
+    with open(args.config, 'r') as conf_file:
+        config = yaml.load(conf_file, Loader=yaml.FullLoader)
+    logging.config.dictConfig(config['logging'])
+    logger = logging.getLogger("chweb.{}".format(name))
+    return (config, logger)
+
+
+def create_config(conf: Dict[str, Any]):
     kafka_servers_env = os.getenv('KAFKA_SERVERS')
     if kafka_servers_env is not None:
         kafka_servers = kafka_servers_env.split(',')
@@ -39,23 +48,18 @@ def configure(name) -> Tuple[Config, Logger]:
     pg_user = os.getenv('POSTGRES_USER')
     pg_pass = os.getenv('POSTGRES_PASS')
 
-    with open(args.config, 'r') as conf_file:
-        config = yaml.load(conf_file, Loader=yaml.FullLoader)
-    logging.config.dictConfig(config['logging'])
-
-    config = Config(**config)
+    config = Config(**conf)
     config.kafka.servers = (kafka_servers if kafka_servers_env
                             else config.kafka.servers)
     config.kafka.topic = kafka_topic or config.kafka.topic
     config.postgres.dbhost = pg_host or config.postgres.dbhost
     config.postgres.dbname = pg_db or config.postgres.dbname
-    config.postgres.dbport = pg_port or config.postgres.dbport
+    config.postgres.dbport = (int(pg_port) if pg_port is not None
+                              else config.postgres.dbport)
     config.postgres.dbuser = pg_user or config.postgres.dbuser
     config.postgres.dbpass = pg_pass or config.postgres.dbpass
-
-    logger = logging.getLogger("chweb.{}".format(name))
-    print(config)
-    return (config, logger)
+    return config
 
 
 def collect():
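
Note: the refactor splits configuration handling in two. configure keeps the argparse/YAML/logging I/O, while the new create_config turns a plain dict into a validated Config, letting environment variables take precedence. That split is what the new fixtures in tests/conftest.py rely on. A minimal sketch of the override behaviour, reusing the dict shape from those fixtures (the KAFKA_SERVERS value here is illustrative):

    import os

    from chweb.cmd import create_config

    # Environment variables win over values from the config dict.
    os.environ['KAFKA_SERVERS'] = "kafka1:9092,kafka2:9092"  # illustrative

    config = create_config({
        'kafka': {'servers': ["localhost:9992"], 'topic': "sample"},
        'postgres': {'dbhost': "localhost", 'dbport': 5432, 'dbname': "chweb",
                     'dbuser': "vladan", 'dbpass': ""},
        'sites': [{'url': "https://example.com", 'regex': "aaaaaaaaaaaaa",
                   'check_interval': 8}],
    })
    assert config.kafka.servers == ["kafka1:9092", "kafka2:9092"]

Since environment values arrive as strings, the Postgres port is cast with int() instead of the plain `or` fallback used for the other fields.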

src/chweb/collector.py

@@ -2,16 +2,17 @@
 Checks status of web servers and sends them to a configured Kafka topic.
 """
 import asyncio
+import logging
 import re
 from typing import Optional
 from urllib.parse import urlparse
 
 import aiokafka  # type: ignore
 import requests
-from requests import ConnectionError
+from requests import exceptions as rqexc
 
 from chweb.base import Service
-from chweb.models import Check, SiteConfig
+from chweb.models import Check, Config, SiteConfig
 
 
 class Collector(Service):
@@ -30,8 +31,8 @@ class Collector(Service):
         ``chweb.collector.Collector.check_forever``.
         """
         res = await self.loop.run_in_executor(None, requests.get, url)
         matches = None  # The matches value should be None since the regex can
                         # be omitted from the config.
         if regex is not None:
             matches = re.search(regex, res.text) is not None
         return Check(
@@ -57,10 +58,10 @@ class Collector(Service):
         while True:
             try:
                 data = await self.check(site.url, site.regex)
-            except ConnectionError as exc:
+            except rqexc.ConnectionError as exc:
                 errmsg = "{}; {}".format(site.url, exc)
                 self.logger.error(errmsg)
                 break  # Break the loop and destroy the Task.
             self.queue.put_nowait(data)
             await asyncio.sleep(site.check_interval)
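
The Check model comes from chweb.models, which this commit does not touch. A hedged sketch of the shape implied by the new tests (pydantic is an assumption based on the check.json() call in Producer; the exact types are guesses):

    from typing import Optional

    from pydantic import BaseModel  # assumed: Check.json() suggests pydantic


    class Check(BaseModel):
        # Field names come from the assertions in tests/test_checker.py below;
        # defaults are implied by the bare Check() calls in tests/test_producer.py.
        domain: Optional[str] = None
        status: Optional[int] = None
        response_time: Optional[float] = None
        regex_matches: Optional[bool] = None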
@@ -75,28 +76,36 @@ class Producer(Service):
     """
     Kafka producer.
 
-    Reads from the queue that :class:`chweb.collector.Collector` writes in and
-    sends all messages in a kafka topic.
+    Reads checks from the queue written by :class:`chweb.collector.Collector`
+    and sends all messages in a kafka topic.
     """
 
+    def __init__(self, config: Config,
+                 logger: logging.Logger,
+                 event_loop: asyncio.AbstractEventLoop,
+                 queue: asyncio.Queue):
+        super().__init__(config, logger, event_loop, queue)
+        self.producer = aiokafka.AIOKafkaProducer(
+            loop=self.loop,
+            bootstrap_servers=self.config.kafka.servers)
+
     async def produce(self):
         """
         Creates and starts an ``aiokafka.AIOKafkaProducer`` and runs a loop
-        that reads from the queue and sends the messages to the topic from the
-        config.
+        that reads from the queue and sends the messages to the topic defined
+        in the config.
         """
-        producer = aiokafka.AIOKafkaProducer(
-            loop=self.loop,
-            bootstrap_servers=self.config.kafka.servers)
-        await producer.start()
+        await self.producer.start()
         try:
             while True:
                 check = await self.queue.get()
                 msg = bytes(check.json().encode("utf-8"))
-                await producer.send_and_wait(self.config.kafka.topic, msg)
+                await self.producer.send_and_wait(self.config.kafka.topic, msg)
+        except Exception as exc:
+            self.logger.error(exc)
         finally:
             self.logger.warning("Kafka producer destroyed!")
-            await producer.stop()
+            await self.producer.stop()
 
     def __call__(self) -> asyncio.Future:
         return self.produce()
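
With the Kafka client now created in __init__ and stored on self, tests can swap producer.producer for a mock before produce() runs (see tests/test_producer.py below). For context, a sketch of how the two services share one queue, using only the constructor signatures visible in this diff (the real wiring lives in chweb.cmd and is not shown here; the config dict mirrors tests/conftest.py):

    import asyncio
    import logging

    from chweb.cmd import create_config
    from chweb.collector import Collector, Producer

    config = create_config({
        'kafka': {'servers': ["localhost:9992"], 'topic': "sample"},
        'postgres': {'dbhost': "localhost", 'dbport': 5432, 'dbname': "chweb",
                     'dbuser': "vladan", 'dbpass': ""},
        'sites': [{'url': "https://example.com", 'regex': "aaaaaaaaaaaaa",
                   'check_interval': 8}],
    })
    logger = logging.getLogger("chweb.example")  # hypothetical logger name
    loop = asyncio.get_event_loop()
    queue: asyncio.Queue = asyncio.Queue()

    collector = Collector(config, logger, loop, queue)
    producer = Producer(config, logger, loop, queue)

    # One polling task per configured site, all feeding the single producer.
    tasks = [loop.create_task(collector.check_forever(site))
             for site in config.sites]
    tasks.append(loop.create_task(producer()))
    loop.run_until_complete(asyncio.gather(*tasks))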

tests/__init__.py (new, empty file)

tests/conftest.py (new file, 49 lines)

@@ -0,0 +1,49 @@
import asyncio

import pytest

from chweb.cmd import create_config


@pytest.fixture()
def config():
    config_dict = {
        'kafka': {
            'servers': ["localhost:9992"],
            'topic': "sample",
        },
        'postgres': {
            'dbhost': "localhost",
            'dbport': 5432,
            'dbname': "chweb",
            'dbuser': "vladan",
            'dbpass': "",
        },
        'sites': [{
            'url': "https://example.com",
            'regex': "aaaaaaaaaaaaa",
            'check_interval': 8,
        }],
    }
    return create_config(config_dict)


@pytest.fixture
def config_invalid():
    config_dict = {
        'kafka': {
            'servers': ["localhost:9992"],
            'topic': "sample",
        },
        'postgres': {
            'dbhost': "localhost",
            'dbport': 5432,
            'dbname': "chweb",
            'dbuser': "vladan",
            'dbpass': "",
        },
        'sites': [{
            'url': "https://dsadakjhkjsahkjh.com",
            'regex': "domain",
            'check_interval': 5,
        }],
    }
    return create_config(config_dict)

tests/test_checker.py (new file, 67 lines)

@@ -0,0 +1,67 @@
"""
All tests for the ``chweb.checker`` module.
"""
import asyncio

from mock import Mock
import pytest
import requests

from chweb.collector import Collector


@pytest.mark.asyncio
async def test_valid_site_200(config, event_loop):
    queue = asyncio.Queue()
    coll = Collector(config, Mock(), event_loop, queue)
    check = await coll.check('https://example.com', None)
    assert check.domain == 'example.com'
    assert check.regex_matches is None
    assert check.status == 200
    assert check.response_time > 0


@pytest.mark.asyncio
async def test_valid_site_404(config, event_loop):
    queue = asyncio.Queue()
    coll = Collector(config, Mock(), event_loop, queue)
    check = await coll.check('https://example.com/404', None)
    assert check.domain == 'example.com'
    assert check.regex_matches is None
    assert check.status == 404
    assert check.response_time > 0


@pytest.mark.asyncio
async def test_invalid_site(config, event_loop):
    queue = asyncio.Queue()
    coll = Collector(config, Mock(), event_loop, queue)
    with pytest.raises(requests.exceptions.ConnectionError):
        _ = await coll.check('https://non.existant.domain.noooo', None)


@pytest.mark.asyncio
async def test_check_forever_valid(config, event_loop):
    """
    The :meth:`chweb.collector.Collector.check_forever` method runs an infinite
    loop, so we'll test if it's running for 2s and assume it's ok.
    """
    queue = asyncio.Queue()
    coll = Collector(config, Mock(), event_loop, queue)
    task = event_loop.create_task(coll.check_forever(config.sites[0]))
    await asyncio.sleep(2)
    assert not task.done()
    task.cancel()


@pytest.mark.asyncio
async def test_check_forever_invalid(config_invalid, event_loop):
    """
    The :meth:`chweb.collector.Collector.check_forever` method cancels the Task
    on error, so if we get an invalid site, the task should be done.
    """
    queue = asyncio.Queue()
    coll = Collector(config_invalid, Mock(), event_loop, queue)
    task = event_loop.create_task(coll.check_forever(config_invalid.sites[0]))
    await asyncio.sleep(1)
    assert task.done()

tests/test_producer.py (new file, 47 lines)

@@ -0,0 +1,47 @@
import asyncio

import aiokafka
from mock import Mock
import pytest

from chweb.collector import Producer
from chweb.models import Check


@pytest.mark.asyncio
async def test_producer_called(config, event_loop):
    queue = asyncio.Queue()
    producer = Producer(config, Mock(), event_loop, queue)
    check = Check()
    await queue.put(check)

    async def async_patch():
        pass
    Mock.__await__ = lambda x: async_patch().__await__()
    producer.producer = Mock()
    task = event_loop.create_task(producer.produce())
    await asyncio.sleep(0)
    producer.producer.send_and_wait.assert_called_with(
        config.kafka.topic, bytes(check.json().encode('utf-8')))
    task.cancel()


@pytest.mark.asyncio
async def test_producer_called_invalid(config, event_loop):
    queue = asyncio.Queue()
    producer = Producer(config, Mock(), event_loop, queue)
    check = Check()
    await queue.put('')

    async def async_patch():
        pass
    Mock.__await__ = lambda x: async_patch().__await__()
    producer.producer = Mock()
    task = event_loop.create_task(producer.produce())
    await asyncio.sleep(0)
    producer.logger.error.assert_called()
    assert task.done()
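
The Mock.__await__ patch above makes plain Mock attributes awaitable, so start(), send_and_wait() and stop() can be awaited inside produce(). On Python 3.8+ the standard library covers this; a sketch of the same test with unittest.mock.AsyncMock (an alternative, not what this commit uses):

    import asyncio
    from unittest.mock import AsyncMock, Mock

    import pytest

    from chweb.collector import Producer
    from chweb.models import Check


    @pytest.mark.asyncio
    async def test_producer_with_asyncmock(config, event_loop):
        queue = asyncio.Queue()
        producer = Producer(config, Mock(), event_loop, queue)
        check = Check()
        await queue.put(check)
        # AsyncMock makes every attribute call awaitable, so no global
        # Mock.__await__ patch is needed.
        producer.producer = AsyncMock()
        task = event_loop.create_task(producer.produce())
        await asyncio.sleep(0)
        producer.producer.send_and_wait.assert_called_with(
            config.kafka.topic, bytes(check.json().encode('utf-8')))
        task.cancel()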

tox.ini

@@ -5,6 +5,7 @@ envlist = clean,lint,py3,report
 deps =
     mock
     pytest
+    pytest-asyncio
     pytest-cov
     pytest-mock
 commands =