Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions cache_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
import sys
import traceback
from dataclasses import dataclass
from datetime import datetime

import rss2irc
from lib import CachedData
Expand Down Expand Up @@ -161,11 +162,35 @@ def main():
pct = round(value.count / item_cnt * 100, 1)
logger.info("%s:%s%%", key, pct)

print_data_source_info(logger, cache)
logger.info("---")
logger.info("All done.")
sys.exit(0)


def print_data_source_info(logger: logging.Logger, cache: CachedData) -> None:
    """Log a summary of every data source stored in the cache.

    For each data source, logs its URL, the date (YYYY-MM-DD) it was last
    used and its accumulated HTTP error count. A ``last_used_ts`` value
    which cannot be converted to a datetime is reported as ``error``
    instead of aborting the whole printout.

    :param logger: logger to emit the report through.
    :param cache: cache whose ``data_sources`` should be reported.
    """
    if not cache.data_sources:
        logger.debug("Cache has no data sources - nothing to printout.")
        return

    logger.info("---")
    for data_source in cache.data_sources.values():
        logger.info("Source URL: '%s'", data_source.url)
        try:
            last_used = datetime.fromtimestamp(data_source.last_used_ts)
            last_used_formatted = last_used.strftime("%Y-%m-%d")
        except (TypeError, ValueError, OverflowError, OSError):
            # fromtimestamp() raises TypeError for non-numeric input and
            # ValueError/OverflowError/OSError for out-of-range timestamps;
            # catch only those so genuine bugs still propagate.
            last_used_formatted = "error"
            logger.exception(
                "Failed to convert '%s' to datetime due to exception.",
                data_source.last_used_ts,
            )

        logger.info("Last used: '%s'", last_used_formatted)
        logger.info("Error count: '%s'", data_source.http_error_count)


def parse_args() -> argparse.Namespace:
"""Return parsed CLI args."""
parser = argparse.ArgumentParser()
Expand All @@ -186,6 +211,7 @@ def parse_args() -> argparse.Namespace:
)
args = parser.parse_args()
args.log_level = utils.calc_log_level(args.verbose)
args.log_level = min(args.log_level, logging.INFO)

return args

Expand Down
183 changes: 182 additions & 1 deletion tests/test_cache_stats.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
#!/usr/bin/env python3
"""Unit tests for cache_stats.py."""
import io
import logging
import os
import sys
import time
from datetime import datetime
from unittest.mock import patch

import cache_stats
Expand All @@ -13,7 +15,7 @@
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))


def test_main_ideal(fixture_cache_file):
def test_main_ideal(fixture_cache_file, caplog):
"""Simple run-through test."""
rss_url = "https://example.com/rss"

Expand All @@ -33,6 +35,19 @@ def test_main_ideal(fixture_cache_file):
source1.last_used_ts = int(time.time()) - 2 * 86400
rss2irc.write_cache(cache, fixture_cache_file)

expected_log_tuples = [
(
"cache_stats",
20,
"Number of items in cache '{:s}' is 7.".format(fixture_cache_file),
),
(
"cache_stats",
20,
"Source URL: '{:s}'".format(rss_url),
),
]

exception = None
args = [
"./cache_stats.py",
Expand All @@ -57,3 +72,169 @@ def test_main_ideal(fixture_cache_file):
assert isinstance(exception, SystemExit) is True
assert exception.code == 0
assert out.getvalue().strip() == ""
assert expected_log_tuples[0] in caplog.record_tuples
assert expected_log_tuples[1] in caplog.record_tuples


def test_print_data_source_info_no_sources(caplog):
    """Test print_data_source_info() when cache has no data sources."""
    test_logger = logging.getLogger("test_cache_stats")
    test_logger.setLevel(logging.DEBUG)
    empty_cache = CachedData()

    cache_stats.print_data_source_info(test_logger, empty_cache)

    # An empty cache yields exactly one DEBUG record and nothing else.
    assert caplog.record_tuples == [
        (
            "test_cache_stats",
            logging.DEBUG,
            "Cache has no data sources - nothing to printout.",
        ),
    ]


def test_print_data_source_info_one_source(caplog):
    """Test printout of one data source in print_data_source_info()."""
    rss_url = "https://example.com/rss"
    error_count = 20
    current_ts = int(time.time())
    dt_formatted = datetime.fromtimestamp(current_ts).strftime("%Y-%m-%d")

    # Every record for a printed source comes from the same logger at INFO.
    expected_messages = [
        "---",
        "Source URL: '{:s}'".format(rss_url),
        "Last used: '{:s}'".format(dt_formatted),
        "Error count: '{:d}'".format(error_count),
    ]
    expected_log_tuples = [
        ("test_cache_stats", logging.INFO, message)
        for message in expected_messages
    ]

    logger = logging.getLogger("test_cache_stats")
    logger.setLevel(logging.DEBUG)
    cache = CachedData()
    data_source = cache.get_source_by_url(rss_url)
    data_source.http_error_count = error_count
    data_source.last_used_ts = current_ts

    cache_stats.print_data_source_info(logger, cache)
    assert expected_log_tuples == caplog.record_tuples


def test_print_data_source_info_multiple_sources(caplog):
    """Test printout of multiple data sources in print_data_source_info().

    Expected records are built from the same variables that configure the
    cache, so changing a URL or error count cannot silently desynchronize
    the fixture from the expectation.
    """
    rss_url1 = "https://one.example.com/rss"
    rss_url2 = "https://two.example.com/rss"
    error_count1 = 20
    error_count2 = 0
    current_ts = int(time.time())
    current_dt = datetime.fromtimestamp(current_ts)
    dt_formatted = current_dt.strftime("%Y-%m-%d")

    # One leading separator, then a URL/last-used/error-count triplet per
    # source, in insertion order.
    expected_log_tuples = [
        (
            "test_cache_stats",
            logging.INFO,
            "---",
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Source URL: '{:s}'".format(rss_url1),
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Last used: '{:s}'".format(dt_formatted),
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Error count: '{:d}'".format(error_count1),
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Source URL: '{:s}'".format(rss_url2),
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Last used: '{:s}'".format(dt_formatted),
        ),
        (
            "test_cache_stats",
            logging.INFO,
            "Error count: '{:d}'".format(error_count2),
        ),
    ]

    logger = logging.getLogger("test_cache_stats")
    logger.setLevel(logging.DEBUG)
    cache = CachedData()
    source1 = cache.get_source_by_url(rss_url1)
    source1.http_error_count = error_count1
    source1.last_used_ts = current_ts
    source2 = cache.get_source_by_url(rss_url2)
    source2.http_error_count = error_count2
    source2.last_used_ts = current_ts

    cache_stats.print_data_source_info(logger, cache)
    assert expected_log_tuples == caplog.record_tuples


def test_print_data_source_info_invalid_last_used(caplog):
    """Test handling of invalid last_used value in print_data_source_info()."""
    rss_url = "https://example.com/rss"
    error_count = 20
    bogus_ts = "abcefg"

    expected_log_tuples = [
        ("test_cache_stats", logging.INFO, "---"),
        (
            "test_cache_stats",
            logging.INFO,
            "Source URL: '{:s}'".format(rss_url),
        ),
        # The conversion failure is logged at ERROR, then the printout
        # continues with the 'error' placeholder.
        (
            "test_cache_stats",
            logging.ERROR,
            "Failed to convert '{:s}' to datetime due to exception.".format(
                bogus_ts
            ),
        ),
        ("test_cache_stats", logging.INFO, "Last used: 'error'"),
        (
            "test_cache_stats",
            logging.INFO,
            "Error count: '{:d}'".format(error_count),
        ),
    ]

    logger = logging.getLogger("test_cache_stats")
    logger.setLevel(logging.DEBUG)
    cache = CachedData()
    data_source = cache.get_source_by_url(rss_url)
    data_source.http_error_count = error_count
    data_source.last_used_ts = bogus_ts

    cache_stats.print_data_source_info(logger, cache)
    assert expected_log_tuples == caplog.record_tuples