From 78d2142c51c3c4b694efd7582ea3c4d12d300f65 Mon Sep 17 00:00:00 2001
From: Daniel Lysfjord
Date: Sun, 31 Mar 2024 18:02:45 +0200
Subject: [PATCH] Fixed some comments, deleted some commented lines, added some
 notes

---
 routerstats_collector.py | 24 +++++-------------------
 1 file changed, 5 insertions(+), 19 deletions(-)

diff --git a/routerstats_collector.py b/routerstats_collector.py
index f79a8ce..e65b73c 100755
--- a/routerstats_collector.py
+++ b/routerstats_collector.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python3
-#from socketserver import UnixStreamServer, BaseRequestHandler
 import os
 import io
 from multiprocessing import Process, Queue
 
@@ -16,11 +15,9 @@
 import numpy as np
 
 logging.basicConfig(
     format='%(asctime)s %(funcName)20s %(levelname)-8s %(message)s',
-    level=logging.DEBUG,
+    level=logging.INFO,
     datefmt='%Y-%m-%d %H:%M:%S')
 
-#DATASTORE = ['timestamp', 'zone', 'SRC', 'DST', 'SPT', 'DPT']
-
 def datathingy(query_queue, query_response, collector_queue, signal_queue):
     '''Handle the datastore. Updating every N secs, fetching anything from the collector queue, and responding to the query queue'''
@@ -31,13 +28,6 @@ def datathingy(query_queue, query_response, collector_queue, signal_queue):
     # As the request is most important, we spin on that with a N ms timeout? Then fetch wathever our logparser got us, rince and repeat..
     # Not the most elegant solution, but not that horrible:)
     try:
-        #For the server query
-        #We support the following requests:
-        # loc-net, net_dnat
-        #The following arguments can be passed:
-        # 'all' -> number of entries of specified type since program was started
-        # 'since' -> SQL style date query...
-        # N (minute|hour|day)
         request_query = query_queue.get(timeout=0.1)
         req = request_query.split("\n")
         request = req[0]
@@ -63,6 +53,7 @@ def datathingy(query_queue, query_response, collector_queue, signal_queue):
                 next
             except IndexError:
                 pass
+            #If someone ever knows how this should be done.....
             if argument[0] == 'minutes':
                 to_subtract = timedelta(minutes = subtract_argument)
             elif argument[0] == 'hours':
@@ -143,7 +134,7 @@ def parse_line(input_line: str) -> dict:
             key, val = stringy.split('=')
             retval[str(key)] = [str(val)]
         except ValueError:
-            #this might not match up as expected:)
+            #Log entries are not perfectly symmetrical. We don't care
            pass
     retval['timestamp'] = datetime.now()
     logging.debug('Parsed line to ' + str(retval))
@@ -153,10 +144,6 @@
 class RequestHandler(server.SimpleHTTPRequestHandler):
     '''Subclassing to change behaviour'''
 
-    def set_queues(self, queue_in, queue_out):
-        self.queue_in = queue_in
-        self.queue_out = queue_out
-
     def send_head(self):
         self.send_response(HTTPStatus.OK)
         self.send_header("Content-type", "text/html")
@@ -230,7 +217,7 @@ def serve_http(httpd_port: int, queue_in, queue_out):
     with server.ThreadingHTTPServer(("", httpd_port), RequestHandler) as httpd:
         httpd.queue_in = queue_in
         httpd.queue_out = queue_out
-        logging.debug("serving at port %s", httpd_port)
+        logging.info("serving at port %s", httpd_port)
         httpd.serve_forever()
 
 def main():
@@ -242,7 +229,7 @@ def main():
     collector_queue = Queue()
     signal_queue = Queue()
 
     started_processes = []
 
     datastore_process = Process(target=datathingy,
                                 args=(query_queue, query_response, collector_queue, signal_queue))
@@ -257,8 +244,7 @@ def main():
     http_process.start()
     started_processes.append(http_process)
 
-#    serve_http(httpd_port, query_response, query_queue)
-
+    #No idea how to manage this better?
     while True:
         try:
             for p in started_processes: