Commit eeb82292 authored by Olivier Kaufmann

Implements the generic _process_command method. Cleans up the code following PEP 8.

Showing with 219 additions and 179 deletions
@@ -5,11 +5,10 @@ import zipfile
 class CompressedSizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
-    """
-    Handler for logging to a set of files, which switches from one file
+    """Handler for logging to a set of files, which switches from one file
     to the next when the current file reaches a certain size, or at certain
-    timed intervals
-    """
+    timed intervals"""
     def __init__(self, filename, max_bytes=0, backup_count=0, encoding=None,
                  delay=0, when='h', interval=1, utc=False, zip_mode=zipfile.ZIP_DEFLATED):
         handlers.TimedRotatingFileHandler.__init__(self, filename=filename, when=when, interval=interval, utc=utc,
@@ -18,11 +17,10 @@ class CompressedSizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
         self.zip_mode = zip_mode
     def shouldRollover(self, record):
-        """
-        Determine if rollover should occur.
-        Basically, see if the supplied record would cause the file to exceed
-        the size limit we have.
-        """
+        """Determines if rollover should occur.
+        Basically, sees if the supplied record would cause the file to exceed
+        the size limit we have."""
         if self.stream is None: # delay was set...
             self.stream = self._open()
         if self.maxBytes > 0: # are we rolling over?
@@ -36,6 +34,8 @@ class CompressedSizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
         return False
     def find_last_rotated_file(self):
+        """Looks for the last rotated file and returns it"""
         dir_name, base_name = os.path.split(self.baseFilename)
         file_names = os.listdir(dir_name)
         result = []
@@ -47,6 +47,8 @@ class CompressedSizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
         return os.path.join(dir_name, result[0])
     def doRollover(self):
+        """Does the roll-over by compressing the current file then deleting the uncompressed file"""
         super(CompressedSizedTimedRotatingFileHandler, self).doRollover()
         dfn = self.find_last_rotated_file()
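For context, a minimal usage sketch of this handler based only on the constructor signature visible above; judging from the import further down in this commit, the module is compressed_sized_timed_rotating_logger, and the keyword values below are illustrative assumptions, not taken from the source:

    # Hedged usage sketch; keyword values are illustrative, not from the commit.
    import logging
    from compressed_sized_timed_rotating_logger import CompressedSizedTimedRotatingFileHandler

    logger = logging.getLogger('exec_logger')
    handler = CompressedSizedTimedRotatingFileHandler('logs/exec.log',
                                                      max_bytes=262144,      # roll over past ~256 KiB...
                                                      backup_count=30,       # assumed retention count
                                                      when='d', interval=1)  # ...or daily, whichever comes first
    handler.setFormatter(logging.Formatter('%(asctime)s | %(levelname)s: %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logger.info('rotation test')  # rolled-over files are zip-compressed by doRollover()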
@@ -2,8 +2,8 @@ import logging
 from paho.mqtt.client import MQTTv31
-mqtt_broker = 'localhost'
-logging_suffix = '_interactive'
+mqtt_broker = 'mg3d-dev.umons.ac.be' # 'localhost'
+logging_suffix = ''
 # OhmPi configuration
 OHMPI_CONFIG = {
     'id': '0001', # Unique identifier of the OhmPi board (string)
@@ -21,13 +21,9 @@ OHMPI_CONFIG = {
     'board_version': '22.10'
 } # TODO: add a dictionary with INA models and associated gain values
-# CONTROL_CONFIG = {
-#     'tcp_port': 5555,
-#     'interface': 'mqtt_interface.py' # 'http_interface'
-# }
 # Execution logging configuration
 EXEC_LOGGING_CONFIG = {
-    'logging_level': logging.DEBUG,
+    'logging_level': logging.INFO,
     'logging_to_console': True,
     'file_name': f'exec{logging_suffix}.log',
     'max_bytes': 262144,
@@ -6,9 +6,11 @@ import logging
 from mqtt_logger import MQTTHandler
 from compressed_sized_timed_rotating_logger import CompressedSizedTimedRotatingFileHandler
 import sys
+from termcolor import colored
 def setup_loggers(mqtt=True):
+    msg = ''
     # Message logging setup
     log_path = path.join(path.dirname(__file__), 'logs')
     if not path.isdir(log_path):
@@ -37,13 +39,12 @@ def setup_loggers(mqtt=True):
                                                           interval=EXEC_LOGGING_CONFIG['interval'])
     exec_formatter = logging.Formatter(log_format)
     exec_formatter.converter = gmtime
-    exec_formatter.datefmt = '%Y/%m/%d %H:%M:%S UTC'
+    exec_formatter.datefmt = '%Y-%m-%d %H:%M:%S UTC'
     exec_handler.setFormatter(exec_formatter)
     exec_logger.addHandler(exec_handler)
     exec_logger.setLevel(EXEC_LOGGING_CONFIG['logging_level'])
     if logging_to_console:
-        print(f'logging exec ? {logging_to_console}') # TODO: delete this line
         console_exec_handler = logging.StreamHandler(sys.stdout)
         console_exec_handler.setLevel(EXEC_LOGGING_CONFIG['logging_level'])
         console_exec_handler.setFormatter(exec_formatter)
@@ -52,14 +53,16 @@ def setup_loggers(mqtt=True):
     if mqtt:
         mqtt_settings = MQTT_LOGGING_CONFIG.copy()
         [mqtt_settings.pop(i) for i in ['client_id', 'exec_topic', 'data_topic', 'soh_topic']]
-        mqtt_settings.update({'topic':MQTT_LOGGING_CONFIG['exec_topic']})
+        mqtt_settings.update({'topic': MQTT_LOGGING_CONFIG['exec_topic']})
         # TODO: handle the case of MQTT broker down or temporarily unavailable
         try:
             mqtt_exec_handler = MQTTHandler(**mqtt_settings)
             mqtt_exec_handler.setLevel(EXEC_LOGGING_CONFIG['logging_level'])
             mqtt_exec_handler.setFormatter(exec_formatter)
             exec_logger.addHandler(mqtt_exec_handler)
-        except:
+            msg += colored(f"\n\u2611 Publishes execution as {MQTT_LOGGING_CONFIG['exec_topic']} topic on the {MQTT_LOGGING_CONFIG['hostname']} broker", 'blue')
+        except Exception as e:
+            msg += colored(f'\nWarning: Unable to connect to exec topic on broker\n{e}', 'yellow')
             mqtt = False
     # Set data logging format and level
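The MQTTHandler attached above comes from the project's mqtt_logger module, which is not part of this diff. As a rough, hypothetical sketch of what an MQTT-publishing logging handler generally looks like with paho-mqtt (the real class may differ in name, arguments and behaviour; note that mqtt_settings above forwards keys such as 'hostname' and 'topic' as keyword arguments):

    # Illustrative sketch only; not the project's MQTTHandler implementation.
    import logging
    import paho.mqtt.publish as publish

    class SketchMQTTHandler(logging.Handler):
        def __init__(self, hostname='localhost', topic='logs', qos=0, **kwargs):
            super().__init__()
            self.hostname = hostname  # broker to publish to
            self.topic = topic        # e.g. the exec_topic or data_topic from MQTT_LOGGING_CONFIG
            self.qos = qos

        def emit(self, record):
            try:
                # Publish each formatted log record as a single MQTT message.
                publish.single(self.topic, self.format(record),
                               qos=self.qos, hostname=self.hostname)
            except Exception:
                self.handleError(record)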
@@ -88,17 +91,22 @@ def setup_loggers(mqtt=True):
         mqtt_settings = MQTT_LOGGING_CONFIG.copy()
         [mqtt_settings.pop(i) for i in ['client_id', 'exec_topic', 'data_topic', 'soh_topic']]
         mqtt_settings.update({'topic': MQTT_LOGGING_CONFIG['data_topic']})
-        mqtt_data_handler = MQTTHandler(**mqtt_settings)
-        mqtt_data_handler.setLevel(DATA_LOGGING_CONFIG['logging_level'])
-        mqtt_data_handler.setFormatter(data_formatter)
-        data_logger.addHandler(mqtt_data_handler)
+        try:
+            mqtt_data_handler = MQTTHandler(**mqtt_settings)
+            mqtt_data_handler.setLevel(DATA_LOGGING_CONFIG['logging_level'])
+            mqtt_data_handler.setFormatter(data_formatter)
+            data_logger.addHandler(mqtt_data_handler)
+            msg += colored(f"\n\u2611 Publishes data as {MQTT_LOGGING_CONFIG['data_topic']} topic on the {MQTT_LOGGING_CONFIG['hostname']} broker", 'blue')
+        except Exception as e:
+            msg += colored(f'\nWarning: Unable to connect to data topic on broker\n{e}', 'yellow')
+            mqtt = False
     try:
         init_logging(exec_logger, data_logger, EXEC_LOGGING_CONFIG['logging_level'], log_path, data_log_filename)
     except Exception as err:
-        print(f'ERROR: Could not initialize logging!\n{err}')
+        msg += colored(f'\n\u26A0 ERROR: Could not initialize logging!\n{err}', 'red')
     finally:
-        return exec_logger, exec_log_filename, data_logger, data_log_filename, EXEC_LOGGING_CONFIG['logging_level']
+        return exec_logger, exec_log_filename, data_logger, data_log_filename, EXEC_LOGGING_CONFIG['logging_level'], msg
 def init_logging(exec_logger, data_logger, exec_logging_level, log_path, data_log_filename):
@@ -110,7 +118,7 @@ def init_logging(exec_logger, data_logger, exec_logging_level, log_path, data_log_filename):
     exec_logger.info('*** NEW SESSION STARTING ***')
     exec_logger.info('****************************')
     exec_logger.info('')
-    exec_logger.info('Logging level: %s' % exec_logging_level)
+    exec_logger.debug('Logging level: %s' % exec_logging_level)
     try:
         st = statvfs('.')
         available_space = st.f_bavail * st.f_frsize / 1024 / 1024
@@ -118,15 +126,14 @@ def init_logging(exec_logger, data_logger, exec_logging_level, log_path, data_log_filename):
     except Exception as e:
         exec_logger.debug('Unable to get remaining disk space: {e}')
     exec_logger.info('Saving data log to ' + data_log_filename)
-    exec_logger.info('OhmPi settings:')
-    # TODO Add OhmPi settings
     config_dict = {'execution logging configuration': json.dumps(EXEC_LOGGING_CONFIG, indent=4),
                    'data logging configuration': json.dumps(DATA_LOGGING_CONFIG, indent=4),
                    'mqtt logging configuration': json.dumps(MQTT_LOGGING_CONFIG, indent=4),
                    'mqtt control configuration': json.dumps(MQTT_CONTROL_CONFIG, indent=4)}
     for k, v in config_dict.items():
-        exec_logger.info(f'{k}:\n{v}')
-    exec_logger.info('')
-    exec_logger.info(f'init_logging_status: {init_logging_status}')
+        exec_logger.debug(f'{k}:\n{v}')
+    exec_logger.debug('')
+    if not init_logging_status:
+        exec_logger.warning(f'Logging initialisation has encountered a problem.')
     data_logger.info('Starting_session')
     return init_logging_status
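With this change, setup_loggers() also returns the accumulated colored status message, so a caller (not shown in this diff; variable names assumed) would now unpack and display it alongside the loggers:

    # Assumed call site, reflecting the new return signature of setup_loggers().
    exec_logger, exec_log_filename, data_logger, data_log_filename, exec_logging_level, msg = setup_loggers(mqtt=True)
    print(msg)  # check marks / warnings collected while attaching the MQTT handlers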
This diff is collapsed.
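The collapsed diff presumably contains the generic _process_command mentioned in the commit message; its implementation is not visible on this page. Purely as a hypothetical sketch of what a generic command dispatcher over decoded control messages could look like (all names, payload keys and error handling below are assumptions, not taken from the source):

    # Hypothetical sketch of a generic command dispatcher; not the actual OhmPi code.
    import json

    class ControllerSketch:
        def _process_command(self, message):
            """Decodes a JSON command message and dispatches it to the method of the same name."""
            try:
                decoded = json.loads(message)
                cmd = decoded.get('cmd')            # e.g. 'run_measurement' (assumed key)
                kwargs = decoded.get('kwargs', {})  # optional keyword arguments (assumed key)
            except (json.JSONDecodeError, AttributeError, TypeError) as e:
                print(f'Unable to decode command {message}: {e}')  # placeholder for exec_logger
                return
            handler = getattr(self, cmd, None) if isinstance(cmd, str) else None
            if callable(handler):
                handler(**kwargs)
            else:
                print(f'Unknown command: {cmd}')  # placeholder for exec_logger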
@@ -2,4 +2,3 @@ numpy
 paho-mqtt
 termcolor
 pandas
-pyzmq