Explorar o código

Optimized log format

liqudong hai 2 anos
pai
achega
9fe139a906

+ 3 - 4
readme.md → README.md

@@ -1,9 +1,7 @@
 # 多参数监测终端源码仓库
 
 ## 程序打包exe命令
-pyinstaller -F -p C:\Users\wenge\AppData\Local\Programs\Python\Python38\Lib\site-packages  gateway.spec
-pyinstaller -F -p D:\DevTools\Python38\Lib\site-packages  gateway.spec
-pyinstaller -F -p D:DevTools\Python38\Lib\site-packages gateway.py
+pyinstaller -F -p E:\WorkSpace\DataCollectionDevice\venv\Lib\site-packages gateway.py
 ## 更新日志
 1. 第一次提交。时间:2021/07/19 15:23:21
 2. 修改一些代码bug,删除不必要注释。时间:2021/08/19 13:35:50
@@ -17,4 +15,5 @@ pyinstaller -F -p D:DevTools\Python38\Lib\site-packages gateway.py
 10. 更新README文件。时间:2022/03/09 16:28:15
 11. 日志采用sanic日志系统输出。时间:2022/04/18 16:10:40
 12. 根据经海6号测试,优化水质传感器连接器、单点流速仪解析器,增加创建数据库标准sql文件。 时间:2022/07/19
-13. 根据480调试,adcp解析器增加点位:温度。时间:2022/08/10
+13. 根据480调试,adcp解析器增加点位:温度。时间:2022/08/10
+14. 增加新的国产单点流速仪的解析器。时间:2022/09/24

+ 2 - 1
apis/cancel_alarm.py

@@ -2,6 +2,7 @@ from abstract_api import AbstractApi
 from apis.operate_mysql import OperateMysql
 import traceback
 
+
 class Cancel_alarm(AbstractApi):
 
     def operation(self, request):
@@ -13,4 +14,4 @@ class Cancel_alarm(AbstractApi):
             return "告警关闭成功"
         except:
             print(traceback.format_exc())
-            return "告警关闭失败"
+            return "告警关闭失败"

+ 7 - 5
apis/maximum_wind_speed.py

@@ -2,12 +2,14 @@ from abstract_api import AbstractApi
 from apis.operate_mysql import OperateMysql
 import datetime
 
+
 class Maximum_wind_speed(AbstractApi):
     """返回最大风速(给定时段内的每隔10分钟的平均风速中的最大值)"""
+
     def operation(self, request):
         operate_mysql = OperateMysql()
-        wind_direction = request['basic_datas'][0]      # 风向,如c1
-        wind_speed = request['basic_datas'][1]          # 风速,如c2
+        wind_direction = request['basic_datas'][0]  # 风向,如c1
+        wind_speed = request['basic_datas'][1]  # 风速,如c2
 
         res_wind_speed = operate_mysql.return_result(request, wind_speed.replace('c', ''))
         begin_time = res_wind_speed['begin_time']
@@ -15,9 +17,9 @@ class Maximum_wind_speed(AbstractApi):
 
         count = (int(request['end_time']) - int(request['begin_time'])) / 600
         max_speed = 0
-        status = 0      # 状态,用于判断所查询的时间段内是否有数据
+        status = 0  # 状态,用于判断所查询的时间段内是否有数据
         while count > 0:
-            time1 = begin_time+datetime.timedelta(minutes=10)
+            time1 = begin_time + datetime.timedelta(minutes=10)
 
             sql1 = "SELECT avg(%s) FROM %s WHERE times >= \'%s\' and times < \'%s\';" % (wind_speed, res_wind_speed['table_name'], begin_time, time1)
             res1 = operate_mysql.execute_sql(sql1)
@@ -41,4 +43,4 @@ class Maximum_wind_speed(AbstractApi):
 
             return res
         else:
-            return None
+            return None

+ 3 - 1
apis/min_from_database.py

@@ -1,6 +1,7 @@
 from abstract_api import AbstractApi
 from apis.operate_mysql import OperateMysql
 
+
 class Min_from_database(AbstractApi):
 
     def operation(self, request):
@@ -8,7 +9,8 @@ class Min_from_database(AbstractApi):
         basic_data = request['basic_data']
         serial_number = basic_data.replace('c', '')
         res1 = operate_mysql.return_result(request, serial_number)
-        sql1 = "SELECT %s, CAST(times AS CHAR) as times FROM %s WHERE times > \'%s\' and times < \'%s\' ORDER BY %s limit 1;" % (basic_data, res1['table_name'], res1['begin_time'], res1['end_time'], basic_data)
+        sql1 = "SELECT %s, CAST(times AS CHAR) as times FROM %s WHERE times > \'%s\' and times < \'%s\' ORDER BY %s limit 1;" % (
+        basic_data, res1['table_name'], res1['begin_time'], res1['end_time'], basic_data)
         res2 = operate_mysql.execute_sql(sql1)
         if len(res2) != 0:
             res = {request['keys'][0]: res2[0][basic_data], request['keys'][1]: res2[0]['times']}

+ 1 - 1
connectors/shuizhi_tcp_connector.py

@@ -9,7 +9,7 @@ import struct
 import socket
 from connector import Connector
 from event_storage import EventStorage
-from logging_config import logger
+from logging_config import shuizhi_file_logger as logger
 from binascii import *
 from crcmod import *
 

+ 3 - 5
connectors/tcp_connector.py

@@ -115,7 +115,7 @@ class TcpConnector(Connector, threading.Thread):
             recv_data = self.__sock.recv(self.__size)
             return recv_data
         except Exception as e:
-            logger.error(f"{e}")
+            logger.error(f"{self.name}: {e}")
 
     def command_polling(self, command_list=None):
         if command_list:
@@ -124,7 +124,6 @@ class TcpConnector(Connector, threading.Thread):
                     command_item = command_list[i]
                     recv_data = self.exec_command(command=command_item)
                     format_data = self.__converter.convert(self.__data_point_config, recv_data)
-                    logger.info(f'{self.name}:{format_data}')
                     if format_data and format_data != "error" and format_data != 'pass':
                         self.__storager.real_time_data_storage(format_data)
             except Exception as e:
@@ -135,12 +134,11 @@ class TcpConnector(Connector, threading.Thread):
             try:
                 recv_data = self.__sock.recv(self.__size)
                 format_data = self.__converter.convert(self.__data_point_config, recv_data)
-                # logger.info(f'{self.name}: {format_data}')
                 if format_data and format_data != "error" and format_data != 'pass':
                     self.__storager.real_time_data_storage(format_data)
             except socket.timeout as e:
-                logger.error(f"{e}")
+                logger.error(f"{self.name}: {e}")
             except Exception as e:
-                logger.error(f"{e}")
+                logger.error(f"{self.name}: {e}")
                 time.sleep(5)
                 self.__reconnect()

+ 1 - 1
converters/adcp_converter.py

@@ -2,7 +2,7 @@
 @Date  :2021/5/21/00219:10:57
 @Desc  :
 """
-from logging_config import logger
+from logging_config import adcp_file_logger as logger
 from converter import Converter
 
 

+ 1 - 1
converters/dandian_converter.py

@@ -6,7 +6,7 @@
 """
 import json
 import re
-from logging_config import logger
+from logging_config import dandian_file_logger as logger
 from converter import Converter
 
 

+ 1 - 1
converters/nmea0183_converter.py

@@ -2,7 +2,7 @@
 @Date  :2021/5/21/00219:10:57
 @Desc  :
 """
-from logging_config import logger
+from logging_config import sm140_file_logger as logger
 import binascii
 
 from converter import Converter

+ 1 - 1
converters/td266_converter.py

@@ -1,6 +1,6 @@
 import json
 import re
-from logging_config import logger
+from logging_config import td266_file_logger as logger
 
 from converter import Converter
 

+ 1 - 1
converters/wxt536_converter.py

@@ -1,4 +1,4 @@
-from logging_config import logger
+from logging_config import wxt536_file_logger as logger
 import re
 from converter import Converter
 

+ 16 - 3
gateway.py

@@ -2,7 +2,7 @@ import asyncio
 import datetime
 import sys
 import time
-
+import os
 import wmi
 from sanic import Sanic
 from sanic_cors import CORS, cross_origin
@@ -18,8 +18,21 @@ from hard_disk_storage import HardDiskStorage
 from api_context import ApiContext
 from AES_crypt import decrypt, encrypt
 from logging_config import LOGGING_CONFIG
-
-app = Sanic(__name__, log_config=LOGGING_CONFIG)
+import logging.config
+
+# logging config
+logging.config.dictConfig(LOGGING_CONFIG)
+handlers = LOGGING_CONFIG['handlers']
+for handler in handlers:
+    item = handlers[handler]
+    if 'filename' in item:
+        filename = item['filename']
+        dirname = os.path.dirname(filename)
+        if not os.path.exists(dirname):
+            os.makedirs(dirname)
+# --------------------------
+
+app = Sanic(__name__)
 CORS(app)
 
 

+ 100 - 47
logging_config.py

@@ -11,81 +11,134 @@ LOGGING_CONFIG = dict(
     version=1,
     disable_existing_loggers=False,
     loggers={
-        "sanic.root": {
-            "level": "INFO",  # 默认DEBUG
-            "handlers": ["console"]
+        # 新曾自定义日志,用于数据采集程序
+        "console": {
+            "level": "DEBUG",
+            "handlers": ["console", "connector_file"],
+            "propagate": True,
+            "qualname": "console.debug",
         },
-        "sanic.error": {
-            "level": "INFO",
-            "handlers": ["error_console"],
+        "sm140_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "sm140_file"],
             "propagate": True,
-            "qualname": "sanic.error",
+            "qualname": "sm140.debug",
         },
-        "sanic.access": {
-            "level": "INFO",  # 默认DEBUG
-            "handlers": ["access_console"],
+        "wxt536_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "wxt536_file"],
             "propagate": True,
-            "qualname": "sanic.access",
+            "qualname": "wxt536.debug",
         },
-        # 新曾自定义日志,用于数据采集程序
-        "sanic.my": {
-            "level": "INFO",
-            "handlers": ["my_console", "file_console"],
+        "adcp_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "adcp_file"],
             "propagate": True,
-            "qualname": "my.debug",
+            "qualname": "adcp.debug",
+        },
+        "dandian_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "dandian_file"],
+            "propagate": True,
+            "qualname": "dandian.debug",
+        },
+        "td266_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "td266_file"],
+            "propagate": True,
+            "qualname": "td266.debug",
+        },
+        "shuizhi_file": {
+            "level": "DEBUG",
+            "handlers": ["console", "shuizhi_file"],
+            "propagate": True,
+            "qualname": "shuizhi.debug",
         },
     },
     handlers={
+        # 数据采集程序控制台输出handler
         "console": {
             "class": "logging.StreamHandler",
             "formatter": "generic",
             "stream": sys.stdout,
         },
-        "error_console": {
-            "class": "logging.StreamHandler",
+        "connector_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/connector_log/connector_file.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
             "formatter": "generic",
-            "stream": sys.stderr,
+            "backupCount": 20,
+            "encoding": "utf-8"
         },
-        "access_console": {
-            "class": "logging.StreamHandler",
-            "formatter": "access",
-            "stream": sys.stdout,
+        "sm140_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/sm140_log/sm140_file.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
+            "formatter": "generic",
+            "backupCount": 20,
+            "encoding": "utf-8"
         },
-        # 数据采集程序控制台输出handler
-        "my_console": {
-            "class": "logging.StreamHandler",
-            "formatter": "my",
-            "stream": sys.stdout,
+        "wxt536_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/wxt536_log/wxt536_log.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
+            "formatter": "generic",
+            "backupCount": 20,
+            "encoding": "utf-8"
         },
-        # 数据采集程序文件输出handler
-        "file_console": {
+        "adcp_file": {
             'class': 'logging.handlers.RotatingFileHandler',
-            'filename': 'app.log',
-            'maxBytes': 200 * 1024,
+            'filename': 'log/adcp_log/adcp_log.log',
+            'maxBytes': 10 * 1024 * 1024,
             'delay': True,
-            "formatter": "my",
-            "backupCount": 10,
+            "formatter": "generic",
+            "backupCount": 20,
+            "encoding": "utf-8"
+        },
+        "dandian_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/dandian_log/dandian_log.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
+            "formatter": "generic",
+            "backupCount": 20,
+            "encoding": "utf-8"
+        },
+        "td266_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/td266_log/td266_log.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
+            "formatter": "generic",
+            "backupCount": 20,
+            "encoding": "utf-8"
+        },
+        "shuizhi_file": {
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': 'log/shuizhi_log/shuizhi_log.log',
+            'maxBytes': 10 * 1024 * 1024,
+            'delay': True,
+            "formatter": "generic",
+            "backupCount": 20,
             "encoding": "utf-8"
         },
     },
     formatters={
-        "generic": {
-            "format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s",
-            "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
-            "class": "logging.Formatter",
-        },
         # 自定义文件格式化器
-        "my": {
+        "generic": {
             "format": "%(asctime)s [%(filename)s:%(lineno)d] [%(levelname)s] %(message)s",
             "datefmt": "[%Y-%m-%d %H:%M:%S]",
             "class": "logging.Formatter",
         },
-        "access": {
-            "format": "%(asctime)s - (%(name)s) [%(levelname)s][%(host)s]: "
-                      + "%(request)s %(message)s %(status)d %(byte)d",
-            "datefmt": "[%Y-%m-%d %H:%M:%S %z]",
-            "class": "logging.Formatter",
-        },
     },
 )
-logger = logging.getLogger("sanic.my")
+logger = logging.getLogger("console")
+sm140_file_logger = logging.getLogger("sm140_file")
+wxt536_file_logger = logging.getLogger("wxt536_file")
+adcp_file_logger = logging.getLogger("adcp_file")
+dandian_file_logger = logging.getLogger("dandian_file")
+td266_file_logger = logging.getLogger("td266_file")
+shuizhi_file_logger = logging.getLogger("shuizhi_file")