Modify logging
This commit is contained in:
parent 3715268329
commit df97c9815d
61
main.py
@@ -1,16 +1,13 @@
# uvicorn main:app --host=127.0.0.1 --port=8000 --reload
from config.config import settings
from fastapi import Depends, FastAPI, BackgroundTasks, Request
from dependencies import get_query_token, get_token_header
# from routers import items, users
# from internal import admin
from dependencies import get_token_header
from ops.common import common
from ops.deploy import deploy
from typing import Optional
from scripts.common.redis import get_redis_pool
import pdb
from fastapi.logger import logger as fastapi_logger
import logging
from scripts.logger import logger

tags_metadata = [
    # {
@@ -28,60 +25,11 @@ tags_metadata = [
]


app_log = None
root = None

LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        "default": {
            "()": "uvicorn.logging.DefaultFormatter",
            "fmt": "%(asctime)s %(levelname)s %(name)s %(processName)s.%(threadName)s[%(process)d.%(thread)d]: %(message)s",
            "use_colors": None,
        },
        "access": {
            "()": "uvicorn.logging.AccessFormatter",
            "fmt": '%(asctime)s %(levelname)s %(name)s %(processName)s.%(threadName)s[%(process)d.%(thread)d]: %(client_addr)s - "%(request_line)s" %(status_code)s',
        },
    },
    "handlers": {
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
        "access": {
            "formatter": "access",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        "": {"handlers": ["default"], "level": "INFO"},
        "uvicorn.error": {"level": "INFO"},
        "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
    },
}


def init_log():
    if settings.is_debug:
        LOGGING_CONFIG["loggers"] = {
            "": {"handlers": ["default"], "level": "DEBUG"},
            "uvicorn.error": {"level": "DEBUG"},
            "uvicorn.access": {"handlers": ["access"], "level": "DEBUG", "propagate": False},
        }
    return LOGGING_CONFIG


def create_app():
    application = FastAPI(dependencies=[Depends(get_token_header)],
                          openapi_tags=tags_metadata)
    application.include_router(common.router, prefix="/common")
    application.include_router(deploy.router, prefix="/deploy")
    init_log()

    return application
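For reference, LOGGING_CONFIG above (returned by init_log()) follows the standard library dictConfig schema that uvicorn's own defaults use. A minimal sketch of applying it directly; this call site is not part of the commit and is only an assumption:

# Sketch (assumed call site): install the config produced by init_log()
# through the stdlib dictConfig API instead of handing it to uvicorn.run().
import logging.config

logging.config.dictConfig(init_log())
logging.getLogger(__name__).info("logging configured")  # routed to the "default" handler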
@@ -103,7 +51,7 @@ async def shutdown_event():
async def root(request: Request):
    redis_client = request.app.state.redis
    keys = await redis_client.get("online_devices")
    app_log.info("get keys was {0} with {1}".format(keys, request.url))
    logger.info("get keys was {0} with {1}".format(keys, request.url))
    if keys:
        return {"message": "Hello Bigger Applications! {}".format(keys)}
    else:
@@ -128,6 +76,7 @@ async def send_notification(request: Request, email: str, background_tasks: Back
    # pdb.set_trace()
    redis_client = request.app.state.redis
    keys = await redis_client.get("online_devices")
    logger.info("get keys = {}".format(keys))
    # print(keys)
    message = f"message to {email} \n"
    background_tasks.add_task(write_log, message)
@@ -137,4 +86,4 @@ async def send_notification(request: Request, email: str, background_tasks: Back
if __name__ == '__main__':
    import uvicorn
    uvicorn.run(app='main:app', host="127.0.0.1",
                port=8010, reload=True, debug=True, log_config=LOGGING_CONFIG)
                port=8010, reload=True, debug=True)
@@ -4,11 +4,12 @@ from starlette.requests import Request
from starlette.responses import JSONResponse
from config.config import settings
from scripts.common.deploy import ProjectInfo, deploy_service

from uuid import uuid1
router = APIRouter()


@router.post("/server")
async def simple_send(project: ProjectInfo, tag: str, background_tasks: BackgroundTasks) -> JSONResponse:
    background_tasks.add_task(deploy_service, project, tag)
    return JSONResponse(status_code=200, content={"message": "{0} {1} deploy success!".format(project.name, tag)})
    uuid = uuid1
    background_tasks.add_task(deploy_service, project, tag, uuid)
    return JSONResponse(status_code=200, content={"message": "{0} {1} deploy success,uuid={2}!".format(project.name, tag, uuid)})
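The /server route above takes the ProjectInfo payload in the request body and tag as a query parameter; the router is mounted under /deploy in main.py, which serves on 127.0.0.1:8010. A hedged client sketch, not part of the commit: the field names mirror the attributes deploy_service reads, but the values and the X-Token header behind get_token_header are assumptions.

# Sketch: calling the deploy endpoint from a client (all values are examples).
import httpx

payload = {
    "name": "demo",                               # used in the response message
    "base_dir": "/opt/builds",                    # read as project.base_dir
    "git_url": "https://example.com/demo.git",    # read as project.git_url
    "pre_script": "echo pre-deploy",              # read as project.pre_script
    "host": "10.0.0.10",                          # read as project.host
}
resp = httpx.post(
    "http://127.0.0.1:8010/deploy/server",
    params={"tag": "v1.0.0"},
    json=payload,
    headers={"X-Token": "example-token"},  # assumed header checked by get_token_header
)
print(resp.json())  # expected: {"message": "demo v1.0.0 deploy success..."}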
@@ -2,8 +2,10 @@ import re
from pydantic import BaseModel
from scripts.common.cvs import GitRepository
import os
from fastapi import HTTPException
from fastapi import HTTPException, Request
from . import run_cmd
from .ansible import AnsibleAPI, write_host
from scripts.logger import logger


class ProjectInfo(BaseModel):
@@ -15,7 +17,9 @@ class ProjectInfo(BaseModel):
    name: str


async def deploy_service(project: ProjectInfo, tag: str):
async def deploy_service(request: Request, project: ProjectInfo, tag: str, uuid: str):
    client = request.app.state.redis
    dd = await client.set('x_token')
    # git clone
    local_path = project.base_dir+'/'+tag
    #print(local_path, project.git_url, tag)
@@ -25,5 +29,11 @@ async def deploy_service(project: ProjectInfo, tag: str):
    # run pre scripts
    run_cmd(project.pre_script)
    # find tag files
    pass
    hosts = write_host(project.host)
    an = AnsibleAPI(hosts)
    run_data = {
        "desc": "a"
    }
    run_out = an.run_playbook('test,yml', run_data)
    logger.info(run_out)
    # run ansible-play deloy tag files && run start script in remote
52
scripts/log.py
Normal file
@@ -0,0 +1,52 @@
# pip install loguru
import logging
from types import FrameType
from typing import cast
import os
import sys
from loguru import logger

BASE_DIR = os.path.abspath('.')


class InterceptHandler(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:  # pragma: no cover
        # Get corresponding Loguru level if it exists
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = str(record.levelno)

        # Find caller from where originated the logged message
        frame, depth = logging.currentframe(), 2
        while frame.f_code.co_filename == logging.__file__:  # noqa: WPS609
            frame = cast(FrameType, frame.f_back)
            depth += 1

        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage(),
        )


LOGGING_LEVEL = logging.DEBUG if logging.DEBUG else logging.INFO
LOGGERS = ("uvicorn.asgi", "uvicorn.access")

logging.getLogger().handlers = [InterceptHandler()]
for logger_name in LOGGERS:
    logging_logger = logging.getLogger(logger_name)
    logging_logger.handlers = [InterceptHandler(level=LOGGING_LEVEL)]

log_file_path = os.path.join(BASE_DIR, 'logs/wise.log')
err_log_file_path = os.path.join(BASE_DIR, 'logs/wise.err.log')

loguru_config = {
    "handlers": [
        {"sink": sys.stderr, "level": "INFO",
         "format": "<green>{time:YYYY-mm-dd HH:mm:ss.SSS}</green> | {thread.name} | <level>{level}</level> | "
                   "<cyan>{module}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>"},
        {"sink": log_file_path, "rotation": "500 MB", "encoding": 'utf-8'},
        {"sink": err_log_file_path, "serialize": True, "level": 'ERROR', "rotation": "500 MB",
         "encoding": 'utf-8'},
    ],
}
logger.configure(**loguru_config)
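The new scripts/log.py routes records emitted through the standard logging module into loguru by installing InterceptHandler on the root logger and on the uvicorn loggers, then configures the loguru sinks. A minimal usage sketch, assuming scripts is an importable package and this module is imported once at startup:

# Sketch: importing the module runs its top-level setup (handlers + sinks).
import logging

import scripts.log  # noqa: F401  (configuration happens at import time)

# A stdlib logger call is now forwarded to the loguru sinks
# (stderr, logs/wise.log, and logs/wise.err.log for errors).
logging.getLogger("uvicorn.access").info("hello from stdlib logging")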
20
scripts/logger.py
Normal file
@@ -0,0 +1,20 @@
import os
import time
from loguru import logger

basedir = os.path.dirname(os.path.dirname(
    os.path.dirname(os.path.abspath(__file__))))

# print(f"log basedir{basedir}")  # /xxx/python_code/FastAdmin/backend/app
# Locate the log file path
log_path = os.path.join(basedir, 'logs')

if not os.path.exists(log_path):
    os.mkdir(log_path)

log_path_error = os.path.join(
    log_path, f'{time.strftime("%Y-%m-%d")}_error.log')

# Basic logging configuration
# For other configuration options, see https://github.com/Delgan/loguru
logger.add(log_path_error, rotation="12:00", retention="5 days", enqueue=True)
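scripts/logger.py is the module the rest of this commit imports (main.py and the deploy code use `from scripts.logger import logger`). A short usage sketch; the messages are illustrative:

# Sketch: the shared loguru logger writes to loguru's default stderr sink
# and, via the added sink, to logs/<YYYY-MM-DD>_error.log (rotated daily at
# 12:00, retained for 5 days, written through a queue because enqueue=True).
from scripts.logger import logger

logger.info("deploy started")                    # stderr sink and file sink
logger.error("deploy failed: {}", "timeout")     # also captured in the file sink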