Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 20 additions & 3 deletions fastdeploy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
# This must be done before importing any modules that may use the logger
import logging
import os
import sys
from contextlib import contextmanager

# Create standard format (without color)
Expand All @@ -29,6 +30,15 @@
_original_getLogger = logging.getLogger


class _MaxLevelFilter(logging.Filter):
def __init__(self, level):
super().__init__()
self.level = level

def filter(self, record):
return record.levelno < self.level


@contextmanager
def _intercept_paddle_loggers():
"""Intercept and configure paddle loggers during import."""
Expand Down Expand Up @@ -57,9 +67,16 @@ def _configure_logger(name=None):
logger.setLevel(logging.DEBUG if envs.FD_DEBUG else logging.INFO)
for handler in logger.handlers[:]:
logger.removeHandler(handler)
handler = logging.StreamHandler()
handler.setFormatter(_root_formatter)
logger.addHandler(handler)
stdout_handler = logging.StreamHandler(sys.stdout)
stdout_handler.setLevel(logging.DEBUG)
stdout_handler.addFilter(_MaxLevelFilter(logging.ERROR))
stdout_handler.setFormatter(_root_formatter)
logger.addHandler(stdout_handler)

stderr_handler = logging.StreamHandler(sys.stderr)
stderr_handler.setLevel(logging.ERROR)
stderr_handler.setFormatter(_root_formatter)
logger.addHandler(stderr_handler)
logger.propagate = False
return logger

Expand Down
10 changes: 4 additions & 6 deletions fastdeploy/engine/sched/scheduler_metrics_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,13 @@
"""

import logging
import sys
import threading
import time
from typing import Iterable

from fastdeploy import envs
from fastdeploy.logger.logger import _LOG_FORMAT


class SchedulerMetricsLogger:
Expand Down Expand Up @@ -47,12 +49,8 @@ def _get_logger(self) -> logging.Logger:
if not getattr(logger, "_fd_scheduler_metrics_configured", False):
logger.setLevel(logging.INFO)
logger.propagate = False
handler = logging.StreamHandler()
formatter = logging.Formatter(
"[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s",
"%Y-%m-%d %H:%M:%S",
)
handler.setFormatter(formatter)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter(_LOG_FORMAT))
logger.addHandler(handler)
logger._fd_scheduler_metrics_configured = True
return logger
Expand Down
3 changes: 2 additions & 1 deletion fastdeploy/entrypoints/openai/api_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import json
import os
import signal
import sys
import threading
import time
import traceback
Expand Down Expand Up @@ -194,7 +195,7 @@ async def lifespan(app: FastAPI):
"%(levelname)-8s %(asctime)s %(process)-5s %(filename)s[line:%(lineno)d] %(message)s"
)

handler = logging.StreamHandler()
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(formatter)
uvicorn_access.addHandler(handler)
uvicorn_access.propagate = False
Expand Down
13 changes: 10 additions & 3 deletions fastdeploy/entrypoints/openai/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,28 +52,35 @@
}
},
"handlers": {
# INFO/DEBUG logs go to stdout
"default": {
"class": "colorlog.StreamHandler",
"stream": "ext://sys.stdout",
"formatter": "custom",
},
# ERROR+ logs go to stderr
"error": {
"class": "colorlog.StreamHandler",
"stream": "ext://sys.stderr",
"level": "ERROR",
"formatter": "custom",
},
},
"loggers": {
"uvicorn": {
"level": "INFO",
"handlers": ["default"],
"handlers": ["default", "error"],
"propagate": False,
},
"uvicorn.error": {
"level": "INFO",
"handlers": ["default"],
"handlers": ["default", "error"],
"propagate": False,
Comment on lines +55 to 78
Copy link

Copilot AI Apr 27, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

当前 dictConfig 下同时把 "default"(stdout) 和 "error"(stderr) 两个 handler 绑定到 uvicorn / uvicorn.error,但 "default" handler 没有过滤 ERROR 级别,导致 ERROR+ 日志会同时输出到 stdout 和 stderr(重复一份),与注释“ERROR+ logs go to stderr”不一致。建议给 stdout handler 增加“仅低于 ERROR”的 filter(或等价的过滤方案),确保 ERROR+ 只走 stderr。

Copilot uses AI. Check for mistakes.
},
"uvicorn.access": {
"level": "INFO",
"handlers": ["default"],
"propagate": False,
"formatter": "custom",
},
},
}
Expand Down
50 changes: 31 additions & 19 deletions fastdeploy/logger/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,30 @@
_LOG_FORMAT = "%(levelname)-8s %(asctime)s %(process)-5s %(filename)s[line:%(lineno)d] %(message)s"


class _MaxLevelFilter(logging.Filter):
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🟡 建议 _MaxLevelFilter 在本文件(logger.py)、fastdeploy/__init__.pyfastdeploy/logger/setup_logging.py(名为 MaxLevelFilter)中各自定义了一遍,实现完全相同,存在三处重复。

建议将该类统一放在 fastdeploy/logger/setup_logging.py 或单独的 fastdeploy/logger/filters.py 中,其他地方 import 复用,避免后续维护时三处逻辑不一致。

    def __init__(self, level):
        """Store *level* as the exclusive upper bound for records this filter accepts."""
        super().__init__()
        self.level = level

    def filter(self, record):
        """Accept only records whose numeric level is strictly below ``self.level``."""
        return record.levelno < self.level


def _add_console_handlers(logger, formatter=None):
    """Attach a stdout/stderr console handler pair to *logger*.

    Records below ERROR are emitted on stdout; ERROR and above are emitted
    on stderr, so normal output and failures can be separated by stream.
    When *formatter* is given it is applied to both handlers.
    """
    out_handler = logging.StreamHandler(sys.stdout)
    out_handler.setLevel(logging.DEBUG)
    # Keep ERROR+ off stdout — those records belong to the stderr handler only.
    out_handler.addFilter(_MaxLevelFilter(logging.ERROR))

    err_handler = logging.StreamHandler(sys.stderr)
    err_handler.setLevel(logging.ERROR)

    # Same registration order as before: stdout first, then stderr.
    for handler in (out_handler, err_handler):
        if formatter is not None:
            handler.setFormatter(formatter)
        logger.addHandler(handler)


class FastDeployLogger:
_instance = None
_initialized = False
Expand Down Expand Up @@ -238,16 +262,11 @@ def get_trace_logger(self, name, file_name, without_formater=False, print_to_con
logger.addHandler(handler)
logger.addHandler(error_handler)

# Console handler
# Console handlers: route INFO/DEBUG to stdout and ERROR/CRITICAL to stderr
if print_to_console:
console_handler = logging.StreamHandler()
if not without_formater:
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
console_handler.propagate = False
_add_console_handlers(logger, None if without_formater else formatter)

# Set propagate (maintain original logic)
# logger.propagate = False
logger.propagate = False

return logger

Expand Down Expand Up @@ -303,16 +322,11 @@ def _get_legacy_logger(self, name, file_name, without_formater=False, print_to_c
logger.addHandler(handler)
logger.addHandler(error_handler)

# Console handler
# Console handlers: route INFO/DEBUG to stdout and ERROR/CRITICAL to stderr
if print_to_console:
console_handler = logging.StreamHandler()
if not without_formater:
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
console_handler.propagate = False
_add_console_handlers(logger, None if without_formater else formatter)

# Set propagate (maintain original logic)
# logger.propagate = False
logger.propagate = False

return logger

Expand All @@ -331,9 +345,7 @@ def _patched(name=None):
logger.setLevel(logging.DEBUG if envs.FD_DEBUG else logging.INFO)
for handler in logger.handlers[:]:
logger.removeHandler(handler)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
_add_console_handlers(logger, formatter)
logger.propagate = False
return logger

Expand Down
9 changes: 9 additions & 0 deletions fastdeploy/logger/setup_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,15 @@ def setup_logging(log_dir=None, config_file=None):
# Ensure log directory exists
Path(log_dir).mkdir(parents=True, exist_ok=True)

# Isolate the 'legacy' logger namespace from the root logger so that
# legacy.* loggers never propagate to root even before get_trace_logger
# has been called. This is scoped to our own namespace and does NOT
# affect third-party libraries that rely on root-level lastResort output.
legacy_logger = logging.getLogger("legacy")
if not legacy_logger.handlers:
legacy_logger.addHandler(logging.NullHandler())
legacy_logger.propagate = False

# Store log_dir for later use
setup_logging._log_dir = log_dir

Expand Down
5 changes: 4 additions & 1 deletion tests/logger/test_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,10 @@ def test_legacy_logger_creation(self):
def test_logger_propagate(self):
"""Test log propagation settings"""
legacy_logger = self.logger._get_legacy_logger("test", "test.log")
self.assertTrue(legacy_logger.propagate)
self.assertFalse(legacy_logger.propagate)
Comment thread
gongweibao marked this conversation as resolved.
# Also verify get_trace_logger
trace_logger = self.logger.get_trace_logger("test_trace", "test_trace.log")
self.assertFalse(trace_logger.propagate)

def test_get_trace_logger_basic(self):
"""Test basic functionality of get_trace_logger"""
Expand Down
Loading