Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
39 commits
Select commit Hold shift + click to select a range
db432ad
feat: update log context
Oct 22, 2025
9502acc
feat: update log context
Oct 22, 2025
d74e628
feat: update mcp
Oct 23, 2025
32b2ac1
feat: update mcp
Oct 23, 2025
e4c6b92
feat: add error log
Oct 23, 2025
c27bd61
feat: add error log
Oct 23, 2025
6769b4c
feat: add error log
Oct 23, 2025
01547e1
feat: update log
Oct 24, 2025
a19584f
feat: add chat_time
Oct 24, 2025
8dfa338
feat: add chat_time
Oct 24, 2025
a91e3e2
feat: add chat_time
Oct 24, 2025
5b962e2
feat: update log
Oct 24, 2025
69a6e9a
feat: update log
Oct 24, 2025
d325a31
feat: update log
Oct 24, 2025
f0e5f5c
feat: update log
Oct 24, 2025
7fc8c05
feat: update log
Oct 24, 2025
185ed93
feat: add arms
Oct 26, 2025
f641b70
feat: add arms
Oct 26, 2025
d5c59a0
fix: format
Oct 26, 2025
b144470
fix: format
Oct 26, 2025
33921b7
feat: add dockerfile
Oct 26, 2025
49a9079
feat: add dockerfile
Oct 26, 2025
27c49b6
feat: add arms config
Oct 26, 2025
60c5dd8
feat: update log
Oct 26, 2025
3096321
feat: add sleep time
Oct 26, 2025
204efef
feat: add sleep time
Oct 26, 2025
e2c9cbf
fix: conflict
Oct 28, 2025
33a41e8
feat: update log
Oct 28, 2025
cf23174
feat: delete dockerfile
Oct 28, 2025
18e2eda
feat: delete dockerfile
Oct 28, 2025
f9a18a5
feat: update dockerfile
Oct 28, 2025
399e200
fix: conflict
Oct 28, 2025
1d4f3d1
fix: conflict
Oct 28, 2025
92be50b
feat: replace ThreadPool to context
Oct 28, 2025
8a1fd64
feat: add timed log
Oct 28, 2025
7d7f731
fix: conflict
Oct 28, 2025
9fea59b
feat: add request log
Nov 3, 2025
4b72a63
feat: add request log
Nov 3, 2025
c3b9e83
fix: merge dev conflict
Nov 3, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 145 additions & 6 deletions src/memos/api/middleware/request_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
Request context middleware for automatic trace_id injection.
"""

import json
import os
import time

from collections.abc import Callable
Expand All @@ -17,6 +19,9 @@

logger = memos.log.get_logger(__name__)

# Maximum body size to read for logging (in bytes) - bodies larger than this will be skipped
# NOTE: os.getenv returns a str when the variable is set in the environment; cast to int
# so size comparisons (e.g. `body_size <= MAX_BODY_LOG_SIZE`) don't raise TypeError.
MAX_BODY_LOG_SIZE = int(os.getenv("MAX_BODY_LOG_SIZE", 10 * 1024))


def extract_trace_id_from_headers(request: Request) -> str | None:
"""Extract trace_id from various possible headers with priority: g-trace-id > x-trace-id > trace-id."""
Expand All @@ -26,6 +31,127 @@ def extract_trace_id_from_headers(request: Request) -> str | None:
return None


def _is_json_request(request: Request) -> tuple[bool, str]:
"""
Check if request is a JSON request.

Args:
request: The request object

Returns:
Tuple of (is_json, content_type)
"""
if request.method not in ("POST", "PUT", "PATCH", "DELETE"):
return False, ""

content_type = request.headers.get("content-type", "")
if not content_type:
return False, ""

is_json = "application/json" in content_type.lower()
return is_json, content_type


def _should_read_body(content_length: str | None) -> tuple[bool, int | None]:
    """
    Decide whether the body is small enough to read, based on the
    Content-Length header.

    A missing or malformed header is treated optimistically: the caller
    reads the body and relies on the post-read size check.

    Args:
        content_length: Content-Length header value

    Returns:
        Tuple of (should_read, body_size). body_size is None if header is invalid.
    """
    if content_length:
        try:
            declared_size = int(content_length)
        except ValueError:
            # Unparseable header — fall back to reading and checking afterwards.
            return True, None
        return declared_size <= MAX_BODY_LOG_SIZE, declared_size
    return True, None


def _create_body_info(content_type: str, body_size: int) -> dict:
"""Create body_info dict for large bodies that are skipped."""
return {
"content_type": content_type,
"content_length": body_size,
"note": f"body too large ({body_size} bytes), skipping read",
}


def _parse_json_body(body_bytes: bytes) -> dict | str:
"""
Parse JSON body bytes.

Args:
body_bytes: Raw body bytes

Returns:
Parsed JSON dict, or error message string if parsing fails
"""
try:
return json.loads(body_bytes)
except (json.JSONDecodeError, UnicodeDecodeError) as e:
return f"<unable to parse JSON: {e!s}>"


async def get_request_params(request: Request) -> tuple[dict, bytes | None]:
    """
    Extract request parameters (query params and body) for logging.

    Only reads body for application/json requests that are within size limits.

    This function is wrapped with exception handling to ensure logging failures
    don't affect the actual request processing.

    Args:
        request: The incoming request object

    Returns:
        Tuple of (params_dict, body_bytes). body_bytes is None if body was not read.
        Returns empty dict and None on any error.
    """
    try:
        params_log: dict = {}

        is_json, content_type = _is_json_request(request)
        if not is_json:
            # Non-JSON (or body-less) request: nothing to capture.
            return params_log, None

        # Cheap pre-check via the declared Content-Length before touching the stream.
        should_read, declared_size = _should_read_body(request.headers.get("content-length"))
        if not should_read and declared_size is not None:
            params_log["body_info"] = _create_body_info(content_type, declared_size)
            return params_log, None

        body_bytes = await request.body()
        if not body_bytes:
            return params_log, None

        # The header may be absent or wrong; re-check against the real size.
        actual_size = len(body_bytes)
        if actual_size > MAX_BODY_LOG_SIZE:
            params_log["body_info"] = _create_body_info(content_type, actual_size)
            return params_log, None

        params_log["body"] = _parse_json_body(body_bytes)
        return params_log, body_bytes

    except Exception as e:
        # Catch-all for any unexpected errors
        logger.error(f"Unexpected error in get_request_params: {e}", exc_info=True)
        # Return empty dict to ensure request can continue
        return {}, None


class RequestContextMiddleware(BaseHTTPMiddleware):
"""
Middleware to automatically inject request context for every HTTP request.
Expand Down Expand Up @@ -55,14 +181,27 @@ async def dispatch(self, request: Request, call_next: Callable) -> Response:
)
set_request_context(context)

# Log request start with parameters
params_log = {}
# Get request parameters for logging
# Wrap in try-catch to ensure logging failures don't break the request
params_log, body_bytes = await get_request_params(request)

# Re-create the request receive function if body was read
# This ensures downstream handlers can still read the body
if body_bytes is not None:
try:

# Get query parameters
if request.query_params:
params_log["query_params"] = dict(request.query_params)
async def receive():
return {"type": "http.request", "body": body_bytes, "more_body": False}

logger.info(f"Request started, params: {params_log}, headers: {request.headers}")
request._receive = receive
except Exception as e:
logger.error(f"Failed to recreate request receive function: {e}")
# Continue without restoring body, downstream handlers will handle it

logger.info(
f"Request started, method: {request.method}, path: {request.url.path}, "
f"request params: {params_log}, headers: {request.headers}"
)

# Process the request
try:
Expand Down
5 changes: 5 additions & 0 deletions src/memos/api/routers/server_router.py
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,8 @@ def _search_pref():
search_req.include_preference,
)

logger.info(f"Search memories result: {memories_result}")

return SearchResponse(
message="Search completed successfully",
data=memories_result,
Expand Down Expand Up @@ -618,6 +620,9 @@ def _process_pref_mem() -> list[dict[str, str]]:
text_response_data = text_future.result()
pref_response_data = pref_future.result()

logger.info(f"add_memories Text response data: {text_response_data}")
logger.info(f"add_memories Pref response data: {pref_response_data}")

return MemoryResponse(
message="Memory added successfully",
data=text_response_data + pref_response_data,
Expand Down
Loading