Skip to content

Commit 34e6a2c

Browse files
authored
Fix #92: Correct regression on "Released lock at" (#93)
* `PidFileLock.release`: this method in `src/docbuild/utils/pidlock.py` can be called during interpreter shutdown via `atexit`. At that point, the logging system may already be partially shut down, causing the `log.debug` call to fail with a `ValueError: I/O operation on closed file`. * `test_stale_lock_is_cleaned_up`: this test in `tests/utils/test_pidlock.py` acquires a lock but never explicitly releases it. It relies on the atexit handler for cleanup, which triggers the aforementioned logging error during test execution. * Use fork() and stabilize CLI/CI
1 parent 0fdc6bd commit 34e6a2c

11 files changed

Lines changed: 544 additions & 487 deletions

File tree

.coveragerc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
[run]
22
concurrency = multiprocessing
33
parallel = true
4+
branch = true
45

56
[report]
67
precision = 1

.github/workflows/ci.yml

Lines changed: 18 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ jobs:
2424
test-ubuntu:
2525
name: ubuntu-python-${{ matrix.python-version }}
2626
runs-on: ubuntu-latest
27+
timeout-minutes: 15
2728
strategy:
2829
matrix:
2930
python-version: &python-matrix
@@ -71,16 +72,26 @@ jobs:
7172
- name: Print the installed version
7273
run: echo "Installed uv version is $(.venv/bin/uv --version)"
7374

74-
- name: Install dependencies
75+
- name: Install dependencies and Coverage Tools
7576
run: |
7677
source .venv/bin/activate
7778
uv sync --frozen --group devel
79+
# CRITICAL FIX: Ensure coverage tools are installed for the CI runner
80+
pip install coverage pytest-cov
7881
79-
- name: Run tests
82+
- name: Run tests and Report Coverage # Unified step for test execution and reporting
8083
run: |
8184
source .venv/bin/activate
82-
pytest -vv
83-
85+
# 1. Run tests and collect coverage for src/ and tests/
86+
pytest --cov=src --cov=tests --cov-report=term-missing -vv
87+
88+
# 2. Combine and report data (needed after parallel execution)
89+
python -m coverage combine --append || true
90+
python -m coverage report
91+
python -m coverage erase # Final cleanup
92+
env:
93+
PYTHONSTARTMETHOD: spawn
94+
8495
test-macos:
8596
name: macos-python-${{ matrix.python-version }}
8697
runs-on: macos-latest
@@ -139,4 +150,6 @@ jobs:
139150
- name: Run tests
140151
run: |
141152
source .venv/bin/activate
142-
pytest -vv
153+
pytest --cov=src -vv
154+
env:
155+
PYTHONSTARTMETHOD: spawn

.pytest.ini

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -28,3 +28,8 @@ doctest_optionflags = NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL
2828

2929
# From pytest-asyncio:
3030
asyncio_mode = auto
31+
32+
# --- ADDED: Warning Filter for Multiprocessing Stability ---
33+
filterwarnings =
34+
# Suppress the DeprecationWarning caused by pytest-cov/multiprocessing using fork()
35+
ignore:This process \(pid=.*\) is multi-threaded, use of fork\(\) may lead to deadlocks in the child\.:DeprecationWarning:multiprocessing

changelog.d/93.bugfix.rst

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
Fix a regression in lock file release: the "Released lock at" debug log could fail with ``ValueError: I/O operation on closed file`` when the lock was released via ``atexit`` during interpreter shutdown.

src/docbuild/cli/cmd_cli.py

Lines changed: 26 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from rich.pretty import install
1010
from rich.traceback import install as install_traceback
1111
from pydantic import ValidationError
12+
from typing import Any, cast
1213

1314
from ..__about__ import __version__
1415
from ..config.app import replace_placeholders
@@ -23,7 +24,7 @@
2324
PROJECT_LEVEL_APP_CONFIG_FILENAMES,
2425
)
2526
from ..logging import setup_logging
26-
from ..utils.pidlock import PidFileLock
27+
from ..utils.pidlock import PidFileLock, LockAcquisitionError
2728
from .cmd_build import build
2829
from .cmd_c14n import c14n
2930
from .cmd_config import config
@@ -36,14 +37,12 @@
3637
PYTHON_VERSION = (
3738
f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
3839
)
39-
log = logging.getLogger(__name__) # Ensure logger is available globally
40+
log = logging.getLogger(__name__)
4041
CONSOLE = rich.console.Console(stderr=True, highlight=False)
4142

4243
def _setup_console() -> None:
4344
"""Configures the rich console."""
4445
install_traceback(console=CONSOLE, show_locals=True)
45-
# The Rich console is now handled by the logging setup.
46-
# install(console=CONSOLE)
4746

4847
@click.group(
4948
name=APP_NAME,
@@ -127,7 +126,7 @@ def cli(
127126
context.dry_run = dry_run
128127
context.debug = debug
129128

130-
# --- PHASE 1: Load and Validate Application Config ---
129+
# --- PHASE 1: Load and Validate Application Config (and setup logging) ---
131130

132131
# 1. Load the raw application config dictionary
133132
(
@@ -142,6 +141,9 @@ def cli(
142141
DEFAULT_APP_CONFIG,
143142
)
144143

144+
# Explicitly cast the raw_appconfig type to silence Pylance
145+
raw_appconfig = cast(dict[str, Any], raw_appconfig)
146+
145147
# 2. Validate the raw config dictionary using Pydantic
146148
try:
147149
# Pydantic validation also handles placeholder replacement via @model_validator
@@ -174,32 +176,38 @@ def cli(
174176

175177
env_config_path = context.envconfigfiles[0] if context.envconfigfiles else None
176178

177-
# --- CONCURRENCY CONTROL ---
179+
# Explicitly cast the context.envconfig type to silence Pylance
180+
context.envconfig = cast(dict[str, Any], context.envconfig)
181+
182+
# --- CONCURRENCY CONTROL: Use explicit __enter__ and cleanup registration ---
178183
if env_config_path:
179-
# Wrap the core execution in the PidFileLock context manager
180-
# If the lock cannot be acquired, a RuntimeError is raised, which exits the program.
184+
# 1. Instantiate the lock object
185+
ctx.obj.env_lock = PidFileLock(resource_path=env_config_path)
186+
181187
try:
182-
# Acquire the lock using the environment config file path as the resource ID
183-
ctx.obj.env_lock = PidFileLock(resource_path=env_config_path)
184-
ctx.obj.env_lock.acquire()
188+
# 2. Acquire the lock by explicitly calling the __enter__ method.
189+
ctx.obj.env_lock.__enter__()
185190
log.info("Acquired lock for environment config: %r", env_config_path.name)
186-
except RuntimeError as e:
187-
# If the lock is already held, log the error and exit gracefully.
191+
192+
except LockAcquisitionError as e:
193+
# Handles lock contention
188194
log.error(str(e))
189195
ctx.exit(1)
190196
except Exception as e:
191-
# Catch all other potential issues during lock acquisition/setup
197+
# Handles critical errors
192198
log.error("Failed to set up environment lock: %s", e)
193199
ctx.exit(1)
194-
200+
201+
# 3. Register the lock's __exit__ method to be called when the context terminates.
202+
# We use a lambda to supply the three mandatory positional arguments (None)
203+
# expected by __exit__, satisfying the click.call_on_close requirement.
204+
ctx.call_on_close(lambda: ctx.obj.env_lock.__exit__(None, None, None))
205+
195206
# Final config processing must happen outside the lock acquisition check
196-
# Note: Placeholder replacement is still necessary here because EnvConfig is not validated yet.
197207
context.envconfig = replace_placeholders(
198208
context.envconfig,
199209
)
200210

201-
# The acquired lock will be automatically released when the program exits via the atexit.register call in PidFileLock.acquire().
202-
203211
# Add subcommand
204212
cli.add_command(build)
205213
cli.add_command(c14n)

src/docbuild/logging.py

Lines changed: 89 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,30 @@
77
import logging.handlers
88
import os
99
import queue
10+
import threading
1011
import time
12+
import contextvars
1113
from pathlib import Path
12-
from typing import Any, Sequence
14+
from typing import Any, List
1315

1416
from .constants import APP_NAME, BASE_LOG_DIR, GITLOGGER_NAME
1517

18+
# --- Defensive macOS-safe patch ---
19+
logging.raiseExceptions = False # Suppress internal logging exception tracebacks
20+
_original_emit = logging.StreamHandler.emit
21+
22+
23+
def _safe_emit(self, record):
24+
try:
25+
_original_emit(self, record)
26+
except ValueError:
27+
# Happens if a background thread logs after sys.stdout/stderr closed.
28+
pass
29+
30+
31+
logging.StreamHandler.emit = _safe_emit
32+
1633
# --- Default Logging Configuration ---
17-
# This dictionary provides the minimal required configuration structure.
1834
DEFAULT_LOGGING_CONFIG = {
1935
"version": 1,
2036
"disable_existing_loggers": False,
@@ -59,51 +75,81 @@
5975
},
6076
}
6177

62-
LOGLEVELS = {
63-
0: logging.WARNING,
64-
1: logging.INFO,
65-
2: logging.DEBUG,
78+
LOGLEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
79+
80+
# --- Context-aware global state ---
81+
_LOGGING_STATE: dict[str, contextvars.ContextVar] = {
82+
"listener": contextvars.ContextVar("listener", default=None),
83+
"handlers": contextvars.ContextVar("handlers", default=[]),
84+
"background_threads": contextvars.ContextVar("background_threads", default=[]),
6685
}
6786

87+
88+
def _shutdown_logging():
89+
"""Ensure all logging threads and handlers shut down cleanly."""
90+
listener = _LOGGING_STATE["listener"].get()
91+
handlers: List[logging.Handler] = _LOGGING_STATE["handlers"].get()
92+
bg_threads: List[threading.Thread] = _LOGGING_STATE["background_threads"].get()
93+
94+
if listener:
95+
try:
96+
listener.stop()
97+
except Exception:
98+
pass
99+
100+
for handler in handlers:
101+
try:
102+
handler.close()
103+
except Exception:
104+
pass
105+
106+
# Join all registered background threads
107+
for t in bg_threads:
108+
if t.is_alive():
109+
try:
110+
t.join(timeout=5)
111+
except Exception:
112+
pass
113+
114+
# Reset contextvars
115+
_LOGGING_STATE["listener"].set(None)
116+
_LOGGING_STATE["handlers"].set([])
117+
_LOGGING_STATE["background_threads"].set([])
118+
119+
120+
def register_background_thread(thread: threading.Thread):
121+
"""Register a thread to be joined on logging shutdown."""
122+
threads = _LOGGING_STATE["background_threads"].get()
123+
threads.append(thread)
124+
_LOGGING_STATE["background_threads"].set(threads)
125+
126+
68127
def create_base_log_dir(base_log_dir: str | Path = BASE_LOG_DIR) -> Path:
69-
"""Create the base log directory if it doesn't exist.
70-
71-
This directory is typically located at :file:`~/.local/state/docbuild/logs`
72-
as per the XDG Base Directory Specification.
73-
:param base_log_dir: The base directory where logs should be stored.
74-
Considers the `XDG_STATE_HOME` environment variable if set.
75-
:return: The path to the base log directory.
76-
"""
128+
"""Create the base log directory if it doesn't exist."""
77129
log_dir = Path(os.getenv("XDG_STATE_HOME", base_log_dir))
78130
log_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
79131
return log_dir
80132

133+
81134
def _resolve_class(path: str):
82135
"""Dynamically imports and returns a class from a string path."""
83136
module_name, class_name = path.rsplit(".", 1)
84137
module = importlib.import_module(module_name)
85138
return getattr(module, class_name)
86139

87-
def setup_logging(
88-
cliverbosity: int,
89-
user_config: dict[str, Any] | None = None,
90-
) -> None:
91-
"""Sets up a non-blocking, configurable logging system.
92-
93-
This function merges the default logging configuration with the validated
94-
user configuration and sets up the asynchronous handlers.
95-
"""
140+
141+
def setup_logging(cliverbosity: int, user_config: dict[str, Any] | None = None) -> None:
142+
"""Sets up a non-blocking, configurable logging system."""
96143
config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
97144

98145
if user_config and "logging" in user_config:
99-
# Use a more robust deep merge approach
100146
def deep_merge(target: dict, source: dict) -> None:
101147
for k, v in source.items():
102148
if k in target and isinstance(target[k], dict) and isinstance(v, dict):
103149
deep_merge(target[k], v)
104150
else:
105151
target[k] = v
106-
152+
107153
deep_merge(config, user_config.get("logging", {}))
108154

109155
# --- Verbosity & Log File Path Setup ---
@@ -117,69 +163,52 @@ def deep_merge(target: dict, source: dict) -> None:
117163
config["handlers"]["file"]["filename"] = str(log_path)
118164

119165
built_handlers = []
120-
121-
# --- Handler and Listener Initialization ---
122-
built_handlers = []
123-
124-
# Required keys to ignore when instantiating a handler (they are for dictConfig lookup)
166+
167+
# --- Handler Initialization ---
125168
HANDLER_INTERNAL_KEYS = ["class", "formatter", "level", "class_name"]
126-
FORMATTER_INTERNAL_KEYS = ["class", "formatter", "level", "class_name", "validate"] # Added 'validate'
169+
FORMATTER_INTERNAL_KEYS = ["class", "formatter", "level", "class_name", "validate"]
127170

128171
for hname, hconf in config["handlers"].items():
129172
cls = _resolve_class(hconf["class"])
130-
131-
handler_args = {
132-
k: v for k, v in hconf.items() if k not in HANDLER_INTERNAL_KEYS
133-
}
134-
173+
handler_args = {k: v for k, v in hconf.items() if k not in HANDLER_INTERNAL_KEYS}
135174
handler = cls(**handler_args)
136-
137175
handler.setLevel(hconf.get("level", "NOTSET"))
138-
176+
139177
formatter_name = hconf.get("formatter")
140178
if formatter_name and formatter_name in config["formatters"]:
141179
fmt_conf = config["formatters"][formatter_name]
142-
143-
# --- FIX APPLIED HERE ---
144-
# Filter out keys that are Pydantic metadata/aliases (like 'class_name')
145-
# We specifically filter out 'format' because the Formatter.__init__
146-
# expects 'fmt' or uses 'style', not 'format' directly as a kwarg.
147180
formatter_kwargs = {
148-
k: v for k, v in fmt_conf.items()
149-
if k not in FORMATTER_INTERNAL_KEYS and k not in ['format']
181+
k: v for k, v in fmt_conf.items() if k not in FORMATTER_INTERNAL_KEYS and k not in ["format"]
150182
}
151-
152-
# Pass 'fmt' instead of 'format' to the standard Formatter constructor
153-
formatter_kwargs['fmt'] = fmt_conf.get("format")
154-
formatter_kwargs['datefmt'] = fmt_conf.get("datefmt")
155-
formatter_kwargs['style'] = fmt_conf.get("style") # Should pass style if present
156-
157-
# Remove keys that are None
183+
formatter_kwargs["fmt"] = fmt_conf.get("format")
184+
formatter_kwargs["datefmt"] = fmt_conf.get("datefmt")
185+
formatter_kwargs["style"] = fmt_conf.get("style")
158186
formatter_kwargs = {k: v for k, v in formatter_kwargs.items() if v is not None}
159-
160187
fmt_cls = _resolve_class(fmt_conf.get("class", "logging.Formatter"))
161-
162188
handler.setFormatter(fmt_cls(**formatter_kwargs))
163-
189+
164190
built_handlers.append(handler)
165-
191+
166192
# --- Asynchronous Queue Setup ---
167193
log_queue = queue.Queue(-1)
168194
queue_handler = logging.handlers.QueueHandler(log_queue)
169195
listener = logging.handlers.QueueListener(
170196
log_queue, *built_handlers, respect_handler_level=True
171197
)
172198
listener.start()
173-
atexit.register(listener.stop)
174199

175200
# --- Logger Initialization ---
176201
for lname, lconf in config["loggers"].items():
177202
logger = logging.getLogger(lname)
178203
logger.setLevel(lconf["level"])
179204
logger.addHandler(queue_handler)
180205
logger.propagate = lconf.get("propagate", False)
181-
182-
# Configure the root logger separately
206+
183207
root_logger = logging.getLogger()
184208
root_logger.setLevel(config["root"]["level"])
185-
root_logger.addHandler(queue_handler)
209+
root_logger.addHandler(queue_handler)
210+
211+
# --- Register graceful shutdown ---
212+
_LOGGING_STATE["listener"].set(listener)
213+
_LOGGING_STATE["handlers"].set(built_handlers)
214+
atexit.register(_shutdown_logging)

0 commit comments

Comments
 (0)