From a0dff47b16cd9ca7ea02faceae2aee5b9ff15694 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:25:37 +0000 Subject: [PATCH 01/17] Initial plan From a13837630592dbeb65172e3e341e47a041485031 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:29:29 +0000 Subject: [PATCH 02/17] Add FastAPI pub/sub example with tests and GitHub Action Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- .github/workflows/examples.yml | 72 ++++++++ examples/fastapi_pub_sub/README.md | 110 ++++++++++++ examples/fastapi_pub_sub/api.py | 165 +++++++++++++++++ examples/fastapi_pub_sub/consumer.py | 118 +++++++++++++ examples/fastapi_pub_sub/requirements.txt | 5 + examples_tests/__init__.py | 1 + examples_tests/conftest.py | 40 +++++ examples_tests/test_fastapi_pub_sub.py | 204 ++++++++++++++++++++++ 8 files changed, 715 insertions(+) create mode 100644 .github/workflows/examples.yml create mode 100644 examples/fastapi_pub_sub/README.md create mode 100644 examples/fastapi_pub_sub/api.py create mode 100644 examples/fastapi_pub_sub/consumer.py create mode 100644 examples/fastapi_pub_sub/requirements.txt create mode 100644 examples_tests/__init__.py create mode 100644 examples_tests/conftest.py create mode 100644 examples_tests/test_fastapi_pub_sub.py diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml new file mode 100644 index 0000000..12c10ff --- /dev/null +++ b/.github/workflows/examples.yml @@ -0,0 +1,72 @@ +name: Examples Tests + +on: + push: + branches: [main, develop] + paths: + - 'examples/**' + - 'examples_tests/**' + - 'pgmq_sqlalchemy/**' + - '.github/workflows/examples.yml' + pull_request: + branches: [main, develop] + paths: + - 'examples/**' + - 'examples_tests/**' + - 'pgmq_sqlalchemy/**' + - '.github/workflows/examples.yml' + +jobs: + test-examples: + runs-on: ubuntu-latest + 
strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12"] + + name: Test Examples (Python ${{ matrix.python-version }}) + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install uv + run: | + curl -LsSf https://astral.sh/uv/install.sh | sh + echo "$HOME/.local/bin" >> $GITHUB_PATH + + - name: Install dependencies + run: | + uv sync --extra dev + # Install additional dependencies for examples + uv pip install fastapi uvicorn httpx + + - name: Start PostgreSQL + run: | + cp pgmq_postgres.template.env pgmq_postgres.env + cp pgmq_tests.template.env pgmq_tests.env + make start-db + + - name: Setup database for examples tests + run: | + docker compose exec -T pgmq_postgres psql -U postgres -c "CREATE EXTENSION IF NOT EXISTS pgmq CASCADE;" + + - name: Run examples tests + run: | + uv run pytest examples_tests --cov=examples --cov-report=xml:coverage-examples-py${{ matrix.python-version }}.xml -v + + - name: Upload coverage artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: coverage-examples-py${{ matrix.python-version }} + path: coverage-examples-py${{ matrix.python-version }}.xml + retention-days: 1 + + - name: Cleanup + if: always() + run: | + docker compose down diff --git a/examples/fastapi_pub_sub/README.md b/examples/fastapi_pub_sub/README.md new file mode 100644 index 0000000..e28d7ab --- /dev/null +++ b/examples/fastapi_pub_sub/README.md @@ -0,0 +1,110 @@ +# FastAPI Pub/Sub Example with PGMQ + +This example demonstrates a real-world scenario of using PGMQ with FastAPI for an order management system. 
It shows how to: + +- Use PGMQ with FastAPI and sync SQLAlchemy sessions (psycopg2) +- Publish messages using `PGMQOperation` (op) in a web API +- Consume messages asynchronously using `PGMQueue` with asyncpg + +## Architecture + +- **API Server (api.py)**: FastAPI application that creates orders and publishes them to PGMQ + - Uses sync database driver (psycopg2) + - Uses `PGMQOperation` (imported as `op`) for publishing messages + - Provides REST endpoints for creating and retrieving orders + +- **Consumer (consumer.py)**: Async worker that processes orders from the queue + - Uses async database driver (asyncpg) + - Uses `PGMQueue` class for reading messages + - Processes messages concurrently with asyncio + +## Prerequisites + +- PostgreSQL with PGMQ extension installed +- Python 3.9 or higher + +Quick setup: +```bash +docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io/tembo/pg16-pgmq:latest +``` + +## Installation + +Install required dependencies: + +```bash +pip install -r requirements.txt +``` + +## Running the Example + +### 1. Start the API Server + +```bash +python api.py +``` + +The API will be available at http://localhost:8000 + +### 2. Start the Consumer + +In a separate terminal: + +```bash +python consumer.py +``` + +### 3. Create Orders + +Create an order via the API: + +```bash +curl -X POST "http://localhost:8000/orders" \ + -H "Content-Type: application/json" \ + -d '{ + "customer_name": "John Doe", + "product_name": "Widget", + "quantity": 5, + "price": 29.99 + }' +``` + +You should see: +- The API returns the created order with a message ID +- The consumer logs show the order being processed + +### 4. View Order + +Get an order by ID: + +```bash +curl "http://localhost:8000/orders/1" +``` + +## API Endpoints + +- `POST /orders` - Create a new order +- `GET /orders/{order_id}` - Get order by ID +- `GET /health` - Health check endpoint + +## How It Works + +1. 
When an order is created via the API: + - The order is saved to the database + - A message is published to PGMQ using `op.send()` + - The message contains order details + +2. The consumer: + - Continuously polls the queue for new messages + - Processes messages concurrently using asyncio + - Deletes successfully processed messages + - Leaves failed messages in the queue for retry + +## Configuration + +You can modify the following constants in the files: + +- `DATABASE_URL`: PostgreSQL connection string +- `QUEUE_NAME`: Name of the PGMQ queue (default: "order_queue") +- `batch_size`: Number of messages to process in each batch (consumer.py) +- `vt`: Visibility timeout in seconds (consumer.py) diff --git a/examples/fastapi_pub_sub/api.py b/examples/fastapi_pub_sub/api.py new file mode 100644 index 0000000..4a8a374 --- /dev/null +++ b/examples/fastapi_pub_sub/api.py @@ -0,0 +1,165 @@ +"""FastAPI API server for Order management with PGMQ message publishing. + +This example demonstrates: +- Using FastAPI with SQLAlchemy sync session (psycopg2) +- Publishing messages to PGMQ using PGMQOperation (op) +- Creating orders and sending them to a message queue +""" +from typing import Generator +from contextlib import contextmanager + +from fastapi import FastAPI, Depends, HTTPException +from pydantic import BaseModel +from sqlalchemy import create_engine, Column, Integer, String, Float, DateTime +from sqlalchemy.orm import Session, sessionmaker, declarative_base +from datetime import datetime + +from pgmq_sqlalchemy import op + +# Database configuration +DATABASE_URL = "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres" +QUEUE_NAME = "order_queue" + +# SQLAlchemy setup +engine = create_engine(DATABASE_URL) +SessionLocal = sessionmaker(bind=engine, autocommit=False, autoflush=False) +Base = declarative_base() + + +# Order Model (SQLAlchemy ORM) +class Order(Base): + __tablename__ = "orders" + + id = Column(Integer, primary_key=True, index=True) + customer_name = 
Column(String, nullable=False) + product_name = Column(String, nullable=False) + quantity = Column(Integer, nullable=False) + price = Column(Float, nullable=False) + created_at = Column(DateTime, default=datetime.utcnow) + + +# Pydantic models for request/response +class OrderCreate(BaseModel): + customer_name: str + product_name: str + quantity: int + price: float + + +class OrderResponse(BaseModel): + id: int + customer_name: str + product_name: str + quantity: int + price: float + created_at: datetime + message_id: int + + class Config: + from_attributes = True + + +# FastAPI app +app = FastAPI(title="Order Management with PGMQ") + + +# Database dependency +def get_db() -> Generator[Session, None, None]: + """Database session dependency.""" + db = SessionLocal() + try: + yield db + finally: + db.close() + + +@app.on_event("startup") +def startup_event(): + """Initialize database tables and PGMQ queue on startup.""" + # Create tables if they don't exist + Base.metadata.create_all(bind=engine) + + # Initialize PGMQ queue + with SessionLocal() as session: + op.check_pgmq_ext(session=session, commit=True) + + # Create queue if it doesn't exist (will not fail if exists) + try: + op.create_queue(QUEUE_NAME, session=session, commit=True) + except Exception: + # Queue might already exist, which is fine + pass + + +@app.post("/orders", response_model=OrderResponse, status_code=201) +def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): + """Create a new order and publish it to the message queue. 
+ + Args: + order_data: Order information + db: Database session + + Returns: + Created order with message ID + """ + # Create order in database + db_order = Order( + customer_name=order_data.customer_name, + product_name=order_data.product_name, + quantity=order_data.quantity, + price=order_data.price, + ) + db.add(db_order) + db.commit() + db.refresh(db_order) + + # Publish message to PGMQ using op + message_data = { + "order_id": db_order.id, + "customer_name": db_order.customer_name, + "product_name": db_order.product_name, + "quantity": db_order.quantity, + "price": db_order.price, + "created_at": db_order.created_at.isoformat(), + } + + msg_id = op.send(QUEUE_NAME, message_data, session=db, commit=True) + + # Return order with message ID + return OrderResponse( + id=db_order.id, + customer_name=db_order.customer_name, + product_name=db_order.product_name, + quantity=db_order.quantity, + price=db_order.price, + created_at=db_order.created_at, + message_id=msg_id, + ) + + +@app.get("/orders/{order_id}", response_model=OrderCreate) +def get_order(order_id: int, db: Session = Depends(get_db)): + """Get order by ID. + + Args: + order_id: Order ID + db: Database session + + Returns: + Order information + """ + order = db.query(Order).filter(Order.id == order_id).first() + if not order: + raise HTTPException(status_code=404, detail="Order not found") + return order + + +@app.get("/health") +def health_check(): + """Health check endpoint.""" + return {"status": "ok"} + + +if __name__ == "__main__": + import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py new file mode 100644 index 0000000..0a65ee1 --- /dev/null +++ b/examples/fastapi_pub_sub/consumer.py @@ -0,0 +1,118 @@ +"""Async consumer for processing orders from PGMQ. 
+ +This example demonstrates: +- Using asyncio for asynchronous message processing +- Using asyncpg driver with PGMQueue +- Reading and processing messages from PGMQ +- Deleting messages after successful processing +""" +import asyncio +import logging +from typing import Optional + +from pgmq_sqlalchemy import PGMQueue +from pgmq_sqlalchemy.schema import Message + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) +logger = logging.getLogger(__name__) + +# Database configuration +DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres" +QUEUE_NAME = "order_queue" + + +async def process_order(message: Message) -> bool: + """Process an order message. + + Args: + message: Message from PGMQ containing order data + + Returns: + True if processing was successful, False otherwise + """ + try: + order_data = message.message + logger.info(f"Processing order {order_data.get('order_id')}") + logger.info(f" Customer: {order_data.get('customer_name')}") + logger.info(f" Product: {order_data.get('product_name')}") + logger.info(f" Quantity: {order_data.get('quantity')}") + logger.info(f" Price: ${order_data.get('price')}") + + # Simulate order processing (e.g., inventory check, payment processing, etc.) + await asyncio.sleep(1) + + logger.info(f"Order {order_data.get('order_id')} processed successfully") + return True + except Exception as e: + logger.error(f"Error processing order: {e}") + return False + + +async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): + """Continuously consume and process messages from the queue. 
+ + Args: + pgmq: PGMQueue instance + batch_size: Number of messages to read in each batch + vt: Visibility timeout in seconds + """ + logger.info(f"Starting consumer for queue: {QUEUE_NAME}") + logger.info(f"Batch size: {batch_size}, Visibility timeout: {vt}s") + + while True: + try: + # Read a batch of messages + messages = await pgmq.read_batch(QUEUE_NAME, vt=vt, batch_size=batch_size) + + if not messages: + logger.debug("No messages available, waiting...") + await asyncio.sleep(1) + continue + + logger.info(f"Received {len(messages)} messages") + + # Process messages concurrently + tasks = [] + for message in messages: + task = process_order(message) + tasks.append((message.msg_id, task)) + + # Wait for all processing to complete + results = await asyncio.gather(*[t[1] for t in tasks], return_exceptions=True) + + # Delete successfully processed messages + for (msg_id, _), result in zip(tasks, results): + if isinstance(result, bool) and result: + await pgmq.delete(QUEUE_NAME, msg_id) + logger.info(f"Deleted message {msg_id}") + elif isinstance(result, Exception): + logger.error(f"Exception processing message {msg_id}: {result}") + else: + logger.warning(f"Message {msg_id} processing failed, will retry later") + + except KeyboardInterrupt: + logger.info("Received shutdown signal, stopping consumer...") + break + except Exception as e: + logger.error(f"Error in consumer loop: {e}") + await asyncio.sleep(5) + + +async def main(): + """Main entry point for the consumer.""" + # Initialize PGMQueue with async driver + pgmq = PGMQueue(dsn=DATABASE_URL) + + try: + # Start consuming messages + await consume_messages(pgmq, batch_size=10, vt=30) + finally: + logger.info("Consumer stopped") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/fastapi_pub_sub/requirements.txt b/examples/fastapi_pub_sub/requirements.txt new file mode 100644 index 0000000..f185598 --- /dev/null +++ b/examples/fastapi_pub_sub/requirements.txt @@ -0,0 +1,5 @@ 
+fastapi>=0.104.0 +uvicorn>=0.24.0 +psycopg2-binary>=2.9.9 +asyncpg>=0.29.0 +pgmq-sqlalchemy>=0.1.2 diff --git a/examples_tests/__init__.py b/examples_tests/__init__.py new file mode 100644 index 0000000..860d914 --- /dev/null +++ b/examples_tests/__init__.py @@ -0,0 +1 @@ +"""Tests for examples.""" diff --git a/examples_tests/conftest.py b/examples_tests/conftest.py new file mode 100644 index 0000000..efe8a15 --- /dev/null +++ b/examples_tests/conftest.py @@ -0,0 +1,40 @@ +"""Pytest configuration for examples tests.""" +import os +import pytest + + +def pytest_addoption(parser): + """Add custom command-line options for pytest.""" + parser.addoption( + "--db-name", + action="store", + default=None, + help="Specify the database name to use for testing", + ) + + +@pytest.fixture(scope="module") +def database_url(request): + """Get database URL from environment or CLI.""" + db_name = request.config.getoption("--db-name") + if not db_name: + db_name = os.getenv("SQLALCHEMY_DB", "postgres") + + host = os.getenv("SQLALCHEMY_HOST", "localhost") + port = os.getenv("SQLALCHEMY_PORT", "5432") + user = os.getenv("SQLALCHEMY_USER", "postgres") + password = os.getenv("SQLALCHEMY_PASSWORD", "postgres") + + return f"postgresql://{user}:{password}@{host}:{port}/{db_name}" + + +@pytest.fixture(scope="module") +def sync_database_url(database_url): + """Get sync database URL with psycopg2 driver.""" + return database_url.replace("postgresql://", "postgresql+psycopg2://") + + +@pytest.fixture(scope="module") +def async_database_url(database_url): + """Get async database URL with asyncpg driver.""" + return database_url.replace("postgresql://", "postgresql+asyncpg://") diff --git a/examples_tests/test_fastapi_pub_sub.py b/examples_tests/test_fastapi_pub_sub.py new file mode 100644 index 0000000..9f4b074 --- /dev/null +++ b/examples_tests/test_fastapi_pub_sub.py @@ -0,0 +1,204 @@ +"""Tests for FastAPI pub/sub example.""" +import asyncio +import time +import pytest +from 
fastapi.testclient import TestClient +from sqlalchemy import create_engine, text +from sqlalchemy.orm import sessionmaker + +from pgmq_sqlalchemy import PGMQueue + + +@pytest.fixture(scope="module") +def test_queue_name(): + """Return a unique queue name for testing.""" + return "test_order_queue" + + +@pytest.fixture(scope="module") +def setup_api_app(sync_database_url, test_queue_name): + """Setup the FastAPI app with test configuration.""" + # Import after fixture is set up + import sys + import os + + # Add examples directory to path + examples_dir = os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "examples", + "fastapi_pub_sub" + ) + sys.path.insert(0, examples_dir) + + # Import and configure api module + import api + + # Override configuration with test values + api.DATABASE_URL = sync_database_url + api.QUEUE_NAME = test_queue_name + + # Create new engine and session maker with test config + api.engine = create_engine(sync_database_url) + api.SessionLocal = sessionmaker(bind=api.engine, autocommit=False, autoflush=False) + + # Create tables and queue + api.Base.metadata.create_all(bind=api.engine) + + with api.SessionLocal() as session: + api.op.check_pgmq_ext(session=session, commit=True) + try: + api.op.create_queue(test_queue_name, session=session, commit=True) + except Exception: + pass + + yield api + + # Cleanup + with api.SessionLocal() as session: + # Drop the test queue + try: + api.op.drop_queue(test_queue_name, session=session, commit=True) + except Exception: + pass + + # Drop tables + session.execute(text("DROP TABLE IF EXISTS orders CASCADE")) + session.commit() + + # Remove from path + sys.path.remove(examples_dir) + + +@pytest.fixture(scope="module") +def client(setup_api_app): + """Create a test client for the FastAPI app.""" + return TestClient(setup_api_app.app) + + +def test_health_check(client): + """Test the health check endpoint.""" + response = client.get("/health") + assert response.status_code == 200 + assert 
response.json() == {"status": "ok"} + + +def test_create_order(client, sync_database_url, test_queue_name): + """Test creating an order via the API.""" + order_data = { + "customer_name": "John Doe", + "product_name": "Widget", + "quantity": 5, + "price": 29.99 + } + + response = client.post("/orders", json=order_data) + assert response.status_code == 201 + + data = response.json() + assert data["customer_name"] == order_data["customer_name"] + assert data["product_name"] == order_data["product_name"] + assert data["quantity"] == order_data["quantity"] + assert data["price"] == order_data["price"] + assert "id" in data + assert "message_id" in data + assert "created_at" in data + + # Verify message was published to queue + engine = create_engine(sync_database_url) + SessionLocal = sessionmaker(bind=engine) + + with SessionLocal() as session: + from pgmq_sqlalchemy import op + msg = op.read(test_queue_name, session=session, commit=True) + + assert msg is not None + assert msg.message["order_id"] == data["id"] + assert msg.message["customer_name"] == order_data["customer_name"] + + # Clean up message + op.delete(test_queue_name, msg.msg_id, session=session, commit=True) + + +def test_get_order(client): + """Test retrieving an order by ID.""" + # First create an order + order_data = { + "customer_name": "Jane Smith", + "product_name": "Gadget", + "quantity": 3, + "price": 49.99 + } + + create_response = client.post("/orders", json=order_data) + assert create_response.status_code == 201 + order_id = create_response.json()["id"] + + # Then retrieve it + get_response = client.get(f"/orders/{order_id}") + assert get_response.status_code == 200 + + data = get_response.json() + assert data["customer_name"] == order_data["customer_name"] + assert data["product_name"] == order_data["product_name"] + assert data["quantity"] == order_data["quantity"] + assert data["price"] == order_data["price"] + + +def test_get_nonexistent_order(client): + """Test retrieving a non-existent 
order.""" + response = client.get("/orders/999999") + assert response.status_code == 404 + assert response.json()["detail"] == "Order not found" + + +@pytest.mark.asyncio +async def test_consumer_processing(async_database_url, sync_database_url, test_queue_name): + """Test the async consumer processing messages.""" + # Create a test order message directly in the queue + engine = create_engine(sync_database_url) + SessionLocal = sessionmaker(bind=engine) + + test_message = { + "order_id": 12345, + "customer_name": "Test Customer", + "product_name": "Test Product", + "quantity": 10, + "price": 99.99, + "created_at": "2024-01-01T00:00:00" + } + + msg_id = None + with SessionLocal() as session: + from pgmq_sqlalchemy import op + msg_id = op.send(test_queue_name, test_message, session=session, commit=True) + + assert msg_id is not None + + # Now test consumer logic by reading and processing + pgmq = PGMQueue(dsn=async_database_url) + + # Read the message + messages = await pgmq.read_batch(test_queue_name, vt=30, batch_size=10) + assert len(messages) >= 1 + + # Find our test message + test_msg = None + for msg in messages: + if msg.message.get("order_id") == 12345: + test_msg = msg + break + + assert test_msg is not None + assert test_msg.message["customer_name"] == "Test Customer" + assert test_msg.message["product_name"] == "Test Product" + + # Simulate processing and deletion + await pgmq.delete(test_queue_name, test_msg.msg_id) + + # Verify message was deleted + time.sleep(1) # Wait a bit for deletion + remaining_messages = await pgmq.read_batch(test_queue_name, vt=30, batch_size=100) + + # Our message should not be in the remaining messages + for msg in remaining_messages: + assert msg.msg_id != test_msg.msg_id From 147f471c309c6d43b2c5b377c58f460c34929679 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:34:45 +0000 Subject: [PATCH 03/17] Fix tests for FastAPI pub/sub example 
Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples_tests/test_fastapi_pub_sub.py | 51 +++++++++++++++++++------- 1 file changed, 37 insertions(+), 14 deletions(-) diff --git a/examples_tests/test_fastapi_pub_sub.py b/examples_tests/test_fastapi_pub_sub.py index 9f4b074..ccd5683 100644 --- a/examples_tests/test_fastapi_pub_sub.py +++ b/examples_tests/test_fastapi_pub_sub.py @@ -109,7 +109,7 @@ def test_create_order(client, sync_database_url, test_queue_name): with SessionLocal() as session: from pgmq_sqlalchemy import op - msg = op.read(test_queue_name, session=session, commit=True) + msg = op.read(test_queue_name, vt=30, session=session, commit=True) assert msg is not None assert msg.message["order_id"] == data["id"] @@ -155,8 +155,22 @@ def test_get_nonexistent_order(client): async def test_consumer_processing(async_database_url, sync_database_url, test_queue_name): """Test the async consumer processing messages.""" # Create a test order message directly in the queue - engine = create_engine(sync_database_url) - SessionLocal = sessionmaker(bind=engine) + from sqlalchemy import create_engine as sync_create_engine + from sqlalchemy.orm import sessionmaker as sync_sessionmaker + from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + from sqlalchemy.orm import sessionmaker as async_sessionmaker + from pgmq_sqlalchemy import op + + engine = sync_create_engine(sync_database_url) + SessionLocal = sync_sessionmaker(bind=engine) + + # Create the queue first + with SessionLocal() as session: + op.check_pgmq_ext(session=session, commit=True) + try: + op.create_queue(test_queue_name, session=session, commit=True) + except Exception: + pass # Queue might already exist test_message = { "order_id": 12345, @@ -169,16 +183,18 @@ async def test_consumer_processing(async_database_url, sync_database_url, test_q msg_id = None with SessionLocal() as session: - from pgmq_sqlalchemy import op msg_id = op.send(test_queue_name, 
test_message, session=session, commit=True) assert msg_id is not None - # Now test consumer logic by reading and processing - pgmq = PGMQueue(dsn=async_database_url) + # Now test consumer logic by reading and processing with async operations + async_engine = create_async_engine(async_database_url) + async_session_maker = async_sessionmaker(bind=async_engine, class_=AsyncSession) + + # Read the message using async operations directly + async with async_session_maker() as session: + messages = await op.read_batch_async(test_queue_name, vt=30, batch_size=10, session=session, commit=True) - # Read the message - messages = await pgmq.read_batch(test_queue_name, vt=30, batch_size=10) assert len(messages) >= 1 # Find our test message @@ -193,12 +209,19 @@ async def test_consumer_processing(async_database_url, sync_database_url, test_q assert test_msg.message["product_name"] == "Test Product" # Simulate processing and deletion - await pgmq.delete(test_queue_name, test_msg.msg_id) + async with async_session_maker() as session: + deleted = await op.delete_async(test_queue_name, test_msg.msg_id, session=session, commit=True) + assert deleted is True # Verify message was deleted - time.sleep(1) # Wait a bit for deletion - remaining_messages = await pgmq.read_batch(test_queue_name, vt=30, batch_size=100) + await asyncio.sleep(1) # Wait a bit for deletion + async with async_session_maker() as session: + remaining_messages = await op.read_batch_async(test_queue_name, vt=30, batch_size=100, session=session, commit=True) - # Our message should not be in the remaining messages - for msg in remaining_messages: - assert msg.msg_id != test_msg.msg_id + # Our message should not be in the remaining messages (if any) + if remaining_messages: + for msg in remaining_messages: + assert msg.msg_id != test_msg.msg_id + + # Cleanup + await async_engine.dispose() From a256a1876365394562fda43502112c1824d41702 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" 
<198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:36:54 +0000 Subject: [PATCH 04/17] Fix consumer to work with async event loop properly Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples/fastapi_pub_sub/consumer.py | 32 ++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py index 0a65ee1..d66ef7a 100644 --- a/examples/fastapi_pub_sub/consumer.py +++ b/examples/fastapi_pub_sub/consumer.py @@ -60,13 +60,16 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): batch_size: Number of messages to read in each batch vt: Visibility timeout in seconds """ + from pgmq_sqlalchemy import op + logger.info(f"Starting consumer for queue: {QUEUE_NAME}") logger.info(f"Batch size: {batch_size}, Visibility timeout: {vt}s") while True: try: # Read a batch of messages - messages = await pgmq.read_batch(QUEUE_NAME, vt=vt, batch_size=batch_size) + async with pgmq.session_maker() as session: + messages = await op.read_batch_async(QUEUE_NAME, vt=vt, batch_size=batch_size, session=session, commit=True) if not messages: logger.debug("No messages available, waiting...") @@ -87,7 +90,8 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): # Delete successfully processed messages for (msg_id, _), result in zip(tasks, results): if isinstance(result, bool) and result: - await pgmq.delete(QUEUE_NAME, msg_id) + async with pgmq.session_maker() as session: + await op.delete_async(QUEUE_NAME, msg_id, session=session, commit=True) logger.info(f"Deleted message {msg_id}") elif isinstance(result, Exception): logger.error(f"Exception processing message {msg_id}: {result}") @@ -104,14 +108,34 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): async def main(): """Main entry point for the consumer.""" - # Initialize PGMQueue with async driver - pgmq = 
PGMQueue(dsn=DATABASE_URL) + # Initialize PGMQueue with async session maker + from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + from sqlalchemy.orm import sessionmaker + + async_engine = create_async_engine(DATABASE_URL) + async_session_maker = sessionmaker(bind=async_engine, class_=AsyncSession) + + # Create PGMQueue instance manually to avoid event loop issues + pgmq = PGMQueue.__new__(PGMQueue) + pgmq.engine = async_engine + pgmq.session_maker = async_session_maker + pgmq.is_async = True + pgmq.delay = 0 + pgmq.vt = 30 + pgmq.loop = None + pgmq.is_pg_partman_ext_checked = True + + # Check PGMQ extension manually + async with async_session_maker() as session: + from pgmq_sqlalchemy import op + await op.check_pgmq_ext_async(session=session, commit=True) try: # Start consuming messages await consume_messages(pgmq, batch_size=10, vt=30) finally: logger.info("Consumer stopped") + await async_engine.dispose() if __name__ == "__main__": From aca65a35737c8f02bc11c6eee8f568743a90226c Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:39:09 +0000 Subject: [PATCH 05/17] Address code review feedback: use lifespan, improve exception handling Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples/fastapi_pub_sub/api.py | 49 ++++++++++++++------------ examples/fastapi_pub_sub/consumer.py | 6 +++- examples_tests/test_fastapi_pub_sub.py | 17 +++++---- 3 files changed, 42 insertions(+), 30 deletions(-) diff --git a/examples/fastapi_pub_sub/api.py b/examples/fastapi_pub_sub/api.py index 4a8a374..0c11fa4 100644 --- a/examples/fastapi_pub_sub/api.py +++ b/examples/fastapi_pub_sub/api.py @@ -6,10 +6,10 @@ - Creating orders and sending them to a message queue """ from typing import Generator -from contextlib import contextmanager +from contextlib import contextmanager, asynccontextmanager from fastapi import FastAPI, Depends, HTTPException -from pydantic 
import BaseModel +from pydantic import BaseModel, ConfigDict from sqlalchemy import create_engine, Column, Integer, String, Float, DateTime from sqlalchemy.orm import Session, sessionmaker, declarative_base from datetime import datetime @@ -47,6 +47,8 @@ class OrderCreate(BaseModel): class OrderResponse(BaseModel): + model_config = ConfigDict(from_attributes=True) + id: int customer_name: str product_name: str @@ -55,40 +57,41 @@ class OrderResponse(BaseModel): created_at: datetime message_id: int - class Config: - from_attributes = True - - -# FastAPI app -app = FastAPI(title="Order Management with PGMQ") - - -# Database dependency -def get_db() -> Generator[Session, None, None]: - """Database session dependency.""" - db = SessionLocal() - try: - yield db - finally: - db.close() - -@app.on_event("startup") -def startup_event(): +# Lifespan context manager for startup/shutdown +@asynccontextmanager +async def lifespan(app: FastAPI): """Initialize database tables and PGMQ queue on startup.""" - # Create tables if they don't exist + # Startup Base.metadata.create_all(bind=engine) # Initialize PGMQ queue with SessionLocal() as session: op.check_pgmq_ext(session=session, commit=True) - # Create queue if it doesn't exist (will not fail if exists) + # Create queue if it doesn't exist try: op.create_queue(QUEUE_NAME, session=session, commit=True) except Exception: # Queue might already exist, which is fine pass + + yield + + # Shutdown (if needed) + + +# FastAPI app with lifespan +app = FastAPI(title="Order Management with PGMQ", lifespan=lifespan) + +# Database dependency +def get_db() -> Generator[Session, None, None]: + """Database session dependency.""" + db = SessionLocal() + try: + yield db + finally: + db.close() @app.post("/orders", response_model=OrderResponse, status_code=201) diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py index d66ef7a..5d9b97c 100644 --- a/examples/fastapi_pub_sub/consumer.py +++ 
b/examples/fastapi_pub_sub/consumer.py @@ -115,7 +115,11 @@ async def main(): async_engine = create_async_engine(DATABASE_URL) async_session_maker = sessionmaker(bind=async_engine, class_=AsyncSession) - # Create PGMQueue instance manually to avoid event loop issues + # Note: Manual PGMQueue setup to avoid event loop conflicts + # PGMQueue.__init__ tries to run a nested event loop which conflicts + # with asyncio.run(). This is a known limitation when using PGMQueue + # in an async context manager like asyncio.run(). + # For proper usage, consider using PGMQOperation methods directly with sessions. pgmq = PGMQueue.__new__(PGMQueue) pgmq.engine = async_engine pgmq.session_maker = async_session_maker diff --git a/examples_tests/test_fastapi_pub_sub.py b/examples_tests/test_fastapi_pub_sub.py index ccd5683..4828c31 100644 --- a/examples_tests/test_fastapi_pub_sub.py +++ b/examples_tests/test_fastapi_pub_sub.py @@ -48,8 +48,10 @@ def setup_api_app(sync_database_url, test_queue_name): api.op.check_pgmq_ext(session=session, commit=True) try: api.op.create_queue(test_queue_name, session=session, commit=True) - except Exception: - pass + except Exception as e: + # Queue already exists from a previous test run + import logging + logging.warning(f"Could not create queue (may already exist): {e}") yield api @@ -58,8 +60,9 @@ def setup_api_app(sync_database_url, test_queue_name): # Drop the test queue try: api.op.drop_queue(test_queue_name, session=session, commit=True) - except Exception: - pass + except Exception as e: + import logging + logging.warning(f"Could not drop queue: {e}") # Drop tables session.execute(text("DROP TABLE IF EXISTS orders CASCADE")) @@ -169,8 +172,10 @@ async def test_consumer_processing(async_database_url, sync_database_url, test_q op.check_pgmq_ext(session=session, commit=True) try: op.create_queue(test_queue_name, session=session, commit=True) - except Exception: - pass # Queue might already exist + except Exception as e: + # Queue already exists 
from a previous test run + import logging + logging.warning(f"Could not create queue (may already exist): {e}") test_message = { "order_id": 12345, From 0a6d22e222032ce512c01fc697ad775154c4fbb5 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 3 Jan 2026 16:40:59 +0000 Subject: [PATCH 06/17] Add workflow permissions to examples GitHub Action Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- .github/workflows/examples.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 12c10ff..500ff30 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -19,6 +19,10 @@ on: jobs: test-examples: runs-on: ubuntu-latest + + permissions: + contents: read + strategy: matrix: python-version: ["3.9", "3.10", "3.11", "3.12"] From 0d2e4bf3b1987cb84c6557a30816f6d2d92fc295 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 04:23:56 +0000 Subject: [PATCH 07/17] Address review comments: fix transaction, use pgmq methods, add messages endpoint, reorganize tests Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples/fastapi_pub_sub/README.md | 11 +- examples/fastapi_pub_sub/api.py | 51 ++++- examples/fastapi_pub_sub/consumer.py | 43 ++-- examples_tests/integration/__init__.py | 1 + .../integration/test_fastapi_integration.py | 186 ++++++++++++++++++ examples_tests/unit/__init__.py | 1 + .../{ => unit}/test_fastapi_pub_sub.py | 2 +- 7 files changed, 257 insertions(+), 38 deletions(-) create mode 100644 examples_tests/integration/__init__.py create mode 100644 examples_tests/integration/test_fastapi_integration.py create mode 100644 examples_tests/unit/__init__.py rename examples_tests/{ => unit}/test_fastapi_pub_sub.py (99%) diff --git a/examples/fastapi_pub_sub/README.md 
b/examples/fastapi_pub_sub/README.md index e28d7ab..51ae5e8 100644 --- a/examples/fastapi_pub_sub/README.md +++ b/examples/fastapi_pub_sub/README.md @@ -30,10 +30,17 @@ docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io ## Installation -Install required dependencies: +Install required dependencies using uv: ```bash -pip install -r requirements.txt +uv pip install fastapi uvicorn psycopg2-binary asyncpg pgmq-sqlalchemy +``` + +Or install from the project root with uv: + +```bash +cd /path/to/pgmq-sqlalchemy +uv pip install -e ".[psycopg2-binary,asyncpg]" ``` ## Running the Example diff --git a/examples/fastapi_pub_sub/api.py b/examples/fastapi_pub_sub/api.py index 0c11fa4..9383ff7 100644 --- a/examples/fastapi_pub_sub/api.py +++ b/examples/fastapi_pub_sub/api.py @@ -5,6 +5,7 @@ - Publishing messages to PGMQ using PGMQOperation (op) - Creating orders and sending them to a message queue """ +import os from typing import Generator from contextlib import contextmanager, asynccontextmanager @@ -16,9 +17,9 @@ from pgmq_sqlalchemy import op -# Database configuration -DATABASE_URL = "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres" -QUEUE_NAME = "order_queue" +# Database configuration - can be overridden by environment variables +DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres") +QUEUE_NAME = os.getenv("QUEUE_NAME", "order_queue") # SQLAlchemy setup engine = create_engine(DATABASE_URL) @@ -113,10 +114,9 @@ def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): price=order_data.price, ) db.add(db_order) - db.commit() - db.refresh(db_order) + db.flush() # Flush to get the ID without committing - # Publish message to PGMQ using op + # Publish message to PGMQ using op in the same transaction message_data = { "order_id": db_order.id, "customer_name": db_order.customer_name, @@ -126,7 +126,11 @@ def create_order(order_data: OrderCreate, db: Session = 
Depends(get_db)): "created_at": db_order.created_at.isoformat(), } - msg_id = op.send(QUEUE_NAME, message_data, session=db, commit=True) + msg_id = op.send(QUEUE_NAME, message_data, session=db, commit=False) + + # Commit both order and message in the same transaction + db.commit() + db.refresh(db_order) # Return order with message ID return OrderResponse( @@ -157,6 +161,39 @@ def get_order(order_id: int, db: Session = Depends(get_db)): return order +@app.get("/messages") +def get_messages(limit: int = 10, db: Session = Depends(get_db)): + """Read messages from the PGMQ queue. + + Args: + limit: Number of messages to read (default: 10) + db: Database session + + Returns: + List of messages from the queue + """ + from pgmq_sqlalchemy.schema import Message + from typing import List + + messages = op.read_batch(QUEUE_NAME, vt=30, batch_size=limit, session=db, commit=True) + + if not messages: + return {"messages": []} + + return { + "messages": [ + { + "msg_id": msg.msg_id, + "read_ct": msg.read_ct, + "enqueued_at": msg.enqueued_at.isoformat(), + "vt": msg.vt.isoformat(), + "message": msg.message, + } + for msg in messages + ] + } + + @app.get("/health") def health_check(): """Health check endpoint.""" diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py index 5d9b97c..8889606 100644 --- a/examples/fastapi_pub_sub/consumer.py +++ b/examples/fastapi_pub_sub/consumer.py @@ -8,6 +8,7 @@ """ import asyncio import logging +import os from typing import Optional from pgmq_sqlalchemy import PGMQueue @@ -20,9 +21,9 @@ ) logger = logging.getLogger(__name__) -# Database configuration -DATABASE_URL = "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres" -QUEUE_NAME = "order_queue" +# Database configuration - can be overridden by environment variables +DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres") +QUEUE_NAME = os.getenv("QUEUE_NAME", "order_queue") async def 
process_order(message: Message) -> bool: @@ -60,16 +61,14 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): batch_size: Number of messages to read in each batch vt: Visibility timeout in seconds """ - from pgmq_sqlalchemy import op - logger.info(f"Starting consumer for queue: {QUEUE_NAME}") logger.info(f"Batch size: {batch_size}, Visibility timeout: {vt}s") while True: try: - # Read a batch of messages + # Read a batch of messages using pgmq instance method async with pgmq.session_maker() as session: - messages = await op.read_batch_async(QUEUE_NAME, vt=vt, batch_size=batch_size, session=session, commit=True) + messages = await pgmq.read_batch_async(QUEUE_NAME, vt=vt, batch_size=batch_size, session=session, commit=True) if not messages: logger.debug("No messages available, waiting...") @@ -87,12 +86,13 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): # Wait for all processing to complete results = await asyncio.gather(*[t[1] for t in tasks], return_exceptions=True) - # Delete successfully processed messages + # Delete successfully processed messages using pgmq instance method for (msg_id, _), result in zip(tasks, results): if isinstance(result, bool) and result: async with pgmq.session_maker() as session: - await op.delete_async(QUEUE_NAME, msg_id, session=session, commit=True) - logger.info(f"Deleted message {msg_id}") + deleted = await pgmq.delete_async(QUEUE_NAME, msg_id, session=session, commit=True) + if deleted: + logger.info(f"Deleted message {msg_id}") elif isinstance(result, Exception): logger.error(f"Exception processing message {msg_id}: {result}") else: @@ -108,31 +108,18 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): async def main(): """Main entry point for the consumer.""" - # Initialize PGMQueue with async session maker + # Initialize PGMQueue with async session maker and event loop from sqlalchemy.ext.asyncio import create_async_engine, 
AsyncSession from sqlalchemy.orm import sessionmaker async_engine = create_async_engine(DATABASE_URL) async_session_maker = sessionmaker(bind=async_engine, class_=AsyncSession) - # Note: Manual PGMQueue setup to avoid event loop conflicts - # PGMQueue.__init__ tries to run a nested event loop which conflicts - # with asyncio.run(). This is a known limitation when using PGMQueue - # in an async context manager like asyncio.run(). - # For proper usage, consider using PGMQOperation methods directly with sessions. - pgmq = PGMQueue.__new__(PGMQueue) - pgmq.engine = async_engine - pgmq.session_maker = async_session_maker - pgmq.is_async = True - pgmq.delay = 0 - pgmq.vt = 30 - pgmq.loop = None - pgmq.is_pg_partman_ext_checked = True + # Get the current event loop to pass to PGMQueue + loop = asyncio.get_event_loop() - # Check PGMQ extension manually - async with async_session_maker() as session: - from pgmq_sqlalchemy import op - await op.check_pgmq_ext_async(session=session, commit=True) + # Initialize PGMQueue with the event loop to avoid conflicts + pgmq = PGMQueue(session_maker=async_session_maker, loop=loop) try: # Start consuming messages diff --git a/examples_tests/integration/__init__.py b/examples_tests/integration/__init__.py new file mode 100644 index 0000000..999fec0 --- /dev/null +++ b/examples_tests/integration/__init__.py @@ -0,0 +1 @@ +"""Integration tests for examples.""" diff --git a/examples_tests/integration/test_fastapi_integration.py b/examples_tests/integration/test_fastapi_integration.py new file mode 100644 index 0000000..212add8 --- /dev/null +++ b/examples_tests/integration/test_fastapi_integration.py @@ -0,0 +1,186 @@ +"""Integration tests for FastAPI pub/sub example with subprocess.""" +import asyncio +import os +import subprocess +import sys +import time +import signal +import pytest +import requests +from sqlalchemy import create_engine, text +from sqlalchemy.orm import sessionmaker + + +@pytest.fixture(scope="module") +def examples_dir(): 
+ """Return the path to the examples directory.""" + return os.path.join( + os.path.dirname(os.path.dirname(os.path.dirname(__file__))), + "examples", + "fastapi_pub_sub" + ) + + +@pytest.fixture(scope="module") +def test_queue_name(): + """Return a unique queue name for testing.""" + return "test_integration_order_queue" + + +@pytest.fixture(scope="module") +def database_url(request): + """Get database URL from environment or CLI.""" + db_name = request.config.getoption("--db-name") + if not db_name: + db_name = os.getenv("SQLALCHEMY_DB", "postgres") + + host = os.getenv("SQLALCHEMY_HOST", "localhost") + port = os.getenv("SQLALCHEMY_PORT", "5432") + user = os.getenv("SQLALCHEMY_USER", "postgres") + password = os.getenv("SQLALCHEMY_PASSWORD", "postgres") + + return f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db_name}" + + +@pytest.fixture(scope="module", autouse=True) +def api_instance(examples_dir, database_url, test_queue_name): + """Fixture to spin up the API server as a subprocess.""" + # Update the API to use test queue + api_py = os.path.join(examples_dir, "api.py") + + # Set environment variables for the subprocess + env = os.environ.copy() + env["DATABASE_URL"] = database_url + env["QUEUE_NAME"] = test_queue_name + + # Start the API server + process = subprocess.Popen( + [sys.executable, api_py], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + preexec_fn=os.setsid if hasattr(os, 'setsid') else None + ) + + # Wait for the server to start + max_attempts = 30 + for i in range(max_attempts): + try: + response = requests.get("http://localhost:8000/health", timeout=1) + if response.status_code == 200: + break + except requests.exceptions.RequestException: + time.sleep(1) + else: + # Kill the process if it didn't start + if hasattr(os, 'killpg'): + os.killpg(os.getpgid(process.pid), signal.SIGTERM) + else: + process.terminate() + pytest.fail("API server failed to start") + + yield process + + # Teardown: kill the API server + if 
hasattr(os, 'killpg'): + os.killpg(os.getpgid(process.pid), signal.SIGTERM) + else: + process.terminate() + process.wait(timeout=10) + + +@pytest.fixture(scope="module", autouse=True) +def consumer_instance(examples_dir, database_url, test_queue_name, api_instance): + """Fixture to spin up the consumer as a subprocess.""" + # Update the consumer to use test queue + consumer_py = os.path.join(examples_dir, "consumer.py") + + # Set environment variables for the subprocess + env = os.environ.copy() + env["DATABASE_URL"] = database_url + env["QUEUE_NAME"] = test_queue_name + + # Start the consumer + process = subprocess.Popen( + [sys.executable, consumer_py], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=env, + preexec_fn=os.setsid if hasattr(os, 'setsid') else None + ) + + # Give the consumer some time to start + time.sleep(3) + + yield process + + # Teardown: kill the consumer + if hasattr(os, 'killpg'): + os.killpg(os.getpgid(process.pid), signal.SIGTERM) + else: + process.terminate() + process.wait(timeout=10) + + +def test_api_consumer_integration(api_instance, consumer_instance, database_url): + """Test creating 100 orders parallelly and waiting for consumer to process them all.""" + import concurrent.futures + + # Create 100 orders in parallel + num_orders = 100 + + def create_order(order_num): + """Helper function to create a single order.""" + order_data = { + "customer_name": f"Customer {order_num}", + "product_name": f"Product {order_num}", + "quantity": order_num % 10 + 1, + "price": 10.0 + (order_num % 50) + } + response = requests.post("http://localhost:8000/orders", json=order_data, timeout=5) + return response.status_code == 201, response.json() if response.status_code == 201 else None + + # Create orders in parallel + with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: + futures = [executor.submit(create_order, i) for i in range(num_orders)] + results = [future.result() for future in 
concurrent.futures.as_completed(futures)] + + # Check that all orders were created successfully + successful_orders = sum(1 for success, _ in results if success) + assert successful_orders == num_orders, f"Only {successful_orders}/{num_orders} orders were created" + + # Wait for the consumer to process all messages + # Check the queue periodically until it's empty + engine = create_engine(database_url) + SessionLocal = sessionmaker(bind=engine) + + max_wait = 120 # Wait up to 2 minutes + start_time = time.time() + + while time.time() - start_time < max_wait: + # Check queue metrics to see if there are any messages left + with SessionLocal() as session: + from pgmq_sqlalchemy import op + # Get the test queue name from environment or use default + test_queue = os.getenv("QUEUE_NAME", "test_integration_order_queue") + + try: + metrics = op.metrics(test_queue, session=session, commit=True) + if metrics.queue_length == 0: + # All messages have been processed + break + except Exception as e: + # Queue might not exist yet or other error + print(f"Error checking metrics: {e}") + + time.sleep(2) + else: + pytest.fail(f"Consumer did not process all messages within {max_wait} seconds") + + # Verify that all messages were processed + with SessionLocal() as session: + test_queue = os.getenv("QUEUE_NAME", "test_integration_order_queue") + metrics = op.metrics(test_queue, session=session, commit=True) + assert metrics.queue_length == 0, f"Queue still has {metrics.queue_length} messages" + # The total_messages should be at least num_orders (could be more if retries happened) + assert metrics.total_messages >= num_orders, f"Expected at least {num_orders} total messages, got {metrics.total_messages}" diff --git a/examples_tests/unit/__init__.py b/examples_tests/unit/__init__.py new file mode 100644 index 0000000..08dbf30 --- /dev/null +++ b/examples_tests/unit/__init__.py @@ -0,0 +1 @@ +"""Unit tests for examples.""" diff --git a/examples_tests/test_fastapi_pub_sub.py 
b/examples_tests/unit/test_fastapi_pub_sub.py similarity index 99% rename from examples_tests/test_fastapi_pub_sub.py rename to examples_tests/unit/test_fastapi_pub_sub.py index 4828c31..3831119 100644 --- a/examples_tests/test_fastapi_pub_sub.py +++ b/examples_tests/unit/test_fastapi_pub_sub.py @@ -24,7 +24,7 @@ def setup_api_app(sync_database_url, test_queue_name): # Add examples directory to path examples_dir = os.path.join( - os.path.dirname(os.path.dirname(__file__)), + os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "examples", "fastapi_pub_sub" ) From 8c40f420f7026520d75afbda9635ae87a1269c89 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 04:26:34 +0000 Subject: [PATCH 08/17] Fix code review issues: move imports to top of file and remove unused imports Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples/fastapi_pub_sub/api.py | 3 --- examples_tests/integration/test_fastapi_integration.py | 3 ++- examples_tests/unit/test_fastapi_pub_sub.py | 3 +-- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/examples/fastapi_pub_sub/api.py b/examples/fastapi_pub_sub/api.py index 9383ff7..75a1a48 100644 --- a/examples/fastapi_pub_sub/api.py +++ b/examples/fastapi_pub_sub/api.py @@ -172,9 +172,6 @@ def get_messages(limit: int = 10, db: Session = Depends(get_db)): Returns: List of messages from the queue """ - from pgmq_sqlalchemy.schema import Message - from typing import List - messages = op.read_batch(QUEUE_NAME, vt=30, batch_size=limit, session=db, commit=True) if not messages: diff --git a/examples_tests/integration/test_fastapi_integration.py b/examples_tests/integration/test_fastapi_integration.py index 212add8..c550713 100644 --- a/examples_tests/integration/test_fastapi_integration.py +++ b/examples_tests/integration/test_fastapi_integration.py @@ -10,6 +10,8 @@ from sqlalchemy import create_engine, text from sqlalchemy.orm import 
sessionmaker +from pgmq_sqlalchemy import op + @pytest.fixture(scope="module") def examples_dir(): @@ -160,7 +162,6 @@ def create_order(order_num): while time.time() - start_time < max_wait: # Check queue metrics to see if there are any messages left with SessionLocal() as session: - from pgmq_sqlalchemy import op # Get the test queue name from environment or use default test_queue = os.getenv("QUEUE_NAME", "test_integration_order_queue") diff --git a/examples_tests/unit/test_fastapi_pub_sub.py b/examples_tests/unit/test_fastapi_pub_sub.py index 3831119..1243f7f 100644 --- a/examples_tests/unit/test_fastapi_pub_sub.py +++ b/examples_tests/unit/test_fastapi_pub_sub.py @@ -6,7 +6,7 @@ from sqlalchemy import create_engine, text from sqlalchemy.orm import sessionmaker -from pgmq_sqlalchemy import PGMQueue +from pgmq_sqlalchemy import PGMQueue, op @pytest.fixture(scope="module") @@ -111,7 +111,6 @@ def test_create_order(client, sync_database_url, test_queue_name): SessionLocal = sessionmaker(bind=engine) with SessionLocal() as session: - from pgmq_sqlalchemy import op msg = op.read(test_queue_name, vt=30, session=session, commit=True) assert msg is not None From 01992d89bec87c358d5f8f02c7816886815c303b Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 04:41:18 +0000 Subject: [PATCH 09/17] Simplify consumer code and add pyproject.toml for example Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- examples/fastapi_pub_sub/README.md | 9 ++++++++- examples/fastapi_pub_sub/consumer.py | 6 ++---- examples/fastapi_pub_sub/pyproject.toml | 17 +++++++++++++++++ 3 files changed, 27 insertions(+), 5 deletions(-) create mode 100644 examples/fastapi_pub_sub/pyproject.toml diff --git a/examples/fastapi_pub_sub/README.md b/examples/fastapi_pub_sub/README.md index 51ae5e8..3a54757 100644 --- a/examples/fastapi_pub_sub/README.md +++ b/examples/fastapi_pub_sub/README.md @@ -30,7 +30,14 
@@ docker run -d --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 quay.io ## Installation -Install required dependencies using uv: +Install required dependencies using uv with the example's pyproject.toml: + +```bash +cd examples/fastapi_pub_sub +uv pip install -e . +``` + +Or install dependencies directly: ```bash uv pip install fastapi uvicorn psycopg2-binary asyncpg pgmq-sqlalchemy diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py index 8889606..03efbb9 100644 --- a/examples/fastapi_pub_sub/consumer.py +++ b/examples/fastapi_pub_sub/consumer.py @@ -67,8 +67,7 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): while True: try: # Read a batch of messages using pgmq instance method - async with pgmq.session_maker() as session: - messages = await pgmq.read_batch_async(QUEUE_NAME, vt=vt, batch_size=batch_size, session=session, commit=True) + messages = await pgmq.read_batch(QUEUE_NAME, vt=vt, batch_size=batch_size) if not messages: logger.debug("No messages available, waiting...") @@ -89,8 +88,7 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): # Delete successfully processed messages using pgmq instance method for (msg_id, _), result in zip(tasks, results): if isinstance(result, bool) and result: - async with pgmq.session_maker() as session: - deleted = await pgmq.delete_async(QUEUE_NAME, msg_id, session=session, commit=True) + deleted = await pgmq.delete(QUEUE_NAME, msg_id) if deleted: logger.info(f"Deleted message {msg_id}") elif isinstance(result, Exception): diff --git a/examples/fastapi_pub_sub/pyproject.toml b/examples/fastapi_pub_sub/pyproject.toml new file mode 100644 index 0000000..7865352 --- /dev/null +++ b/examples/fastapi_pub_sub/pyproject.toml @@ -0,0 +1,17 @@ +[project] +name = "fastapi-pub-sub-example" +version = "0.1.0" +description = "FastAPI pub/sub example using pgmq-sqlalchemy" +readme = "README.md" +requires-python = ">=3.9" 
+dependencies = [ + "fastapi>=0.104.0", + "uvicorn>=0.24.0", + "psycopg2-binary>=2.9.9", + "asyncpg>=0.29.0", + "pgmq-sqlalchemy>=0.1.2", +] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" From 71a58686f546480f5ccb1751e49a9f1e31304db2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 4 Jan 2026 04:44:47 +0000 Subject: [PATCH 10/17] Update pytest flags to -vss for better test output visibility Co-authored-by: jason810496 <68415893+jason810496@users.noreply.github.com> --- .github/workflows/examples.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 500ff30..f4fecaf 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -60,7 +60,7 @@ jobs: - name: Run examples tests run: | - uv run pytest examples_tests --cov=examples --cov-report=xml:coverage-examples-py${{ matrix.python-version }}.xml -v + uv run pytest examples_tests --cov=examples --cov-report=xml:coverage-examples-py${{ matrix.python-version }}.xml -vss - name: Upload coverage artifact if: always() From d351aeffd2fecf47541fd6b6c4b3aa90a81d00eb Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Mon, 5 Jan 2026 13:48:46 +0800 Subject: [PATCH 11/17] Add pre-commit hooks and scripts for async method checks in PGMQueue Distinguish sync and async operations in PGMQueue - Introduced a pre-commit hook to check for missing async methods in PGMQueue. - Added scripts to identify and generate missing async methods. - Created utility functions for AST manipulation and method transformation. - Established configuration for project paths and console output. 
--- .pre-commit-config.yaml | 8 +- pgmq_sqlalchemy/queue.py | 102 ++++++-------- .../check_sync_async_method_for_queue.py | 55 ++++++++ scripts/compelete_missing_async_methods.py | 96 ++++++++++++++ scripts/scripts_utils/__init__.py | 0 scripts/scripts_utils/common_ast.py | 124 ++++++++++++++++++ scripts/scripts_utils/config.py | 5 + scripts/scripts_utils/console.py | 18 +++ scripts/scripts_utils/formatting.py | 24 ++++ scripts/scripts_utils/queue_ast.py | 98 ++++++++++++++ 10 files changed, 464 insertions(+), 66 deletions(-) create mode 100644 scripts/ci/pre_commit/check_sync_async_method_for_queue.py create mode 100644 scripts/compelete_missing_async_methods.py create mode 100644 scripts/scripts_utils/__init__.py create mode 100644 scripts/scripts_utils/common_ast.py create mode 100644 scripts/scripts_utils/config.py create mode 100644 scripts/scripts_utils/console.py create mode 100644 scripts/scripts_utils/formatting.py create mode 100644 scripts/scripts_utils/queue_ast.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89809dd..005a812 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,4 +7,10 @@ repos: - id: ruff args: [ --fix ] # Run the formatter. 
- - id: ruff-format \ No newline at end of file + - id: ruff-format + - repo: local + hooks: + - id: check-sync-async-method-for-queue + name: Check sync/async method for queue + entry: ./scripts/ci/pre_commit/check_sync_async_method_for_queue.py + language: python \ No newline at end of file diff --git a/pgmq_sqlalchemy/queue.py b/pgmq_sqlalchemy/queue.py index 7be8fc1..2580dc4 100644 --- a/pgmq_sqlalchemy/queue.py +++ b/pgmq_sqlalchemy/queue.py @@ -1,9 +1,11 @@ import asyncio -from typing import List, Optional +from typing import List, Optional, TYPE_CHECKING from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from sqlalchemy.ext.asyncio import create_async_engine +from asgiref.sync import async_to_sync + from .schema import Message, QueueMetrics from ._types import ENGINE_TYPE, SESSION_TYPE @@ -14,6 +16,10 @@ ) from .operation import PGMQOperation +if TYPE_CHECKING: + from sqlalchemy.orm import Session + from sqlalchemy.ext.asyncio import AsyncSession + class PGMQueue: engine: ENGINE_TYPE = None @@ -118,47 +124,20 @@ def __init__( # create pgmq extension if not exists self._check_pgmq_ext() - async def _check_pgmq_ext_async(self) -> None: - """Check if the pgmq extension exists.""" - async with self.session_maker() as session: - await PGMQOperation.check_pgmq_ext_async(session=session, commit=True) - - def _check_pgmq_ext_sync(self) -> None: - """Check if the pgmq extension exists.""" - with self.session_maker() as session: - PGMQOperation.check_pgmq_ext(session=session, commit=True) - def _check_pgmq_ext(self) -> None: """Check if the pgmq extension exists.""" - if self.is_async: - return self.loop.run_until_complete(self._check_pgmq_ext_async()) - return self._check_pgmq_ext_sync() - - async def _check_pg_partman_ext_async(self) -> None: - """Check if the pg_partman extension exists.""" - async with self.session_maker() as session: - await PGMQOperation.check_pg_partman_ext_async(session=session, commit=True) - - def 
_check_pg_partman_ext_sync(self) -> None: - """Check if the pg_partman extension exists.""" - with self.session_maker() as session: - PGMQOperation.check_pg_partman_ext(session=session, commit=True) + self._execute_operation(PGMQOperation.check_pgmq_ext, session=None, commit=True) def _check_pg_partman_ext(self) -> None: """Check if the pg_partman extension exists.""" - if self.is_pg_partman_ext_checked: - return - self.is_pg_partman_ext_checked - - if self.is_async: - return self.loop.run_until_complete(self._check_pg_partman_ext_async()) - return self._check_pg_partman_ext_sync() + self._execute_operation( + PGMQOperation.check_pg_partman_ext, session=None, commit=True + ) def _execute_operation( self, op_sync, - op_async, - session: Optional[SESSION_TYPE], + session: Optional["Session"], commit: bool, *args, **kwargs, @@ -167,7 +146,6 @@ def _execute_operation( Args: op_sync: The synchronous operation function from PGMQOperation - op_async: The asynchronous operation function from PGMQOperation session: Optional session to use (if None, creates a new one) commit: Whether to commit the transaction *args: Positional arguments to pass to the operation @@ -176,23 +154,36 @@ def _execute_operation( Returns: The result from the operation """ - if self.is_async: - if session is None: - - async def _run(): - async with self.session_maker() as s: - return await op_async(*args, session=s, commit=commit, **kwargs) - - return self.loop.run_until_complete(_run()) - return self.loop.run_until_complete( - op_async(*args, session=session, commit=commit, **kwargs) - ) - if session is None: with self.session_maker() as s: return op_sync(*args, session=s, commit=commit, **kwargs) return op_sync(*args, session=session, commit=commit, **kwargs) + async def _execute_async_operation( + self, + op_async, + session: Optional["AsyncSession"], + commit: bool, + *args, + **kwargs, + ): + """Helper method to execute async operations with session management.
+ + Args: + op_async: The asynchronous operation function from PGMQOperation + session: Optional session to use (if None, creates a new one) + commit: Whether to commit the transaction + *args: Positional arguments to pass to the operation + **kwargs: Keyword arguments to pass to the operation + + Returns: + The result from the operation + """ + if session is None: + async with self.session_maker() as s: + return await op_async(*args, session=s, commit=commit, **kwargs) + return await op_async(*args, session=session, commit=commit, **kwargs) + def create_queue( self, queue_name: str, @@ -219,7 +210,6 @@ def create_queue( """ return self._execute_operation( PGMQOperation.create_queue, - PGMQOperation.create_queue_async, session, commit, queue_name, @@ -272,7 +262,6 @@ def create_partitioned_queue( return self._execute_operation( PGMQOperation.create_partitioned_queue, - PGMQOperation.create_partitioned_queue_async, session, commit, queue_name, @@ -292,7 +281,6 @@ def validate_queue_name( """ return self._execute_operation( PGMQOperation.validate_queue_name, - PGMQOperation.validate_queue_name_async, session, commit, queue_name, @@ -329,7 +317,6 @@ def drop_queue( return self._execute_operation( PGMQOperation.drop_queue, - PGMQOperation.drop_queue_async, session, commit, queue, @@ -351,7 +338,6 @@ def list_queues( """ return self._execute_operation( PGMQOperation.list_queues, - PGMQOperation.list_queues_async, session, commit, ) @@ -385,7 +371,6 @@ def send( """ return self._execute_operation( PGMQOperation.send, - PGMQOperation.send_async, session, commit, queue_name, @@ -416,7 +401,6 @@ def send_batch( """ return self._execute_operation( PGMQOperation.send_batch, - PGMQOperation.send_batch_async, session, commit, queue_name, @@ -496,7 +480,6 @@ def read( return self._execute_operation( PGMQOperation.read, - PGMQOperation.read_async, session, commit, queue_name, @@ -533,7 +516,6 @@ def read_batch( return self._execute_operation( PGMQOperation.read_batch, - 
PGMQOperation.read_batch_async, session, commit, queue_name, @@ -605,7 +587,6 @@ def read_with_poll( return self._execute_operation( PGMQOperation.read_with_poll, - PGMQOperation.read_with_poll_async, session, commit, queue_name, @@ -683,7 +664,6 @@ def consumer_with_backoff_retry(pgmq_client: PGMQueue, queue_name: str): return self._execute_operation( PGMQOperation.set_vt, - PGMQOperation.set_vt_async, session, commit, queue_name, @@ -710,7 +690,6 @@ def pop( """ return self._execute_operation( PGMQOperation.pop, - PGMQOperation.pop_async, session, commit, queue_name, @@ -743,7 +722,6 @@ def delete( """ return self._execute_operation( PGMQOperation.delete, - PGMQOperation.delete_async, session, commit, queue_name, @@ -776,7 +754,6 @@ def delete_batch( """ return self._execute_operation( PGMQOperation.delete_batch, - PGMQOperation.delete_batch_async, session, commit, queue_name, @@ -813,7 +790,6 @@ def archive( """ return self._execute_operation( PGMQOperation.archive, - PGMQOperation.archive_async, session, commit, queue_name, @@ -843,7 +819,6 @@ def archive_batch( """ return self._execute_operation( PGMQOperation.archive_batch, - PGMQOperation.archive_batch_async, session, commit, queue_name, @@ -870,7 +845,6 @@ def purge( """ return self._execute_operation( PGMQOperation.purge, - PGMQOperation.purge_async, session, commit, queue_name, @@ -903,7 +877,6 @@ def metrics( """ return self._execute_operation( PGMQOperation.metrics, - PGMQOperation.metrics_async, session, commit, queue_name, @@ -950,7 +923,6 @@ def metrics_all( """ return self._execute_operation( PGMQOperation.metrics_all, - PGMQOperation.metrics_all_async, session, commit, ) diff --git a/scripts/ci/pre_commit/check_sync_async_method_for_queue.py b/scripts/ci/pre_commit/check_sync_async_method_for_queue.py new file mode 100644 index 0000000..48d2e79 --- /dev/null +++ b/scripts/ci/pre_commit/check_sync_async_method_for_queue.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python +# /// script +# requires-python = 
">=3.10,<3.11" +# dependencies = [ +# "rich>=13.6.0", +# ] +# /// +""" +Script to check for missing async methods in PGMQueue for per-commit. + +For each public sync method (not starting with _), checks if there's a corresponding +async method with the same name plus '_async' suffix. +""" + +import ast +import sys +from pathlib import Path + + +sys.path.insert(0, str(Path(__name__).parent.parent.joinpath("scripts").resolve())) + +from scripts_utils.config import QUEUE_FILE # noqa: E402 +from scripts_utils.console import console # noqa: E402 +from scripts_utils.common_ast import parse_methods_info_from_target_class # noqa: E402 + + +def main(): + """Main function.""" + + module_tree = ast.parse(source=QUEUE_FILE.read_text(), filename=QUEUE_FILE) + _, missing_async = parse_methods_info_from_target_class( + module_tree, target_class="PGMQueue" + ) + + if not missing_async: + console.print( + "[bold green]SUCCESS:[/bold green] All public methods have corresponding async versions!" + ) + sys.exit(0) + + # log all the missing async methods + console.print() + console.print( + f"[bold yellow]WARNING:[/bold yellow] Found {len(missing_async)} missing async methods:", + style="bold", + ) + for method in missing_async: + console.print(f" [yellow]-[/yellow] {method}_async") + console.print() + + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/scripts/compelete_missing_async_methods.py b/scripts/compelete_missing_async_methods.py new file mode 100644 index 0000000..bc919c2 --- /dev/null +++ b/scripts/compelete_missing_async_methods.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +# /// script +# requires-python = ">=3.10,<3.11" +# dependencies = [ +# "rich>=13.6.0", +# ] +# /// +""" +Script to check for missing async methods in PGMQueue class and generate them. + +For each public sync method (not starting with _), checks if there's a corresponding +async method with the same name plus '_async' suffix. If missing, generates it. 
+""" + +import ast +import sys +from pathlib import Path +import contextlib + +import tempfile + + +from scripts_utils.config import QUEUE_FILE +from scripts_utils.console import console +from scripts_utils.common_ast import ( + parse_methods_info_from_target_class, + fill_missing_methods_to_class, +) +from scripts_utils.formatting import format_file, compare_file +from scripts_utils.queue_ast import get_async_methods_to_add + + +def main(): + """Main function.""" + + module_tree = ast.parse(source=QUEUE_FILE.read_text(), filename=QUEUE_FILE) + sync_methods, missing_async = parse_methods_info_from_target_class( + module_tree, target_class="PGMQueue" + ) + + if not missing_async: + console.print( + "[bold green]SUCCESS:[/bold green] All public methods have corresponding async versions!" + ) + sys.exit(0) + + # log all the missing async methods + console.print() + console.print( + f"[bold yellow]WARNING:[/bold yellow] Found {len(missing_async)} missing async methods:", + style="bold", + ) + for method in missing_async: + console.print(f" [yellow]-[/yellow] {method}_async") + console.print() + + # create missing async method from + async_methods_to_add = get_async_methods_to_add(sync_methods, missing_async) + # insert back to class + fill_missing_methods_to_class(module_tree, "PGMQueue", async_methods_to_add) + module_tree = ast.fix_missing_locations(module_tree) + + # write back to tmp file for comparison + tmp_file = "" + with tempfile.NamedTemporaryFile(mode="w+t", delete=False, suffix=".py") as f: + f.write(ast.unparse(module_tree)) + f.flush() + tmp_file = f.name + console.log(f"Complete missing async methods at {tmp_file}") + + if tmp_file: + max_formatting = 3 + for _ in range(max_formatting): + if format_file(tmp_file): + break + + _, missing_async_for_tmp = parse_methods_info_from_target_class( + ast.parse(Path(tmp_file).read_text()), "PGMQueue" + ) + + if missing_async_for_tmp: + console.log( + f"[error]Still get async methods to add after generating missing 
async methods in {tmp_file}: {missing_async_for_tmp}[/]" + ) + else: + console.log("[success]All missing async methods are generated[/]") + + # compare existed queue.py and tmp.py + with contextlib.suppress(Exception): + compare_file(QUEUE_FILE, tmp_file) + + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/scripts/scripts_utils/__init__.py b/scripts/scripts_utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/scripts_utils/common_ast.py b/scripts/scripts_utils/common_ast.py new file mode 100644 index 0000000..7cd625f --- /dev/null +++ b/scripts/scripts_utils/common_ast.py @@ -0,0 +1,124 @@ +from typing import List, Dict, Tuple, Literal + +import ast + + +class MethodInfo: + """Information about a method.""" + + def __init__(self, name: str, node: ast.FunctionDef): + self.name = name + self.node = node + self.is_target = not name.startswith( + "_" + ) # all the public method is our target method for further processing + self.is_async = name.endswith("_async") + self.base_name = name[:-6] if self.is_async else name + + +class ParseTargetClassFunctionsVisitor(ast.NodeVisitor): + """AST visitor to parse functions out of target class name for given module tree""" + + def __init__(self, class_name: str): + self.class_name = class_name + self.methods: List[MethodInfo] = [] + self.is_cur_node_in_target_class = False + + def visit_ClassDef(self, node: ast.ClassDef): + if node.name == self.class_name: + self.is_cur_node_in_target_class = True + self.generic_visit(node) + self.is_cur_node_in_target_class = False + else: + self.generic_visit(node) + + def visit_FunctionDef(self, node: ast.FunctionDef): + if self.is_cur_node_in_target_class: + # add all the method to the methods + self.methods.append(MethodInfo(node.name, node)) + self.generic_visit(node) + + def visit_AsyncFunctionDef(self, node): + if self.is_cur_node_in_target_class: + # add all the method to the methods + self.methods.append(MethodInfo(node.name, node)) + 
self.generic_visit(node) + + +class FillMissingMethodsToClass(ast.NodeTransformer): + """AST Transformer to fill missing async_methods back to target class""" + + def __init__(self, class_name: str, to_add_async_methods: Dict[str, MethodInfo]): + self.class_name = class_name + self.to_add_async_methods = to_add_async_methods + + def visit_ClassDef(self, node: ast.ClassDef): + if node.name == self.class_name: + for sync_func_name, async_func_node in self.to_add_async_methods.items(): + idx = next( + ( + i + for i, stmt in enumerate(node.body) + if isinstance(stmt, ast.FunctionDef) + and stmt.name == sync_func_name + ), + -1, + ) + + if idx != -1: + node.body.insert(idx + 1, async_func_node.node) + + return self.generic_visit(node) + + +def parse_methods_info_from_target_class( + module_tree: ast.Module, target_class: Literal["PGMQueue", "PGMQOperation"] +) -> Tuple[List[MethodInfo], set[str]]: + """ + Parse methods of target class from give module AST Tree + + Args: + module_tree: ast.Module + target_class: either "PGMQueue" or "PGMQOperation" str + + Returns: + Tuple of sync_methods, missing_async_set + """ + + analyzer = ParseTargetClassFunctionsVisitor(target_class) + analyzer.visit(module_tree) + + # Categorize methods + # We use sync methods as source of truth + async_methods_set = set() + missing_async_set = set() + + for method_info in analyzer.methods: + # skip non target methods + if not method_info.is_target: + continue + + if method_info.is_async: + async_methods_set.add(method_info.base_name) + + # Find missing async methods and generate class with interleaved methods + for method_info in analyzer.methods: + # skip non target methods + if not method_info.is_target: + continue + + if method_info.base_name not in async_methods_set: + missing_async_set.add(method_info.base_name) + + return analyzer.methods, missing_async_set + + +def fill_missing_methods_to_class( + module_tree: ast.Module, + target_class: Literal["PGMQueue", "PGMQOperation"], + 
to_add_async_methods: Dict[str, MethodInfo], +): + transformer = FillMissingMethodsToClass( + class_name=target_class, to_add_async_methods=to_add_async_methods + ) + transformer.visit(module_tree) diff --git a/scripts/scripts_utils/config.py b/scripts/scripts_utils/config.py new file mode 100644 index 0000000..f2af5bf --- /dev/null +++ b/scripts/scripts_utils/config.py @@ -0,0 +1,5 @@ +from pathlib import Path + +PROJECT_ROOT = Path(__file__).parent.parent.parent +SOURCE_PATH = PROJECT_ROOT / "pgmq_sqlalchemy" +QUEUE_FILE = SOURCE_PATH / "queue.py" diff --git a/scripts/scripts_utils/console.py b/scripts/scripts_utils/console.py new file mode 100644 index 0000000..ccbc02c --- /dev/null +++ b/scripts/scripts_utils/console.py @@ -0,0 +1,18 @@ +from rich.console import Console +from rich.theme import Theme + + +console = Console( + force_terminal=True, + color_system="standard", + theme=Theme( + { + "success": "green", + "info": "bright_blue", + "warning": "bright_yellow", + "error": "red", + "special": "magenta", + } + ), + width=202, +) diff --git a/scripts/scripts_utils/formatting.py b/scripts/scripts_utils/formatting.py new file mode 100644 index 0000000..a67950b --- /dev/null +++ b/scripts/scripts_utils/formatting.py @@ -0,0 +1,24 @@ +import subprocess +import sys +from pathlib import Path + + +sys.path.insert(0, str(Path(__name__).parent.parent.joinpath("scripts").resolve())) + + +def format_file(file_path: str) -> bool: + try: + ruff_stdout = subprocess.check_output(["ruff", "format", file_path]).decode() + except Exception as e: + raise e + + return "unchanged" in ruff_stdout + + +def compare_file(existed_file: str, new_file: str): + try: + subprocess.check_call( + ["git", "difftool", "--tool=vimdiff", "--no-index", existed_file, new_file] + ) + except Exception as e: + raise e diff --git a/scripts/scripts_utils/queue_ast.py b/scripts/scripts_utils/queue_ast.py new file mode 100644 index 0000000..a5c7eea --- /dev/null +++ b/scripts/scripts_utils/queue_ast.py 
@@ -0,0 +1,98 @@ +import ast +import re +import sys +from pathlib import Path +from typing import List, Set, Dict +import copy + + +sys.path.insert(0, str(Path(__name__).parent.parent.joinpath("scripts").resolve())) + +from scripts_utils.common_ast import MethodInfo # noqa: E402 + + +class AsyncFuncTransformer(ast.NodeTransformer): + to_replace_execute_func_attr: str = "_execute_operation" + target_execute_func_attr: str = "_execute_async_operation" + + def visit_Call(self, node): + if isinstance(node.func, ast.Attribute): + # Handle PGMQOperation.method calls + for arg in node.args: + if isinstance(arg, ast.Attribute) and ( + isinstance(arg.value, ast.Name) and arg.value.id == "PGMQOperation" + ): + # Add _async suffix to method name + arg.attr = f"{arg.attr}_async" + + # Replace `self._execute_operation` to `self._execute_async_operation` + if ( + isinstance(node.func.value, ast.Name) + and node.func.value.id == "self" + and node.func.attr == self.to_replace_execute_func_attr + ): + node.func.attr = self.target_execute_func_attr + + return self.generic_visit(node) + + def visit_FunctionDef(self, node): + # Transform function to async + new_node = ast.AsyncFunctionDef( + name=f"{node.name}_async", + args=node.args, + body=node.body, + decorator_list=node.decorator_list, + returns=node.returns, + lineno=node.lineno, + col_offset=node.col_offset, + ) + + # Transform docstring if exists + if orig_doc_string := ast.get_docstring(node): + transformed_docstring = self.transform_docstring(orig_doc_string) + + # Create proper AST node for docstring + docstring_node = ast.Expr(value=ast.Constant(value=transformed_docstring)) + new_node.body[0] = docstring_node + + # Transform return statements + for i, stmt in enumerate(new_node.body): + if isinstance(stmt, ast.Return) and stmt.value: + # Wrap return value in await + new_node.body[i] = ast.Return(value=ast.Await(value=stmt.value)) + + return self.generic_visit(new_node) + + def transform_docstring(self, docstring: str) -> 
str: + """Transform docstring for async version.""" + # replace ` = pgmq_client.(` with ` = await pgmq_client._async(` + # replace `time.sleep` with `await asyncio.sleep` + modified = re.sub(r"(pgmq_client\.)(\w+)", r"await \1\2_async", docstring) + modified = re.sub(r"time\.sleep\(", r"await asyncio.sleep(", docstring) + return modified + + +def transform_to_async( + transformer: AsyncFuncTransformer, method_info: MethodInfo +) -> MethodInfo: + orig_sync_func_node = method_info.node + async_node = copy.deepcopy(orig_sync_func_node) + + async_node = transformer.visit(async_node) + async_node = ast.fix_missing_locations(async_node) + + return MethodInfo(f"{method_info.base_name}_async", async_node) + + +def get_async_methods_to_add( + sync_methods: List[MethodInfo], missing_async: Set[str] +) -> Dict[str, MethodInfo]: + transformer = AsyncFuncTransformer() + async_methods: Dict[str, MethodInfo] = {} + for method_info in sync_methods: + if method_info.base_name in missing_async: + async_methods[method_info.base_name] = transform_to_async( + transformer, method_info + ) + + return async_methods From 20e2fba41dd08759fa241ee1962d9890b7f1b5f8 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Mon, 5 Jan 2026 15:46:16 +0800 Subject: [PATCH 12/17] WIP --- .github/workflows/codecov.yml | 2 +- .github/workflows/examples.yml | 4 +- examples/fastapi_pub_sub/pyproject.toml | 17 - examples/fastapi_pub_sub/requirements.txt | 5 - examples_tests/conftest.py | 19 + .../integration/test_fastapi_integration.py | 81 +-- pyproject.toml | 26 +- uv.lock | 652 +++++++++++++++++- 8 files changed, 695 insertions(+), 111 deletions(-) delete mode 100644 examples/fastapi_pub_sub/pyproject.toml delete mode 100644 examples/fastapi_pub_sub/requirements.txt diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml index e37f013..deb7a5b 100644 --- a/.github/workflows/codecov.yml +++ b/.github/workflows/codecov.yml @@ -33,7 +33,7 @@ jobs: curl -LsSf https://astral.sh/uv/install.sh | 
sh echo "$HOME/.local/bin" >> $GITHUB_PATH - name: Install dependencies - run: uv sync --extra dev + run: uv sync --group postgresql-drivers --group test - name: Start PostgreSQL run: | cp pgmq_postgres.template.env pgmq_postgres.env diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index f4fecaf..4e6c834 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -44,9 +44,7 @@ jobs: - name: Install dependencies run: | - uv sync --extra dev - # Install additional dependencies for examples - uv pip install fastapi uvicorn httpx + uv sync --all-groups --no-group docs - name: Start PostgreSQL run: | diff --git a/examples/fastapi_pub_sub/pyproject.toml b/examples/fastapi_pub_sub/pyproject.toml deleted file mode 100644 index 7865352..0000000 --- a/examples/fastapi_pub_sub/pyproject.toml +++ /dev/null @@ -1,17 +0,0 @@ -[project] -name = "fastapi-pub-sub-example" -version = "0.1.0" -description = "FastAPI pub/sub example using pgmq-sqlalchemy" -readme = "README.md" -requires-python = ">=3.9" -dependencies = [ - "fastapi>=0.104.0", - "uvicorn>=0.24.0", - "psycopg2-binary>=2.9.9", - "asyncpg>=0.29.0", - "pgmq-sqlalchemy>=0.1.2", -] - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" diff --git a/examples/fastapi_pub_sub/requirements.txt b/examples/fastapi_pub_sub/requirements.txt deleted file mode 100644 index f185598..0000000 --- a/examples/fastapi_pub_sub/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -fastapi>=0.104.0 -uvicorn>=0.24.0 -psycopg2-binary>=2.9.9 -asyncpg>=0.29.0 -pgmq-sqlalchemy>=0.1.2 diff --git a/examples_tests/conftest.py b/examples_tests/conftest.py index efe8a15..691b479 100644 --- a/examples_tests/conftest.py +++ b/examples_tests/conftest.py @@ -2,6 +2,8 @@ import os import pytest +import logging + def pytest_addoption(parser): """Add custom command-line options for pytest.""" @@ -12,6 +14,23 @@ def pytest_addoption(parser): help="Specify the database name to use for 
testing", ) +@pytest.fixture(scope="module") +def configure_logger(): + logging.basicConfig( + level=logging.INFO, + format="[%(levelname)s][%(asctime)s][%(name)s] %(message)s" + ) + + +@pytest.fixture(scope="module") +def examples_dir(): + """Return the path to the examples directory.""" + return os.path.join( + os.path.dirname(os.path.dirname(__file__)), + "examples", + "fastapi_pub_sub" + ) + @pytest.fixture(scope="module") def database_url(request): diff --git a/examples_tests/integration/test_fastapi_integration.py b/examples_tests/integration/test_fastapi_integration.py index c550713..004c53c 100644 --- a/examples_tests/integration/test_fastapi_integration.py +++ b/examples_tests/integration/test_fastapi_integration.py @@ -1,26 +1,20 @@ """Integration tests for FastAPI pub/sub example with subprocess.""" -import asyncio + import os import subprocess import sys import time import signal +import logging + import pytest -import requests -from sqlalchemy import create_engine, text +import httpx +from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from pgmq_sqlalchemy import op - -@pytest.fixture(scope="module") -def examples_dir(): - """Return the path to the examples directory.""" - return os.path.join( - os.path.dirname(os.path.dirname(os.path.dirname(__file__))), - "examples", - "fastapi_pub_sub" - ) +logger = logging.getLogger(__name__) @pytest.fixture(scope="module") @@ -29,49 +23,35 @@ def test_queue_name(): return "test_integration_order_queue" -@pytest.fixture(scope="module") -def database_url(request): - """Get database URL from environment or CLI.""" - db_name = request.config.getoption("--db-name") - if not db_name: - db_name = os.getenv("SQLALCHEMY_DB", "postgres") - - host = os.getenv("SQLALCHEMY_HOST", "localhost") - port = os.getenv("SQLALCHEMY_PORT", "5432") - user = os.getenv("SQLALCHEMY_USER", "postgres") - password = os.getenv("SQLALCHEMY_PASSWORD", "postgres") - - return 
f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db_name}" - - @pytest.fixture(scope="module", autouse=True) -def api_instance(examples_dir, database_url, test_queue_name): +def api_instance(examples_dir, sync_database_url, test_queue_name): """Fixture to spin up the API server as a subprocess.""" # Update the API to use test queue api_py = os.path.join(examples_dir, "api.py") # Set environment variables for the subprocess env = os.environ.copy() - env["DATABASE_URL"] = database_url + env["DATABASE_URL"] = sync_database_url env["QUEUE_NAME"] = test_queue_name # Start the API server process = subprocess.Popen( [sys.executable, api_py], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stdout=sys.stdout, + stderr=sys.stderr, env=env, preexec_fn=os.setsid if hasattr(os, 'setsid') else None ) + logger.info("Create API Server Process") # Wait for the server to start max_attempts = 30 for i in range(max_attempts): try: - response = requests.get("http://localhost:8000/health", timeout=1) + response = httpx.get("http://localhost:8000/health", timeout=1) if response.status_code == 200: break - except requests.exceptions.RequestException: + except Exception: time.sleep(1) else: # Kill the process if it didn't start @@ -80,7 +60,8 @@ def api_instance(examples_dir, database_url, test_queue_name): else: process.terminate() pytest.fail("API server failed to start") - + + logger.info("API Server is healthy") yield process # Teardown: kill the API server @@ -89,30 +70,32 @@ def api_instance(examples_dir, database_url, test_queue_name): else: process.terminate() process.wait(timeout=10) + logger.info("Terminate API Server") @pytest.fixture(scope="module", autouse=True) -def consumer_instance(examples_dir, database_url, test_queue_name, api_instance): +def consumer_instance(examples_dir, async_database_url, test_queue_name): """Fixture to spin up the consumer as a subprocess.""" # Update the consumer to use test queue consumer_py = os.path.join(examples_dir, 
"consumer.py") # Set environment variables for the subprocess env = os.environ.copy() - env["DATABASE_URL"] = database_url + env["DATABASE_URL"] = async_database_url env["QUEUE_NAME"] = test_queue_name # Start the consumer process = subprocess.Popen( [sys.executable, consumer_py], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, + stdout=sys.stdout, + stderr=sys.stderr, env=env, preexec_fn=os.setsid if hasattr(os, 'setsid') else None ) # Give the consumer some time to start time.sleep(3) + logger.info("Create Consumer Process") yield process @@ -122,16 +105,17 @@ def consumer_instance(examples_dir, database_url, test_queue_name, api_instance) else: process.terminate() process.wait(timeout=10) + logger.info("Terminate Consumer Process") -def test_api_consumer_integration(api_instance, consumer_instance, database_url): +def test_api_consumer_integration(sync_database_url): """Test creating 100 orders parallelly and waiting for consumer to process them all.""" import concurrent.futures # Create 100 orders in parallel num_orders = 100 - def create_order(order_num): + def create_order(order_num: int): """Helper function to create a single order.""" order_data = { "customer_name": f"Customer {order_num}", @@ -139,7 +123,7 @@ def create_order(order_num): "quantity": order_num % 10 + 1, "price": 10.0 + (order_num % 50) } - response = requests.post("http://localhost:8000/orders", json=order_data, timeout=5) + response = httpx.post("http://localhost:8000/orders", json=order_data, timeout=5) return response.status_code == 201, response.json() if response.status_code == 201 else None # Create orders in parallel @@ -150,15 +134,17 @@ def create_order(order_num): # Check that all orders were created successfully successful_orders = sum(1 for success, _ in results if success) assert successful_orders == num_orders, f"Only {successful_orders}/{num_orders} orders were created" + logger.info("Create %d successful orders via API Server", successful_orders) # Wait for the consumer 
to process all messages # Check the queue periodically until it's empty - engine = create_engine(database_url) + engine = create_engine(sync_database_url) SessionLocal = sessionmaker(bind=engine) max_wait = 120 # Wait up to 2 minutes start_time = time.time() + logger.info("Wait for Consumer to process all the orders"); while time.time() - start_time < max_wait: # Check queue metrics to see if there are any messages left with SessionLocal() as session: @@ -167,15 +153,20 @@ def create_order(order_num): try: metrics = op.metrics(test_queue, session=session, commit=True) - if metrics.queue_length == 0: - # All messages have been processed - break + if metrics: + logger.info("%s queue metrics: %s", test_queue, str(metrics)) + if metrics.queue_length == 0: + # All messages have been processed + break except Exception as e: # Queue might not exist yet or other error print(f"Error checking metrics: {e}") time.sleep(2) else: + metrics = op.metrics(test_queue, session=session, commit=True) + if metrics: + logger.info("%s queue metrics: %s", test_queue, str(metrics)) pytest.fail(f"Consumer did not process all messages within {max_wait} seconds") # Verify that all messages were processed diff --git a/pyproject.toml b/pyproject.toml index 5cc8557..8b8bf71 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,28 +38,46 @@ pg8000 = ["pg8000>=1.31.2"] psycopg = ["psycopg>=3.2.1"] psycopg2-binary = ["psycopg2-binary>=2.9.9"] psycopg2cffi = ["psycopg2cffi>=2.9.0"] + +# Include dependencies by `uv sync --group dev` +[dependency-groups] dev = [ - # postgresql drivers + {include-group = "postgresql-drivers"}, + {include-group = "test"}, + {include-group = "docs"}, + {include-group = "fastapi-pub-sub-example"}, + "rich>=14.2.0", + "ruff>=0.14.10", + "libcst>=1.8.6", +] +postgresql-drivers = [ "asyncpg>=0.29.0", "greenlet>=3.0.3", "pg8000>=1.31.2", "psycopg>=3.2.1", "psycopg2-binary>=2.9.9", "psycopg2cffi>=2.9.0", - # testing +] +test = [ "pytest>=7.4.4,<8.0", 
"pytest-asyncio>=0.23.8", "pytest-lazy-fixture>=0.6.3", "pytest-cov>=5.0.0", "pytest-xdist>=3.6.1", "filelock>=3.15.4", - # docs +] +docs = [ "sphinx>=7.3.7", "sphinx-autobuild>=2024.4.16", "sphinx-rtd-theme>=2.0.0", "sphinx-copybutton>=0.5.2", ] +fastapi-pub-sub-example = [ + "fastapi>=0.104.0", + "httpx>=0.28.1", + "uvicorn>=0.24.0", +] [build-system] requires = ["hatchling"] -build-backend = "hatchling.build" \ No newline at end of file +build-backend = "hatchling.build" diff --git a/uv.lock b/uv.lock index fa07dc9..9c6ced8 100644 --- a/uv.lock +++ b/uv.lock @@ -2,7 +2,9 @@ version = 1 revision = 1 requires-python = ">=3.9" resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", "python_full_version < '3.10'", ] @@ -24,7 +26,9 @@ name = "alabaster" version = "1.0.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 } @@ -32,6 +36,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, ] +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + [[package]] name = "anyio" version = "4.12.0" @@ -368,7 +390,9 @@ name = "click" version = "8.3.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] dependencies = [ @@ -512,7 +536,9 @@ name = "coverage" version = "7.13.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = 
"https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905 } @@ -645,6 +671,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708 }, ] +[[package]] +name = "fastapi" +version = "0.128.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette", version = "0.49.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "starlette", version = "0.50.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094 }, +] + [[package]] name = "filelock" version = "3.19.1" @@ -662,7 +704,9 @@ name = "filelock" version = "3.20.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = 
"https://files.pythonhosted.org/packages/a7/23/ce7a1126827cedeb958fc043d61745754464eb56c5937c35bbf2b8e26f34/filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c", size = 19476 } @@ -751,7 +795,9 @@ name = "greenlet" version = "3.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651 } @@ -814,6 +860,34 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, ] +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + [[package]] name = "idna" version = "3.11" @@ -861,7 +935,9 @@ name = "iniconfig" version = "2.3.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } @@ -881,6 +957,116 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, ] +[[package]] +name = "libcst" +version = "1.8.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml", marker = "python_full_version != '3.13.*'" }, + { name = "pyyaml-ft", marker = "python_full_version == '3.13.*'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/cd/337df968b38d94c5aabd3e1b10630f047a2b345f6e1d4456bd9fe7417537/libcst-1.8.6.tar.gz", hash = "sha256:f729c37c9317126da9475bdd06a7208eb52fcbd180a6341648b45a56b4ba708b", size = 891354 } +wheels = 
[ + { url = "https://files.pythonhosted.org/packages/c4/52/97d5454dee9d014821fe0c88f3dc0e83131b97dd074a4d49537056a75475/libcst-1.8.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a20c5182af04332cc94d8520792befda06d73daf2865e6dddc5161c72ea92cb9", size = 2211698 }, + { url = "https://files.pythonhosted.org/packages/6c/a4/d1205985d378164687af3247a9c8f8bdb96278b0686ac98ab951bc6d336a/libcst-1.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36473e47cb199b7e6531d653ee6ffed057de1d179301e6c67f651f3af0b499d6", size = 2093104 }, + { url = "https://files.pythonhosted.org/packages/9e/de/1338da681b7625b51e584922576d54f1b8db8fc7ff4dc79121afc5d4d2cd/libcst-1.8.6-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:06fc56335a45d61b7c1b856bfab4587b84cfe31e9d6368f60bb3c9129d900f58", size = 2237419 }, + { url = "https://files.pythonhosted.org/packages/50/06/ee66f2d83b870534756e593d464d8b33b0914c224dff3a407e0f74dc04e0/libcst-1.8.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6b23d14a7fc0addd9795795763af26b185deb7c456b1e7cc4d5228e69dab5ce8", size = 2300820 }, + { url = "https://files.pythonhosted.org/packages/9c/ca/959088729de8e0eac8dd516e4fb8623d8d92bad539060fa85c9e94d418a5/libcst-1.8.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16cfe0cfca5fd840e1fb2c30afb628b023d3085b30c3484a79b61eae9d6fe7ba", size = 2301201 }, + { url = "https://files.pythonhosted.org/packages/c2/4c/2a21a8c452436097dfe1da277f738c3517f3f728713f16d84b9a3d67ca8d/libcst-1.8.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:455f49a93aea4070132c30ebb6c07c2dea0ba6c1fde5ffde59fc45dbb9cfbe4b", size = 2408213 }, + { url = "https://files.pythonhosted.org/packages/3e/26/8f7b671fad38a515bb20b038718fd2221ab658299119ac9bcec56c2ced27/libcst-1.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:72cca15800ffc00ba25788e4626189fe0bc5fe2a0c1cb4294bce2e4df21cc073", size = 2119189 }, + { url = 
"https://files.pythonhosted.org/packages/5b/bf/ffb23a48e27001165cc5c81c5d9b3d6583b21b7f5449109e03a0020b060c/libcst-1.8.6-cp310-cp310-win_arm64.whl", hash = "sha256:6cad63e3a26556b020b634d25a8703b605c0e0b491426b3e6b9e12ed20f09100", size = 2001736 }, + { url = "https://files.pythonhosted.org/packages/dc/15/95c2ecadc0fb4af8a7057ac2012a4c0ad5921b9ef1ace6c20006b56d3b5f/libcst-1.8.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3649a813660fbffd7bc24d3f810b1f75ac98bd40d9d6f56d1f0ee38579021073", size = 2211289 }, + { url = "https://files.pythonhosted.org/packages/80/c3/7e1107acd5ed15cf60cc07c7bb64498a33042dc4821874aea3ec4942f3cd/libcst-1.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0cbe17067055829607c5ba4afa46bfa4d0dd554c0b5a583546e690b7367a29b6", size = 2092927 }, + { url = "https://files.pythonhosted.org/packages/c1/ff/0d2be87f67e2841a4a37d35505e74b65991d30693295c46fc0380ace0454/libcst-1.8.6-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:59a7e388c57d21d63722018978a8ddba7b176e3a99bd34b9b84a576ed53f2978", size = 2237002 }, + { url = "https://files.pythonhosted.org/packages/69/99/8c4a1b35c7894ccd7d33eae01ac8967122f43da41325223181ca7e4738fe/libcst-1.8.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b6c1248cc62952a3a005792b10cdef2a4e130847be9c74f33a7d617486f7e532", size = 2301048 }, + { url = "https://files.pythonhosted.org/packages/9b/8b/d1aa811eacf936cccfb386ae0585aa530ea1221ccf528d67144e041f5915/libcst-1.8.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6421a930b028c5ef4a943b32a5a78b7f1bf15138214525a2088f11acbb7d3d64", size = 2300675 }, + { url = "https://files.pythonhosted.org/packages/c6/6b/7b65cd41f25a10c1fef2389ddc5c2b2cc23dc4d648083fa3e1aa7e0eeac2/libcst-1.8.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6d8b67874f2188399a71a71731e1ba2d1a2c3173b7565d1cc7ffb32e8fbaba5b", size = 2407934 }, + { url = 
"https://files.pythonhosted.org/packages/c5/8b/401cfff374bb3b785adfad78f05225225767ee190997176b2a9da9ed9460/libcst-1.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:b0d8c364c44ae343937f474b2e492c1040df96d94530377c2f9263fb77096e4f", size = 2119247 }, + { url = "https://files.pythonhosted.org/packages/f1/17/085f59eaa044b6ff6bc42148a5449df2b7f0ba567307de7782fe85c39ee2/libcst-1.8.6-cp311-cp311-win_arm64.whl", hash = "sha256:5dcaaebc835dfe5755bc85f9b186fb7e2895dda78e805e577fef1011d51d5a5c", size = 2001774 }, + { url = "https://files.pythonhosted.org/packages/0c/3c/93365c17da3d42b055a8edb0e1e99f1c60c776471db6c9b7f1ddf6a44b28/libcst-1.8.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0c13d5bd3d8414a129e9dccaf0e5785108a4441e9b266e1e5e9d1f82d1b943c9", size = 2206166 }, + { url = "https://files.pythonhosted.org/packages/1d/cb/7530940e6ac50c6dd6022349721074e19309eb6aa296e942ede2213c1a19/libcst-1.8.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1472eeafd67cdb22544e59cf3bfc25d23dc94058a68cf41f6654ff4fcb92e09", size = 2083726 }, + { url = "https://files.pythonhosted.org/packages/1b/cf/7e5eaa8c8f2c54913160671575351d129170db757bb5e4b7faffed022271/libcst-1.8.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:089c58e75cb142ec33738a1a4ea7760a28b40c078ab2fd26b270dac7d2633a4d", size = 2235755 }, + { url = "https://files.pythonhosted.org/packages/55/54/570ec2b0e9a3de0af9922e3bb1b69a5429beefbc753a7ea770a27ad308bd/libcst-1.8.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c9d7aeafb1b07d25a964b148c0dda9451efb47bbbf67756e16eeae65004b0eb5", size = 2301473 }, + { url = "https://files.pythonhosted.org/packages/11/4c/163457d1717cd12181c421a4cca493454bcabd143fc7e53313bc6a4ad82a/libcst-1.8.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:207481197afd328aa91d02670c15b48d0256e676ce1ad4bafb6dc2b593cc58f1", size = 2298899 }, + { url = 
"https://files.pythonhosted.org/packages/35/1d/317ddef3669883619ef3d3395ea583305f353ef4ad87d7a5ac1c39be38e3/libcst-1.8.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:375965f34cc6f09f5f809244d3ff9bd4f6cb6699f571121cebce53622e7e0b86", size = 2408239 }, + { url = "https://files.pythonhosted.org/packages/9a/a1/f47d8cccf74e212dd6044b9d6dbc223636508da99acff1d54786653196bc/libcst-1.8.6-cp312-cp312-win_amd64.whl", hash = "sha256:da95b38693b989eaa8d32e452e8261cfa77fe5babfef1d8d2ac25af8c4aa7e6d", size = 2119660 }, + { url = "https://files.pythonhosted.org/packages/19/d0/dd313bf6a7942cdf951828f07ecc1a7695263f385065edc75ef3016a3cb5/libcst-1.8.6-cp312-cp312-win_arm64.whl", hash = "sha256:bff00e1c766658adbd09a175267f8b2f7616e5ee70ce45db3d7c4ce6d9f6bec7", size = 1999824 }, + { url = "https://files.pythonhosted.org/packages/90/01/723cd467ec267e712480c772aacc5aa73f82370c9665162fd12c41b0065b/libcst-1.8.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7445479ebe7d1aff0ee094ab5a1c7718e1ad78d33e3241e1a1ec65dcdbc22ffb", size = 2206386 }, + { url = "https://files.pythonhosted.org/packages/17/50/b944944f910f24c094f9b083f76f61e3985af5a376f5342a21e01e2d1a81/libcst-1.8.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4fc3fef8a2c983e7abf5d633e1884c5dd6fa0dcb8f6e32035abd3d3803a3a196", size = 2083945 }, + { url = "https://files.pythonhosted.org/packages/36/a1/bd1b2b2b7f153d82301cdaddba787f4a9fc781816df6bdb295ca5f88b7cf/libcst-1.8.6-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:1a3a5e4ee870907aa85a4076c914ae69066715a2741b821d9bf16f9579de1105", size = 2235818 }, + { url = "https://files.pythonhosted.org/packages/b9/ab/f5433988acc3b4d188c4bb154e57837df9488cc9ab551267cdeabd3bb5e7/libcst-1.8.6-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6609291c41f7ad0bac570bfca5af8fea1f4a27987d30a1fa8b67fe5e67e6c78d", size = 2301289 }, + { url = 
"https://files.pythonhosted.org/packages/5d/57/89f4ba7a6f1ac274eec9903a9e9174890d2198266eee8c00bc27eb45ecf7/libcst-1.8.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:25eaeae6567091443b5374b4c7d33a33636a2d58f5eda02135e96fc6c8807786", size = 2299230 }, + { url = "https://files.pythonhosted.org/packages/f2/36/0aa693bc24cce163a942df49d36bf47a7ed614a0cd5598eee2623bc31913/libcst-1.8.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04030ea4d39d69a65873b1d4d877def1c3951a7ada1824242539e399b8763d30", size = 2408519 }, + { url = "https://files.pythonhosted.org/packages/db/18/6dd055b5f15afa640fb3304b2ee9df8b7f72e79513814dbd0a78638f4a0e/libcst-1.8.6-cp313-cp313-win_amd64.whl", hash = "sha256:8066f1b70f21a2961e96bedf48649f27dfd5ea68be5cd1bed3742b047f14acde", size = 2119853 }, + { url = "https://files.pythonhosted.org/packages/c9/ed/5ddb2a22f0b0abdd6dcffa40621ada1feaf252a15e5b2733a0a85dfd0429/libcst-1.8.6-cp313-cp313-win_arm64.whl", hash = "sha256:c188d06b583900e662cd791a3f962a8c96d3dfc9b36ea315be39e0a4c4792ebf", size = 1999808 }, + { url = "https://files.pythonhosted.org/packages/25/d3/72b2de2c40b97e1ef4a1a1db4e5e52163fc7e7740ffef3846d30bc0096b5/libcst-1.8.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c41c76e034a1094afed7057023b1d8967f968782433f7299cd170eaa01ec033e", size = 2190553 }, + { url = "https://files.pythonhosted.org/packages/0d/20/983b7b210ccc3ad94a82db54230e92599c4a11b9cfc7ce3bc97c1d2df75c/libcst-1.8.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5432e785322aba3170352f6e72b32bea58d28abd141ac37cc9b0bf6b7c778f58", size = 2074717 }, + { url = "https://files.pythonhosted.org/packages/13/f2/9e01678fedc772e09672ed99930de7355757035780d65d59266fcee212b8/libcst-1.8.6-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:85b7025795b796dea5284d290ff69de5089fc8e989b25d6f6f15b6800be7167f", size = 2225834 }, + { url = 
"https://files.pythonhosted.org/packages/4a/0d/7bed847b5c8c365e9f1953da274edc87577042bee5a5af21fba63276e756/libcst-1.8.6-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:536567441182a62fb706e7aa954aca034827b19746832205953b2c725d254a93", size = 2287107 }, + { url = "https://files.pythonhosted.org/packages/02/f0/7e51fa84ade26c518bfbe7e2e4758b56d86a114c72d60309ac0d350426c4/libcst-1.8.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2f04d3672bde1704f383a19e8f8331521abdbc1ed13abb349325a02ac56e5012", size = 2288672 }, + { url = "https://files.pythonhosted.org/packages/ad/cd/15762659a3f5799d36aab1bc2b7e732672722e249d7800e3c5f943b41250/libcst-1.8.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f04febcd70e1e67917be7de513c8d4749d2e09206798558d7fe632134426ea4", size = 2392661 }, + { url = "https://files.pythonhosted.org/packages/e4/6b/b7f9246c323910fcbe021241500f82e357521495dcfe419004dbb272c7cb/libcst-1.8.6-cp313-cp313t-win_amd64.whl", hash = "sha256:1dc3b897c8b0f7323412da3f4ad12b16b909150efc42238e19cbf19b561cc330", size = 2105068 }, + { url = "https://files.pythonhosted.org/packages/a6/0b/4fd40607bc4807ec2b93b054594373d7fa3d31bb983789901afcb9bcebe9/libcst-1.8.6-cp313-cp313t-win_arm64.whl", hash = "sha256:44f38139fa95e488db0f8976f9c7ca39a64d6bc09f2eceef260aa1f6da6a2e42", size = 1985181 }, + { url = "https://files.pythonhosted.org/packages/3a/60/4105441989e321f7ad0fd28ffccb83eb6aac0b7cfb0366dab855dcccfbe5/libcst-1.8.6-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:b188e626ce61de5ad1f95161b8557beb39253de4ec74fc9b1f25593324a0279c", size = 2204202 }, + { url = "https://files.pythonhosted.org/packages/67/2f/51a6f285c3a183e50cfe5269d4a533c21625aac2c8de5cdf2d41f079320d/libcst-1.8.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:87e74f7d7dfcba9efa91127081e22331d7c42515f0a0ac6e81d4cf2c3ed14661", size = 2083581 }, + { url = 
"https://files.pythonhosted.org/packages/2f/64/921b1c19b638860af76cdb28bc81d430056592910b9478eea49e31a7f47a/libcst-1.8.6-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:3a926a4b42015ee24ddfc8ae940c97bd99483d286b315b3ce82f3bafd9f53474", size = 2236495 }, + { url = "https://files.pythonhosted.org/packages/12/a8/b00592f9bede618cbb3df6ffe802fc65f1d1c03d48a10d353b108057d09c/libcst-1.8.6-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:3f4fbb7f569e69fd9e89d9d9caa57ca42c577c28ed05062f96a8c207594e75b8", size = 2301466 }, + { url = "https://files.pythonhosted.org/packages/af/df/790d9002f31580fefd0aec2f373a0f5da99070e04c5e8b1c995d0104f303/libcst-1.8.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:08bd63a8ce674be431260649e70fca1d43f1554f1591eac657f403ff8ef82c7a", size = 2300264 }, + { url = "https://files.pythonhosted.org/packages/21/de/dc3f10e65bab461be5de57850d2910a02c24c3ddb0da28f0e6e4133c3487/libcst-1.8.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e00e275d4ba95d4963431ea3e409aa407566a74ee2bf309a402f84fc744abe47", size = 2408572 }, + { url = "https://files.pythonhosted.org/packages/20/3b/35645157a7590891038b077db170d6dd04335cd2e82a63bdaa78c3297dfe/libcst-1.8.6-cp314-cp314-win_amd64.whl", hash = "sha256:fea5c7fa26556eedf277d4f72779c5ede45ac3018650721edd77fd37ccd4a2d4", size = 2193917 }, + { url = "https://files.pythonhosted.org/packages/b3/a2/1034a9ba7d3e82f2c2afaad84ba5180f601aed676d92b76325797ad60951/libcst-1.8.6-cp314-cp314-win_arm64.whl", hash = "sha256:bb9b4077bdf8857b2483879cbbf70f1073bc255b057ec5aac8a70d901bb838e9", size = 2078748 }, + { url = "https://files.pythonhosted.org/packages/95/a1/30bc61e8719f721a5562f77695e6154e9092d1bdf467aa35d0806dcd6cea/libcst-1.8.6-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:55ec021a296960c92e5a33b8d93e8ad4182b0eab657021f45262510a58223de1", size = 2188980 }, + { url = 
"https://files.pythonhosted.org/packages/2c/14/c660204532407c5628e3b615015a902ed2d0b884b77714a6bdbe73350910/libcst-1.8.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ba9ab2b012fbd53b36cafd8f4440a6b60e7e487cd8b87428e57336b7f38409a4", size = 2074828 }, + { url = "https://files.pythonhosted.org/packages/82/e2/c497c354943dff644749f177ee9737b09ed811b8fc842b05709a40fe0d1b/libcst-1.8.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c0a0cc80aebd8aa15609dd4d330611cbc05e9b4216bcaeabba7189f99ef07c28", size = 2225568 }, + { url = "https://files.pythonhosted.org/packages/86/ef/45999676d07bd6d0eefa28109b4f97124db114e92f9e108de42ba46a8028/libcst-1.8.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:42a4f68121e2e9c29f49c97f6154e8527cd31021809cc4a941c7270aa64f41aa", size = 2286523 }, + { url = "https://files.pythonhosted.org/packages/f4/6c/517d8bf57d9f811862f4125358caaf8cd3320a01291b3af08f7b50719db4/libcst-1.8.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a434c521fadaf9680788b50d5c21f4048fa85ed19d7d70bd40549fbaeeecab1", size = 2288044 }, + { url = "https://files.pythonhosted.org/packages/83/ce/24d7d49478ffb61207f229239879845da40a374965874f5ee60f96b02ddb/libcst-1.8.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6a65f844d813ab4ef351443badffa0ae358f98821561d19e18b3190f59e71996", size = 2392605 }, + { url = "https://files.pythonhosted.org/packages/39/c3/829092ead738b71e96a4e96896c96f276976e5a8a58b4473ed813d7c962b/libcst-1.8.6-cp314-cp314t-win_amd64.whl", hash = "sha256:bdb14bc4d4d83a57062fed2c5da93ecb426ff65b0dc02ddf3481040f5f074a82", size = 2181581 }, + { url = "https://files.pythonhosted.org/packages/98/6d/5d6a790a02eb0d9d36c4aed4f41b277497e6178900b2fa29c35353aa45ed/libcst-1.8.6-cp314-cp314t-win_arm64.whl", hash = "sha256:819c8081e2948635cab60c603e1bbdceccdfe19104a242530ad38a36222cb88f", size = 2065000 }, + { url = 
"https://files.pythonhosted.org/packages/0c/09/69a0cd1eeb358f03c3ccd79ca22778afc1c1c723158270ad84ce86266eed/libcst-1.8.6-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cb2679ef532f9fa5be5c5a283b6357cb6e9888a8dd889c4bb2b01845a29d8c0b", size = 2211812 }, + { url = "https://files.pythonhosted.org/packages/ff/38/b965fa7bc4409520404261ce6bdf019e56bed1674b9a68ddfc9e25bc904c/libcst-1.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:203ec2a83f259baf686b9526268cd23d048d38be5589594ef143aee50a4faf7e", size = 2093137 }, + { url = "https://files.pythonhosted.org/packages/a9/7c/083084b91db049343c49a27279c226f4eb27d28bef4942965386418e643e/libcst-1.8.6-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6366ab2107425bf934b0c83311177f2a371bfc757ee8c6ad4a602d7cbcc2f363", size = 2237609 }, + { url = "https://files.pythonhosted.org/packages/26/c5/fcf60600a809b9e4cf75e82484a7a9a4bdc80ba3c9939a6a18af3379c6c7/libcst-1.8.6-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:6aa11df6c58812f731172b593fcb485d7ba09ccc3b52fea6c7f26a43377dc748", size = 2301394 }, + { url = "https://files.pythonhosted.org/packages/9f/73/d72942eb3f520bc9444e61a48236694dee3cdc13f6b59179e5288d725b93/libcst-1.8.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:351ab879c2fd20d9cb2844ed1ea3e617ed72854d3d1e2b0880ede9c3eea43ba8", size = 2301816 }, + { url = "https://files.pythonhosted.org/packages/03/a9/5732b20569a434ee3ff96f1b263e6e3f3df70d8dba5cf7c8f7d4b1d6aa41/libcst-1.8.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98fa1ca321c81fb1f02e5c43f956ca543968cc1a30b264fd8e0a2e1b0b0bf106", size = 2408392 }, + { url = "https://files.pythonhosted.org/packages/f9/ad/ecb1275796504a34a9d6d5d4f73bd81cb12930064e98871ad4b4042b82e1/libcst-1.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:25fc7a1303cad7639ad45ec38c06789b4540b7258e9a108924aaa2c132af4aca", size = 2119206 }, + { url = 
"https://files.pythonhosted.org/packages/94/32/b6521d32a7cde089380efa948e05a7cff95c7ece8f7c36380dd6b4bf2263/libcst-1.8.6-cp39-cp39-win_arm64.whl", hash = "sha256:4d7bbdd35f3abdfb5ac5d1a674923572dab892b126a58da81ff2726102d6ec2e", size = 2001882 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321 }, +] + [[package]] name = "markupsafe" version = 
"3.0.3" @@ -977,6 +1163,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4e/d3/fe08482b5cd995033556d45041a4f4e76e7f0521112a9c9991d40d39825f/markupsafe-3.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8", size = 13928 }, ] +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + [[package]] name = "packaging" version = "25.0" @@ -1013,12 +1208,30 @@ asyncpg = [ { name = "greenlet", version = "3.2.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "greenlet", version = "3.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] +pg8000 = [ + { name = "pg8000" }, +] +psycopg = [ + { name = "psycopg", version = "3.2.13", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "psycopg", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +psycopg2-binary = [ + { name = "psycopg2-binary" }, +] +psycopg2cffi = [ + { name = "psycopg2cffi" }, +] + +[package.dev-dependencies] dev = [ { name = "asyncpg" }, + { name = "fastapi" }, { name = "filelock", version = "3.19.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "filelock", version = "3.20.1", source = { registry = "https://pypi.org/simple" 
}, marker = "python_full_version >= '3.10'" }, { name = "greenlet", version = "3.2.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "greenlet", version = "3.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "httpx" }, + { name = "libcst" }, { name = "pg8000" }, { name = "psycopg", version = "3.2.13", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "psycopg", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -1029,6 +1242,8 @@ dev = [ { name = "pytest-cov" }, { name = "pytest-lazy-fixture" }, { name = "pytest-xdist" }, + { name = "rich" }, + { name = "ruff" }, { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ -1036,48 +1251,108 @@ dev = [ { name = "sphinx-autobuild", version = "2025.8.25", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "sphinx-copybutton" }, { name = "sphinx-rtd-theme" }, + { name = "uvicorn", version = "0.39.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "uvicorn", version = "0.40.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] -pg8000 = [ - { name = "pg8000" }, +docs = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-autobuild", version = "2024.10.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" }, + { name = "sphinx-autobuild", version = "2025.8.25", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-copybutton" }, + { name = "sphinx-rtd-theme" }, ] -psycopg = [ +fastapi-pub-sub-example = [ + { name = "fastapi" }, + { name = "httpx" }, + { name = "uvicorn", version = "0.39.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "uvicorn", version = "0.40.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +postgresql-drivers = [ + { name = "asyncpg" }, + { name = "greenlet", version = "3.2.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "greenlet", version = "3.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pg8000" }, { name = "psycopg", version = "3.2.13", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "psycopg", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, -] -psycopg2-binary = [ { name = "psycopg2-binary" }, -] -psycopg2cffi = [ { name = "psycopg2cffi" }, ] +test = [ + { name = "filelock", version = "3.19.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "filelock", version = "3.20.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { 
name = "pytest-lazy-fixture" }, + { name = "pytest-xdist" }, +] [package.metadata] requires-dist = [ { name = "asyncpg", marker = "extra == 'asyncpg'", specifier = ">=0.29.0" }, - { name = "asyncpg", marker = "extra == 'dev'", specifier = ">=0.29.0" }, - { name = "filelock", marker = "extra == 'dev'", specifier = ">=3.15.4" }, { name = "greenlet", marker = "extra == 'asyncpg'", specifier = ">=3.0.3" }, - { name = "greenlet", marker = "extra == 'dev'", specifier = ">=3.0.3" }, - { name = "pg8000", marker = "extra == 'dev'", specifier = ">=1.31.2" }, { name = "pg8000", marker = "extra == 'pg8000'", specifier = ">=1.31.2" }, - { name = "psycopg", marker = "extra == 'dev'", specifier = ">=3.2.1" }, { name = "psycopg", marker = "extra == 'psycopg'", specifier = ">=3.2.1" }, - { name = "psycopg2-binary", marker = "extra == 'dev'", specifier = ">=2.9.9" }, { name = "psycopg2-binary", marker = "extra == 'psycopg2-binary'", specifier = ">=2.9.9" }, - { name = "psycopg2cffi", marker = "extra == 'dev'", specifier = ">=2.9.0" }, { name = "psycopg2cffi", marker = "extra == 'psycopg2cffi'", specifier = ">=2.9.0" }, - { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.4.4,<8.0" }, - { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.8" }, - { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=5.0.0" }, - { name = "pytest-lazy-fixture", marker = "extra == 'dev'", specifier = ">=0.6.3" }, - { name = "pytest-xdist", marker = "extra == 'dev'", specifier = ">=3.6.1" }, - { name = "sphinx", marker = "extra == 'dev'", specifier = ">=7.3.7" }, - { name = "sphinx-autobuild", marker = "extra == 'dev'", specifier = ">=2024.4.16" }, - { name = "sphinx-copybutton", marker = "extra == 'dev'", specifier = ">=0.5.2" }, - { name = "sphinx-rtd-theme", marker = "extra == 'dev'", specifier = ">=2.0.0" }, { name = "sqlalchemy", specifier = ">=2.0.31" }, ] -provides-extras = ["asyncpg", "pg8000", "psycopg", "psycopg2-binary", "psycopg2cffi", "dev"] 
+provides-extras = ["asyncpg", "pg8000", "psycopg", "psycopg2-binary", "psycopg2cffi"] + +[package.metadata.requires-dev] +dev = [ + { name = "asyncpg", specifier = ">=0.29.0" }, + { name = "fastapi", specifier = ">=0.104.0" }, + { name = "filelock", specifier = ">=3.15.4" }, + { name = "greenlet", specifier = ">=3.0.3" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "libcst", specifier = ">=1.8.6" }, + { name = "pg8000", specifier = ">=1.31.2" }, + { name = "psycopg", specifier = ">=3.2.1" }, + { name = "psycopg2-binary", specifier = ">=2.9.9" }, + { name = "psycopg2cffi", specifier = ">=2.9.0" }, + { name = "pytest", specifier = ">=7.4.4,<8.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.8" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "pytest-lazy-fixture", specifier = ">=0.6.3" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "rich", specifier = ">=14.2.0" }, + { name = "ruff", specifier = ">=0.14.10" }, + { name = "sphinx", specifier = ">=7.3.7" }, + { name = "sphinx-autobuild", specifier = ">=2024.4.16" }, + { name = "sphinx-copybutton", specifier = ">=0.5.2" }, + { name = "sphinx-rtd-theme", specifier = ">=2.0.0" }, + { name = "uvicorn", specifier = ">=0.24.0" }, +] +docs = [ + { name = "sphinx", specifier = ">=7.3.7" }, + { name = "sphinx-autobuild", specifier = ">=2024.4.16" }, + { name = "sphinx-copybutton", specifier = ">=0.5.2" }, + { name = "sphinx-rtd-theme", specifier = ">=2.0.0" }, +] +fastapi-pub-sub-example = [ + { name = "fastapi", specifier = ">=0.104.0" }, + { name = "httpx", specifier = ">=0.28.1" }, + { name = "uvicorn", specifier = ">=0.24.0" }, +] +postgresql-drivers = [ + { name = "asyncpg", specifier = ">=0.29.0" }, + { name = "greenlet", specifier = ">=3.0.3" }, + { name = "pg8000", specifier = ">=1.31.2" }, + { name = "psycopg", specifier = ">=3.2.1" }, + { name = "psycopg2-binary", specifier = ">=2.9.9" }, + { name = "psycopg2cffi", specifier = ">=2.9.0" }, +] +test = [ + { name = 
"filelock", specifier = ">=3.15.4" }, + { name = "pytest", specifier = ">=7.4.4,<8.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.8" }, + { name = "pytest-cov", specifier = ">=5.0.0" }, + { name = "pytest-lazy-fixture", specifier = ">=0.6.3" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, +] [[package]] name = "pluggy" @@ -1109,7 +1384,9 @@ name = "psycopg" version = "3.3.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] dependencies = [ @@ -1214,6 +1491,152 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140 }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580 }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298 }, + { url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475 }, + { url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815 }, + { url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567 }, + { url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442 }, + { url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956 
}, + { url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253 }, + { url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050 }, + { url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178 }, + { url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833 }, + { url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156 }, + { url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378 }, + { url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622 }, + { url = 
"https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873 }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826 }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869 }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890 }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740 }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021 }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378 }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761 }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303 }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355 }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875 }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549 }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305 }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", 
size = 1972902 }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990 }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003 }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200 }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578 }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504 }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816 }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366 }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698 }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603 }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591 }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068 }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908 }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145 }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", 
size = 1976179 }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403 }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206 }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307 }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258 }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917 }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186 }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164 }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146 }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788 }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133 }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852 }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679 }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766 }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", 
size = 1981005 }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622 }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725 }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040 }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691 }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897 }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302 }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877 }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680 }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960 }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102 }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039 }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126 }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489 }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", 
size = 1977288 }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255 }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760 }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092 }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385 }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832 }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585 }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078 }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914 }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560 }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244 }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955 }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906 }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607 }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = 
"sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769 }, + { url = "https://files.pythonhosted.org/packages/54/db/160dffb57ed9a3705c4cbcbff0ac03bdae45f1ca7d58ab74645550df3fbd/pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf", size = 2107999 }, + { url = "https://files.pythonhosted.org/packages/a3/7d/88e7de946f60d9263cc84819f32513520b85c0f8322f9b8f6e4afc938383/pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5", size = 1929745 }, + { url = "https://files.pythonhosted.org/packages/d5/c2/aef51e5b283780e85e99ff19db0f05842d2d4a8a8cd15e63b0280029b08f/pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d", size = 1920220 }, + { url = "https://files.pythonhosted.org/packages/c7/97/492ab10f9ac8695cd76b2fdb24e9e61f394051df71594e9bcc891c9f586e/pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60", size = 2067296 }, + { url = "https://files.pythonhosted.org/packages/ec/23/984149650e5269c59a2a4c41d234a9570adc68ab29981825cfaf4cfad8f4/pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82", size = 2231548 }, + { url = "https://files.pythonhosted.org/packages/71/0c/85bcbb885b9732c28bec67a222dbed5ed2d77baee1f8bba2002e8cd00c5c/pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5", size = 2362571 }, + { url = 
"https://files.pythonhosted.org/packages/c0/4a/412d2048be12c334003e9b823a3fa3d038e46cc2d64dd8aab50b31b65499/pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3", size = 2068175 }, + { url = "https://files.pythonhosted.org/packages/73/f4/c58b6a776b502d0a5540ad02e232514285513572060f0d78f7832ca3c98b/pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425", size = 2177203 }, + { url = "https://files.pythonhosted.org/packages/ed/ae/f06ea4c7e7a9eead3d165e7623cd2ea0cb788e277e4f935af63fc98fa4e6/pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504", size = 2148191 }, + { url = "https://files.pythonhosted.org/packages/c1/57/25a11dcdc656bf5f8b05902c3c2934ac3ea296257cc4a3f79a6319e61856/pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5", size = 2343907 }, + { url = "https://files.pythonhosted.org/packages/96/82/e33d5f4933d7a03327c0c43c65d575e5919d4974ffc026bc917a5f7b9f61/pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3", size = 2322174 }, + { url = "https://files.pythonhosted.org/packages/81/45/4091be67ce9f469e81656f880f3506f6a5624121ec5eb3eab37d7581897d/pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460", size = 1990353 }, + { url = "https://files.pythonhosted.org/packages/44/8a/a98aede18db6e9cd5d66bcacd8a409fcf8134204cdede2e7de35c5a2c5ef/pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b", size = 2015698 }, + { url = 
"https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441 }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291 }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632 }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905 }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495 }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388 }, + { url = 
"https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879 }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017 }, + { url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351 }, + { url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363 }, + { url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615 }, + { url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369 }, + { url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218 }, + { url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951 }, + { url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428 }, + { url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009 }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980 }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865 }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256 }, + { url = 
"https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762 }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141 }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317 }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992 }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302 }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -1305,6 +1728,103 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227 }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019 }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646 }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793 }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293 }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872 }, + { url = 
"https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828 }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415 }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561 }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826 }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577 }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556 }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114 }, + { url = 
"https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638 }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463 }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986 }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543 }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763 }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063 }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973 }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116 }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011 }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870 }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089 }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181 }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658 }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003 }, + { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344 }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 }, + { url 
= "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 }, + { url = 
"https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 }, 
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 }, + { url = "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", size = 184450 }, + { url = "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", size = 174319 }, + { url = "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", size = 737631 }, + { url = "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", size = 836795 }, + { url = "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", size = 750767 }, + { url = "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", size = 727982 }, + { url = 
"https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", size = 755677 }, + { url = "https://files.pythonhosted.org/packages/29/3d/6f5e0d58bd924fb0d06c3a6bad00effbdae2de5adb5cda5648006ffbd8d3/pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0", size = 142592 }, + { url = "https://files.pythonhosted.org/packages/f0/0c/25113e0b5e103d7f1490c0e947e303fe4a696c10b501dea7a9f49d4e876c/pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007", size = 158777 }, +] + +[[package]] +name = "pyyaml-ft" +version = "8.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/eb/5a0d575de784f9a1f94e2b1288c6886f13f34185e13117ed530f32b6f8a8/pyyaml_ft-8.0.0.tar.gz", hash = "sha256:0c947dce03954c7b5d38869ed4878b2e6ff1d44b08a0d84dc83fdad205ae39ab", size = 141057 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/ba/a067369fe61a2e57fb38732562927d5bae088c73cb9bb5438736a9555b29/pyyaml_ft-8.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8c1306282bc958bfda31237f900eb52c9bedf9b93a11f82e1aab004c9a5657a6", size = 187027 }, + { url = "https://files.pythonhosted.org/packages/ad/c5/a3d2020ce5ccfc6aede0d45bcb870298652ac0cf199f67714d250e0cdf39/pyyaml_ft-8.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30c5f1751625786c19de751e3130fc345ebcba6a86f6bddd6e1285342f4bbb69", size = 176146 }, + { url = "https://files.pythonhosted.org/packages/e3/bb/23a9739291086ca0d3189eac7cd92b4d00e9fdc77d722ab610c35f9a82ba/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fa992481155ddda2e303fcc74c79c05eddcdbc907b888d3d9ce3ff3e2adcfb0", size = 746792 }, + { url = 
"https://files.pythonhosted.org/packages/5f/c2/e8825f4ff725b7e560d62a3609e31d735318068e1079539ebfde397ea03e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cec6c92b4207004b62dfad1f0be321c9f04725e0f271c16247d8b39c3bf3ea42", size = 786772 }, + { url = "https://files.pythonhosted.org/packages/35/be/58a4dcae8854f2fdca9b28d9495298fd5571a50d8430b1c3033ec95d2d0e/pyyaml_ft-8.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06237267dbcab70d4c0e9436d8f719f04a51123f0ca2694c00dd4b68c338e40b", size = 778723 }, + { url = "https://files.pythonhosted.org/packages/86/ed/fed0da92b5d5d7340a082e3802d84c6dc9d5fa142954404c41a544c1cb92/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8a7f332bc565817644cdb38ffe4739e44c3e18c55793f75dddb87630f03fc254", size = 758478 }, + { url = "https://files.pythonhosted.org/packages/f0/69/ac02afe286275980ecb2dcdc0156617389b7e0c0a3fcdedf155c67be2b80/pyyaml_ft-8.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d10175a746be65f6feb86224df5d6bc5c049ebf52b89a88cf1cd78af5a367a8", size = 799159 }, + { url = "https://files.pythonhosted.org/packages/4e/ac/c492a9da2e39abdff4c3094ec54acac9747743f36428281fb186a03fab76/pyyaml_ft-8.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:58e1015098cf8d8aec82f360789c16283b88ca670fe4275ef6c48c5e30b22a96", size = 158779 }, + { url = "https://files.pythonhosted.org/packages/5d/9b/41998df3298960d7c67653669f37710fa2d568a5fc933ea24a6df60acaf6/pyyaml_ft-8.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5f3e2ceb790d50602b2fd4ec37abbd760a8c778e46354df647e7c5a4ebb", size = 191331 }, + { url = "https://files.pythonhosted.org/packages/0f/16/2710c252ee04cbd74d9562ebba709e5a284faeb8ada88fcda548c9191b47/pyyaml_ft-8.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8d445bf6ea16bb93c37b42fdacfb2f94c8e92a79ba9e12768c96ecde867046d1", size = 182879 }, + { url = 
"https://files.pythonhosted.org/packages/9a/40/ae8163519d937fa7bfa457b6f78439cc6831a7c2b170e4f612f7eda71815/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c56bb46b4fda34cbb92a9446a841da3982cdde6ea13de3fbd80db7eeeab8b49", size = 811277 }, + { url = "https://files.pythonhosted.org/packages/f9/66/28d82dbff7f87b96f0eeac79b7d972a96b4980c1e445eb6a857ba91eda00/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dab0abb46eb1780da486f022dce034b952c8ae40753627b27a626d803926483b", size = 831650 }, + { url = "https://files.pythonhosted.org/packages/e8/df/161c4566facac7d75a9e182295c223060373d4116dead9cc53a265de60b9/pyyaml_ft-8.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd48d639cab5ca50ad957b6dd632c7dd3ac02a1abe0e8196a3c24a52f5db3f7a", size = 815755 }, + { url = "https://files.pythonhosted.org/packages/05/10/f42c48fa5153204f42eaa945e8d1fd7c10d6296841dcb2447bf7da1be5c4/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:052561b89d5b2a8e1289f326d060e794c21fa068aa11255fe71d65baf18a632e", size = 810403 }, + { url = "https://files.pythonhosted.org/packages/d5/d2/e369064aa51009eb9245399fd8ad2c562bd0bcd392a00be44b2a824ded7c/pyyaml_ft-8.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3bb4b927929b0cb162fb1605392a321e3333e48ce616cdcfa04a839271373255", size = 835581 }, + { url = "https://files.pythonhosted.org/packages/c0/28/26534bed77109632a956977f60d8519049f545abc39215d086e33a61f1f2/pyyaml_ft-8.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:de04cfe9439565e32f178106c51dd6ca61afaa2907d143835d501d84703d3793", size = 171579 }, +] + [[package]] name = "requests" version = "2.32.5" @@ -1320,6 +1840,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = 
"sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, ] +[[package]] +name = "rich" +version = "14.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393 }, +] + [[package]] name = "roman-numerals" version = "4.1.0" @@ -1341,6 +1875,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/2c/daca29684cbe9fd4bc711f8246da3c10adca1ccc4d24436b17572eb2590e/roman_numerals_py-4.1.0-py3-none-any.whl", hash = "sha256:553114c1167141c1283a51743759723ecd05604a1b6b507225e91dc1a6df0780", size = 4547 }, ] +[[package]] +name = "ruff" +version = "0.14.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080 }, + { 
url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320 }, + { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434 }, + { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961 }, + { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629 }, + { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234 }, + { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890 }, + { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172 }, + { url = 
"https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260 }, + { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978 }, + { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036 }, + { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051 }, + { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998 }, + { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891 }, + { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660 }, + { url = 
"https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187 }, + { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283 }, + { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839 }, +] + [[package]] name = "scramp" version = "1.4.6" @@ -1439,7 +1999,9 @@ name = "sphinx" version = "8.2.3" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, @@ -1494,7 +2056,9 @@ name = "sphinx-autobuild" version = "2025.8.25" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", ] dependencies = [ { name = "colorama", marker = "python_full_version >= '3.11'" }, @@ -1685,7 +2249,9 @@ name = "starlette" version = "0.50.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version 
== '3.10.*'", ] dependencies = [ @@ -1755,6 +2321,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 }, +] + [[package]] name = "tzdata" version = "2025.3" @@ -1795,7 +2373,9 @@ name = "uvicorn" version = "0.40.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ - "python_full_version >= '3.11'", + "python_full_version >= '3.14'", + "python_full_version == '3.13.*'", + "python_full_version >= '3.11' and python_full_version < '3.13'", "python_full_version == '3.10.*'", ] dependencies = [ From 615beeaa0c8105c9fe5d8a00c056de9ce6735029 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Wed, 7 Jan 2026 15:46:13 +0800 Subject: [PATCH 13/17] Add MultiSubprocessesRenderer utils --- examples_tests/utils/__init__.py | 0 examples_tests/utils/console.py | 207 +++++++++++++++++++++++++++++++ 2 files changed, 207 insertions(+) create mode 100644 examples_tests/utils/__init__.py create mode 100644 examples_tests/utils/console.py diff --git a/examples_tests/utils/__init__.py b/examples_tests/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff 
--git a/examples_tests/utils/console.py b/examples_tests/utils/console.py new file mode 100644 index 0000000..9f84a65 --- /dev/null +++ b/examples_tests/utils/console.py @@ -0,0 +1,207 @@ +from typing import Callable, ParamSpec + +import time +import subprocess +import sys +import os +import fcntl +from collections import deque +from dataclasses import dataclass + +from rich.console import Console +from rich.live import Live +from rich.layout import Layout +from rich.panel import Panel +from rich.text import Text + +PS = ParamSpec("PS") + + +@dataclass +class CmdArg: + cmd: list[str] + panel_title: str + + @property + def layout_name(self): + "\n".join(self.cmd) + self.panel_title + + +class ProcessInstance: + def __init__(self, cmd_arg: CmdArg, console: Console): + console.print(f"[yellow]Starting {cmd_arg.panel_title}...[/yellow]") + + self.process = ProcessInstance.create_process(cmd_arg) + self.panel_title = cmd_arg.panel_title + self.layout_name = cmd_arg.layout_name + self.panel_output_buffer = deque() + self.panel_output_buffer.append(f"Starting {cmd_arg.panel_title}...") + + console.print( + f"[green]{cmd_arg.panel_title} started with PID: {self.process.pid}[/green]" + ) + + @staticmethod + def set_non_blocking(fd) -> None: + """Set file descriptor to non-blocking mode""" + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) + + @staticmethod + def create_process(cmd_arg: CmdArg) -> subprocess.Popen: + process = subprocess.Popen( + cmd_arg.cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=1, + ) + # Set file descriptors to non-blocking mode + ProcessInstance.set_non_blocking(process.stdout) + ProcessInstance.set_non_blocking(process.stderr) + + return process + + @staticmethod + def _get_output(process: subprocess.Popen) -> tuple[str | None, str | None]: + """Get stdout and stderr output from process (non-blocking)""" + stdout_line = None + stderr_line = None + + # Try to read from 
stdout (non-blocking) + if process.stdout: + try: + line = process.stdout.readline() + if line: + stdout_line = line.rstrip() + except (IOError, OSError): + pass # No data available + + # Try to read from stderr (non-blocking) + if process.stderr: + try: + line = process.stderr.readline() + if line: + stderr_line = line.rstrip() + except (IOError, OSError): + pass # No data available + + return stdout_line, stderr_line + + def get_output(self) -> tuple[str | None, str | None]: + return self._get_output(self.process) + + def get_panel_output(self, max_lines: int) -> Panel | None: + stdout_line, stderr_line = self.get_output() + if stdout_line is None and stderr_line is None: + return None + + content = "" + + if stdout_line: + self.panel_output_buffer.append(stdout_line) + content += "\n".join(self.panel_output_buffer) + if stderr_line: + self.panel_output_buffer.append(f"[red]{stderr_line}[/red]") + content += "\n".join(self.panel_output_buffer) + + if len(self.panel_output_buffer) > max_lines: + self.panel_output_buffer.popleft() + + return Panel(content, title=self.panel_title) + + +class MultiSubprocessesRenderer: + def __init__( + self, + cmds: list[CmdArg], + stop_condition_callable: Callable[PS, bool] | None = None, + render_interval: float = 0.05, + init_subprocesses_wait_time: int = 2, + max_lines: int = 50, + ) -> None: + self.cmds = cmds + self.render_interval = render_interval + self.init_subprocesses_wait_time = init_subprocesses_wait_time + self.max_lines = max_lines + if stop_condition_callable is None: + self.stop_condition_callable = lambda: False + else: + self.stop_condition_callable = stop_condition_callable + + # rich attributes + self.console = Console() + + def _init_layouts(self) -> None: + # Set up rich layout + self.layout = Layout() + self.layout.split_row(*[Layout(name=cmd.layout_name) for cmd in self.cmds]) + # Initial content for panels + for cmd in self.cmds: + self.layout[cmd.layout_name].update( + Panel( + Text(f"Starting 
{cmd.panel_title}...", justify="center"), + title=cmd.panel_title, + ) + ) + + def _init_processes(self) -> None: + self.process_instances = [ + ProcessInstance(cmd_arg, self.console) for cmd_arg in self.cmds + ] + + def _update_processes_panel_output_to_layout(self): + for p in self.process_instances: + if new_panel_content := p.get_panel_output(self.max_lines): + self.layout[p.layout_name].update(new_panel_content) + + @property + def _any_process_is_not_stop(self) -> bool: + # if p.poll() is None, which means the process is not stop + for p in self.process_instances: + if p.process.poll() is None: + return True + + def __enter__(self): + self._init_layouts() + self._init_processes() + time.sleep(self.init_subprocesses_wait_time) + + with Live( + self.layout, screen=True, redirect_stdout=False, redirect_stderr=False + ) as live: + # Loop while processes are alive + while self._any_process_is_not_stop: + # Small sleep to prevent busy waiting + time.sleep(self.render_interval) + self._update_processes_panel_output_to_layout() + # Manually refresh the live display + live.update(self.layout) + # If we reach the condition, we will exit the loop + if self.stop_condition_callable(): + break + + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + for p in self.process_instances: + # if the process is not yet stop + if p.process.poll() is not None: + p.process.kill() + + +# 2. 
Main script to manage the display and processes +if __name__ == "__main__": + with MultiSubprocessesRenderer( + cmds=[ + CmdArg( + [sys.executable, "-u", "examples/fastapi_pub_sub/api.py"], + panel_title="API process", + ), + CmdArg( + [sys.executable, "-u", "examples/fastapi_pub_sub/consumer.py"], + panel_title="Consumer process", + ), + ] + ) as renderer: + renderer.console.print("[bold green]All processes finished![/bold green]") From e36cc8fb4059622ce74da17033c0c08c27e85817 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Wed, 7 Jan 2026 16:48:50 +0800 Subject: [PATCH 14/17] Fix MultiSubprocessesRenderer not rendering issue --- examples_tests/utils/console.py | 76 ++++++++++++++++++++++++--------- 1 file changed, 56 insertions(+), 20 deletions(-) diff --git a/examples_tests/utils/console.py b/examples_tests/utils/console.py index 9f84a65..c83cead 100644 --- a/examples_tests/utils/console.py +++ b/examples_tests/utils/console.py @@ -24,7 +24,7 @@ class CmdArg: @property def layout_name(self): - "\n".join(self.cmd) + self.panel_title + return "_".join(self.cmd) + "_" + self.panel_title class ProcessInstance: @@ -41,6 +41,24 @@ def __init__(self, cmd_arg: CmdArg, console: Console): f"[green]{cmd_arg.panel_title} started with PID: {self.process.pid}[/green]" ) + @property + def alive(self) -> bool: + # if p.poll() is None, which means the process is not stop + return self.process.poll() is None + + @property + def pid(self) -> int: + return self.process.pid + + def kill(self) -> None: + return self.process.kill() + + def terminate(self) -> None: + return self.process.terminate() + + def wait(self, timeout: float | None = None): + return self.process.wait(timeout=timeout) + @staticmethod def set_non_blocking(fd) -> None: """Set file descriptor to non-blocking mode""" @@ -96,18 +114,18 @@ def get_panel_output(self, max_lines: int) -> Panel | None: if stdout_line is None and stderr_line is None: return None - content = "" - + # Append new output to buffer if 
stdout_line: self.panel_output_buffer.append(stdout_line) - content += "\n".join(self.panel_output_buffer) if stderr_line: self.panel_output_buffer.append(f"[red]{stderr_line}[/red]") - content += "\n".join(self.panel_output_buffer) - if len(self.panel_output_buffer) > max_lines: + # Remove old lines if buffer exceeds max_lines + while len(self.panel_output_buffer) > max_lines: self.panel_output_buffer.popleft() + # Build content from buffer + content = "\n".join(self.panel_output_buffer) return Panel(content, title=self.panel_title) @@ -117,13 +135,13 @@ def __init__( cmds: list[CmdArg], stop_condition_callable: Callable[PS, bool] | None = None, render_interval: float = 0.05, - init_subprocesses_wait_time: int = 2, max_lines: int = 50, + show_pid_in_panel_title: bool = True, ) -> None: self.cmds = cmds self.render_interval = render_interval - self.init_subprocesses_wait_time = init_subprocesses_wait_time self.max_lines = max_lines + self.show_pid_in_panel_title = show_pid_in_panel_title if stop_condition_callable is None: self.stop_condition_callable = lambda: False else: @@ -149,6 +167,9 @@ def _init_processes(self) -> None: self.process_instances = [ ProcessInstance(cmd_arg, self.console) for cmd_arg in self.cmds ] + if self.show_pid_in_panel_title: + for p in self.process_instances: + p.panel_title = f"{p.panel_title}: {p.pid}" def _update_processes_panel_output_to_layout(self): for p in self.process_instances: @@ -157,16 +178,22 @@ def _update_processes_panel_output_to_layout(self): @property def _any_process_is_not_stop(self) -> bool: - # if p.poll() is None, which means the process is not stop for p in self.process_instances: - if p.process.poll() is None: + if p.alive: return True + return False - def __enter__(self): - self._init_layouts() - self._init_processes() - time.sleep(self.init_subprocesses_wait_time) - + def _graceful_cleanup_processes(self) -> None: + for p in self.process_instances: + # if the process is still running, kill it + if p.alive: + 
p.terminate() + try: + p.wait(timeout=2) + except subprocess.TimeoutExpired: + p.kill() + + def start_render(self) -> None: with Live( self.layout, screen=True, redirect_stdout=False, redirect_stderr=False ) as live: @@ -181,13 +208,22 @@ def __enter__(self): if self.stop_condition_callable(): break + def __enter__(self): + self._init_layouts() + self._init_processes() return self def __exit__(self, exc_type, exc_val, exc_tb): - for p in self.process_instances: - # if the process is not yet stop - if p.process.poll() is not None: - p.process.kill() + if exc_type is KeyboardInterrupt: + self.console.print( + "\n[yellow]Keyboard Interrupt detected.\nCleaning up all processes before continuing...[/yellow]" + ) + self._graceful_cleanup_processes() + return True + + self._graceful_cleanup_processes() + self.console.print("Reach stop_condition_callable, stop rendering.") + return False # 2. Main script to manage the display and processes @@ -204,4 +240,4 @@ def __exit__(self, exc_type, exc_val, exc_tb): ), ] ) as renderer: - renderer.console.print("[bold green]All processes finished![/bold green]") + renderer.start_render() From efa27b55525d1f6e88fec2abe6ab34069435a287 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Wed, 7 Jan 2026 17:08:49 +0800 Subject: [PATCH 15/17] Add timeout arg and verify stop_condition_callable --- examples_tests/utils/console.py | 34 +++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/examples_tests/utils/console.py b/examples_tests/utils/console.py index c83cead..54f7a3e 100644 --- a/examples_tests/utils/console.py +++ b/examples_tests/utils/console.py @@ -134,11 +134,13 @@ def __init__( self, cmds: list[CmdArg], stop_condition_callable: Callable[PS, bool] | None = None, + timeout: int | float | None = None, render_interval: float = 0.05, max_lines: int = 50, show_pid_in_panel_title: bool = True, ) -> None: self.cmds = cmds + self.timeout = timeout self.render_interval = render_interval 
self.max_lines = max_lines self.show_pid_in_panel_title = show_pid_in_panel_title @@ -183,6 +185,12 @@ def _any_process_is_not_stop(self) -> bool: return True return False + @property + def _is_timeout(self) -> bool: + return (self.timeout is not None) and ( + time.time() - self.start_time + ) > self.timeout + def _graceful_cleanup_processes(self) -> None: for p in self.process_instances: # if the process is still running, kill it @@ -194,6 +202,8 @@ def _graceful_cleanup_processes(self) -> None: p.kill() def start_render(self) -> None: + self.start_time = time.time() + self.stop_condition_match = False with Live( self.layout, screen=True, redirect_stdout=False, redirect_stderr=False ) as live: @@ -206,8 +216,21 @@ def start_render(self) -> None: live.update(self.layout) # If we reach the condition, we will exit the loop if self.stop_condition_callable(): + self.stop_condition_match = True + break + # If we reach timeout + if self._is_timeout: break + if self._is_timeout and self.start_time is not None: + self.console.print( + f"[red]Timeout after {(time.time() - self.start_time)} seconds. Shutdown.[/red]" + ) + if self.stop_condition_match: + self.console.print( + "[yellow]Reach stop_condition_callable, stop rendering.[/yellow]" + ) + def __enter__(self): self._init_layouts() self._init_processes() @@ -222,12 +245,18 @@ def __exit__(self, exc_type, exc_val, exc_tb): return True self._graceful_cleanup_processes() - self.console.print("Reach stop_condition_callable, stop rendering.") return False # 2. 
Main script to manage the display and processes if __name__ == "__main__": + start_time = time.time() + + def stop_condition_callable(): + if (time.time() - start_time) > 1: + return True + return False + with MultiSubprocessesRenderer( cmds=[ CmdArg( @@ -238,6 +267,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): [sys.executable, "-u", "examples/fastapi_pub_sub/consumer.py"], panel_title="Consumer process", ), - ] + ], + stop_condition_callable=stop_condition_callable, ) as renderer: renderer.start_render() From 40cabda1d6cba1424e243e2b238f569ba26b2152 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Wed, 7 Jan 2026 19:05:38 +0800 Subject: [PATCH 16/17] Refactor FastAPI example and add create orders coordinator script - Update FastAPI API to improve response models and logging. - Introduce create_orders_coordinator.py for parallel order creation. - Enhance consumer.py with verbose logging and improved error handling. - Modify integration tests to support new features and improve readability. 
--- examples/fastapi_pub_sub/api.py | 62 +++--- examples/fastapi_pub_sub/consumer.py | 128 +++++++---- .../create_orders_coordinator.py | 68 ++++++ .../integration/test_fastapi_integration.py | 210 ++++++------------ examples_tests/utils/console.py | 9 + 5 files changed, 269 insertions(+), 208 deletions(-) create mode 100644 examples/fastapi_pub_sub/create_orders_coordinator.py diff --git a/examples/fastapi_pub_sub/api.py b/examples/fastapi_pub_sub/api.py index 75a1a48..06b0fad 100644 --- a/examples/fastapi_pub_sub/api.py +++ b/examples/fastapi_pub_sub/api.py @@ -7,9 +7,9 @@ """ import os from typing import Generator -from contextlib import contextmanager, asynccontextmanager +from contextlib import asynccontextmanager -from fastapi import FastAPI, Depends, HTTPException +from fastapi import FastAPI, Depends, HTTPException, status from pydantic import BaseModel, ConfigDict from sqlalchemy import create_engine, Column, Integer, String, Float, DateTime from sqlalchemy.orm import Session, sessionmaker, declarative_base @@ -18,7 +18,9 @@ from pgmq_sqlalchemy import op # Database configuration - can be overridden by environment variables -DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres") +DATABASE_URL = os.getenv( + "DATABASE_URL", "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres" +) QUEUE_NAME = os.getenv("QUEUE_NAME", "order_queue") # SQLAlchemy setup @@ -47,9 +49,9 @@ class OrderCreate(BaseModel): price: float -class OrderResponse(BaseModel): +class CreateOrderResponse(BaseModel): model_config = ConfigDict(from_attributes=True) - + id: int customer_name: str product_name: str @@ -65,26 +67,27 @@ async def lifespan(app: FastAPI): """Initialize database tables and PGMQ queue on startup.""" # Startup Base.metadata.create_all(bind=engine) - + # Initialize PGMQ queue with SessionLocal() as session: op.check_pgmq_ext(session=session, commit=True) - + # Create queue if it doesn't exist try: 
op.create_queue(QUEUE_NAME, session=session, commit=True) except Exception: # Queue might already exist, which is fine pass - + yield - + # Shutdown (if needed) # FastAPI app with lifespan app = FastAPI(title="Order Management with PGMQ", lifespan=lifespan) + # Database dependency def get_db() -> Generator[Session, None, None]: """Database session dependency.""" @@ -95,14 +98,16 @@ def get_db() -> Generator[Session, None, None]: db.close() -@app.post("/orders", response_model=OrderResponse, status_code=201) -def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): +@app.post("/orders", status_code=status.HTTP_201_CREATED) +def create_order( + order_data: OrderCreate, db: Session = Depends(get_db) +) -> CreateOrderResponse: """Create a new order and publish it to the message queue. - + Args: order_data: Order information db: Database session - + Returns: Created order with message ID """ @@ -115,7 +120,7 @@ def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): ) db.add(db_order) db.flush() # Flush to get the ID without committing - + # Publish message to PGMQ using op in the same transaction message_data = { "order_id": db_order.id, @@ -125,15 +130,15 @@ def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): "price": db_order.price, "created_at": db_order.created_at.isoformat(), } - + msg_id = op.send(QUEUE_NAME, message_data, session=db, commit=False) - + # Commit both order and message in the same transaction db.commit() db.refresh(db_order) - + # Return order with message ID - return OrderResponse( + return CreateOrderResponse( id=db_order.id, customer_name=db_order.customer_name, product_name=db_order.product_name, @@ -144,14 +149,14 @@ def create_order(order_data: OrderCreate, db: Session = Depends(get_db)): ) -@app.get("/orders/{order_id}", response_model=OrderCreate) -def get_order(order_id: int, db: Session = Depends(get_db)): +@app.get("/orders/{order_id}") +def get_order(order_id: int, db: Session = 
Depends(get_db)) -> OrderCreate: """Get order by ID. - + Args: order_id: Order ID db: Database session - + Returns: Order information """ @@ -164,19 +169,21 @@ def get_order(order_id: int, db: Session = Depends(get_db)): @app.get("/messages") def get_messages(limit: int = 10, db: Session = Depends(get_db)): """Read messages from the PGMQ queue. - + Args: limit: Number of messages to read (default: 10) db: Database session - + Returns: List of messages from the queue """ - messages = op.read_batch(QUEUE_NAME, vt=30, batch_size=limit, session=db, commit=True) - + messages = op.read_batch( + QUEUE_NAME, vt=30, batch_size=limit, session=db, commit=True + ) + if not messages: return {"messages": []} - + return { "messages": [ { @@ -199,4 +206,5 @@ def health_check(): if __name__ == "__main__": import uvicorn + uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/examples/fastapi_pub_sub/consumer.py b/examples/fastapi_pub_sub/consumer.py index 03efbb9..bcf2e9d 100644 --- a/examples/fastapi_pub_sub/consumer.py +++ b/examples/fastapi_pub_sub/consumer.py @@ -6,10 +6,10 @@ - Reading and processing messages from PGMQ - Deleting messages after successful processing """ +import argparse import asyncio import logging import os -from typing import Optional from pgmq_sqlalchemy import PGMQueue from pgmq_sqlalchemy.schema import Message @@ -17,85 +17,120 @@ # Configure logging logging.basicConfig( level=logging.INFO, - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' + format="[%(asctime)s][%(levelname)s] - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", ) logger = logging.getLogger(__name__) # Database configuration - can be overridden by environment variables -DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres") +DATABASE_URL = os.getenv( + "DATABASE_URL", "postgresql+asyncpg://postgres:postgres@localhost:5432/postgres" +) QUEUE_NAME = os.getenv("QUEUE_NAME", "order_queue") +BATCH_SIZE = 
int(os.getenv("BATCH_SIZE", "30")) +VT = int(os.getenv("VT", "10")) -async def process_order(message: Message) -> bool: +async def process_order(message: Message, verbose: bool = False) -> bool: """Process an order message. - + Args: message: Message from PGMQ containing order data - + verbose: If True, log detailed order information + Returns: True if processing was successful, False otherwise """ try: order_data = message.message - logger.info(f"Processing order {order_data.get('order_id')}") - logger.info(f" Customer: {order_data.get('customer_name')}") - logger.info(f" Product: {order_data.get('product_name')}") - logger.info(f" Quantity: {order_data.get('quantity')}") - logger.info(f" Price: ${order_data.get('price')}") - + if verbose: + logger.info(f"Processing order {order_data.get('order_id')}") + logger.info(f" Customer: {order_data.get('customer_name')}") + logger.info(f" Product: {order_data.get('product_name')}") + logger.info(f" Quantity: {order_data.get('quantity')}") + logger.info(f" Price: ${order_data.get('price')}") + # Simulate order processing (e.g., inventory check, payment processing, etc.) 
await asyncio.sleep(1) - - logger.info(f"Order {order_data.get('order_id')} processed successfully") + + # Simulate msg_id%6 will fail twice, msg_id%2 will fail once + if message.msg_id % 2 == 0 and message.read_ct == 1: + logger.info( + f"Order {order_data.get('order_id')} processed fail at first try" + ) + return False + elif message.msg_id % 3 == 0 and message.read_ct == 2: + logger.info( + f"Order {order_data.get('order_id')} processed fail at second try" + ) + return False + + if verbose: + logger.info(f"Order {order_data.get('order_id')} processed successfully") return True except Exception as e: logger.error(f"Error processing order: {e}") return False -async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): +async def consume_messages( + pgmq: PGMQueue, batch_size: int, vt: int, verbose: bool = False +): """Continuously consume and process messages from the queue. - + Args: pgmq: PGMQueue instance batch_size: Number of messages to read in each batch vt: Visibility timeout in seconds + verbose: If True, log detailed order information """ logger.info(f"Starting consumer for queue: {QUEUE_NAME}") logger.info(f"Batch size: {batch_size}, Visibility timeout: {vt}s") - + if verbose: + logger.info("Verbose mode enabled") + while True: try: # Read a batch of messages using pgmq instance method - messages = await pgmq.read_batch(QUEUE_NAME, vt=vt, batch_size=batch_size) - + messages = await pgmq.read_batch_async( + QUEUE_NAME, vt=vt, batch_size=batch_size + ) + if not messages: logger.debug("No messages available, waiting...") await asyncio.sleep(1) continue - + logger.info(f"Received {len(messages)} messages") - + # Process messages concurrently tasks = [] for message in messages: - task = process_order(message) + task = process_order(message, verbose=verbose) tasks.append((message.msg_id, task)) - + # Wait for all processing to complete - results = await asyncio.gather(*[t[1] for t in tasks], return_exceptions=True) - + results = await 
asyncio.gather( + *[t[1] for t in tasks], return_exceptions=True + ) + # Delete successfully processed messages using pgmq instance method + deleted_cnt = 0 for (msg_id, _), result in zip(tasks, results): if isinstance(result, bool) and result: - deleted = await pgmq.delete(QUEUE_NAME, msg_id) + deleted = await pgmq.delete_async(QUEUE_NAME, msg_id) if deleted: - logger.info(f"Deleted message {msg_id}") + deleted_cnt += 1 + if verbose: + logger.info("Deleted message %d", msg_id) elif isinstance(result, Exception): logger.error(f"Exception processing message {msg_id}: {result}") else: - logger.warning(f"Message {msg_id} processing failed, will retry later") - + logger.warning( + f"Message {msg_id} processing failed, will retry later" + ) + logger.info("%d messages processed successfully", deleted_cnt) + except KeyboardInterrupt: logger.info("Received shutdown signal, stopping consumer...") break @@ -104,28 +139,39 @@ async def consume_messages(pgmq: PGMQueue, batch_size: int = 10, vt: int = 30): await asyncio.sleep(5) -async def main(): - """Main entry point for the consumer.""" +async def main(verbose: bool = False): + """Main entry point for the consumer. 
+ + Args: + verbose: If True, log detailed order information + """ # Initialize PGMQueue with async session maker and event loop from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession from sqlalchemy.orm import sessionmaker - + async_engine = create_async_engine(DATABASE_URL) async_session_maker = sessionmaker(bind=async_engine, class_=AsyncSession) - - # Get the current event loop to pass to PGMQueue - loop = asyncio.get_event_loop() - - # Initialize PGMQueue with the event loop to avoid conflicts - pgmq = PGMQueue(session_maker=async_session_maker, loop=loop) - + + pgmq = PGMQueue(session_maker=async_session_maker) + try: # Start consuming messages - await consume_messages(pgmq, batch_size=10, vt=30) + await consume_messages(pgmq, batch_size=BATCH_SIZE, vt=VT, verbose=verbose) finally: logger.info("Consumer stopped") await async_engine.dispose() if __name__ == "__main__": - asyncio.run(main()) + parser = argparse.ArgumentParser( + description="PGMQ async consumer for processing orders" + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + help="Enable verbose logging with order details", + ) + args = parser.parse_args() + + asyncio.run(main(verbose=args.verbose)) diff --git a/examples/fastapi_pub_sub/create_orders_coordinator.py b/examples/fastapi_pub_sub/create_orders_coordinator.py new file mode 100644 index 0000000..59767c5 --- /dev/null +++ b/examples/fastapi_pub_sub/create_orders_coordinator.py @@ -0,0 +1,68 @@ +import time +import logging + +import httpx + + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format="[%(asctime)s][%(levelname)s] - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", +) +logger = logging.getLogger(__name__) + + +def wait_until_api_server_to_start() -> None: + # Wait for the server to start + max_attempts = 30 + for _ in range(max_attempts): + try: + response = httpx.get("http://localhost:8000/health", timeout=1) + if response.status_code == 200: + logger.info("API Server is ready!") 
+ return + except Exception: + time.sleep(1) + logger.info("API Server is not ready...") + + raise RuntimeError("API server failed to start") + + +def create_order(order_num: int): + """Helper function to create a single order.""" + order_data = { + "customer_name": f"Customer {order_num}", + "product_name": f"Product {order_num}", + "quantity": order_num % 10 + 1, + "price": 10.0 + (order_num % 50), + } + response = httpx.post("http://localhost:8000/orders", json=order_data, timeout=5) + return ( + response.status_code == 201, + response.json() if response.status_code == 201 else None, + ) + + +def create_orders_parallel(num_orders: int): + import concurrent.futures + + # Create orders in parallel + with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: + futures = [executor.submit(create_order, i) for i in range(num_orders)] + results = [ + future.result() for future in concurrent.futures.as_completed(futures) + ] + + # Check that all orders were created successfully + successful_orders = sum(1 for success, _ in results if success) + assert ( + successful_orders == num_orders + ), f"Only {successful_orders}/{num_orders} orders were created" + logger.info() + logger.info("Create %d successful orders via API Server", successful_orders) + + +if __name__ == "__main__": + wait_until_api_server_to_start() + create_orders_parallel(num_orders=100) diff --git a/examples_tests/integration/test_fastapi_integration.py b/examples_tests/integration/test_fastapi_integration.py index 004c53c..b197822 100644 --- a/examples_tests/integration/test_fastapi_integration.py +++ b/examples_tests/integration/test_fastapi_integration.py @@ -1,178 +1,108 @@ """Integration tests for FastAPI pub/sub example with subprocess.""" import os -import subprocess import sys -import time -import signal import logging +import time import pytest -import httpx from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from pgmq_sqlalchemy import op +from 
examples_tests.utils.console import MultiSubprocessesRenderer, CmdArg + logger = logging.getLogger(__name__) @pytest.fixture(scope="module") def test_queue_name(): """Return a unique queue name for testing.""" - return "test_integration_order_queue" - - -@pytest.fixture(scope="module", autouse=True) -def api_instance(examples_dir, sync_database_url, test_queue_name): - """Fixture to spin up the API server as a subprocess.""" - # Update the API to use test queue - api_py = os.path.join(examples_dir, "api.py") - - # Set environment variables for the subprocess - env = os.environ.copy() - env["DATABASE_URL"] = sync_database_url - env["QUEUE_NAME"] = test_queue_name - - # Start the API server - process = subprocess.Popen( - [sys.executable, api_py], - stdout=sys.stdout, - stderr=sys.stderr, - env=env, - preexec_fn=os.setsid if hasattr(os, 'setsid') else None - ) - logger.info("Create API Server Process") - - # Wait for the server to start - max_attempts = 30 - for i in range(max_attempts): - try: - response = httpx.get("http://localhost:8000/health", timeout=1) - if response.status_code == 200: - break - except Exception: - time.sleep(1) - else: - # Kill the process if it didn't start - if hasattr(os, 'killpg'): - os.killpg(os.getpgid(process.pid), signal.SIGTERM) - else: - process.terminate() - pytest.fail("API server failed to start") - - logger.info("API Server is healthy") - yield process - - # Teardown: kill the API server - if hasattr(os, 'killpg'): - os.killpg(os.getpgid(process.pid), signal.SIGTERM) - else: - process.terminate() - process.wait(timeout=10) - logger.info("Terminate API Server") - - -@pytest.fixture(scope="module", autouse=True) -def consumer_instance(examples_dir, async_database_url, test_queue_name): - """Fixture to spin up the consumer as a subprocess.""" - # Update the consumer to use test queue - consumer_py = os.path.join(examples_dir, "consumer.py") - - # Set environment variables for the subprocess - env = os.environ.copy() - 
env["DATABASE_URL"] = async_database_url - env["QUEUE_NAME"] = test_queue_name - - # Start the consumer - process = subprocess.Popen( - [sys.executable, consumer_py], - stdout=sys.stdout, - stderr=sys.stderr, - env=env, - preexec_fn=os.setsid if hasattr(os, 'setsid') else None - ) - - # Give the consumer some time to start - time.sleep(3) - logger.info("Create Consumer Process") - - yield process - - # Teardown: kill the consumer - if hasattr(os, 'killpg'): - os.killpg(os.getpgid(process.pid), signal.SIGTERM) - else: - process.terminate() - process.wait(timeout=10) - logger.info("Terminate Consumer Process") - - -def test_api_consumer_integration(sync_database_url): + return os.getenv("QUEUE_NAME", "order_queue") + + +def test_api_consumer_integration( + sync_database_url: str, test_queue_name: str, request: pytest.FixtureRequest +): """Test creating 100 orders parallelly and waiting for consumer to process them all.""" - import concurrent.futures - - # Create 100 orders in parallel - num_orders = 100 - - def create_order(order_num: int): - """Helper function to create a single order.""" - order_data = { - "customer_name": f"Customer {order_num}", - "product_name": f"Product {order_num}", - "quantity": order_num % 10 + 1, - "price": 10.0 + (order_num % 50) - } - response = httpx.post("http://localhost:8000/orders", json=order_data, timeout=5) - return response.status_code == 201, response.json() if response.status_code == 201 else None - - # Create orders in parallel - with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: - futures = [executor.submit(create_order, i) for i in range(num_orders)] - results = [future.result() for future in concurrent.futures.as_completed(futures)] - - # Check that all orders were created successfully - successful_orders = sum(1 for success, _ in results if success) - assert successful_orders == num_orders, f"Only {successful_orders}/{num_orders} orders were created" - logger.info("Create %d successful orders via API 
Server", successful_orders) - + + # Check if pytest was run with -v flag + verbose_mode = request.config.getoption("verbose") > 0 + # Wait for the consumer to process all messages # Check the queue periodically until it's empty engine = create_engine(sync_database_url) SessionLocal = sessionmaker(bind=engine) - + max_wait = 120 # Wait up to 2 minutes - start_time = time.time() - - logger.info("Wait for Consumer to process all the orders"); - while time.time() - start_time < max_wait: + num_orders = 100 + + def stop_condition() -> bool: # Check queue metrics to see if there are any messages left with SessionLocal() as session: # Get the test queue name from environment or use default - test_queue = os.getenv("QUEUE_NAME", "test_integration_order_queue") - try: - metrics = op.metrics(test_queue, session=session, commit=True) + metrics = op.metrics(test_queue_name, session=session, commit=True) if metrics: - logger.info("%s queue metrics: %s", test_queue, str(metrics)) + logger.info("%s queue metrics: %s", test_queue_name, str(metrics)) if metrics.queue_length == 0: # All messages have been processed - break + return True except Exception as e: # Queue might not exist yet or other error - print(f"Error checking metrics: {e}") - - time.sleep(2) - else: - metrics = op.metrics(test_queue, session=session, commit=True) + logger.error(f"Error checking metrics: {e}") + return False + + logger.info("Wait for Consumer to process all the orders") + start_time = None + + # Build consumer command with optional verbose flag + consumer_cmd = [sys.executable, "-u", "examples/fastapi_pub_sub/consumer.py"] + if verbose_mode: + consumer_cmd.append("-v") + + with MultiSubprocessesRenderer( + cmds=[ + CmdArg( + [sys.executable, "-u", "examples/fastapi_pub_sub/api.py"], + panel_title="API process", + ), + CmdArg( + consumer_cmd, + panel_title="Consumer process", + ), + CmdArg( + [ + sys.executable, + "-u", + "examples/fastapi_pub_sub/create_orders_coordinator.py", + ], + 
panel_title="Create Orders process", + ), + ], + timeout=max_wait, + wait_process_init_time=3, + stop_condition_callable=stop_condition, + ) as renderer: + start_time = time.time() + renderer.start_render() + + with SessionLocal() as session: + metrics = op.metrics(test_queue_name, session=session, commit=True) if metrics: - logger.info("%s queue metrics: %s", test_queue, str(metrics)) + logger.info("%s queue metrics: %s", test_queue_name, str(metrics)) + + if (time.time() - start_time) > max_wait: pytest.fail(f"Consumer did not process all messages within {max_wait} seconds") - + # Verify that all messages were processed with SessionLocal() as session: - test_queue = os.getenv("QUEUE_NAME", "test_integration_order_queue") - metrics = op.metrics(test_queue, session=session, commit=True) - assert metrics.queue_length == 0, f"Queue still has {metrics.queue_length} messages" + metrics = op.metrics(test_queue_name, session=session, commit=True) + assert ( + metrics.queue_length == 0 + ), f"Queue still has {metrics.queue_length} messages" # The total_messages should be at least num_orders (could be more if retries happened) - assert metrics.total_messages >= num_orders, f"Expected at least {num_orders} total messages, got {metrics.total_messages}" + assert ( + metrics.total_messages >= num_orders + ), f"Expected at least {num_orders} total messages, got {metrics.total_messages}" diff --git a/examples_tests/utils/console.py b/examples_tests/utils/console.py index 54f7a3e..4308643 100644 --- a/examples_tests/utils/console.py +++ b/examples_tests/utils/console.py @@ -135,12 +135,14 @@ def __init__( cmds: list[CmdArg], stop_condition_callable: Callable[PS, bool] | None = None, timeout: int | float | None = None, + wait_process_init_time: int | None = None, render_interval: float = 0.05, max_lines: int = 50, show_pid_in_panel_title: bool = True, ) -> None: self.cmds = cmds self.timeout = timeout + self.wait_process_init_time = wait_process_init_time self.render_interval = 
render_interval self.max_lines = max_lines self.show_pid_in_panel_title = show_pid_in_panel_title @@ -166,6 +168,7 @@ def _init_layouts(self) -> None: ) def _init_processes(self) -> None: + self.console.print("") self.process_instances = [ ProcessInstance(cmd_arg, self.console) for cmd_arg in self.cmds ] @@ -230,10 +233,16 @@ def start_render(self) -> None: self.console.print( "[yellow]Reach stop_condition_callable, stop rendering.[/yellow]" ) + if self.start_time is not None: + self.console.print( + f"Took {(time.time() - self.start_time)} seconds to finish." + ) def __enter__(self): self._init_layouts() self._init_processes() + if self.wait_process_init_time: + time.sleep(self.wait_process_init_time) return self def __exit__(self, exc_type, exc_val, exc_tb): From fc86752f6bc722e58e0421842aa7424041664405 Mon Sep 17 00:00:00 2001 From: LIU ZHE YOU Date: Wed, 7 Jan 2026 19:11:42 +0800 Subject: [PATCH 17/17] Remove Python 3.9 from examples workflow matrix --- .github/workflows/examples.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 4e6c834..8511beb 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -25,7 +25,7 @@ jobs: strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] name: Test Examples (Python ${{ matrix.python-version }})