Skip to content

Commit 20e2fba

Browse files
committed
WIP
1 parent d351aef commit 20e2fba

8 files changed

Lines changed: 695 additions & 111 deletions

File tree

.github/workflows/codecov.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ jobs:
3333
curl -LsSf https://astral.sh/uv/install.sh | sh
3434
echo "$HOME/.local/bin" >> $GITHUB_PATH
3535
- name: Install dependencies
36-
run: uv sync --extra dev
36+
run: uv sync --group postgresql-drivers --group test
3737
- name: Start PostgreSQL
3838
run: |
3939
cp pgmq_postgres.template.env pgmq_postgres.env

.github/workflows/examples.yml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,7 @@ jobs:
4444
4545
- name: Install dependencies
4646
run: |
47-
uv sync --extra dev
48-
# Install additional dependencies for examples
49-
uv pip install fastapi uvicorn httpx
47+
uv sync --all-groups --no-group docs
5048
5149
- name: Start PostgreSQL
5250
run: |

examples/fastapi_pub_sub/pyproject.toml

Lines changed: 0 additions & 17 deletions
This file was deleted.

examples/fastapi_pub_sub/requirements.txt

Lines changed: 0 additions & 5 deletions
This file was deleted.

examples_tests/conftest.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
import os
33
import pytest
44

5+
import logging
6+
57

68
def pytest_addoption(parser):
79
"""Add custom command-line options for pytest."""
@@ -12,6 +14,23 @@ def pytest_addoption(parser):
1214
help="Specify the database name to use for testing",
1315
)
1416

17+
@pytest.fixture(scope="module")
18+
def configure_logger():
19+
logging.basicConfig(
20+
level=logging.INFO,
21+
format="[%(levelname)s][%(asctime)s][%(name)s] %(message)s"
22+
)
23+
24+
25+
@pytest.fixture(scope="module")
26+
def examples_dir():
27+
"""Return the path to the examples directory."""
28+
return os.path.join(
29+
os.path.dirname(os.path.dirname(__file__)),
30+
"examples",
31+
"fastapi_pub_sub"
32+
)
33+
1534

1635
@pytest.fixture(scope="module")
1736
def database_url(request):

examples_tests/integration/test_fastapi_integration.py

Lines changed: 36 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,20 @@
11
"""Integration tests for FastAPI pub/sub example with subprocess."""
2-
import asyncio
2+
33
import os
44
import subprocess
55
import sys
66
import time
77
import signal
8+
import logging
9+
810
import pytest
9-
import requests
10-
from sqlalchemy import create_engine, text
11+
import httpx
12+
from sqlalchemy import create_engine
1113
from sqlalchemy.orm import sessionmaker
1214

1315
from pgmq_sqlalchemy import op
1416

15-
16-
@pytest.fixture(scope="module")
17-
def examples_dir():
18-
"""Return the path to the examples directory."""
19-
return os.path.join(
20-
os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
21-
"examples",
22-
"fastapi_pub_sub"
23-
)
17+
logger = logging.getLogger(__name__)
2418

2519

2620
@pytest.fixture(scope="module")
@@ -29,49 +23,35 @@ def test_queue_name():
2923
return "test_integration_order_queue"
3024

3125

32-
@pytest.fixture(scope="module")
33-
def database_url(request):
34-
"""Get database URL from environment or CLI."""
35-
db_name = request.config.getoption("--db-name")
36-
if not db_name:
37-
db_name = os.getenv("SQLALCHEMY_DB", "postgres")
38-
39-
host = os.getenv("SQLALCHEMY_HOST", "localhost")
40-
port = os.getenv("SQLALCHEMY_PORT", "5432")
41-
user = os.getenv("SQLALCHEMY_USER", "postgres")
42-
password = os.getenv("SQLALCHEMY_PASSWORD", "postgres")
43-
44-
return f"postgresql+psycopg2://{user}:{password}@{host}:{port}/{db_name}"
45-
46-
4726
@pytest.fixture(scope="module", autouse=True)
48-
def api_instance(examples_dir, database_url, test_queue_name):
27+
def api_instance(examples_dir, sync_database_url, test_queue_name):
4928
"""Fixture to spin up the API server as a subprocess."""
5029
# Update the API to use test queue
5130
api_py = os.path.join(examples_dir, "api.py")
5231

5332
# Set environment variables for the subprocess
5433
env = os.environ.copy()
55-
env["DATABASE_URL"] = database_url
34+
env["DATABASE_URL"] = sync_database_url
5635
env["QUEUE_NAME"] = test_queue_name
5736

5837
# Start the API server
5938
process = subprocess.Popen(
6039
[sys.executable, api_py],
61-
stdout=subprocess.PIPE,
62-
stderr=subprocess.PIPE,
40+
stdout=sys.stdout,
41+
stderr=sys.stderr,
6342
env=env,
6443
preexec_fn=os.setsid if hasattr(os, 'setsid') else None
6544
)
45+
logger.info("Create API Server Process")
6646

6747
# Wait for the server to start
6848
max_attempts = 30
6949
for i in range(max_attempts):
7050
try:
71-
response = requests.get("http://localhost:8000/health", timeout=1)
51+
response = httpx.get("http://localhost:8000/health", timeout=1)
7252
if response.status_code == 200:
7353
break
74-
except requests.exceptions.RequestException:
54+
except Exception:
7555
time.sleep(1)
7656
else:
7757
# Kill the process if it didn't start
@@ -80,7 +60,8 @@ def api_instance(examples_dir, database_url, test_queue_name):
8060
else:
8161
process.terminate()
8262
pytest.fail("API server failed to start")
83-
63+
64+
logger.info("API Server is healthy")
8465
yield process
8566

8667
# Teardown: kill the API server
@@ -89,30 +70,32 @@ def api_instance(examples_dir, database_url, test_queue_name):
8970
else:
9071
process.terminate()
9172
process.wait(timeout=10)
73+
logger.info("Terminate API Server")
9274

9375

9476
@pytest.fixture(scope="module", autouse=True)
95-
def consumer_instance(examples_dir, database_url, test_queue_name, api_instance):
77+
def consumer_instance(examples_dir, async_database_url, test_queue_name):
9678
"""Fixture to spin up the consumer as a subprocess."""
9779
# Update the consumer to use test queue
9880
consumer_py = os.path.join(examples_dir, "consumer.py")
9981

10082
# Set environment variables for the subprocess
10183
env = os.environ.copy()
102-
env["DATABASE_URL"] = database_url
84+
env["DATABASE_URL"] = async_database_url
10385
env["QUEUE_NAME"] = test_queue_name
10486

10587
# Start the consumer
10688
process = subprocess.Popen(
10789
[sys.executable, consumer_py],
108-
stdout=subprocess.PIPE,
109-
stderr=subprocess.PIPE,
90+
stdout=sys.stdout,
91+
stderr=sys.stderr,
11092
env=env,
11193
preexec_fn=os.setsid if hasattr(os, 'setsid') else None
11294
)
11395

11496
# Give the consumer some time to start
11597
time.sleep(3)
98+
logger.info("Create Consumer Process")
11699

117100
yield process
118101

@@ -122,24 +105,25 @@ def consumer_instance(examples_dir, database_url, test_queue_name, api_instance)
122105
else:
123106
process.terminate()
124107
process.wait(timeout=10)
108+
logger.info("Terminate Consumer Process")
125109

126110

127-
def test_api_consumer_integration(api_instance, consumer_instance, database_url):
111+
def test_api_consumer_integration(sync_database_url):
128112
"""Test creating 100 orders parallelly and waiting for consumer to process them all."""
129113
import concurrent.futures
130114

131115
# Create 100 orders in parallel
132116
num_orders = 100
133117

134-
def create_order(order_num):
118+
def create_order(order_num: int):
135119
"""Helper function to create a single order."""
136120
order_data = {
137121
"customer_name": f"Customer {order_num}",
138122
"product_name": f"Product {order_num}",
139123
"quantity": order_num % 10 + 1,
140124
"price": 10.0 + (order_num % 50)
141125
}
142-
response = requests.post("http://localhost:8000/orders", json=order_data, timeout=5)
126+
response = httpx.post("http://localhost:8000/orders", json=order_data, timeout=5)
143127
return response.status_code == 201, response.json() if response.status_code == 201 else None
144128

145129
# Create orders in parallel
@@ -150,15 +134,17 @@ def create_order(order_num):
150134
# Check that all orders were created successfully
151135
successful_orders = sum(1 for success, _ in results if success)
152136
assert successful_orders == num_orders, f"Only {successful_orders}/{num_orders} orders were created"
137+
logger.info("Create %d successful orders via API Server", successful_orders)
153138

154139
# Wait for the consumer to process all messages
155140
# Check the queue periodically until it's empty
156-
engine = create_engine(database_url)
141+
engine = create_engine(sync_database_url)
157142
SessionLocal = sessionmaker(bind=engine)
158143

159144
max_wait = 120 # Wait up to 2 minutes
160145
start_time = time.time()
161146

147+
logger.info("Wait for Consumer to process all the orders");
162148
while time.time() - start_time < max_wait:
163149
# Check queue metrics to see if there are any messages left
164150
with SessionLocal() as session:
@@ -167,15 +153,20 @@ def create_order(order_num):
167153

168154
try:
169155
metrics = op.metrics(test_queue, session=session, commit=True)
170-
if metrics.queue_length == 0:
171-
# All messages have been processed
172-
break
156+
if metrics:
157+
logger.info("%s queue metrics: %s", test_queue, str(metrics))
158+
if metrics.queue_length == 0:
159+
# All messages have been processed
160+
break
173161
except Exception as e:
174162
# Queue might not exist yet or other error
175163
print(f"Error checking metrics: {e}")
176164

177165
time.sleep(2)
178166
else:
167+
metrics = op.metrics(test_queue, session=session, commit=True)
168+
if metrics:
169+
logger.info("%s queue metrics: %s", test_queue, str(metrics))
179170
pytest.fail(f"Consumer did not process all messages within {max_wait} seconds")
180171

181172
# Verify that all messages were processed

pyproject.toml

Lines changed: 22 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -38,28 +38,46 @@ pg8000 = ["pg8000>=1.31.2"]
3838
psycopg = ["psycopg>=3.2.1"]
3939
psycopg2-binary = ["psycopg2-binary>=2.9.9"]
4040
psycopg2cffi = ["psycopg2cffi>=2.9.0"]
41+
42+
# Include dependencies by `uv sync --group dev`
43+
[dependency-groups]
4144
dev = [
42-
# postgresql drivers
45+
{include-group = "postgresql-drivers"},
46+
{include-group = "test"},
47+
{include-group = "docs"},
48+
{include-group = "fastapi-pub-sub-example"},
49+
"rich>=14.2.0",
50+
"ruff>=0.14.10",
51+
"libcst>=1.8.6",
52+
]
53+
postgresql-drivers = [
4354
"asyncpg>=0.29.0",
4455
"greenlet>=3.0.3",
4556
"pg8000>=1.31.2",
4657
"psycopg>=3.2.1",
4758
"psycopg2-binary>=2.9.9",
4859
"psycopg2cffi>=2.9.0",
49-
# testing
60+
]
61+
test = [
5062
"pytest>=7.4.4,<8.0",
5163
"pytest-asyncio>=0.23.8",
5264
"pytest-lazy-fixture>=0.6.3",
5365
"pytest-cov>=5.0.0",
5466
"pytest-xdist>=3.6.1",
5567
"filelock>=3.15.4",
56-
# docs
68+
]
69+
docs = [
5770
"sphinx>=7.3.7",
5871
"sphinx-autobuild>=2024.4.16",
5972
"sphinx-rtd-theme>=2.0.0",
6073
"sphinx-copybutton>=0.5.2",
6174
]
75+
fastapi-pub-sub-example = [
76+
"fastapi>=0.104.0",
77+
"httpx>=0.28.1",
78+
"uvicorn>=0.24.0",
79+
]
6280

6381
[build-system]
6482
requires = ["hatchling"]
65-
build-backend = "hatchling.build"
83+
build-backend = "hatchling.build"

0 commit comments

Comments (0)