Skip to content

Commit 98c1609

Browse files
author
Daan Manneke
committed
feat(source-rate-limits): run source rate limit tests after others for isolation
1 parent 1fdf12b commit 98c1609

File tree

4 files changed

+47
-16
lines changed

4 files changed

+47
-16
lines changed

.github/workflows/test-public-api.yml

Lines changed: 31 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -157,14 +157,15 @@ jobs:
157157
BACKEND_IMAGE: test-backend:latest
158158
run: |
159159
cd backend
160-
# Exclude rate_limit tests from CI/CD - they consume quota and should run separately
161-
# To run rate limit tests locally: pytest tests/e2e/smoke -m rate_limit
160+
# Exclude api_rate_limit tests (general API rate limiter) from CI
161+
# Source rate_limit tests run sequentially after other tests for proper Redis isolation
162+
# To run only rate limit tests locally: pytest tests/e2e/smoke -m rate_limit
162163
163164
MAX_RETRIES=2
164165
165-
# Run all tests first
166+
# Run all non-rate-limit tests in parallel first
166167
set +e
167-
pytest tests/e2e/smoke -m "not rate_limit" -n 3 -v --tb=short 2>&1 | tee pytest_output.txt
168+
pytest tests/e2e/smoke -m "not rate_limit and not api_rate_limit" -n 3 -v --tb=short 2>&1 | tee pytest_output.txt
168169
EXIT_CODE=${PIPESTATUS[0]}
169170
set -e
170171
@@ -209,6 +210,32 @@ jobs:
209210
echo "Tests failed after $MAX_RETRIES retry attempts"
210211
exit 1
211212
213+
- name: Run Source Rate Limit Tests (Sequential)
214+
if: success()
215+
env:
216+
TEST_ENV: local
217+
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
218+
TEST_COMPOSIO_API_KEY: ${{ secrets.TEST_COMPOSIO_API_KEY }}
219+
TEST_COMPOSIO_NOTION_AUTH_CONFIG_ID_1: ${{ secrets.TEST_COMPOSIO_NOTION_AUTH_CONFIG_ID_1 }}
220+
TEST_COMPOSIO_NOTION_ACCOUNT_ID_1: ${{ secrets.TEST_COMPOSIO_NOTION_ACCOUNT_ID_1 }}
221+
TEST_COMPOSIO_NOTION_AUTH_CONFIG_ID_2: ${{ secrets.TEST_COMPOSIO_NOTION_AUTH_CONFIG_ID_2 }}
222+
TEST_COMPOSIO_NOTION_ACCOUNT_ID_2: ${{ secrets.TEST_COMPOSIO_NOTION_ACCOUNT_ID_2 }}
223+
TEST_COMPOSIO_GOOGLE_DRIVE_AUTH_CONFIG_ID_1: ${{ secrets.TEST_COMPOSIO_GOOGLE_DRIVE_AUTH_CONFIG_ID_1 }}
224+
TEST_COMPOSIO_GOOGLE_DRIVE_ACCOUNT_ID_1: ${{ secrets.TEST_COMPOSIO_GOOGLE_DRIVE_ACCOUNT_ID_1 }}
225+
TEST_COMPOSIO_GOOGLE_DRIVE_AUTH_CONFIG_ID_2: ${{ secrets.TEST_COMPOSIO_GOOGLE_DRIVE_AUTH_CONFIG_ID_2 }}
226+
TEST_COMPOSIO_GOOGLE_DRIVE_ACCOUNT_ID_2: ${{ secrets.TEST_COMPOSIO_GOOGLE_DRIVE_ACCOUNT_ID_2 }}
227+
TEST_PIPEDREAM_RATE_LIMIT_CLIENT_ID: ${{ secrets.TEST_PIPEDREAM_RATE_LIMIT_CLIENT_ID }}
228+
TEST_PIPEDREAM_RATE_LIMIT_CLIENT_SECRET: ${{ secrets.TEST_PIPEDREAM_RATE_LIMIT_CLIENT_SECRET }}
229+
TEST_PIPEDREAM_RATE_LIMIT_PROJECT_ID: ${{ secrets.TEST_PIPEDREAM_RATE_LIMIT_PROJECT_ID }}
230+
TEST_PIPEDREAM_GOOGLE_DRIVE_DEFAULT_OAUTH_ACCOUNT_ID: ${{ secrets.TEST_PIPEDREAM_GOOGLE_DRIVE_DEFAULT_OAUTH_ACCOUNT_ID }}
231+
TEST_PIPEDREAM_GOOGLE_DRIVE_DEFAULT_OAUTH_EXTERNAL_USER_ID: ${{ secrets.TEST_PIPEDREAM_GOOGLE_DRIVE_DEFAULT_OAUTH_EXTERNAL_USER_ID }}
232+
TEST_PIPEDREAM_NOTION_DEFAULT_OAUTH_ACCOUNT_ID: ${{ secrets.TEST_PIPEDREAM_NOTION_DEFAULT_OAUTH_ACCOUNT_ID }}
233+
TEST_PIPEDREAM_NOTION_DEFAULT_OAUTH_EXTERNAL_USER_ID: ${{ secrets.TEST_PIPEDREAM_NOTION_DEFAULT_OAUTH_EXTERNAL_USER_ID }}
234+
run: |
235+
cd backend
236+
echo "Running source rate limit tests sequentially (no parallelization for Redis isolation)..."
237+
pytest tests/e2e/smoke -m rate_limit -v --tb=short
238+
212239
- name: Print service logs on failure
213240
if: failure()
214241
run: |

backend/pytest.ini

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,3 +19,5 @@ asyncio_mode = auto
1919
# Register custom markers
2020
markers =
2121
integration: marks tests as integration tests (deselect with '-m "not integration"')
22+
rate_limit: marks tests that exercise source-level rate limiting (run sequentially for proper isolation)
23+
api_rate_limit: marks tests for API-level rate limiting (excluded from CI)

backend/tests/e2e/smoke/test_rate_limiting.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,9 @@
3232
from config import settings
3333

3434
# Module-level marker - applies to all tests in this file
35+
# Use api_rate_limit to distinguish from source rate limiting tests
3536
pytestmark = [
36-
pytest.mark.rate_limit,
37+
pytest.mark.api_rate_limit,
3738
pytest.mark.asyncio,
3839
]
3940

backend/tests/e2e/smoke/test_source_rate_limiting.py

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
- Feature flag behavior
88
- Atomic Lua script prevents race conditions
99
10-
All tests marked with @pytest.mark.rate_limit to run last and avoid quota interference.
10+
All tests are marked with @pytest.mark.rate_limit so CI runs them sequentially for proper isolation.
1111
"""
1212

1313
import asyncio
@@ -18,6 +18,10 @@
1818
import httpx
1919
import pytest
2020

21+
# Mark all tests in this module with rate_limit; the CI workflow runs these
# after the parallel pass, without -n, so they execute sequentially
22+
# (sequential execution gives each test exclusive use of the Redis rate-limit keys)
23+
pytestmark = [pytest.mark.rate_limit]
24+
2125

2226
# Test rate limit configuration
2327
RATE_LIMIT = 1 # requests
@@ -423,9 +427,9 @@ async def test_notion_connection_level_rate_limiting_isolated(
423427
verify_sync_stats_only_inserts(job1, "connection 1")
424428
verify_sync_stats_only_inserts(job2, "connection 2")
425429

426-
# Verify Redis has 2 separate keys (connection-level tracking)
430+
# Verify Redis has exactly 2 separate keys (connection-level tracking)
427431
# Use monitoring data since keys expire quickly (6s TTL)
428-
assert len(monitoring_data) == 2, f"Expected 2 connection-level keys during sync, got {len(monitoring_data)}: {list(monitoring_data.keys())}"
432+
assert len(monitoring_data) == 2, f"Expected exactly 2 connection-level keys during sync, got {len(monitoring_data)}: {list(monitoring_data.keys())}"
429433
print(f"✅ Found 2 separate connection-level rate limit keys during sync")
430434

431435
# Verify rate limits were enforced during sync (check monitoring data)
@@ -545,9 +549,10 @@ async def test_google_drive_org_level_rate_limiting_aggregated(
545549
if job2["status"] == "completed":
546550
verify_sync_stats_only_inserts(job2, "connection 2")
547551

548-
# Verify Redis has only 1 shared key (org-level tracking)
549-
redis_keys = await get_redis_keys("source_rate_limit:*:google_drive:org:*")
550-
assert len(redis_keys) == 1, f"Expected 1 org-level key, got {len(redis_keys)}: {redis_keys}"
552+
# Verify Redis had exactly 1 shared key during sync (org-level tracking)
553+
# Use monitoring data since keys expire quickly (3s TTL)
554+
assert len(monitoring_data) == 1, f"Expected exactly 1 org-level key during sync, got {len(monitoring_data)}: {list(monitoring_data.keys())}"
555+
print(f"✅ Found 1 shared org-level rate limit key during sync")
551556

552557
# Verify rate limits were enforced during sync (check monitoring data)
553558
print(f"\n📊 Redis monitoring during sync:")
@@ -556,14 +561,10 @@ async def test_google_drive_org_level_rate_limiting_aggregated(
556561
print(f" {key}: max={max_counter}/{RATE_LIMIT}, samples={counters}")
557562
assert max_counter <= RATE_LIMIT, f"Rate limit exceeded during sync: {max_counter}/{RATE_LIMIT}"
558563

559-
# Verify the shared counter respected the limit
560-
counter = await get_redis_counter(redis_keys[0])
561-
print(f"\n📊 Shared org-level counter: {counter}/{RATE_LIMIT}")
562-
563564
# With aggressive limits and concurrent syncs, expect some contention
564565
# At least one should complete (demonstrates shared quota)
565566
completed_count = sum(1 for job in [job1, job2] if job["status"] == "completed")
566-
print(f"\n✅ Org-level aggregation verified: 1 shared quota, {completed_count}/2 syncs completed")
567+
print(f"\n✅ Org-level aggregation verified: Shared quota enforced, {completed_count}/2 syncs completed")
567568

568569
finally:
569570
# Cleanup

0 commit comments

Comments
 (0)