Initial commit: fastapi-traffic rate limiting library
- Core rate limiting with multiple algorithms (sliding window, token bucket, etc.) - SQLite and memory backends - Decorator and dependency injection patterns - Middleware support - Example usage files
This commit is contained in:
10
.gitignore
vendored
Normal file
10
.gitignore
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
# Python-generated files
|
||||
__pycache__/
|
||||
*.py[oc]
|
||||
build/
|
||||
dist/
|
||||
wheels/
|
||||
*.egg-info
|
||||
|
||||
# Virtual environments
|
||||
.venv
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.10
|
||||
56
.vscode/sessions.json
vendored
Normal file
56
.vscode/sessions.json
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"$schema": "https://cdn.statically.io/gh/nguyenngoclongdev/cdn/main/schema/v11/terminal-keeper.json",
|
||||
"theme": "tribe",
|
||||
"active": "default",
|
||||
"activateOnStartup": true,
|
||||
"keepExistingTerminals": false,
|
||||
"sessions": {
|
||||
"default": [
|
||||
{
|
||||
"name": "hello",
|
||||
"autoExecuteCommands": true,
|
||||
"icon": "person",
|
||||
"color": "terminal.ansiGreen",
|
||||
"commands": [
|
||||
"echo hello"
|
||||
]
|
||||
},
|
||||
[
|
||||
{
|
||||
"name": "docker:ros",
|
||||
"commands": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "docker:k8s",
|
||||
"commands": [
|
||||
""
|
||||
]
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"name": "docker:nats",
|
||||
"commands": [
|
||||
""
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "docker:fleet",
|
||||
"commands": [
|
||||
""
|
||||
]
|
||||
}
|
||||
]
|
||||
],
|
||||
"saved-session": [
|
||||
{
|
||||
"name": "connect",
|
||||
"commands": [
|
||||
""
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
239
README.md
Normal file
239
README.md
Normal file
@@ -0,0 +1,239 @@
|
||||
# FastAPI Traffic
|
||||
|
||||
Production-grade rate limiting for FastAPI with multiple algorithms and storage backends.
|
||||
|
||||
## Features
|
||||
|
||||
- **Multiple Algorithms**: Token Bucket, Sliding Window, Fixed Window, Leaky Bucket, Sliding Window Counter
|
||||
- **Multiple Backends**: In-memory (default), SQLite (persistent), Redis (distributed)
|
||||
- **Decorator-based**: Simple `@rate_limit` decorator for endpoints
|
||||
- **Middleware Support**: Global rate limiting via middleware
|
||||
- **Dependency Injection**: Use as FastAPI dependency for more control
|
||||
- **Strict Typing**: Full type hints, pyright/mypy compatible
|
||||
- **Customizable**: Custom key extractors, exemptions, callbacks
|
||||
- **Production Ready**: Connection pooling, async support, proper error handling
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
# Basic installation (memory backend only)
|
||||
pip install fastapi-traffic
|
||||
|
||||
# With Redis support
|
||||
pip install fastapi-traffic[redis]
|
||||
|
||||
# With all extras
|
||||
pip install fastapi-traffic[all]
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### Basic Usage with Decorator
|
||||
|
||||
```python
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi_traffic import rate_limit
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
@app.get("/api/resource")
|
||||
@rate_limit(100, 60) # 100 requests per 60 seconds
|
||||
async def get_resource(request: Request):
|
||||
return {"message": "Hello, World!"}
|
||||
```
|
||||
|
||||
### Using Different Algorithms
|
||||
|
||||
```python
|
||||
from fastapi_traffic import rate_limit, Algorithm
|
||||
|
||||
# Token Bucket - allows bursts
|
||||
@app.get("/api/burst")
|
||||
@rate_limit(100, 60, algorithm=Algorithm.TOKEN_BUCKET, burst_size=20)
|
||||
async def burst_endpoint(request: Request):
|
||||
return {"message": "Burst allowed"}
|
||||
|
||||
# Sliding Window - precise rate limiting
|
||||
@app.get("/api/precise")
|
||||
@rate_limit(100, 60, algorithm=Algorithm.SLIDING_WINDOW)
|
||||
async def precise_endpoint(request: Request):
|
||||
return {"message": "Precise limiting"}
|
||||
|
||||
# Fixed Window - simple and efficient
|
||||
@app.get("/api/simple")
|
||||
@rate_limit(100, 60, algorithm=Algorithm.FIXED_WINDOW)
|
||||
async def simple_endpoint(request: Request):
|
||||
return {"message": "Fixed window"}
|
||||
```
|
||||
|
||||
### Custom Key Extraction
|
||||
|
||||
```python
|
||||
def api_key_extractor(request: Request) -> str:
|
||||
"""Rate limit by API key instead of IP."""
|
||||
return request.headers.get("X-API-Key", "anonymous")
|
||||
|
||||
@app.get("/api/by-key")
|
||||
@rate_limit(1000, 3600, key_extractor=api_key_extractor)
|
||||
async def api_key_endpoint(request: Request):
|
||||
return {"message": "Rate limited by API key"}
|
||||
```
|
||||
|
||||
### Using SQLite Backend (Persistent)
|
||||
|
||||
```python
|
||||
from fastapi_traffic import RateLimiter, SQLiteBackend
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Configure persistent storage
|
||||
backend = SQLiteBackend("rate_limits.db")
|
||||
limiter = RateLimiter(backend)
|
||||
set_limiter(limiter)
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup():
|
||||
await limiter.initialize()
|
||||
|
||||
@app.on_event("shutdown")
|
||||
async def shutdown():
|
||||
await limiter.close()
|
||||
```
|
||||
|
||||
### Using Redis Backend (Distributed)
|
||||
|
||||
```python
|
||||
from fastapi_traffic import RateLimiter
|
||||
from fastapi_traffic.backends.redis import RedisBackend
|
||||
|
||||
# Create Redis backend
|
||||
backend = await RedisBackend.from_url("redis://localhost:6379/0")
|
||||
limiter = RateLimiter(backend)
|
||||
set_limiter(limiter)
|
||||
```
|
||||
|
||||
### Global Middleware
|
||||
|
||||
```python
|
||||
from fastapi_traffic.middleware import RateLimitMiddleware
|
||||
|
||||
app.add_middleware(
|
||||
RateLimitMiddleware,
|
||||
limit=1000,
|
||||
window_size=60,
|
||||
exempt_paths={"/health", "/docs"},
|
||||
exempt_ips={"127.0.0.1"},
|
||||
)
|
||||
```
|
||||
|
||||
### Dependency Injection
|
||||
|
||||
```python
|
||||
from fastapi import Depends
|
||||
from fastapi_traffic.core.decorator import RateLimitDependency
|
||||
|
||||
rate_dep = RateLimitDependency(limit=100, window_size=60)
|
||||
|
||||
@app.get("/api/with-info")
|
||||
async def endpoint_with_info(
|
||||
request: Request,
|
||||
rate_info = Depends(rate_dep)
|
||||
):
|
||||
return {
|
||||
"remaining": rate_info.remaining,
|
||||
"reset_at": rate_info.reset_at,
|
||||
}
|
||||
```
|
||||
|
||||
### Exception Handling
|
||||
|
||||
```python
|
||||
from fastapi_traffic import RateLimitExceeded
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded):
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={
|
||||
"error": "rate_limit_exceeded",
|
||||
"retry_after": exc.retry_after,
|
||||
},
|
||||
headers=exc.limit_info.to_headers() if exc.limit_info else {},
|
||||
)
|
||||
```
|
||||
|
||||
## Algorithms
|
||||
|
||||
| Algorithm | Description | Use Case |
|
||||
|-----------|-------------|----------|
|
||||
| `TOKEN_BUCKET` | Allows bursts up to bucket capacity | APIs that need burst handling |
|
||||
| `SLIDING_WINDOW` | Precise request counting | High-accuracy rate limiting |
|
||||
| `FIXED_WINDOW` | Simple time-based windows | Simple, low-overhead limiting |
|
||||
| `LEAKY_BUCKET` | Smooths out request rate | Consistent throughput |
|
||||
| `SLIDING_WINDOW_COUNTER` | Balance of precision and efficiency | General purpose (default) |
|
||||
|
||||
## Backends
|
||||
|
||||
### MemoryBackend (Default)
|
||||
- In-memory storage with LRU eviction
|
||||
- Best for single-process applications
|
||||
- No persistence across restarts
|
||||
|
||||
### SQLiteBackend
|
||||
- Persistent storage using SQLite
|
||||
- WAL mode for better performance
|
||||
- Suitable for single-node deployments
|
||||
|
||||
### RedisBackend
|
||||
- Distributed storage using Redis
|
||||
- Required for multi-node deployments
|
||||
- Supports atomic operations via Lua scripts
|
||||
|
||||
## Configuration Options
|
||||
|
||||
```python
|
||||
@rate_limit(
|
||||
limit=100, # Max requests in window
|
||||
window_size=60.0, # Window size in seconds
|
||||
algorithm=Algorithm.SLIDING_WINDOW_COUNTER,
|
||||
key_prefix="api", # Prefix for rate limit keys
|
||||
key_extractor=func, # Custom key extraction function
|
||||
burst_size=None, # Burst size (token/leaky bucket)
|
||||
include_headers=True, # Add rate limit headers to response
|
||||
error_message="...", # Custom error message
|
||||
status_code=429, # HTTP status when limited
|
||||
skip_on_error=False, # Skip limiting on backend errors
|
||||
cost=1, # Cost per request
|
||||
exempt_when=func, # Function to check exemption
|
||||
on_blocked=func, # Callback when request is blocked
|
||||
)
|
||||
```
|
||||
|
||||
## Response Headers
|
||||
|
||||
When `include_headers=True`, responses include:
|
||||
|
||||
- `X-RateLimit-Limit`: Maximum requests allowed
|
||||
- `X-RateLimit-Remaining`: Remaining requests in window
|
||||
- `X-RateLimit-Reset`: Unix timestamp when limit resets
|
||||
- `Retry-After`: Seconds until retry (when rate limited)
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
# Install dev dependencies
|
||||
pip install -e ".[dev]"
|
||||
|
||||
# Run tests
|
||||
pytest
|
||||
|
||||
# Type checking
|
||||
pyright
|
||||
|
||||
# Linting
|
||||
ruff check .
|
||||
ruff format .
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT License
|
||||
60
examples/01_quickstart.py
Normal file
60
examples/01_quickstart.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""Quickstart example - minimal setup to get rate limiting working."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Step 1: Create a backend and limiter
|
||||
backend = MemoryBackend()
|
||||
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""Lifespan context manager for startup/shutdown."""
|
||||
await limiter.initialize()
|
||||
set_limiter(limiter)
|
||||
yield
|
||||
await limiter.close()
|
||||
|
||||
|
||||
app = FastAPI(title="Quickstart Example", lifespan=lifespan)
|
||||
|
||||
|
||||
# Step 2: Add exception handler for rate limit errors
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={"error": "Too many requests", "retry_after": exc.retry_after},
|
||||
)
|
||||
|
||||
|
||||
# Step 3: Apply rate limiting to endpoints
|
||||
@app.get("/")
|
||||
@rate_limit(10, 60) # 10 requests per minute
|
||||
async def hello(request: Request) -> dict[str, str]:
|
||||
return {"message": "Hello, World!"}
|
||||
|
||||
|
||||
@app.get("/api/data")
|
||||
@rate_limit(100, 60) # 100 requests per minute
|
||||
async def get_data(request: Request) -> dict[str, str]:
|
||||
return {"data": "Some important data"}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
122
examples/02_algorithms.py
Normal file
122
examples/02_algorithms.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""Examples demonstrating all available rate limiting algorithms."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
Algorithm,
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
backend = MemoryBackend()
|
||||
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
await limiter.initialize()
|
||||
set_limiter(limiter)
|
||||
yield
|
||||
await limiter.close()
|
||||
|
||||
|
||||
app = FastAPI(title="Rate Limiting Algorithms", lifespan=lifespan)
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={
|
||||
"error": "rate_limit_exceeded",
|
||||
"message": exc.message,
|
||||
"retry_after": exc.retry_after,
|
||||
},
|
||||
headers=exc.limit_info.to_headers() if exc.limit_info else {},
|
||||
)
|
||||
|
||||
|
||||
# 1. Fixed Window - Simple, resets at fixed intervals
|
||||
# Best for: Simple use cases, low memory usage
|
||||
# Drawback: Can allow 2x burst at window boundaries
|
||||
@app.get("/fixed-window")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.FIXED_WINDOW,
|
||||
)
|
||||
async def fixed_window(request: Request) -> dict[str, str]:
|
||||
"""Fixed window resets counter at fixed time intervals."""
|
||||
return {"algorithm": "fixed_window", "description": "Counter resets every 60 seconds"}
|
||||
|
||||
|
||||
# 2. Sliding Window Log - Most precise
|
||||
# Best for: When accuracy is critical
|
||||
# Drawback: Higher memory usage (stores all timestamps)
|
||||
@app.get("/sliding-window")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.SLIDING_WINDOW,
|
||||
)
|
||||
async def sliding_window(request: Request) -> dict[str, str]:
|
||||
"""Sliding window tracks exact timestamps for precise limiting."""
|
||||
return {"algorithm": "sliding_window", "description": "Precise tracking with timestamp log"}
|
||||
|
||||
|
||||
# 3. Sliding Window Counter - Balance of precision and efficiency
|
||||
# Best for: Most production use cases (default algorithm)
|
||||
# Combines benefits of fixed window efficiency with sliding window precision
|
||||
@app.get("/sliding-window-counter")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.SLIDING_WINDOW_COUNTER,
|
||||
)
|
||||
async def sliding_window_counter(request: Request) -> dict[str, str]:
|
||||
"""Sliding window counter uses weighted counts from current and previous windows."""
|
||||
return {"algorithm": "sliding_window_counter", "description": "Efficient approximation"}
|
||||
|
||||
|
||||
# 4. Token Bucket - Allows controlled bursts
|
||||
# Best for: APIs that need to allow occasional bursts
|
||||
# Tokens refill gradually, burst_size controls max burst
|
||||
@app.get("/token-bucket")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.TOKEN_BUCKET,
|
||||
burst_size=5, # Allow bursts of up to 5 requests
|
||||
)
|
||||
async def token_bucket(request: Request) -> dict[str, str]:
|
||||
"""Token bucket allows bursts up to burst_size, then refills gradually."""
|
||||
return {"algorithm": "token_bucket", "description": "Allows controlled bursts"}
|
||||
|
||||
|
||||
# 5. Leaky Bucket - Smooths out traffic
|
||||
# Best for: Protecting downstream services from bursts
|
||||
# Processes requests at a constant rate
|
||||
@app.get("/leaky-bucket")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.LEAKY_BUCKET,
|
||||
burst_size=5, # Queue capacity
|
||||
)
|
||||
async def leaky_bucket(request: Request) -> dict[str, str]:
|
||||
"""Leaky bucket smooths traffic to a constant rate."""
|
||||
return {"algorithm": "leaky_bucket", "description": "Constant output rate"}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
108
examples/03_backends.py
Normal file
108
examples/03_backends.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""Examples demonstrating different storage backends."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Any
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
SQLiteBackend,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
|
||||
# Choose backend based on environment
|
||||
def get_backend():
|
||||
"""Select appropriate backend based on environment."""
|
||||
backend_type = os.getenv("RATE_LIMIT_BACKEND", "memory")
|
||||
|
||||
if backend_type == "sqlite":
|
||||
# SQLite - Good for single-instance apps, persists across restarts
|
||||
return SQLiteBackend("rate_limits.db")
|
||||
|
||||
elif backend_type == "redis":
|
||||
# Redis - Required for distributed/multi-instance deployments
|
||||
# Requires: pip install redis
|
||||
try:
|
||||
from fastapi_traffic import RedisBackend
|
||||
import asyncio
|
||||
|
||||
async def create_redis():
|
||||
return await RedisBackend.from_url(
|
||||
os.getenv("REDIS_URL", "redis://localhost:6379/0"),
|
||||
key_prefix="myapp_ratelimit",
|
||||
)
|
||||
|
||||
return asyncio.get_event_loop().run_until_complete(create_redis())
|
||||
except ImportError:
|
||||
print("Redis not installed, falling back to memory backend")
|
||||
return MemoryBackend()
|
||||
|
||||
else:
|
||||
# Memory - Fast, but resets on restart, not shared across instances
|
||||
return MemoryBackend()
|
||||
|
||||
|
||||
backend = get_backend()
|
||||
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
await limiter.initialize()
|
||||
set_limiter(limiter)
|
||||
yield
|
||||
await limiter.close()
|
||||
|
||||
|
||||
app = FastAPI(title="Storage Backends Example", lifespan=lifespan)
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={"error": "rate_limit_exceeded", "retry_after": exc.retry_after},
|
||||
)
|
||||
|
||||
|
||||
@app.get("/api/resource")
|
||||
@rate_limit(100, 60)
|
||||
async def get_resource(request: Request) -> dict[str, str]:
|
||||
return {"message": "Resource data", "backend": type(backend).__name__}
|
||||
|
||||
|
||||
@app.get("/backend-info")
|
||||
async def backend_info() -> dict[str, Any]:
|
||||
"""Get information about the current backend."""
|
||||
info = {
|
||||
"backend_type": type(backend).__name__,
|
||||
"description": "",
|
||||
}
|
||||
|
||||
if isinstance(backend, MemoryBackend):
|
||||
info["description"] = "In-memory storage, fast but ephemeral"
|
||||
elif isinstance(backend, SQLiteBackend):
|
||||
info["description"] = "SQLite storage, persistent, single-instance"
|
||||
else:
|
||||
info["description"] = "Redis storage, distributed, multi-instance"
|
||||
|
||||
return info
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
# Run with different backends:
|
||||
# RATE_LIMIT_BACKEND=memory python 03_backends.py
|
||||
# RATE_LIMIT_BACKEND=sqlite python 03_backends.py
|
||||
# RATE_LIMIT_BACKEND=redis REDIS_URL=redis://localhost:6379/0 python 03_backends.py
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
153
examples/04_key_extractors.py
Normal file
153
examples/04_key_extractors.py
Normal file
@@ -0,0 +1,153 @@
|
||||
"""Examples demonstrating custom key extractors for rate limiting."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
backend = MemoryBackend()
|
||||
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
await limiter.initialize()
|
||||
set_limiter(limiter)
|
||||
yield
|
||||
await limiter.close()
|
||||
|
||||
|
||||
app = FastAPI(title="Custom Key Extractors", lifespan=lifespan)
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={"error": "rate_limit_exceeded", "retry_after": exc.retry_after},
|
||||
)
|
||||
|
||||
|
||||
# 1. Default: Rate limit by IP address
|
||||
@app.get("/by-ip")
|
||||
@rate_limit(10, 60) # Uses default IP-based key extractor
|
||||
async def by_ip(request: Request) -> dict[str, str]:
|
||||
"""Rate limited by client IP address (default behavior)."""
|
||||
return {"limited_by": "ip", "client_ip": request.client.host if request.client else "unknown"}
|
||||
|
||||
|
||||
# 2. Rate limit by API key
|
||||
def api_key_extractor(request: Request) -> str:
|
||||
"""Extract API key from header."""
|
||||
api_key = request.headers.get("X-API-Key", "anonymous")
|
||||
return f"api_key:{api_key}"
|
||||
|
||||
|
||||
@app.get("/by-api-key")
|
||||
@rate_limit(
|
||||
limit=100,
|
||||
window_size=3600, # 100 requests per hour per API key
|
||||
key_extractor=api_key_extractor,
|
||||
)
|
||||
async def by_api_key(request: Request) -> dict[str, str]:
|
||||
"""Rate limited by API key from X-API-Key header."""
|
||||
api_key = request.headers.get("X-API-Key", "anonymous")
|
||||
return {"limited_by": "api_key", "api_key": api_key}
|
||||
|
||||
|
||||
# 3. Rate limit by user ID (from JWT or session)
|
||||
def user_id_extractor(request: Request) -> str:
|
||||
"""Extract user ID from request state or header."""
|
||||
# In real apps, this would come from decoded JWT or session
|
||||
user_id = request.headers.get("X-User-ID", "anonymous")
|
||||
return f"user:{user_id}"
|
||||
|
||||
|
||||
@app.get("/by-user")
|
||||
@rate_limit(
|
||||
limit=50,
|
||||
window_size=60,
|
||||
key_extractor=user_id_extractor,
|
||||
)
|
||||
async def by_user(request: Request) -> dict[str, str]:
|
||||
"""Rate limited by user ID."""
|
||||
user_id = request.headers.get("X-User-ID", "anonymous")
|
||||
return {"limited_by": "user_id", "user_id": user_id}
|
||||
|
||||
|
||||
# 4. Rate limit by endpoint + IP (separate limits per endpoint)
|
||||
def endpoint_ip_extractor(request: Request) -> str:
|
||||
"""Combine endpoint path with IP for per-endpoint limits."""
|
||||
ip = request.client.host if request.client else "unknown"
|
||||
path = request.url.path
|
||||
return f"endpoint:{path}:ip:{ip}"
|
||||
|
||||
|
||||
@app.get("/endpoint-specific")
|
||||
@rate_limit(
|
||||
limit=5,
|
||||
window_size=60,
|
||||
key_extractor=endpoint_ip_extractor,
|
||||
)
|
||||
async def endpoint_specific(request: Request) -> dict[str, str]:
|
||||
"""Each endpoint has its own rate limit counter."""
|
||||
return {"limited_by": "endpoint+ip"}
|
||||
|
||||
|
||||
# 5. Rate limit by organization/tenant (multi-tenant apps)
|
||||
def tenant_extractor(request: Request) -> str:
|
||||
"""Extract tenant from subdomain or header."""
|
||||
# Could also parse from subdomain: tenant.example.com
|
||||
tenant = request.headers.get("X-Tenant-ID", "default")
|
||||
return f"tenant:{tenant}"
|
||||
|
||||
|
||||
@app.get("/by-tenant")
|
||||
@rate_limit(
|
||||
limit=1000,
|
||||
window_size=3600, # 1000 requests per hour per tenant
|
||||
key_extractor=tenant_extractor,
|
||||
)
|
||||
async def by_tenant(request: Request) -> dict[str, str]:
|
||||
"""Rate limited by tenant/organization."""
|
||||
tenant = request.headers.get("X-Tenant-ID", "default")
|
||||
return {"limited_by": "tenant", "tenant_id": tenant}
|
||||
|
||||
|
||||
# 6. Composite key: User + Action type
|
||||
def user_action_extractor(request: Request) -> str:
|
||||
"""Rate limit specific actions per user."""
|
||||
user_id = request.headers.get("X-User-ID", "anonymous")
|
||||
action = request.query_params.get("action", "default")
|
||||
return f"user:{user_id}:action:{action}"
|
||||
|
||||
|
||||
@app.get("/user-action")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
key_extractor=user_action_extractor,
|
||||
)
|
||||
async def user_action(
|
||||
request: Request,
|
||||
action: str = "default",
|
||||
) -> dict[str, str]:
|
||||
"""Rate limited by user + action combination."""
|
||||
user_id = request.headers.get("X-User-ID", "anonymous")
|
||||
return {"limited_by": "user+action", "user_id": user_id, "action": action}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
109
examples/05_middleware.py
Normal file
109
examples/05_middleware.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Examples demonstrating middleware-based rate limiting."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
|
||||
from fastapi_traffic import MemoryBackend
|
||||
from fastapi_traffic.middleware import RateLimitMiddleware
|
||||
|
||||
# Alternative middleware options (uncomment to use):
|
||||
# from fastapi_traffic.middleware import SlidingWindowMiddleware
|
||||
# from fastapi_traffic.middleware import TokenBucketMiddleware
|
||||
|
||||
app = FastAPI(title="Middleware Rate Limiting")
|
||||
|
||||
|
||||
# Custom key extractor for middleware
|
||||
def get_client_identifier(request: Request) -> str:
|
||||
"""Extract client identifier from request."""
|
||||
# Check for API key first
|
||||
api_key = request.headers.get("X-API-Key")
|
||||
if api_key:
|
||||
return f"api_key:{api_key}"
|
||||
|
||||
# Fall back to IP
|
||||
if request.client:
|
||||
return f"ip:{request.client.host}"
|
||||
|
||||
return "unknown"
|
||||
|
||||
|
||||
# Option 1: Basic middleware with defaults
|
||||
# Uncomment to use:
|
||||
# app.add_middleware(
|
||||
# RateLimitMiddleware,
|
||||
# limit=100,
|
||||
# window_size=60,
|
||||
# )
|
||||
|
||||
# Option 2: Middleware with custom configuration
|
||||
app.add_middleware(
|
||||
RateLimitMiddleware,
|
||||
limit=100,
|
||||
window_size=60,
|
||||
backend=MemoryBackend(),
|
||||
key_prefix="global",
|
||||
include_headers=True,
|
||||
error_message="You have exceeded the rate limit. Please slow down.",
|
||||
status_code=429,
|
||||
skip_on_error=True, # Don't block requests if backend fails
|
||||
exempt_paths={"/health", "/docs", "/openapi.json", "/redoc"},
|
||||
exempt_ips={"127.0.0.1"}, # Exempt localhost
|
||||
key_extractor=get_client_identifier,
|
||||
)
|
||||
|
||||
|
||||
# Option 3: Convenience middleware for specific algorithms
|
||||
# SlidingWindowMiddleware - precise rate limiting
|
||||
# app.add_middleware(
|
||||
# SlidingWindowMiddleware,
|
||||
# limit=100,
|
||||
# window_size=60,
|
||||
# )
|
||||
|
||||
# TokenBucketMiddleware - allows bursts
|
||||
# app.add_middleware(
|
||||
# TokenBucketMiddleware,
|
||||
# limit=100,
|
||||
# window_size=60,
|
||||
# )
|
||||
|
||||
|
||||
@app.get("/")
|
||||
async def root() -> dict[str, str]:
|
||||
"""Root endpoint - rate limited by middleware."""
|
||||
return {"message": "Hello, World!"}
|
||||
|
||||
|
||||
@app.get("/api/data")
|
||||
async def get_data() -> dict[str, str]:
|
||||
"""API endpoint - rate limited by middleware."""
|
||||
return {"data": "Some important data"}
|
||||
|
||||
|
||||
@app.get("/api/users")
|
||||
async def get_users() -> dict[str, list[str]]:
|
||||
"""Users endpoint - rate limited by middleware."""
|
||||
return {"users": ["alice", "bob", "charlie"]}
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health() -> dict[str, str]:
|
||||
"""Health check - exempt from rate limiting."""
|
||||
return {"status": "healthy"}
|
||||
|
||||
|
||||
@app.get("/docs-info")
|
||||
async def docs_info() -> dict[str, str]:
|
||||
"""Info about documentation endpoints."""
|
||||
return {
|
||||
"message": "Visit /docs for Swagger UI or /redoc for ReDoc",
|
||||
"note": "These endpoints are exempt from rate limiting",
|
||||
}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
221
examples/06_dependency_injection.py
Normal file
221
examples/06_dependency_injection.py
Normal file
@@ -0,0 +1,221 @@
|
||||
"""Examples demonstrating rate limiting with FastAPI dependency injection."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Any
|
||||
|
||||
from fastapi import Depends, FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
)
|
||||
from fastapi_traffic.core.decorator import RateLimitDependency
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
backend = MemoryBackend()
|
||||
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""Lifespan context manager for startup/shutdown."""
|
||||
await limiter.initialize()
|
||||
set_limiter(limiter)
|
||||
yield
|
||||
await limiter.close()
|
||||
|
||||
|
||||
app = FastAPI(title="Dependency Injection Example", lifespan=lifespan)
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
|
||||
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
|
||||
return JSONResponse(
|
||||
status_code=429,
|
||||
content={"error": "rate_limit_exceeded", "retry_after": exc.retry_after},
|
||||
)
|
||||
|
||||
|
||||
# 1. Basic dependency - rate limit info available in endpoint
|
||||
basic_rate_limit = RateLimitDependency(limit=10, window_size=60)
|
||||
|
||||
|
||||
@app.get("/basic")
|
||||
async def basic_endpoint(
|
||||
request: Request,
|
||||
rate_info: Any = Depends(basic_rate_limit),
|
||||
) -> dict[str, Any]:
|
||||
"""Access rate limit info in your endpoint logic."""
|
||||
return {
|
||||
"message": "Success",
|
||||
"rate_limit": {
|
||||
"limit": rate_info.limit,
|
||||
"remaining": rate_info.remaining,
|
||||
"reset_at": rate_info.reset_at,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# 2. Different limits for different user tiers
|
||||
def get_user_tier(request: Request) -> str:
|
||||
"""Get user tier from header (in real app, from JWT/database)."""
|
||||
return request.headers.get("X-User-Tier", "free")
|
||||
|
||||
|
||||
free_tier_limit = RateLimitDependency(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
key_prefix="free",
|
||||
)
|
||||
|
||||
pro_tier_limit = RateLimitDependency(
|
||||
limit=100,
|
||||
window_size=60,
|
||||
key_prefix="pro",
|
||||
)
|
||||
|
||||
enterprise_tier_limit = RateLimitDependency(
|
||||
limit=1000,
|
||||
window_size=60,
|
||||
key_prefix="enterprise",
|
||||
)
|
||||
|
||||
|
||||
async def tiered_rate_limit(
|
||||
request: Request,
|
||||
tier: str = Depends(get_user_tier),
|
||||
) -> Any:
|
||||
"""Apply different rate limits based on user tier."""
|
||||
if tier == "enterprise":
|
||||
return await enterprise_tier_limit(request)
|
||||
elif tier == "pro":
|
||||
return await pro_tier_limit(request)
|
||||
else:
|
||||
return await free_tier_limit(request)
|
||||
|
||||
|
||||
@app.get("/tiered")
|
||||
async def tiered_endpoint(
|
||||
request: Request,
|
||||
rate_info: Any = Depends(tiered_rate_limit),
|
||||
) -> dict[str, Any]:
|
||||
"""Endpoint with tier-based rate limiting."""
|
||||
tier = get_user_tier(request)
|
||||
return {
|
||||
"message": "Success",
|
||||
"tier": tier,
|
||||
"rate_limit": {
|
||||
"limit": rate_info.limit,
|
||||
"remaining": rate_info.remaining,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
# 3. Conditional rate limiting based on request properties
def api_key_extractor(request: Request) -> str:
    """Derive the limiter key from X-API-Key (missing keys share one bucket)."""
    key = request.headers.get("X-API-Key", "anonymous")
    return "api:" + key
|
||||
|
||||
|
||||
# Per-API-key quota: 100 requests per hour, keyed by api_key_extractor
# rather than the default client identity.
api_rate_limit = RateLimitDependency(
    limit=100,
    window_size=3600,
    key_extractor=api_key_extractor,
)
|
||||
|
||||
|
||||
@app.get("/api/resource")
async def api_resource(
    request: Request,
    rate_info: Any = Depends(api_rate_limit),
) -> dict[str, Any]:
    """Serve a resource and report the per-API-key quota left."""
    remaining = rate_info.remaining
    return {"data": "Resource data", "requests_remaining": remaining}
|
||||
|
||||
|
||||
# 4. Combine multiple rate limits (e.g., per-minute AND per-hour)
# Short window smooths bursts; long window caps sustained volume.
per_minute_limit = RateLimitDependency(
    limit=10,
    window_size=60,
    key_prefix="minute",
)

per_hour_limit = RateLimitDependency(
    limit=100,
    window_size=3600,
    key_prefix="hour",
)
|
||||
|
||||
|
||||
async def combined_rate_limit(
    request: Request,
    minute_info: Any = Depends(per_minute_limit),
    hour_info: Any = Depends(per_hour_limit),
) -> dict[str, Any]:
    """Enforce the minute and hour windows together and report both states."""
    def _snapshot(info: Any) -> dict[str, Any]:
        # Small view of a limit result for the response payload.
        return {"limit": info.limit, "remaining": info.remaining}

    return {"minute": _snapshot(minute_info), "hour": _snapshot(hour_info)}
|
||||
|
||||
|
||||
@app.get("/combined")
async def combined_endpoint(
    request: Request,
    rate_info: dict[str, Any] = Depends(combined_rate_limit),
) -> dict[str, Any]:
    """Return the state of every rate-limit window guarding this route."""
    response: dict[str, Any] = {"message": "Success"}
    response["rate_limits"] = rate_info
    return response
|
||||
|
||||
|
||||
# 5. Rate limit with custom exemption logic
def is_internal_request(request: Request) -> bool:
    """Check if request is from internal service.

    Compares the X-Internal-Token header against the shared secret using a
    constant-time comparison, so the check does not leak timing information
    to callers probing the header. Returns False when the header is absent.
    """
    import secrets

    internal_token = request.headers.get("X-Internal-Token")
    if internal_token is None:
        return False
    return secrets.compare_digest(internal_token, "internal-secret-token")
|
||||
|
||||
|
||||
# Dependency whose exempt_when predicate bypasses limiting entirely for
# requests carrying the internal service token.
internal_exempt_limit = RateLimitDependency(
    limit=10,
    window_size=60,
    exempt_when=is_internal_request,
)
|
||||
|
||||
|
||||
@app.get("/internal-exempt")
async def internal_exempt_endpoint(
    request: Request,
    rate_info: Any = Depends(internal_exempt_limit),
) -> dict[str, Any]:
    """Show whether the caller bypassed the limiter via the internal token."""
    internal = is_internal_request(request)
    if internal:
        limit_view = None
    else:
        limit_view = {"remaining": rate_info.remaining}
    return {
        "message": "Success",
        "is_internal": internal,
        "rate_limit": limit_view,
    }
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Bind to all interfaces so the example is reachable from containers/VMs.
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
197
examples/07_redis_distributed.py
Normal file
197
examples/07_redis_distributed.py
Normal file
@@ -0,0 +1,197 @@
|
||||
"""Example demonstrating Redis backend for distributed rate limiting.
|
||||
|
||||
This example shows how to use Redis for rate limiting across multiple
|
||||
application instances (e.g., in a Kubernetes deployment or load-balanced setup).
|
||||
|
||||
Requirements:
|
||||
pip install redis
|
||||
|
||||
Environment variables:
|
||||
REDIS_URL: Redis connection URL (default: redis://localhost:6379/0)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import Depends, FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
from typing import Annotated
|
||||
|
||||
from fastapi_traffic import (
|
||||
Algorithm,
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
from fastapi_traffic.backends.redis import RedisBackend
|
||||
|
||||
|
||||
async def create_redis_backend():
    """Create Redis backend with fallback to memory.

    Tries Redis first (URL from REDIS_URL, default localhost) and falls back
    to an in-process MemoryBackend when the redis package is missing, the
    connection fails, or the server does not answer a ping.

    NOTE(review): this module already imports RedisBackend at the top
    (`from fastapi_traffic.backends.redis import RedisBackend`); if that
    import itself requires the redis package, module import would fail
    before the ImportError branch below can ever run — verify against
    fastapi_traffic's lazy-import behavior.
    """
    try:
        from fastapi_traffic import RedisBackend

        redis_url = os.getenv("REDIS_URL", "redis://localhost:6379/0")
        backend = await RedisBackend.from_url(
            redis_url,
            key_prefix="myapp",  # namespaces this app's keys in Redis
        )

        # Verify connection
        if await backend.ping():
            print(f"Connected to Redis at {redis_url}")
            return backend
        else:
            print("Redis ping failed, falling back to memory backend")
            return MemoryBackend()

    except ImportError:
        print("Redis package not installed. Install with: pip install redis")
        print("Falling back to memory backend")
        return MemoryBackend()

    except Exception as e:
        # Covers connection refused, auth errors, DNS failures, etc.
        print(f"Failed to connect to Redis: {e}")
        print("Falling back to memory backend")
        return MemoryBackend()
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan context manager for startup/shutdown."""
    # Startup: pick Redis when reachable (memory otherwise), then register
    # the limiter globally so @rate_limit decorators can resolve it.
    app.state.backend = await create_redis_backend()
    app.state.limiter = RateLimiter(app.state.backend)
    await app.state.limiter.initialize()
    set_limiter(app.state.limiter)

    yield

    # Shutdown: close the limiter (and its backend connections).
    await app.state.limiter.close()


app = FastAPI(
    title="Distributed Rate Limiting with Redis",
    lifespan=lifespan,
)
|
||||
|
||||
|
||||
def get_backend(request: Request) -> RedisBackend | MemoryBackend:
    """Fetch the storage backend stashed on the application state."""
    state = request.app.state
    return state.backend


def get_limiter(request: Request) -> RateLimiter:
    """Fetch the shared limiter stashed on the application state."""
    state = request.app.state
    return state.limiter
|
||||
|
||||
|
||||
# Annotated aliases keep endpoint signatures short while still resolving
# the backend/limiter through FastAPI's dependency injection.
BackendDep = Annotated[RedisBackend | MemoryBackend, Depends(get_backend)]
LimiterDep = Annotated[RateLimiter, Depends(get_limiter)]
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    """Translate RateLimitExceeded into a 429 JSON payload with headers."""
    extra_headers = exc.limit_info.to_headers() if exc.limit_info else {}
    payload = {
        "error": "rate_limit_exceeded",
        "message": exc.message,
        "retry_after": exc.retry_after,
    }
    return JSONResponse(status_code=429, content=payload, headers=extra_headers)
|
||||
|
||||
|
||||
# Rate limits are shared across all instances when using Redis
@app.get("/api/shared-limit")
@rate_limit(
    limit=100,
    window_size=60,
    key_prefix="shared",
)
async def shared_limit(request: Request) -> dict[str, str]:
    """Endpoint whose counter lives in the shared backend, so every
    application instance draws from the same quota."""
    response = {"message": "Success"}
    response["note"] = "Rate limit counter is shared via Redis"
    return response
|
||||
|
||||
|
||||
# Per-user limits also work across instances
def user_extractor(request: Request) -> str:
    """Build a per-user limiter key from the X-User-ID header."""
    uid = request.headers.get("X-User-ID", "anonymous")
    return "user:" + uid
|
||||
|
||||
|
||||
@app.get("/api/user-limit")
@rate_limit(
    limit=50,
    window_size=60,
    key_extractor=user_extractor,
    key_prefix="user_api",
)
async def user_limit(request: Request) -> dict[str, str]:
    """Per-user quota that follows the user across app instances."""
    uid = request.headers.get("X-User-ID", "anonymous")
    return {"message": "Success", "user_id": uid}
|
||||
|
||||
|
||||
# Token bucket works well with Redis for burst handling
@app.get("/api/burst-allowed")
@rate_limit(
    limit=100,
    window_size=60,
    algorithm=Algorithm.TOKEN_BUCKET,
    burst_size=20,
    key_prefix="burst",
)
async def burst_allowed(request: Request) -> dict[str, str]:
    """Token-bucket endpoint: short bursts of up to 20 extra requests pass."""
    result = {"message": "Burst request successful"}
    return result
|
||||
|
||||
|
||||
@app.get("/health")
async def health(backend: BackendDep) -> dict[str, object]:
    """Health check reporting the active backend and Redis reachability."""
    backend_type = type(backend).__name__
    redis_healthy = False

    # Only Redis-style backends expose ping(); the memory fallback does not.
    if hasattr(backend, "ping"):
        try:
            redis_healthy = await backend.ping()
        except Exception:
            redis_healthy = False

    return {
        "status": "healthy",
        "backend": backend_type,
        "redis_connected": redis_healthy,
    }
|
||||
|
||||
|
||||
@app.get("/stats")
async def stats(backend: BackendDep) -> dict[str, object]:
    """Expose backend statistics when the backend supports them."""
    if not hasattr(backend, "get_stats"):
        return {"message": "Stats not available for this backend"}
    try:
        return await backend.get_stats()
    except Exception as e:
        return {"error": str(e)}
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Run multiple instances on different ports to test distributed limiting:
    # REDIS_URL=redis://localhost:6379/0 python 07_redis_distributed.py
    # In another terminal:
    # uvicorn 07_redis_distributed:app --port 8001
    # Both processes will share counters when backed by the same Redis.
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
256
examples/08_tiered_api.py
Normal file
256
examples/08_tiered_api.py
Normal file
@@ -0,0 +1,256 @@
|
||||
"""Example of a production-ready tiered API with different rate limits per plan."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
Algorithm,
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
)
|
||||
from fastapi_traffic.core.decorator import RateLimitDependency
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Single in-process backend/limiter pair shared by all endpoints in this
# example (swap for Redis in multi-instance deployments).
backend = MemoryBackend()
limiter = RateLimiter(backend)
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Initialize the shared limiter on startup; close it on shutdown."""
    await limiter.initialize()
    # Register globally so decorator/dependency helpers can resolve it.
    set_limiter(limiter)
    yield
    await limiter.close()


app = FastAPI(
    title="Tiered API Example",
    description="API with different rate limits based on subscription tier",
    lifespan=lifespan,
)
|
||||
|
||||
|
||||
class Tier(str, Enum):
    """Subscription plans, from most to least restrictive."""

    FREE = "free"
    STARTER = "starter"
    PRO = "pro"
    ENTERPRISE = "enterprise"


@dataclass
class TierConfig:
    """Rate-limit quotas and feature flags attached to a subscription tier."""

    requests_per_minute: int  # steady-state per-minute quota (enforced below)
    requests_per_hour: int  # hourly cap; only surfaced in /tier-info and /pricing here
    requests_per_day: int  # daily cap; only surfaced in /tier-info and /pricing here
    burst_size: int  # extra token-bucket headroom for short bursts
    features: list[str]  # feature names unlocked at this tier
|
||||
|
||||
|
||||
# Tier configurations
# Quotas grow with the plan; feature lists are cumulative supersets.
TIER_CONFIGS: dict[Tier, TierConfig] = {
    Tier.FREE: TierConfig(
        requests_per_minute=10,
        requests_per_hour=100,
        requests_per_day=500,
        burst_size=5,
        features=["basic_api"],
    ),
    Tier.STARTER: TierConfig(
        requests_per_minute=60,
        requests_per_hour=1000,
        requests_per_day=10000,
        burst_size=20,
        features=["basic_api", "webhooks"],
    ),
    Tier.PRO: TierConfig(
        requests_per_minute=300,
        requests_per_hour=10000,
        requests_per_day=100000,
        burst_size=50,
        features=["basic_api", "webhooks", "analytics", "priority_support"],
    ),
    Tier.ENTERPRISE: TierConfig(
        requests_per_minute=1000,
        requests_per_hour=50000,
        requests_per_day=500000,
        burst_size=200,
        features=["basic_api", "webhooks", "analytics", "priority_support", "sla", "custom_integrations"],
    ),
}


# Simulated API key database
# Demo credentials only: maps raw key strings to their tier and owner.
API_KEYS: dict[str, dict[str, Any]] = {
    "free-key-123": {"tier": Tier.FREE, "user_id": "user_1"},
    "starter-key-456": {"tier": Tier.STARTER, "user_id": "user_2"},
    "pro-key-789": {"tier": Tier.PRO, "user_id": "user_3"},
    "enterprise-key-000": {"tier": Tier.ENTERPRISE, "user_id": "user_4"},
}
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    """429 handler that also advertises an upgrade path for lower tiers."""
    key_info = API_KEYS.get(request.headers.get("X-API-Key", ""), {})
    tier = key_info.get("tier", Tier.FREE)

    # Enterprise is the top plan, so no upgrade link for it.
    upgrade = None if tier == Tier.ENTERPRISE else "https://example.com/pricing"
    body = {
        "error": "rate_limit_exceeded",
        "message": exc.message,
        "retry_after": exc.retry_after,
        "tier": tier.value,
        "upgrade_url": upgrade,
    }
    return JSONResponse(
        status_code=429,
        content=body,
        headers=exc.limit_info.to_headers() if exc.limit_info else {},
    )
|
||||
|
||||
|
||||
def get_api_key_info(request: Request) -> dict[str, Any]:
    """Resolve the X-API-Key header to its stored account record.

    Raises:
        HTTPException: 401 when the header is missing/empty or unknown.
    """
    api_key = request.headers.get("X-API-Key")
    if not api_key:
        raise HTTPException(status_code=401, detail="API key required")

    record = API_KEYS.get(api_key)
    if not record:
        raise HTTPException(status_code=401, detail="Invalid API key")

    return {"api_key": api_key, **record}
|
||||
|
||||
|
||||
def get_tier_config(key_info: dict[str, Any] = Depends(get_api_key_info)) -> TierConfig:
    """Look up the quota configuration for the authenticated user's tier."""
    plan = key_info.get("tier", Tier.FREE)
    return TIER_CONFIGS[plan]
|
||||
|
||||
|
||||
# Create rate limit dependencies for each tier: one reusable dependency per
# plan, each enforcing the per-minute quota with token-bucket bursting and
# namespacing its counters by tier.
tier_rate_limits: dict[Tier, RateLimitDependency] = {
    plan: RateLimitDependency(
        limit=cfg.requests_per_minute,
        window_size=60,
        algorithm=Algorithm.TOKEN_BUCKET,
        burst_size=cfg.burst_size,
        key_prefix=f"tier_{plan.value}",
    )
    for plan, cfg in TIER_CONFIGS.items()
}
|
||||
|
||||
|
||||
def api_key_extractor(request: Request) -> str:
    """Derive the limiter key from X-API-Key (anonymous callers share one)."""
    key = request.headers.get("X-API-Key", "anonymous")
    return "api:" + key
|
||||
|
||||
|
||||
async def apply_tier_rate_limit(
    request: Request,
    key_info: dict[str, Any] = Depends(get_api_key_info),
) -> dict[str, Any]:
    """Run the tier-appropriate limiter and bundle the outcome for endpoints."""
    plan = key_info.get("tier", Tier.FREE)
    info = await tier_rate_limits[plan](request)
    return {
        "tier": plan,
        "config": TIER_CONFIGS[plan],
        "rate_info": info,
        "key_info": key_info,
    }
|
||||
|
||||
|
||||
@app.get("/api/v1/data")
async def get_data(
    request: Request,
    limit_info: dict[str, Any] = Depends(apply_tier_rate_limit),
) -> dict[str, Any]:
    """Data endpoint guarded by the caller's tier-specific limiter."""
    info = limit_info["rate_info"]
    return {
        "data": {"items": ["item1", "item2", "item3"]},
        "tier": limit_info["tier"].value,
        "rate_limit": {
            "limit": info.limit,
            "remaining": info.remaining,
            "reset_at": info.reset_at,
        },
    }
|
||||
|
||||
|
||||
@app.get("/api/v1/analytics")
async def get_analytics(
    request: Request,
    limit_info: dict[str, Any] = Depends(apply_tier_rate_limit),
) -> dict[str, Any]:
    """Analytics endpoint, gated to tiers whose feature list includes it."""
    plan = limit_info["tier"]
    # Feature gate: only Pro and Enterprise carry "analytics".
    if "analytics" not in limit_info["config"].features:
        raise HTTPException(
            status_code=403,
            detail=f"Analytics requires Pro tier or higher. Current tier: {plan.value}",
        )

    return {
        "analytics": {
            "total_requests": 12345,
            "unique_users": 567,
            "avg_response_time_ms": 45,
        },
        "tier": plan.value,
    }
|
||||
|
||||
|
||||
@app.get("/api/v1/tier-info")
async def get_tier_info(
    key_info: dict[str, Any] = Depends(get_api_key_info),
) -> dict[str, Any]:
    """Describe the caller's tier: quotas, features, and upgrade targets."""
    plan = key_info.get("tier", Tier.FREE)
    cfg = TIER_CONFIGS[plan]

    # Upgrade options = every tier with a strictly higher per-minute quota.
    upgrades = [
        t.value
        for t in Tier
        if TIER_CONFIGS[t].requests_per_minute > cfg.requests_per_minute
    ]
    return {
        "tier": plan.value,
        "limits": {
            "requests_per_minute": cfg.requests_per_minute,
            "requests_per_hour": cfg.requests_per_hour,
            "requests_per_day": cfg.requests_per_day,
            "burst_size": cfg.burst_size,
        },
        "features": cfg.features,
        "upgrade_options": upgrades,
    }
|
||||
|
||||
|
||||
@app.get("/pricing")
async def pricing() -> dict[str, Any]:
    """Unauthenticated endpoint listing every tier's headline quotas."""
    tier_table: dict[str, Any] = {}
    for plan, cfg in TIER_CONFIGS.items():
        tier_table[plan.value] = {
            "requests_per_minute": cfg.requests_per_minute,
            "requests_per_day": cfg.requests_per_day,
            "features": cfg.features,
        }
    return {"tiers": tier_table}
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Test with different API keys:
    # curl -H "X-API-Key: free-key-123" http://localhost:8000/api/v1/data
    # curl -H "X-API-Key: pro-key-789" http://localhost:8000/api/v1/analytics
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
208
examples/09_custom_responses.py
Normal file
208
examples/09_custom_responses.py
Normal file
@@ -0,0 +1,208 @@
|
||||
"""Examples demonstrating custom rate limit responses and callbacks."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import HTMLResponse, JSONResponse, PlainTextResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Module-wide logger used by the blocked-request callbacks below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# In-process backend/limiter pair shared by every endpoint in this example.
backend = MemoryBackend()
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Initialize the shared limiter on startup; close it on shutdown."""
    await limiter.initialize()
    # Register globally so @rate_limit decorators can resolve the limiter.
    set_limiter(limiter)
    yield
    await limiter.close()


app = FastAPI(title="Custom Responses Example", lifespan=lifespan)
|
||||
|
||||
|
||||
# 1. Standard JSON error response
# NOTE(review): a second handler for RateLimitExceeded is registered further
# down in this file (flexible_rate_limit_handler). Starlette keeps one
# handler per exception class, so the later registration supersedes this
# one — keep only one of the two in a real application.
@app.exception_handler(RateLimitExceeded)
async def json_rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    """Standard JSON response for API clients."""
    headers = exc.limit_info.to_headers() if exc.limit_info else {}

    return JSONResponse(
        status_code=429,
        content={
            "error": {
                "code": "RATE_LIMIT_EXCEEDED",
                "message": exc.message,
                "retry_after_seconds": exc.retry_after,
                "documentation_url": "https://docs.example.com/rate-limits",
            },
            "request_id": request.headers.get("X-Request-ID", "unknown"),
            "timestamp": datetime.now(timezone.utc).isoformat(),
        },
        headers=headers,
    )
|
||||
|
||||
|
||||
# 2. Callback for logging/monitoring when requests are blocked
async def log_blocked_request(request: Request, info: Any) -> None:
    """Emit a structured warning whenever the limiter blocks a request."""
    client_ip = "unknown" if request.client is None else request.client.host
    remaining = info.remaining if info else "unknown"

    logger.warning(
        "Rate limit exceeded: ip=%s path=%s user_agent=%s remaining=%s",
        client_ip,
        request.url.path,
        request.headers.get("User-Agent", "unknown"),
        remaining,
    )

    # In production, you might:
    # - Send to metrics system (Prometheus, DataDog, etc.)
    # - Trigger alerts for suspicious patterns
    # - Update a blocklist for repeat offenders
|
||||
|
||||
|
||||
@app.get("/api/monitored")
@rate_limit(
    limit=5,
    window_size=60,
    on_blocked=log_blocked_request,
)
async def monitored_endpoint(request: Request) -> dict[str, str]:
    """Route whose rejected requests are reported via log_blocked_request."""
    payload = {"message": "Success"}
    return payload
|
||||
|
||||
|
||||
# 3. Custom error messages per endpoint
@app.get("/api/search")
@rate_limit(
    limit=10,
    window_size=60,
    error_message="Search rate limit exceeded. Please wait before searching again.",
)
async def search_endpoint(request: Request, q: str = "") -> dict[str, Any]:
    """Search route with a search-specific rejection message."""
    hits: list[Any] = []
    return {"query": q, "results": hits}
|
||||
|
||||
|
||||
@app.get("/api/upload")
@rate_limit(
    limit=5,
    window_size=300,  # 5 uploads per 5 minutes
    error_message="Upload limit reached. You can upload 5 files every 5 minutes.",
)
async def upload_endpoint(request: Request) -> dict[str, str]:
    """Upload route with its own rejection message."""
    return dict(message="Upload successful")
|
||||
|
||||
|
||||
# 4. Different response formats based on Accept header
@app.get("/api/flexible")
@rate_limit(limit=10, window_size=60)
async def flexible_endpoint(request: Request) -> dict[str, str]:
    """Route whose 429 responses are content-negotiated by the handler below."""
    body = {"message": "Success", "data": "Some data"}
    return body
|
||||
|
||||
|
||||
# Custom exception handler that respects Accept header
# NOTE(review): this registration replaces json_rate_limit_handler above —
# Starlette keeps a single handler per exception class, and the last one
# registered wins.
@app.exception_handler(RateLimitExceeded)
async def flexible_rate_limit_handler(request: Request, exc: RateLimitExceeded):
    """Return response in format matching Accept header.

    Branch order: HTML when the client accepts text/html, plain text for
    text/plain, JSON otherwise.
    """
    accept = request.headers.get("Accept", "application/json")
    headers = exc.limit_info.to_headers() if exc.limit_info else {}

    if "text/html" in accept:
        html_content = f"""
        <!DOCTYPE html>
        <html>
        <head><title>Rate Limit Exceeded</title></head>
        <body>
        <h1>429 - Too Many Requests</h1>
        <p>{exc.message}</p>
        <p>Please try again in {exc.retry_after:.0f} seconds.</p>
        </body>
        </html>
        """
        return HTMLResponse(content=html_content, status_code=429, headers=headers)

    elif "text/plain" in accept:
        return PlainTextResponse(
            content=f"Rate limit exceeded. Retry after {exc.retry_after:.0f} seconds.",
            status_code=429,
            headers=headers,
        )

    else:
        return JSONResponse(
            status_code=429,
            content={
                "error": "rate_limit_exceeded",
                "message": exc.message,
                "retry_after": exc.retry_after,
            },
            headers=headers,
        )
|
||||
|
||||
|
||||
# 5. Include helpful information in response headers
@app.get("/api/verbose-headers")
@rate_limit(
    limit=10,
    window_size=60,
    include_headers=True,  # Includes X-RateLimit-* headers
)
async def verbose_headers_endpoint(request: Request) -> dict[str, Any]:
    """Successful responses carry the standard X-RateLimit-* headers."""
    header_names = [
        "X-RateLimit-Limit",
        "X-RateLimit-Remaining",
        "X-RateLimit-Reset",
    ]
    return {
        "message": "Check response headers for rate limit info",
        "headers_included": header_names,
    }
|
||||
|
||||
|
||||
# 6. Graceful degradation - return cached/stale data instead of error
|
||||
cached_data = {"data": "Cached response", "cached_at": datetime.now(timezone.utc).isoformat()}
|
||||
|
||||
|
||||
async def return_cached_on_limit(request: Request, info: Any) -> None:
|
||||
"""Log when rate limited (callback doesn't prevent exception)."""
|
||||
logger.info("Returning cached data due to rate limit")
|
||||
# This callback is called when blocked, but doesn't prevent the exception
|
||||
# To actually return cached data, you'd need custom middleware
|
||||
|
||||
|
||||
@app.get("/api/graceful")
@rate_limit(
    limit=5,
    window_size=60,
    on_blocked=return_cached_on_limit,
)
async def graceful_endpoint(request: Request) -> dict[str, str]:
    """Fresh data when allowed; the callback above logs blocked attempts."""
    now_iso = datetime.now(timezone.utc).isoformat()
    return {"message": "Fresh data", "timestamp": now_iso}
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import uvicorn

    # Bind to all interfaces so the example is reachable from containers/VMs.
    uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
325
examples/10_advanced_patterns.py
Normal file
325
examples/10_advanced_patterns.py
Normal file
@@ -0,0 +1,325 @@
|
||||
"""Advanced patterns and real-world use cases for rate limiting."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import time
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Any
|
||||
|
||||
from fastapi import Depends, FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
Algorithm,
|
||||
MemoryBackend,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.decorator import RateLimitDependency
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Single in-process backend/limiter pair used by every pattern below.
backend = MemoryBackend()
limiter = RateLimiter(backend)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Wire the shared limiter into the app's startup/shutdown cycle."""
    await limiter.initialize()
    # Register globally so @rate_limit decorators can resolve the limiter.
    set_limiter(limiter)
    yield
    await limiter.close()


app = FastAPI(title="Advanced Patterns", lifespan=lifespan)
|
||||
|
||||
|
||||
@app.exception_handler(RateLimitExceeded)
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    """Minimal 429 translator used by every pattern in this example."""
    hdrs = exc.limit_info.to_headers() if exc.limit_info else {}
    return JSONResponse(
        status_code=429,
        content={"error": "rate_limit_exceeded", "retry_after": exc.retry_after},
        headers=hdrs,
    )
|
||||
|
||||
|
||||
# =============================================================================
# Pattern 1: Cost-based rate limiting
# Different operations consume different amounts of quota
# =============================================================================

@app.get("/api/list")
@rate_limit(limit=100, window_size=60, cost=1)
async def list_items(request: Request) -> dict[str, Any]:
    """Cheap read: deducts a single token from the shared 100/min quota."""
    return {"items": ["a", "b", "c"], "cost": 1}


@app.get("/api/details/{item_id}")
@rate_limit(limit=100, window_size=60, cost=5)
async def get_details(request: Request, item_id: str) -> dict[str, Any]:
    """Moderate read: deducts 5 tokens per call."""
    return {"item_id": item_id, "details": "...", "cost": 5}


@app.post("/api/generate")
@rate_limit(limit=100, window_size=60, cost=20)
async def generate_content(request: Request) -> dict[str, Any]:
    """Heavy operation: deducts 20 tokens per call."""
    return {"generated": "AI-generated content...", "cost": 20}


@app.post("/api/bulk-export")
@rate_limit(limit=100, window_size=60, cost=50)
async def bulk_export(request: Request) -> dict[str, Any]:
    """Heaviest operation: one call consumes half the per-minute quota."""
    return {"export_url": "https://...", "cost": 50}
|
||||
|
||||
|
||||
# =============================================================================
# Pattern 2: Sliding scale exemptions
# Gradually reduce limits instead of hard blocking
# =============================================================================

def get_request_priority(request: Request) -> int:
    """Score a request's importance from its headers (higher wins)."""
    headers = request.headers
    if headers.get("X-Premium-User") == "true":
        return 100  # premium users: highest priority
    if headers.get("Authorization"):
        return 50  # authenticated users: medium priority
    return 10  # anonymous users: lowest priority


def should_exempt_high_priority(request: Request) -> bool:
    """Exempt requests scoring at or above the premium threshold (100)."""
    score = get_request_priority(request)
    return score >= 100
|
||||
|
||||
|
||||
@app.get("/api/priority-based")
@rate_limit(
    limit=10,
    window_size=60,
    exempt_when=should_exempt_high_priority,
)
async def priority_endpoint(request: Request) -> dict[str, Any]:
    """Premium callers bypass the limiter; everyone else shares 10/min."""
    score = get_request_priority(request)
    return {
        "message": "Success",
        "priority": score,
        "exempt": score >= 100,
    }
|
||||
|
||||
|
||||
# =============================================================================
# Pattern 3: Rate limit by resource, not just user
# Prevent abuse of specific resources
# =============================================================================

def resource_key_extractor(request: Request) -> str:
    """Compose a limiter key from the target resource and the caller."""
    rid = request.path_params.get("resource_id", "unknown")
    uid = request.headers.get("X-User-ID", "anonymous")
    return ":".join(("resource", rid, "user", uid))
|
||||
|
||||
|
||||
@app.get("/api/resources/{resource_id}")
@rate_limit(
    limit=10,
    window_size=60,
    key_extractor=resource_key_extractor,
)
async def get_resource(request: Request, resource_id: str) -> dict[str, str]:
    """Each (user, resource) pair gets its own 10-per-minute quota."""
    payload = {"resource_id": resource_id}
    payload["data"] = "..."
    return payload
|
||||
|
||||
|
||||
# =============================================================================
# Pattern 4: Login/authentication rate limiting
# Prevent brute force attacks
# =============================================================================

def login_key_extractor(request: Request) -> str:
    """Key brute-force tracking on the (client IP, username) pair."""
    client = request.client
    ip = client.host if client else "unknown"
    # In real app, parse username from request body
    username = request.headers.get("X-Username", "unknown")
    return f"login:{ip}:{username}"
|
||||
|
||||
|
||||
@app.post("/auth/login")
@rate_limit(
    limit=5,
    window_size=300,  # 5 attempts per 5 minutes
    algorithm=Algorithm.SLIDING_WINDOW,  # Precise tracking for security
    key_extractor=login_key_extractor,
    error_message="Too many login attempts. Please try again in 5 minutes.",
)
async def login(request: Request) -> dict[str, str]:
    """Login route throttled per IP+username to resist credential stuffing."""
    result = {"message": "Login successful", "token": "..."}
    return result
|
||||
|
||||
|
||||
# Password reset - even stricter limits
def password_reset_key(request: Request) -> str:
    """Key reset-request throttling purely on the client IP."""
    client = request.client
    ip = client.host if client else "unknown"
    return f"password_reset:{ip}"
|
||||
|
||||
|
||||
@app.post("/auth/password-reset")
@rate_limit(
    limit=3,
    window_size=3600,  # 3 attempts per hour
    key_extractor=password_reset_key,
    error_message="Too many password reset requests. Please try again later.",
)
async def password_reset(request: Request) -> dict[str, str]:
    """Reset route with a strict 3-per-hour-per-IP quota."""
    return dict(message="Password reset email sent")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pattern 5: Webhook/callback rate limiting
|
||||
# Limit outgoing requests to prevent overwhelming external services
|
||||
# =============================================================================
|
||||
|
||||
# Shared dependency: at most 100 webhook dispatches per 60s window per key.
webhook_rate_limit = RateLimitDependency(
    limit=100,
    window_size=60,
    key_prefix="webhook",
)
|
||||
|
||||
|
||||
async def check_webhook_limit(
    request: Request,
    webhook_url: str,
) -> None:
    """Illustrate building a per-destination webhook rate-limit key.

    This is a placeholder: it derives the key from the destination domain
    but performs no enforcement. In production the key would be handed to
    the limiter directly.
    """
    from urllib.parse import urlparse

    destination = urlparse(webhook_url).netloc
    _key = f"webhook:{destination}"  # Would be used with limiter in production
    _ = _key  # simplified example never consumes the key
|
||||
|
||||
|
||||
@app.post("/api/send-webhook")
|
||||
async def send_webhook(
|
||||
request: Request,
|
||||
webhook_url: str = "https://example.com/webhook",
|
||||
rate_info: Any = Depends(webhook_rate_limit),
|
||||
) -> dict[str, Any]:
|
||||
"""Send webhook with rate limiting to protect external services."""
|
||||
# await check_webhook_limit(request, webhook_url)
|
||||
return {
|
||||
"message": "Webhook sent",
|
||||
"destination": webhook_url,
|
||||
"remaining_quota": rate_info.remaining,
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pattern 6: Request fingerprinting
|
||||
# Detect and limit similar requests (e.g., spam prevention)
|
||||
# =============================================================================
|
||||
|
||||
def request_fingerprint(request: Request) -> str:
    """Derive a short, stable fingerprint from client-visible request traits."""
    client_ip = request.client.host if request.client else "unknown"
    ua = request.headers.get("User-Agent", "")
    lang = request.headers.get("Accept-Language", "")

    # md5 is acceptable here: the digest is a bucketing identifier, not a
    # security primitive.
    raw = f"{client_ip}:{ua}:{lang}"
    digest = hashlib.md5(raw.encode()).hexdigest()
    return f"fingerprint:{digest[:16]}"
|
||||
|
||||
|
||||
@app.post("/api/submit-form")
|
||||
@rate_limit(
|
||||
limit=5,
|
||||
window_size=60,
|
||||
key_extractor=request_fingerprint,
|
||||
error_message="Too many submissions from this device.",
|
||||
)
|
||||
async def submit_form(request: Request) -> dict[str, str]:
|
||||
"""Form submission with fingerprint-based rate limiting."""
|
||||
return {"message": "Form submitted successfully"}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pattern 7: Time-of-day based limits
|
||||
# Different limits during peak vs off-peak hours
|
||||
# =============================================================================
|
||||
|
||||
def is_peak_hours() -> bool:
    """Return True between 09:00 (inclusive) and 18:00 (exclusive) UTC."""
    hour_utc = time.gmtime().tm_hour
    return hour_utc in range(9, 18)
|
||||
|
||||
|
||||
def peak_aware_exempt(request: Request) -> bool:
    """Skip rate limiting entirely whenever we are outside peak hours."""
    if is_peak_hours():
        return False
    return True
|
||||
|
||||
|
||||
@app.get("/api/peak-aware")
|
||||
@rate_limit(
|
||||
limit=10, # Strict limit during peak hours
|
||||
window_size=60,
|
||||
exempt_when=peak_aware_exempt, # No limit during off-peak
|
||||
)
|
||||
async def peak_aware_endpoint(request: Request) -> dict[str, Any]:
|
||||
"""Stricter limits during peak hours."""
|
||||
return {
|
||||
"message": "Success",
|
||||
"is_peak_hours": is_peak_hours(),
|
||||
"rate_limited": is_peak_hours(),
|
||||
}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Pattern 8: Cascading limits (multiple tiers)
|
||||
# =============================================================================
|
||||
|
||||
# Three independent tiers; a request must pass all of them. Distinct
# key_prefix values keep each tier's counters separate in the backend.
per_second = RateLimitDependency(limit=5, window_size=1, key_prefix="sec")
per_minute = RateLimitDependency(limit=100, window_size=60, key_prefix="min")
per_hour = RateLimitDependency(limit=1000, window_size=3600, key_prefix="hour")
|
||||
|
||||
|
||||
async def cascading_limits(
    request: Request,
    sec_info: Any = Depends(per_second),
    min_info: Any = Depends(per_minute),
    hour_info: Any = Depends(per_hour),
) -> dict[str, Any]:
    """Apply multiple rate limit tiers.

    Each Depends() enforces its own tier; if any tier is exhausted the
    dependency raises before this body runs.
    """
    return {
        "per_second": {"remaining": sec_info.remaining},
        "per_minute": {"remaining": min_info.remaining},
        "per_hour": {"remaining": hour_info.remaining},
    }
|
||||
|
||||
|
||||
@app.get("/api/cascading")
|
||||
async def cascading_endpoint(
|
||||
request: Request,
|
||||
limits: dict[str, Any] = Depends(cascading_limits),
|
||||
) -> dict[str, Any]:
|
||||
"""Endpoint with per-second, per-minute, and per-hour limits."""
|
||||
return {"message": "Success", "limits": limits}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8001)
|
||||
133
examples/README.md
Normal file
133
examples/README.md
Normal file
@@ -0,0 +1,133 @@
|
||||
# FastAPI Traffic Examples
|
||||
|
||||
This directory contains comprehensive examples demonstrating how to use the `fastapi-traffic` rate limiting library.
|
||||
|
||||
## Basic Examples
|
||||
|
||||
### 01_quickstart.py
|
||||
Minimal setup to get rate limiting working. Start here if you're new to the library.
|
||||
- Basic backend and limiter setup
|
||||
- Exception handler for rate limit errors
|
||||
- Simple decorator usage
|
||||
|
||||
### 02_algorithms.py
|
||||
Demonstrates all available rate limiting algorithms:
|
||||
- **Fixed Window** - Simple, resets at fixed intervals
|
||||
- **Sliding Window** - Most precise, stores timestamps
|
||||
- **Sliding Window Counter** - Balance of precision and efficiency (default)
|
||||
- **Token Bucket** - Allows controlled bursts
|
||||
- **Leaky Bucket** - Smooths out traffic
|
||||
|
||||
### 03_backends.py
|
||||
Shows different storage backends:
|
||||
- **MemoryBackend** - Fast, ephemeral (default)
|
||||
- **SQLiteBackend** - Persistent, single-instance
|
||||
- **RedisBackend** - Distributed, multi-instance
|
||||
|
||||
### 04_key_extractors.py
|
||||
Custom key extractors for different rate limiting strategies:
|
||||
- Rate limit by IP address (default)
|
||||
- Rate limit by API key
|
||||
- Rate limit by user ID
|
||||
- Rate limit by endpoint + IP
|
||||
- Rate limit by tenant/organization
|
||||
- Composite keys (user + action)
|
||||
|
||||
### 05_middleware.py
|
||||
Middleware-based rate limiting for global protection:
|
||||
- Basic middleware setup
|
||||
- Custom configuration options
|
||||
- Path and IP exemptions
|
||||
- Alternative middleware classes
|
||||
|
||||
## Advanced Examples
|
||||
|
||||
### 06_dependency_injection.py
|
||||
Using FastAPI's dependency injection system:
|
||||
- Basic rate limit dependency
|
||||
- Tier-based rate limiting
|
||||
- Combining multiple rate limits
|
||||
- Conditional exemptions
|
||||
|
||||
### 07_redis_distributed.py
|
||||
Redis backend for distributed deployments:
|
||||
- Multi-instance rate limiting
|
||||
- Shared counters across nodes
|
||||
- Health checks and statistics
|
||||
- Fallback to memory backend
|
||||
|
||||
### 08_tiered_api.py
|
||||
Production-ready tiered API example:
|
||||
- Free, Starter, Pro, Enterprise tiers
|
||||
- Different limits per tier
|
||||
- Feature gating based on tier
|
||||
- API key validation
|
||||
|
||||
### 09_custom_responses.py
|
||||
Customizing rate limit responses:
|
||||
- Custom JSON error responses
|
||||
- Logging/monitoring callbacks
|
||||
- Different response formats (JSON, HTML, plain text)
|
||||
- Rate limit headers
|
||||
|
||||
### 10_advanced_patterns.py
|
||||
Real-world patterns and use cases:
|
||||
- **Cost-based limiting** - Different operations cost different amounts
|
||||
- **Priority exemptions** - Premium users exempt from limits
|
||||
- **Resource-based limiting** - Limit by resource ID + user
|
||||
- **Login protection** - Brute force prevention
|
||||
- **Webhook limiting** - Protect external services
|
||||
- **Request fingerprinting** - Spam prevention
|
||||
- **Time-of-day limits** - Peak vs off-peak hours
|
||||
- **Cascading limits** - Per-second, per-minute, per-hour
|
||||
|
||||
## Running Examples
|
||||
|
||||
Each example is a standalone FastAPI application. Run with:
|
||||
|
||||
```bash
|
||||
# Using uvicorn directly
|
||||
uvicorn examples.01_quickstart:app --reload
|
||||
|
||||
# Or run the file directly
|
||||
python examples/01_quickstart.py
|
||||
```
|
||||
|
||||
## Testing Rate Limits
|
||||
|
||||
Use curl or httpie to test:
|
||||
|
||||
```bash
|
||||
# Basic request
|
||||
curl http://localhost:8000/api/basic
|
||||
|
||||
# With API key
|
||||
curl -H "X-API-Key: my-key" http://localhost:8000/api/by-api-key
|
||||
|
||||
# Check rate limit headers
|
||||
curl -i http://localhost:8000/api/data
|
||||
|
||||
# Rapid requests to trigger rate limit
|
||||
for i in {1..20}; do curl http://localhost:8000/api/basic; done
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Some examples support configuration via environment variables:
|
||||
|
||||
- `RATE_LIMIT_BACKEND` - Backend type (memory, sqlite, redis)
|
||||
- `REDIS_URL` - Redis connection URL for distributed examples
|
||||
|
||||
## Requirements
|
||||
|
||||
Basic examples only need `fastapi-traffic` and `uvicorn`:
|
||||
|
||||
```bash
|
||||
pip install fastapi-traffic uvicorn
|
||||
```
|
||||
|
||||
For Redis examples:
|
||||
|
||||
```bash
|
||||
pip install redis
|
||||
```
|
||||
171
examples/basic_usage.py
Normal file
171
examples/basic_usage.py
Normal file
@@ -0,0 +1,171 @@
|
||||
"""Basic usage examples for fastapi-traffic."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import AsyncIterator
|
||||
|
||||
from fastapi import Depends, FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic import (
|
||||
Algorithm,
|
||||
RateLimitExceeded,
|
||||
RateLimiter,
|
||||
SQLiteBackend,
|
||||
rate_limit,
|
||||
)
|
||||
from fastapi_traffic.core.decorator import RateLimitDependency
|
||||
from fastapi_traffic.core.limiter import set_limiter
|
||||
|
||||
# Configure global rate limiter with SQLite backend for persistence
# set_limiter() registers this instance as the process-wide default used by
# the @rate_limit decorator. The DB file is created in the working directory.
backend = SQLiteBackend("rate_limits.db")
limiter = RateLimiter(backend)
set_limiter(limiter)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncIterator[None]:
    """Bring the rate limiter up before serving and tear it down afterwards."""
    await limiter.initialize()  # startup phase
    yield  # the application serves requests while suspended here
    await limiter.close()  # shutdown phase
|
||||
|
||||
|
||||
app = FastAPI(title="FastAPI Traffic Example", lifespan=lifespan)
|
||||
|
||||
|
||||
# Exception handler for rate limit exceeded
|
||||
@app.exception_handler(RateLimitExceeded)
async def rate_limit_handler(request: Request, exc: RateLimitExceeded) -> JSONResponse:
    """Translate RateLimitExceeded into a 429 JSON response with limit headers."""
    if exc.limit_info:
        response_headers = exc.limit_info.to_headers()
    else:
        response_headers = {}
    body = {
        "error": "rate_limit_exceeded",
        "message": exc.message,
        "retry_after": exc.retry_after,
    }
    return JSONResponse(status_code=429, content=body, headers=response_headers)
|
||||
|
||||
|
||||
# Example 1: Basic decorator usage
|
||||
@app.get("/api/basic")
|
||||
@rate_limit(100, 60) # 100 requests per minute
|
||||
async def basic_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Basic rate-limited endpoint."""
|
||||
return {"message": "Hello, World!"}
|
||||
|
||||
|
||||
# Example 2: Custom algorithm
|
||||
@app.get("/api/token-bucket")
|
||||
@rate_limit(
|
||||
limit=50,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.TOKEN_BUCKET,
|
||||
burst_size=10, # Allow bursts of up to 10 requests
|
||||
)
|
||||
async def token_bucket_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Endpoint using token bucket algorithm."""
|
||||
return {"message": "Token bucket rate limiting"}
|
||||
|
||||
|
||||
# Example 3: Sliding window for precise rate limiting
|
||||
@app.get("/api/sliding-window")
|
||||
@rate_limit(
|
||||
limit=30,
|
||||
window_size=60,
|
||||
algorithm=Algorithm.SLIDING_WINDOW,
|
||||
)
|
||||
async def sliding_window_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Endpoint using sliding window algorithm."""
|
||||
return {"message": "Sliding window rate limiting"}
|
||||
|
||||
|
||||
# Example 4: Custom key extractor (rate limit by API key)
|
||||
def api_key_extractor(request: Request) -> str:
    """Derive the rate-limit key from the caller's API key header."""
    # Missing header falls back to a shared "anonymous" bucket.
    key = request.headers.get("X-API-Key", "anonymous")
    return f"api_key:{key}"
|
||||
|
||||
|
||||
@app.get("/api/by-api-key")
|
||||
@rate_limit(
|
||||
limit=1000,
|
||||
window_size=3600, # 1000 requests per hour
|
||||
key_extractor=api_key_extractor,
|
||||
)
|
||||
async def api_key_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Endpoint rate limited by API key."""
|
||||
return {"message": "Rate limited by API key"}
|
||||
|
||||
|
||||
# Example 5: Using dependency injection
|
||||
rate_limit_dep = RateLimitDependency(limit=20, window_size=60)


@app.get("/api/dependency")
async def dependency_endpoint(
    request: Request,
    rate_info: dict[str, object] = Depends(rate_limit_dep),
) -> dict[str, object]:
    """Endpoint using rate limit as dependency.

    Unlike the decorator form, the dependency hands the remaining-quota
    information to the handler, which echoes it back to the caller.
    """
    return {
        "message": "Rate limit info available",
        "rate_limit": rate_info,
    }
|
||||
|
||||
|
||||
# Example 6: Exempt certain requests
|
||||
def is_admin(request: Request) -> bool:
    """Treat the request as admin when it carries the expected admin token."""
    token = request.headers.get("X-Admin-Token")
    return token == "secret-admin-token"
|
||||
|
||||
|
||||
@app.get("/api/admin-exempt")
|
||||
@rate_limit(
|
||||
limit=10,
|
||||
window_size=60,
|
||||
exempt_when=is_admin,
|
||||
)
|
||||
async def admin_exempt_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Endpoint with admin exemption."""
|
||||
return {"message": "Admins are exempt from rate limiting"}
|
||||
|
||||
|
||||
# Example 7: Different costs for different operations
|
||||
@app.post("/api/expensive")
|
||||
@rate_limit(
|
||||
limit=100,
|
||||
window_size=60,
|
||||
cost=10, # This endpoint costs 10 tokens per request
|
||||
)
|
||||
async def expensive_endpoint(request: Request) -> dict[str, str]:
|
||||
"""Expensive operation that costs more tokens."""
|
||||
return {"message": "Expensive operation completed"}
|
||||
|
||||
|
||||
# Example 8: Global middleware rate limiting
|
||||
# Uncomment to enable global rate limiting
|
||||
# app.add_middleware(
|
||||
# RateLimitMiddleware,
|
||||
# limit=1000,
|
||||
# window_size=60,
|
||||
# exempt_paths={"/health", "/docs", "/openapi.json"},
|
||||
# )
|
||||
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check() -> dict[str, str]:
|
||||
"""Health check endpoint (typically exempt from rate limiting)."""
|
||||
return {"status": "healthy"}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
36
fastapi_traffic/__init__.py
Normal file
36
fastapi_traffic/__init__.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""FastAPI Traffic - Production-grade rate limiting for FastAPI."""
|
||||
|
||||
from fastapi_traffic.core.decorator import rate_limit
|
||||
from fastapi_traffic.core.limiter import RateLimiter
|
||||
from fastapi_traffic.core.config import RateLimitConfig
|
||||
from fastapi_traffic.core.algorithms import Algorithm
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.backends.sqlite import SQLiteBackend
|
||||
from fastapi_traffic.exceptions import (
|
||||
RateLimitExceeded,
|
||||
BackendError,
|
||||
ConfigurationError,
|
||||
)
|
||||
|
||||
__version__ = "0.1.0"
|
||||
__all__ = [
|
||||
"rate_limit",
|
||||
"RateLimiter",
|
||||
"RateLimitConfig",
|
||||
"Algorithm",
|
||||
"Backend",
|
||||
"MemoryBackend",
|
||||
"SQLiteBackend",
|
||||
"RateLimitExceeded",
|
||||
"BackendError",
|
||||
"ConfigurationError",
|
||||
]
|
||||
|
||||
# Optional Redis backend
|
||||
try:
|
||||
from fastapi_traffic.backends.redis import RedisBackend
|
||||
|
||||
__all__.append("RedisBackend")
|
||||
except ImportError:
|
||||
pass
|
||||
19
fastapi_traffic/backends/__init__.py
Normal file
19
fastapi_traffic/backends/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Backend implementations for rate limit storage."""
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.backends.sqlite import SQLiteBackend
|
||||
|
||||
__all__ = [
|
||||
"Backend",
|
||||
"MemoryBackend",
|
||||
"SQLiteBackend",
|
||||
]
|
||||
|
||||
# Optional Redis backend
|
||||
try:
|
||||
from fastapi_traffic.backends.redis import RedisBackend
|
||||
|
||||
__all__.append("RedisBackend")
|
||||
except ImportError:
|
||||
pass
|
||||
89
fastapi_traffic/backends/base.py
Normal file
89
fastapi_traffic/backends/base.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""Abstract base class for rate limit storage backends."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
|
||||
class Backend(ABC):
|
||||
"""Abstract base class for rate limit storage backends."""
|
||||
|
||||
@abstractmethod
|
||||
async def get(self, key: str) -> dict[str, Any] | None:
|
||||
"""Get the current state for a key.
|
||||
|
||||
Args:
|
||||
key: The rate limit key.
|
||||
|
||||
Returns:
|
||||
The stored state dictionary or None if not found.
|
||||
"""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
async def set(self, key: str, value: dict[str, Any], *, ttl: float) -> None:
|
||||
"""Set the state for a key with TTL.
|
||||
|
||||
Args:
|
||||
key: The rate limit key.
|
||||
value: The state dictionary to store.
|
||||
ttl: Time-to-live in seconds.
|
||||
"""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
async def delete(self, key: str) -> None:
|
||||
"""Delete the state for a key.
|
||||
|
||||
Args:
|
||||
key: The rate limit key.
|
||||
"""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
async def exists(self, key: str) -> bool:
|
||||
"""Check if a key exists.
|
||||
|
||||
Args:
|
||||
key: The rate limit key.
|
||||
|
||||
Returns:
|
||||
True if the key exists, False otherwise.
|
||||
"""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
async def increment(self, key: str, amount: int = 1) -> int:
|
||||
"""Atomically increment a counter.
|
||||
|
||||
Args:
|
||||
key: The rate limit key.
|
||||
amount: The amount to increment by.
|
||||
|
||||
Returns:
|
||||
The new value after incrementing.
|
||||
"""
|
||||
...
|
||||
|
||||
@abstractmethod
|
||||
async def clear(self) -> None:
|
||||
"""Clear all rate limit data."""
|
||||
...
|
||||
|
||||
async def close(self) -> None:
|
||||
"""Close the backend connection."""
|
||||
pass
|
||||
|
||||
async def __aenter__(self) -> Backend:
|
||||
"""Async context manager entry."""
|
||||
return self
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: Any,
|
||||
) -> None:
|
||||
"""Async context manager exit."""
|
||||
await self.close()
|
||||
139
fastapi_traffic/backends/memory.py
Normal file
139
fastapi_traffic/backends/memory.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""In-memory backend for rate limiting - suitable for single-process applications."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import time
|
||||
from collections import OrderedDict
|
||||
from typing import Any
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
|
||||
|
||||
class MemoryBackend(Backend):
    """Thread-safe in-memory backend with LRU eviction and TTL support."""

    __slots__ = ("_data", "_lock", "_max_size", "_cleanup_interval", "_cleanup_task")

    def __init__(
        self,
        *,
        max_size: int = 10000,
        cleanup_interval: float = 60.0,
    ) -> None:
        """Initialize the memory backend.

        Args:
            max_size: Maximum number of entries to store (LRU eviction).
            cleanup_interval: Interval in seconds for cleaning expired entries.
        """
        # Maps key -> (state dict, absolute expiry timestamp); insertion order
        # doubles as LRU order via move_to_end().
        self._data: OrderedDict[str, tuple[dict[str, Any], float]] = OrderedDict()
        self._lock = asyncio.Lock()
        self._max_size = max_size
        self._cleanup_interval = cleanup_interval
        self._cleanup_task: asyncio.Task[None] | None = None

    async def start_cleanup(self) -> None:
        """Start the background cleanup task."""
        # Idempotent: calling again while a task is running is a no-op.
        if self._cleanup_task is None:
            self._cleanup_task = asyncio.create_task(self._cleanup_loop())

    async def _cleanup_loop(self) -> None:
        """Background task to clean up expired entries."""
        while True:
            try:
                await asyncio.sleep(self._cleanup_interval)
                await self._cleanup_expired()
            except asyncio.CancelledError:
                break
            except Exception:
                # NOTE(review): cleanup failures are silently swallowed so the
                # loop survives; consider logging them for observability.
                pass

    async def _cleanup_expired(self) -> None:
        """Remove expired entries."""
        now = time.time()
        async with self._lock:
            # Collect first, then delete, to avoid mutating while iterating.
            expired_keys = [
                key for key, (_, expires_at) in self._data.items() if expires_at <= now
            ]
            for key in expired_keys:
                del self._data[key]

    def _evict_if_needed(self) -> None:
        """Evict oldest entries if over max size (must be called with lock held)."""
        while len(self._data) > self._max_size:
            # popitem(last=False) drops the least-recently-used entry.
            self._data.popitem(last=False)

    async def get(self, key: str) -> dict[str, Any] | None:
        """Get the current state for a key."""
        async with self._lock:
            if key not in self._data:
                return None

            value, expires_at = self._data[key]
            # Lazy expiry: drop the entry on access once its TTL has elapsed.
            if expires_at <= time.time():
                del self._data[key]
                return None

            # Touch the entry so LRU eviction treats it as recently used.
            self._data.move_to_end(key)
            # Return a copy so callers cannot mutate stored state in place.
            return value.copy()

    async def set(self, key: str, value: dict[str, Any], *, ttl: float) -> None:
        """Set the state for a key with TTL."""
        expires_at = time.time() + ttl
        async with self._lock:
            # Store a copy to decouple from the caller's dictionary.
            self._data[key] = (value.copy(), expires_at)
            self._data.move_to_end(key)
            self._evict_if_needed()

    async def delete(self, key: str) -> None:
        """Delete the state for a key."""
        async with self._lock:
            self._data.pop(key, None)

    async def exists(self, key: str) -> bool:
        """Check if a key exists and is not expired."""
        async with self._lock:
            if key not in self._data:
                return False

            _, expires_at = self._data[key]
            if expires_at <= time.time():
                del self._data[key]
                return False

            return True

    async def increment(self, key: str, amount: int = 1) -> int:
        """Atomically increment a counter."""
        async with self._lock:
            if key in self._data:
                value, expires_at = self._data[key]
                if expires_at > time.time():
                    current = int(value.get("count", 0))
                    new_value = current + amount
                    value["count"] = new_value
                    self._data[key] = (value, expires_at)
                    return new_value

            # NOTE(review): when the key is absent or expired, the incremented
            # value is returned but NOT stored, so repeated increments on a
            # fresh key keep returning `amount`. RedisBackend.increment
            # persists via INCRBY — confirm callers follow up with set() here.
            return amount

    async def clear(self) -> None:
        """Clear all rate limit data."""
        async with self._lock:
            self._data.clear()

    async def close(self) -> None:
        """Stop cleanup task and clear data."""
        if self._cleanup_task is not None:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass
            self._cleanup_task = None
        await self.clear()

    def __len__(self) -> int:
        """Return the number of stored entries."""
        # May include expired entries not yet removed by lazy/periodic cleanup.
        return len(self._data)
|
||||
232
fastapi_traffic/backends/redis.py
Normal file
232
fastapi_traffic/backends/redis.py
Normal file
@@ -0,0 +1,232 @@
|
||||
"""Redis backend for rate limiting - distributed storage for multi-node deployments."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
from fastapi_traffic.exceptions import BackendError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from redis.asyncio import Redis
|
||||
|
||||
|
||||
class RedisBackend(Backend):
    """Redis-based backend for distributed rate limiting.

    All storage errors are wrapped in BackendError with explicit exception
    chaining (``raise ... from e``) so the Redis root cause stays visible in
    tracebacks instead of appearing as "another exception occurred during
    handling".
    """

    __slots__ = ("_client", "_key_prefix", "_owns_client")

    def __init__(
        self,
        client: Redis[bytes],
        *,
        key_prefix: str = "fastapi_traffic",
    ) -> None:
        """Initialize the Redis backend.

        Args:
            client: An async Redis client instance.
            key_prefix: Prefix for all rate limit keys.
        """
        self._client = client
        self._key_prefix = key_prefix
        # Only clients created via from_url() are closed by close().
        self._owns_client = False

    @classmethod
    async def from_url(
        cls,
        url: str = "redis://localhost:6379/0",
        *,
        key_prefix: str = "fastapi_traffic",
        **kwargs: Any,
    ) -> RedisBackend:
        """Create a RedisBackend from a Redis URL.

        Args:
            url: Redis connection URL.
            key_prefix: Prefix for all rate limit keys.
            **kwargs: Additional arguments passed to Redis.from_url().

        Returns:
            A new RedisBackend instance that owns (and will close) its client.

        Raises:
            ImportError: If the optional ``redis`` package is not installed.
        """
        try:
            from redis.asyncio import Redis
        except ImportError as e:
            msg = "redis package is required for RedisBackend. Install with: pip install redis"
            raise ImportError(msg) from e

        client: Redis[bytes] = Redis.from_url(url, **kwargs)
        instance = cls(client, key_prefix=key_prefix)
        instance._owns_client = True
        return instance

    def _make_key(self, key: str) -> str:
        """Create a prefixed key."""
        return f"{self._key_prefix}:{key}"

    async def get(self, key: str) -> dict[str, Any] | None:
        """Get the current state for a key."""
        try:
            full_key = self._make_key(key)
            data = await self._client.get(full_key)
            if data is None:
                return None
            result: dict[str, Any] = json.loads(data)
            return result
        except Exception as e:
            raise BackendError(f"Failed to get key {key}", original_error=e) from e

    async def set(self, key: str, value: dict[str, Any], *, ttl: float) -> None:
        """Set the state for a key with TTL."""
        try:
            full_key = self._make_key(key)
            data = json.dumps(value)
            # Round the TTL up (+1) so fractional TTLs never expire early.
            await self._client.setex(full_key, int(ttl) + 1, data)
        except Exception as e:
            raise BackendError(f"Failed to set key {key}", original_error=e) from e

    async def delete(self, key: str) -> None:
        """Delete the state for a key."""
        try:
            full_key = self._make_key(key)
            await self._client.delete(full_key)
        except Exception as e:
            raise BackendError(f"Failed to delete key {key}", original_error=e) from e

    async def exists(self, key: str) -> bool:
        """Check if a key exists."""
        try:
            full_key = self._make_key(key)
            result = await self._client.exists(full_key)
            return bool(result)
        except Exception as e:
            raise BackendError(f"Failed to check key {key}", original_error=e) from e

    async def increment(self, key: str, amount: int = 1) -> int:
        """Atomically increment a counter using Redis INCRBY."""
        try:
            full_key = self._make_key(key)
            result = await self._client.incrby(full_key, amount)
            return int(result)
        except Exception as e:
            raise BackendError(f"Failed to increment key {key}", original_error=e) from e

    async def clear(self) -> None:
        """Clear all rate limit data with this prefix."""
        try:
            pattern = f"{self._key_prefix}:*"
            cursor: int = 0
            # SCAN iterates incrementally, avoiding the blocking KEYS command.
            while True:
                cursor, keys = await self._client.scan(cursor, match=pattern, count=100)
                if keys:
                    await self._client.delete(*keys)
                if cursor == 0:
                    break
        except Exception as e:
            raise BackendError("Failed to clear rate limits", original_error=e) from e

    async def close(self) -> None:
        """Close the Redis connection if we own it."""
        if self._owns_client:
            await self._client.aclose()

    async def ping(self) -> bool:
        """Check if Redis is reachable."""
        try:
            await self._client.ping()
            return True
        except Exception:
            return False

    async def get_stats(self) -> dict[str, Any]:
        """Get statistics about the rate limit storage.

        Returns:
            A dict with the total key count under this prefix, Redis memory
            usage (human-readable), and the configured key prefix.
        """
        try:
            pattern = f"{self._key_prefix}:*"
            cursor: int = 0
            count = 0
            while True:
                cursor, keys = await self._client.scan(cursor, match=pattern, count=100)
                count += len(keys)
                if cursor == 0:
                    break

            info = await self._client.info("memory")
            return {
                "total_keys": count,
                "used_memory": info.get("used_memory_human", "unknown"),
                "key_prefix": self._key_prefix,
            }
        except Exception as e:
            raise BackendError("Failed to get stats", original_error=e) from e
|
||||
|
||||
|
||||
# Lua scripts for atomic operations
# Executed server-side (EVAL/EVALSHA) so the read-modify-write cycle of a
# rate-limit check cannot interleave with other clients.

# Sliding-window check backed by a sorted set of request timestamps.
# KEYS[1] = counter key; ARGV = {now, window_size, limit}.
# Returns {1, remaining} when allowed, else {0, 0, retry_after}.
SLIDING_WINDOW_SCRIPT = """
local key = KEYS[1]
local now = tonumber(ARGV[1])
local window_size = tonumber(ARGV[2])
local limit = tonumber(ARGV[3])
local window_start = now - window_size

-- Remove expired entries
redis.call('ZREMRANGEBYSCORE', key, '-inf', window_start)

-- Count current entries
local count = redis.call('ZCARD', key)

if count < limit then
    -- Add new entry
    redis.call('ZADD', key, now, now .. ':' .. math.random())
    redis.call('EXPIRE', key, math.ceil(window_size) + 1)
    return {1, limit - count - 1}
else
    -- Get oldest entry for retry-after calculation
    local oldest = redis.call('ZRANGE', key, 0, 0, 'WITHSCORES')
    local retry_after = 0
    if #oldest > 0 then
        retry_after = oldest[2] + window_size - now
    end
    return {0, 0, retry_after}
end
"""

# Token-bucket check; state is a JSON blob {tokens, last_update}.
# KEYS[1] = bucket key; ARGV = {now, bucket_size, refill_rate, ttl}.
# Returns {allowed, whole_tokens_left, retry_after}.
TOKEN_BUCKET_SCRIPT = """
local key = KEYS[1]
local now = tonumber(ARGV[1])
local bucket_size = tonumber(ARGV[2])
local refill_rate = tonumber(ARGV[3])
local ttl = tonumber(ARGV[4])

local data = redis.call('GET', key)
local tokens, last_update

if data then
    local decoded = cjson.decode(data)
    tokens = decoded.tokens
    last_update = decoded.last_update
else
    tokens = bucket_size
    last_update = now
end

-- Refill tokens
local elapsed = now - last_update
tokens = math.min(bucket_size, tokens + elapsed * refill_rate)

local allowed = 0
local retry_after = 0

if tokens >= 1 then
    tokens = tokens - 1
    allowed = 1
else
    retry_after = (1 - tokens) / refill_rate
end

-- Save state
redis.call('SETEX', key, ttl, cjson.encode({tokens = tokens, last_update = now}))

return {allowed, math.floor(tokens), retry_after}
"""
|
||||
298
fastapi_traffic/backends/sqlite.py
Normal file
298
fastapi_traffic/backends/sqlite.py
Normal file
@@ -0,0 +1,298 @@
|
||||
"""SQLite backend for rate limiting - persistent storage for single-node deployments."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import sqlite3
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
from fastapi_traffic.exceptions import BackendError
|
||||
|
||||
|
||||
class SQLiteBackend(Backend):
    """SQLite-based backend with a shared connection and async support.

    All blocking ``sqlite3`` calls are dispatched to the default thread-pool
    executor so coroutines never block the event loop.  WAL journaling keeps
    concurrent reads cheap; expired rows are purged lazily on read and by a
    periodic background task.
    """

    __slots__ = (
        "_db_path",
        "_connection",
        "_lock",
        "_cleanup_interval",
        "_cleanup_task",
        "_pool_size",
        "_connections",
    )

    def __init__(
        self,
        db_path: str | Path = ":memory:",
        *,
        cleanup_interval: float = 300.0,
        pool_size: int = 5,
    ) -> None:
        """Initialize the SQLite backend.

        Args:
            db_path: Path to SQLite database file or ":memory:" for in-memory.
            cleanup_interval: Interval in seconds for cleaning expired entries.
            pool_size: Number of connections in the pool.
                NOTE(review): pooling is not wired up -- only the single
                shared connection is ever created and ``_connections`` stays
                empty; confirm whether the pool is planned or vestigial.
        """
        self._db_path = str(db_path)
        self._connection: sqlite3.Connection | None = None
        # Serializes read-modify-write cycles (see increment()).
        self._lock = asyncio.Lock()
        self._cleanup_interval = cleanup_interval
        self._cleanup_task: asyncio.Task[None] | None = None
        self._pool_size = pool_size
        self._connections: list[sqlite3.Connection] = []

    async def initialize(self) -> None:
        """Open the database, create tables, and start the cleanup task."""
        await self._ensure_connection()
        await self._create_tables()
        if self._cleanup_task is None:
            self._cleanup_task = asyncio.create_task(self._cleanup_loop())

    async def _ensure_connection(self) -> sqlite3.Connection:
        """Return the shared connection, creating it lazily off-loop."""
        if self._connection is None:
            # get_running_loop(): we are always inside a coroutine here, and
            # get_event_loop() is deprecated for this use since Python 3.10.
            loop = asyncio.get_running_loop()
            self._connection = await loop.run_in_executor(
                None, self._create_connection
            )
        assert self._connection is not None
        return self._connection

    def _create_connection(self) -> sqlite3.Connection:
        """Create a new SQLite connection with optimized settings."""
        conn = sqlite3.connect(
            self._db_path,
            # Executor threads differ from the creating thread, so the
            # same-thread check must be disabled.
            check_same_thread=False,
            # isolation_level=None -> autocommit: each statement commits.
            isolation_level=None,
        )
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA synchronous=NORMAL")
        conn.execute("PRAGMA cache_size=10000")
        conn.execute("PRAGMA temp_store=MEMORY")
        conn.row_factory = sqlite3.Row
        return conn

    async def _create_tables(self) -> None:
        """Create the rate limit tables."""
        conn = await self._ensure_connection()
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self._create_tables_sync, conn)

    def _create_tables_sync(self, conn: sqlite3.Connection) -> None:
        """Synchronously create the table and its expiry index."""
        conn.execute("""
            CREATE TABLE IF NOT EXISTS rate_limits (
                key TEXT PRIMARY KEY,
                data TEXT NOT NULL,
                expires_at REAL NOT NULL
            )
        """)
        conn.execute("""
            CREATE INDEX IF NOT EXISTS idx_expires_at ON rate_limits(expires_at)
        """)

    async def _cleanup_loop(self) -> None:
        """Background task that periodically purges expired entries."""
        while True:
            try:
                await asyncio.sleep(self._cleanup_interval)
                await self._cleanup_expired()
            except asyncio.CancelledError:
                break
            except Exception:
                # Best-effort maintenance: a failed sweep must not kill the
                # task; expired rows are also filtered out on read.
                pass

    async def _cleanup_expired(self) -> None:
        """Remove expired entries.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(
                None,
                lambda: conn.execute(
                    "DELETE FROM rate_limits WHERE expires_at <= ?", (time.time(),)
                ),
            )
        except Exception as e:
            # `from e` preserves the original traceback chain for debugging.
            raise BackendError(
                "Failed to cleanup expired entries", original_error=e
            ) from e

    async def get(self, key: str) -> dict[str, Any] | None:
        """Get the current state for a key.

        Expired rows are deleted eagerly on read and reported as missing.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()

            def _get() -> dict[str, Any] | None:
                cursor = conn.execute(
                    "SELECT data, expires_at FROM rate_limits WHERE key = ?",
                    (key,),
                )
                row = cursor.fetchone()
                if row is None:
                    return None

                expires_at = row["expires_at"]
                if expires_at <= time.time():
                    # Lazy expiry: purge the stale row as part of the read.
                    conn.execute("DELETE FROM rate_limits WHERE key = ?", (key,))
                    return None

                data: dict[str, Any] = json.loads(row["data"])
                return data

            return await loop.run_in_executor(None, _get)
        except Exception as e:
            raise BackendError(f"Failed to get key {key}", original_error=e) from e

    async def set(self, key: str, value: dict[str, Any], *, ttl: float) -> None:
        """Set the state for a key with TTL.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()
            expires_at = time.time() + ttl
            data_json = json.dumps(value)

            def _set() -> None:
                conn.execute(
                    """
                    INSERT OR REPLACE INTO rate_limits (key, data, expires_at)
                    VALUES (?, ?, ?)
                    """,
                    (key, data_json, expires_at),
                )

            await loop.run_in_executor(None, _set)
        except Exception as e:
            raise BackendError(f"Failed to set key {key}", original_error=e) from e

    async def delete(self, key: str) -> None:
        """Delete the state for a key.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(
                None,
                lambda: conn.execute("DELETE FROM rate_limits WHERE key = ?", (key,)),
            )
        except Exception as e:
            raise BackendError(f"Failed to delete key {key}", original_error=e) from e

    async def exists(self, key: str) -> bool:
        """Check if a key exists and is not expired.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()

            def _exists() -> bool:
                cursor = conn.execute(
                    "SELECT 1 FROM rate_limits WHERE key = ? AND expires_at > ?",
                    (key, time.time()),
                )
                return cursor.fetchone() is not None

            return await loop.run_in_executor(None, _exists)
        except Exception as e:
            raise BackendError(f"Failed to check key {key}", original_error=e) from e

    async def increment(self, key: str, amount: int = 1) -> int:
        """Atomically increment the ``count`` field of a key's JSON state.

        The asyncio lock serializes the read-modify-write so concurrent
        coroutines cannot interleave.

        NOTE(review): when the key is missing or expired this returns
        ``amount`` WITHOUT persisting a row, so repeated calls on a fresh key
        keep returning ``amount``; callers appear expected to seed the row
        via set() first -- confirm against the limiter.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        async with self._lock:
            try:
                conn = await self._ensure_connection()
                loop = asyncio.get_running_loop()

                def _increment() -> int:
                    cursor = conn.execute(
                        "SELECT data, expires_at FROM rate_limits WHERE key = ?",
                        (key,),
                    )
                    row = cursor.fetchone()

                    if row is None or row["expires_at"] <= time.time():
                        return amount

                    data: dict[str, Any] = json.loads(row["data"])
                    current = int(data.get("count", 0))
                    new_value = current + amount
                    data["count"] = new_value

                    conn.execute(
                        "UPDATE rate_limits SET data = ? WHERE key = ?",
                        (json.dumps(data), key),
                    )
                    return new_value

                return await loop.run_in_executor(None, _increment)
            except Exception as e:
                raise BackendError(
                    f"Failed to increment key {key}", original_error=e
                ) from e

    async def clear(self) -> None:
        """Clear all rate limit data.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(
                None, lambda: conn.execute("DELETE FROM rate_limits")
            )
        except Exception as e:
            raise BackendError("Failed to clear rate limits", original_error=e) from e

    async def close(self) -> None:
        """Cancel the cleanup task and close all connections."""
        if self._cleanup_task is not None:
            self._cleanup_task.cancel()
            try:
                await self._cleanup_task
            except asyncio.CancelledError:
                pass
            self._cleanup_task = None

        if self._connection is not None:
            self._connection.close()
            self._connection = None

        for conn in self._connections:
            conn.close()
        self._connections.clear()

    async def vacuum(self) -> None:
        """Optimize the database by running VACUUM.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, lambda: conn.execute("VACUUM"))
        except Exception as e:
            raise BackendError("Failed to vacuum database", original_error=e) from e

    async def get_stats(self) -> dict[str, Any]:
        """Get statistics about the rate limit storage.

        Returns:
            Mapping with total/active/expired row counts and the db path.

        Raises:
            BackendError: If the underlying SQLite call fails.
        """
        try:
            conn = await self._ensure_connection()
            loop = asyncio.get_running_loop()

            def _stats() -> dict[str, Any]:
                cursor = conn.execute("SELECT COUNT(*) as total FROM rate_limits")
                total = cursor.fetchone()["total"]

                cursor = conn.execute(
                    "SELECT COUNT(*) as active FROM rate_limits WHERE expires_at > ?",
                    (time.time(),),
                )
                active = cursor.fetchone()["active"]

                return {
                    "total_entries": total,
                    "active_entries": active,
                    "expired_entries": total - active,
                    "db_path": self._db_path,
                }

            return await loop.run_in_executor(None, _stats)
        except Exception as e:
            raise BackendError("Failed to get stats", original_error=e) from e
16
fastapi_traffic/core/__init__.py
Normal file
16
fastapi_traffic/core/__init__.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Core rate limiting components."""
|
||||
|
||||
from fastapi_traffic.core.algorithms import Algorithm
|
||||
from fastapi_traffic.core.config import RateLimitConfig
|
||||
from fastapi_traffic.core.decorator import rate_limit
|
||||
from fastapi_traffic.core.limiter import RateLimiter
|
||||
from fastapi_traffic.core.models import RateLimitInfo, RateLimitResult
|
||||
|
||||
__all__ = [
|
||||
"Algorithm",
|
||||
"RateLimitConfig",
|
||||
"rate_limit",
|
||||
"RateLimiter",
|
||||
"RateLimitInfo",
|
||||
"RateLimitResult",
|
||||
]
|
||||
466
fastapi_traffic/core/algorithms.py
Normal file
466
fastapi_traffic/core/algorithms.py
Normal file
@@ -0,0 +1,466 @@
|
||||
"""Rate limiting algorithms implementation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import time
|
||||
from abc import ABC, abstractmethod
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi_traffic.core.models import RateLimitInfo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
|
||||
|
||||
class Algorithm(str, Enum):
    """Available rate limiting algorithms.

    Subclassing ``str`` lets members round-trip through JSON / config files
    and compare equal to their plain-string values.
    """

    # Bucket of tokens refilled continuously; allows bursts up to capacity.
    TOKEN_BUCKET = "token_bucket"
    # Per-request timestamp log; precise but memory intensive.
    SLIDING_WINDOW = "sliding_window"
    # Single counter per aligned window; simple and efficient.
    FIXED_WINDOW = "fixed_window"
    # Constant drain rate; smooths out bursts.
    LEAKY_BUCKET = "leaky_bucket"
    # Weighted blend of current/previous window counters.
    SLIDING_WINDOW_COUNTER = "sliding_window_counter"
|
||||
|
||||
class BaseAlgorithm(ABC):
    """Base class for rate limiting algorithms.

    Concrete subclasses persist per-key state through ``backend`` and must
    implement :meth:`check`, :meth:`reset`, and :meth:`get_state`.
    """

    __slots__ = ("limit", "window_size", "backend", "burst_size")

    def __init__(
        self,
        limit: int,
        window_size: float,
        backend: Backend,
        *,
        burst_size: int | None = None,
    ) -> None:
        """Store shared algorithm parameters.

        Args:
            limit: Maximum number of requests allowed per window.
            window_size: Window length in seconds.
            backend: Storage backend used to persist per-key state.
            burst_size: Optional burst capacity; falls back to ``limit``
                when ``None`` (or 0, due to the ``or``).
        """
        self.limit = limit
        self.window_size = window_size
        self.backend = backend
        self.burst_size = burst_size or limit

    @abstractmethod
    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        """Check if request is allowed and update state."""
        ...

    @abstractmethod
    async def reset(self, key: str) -> None:
        """Reset the rate limit state for a key."""
        ...

    @abstractmethod
    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Get current state without consuming a token."""
        ...
|
||||
|
||||
class TokenBucketAlgorithm(BaseAlgorithm):
    """Token bucket algorithm - allows bursts up to bucket capacity.

    Tokens accrue continuously at ``limit / window_size`` per second up to
    ``burst_size``; each granted request consumes one token.
    """

    __slots__ = ("refill_rate",)

    def __init__(
        self,
        limit: int,
        window_size: float,
        backend: Backend,
        *,
        burst_size: int | None = None,
    ) -> None:
        super().__init__(limit, window_size, backend, burst_size=burst_size)
        # Tokens restored per second so that `limit` accrue per window.
        self.refill_rate = limit / window_size

    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        timestamp = time.time()
        stored = await self.backend.get(key)

        if stored is None:
            # Fresh bucket: start full and immediately consume one token.
            available = float(self.burst_size - 1)
            await self.backend.set(
                key,
                {"tokens": available, "last_update": timestamp},
                ttl=self.window_size * 2,
            )
            return True, RateLimitInfo(
                limit=self.limit,
                remaining=int(available),
                reset_at=timestamp + self.window_size,
                window_size=self.window_size,
            )

        available = float(stored.get("tokens", self.burst_size))
        updated_at = float(stored.get("last_update", timestamp))

        # Credit tokens accrued since the last observation, capped at capacity.
        available = min(
            self.burst_size,
            available + (timestamp - updated_at) * self.refill_rate,
        )

        wait = None
        granted = available >= 1
        if granted:
            available -= 1
        else:
            wait = (1 - available) / self.refill_rate

        await self.backend.set(
            key,
            {"tokens": available, "last_update": timestamp},
            ttl=self.window_size * 2,
        )

        return granted, RateLimitInfo(
            limit=self.limit,
            remaining=int(available),
            reset_at=timestamp + (self.burst_size - available) / self.refill_rate,
            retry_after=wait,
            window_size=self.window_size,
        )

    async def reset(self, key: str) -> None:
        """Discard the stored bucket for *key*."""
        await self.backend.delete(key)

    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Report the bucket state without consuming a token."""
        timestamp = time.time()
        stored = await self.backend.get(key)

        if stored is None:
            return None

        available = float(stored.get("tokens", self.burst_size))
        updated_at = float(stored.get("last_update", timestamp))
        available = min(
            self.burst_size,
            available + (timestamp - updated_at) * self.refill_rate,
        )

        return RateLimitInfo(
            limit=self.limit,
            remaining=int(available),
            reset_at=timestamp + (self.burst_size - available) / self.refill_rate,
            window_size=self.window_size,
        )
||||
|
||||
|
||||
class SlidingWindowAlgorithm(BaseAlgorithm):
    """Sliding window log algorithm - precise but memory intensive.

    Records every request timestamp inside the window; exact accounting at
    the cost of O(limit) storage per key.
    """

    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        moment = time.time()
        cutoff = moment - self.window_size
        stored = await self.backend.get(key)

        log: list[float] = []
        if stored is not None:
            # Keep only entries still inside the sliding window.
            for raw in stored.get("timestamps", []):
                stamp = float(raw)
                if stamp > cutoff:
                    log.append(stamp)

        wait = None
        granted = len(log) < self.limit
        if granted:
            log.append(moment)
        else:
            earliest = min(log) if log else moment
            wait = earliest + self.window_size - moment

        await self.backend.set(
            key,
            {"timestamps": log},
            ttl=self.window_size * 2,
        )

        left = max(0, self.limit - len(log))
        expiry = (min(log) if log else moment) + self.window_size

        return granted, RateLimitInfo(
            limit=self.limit,
            remaining=left,
            reset_at=expiry,
            retry_after=wait,
            window_size=self.window_size,
        )

    async def reset(self, key: str) -> None:
        """Drop every recorded timestamp for *key*."""
        await self.backend.delete(key)

    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Inspect the window without recording a request."""
        moment = time.time()
        cutoff = moment - self.window_size
        stored = await self.backend.get(key)

        if stored is None:
            return None

        log = [float(raw) for raw in stored.get("timestamps", []) if float(raw) > cutoff]
        left = max(0, self.limit - len(log))
        expiry = (min(log) if log else moment) + self.window_size

        return RateLimitInfo(
            limit=self.limit,
            remaining=left,
            reset_at=expiry,
            window_size=self.window_size,
        )
||||
|
||||
|
||||
class FixedWindowAlgorithm(BaseAlgorithm):
    """Fixed window algorithm - simple and efficient.

    Counts requests inside epoch-aligned windows; the counter resets at
    each window boundary.
    """

    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        moment = time.time()
        # Epoch-aligned boundaries of the current window.
        origin = (moment // self.window_size) * self.window_size
        deadline = origin + self.window_size
        stored = await self.backend.get(key)

        used = 0
        if stored is not None and float(stored.get("window_start", 0)) == origin:
            # Only a counter from the *current* window carries over.
            used = int(stored.get("count", 0))

        wait = None
        granted = used < self.limit
        if granted:
            used += 1
        else:
            wait = deadline - moment

        await self.backend.set(
            key,
            {"count": used, "window_start": origin},
            ttl=self.window_size * 2,
        )

        return granted, RateLimitInfo(
            limit=self.limit,
            remaining=max(0, self.limit - used),
            reset_at=deadline,
            retry_after=wait,
            window_size=self.window_size,
        )

    async def reset(self, key: str) -> None:
        """Discard the stored counter for *key*."""
        await self.backend.delete(key)

    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Inspect the current window without consuming a slot."""
        moment = time.time()
        origin = (moment // self.window_size) * self.window_size
        deadline = origin + self.window_size
        stored = await self.backend.get(key)

        if stored is None:
            return None

        used = 0
        if float(stored.get("window_start", 0)) == origin:
            used = int(stored.get("count", 0))

        return RateLimitInfo(
            limit=self.limit,
            remaining=max(0, self.limit - used),
            reset_at=deadline,
            window_size=self.window_size,
        )
||||
|
||||
|
||||
class LeakyBucketAlgorithm(BaseAlgorithm):
    """Leaky bucket algorithm - smooths out bursts.

    Each request adds one unit of "water"; the bucket drains at a constant
    ``limit / window_size`` per second and overflows above ``burst_size``.
    """

    __slots__ = ("leak_rate",)

    def __init__(
        self,
        limit: int,
        window_size: float,
        backend: Backend,
        *,
        burst_size: int | None = None,
    ) -> None:
        super().__init__(limit, window_size, backend, burst_size=burst_size)
        # Units drained per second.
        self.leak_rate = limit / window_size

    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        moment = time.time()
        stored = await self.backend.get(key)

        level = 0.0
        if stored is not None:
            level = float(stored.get("water_level", 0))
            since = moment - float(stored.get("last_update", moment))
            # Drain since the last observation; never below empty.
            level = max(0, level - since * self.leak_rate)

        wait = None
        granted = level < self.burst_size
        if granted:
            level += 1
        else:
            wait = (level - self.burst_size + 1) / self.leak_rate

        await self.backend.set(
            key,
            {"water_level": level, "last_update": moment},
            ttl=self.window_size * 2,
        )

        return granted, RateLimitInfo(
            limit=self.limit,
            remaining=max(0, int(self.burst_size - level)),
            reset_at=moment + level / self.leak_rate,
            retry_after=wait,
            window_size=self.window_size,
        )

    async def reset(self, key: str) -> None:
        """Empty the bucket for *key*."""
        await self.backend.delete(key)

    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Inspect the bucket without adding water."""
        moment = time.time()
        stored = await self.backend.get(key)

        if stored is None:
            return None

        level = float(stored.get("water_level", 0))
        since = moment - float(stored.get("last_update", moment))
        level = max(0, level - since * self.leak_rate)

        return RateLimitInfo(
            limit=self.limit,
            remaining=max(0, int(self.burst_size - level)),
            reset_at=moment + level / self.leak_rate,
            window_size=self.window_size,
        )
||||
|
||||
|
||||
class SlidingWindowCounterAlgorithm(BaseAlgorithm):
    """Sliding window counter - balance between precision and memory.

    Keeps one counter for the current fixed window and one for the previous
    window; the effective request count is the current counter plus the
    previous counter weighted by how much of the previous window still
    overlaps the sliding window.
    """

    async def check(self, key: str) -> tuple[bool, RateLimitInfo]:
        """Check if request is allowed and update state."""
        now = time.time()
        # Epoch-aligned boundary of the current fixed window.
        current_window = (now // self.window_size) * self.window_size
        previous_window = current_window - self.window_size
        # Fraction of the current window already elapsed, in [0, 1).
        window_progress = (now - current_window) / self.window_size

        state = await self.backend.get(key)

        prev_count = 0
        curr_count = 0
        if state is not None:
            prev_count = int(state.get("prev_count", 0))
            curr_count = int(state.get("curr_count", 0))
            stored_window = float(state.get("current_window", 0))

            # Rotate counters if the stored snapshot is stale.
            if stored_window < previous_window:
                # Two or more windows old: nothing carries over.
                prev_count = 0
                curr_count = 0
            elif stored_window == previous_window:
                # Exactly one window old: its "current" becomes "previous".
                prev_count = curr_count
                curr_count = 0

        # Weighted estimate of requests inside the sliding window.
        weighted_count = prev_count * (1 - window_progress) + curr_count

        if weighted_count < self.limit:
            curr_count += 1
            allowed = True
            retry_after = None
        else:
            allowed = False
            # Wait until the current window ends and the weights shift.
            retry_after = self.window_size * (1 - window_progress)

        await self.backend.set(
            key,
            {
                "prev_count": prev_count,
                "curr_count": curr_count,
                "current_window": current_window,
            },
            # 3x window so state survives a full counter rotation.
            ttl=self.window_size * 3,
        )

        remaining = max(0, int(self.limit - weighted_count))
        reset_at = current_window + self.window_size

        return allowed, RateLimitInfo(
            limit=self.limit,
            remaining=remaining,
            reset_at=reset_at,
            retry_after=retry_after,
            window_size=self.window_size,
        )

    async def reset(self, key: str) -> None:
        """Reset the rate limit state for a key."""
        await self.backend.delete(key)

    async def get_state(self, key: str) -> RateLimitInfo | None:
        """Get current state without consuming a request slot."""
        now = time.time()
        current_window = (now // self.window_size) * self.window_size
        previous_window = current_window - self.window_size
        window_progress = (now - current_window) / self.window_size

        state = await self.backend.get(key)

        if state is None:
            return None

        prev_count = int(state.get("prev_count", 0))
        curr_count = int(state.get("curr_count", 0))
        stored_window = float(state.get("current_window", 0))

        # Same rotation logic as check(), applied read-only.
        if stored_window < previous_window:
            prev_count = 0
            curr_count = 0
        elif stored_window == previous_window:
            prev_count = curr_count
            curr_count = 0

        weighted_count = prev_count * (1 - window_progress) + curr_count
        remaining = max(0, int(self.limit - weighted_count))
        reset_at = current_window + self.window_size

        return RateLimitInfo(
            limit=self.limit,
            remaining=remaining,
            reset_at=reset_at,
            window_size=self.window_size,
        )
||||
|
||||
|
||||
def get_algorithm(
    algorithm: Algorithm,
    limit: int,
    window_size: float,
    backend: Backend,
    *,
    burst_size: int | None = None,
) -> BaseAlgorithm:
    """Factory function to create algorithm instances.

    Raises:
        ValueError: If *algorithm* has no registered implementation.
    """
    # Dispatch table from enum member to implementing class.
    registry: dict[Algorithm, type[BaseAlgorithm]] = {
        Algorithm.TOKEN_BUCKET: TokenBucketAlgorithm,
        Algorithm.SLIDING_WINDOW: SlidingWindowAlgorithm,
        Algorithm.FIXED_WINDOW: FixedWindowAlgorithm,
        Algorithm.LEAKY_BUCKET: LeakyBucketAlgorithm,
        Algorithm.SLIDING_WINDOW_COUNTER: SlidingWindowCounterAlgorithm,
    }

    chosen = registry.get(algorithm)
    if chosen is None:
        msg = f"Unknown algorithm: {algorithm}"
        raise ValueError(msg)

    return chosen(limit, window_size, backend, burst_size=burst_size)
||||
81
fastapi_traffic/core/config.py
Normal file
81
fastapi_traffic/core/config.py
Normal file
@@ -0,0 +1,81 @@
|
||||
"""Configuration for rate limiting."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
|
||||
from fastapi_traffic.core.algorithms import Algorithm
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from starlette.requests import Request
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
|
||||
|
||||
KeyExtractor = Callable[["Request"], str]
|
||||
|
||||
|
||||
def default_key_extractor(request: Request) -> str:
    """Extract client IP as the default rate limit key.

    Proxy headers take precedence over the socket peer address.
    """
    proxied = request.headers.get("X-Forwarded-For")
    if proxied:
        # First hop in the chain is the original client.
        first_hop = proxied.split(",")[0]
        return first_hop.strip()

    direct = request.headers.get("X-Real-IP")
    if direct:
        return direct

    client = request.client
    if client:
        return client.host

    return "unknown"
||||
|
||||
|
||||
@dataclass(slots=True)
class RateLimitConfig:
    """Configuration for a rate limit rule.

    Raises:
        ValueError: From ``__post_init__`` when ``limit``, ``window_size``,
            or ``cost`` is not positive.
    """

    # Maximum number of requests allowed per window.
    limit: int
    # Window length in seconds.
    window_size: float = 60.0
    # Rate limiting algorithm applied to this rule.
    algorithm: Algorithm = Algorithm.SLIDING_WINDOW_COUNTER
    # Prefix prepended to the storage key.
    key_prefix: str = "ratelimit"
    # Callable that derives the client identifier from the request.
    key_extractor: KeyExtractor = field(default=default_key_extractor)
    # Burst capacity for bucket-style algorithms; None -> algorithm default.
    burst_size: int | None = None
    # Whether to attach rate-limit headers to responses.
    include_headers: bool = True
    # Message returned when the limit is exceeded.
    error_message: str = "Rate limit exceeded"
    # HTTP status returned when the limit is exceeded.
    status_code: int = 429
    # Skip limiting (fail open) when the backend errors.
    skip_on_error: bool = False
    # Cost charged per request against the limit.
    cost: int = 1
    # Predicate exempting a request from this rule when it returns True.
    exempt_when: Callable[[Request], bool] | None = None
    # Callback invoked when a request is blocked.
    on_blocked: Callable[[Request, Any], Any] | None = None

    def __post_init__(self) -> None:
        """Validate that numeric settings are strictly positive."""
        if self.limit <= 0:
            msg = "limit must be positive"
            raise ValueError(msg)
        if self.window_size <= 0:
            msg = "window_size must be positive"
            raise ValueError(msg)
        if self.cost <= 0:
            msg = "cost must be positive"
            raise ValueError(msg)
||||
|
||||
|
||||
@dataclass(slots=True)
class GlobalConfig:
    """Global configuration for the rate limiter.

    Per-rule :class:`RateLimitConfig` values take precedence; these act as
    library-wide defaults and global switches.
    """

    # Storage backend shared by all rules; None until configured.
    backend: Backend | None = None
    # Master on/off switch for rate limiting.
    enabled: bool = True
    # Default request limit when a rule does not specify one.
    default_limit: int = 100
    # Default window length in seconds.
    default_window_size: float = 60.0
    # Default algorithm for new rules.
    default_algorithm: Algorithm = Algorithm.SLIDING_WINDOW_COUNTER
    # Prefix for all storage keys.
    key_prefix: str = "fastapi_traffic"
    # Whether to attach rate-limit headers to responses.
    include_headers: bool = True
    # Message returned when a limit is exceeded.
    error_message: str = "Rate limit exceeded. Please try again later."
    # HTTP status returned when a limit is exceeded.
    status_code: int = 429
    # Fail open when the backend errors.
    skip_on_error: bool = False
    # Client IPs that bypass rate limiting entirely.
    exempt_ips: set[str] = field(default_factory=set)
    # Request paths that bypass rate limiting entirely.
    exempt_paths: set[str] = field(default_factory=set)
    # Prefix used for the emitted rate-limit headers.
    headers_prefix: str = "X-RateLimit"
||||
259
fastapi_traffic/core/decorator.py
Normal file
259
fastapi_traffic/core/decorator.py
Normal file
@@ -0,0 +1,259 @@
|
||||
"""Rate limit decorator for FastAPI endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
from typing import TYPE_CHECKING, Any, Callable, TypeVar, overload
|
||||
|
||||
from fastapi_traffic.core.algorithms import Algorithm
|
||||
from fastapi_traffic.core.config import KeyExtractor, RateLimitConfig, default_key_extractor
|
||||
from fastapi_traffic.core.limiter import get_limiter
|
||||
from fastapi_traffic.exceptions import RateLimitExceeded
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
|
||||
|
||||
@overload
def rate_limit(
    limit: int,
    *,
    window_size: float = ...,
    algorithm: Algorithm = ...,
    key_prefix: str = ...,
    key_extractor: KeyExtractor = ...,
    burst_size: int | None = ...,
    include_headers: bool = ...,
    error_message: str = ...,
    status_code: int = ...,
    skip_on_error: bool = ...,
    cost: int = ...,
    exempt_when: Callable[[Request], bool] | None = ...,
    on_blocked: Callable[[Request, Any], Any] | None = ...,
) -> Callable[[F], F]: ...


@overload
def rate_limit(
    limit: int,
    window_size: float,
    /,
) -> Callable[[F], F]: ...


def rate_limit(
    limit: int,
    window_size: float = 60.0,
    *,
    algorithm: Algorithm = Algorithm.SLIDING_WINDOW_COUNTER,
    key_prefix: str = "ratelimit",
    key_extractor: KeyExtractor = default_key_extractor,
    burst_size: int | None = None,
    include_headers: bool = True,
    error_message: str = "Rate limit exceeded",
    status_code: int = 429,
    skip_on_error: bool = False,
    cost: int = 1,
    exempt_when: Callable[[Request], bool] | None = None,
    on_blocked: Callable[[Request, Any], Any] | None = None,
) -> Callable[[F], F]:
    """Decorator to apply rate limiting to a FastAPI endpoint.

    Args:
        limit: Maximum number of requests allowed in the window.
        window_size: Time window in seconds.
        algorithm: Rate limiting algorithm to use.
        key_prefix: Prefix for the rate limit key.
        key_extractor: Function to extract the client identifier from request.
        burst_size: Maximum burst size (for token bucket/leaky bucket).
        include_headers: Whether to include rate limit headers in response.
        error_message: Error message when rate limit is exceeded.
        status_code: HTTP status code when rate limit is exceeded.
        skip_on_error: Skip rate limiting if backend errors occur.
        cost: Cost of each request (default 1).
        exempt_when: Function to determine if request should be exempt.
        on_blocked: Callback when a request is blocked.

    Returns:
        Decorated function with rate limiting applied.

    Example:
        ```python
        from fastapi import FastAPI
        from fastapi_traffic import rate_limit


        app = FastAPI()


        @app.get("/api/resource")
        @rate_limit(100, 60)  # 100 requests per minute
        async def get_resource():
            return {"message": "Hello"}
        ```
    """
    config = RateLimitConfig(
        limit=limit,
        window_size=window_size,
        algorithm=algorithm,
        key_prefix=key_prefix,
        key_extractor=key_extractor,
        burst_size=burst_size,
        include_headers=include_headers,
        error_message=error_message,
        status_code=status_code,
        skip_on_error=skip_on_error,
        cost=cost,
        exempt_when=exempt_when,
        on_blocked=on_blocked,
    )

    def decorator(func: F) -> F:
        @functools.wraps(func)
        async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
            request = _extract_request(args, kwargs)
            if request is None:
                # No Request in the call: nothing to key the limit on,
                # so run the endpoint unlimited.
                return await func(*args, **kwargs)

            limiter = get_limiter()
            # NOTE(review): presumably raises RateLimitExceeded when the
            # limit is exhausted -- confirm against RateLimiter.hit.
            result = await limiter.hit(request, config)

            response = await func(*args, **kwargs)

            if config.include_headers and hasattr(response, "headers"):
                for key, value in result.info.to_headers().items():
                    response.headers[key] = value

            return response

        @functools.wraps(func)
        def sync_wrapper(*args: Any, **kwargs: Any) -> Any:
            import asyncio

            # BUGFIX: asyncio.get_event_loop().run_until_complete() is
            # deprecated without a running loop since Python 3.10 and fails
            # in worker threads on newer versions; asyncio.run() creates and
            # owns its own loop, which is the supported way to drive a
            # coroutine from sync code.
            return asyncio.run(async_wrapper(*args, **kwargs))

        if _is_coroutine_function(func):
            return async_wrapper  # type: ignore[return-value]
        return sync_wrapper  # type: ignore[return-value]

    return decorator
||||
|
||||
|
||||
def _extract_request(
    args: tuple[Any, ...],
    kwargs: dict[str, Any],
) -> Request | None:
    """Locate a starlette ``Request`` among a call's arguments.

    Scans positional arguments first, then keyword values, and returns the
    first ``Request`` instance found. Returns ``None`` when no request is
    present, in which case the caller skips rate limiting.

    Args:
        args: Positional arguments passed to the wrapped endpoint.
        kwargs: Keyword arguments passed to the wrapped endpoint.

    Returns:
        The first ``Request`` found, or ``None``.
    """
    from starlette.requests import Request

    # A single pass over positionals then keyword values suffices: the
    # original extra lookup of kwargs["request"] was redundant, since the
    # scan of kwargs.values() already covers that entry.
    for candidate in (*args, *kwargs.values()):
        if isinstance(candidate, Request):
            return candidate

    return None
|
||||
|
||||
|
||||
def _is_coroutine_function(func: Callable[..., Any]) -> bool:
|
||||
"""Check if a function is a coroutine function."""
|
||||
import asyncio
|
||||
import inspect
|
||||
|
||||
return asyncio.iscoroutinefunction(func) or inspect.iscoroutinefunction(func)
|
||||
|
||||
|
||||
class RateLimitDependency:
    """FastAPI dependency that enforces a rate limit on each call.

    Instances are async callables suitable for ``Depends``; every call
    consumes one hit via the default limiter and returns the resulting
    limit info (the limiter's ``hit`` raises when the limit is exceeded).

    Example:
        ```python
        from fastapi import FastAPI, Depends
        from fastapi_traffic import RateLimitDependency

        app = FastAPI()
        rate_limiter = RateLimitDependency(limit=100, window_size=60)

        @app.get("/api/resource")
        async def get_resource(rate_limit_info = Depends(rate_limiter)):
            return {"remaining": rate_limit_info.remaining}
        ```
    """

    __slots__ = ("_config",)

    def __init__(
        self,
        limit: int,
        window_size: float = 60.0,
        *,
        algorithm: Algorithm = Algorithm.SLIDING_WINDOW_COUNTER,
        key_prefix: str = "ratelimit",
        key_extractor: KeyExtractor = default_key_extractor,
        burst_size: int | None = None,
        error_message: str = "Rate limit exceeded",
        status_code: int = 429,
        skip_on_error: bool = False,
        cost: int = 1,
        exempt_when: Callable[[Request], bool] | None = None,
    ) -> None:
        # Collect every option once, then freeze it into a single config.
        # Header emission is always enabled for dependency-based usage.
        options = {
            "limit": limit,
            "window_size": window_size,
            "algorithm": algorithm,
            "key_prefix": key_prefix,
            "key_extractor": key_extractor,
            "burst_size": burst_size,
            "include_headers": True,
            "error_message": error_message,
            "status_code": status_code,
            "skip_on_error": skip_on_error,
            "cost": cost,
            "exempt_when": exempt_when,
        }
        self._config = RateLimitConfig(**options)

    async def __call__(self, request: Request) -> Any:
        """Consume one hit for *request* and return the limit info."""
        outcome = await get_limiter().hit(request, self._config)
        return outcome.info
|
||||
|
||||
|
||||
def create_rate_limit_response(
    exc: RateLimitExceeded,
    *,
    include_headers: bool = True,
) -> Response:
    """Build the 429 JSON response for a ``RateLimitExceeded`` error.

    Args:
        exc: The RateLimitExceeded exception carrying message/retry data.
        include_headers: Attach the X-RateLimit-* headers when the
            exception carries limit info.

    Returns:
        A JSONResponse describing the rate limit violation.
    """
    from starlette.responses import JSONResponse

    info = exc.limit_info
    if include_headers and info is not None:
        response_headers = info.to_headers()
    else:
        response_headers = {}

    body = {
        "detail": exc.message,
        "retry_after": exc.retry_after,
    }
    return JSONResponse(status_code=429, content=body, headers=response_headers)
|
||||
301
fastapi_traffic/core/limiter.py
Normal file
301
fastapi_traffic/core/limiter.py
Normal file
@@ -0,0 +1,301 @@
|
||||
"""Core rate limiter implementation."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.core.algorithms import Algorithm, BaseAlgorithm, get_algorithm
|
||||
from fastapi_traffic.core.config import GlobalConfig, RateLimitConfig
|
||||
from fastapi_traffic.core.models import RateLimitInfo, RateLimitResult
|
||||
from fastapi_traffic.exceptions import BackendError, RateLimitExceeded
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from starlette.requests import Request
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RateLimiter:
    """Main rate limiter class that manages rate limiting logic.

    Wraps a storage ``Backend`` and a per-parameter-set cache of algorithm
    instances, and exposes ``check``/``hit``/``reset``/``get_state``
    operations keyed by request + per-endpoint config.
    """

    __slots__ = ("_config", "_backend", "_algorithms", "_initialized")

    def __init__(
        self,
        backend: Backend | None = None,
        *,
        config: GlobalConfig | None = None,
    ) -> None:
        """Initialize the rate limiter.

        Args:
            backend: Storage backend for rate limit data.
            config: Global configuration options.
        """
        self._config = config or GlobalConfig()
        # Backend precedence: explicit argument, then the backend named in
        # the global config, then an in-process MemoryBackend default.
        self._backend = backend or self._config.backend or MemoryBackend()
        # Algorithm instances cached by algorithm/limit/window/burst key.
        self._algorithms: dict[str, BaseAlgorithm] = {}
        self._initialized = False

    @property
    def backend(self) -> Backend:
        """Get the storage backend."""
        return self._backend

    @property
    def config(self) -> GlobalConfig:
        """Get the global configuration."""
        return self._config

    async def initialize(self) -> None:
        """Initialize the rate limiter and backend (idempotent)."""
        if self._initialized:
            return

        # Both hooks are optional on backends, hence the hasattr probes.
        if hasattr(self._backend, "initialize"):
            await self._backend.initialize()  # type: ignore[union-attr]

        if hasattr(self._backend, "start_cleanup"):
            await self._backend.start_cleanup()  # type: ignore[union-attr]

        self._initialized = True

    async def close(self) -> None:
        """Close the rate limiter and cleanup resources."""
        await self._backend.close()
        self._algorithms.clear()
        self._initialized = False

    def _get_algorithm(
        self,
        limit: int,
        window_size: float,
        algorithm: Algorithm,
        burst_size: int | None = None,
    ) -> BaseAlgorithm:
        """Get or create an algorithm instance (cached per parameter set)."""
        cache_key = f"{algorithm.value}:{limit}:{window_size}:{burst_size}"
        if cache_key not in self._algorithms:
            self._algorithms[cache_key] = get_algorithm(
                algorithm,
                limit,
                window_size,
                self._backend,
                burst_size=burst_size,
            )
        return self._algorithms[cache_key]

    def _build_key(
        self,
        request: Request,
        config: RateLimitConfig,
        identifier: str | None = None,
    ) -> str:
        """Build the rate limit key for a request.

        Counts are scoped per method + path + client identity, prefixed by
        both the global and the per-endpoint key prefixes.
        """
        if identifier:
            client_id = identifier
        else:
            client_id = config.key_extractor(request)

        path = request.url.path
        method = request.method

        return f"{self._config.key_prefix}:{config.key_prefix}:{method}:{path}:{client_id}"

    def _is_exempt(self, request: Request, config: RateLimitConfig) -> bool:
        """Check if the request is exempt from rate limiting."""
        # Global kill-switch: a disabled limiter exempts everything.
        if not self._config.enabled:
            return True

        if config.exempt_when is not None and config.exempt_when(request):
            return True

        # NOTE(review): exempt_ips is matched against the key_extractor
        # output, which may not be a raw IP when a custom extractor is
        # configured — confirm the intended semantics.
        client_ip = config.key_extractor(request)
        if client_ip in self._config.exempt_ips:
            return True

        if request.url.path in self._config.exempt_paths:
            return True

        return False

    async def check(
        self,
        request: Request,
        config: RateLimitConfig,
        *,
        identifier: str | None = None,
        cost: int | None = None,
    ) -> RateLimitResult:
        """Check if a request is allowed under the rate limit.

        Args:
            request: The incoming request.
            config: Rate limit configuration for this endpoint.
            identifier: Optional custom identifier override.
            cost: Optional cost override for this request.

        Returns:
            RateLimitResult with allowed status and limit info.
        """
        # Lazy setup so callers need not await initialize() themselves.
        if not self._initialized:
            await self.initialize()

        # Exempt requests report a full, untouched window.
        if self._is_exempt(request, config):
            return RateLimitResult(
                allowed=True,
                info=RateLimitInfo(
                    limit=config.limit,
                    remaining=config.limit,
                    reset_at=0,
                    window_size=config.window_size,
                ),
                key="exempt",
            )

        key = self._build_key(request, config, identifier)
        # NOTE(review): `or` means an explicit cost of 0 falls back to
        # config.cost — confirm whether zero-cost checks should be allowed.
        actual_cost = cost or config.cost

        try:
            algorithm = self._get_algorithm(
                config.limit,
                config.window_size,
                config.algorithm,
                config.burst_size,
            )

            # Cost > 1 is charged as repeated unit hits. NOTE(review): when
            # a later hit is rejected, the earlier hits stay consumed
            # (partial charge) — confirm this is intended.
            info: RateLimitInfo | None = None
            for _ in range(actual_cost):
                allowed, info = await algorithm.check(key)
                if not allowed:
                    return RateLimitResult(allowed=False, info=info, key=key)

            # Only reachable when actual_cost is 0: synthesize a full window.
            if info is None:
                info = RateLimitInfo(
                    limit=config.limit,
                    remaining=config.limit,
                    reset_at=0,
                    window_size=config.window_size,
                )
            return RateLimitResult(allowed=True, info=info, key=key)

        except BackendError as e:
            logger.warning("Backend error during rate limit check: %s", e)
            # Fail open when configured to tolerate backend outages.
            if config.skip_on_error:
                return RateLimitResult(
                    allowed=True,
                    info=RateLimitInfo(
                        limit=config.limit,
                        remaining=config.limit,
                        reset_at=0,
                        window_size=config.window_size,
                    ),
                    key=key,
                )
            raise

    async def hit(
        self,
        request: Request,
        config: RateLimitConfig,
        *,
        identifier: str | None = None,
        cost: int | None = None,
    ) -> RateLimitResult:
        """Check rate limit and raise exception if exceeded.

        Args:
            request: The incoming request.
            config: Rate limit configuration for this endpoint.
            identifier: Optional custom identifier override.
            cost: Optional cost override for this request.

        Returns:
            RateLimitResult if allowed.

        Raises:
            RateLimitExceeded: If the rate limit is exceeded.
        """
        result = await self.check(request, config, identifier=identifier, cost=cost)

        if not result.allowed:
            # Fire the per-endpoint callback before raising.
            if config.on_blocked is not None:
                config.on_blocked(request, result)

            raise RateLimitExceeded(
                config.error_message,
                retry_after=result.info.retry_after,
                limit_info=result.info,
            )

        return result

    async def reset(
        self,
        request: Request,
        config: RateLimitConfig,
        *,
        identifier: str | None = None,
    ) -> None:
        """Reset the rate limit for a specific key.

        Args:
            request: The request to reset limits for.
            config: Rate limit configuration.
            identifier: Optional custom identifier override.
        """
        key = self._build_key(request, config, identifier)
        algorithm = self._get_algorithm(
            config.limit,
            config.window_size,
            config.algorithm,
            config.burst_size,
        )
        await algorithm.reset(key)

    async def get_state(
        self,
        request: Request,
        config: RateLimitConfig,
        *,
        identifier: str | None = None,
    ) -> RateLimitInfo | None:
        """Get the current rate limit state without consuming a token.

        Args:
            request: The request to check.
            config: Rate limit configuration.
            identifier: Optional custom identifier override.

        Returns:
            Current rate limit info or None if no state exists.
        """
        key = self._build_key(request, config, identifier)
        algorithm = self._get_algorithm(
            config.limit,
            config.window_size,
            config.algorithm,
            config.burst_size,
        )
        return await algorithm.get_state(key)
|
||||
|
||||
|
||||
_default_limiter: RateLimiter | None = None
|
||||
|
||||
|
||||
def get_limiter() -> RateLimiter:
    """Return the process-wide default limiter, creating it lazily."""
    global _default_limiter
    limiter = _default_limiter
    if limiter is None:
        # First use: build a limiter with all defaults and memoize it.
        limiter = RateLimiter()
        _default_limiter = limiter
    return limiter
|
||||
|
||||
|
||||
def set_limiter(limiter: RateLimiter) -> None:
    """Replace the process-wide default limiter used by get_limiter()."""
    global _default_limiter
    _default_limiter = limiter
|
||||
89
fastapi_traffic/core/models.py
Normal file
89
fastapi_traffic/core/models.py
Normal file
@@ -0,0 +1,89 @@
|
||||
"""Data models for rate limiting."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
|
||||
class KeyType(str, Enum):
    """Type of key extraction for rate limiting.

    Inherits ``str`` so members compare equal to their string values and
    serialize naturally into storage keys and config files.
    """

    IP = "ip"
    USER = "user"
    API_KEY = "api_key"
    ENDPOINT = "endpoint"
    CUSTOM = "custom"
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class RateLimitInfo:
|
||||
"""Information about the current rate limit state."""
|
||||
|
||||
limit: int
|
||||
remaining: int
|
||||
reset_at: float
|
||||
retry_after: float | None = None
|
||||
window_size: float = 60.0
|
||||
|
||||
def to_headers(self) -> dict[str, str]:
|
||||
"""Convert rate limit info to HTTP headers."""
|
||||
headers: dict[str, str] = {
|
||||
"X-RateLimit-Limit": str(self.limit),
|
||||
"X-RateLimit-Remaining": str(max(0, self.remaining)),
|
||||
"X-RateLimit-Reset": str(int(self.reset_at)),
|
||||
}
|
||||
if self.retry_after is not None:
|
||||
headers["Retry-After"] = str(int(self.retry_after))
|
||||
return headers
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class RateLimitResult:
    """Result of a rate limit check."""

    # Whether the request was allowed under the limit.
    allowed: bool
    # Snapshot of the limit state at decision time.
    info: RateLimitInfo
    # Storage key the decision was made against.
    key: str
|
||||
|
||||
|
||||
@dataclass(slots=True)
class TokenBucketState:
    """State for token bucket algorithm."""

    # Tokens currently available in the bucket (fractional during refill).
    tokens: float
    # Timestamp of the last state update, used to compute refill.
    last_update: float
|
||||
|
||||
|
||||
@dataclass(slots=True)
class SlidingWindowState:
    """State for sliding window algorithm."""

    # Per-request timestamps inside the current window.
    timestamps: list[float] = field(default_factory=list)
    # Cached request count for the window.
    count: int = 0
|
||||
|
||||
|
||||
@dataclass(slots=True)
class FixedWindowState:
    """State for fixed window algorithm."""

    # Requests counted in the current window.
    count: int
    # Timestamp at which the current window opened.
    window_start: float
|
||||
|
||||
|
||||
@dataclass(slots=True)
class LeakyBucketState:
    """State for leaky bucket algorithm."""

    # Current fill level of the bucket.
    water_level: float
    # Timestamp of the last update, used to compute leakage since then.
    last_update: float
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class BackendRecord:
    """Generic record stored in backends."""

    # Storage key the record belongs to.
    key: str
    # Algorithm-specific payload (serialized state fields).
    data: dict[str, Any]
    # Absolute expiry timestamp after which the record may be purged.
    expires_at: float
|
||||
50
fastapi_traffic/exceptions.py
Normal file
50
fastapi_traffic/exceptions.py
Normal file
@@ -0,0 +1,50 @@
|
||||
"""Custom exceptions for FastAPI Traffic rate limiter."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fastapi_traffic.core.models import RateLimitInfo
|
||||
|
||||
|
||||
class FastAPITrafficError(Exception):
    """Base exception for all FastAPI Traffic errors.

    Catching this single type covers every error raised by the library.
    """
    # The docstring is a sufficient class body; the redundant `pass` was removed.
|
||||
|
||||
|
||||
class RateLimitExceeded(FastAPITrafficError):
    """Raised when a rate limit has been exceeded.

    Carries the user-facing message plus optional retry/limit metadata so
    handlers can build an HTTP 429 response from the exception alone.
    """

    def __init__(
        self,
        message: str = "Rate limit exceeded",
        *,
        retry_after: float | None = None,
        limit_info: RateLimitInfo | None = None,
    ) -> None:
        super().__init__(message)
        # Expose every input as an attribute for response builders.
        self.limit_info = limit_info
        self.retry_after = retry_after
        self.message = message
|
||||
|
||||
|
||||
class BackendError(FastAPITrafficError):
    """Raised when a backend operation fails.

    ``original_error`` preserves the underlying exception for diagnostics.
    """

    def __init__(
        self,
        message: str = "Backend operation failed",
        *,
        original_error: Exception | None = None,
    ) -> None:
        super().__init__(message)
        # Expose both inputs as attributes for callers and log handlers.
        self.original_error = original_error
        self.message = message
|
||||
|
||||
|
||||
class ConfigurationError(FastAPITrafficError):
    """Raised when there is a configuration error.

    Signals invalid or conflicting rate-limit configuration values.
    """
    # The docstring is a sufficient class body; the redundant `pass` was removed.
|
||||
184
fastapi_traffic/middleware.py
Normal file
184
fastapi_traffic/middleware.py
Normal file
@@ -0,0 +1,184 @@
|
||||
"""Rate limiting middleware for Starlette/FastAPI applications."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Awaitable, Callable
|
||||
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.core.algorithms import Algorithm
|
||||
from fastapi_traffic.core.config import GlobalConfig, RateLimitConfig, default_key_extractor
|
||||
from fastapi_traffic.core.limiter import RateLimiter
|
||||
from fastapi_traffic.exceptions import RateLimitExceeded
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import Response
|
||||
from starlette.types import ASGIApp
|
||||
|
||||
from fastapi_traffic.backends.base import Backend
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RateLimitMiddleware(BaseHTTPMiddleware):
    """Middleware for global rate limiting across all endpoints.

    Owns its own RateLimiter instance (separate from the module-level
    default limiter used by decorators/dependencies).
    """

    def __init__(
        self,
        app: ASGIApp,
        *,
        limit: int = 100,
        window_size: float = 60.0,
        algorithm: Algorithm = Algorithm.SLIDING_WINDOW_COUNTER,
        backend: Backend | None = None,
        key_prefix: str = "middleware",
        include_headers: bool = True,
        error_message: str = "Rate limit exceeded. Please try again later.",
        status_code: int = 429,
        skip_on_error: bool = False,
        exempt_paths: set[str] | None = None,
        exempt_ips: set[str] | None = None,
        key_extractor: Callable[[Request], str] = default_key_extractor,
    ) -> None:
        """Initialize the rate limit middleware.

        Args:
            app: The ASGI application.
            limit: Maximum requests per window.
            window_size: Time window in seconds.
            algorithm: Rate limiting algorithm.
            backend: Storage backend (defaults to MemoryBackend).
            key_prefix: Prefix for rate limit keys.
            include_headers: Include rate limit headers in response.
            error_message: Error message when rate limited.
            status_code: HTTP status code when rate limited.
            skip_on_error: Skip rate limiting on backend errors.
            exempt_paths: Paths to exempt from rate limiting.
            exempt_ips: IP addresses to exempt from rate limiting.
            key_extractor: Function to extract client identifier.
        """
        super().__init__(app)

        self._backend = backend or MemoryBackend()
        # Per-request (endpoint-level) configuration shared by all routes.
        self._config = RateLimitConfig(
            limit=limit,
            window_size=window_size,
            algorithm=algorithm,
            key_prefix=key_prefix,
            key_extractor=key_extractor,
            include_headers=include_headers,
            error_message=error_message,
            status_code=status_code,
            skip_on_error=skip_on_error,
        )

        # Global configuration carries the exemption lists and backend.
        global_config = GlobalConfig(
            backend=self._backend,
            exempt_paths=exempt_paths or set(),
            exempt_ips=exempt_ips or set(),
        )

        self._limiter = RateLimiter(self._backend, config=global_config)
        self._include_headers = include_headers
        self._error_message = error_message
        self._status_code = status_code

    async def dispatch(
        self,
        request: Request,
        call_next: Callable[[Request], Awaitable[Response]],
    ) -> Response:
        """Process the request with rate limiting."""
        try:
            # check() returns a result rather than raising on rejection.
            result = await self._limiter.check(request, self._config)

            if not result.allowed:
                return self._create_rate_limit_response(result)

            response = await call_next(request)

            # Attach X-RateLimit-* headers to successful responses too.
            if self._include_headers:
                for key, value in result.info.to_headers().items():
                    response.headers[key] = value

            return response

        # NOTE(review): check() raises BackendError, not RateLimitExceeded
        # (only hit() raises it) — this arm looks unreachable as written;
        # confirm whether it is defensive or a leftover from using hit().
        except RateLimitExceeded as exc:
            return JSONResponse(
                status_code=self._status_code,
                content={
                    "detail": exc.message,
                    "retry_after": exc.retry_after,
                },
                headers=exc.limit_info.to_headers() if exc.limit_info else {},
            )

        except Exception as e:
            logger.exception("Error in rate limit middleware: %s", e)
            # Fail open only when explicitly configured to.
            if self._config.skip_on_error:
                return await call_next(request)
            raise

    def _create_rate_limit_response(self, result: object) -> JSONResponse:
        """Create a rate limit exceeded response."""
        from fastapi_traffic.core.models import RateLimitResult

        # Typed as `object` so the local import stays inside the method;
        # degrade gracefully if something else is ever passed in.
        if isinstance(result, RateLimitResult):
            headers = result.info.to_headers()
            retry_after = result.info.retry_after
        else:
            headers = {}
            retry_after = None

        return JSONResponse(
            status_code=self._status_code,
            content={
                "detail": self._error_message,
                "retry_after": retry_after,
            },
            headers=headers,
        )
|
||||
|
||||
|
||||
class SlidingWindowMiddleware(RateLimitMiddleware):
    """Convenience middleware using sliding window algorithm."""

    def __init__(
        self,
        app: ASGIApp,
        *,
        limit: int = 100,
        window_size: float = 60.0,
        **kwargs: object,
    ) -> None:
        # Pin the algorithm; every other option passes through to the base.
        super().__init__(
            app,
            limit=limit,
            window_size=window_size,
            algorithm=Algorithm.SLIDING_WINDOW,
            **kwargs,  # type: ignore[arg-type]
        )
|
||||
|
||||
|
||||
class TokenBucketMiddleware(RateLimitMiddleware):
    """Convenience middleware using token bucket algorithm."""

    def __init__(
        self,
        app: ASGIApp,
        *,
        limit: int = 100,
        window_size: float = 60.0,
        **kwargs: object,
    ) -> None:
        # Pin the algorithm; every other option passes through to the base.
        super().__init__(
            app,
            limit=limit,
            window_size=window_size,
            algorithm=Algorithm.TOKEN_BUCKET,
            **kwargs,  # type: ignore[arg-type]
        )
|
||||
0
fastapi_traffic/py.typed
Normal file
0
fastapi_traffic/py.typed
Normal file
6
main.py
Normal file
6
main.py
Normal file
@@ -0,0 +1,6 @@
|
||||
def main():
    """Print the package greeting to stdout."""
    greeting = "Hello from fastapi-traffic!"
    print(greeting)


if __name__ == "__main__":
    main()
|
||||
105
pyproject.toml
Normal file
105
pyproject.toml
Normal file
@@ -0,0 +1,105 @@
|
||||
[project]
|
||||
name = "fastapi-traffic"
|
||||
version = "0.1.0"
|
||||
description = "Production-grade rate limiting for FastAPI with multiple algorithms and backends"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
license = { text = "MIT" }
|
||||
authors = [{ name = "zanewalker", email="bereckobrian@gmail.com" }]
|
||||
keywords = ["fastapi", "rate-limit", "rate-limiting", "throttle", "api", "redis", "sqlite"]
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Framework :: FastAPI",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Typing :: Typed",
|
||||
]
|
||||
dependencies = [
|
||||
"starlette>=0.27.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
redis = ["redis>=5.0.0"]
|
||||
fastapi = ["fastapi>=0.100.0"]
|
||||
all = ["redis>=5.0.0", "fastapi>=0.100.0"]
|
||||
dev = [
|
||||
"pytest>=8.0.0",
|
||||
"pytest-asyncio>=0.23.0",
|
||||
"pytest-cov>=4.0.0",
|
||||
"httpx>=0.27.0",
|
||||
"ruff>=0.4.0",
|
||||
"pyright>=1.1.350",
|
||||
"redis>=5.0.0",
|
||||
"fastapi>=0.100.0",
|
||||
"uvicorn>=0.29.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://github.com/fastapi-traffic/fastapi-traffic"
|
||||
Documentation = "https://github.com/fastapi-traffic/fastapi-traffic#readme"
|
||||
Repository = "https://github.com/fastapi-traffic/fastapi-traffic"
|
||||
Issues = "https://github.com/fastapi-traffic/fastapi-traffic/issues"
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["fastapi_traffic"]
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py310"
|
||||
line-length = 88
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"E", # pycodestyle errors
|
||||
"W", # pycodestyle warnings
|
||||
"F", # Pyflakes
|
||||
"I", # isort
|
||||
"B", # flake8-bugbear
|
||||
"C4", # flake8-comprehensions
|
||||
"UP", # pyupgrade
|
||||
"ARG", # flake8-unused-arguments
|
||||
"SIM", # flake8-simplify
|
||||
"TCH", # flake8-type-checking
|
||||
"PTH", # flake8-use-pathlib
|
||||
"RUF", # Ruff-specific rules
|
||||
]
|
||||
ignore = [
|
||||
"E501", # line too long (handled by formatter)
|
||||
"B008", # do not perform function calls in argument defaults
|
||||
"B904", # raise without from inside except
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
known-first-party = ["fastapi_traffic"]
|
||||
|
||||
[tool.pyright]
|
||||
pythonVersion = "3.10"
|
||||
typeCheckingMode = "strict"
|
||||
reportMissingTypeStubs = false
|
||||
reportUnknownMemberType = false
|
||||
reportUnknownArgumentType = false
|
||||
reportUnknownVariableType = false
|
||||
reportUnknownParameterType = false
|
||||
reportMissingImports = false
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
addopts = "-v --tb=short"
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"fastapi>=0.128.0",
|
||||
"pytest>=9.0.2",
|
||||
"uvicorn>=0.40.0",
|
||||
]
|
||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests for fastapi-traffic."""
|
||||
211
tests/test_algorithms.py
Normal file
211
tests/test_algorithms.py
Normal file
@@ -0,0 +1,211 @@
|
||||
"""Tests for rate limiting algorithms."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import pytest
|
||||
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.core.algorithms import (
|
||||
Algorithm,
|
||||
FixedWindowAlgorithm,
|
||||
LeakyBucketAlgorithm,
|
||||
SlidingWindowAlgorithm,
|
||||
SlidingWindowCounterAlgorithm,
|
||||
TokenBucketAlgorithm,
|
||||
get_algorithm,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
async def backend() -> AsyncGenerator[MemoryBackend, None]:
    """Yield a fresh in-memory backend and close it after the test."""
    backend = MemoryBackend()
    yield backend
    # Teardown: runs after the dependent test finishes.
    await backend.close()
|
||||
|
||||
|
||||
class TestTokenBucketAlgorithm:
    """Tests for TokenBucketAlgorithm."""

    async def test_allows_requests_within_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests within limit are allowed."""
        algo = TokenBucketAlgorithm(10, 60.0, backend)

        # Alternates between two keys, so each bucket receives only 5 of
        # the 10 hits — well under the per-key limit of 10.
        for i in range(10):
            allowed, _ = await algo.check(f"key_{i % 2}")
            assert allowed, f"Request {i} should be allowed"

    async def test_blocks_requests_over_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests over limit are blocked."""
        algo = TokenBucketAlgorithm(3, 60.0, backend)

        # Exhaust the bucket.
        for _ in range(3):
            allowed, _ = await algo.check("test_key")
            assert allowed

        # The 4th hit must be rejected and advertise a positive retry delay.
        allowed, info = await algo.check("test_key")
        assert not allowed
        assert info.retry_after is not None
        assert info.retry_after > 0

    async def test_reset(self, backend: MemoryBackend) -> None:
        """Test reset functionality."""
        algo = TokenBucketAlgorithm(3, 60.0, backend)

        for _ in range(3):
            await algo.check("test_key")

        # Bucket is now exhausted.
        allowed, _ = await algo.check("test_key")
        assert not allowed

        # Reset should refill the bucket for this key.
        await algo.reset("test_key")

        allowed, _ = await algo.check("test_key")
        assert allowed
|
||||
|
||||
|
||||
class TestSlidingWindowAlgorithm:
    """Tests for SlidingWindowAlgorithm."""

    async def test_allows_requests_within_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests within limit are allowed."""
        algo = SlidingWindowAlgorithm(5, 60.0, backend)

        # Exactly at the limit — all 5 hits should pass.
        for _ in range(5):
            allowed, _ = await algo.check("test_key")
            assert allowed

    async def test_blocks_requests_over_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests over limit are blocked."""
        algo = SlidingWindowAlgorithm(3, 60.0, backend)

        for _ in range(3):
            allowed, _ = await algo.check("test_key")
            assert allowed

        # The request beyond the limit must be rejected with 0 remaining.
        allowed, info = await algo.check("test_key")
        assert not allowed
        assert info.remaining == 0
|
||||
|
||||
|
||||
class TestFixedWindowAlgorithm:
    """Tests for FixedWindowAlgorithm."""

    async def test_allows_requests_within_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests within limit are allowed."""
        algo = FixedWindowAlgorithm(5, 60.0, backend)

        # Exactly at the limit — all 5 hits should pass.
        for _ in range(5):
            allowed, _ = await algo.check("test_key")
            assert allowed

    async def test_blocks_requests_over_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests over limit are blocked."""
        algo = FixedWindowAlgorithm(3, 60.0, backend)

        for _ in range(3):
            allowed, _ = await algo.check("test_key")
            assert allowed

        # The request beyond the limit must be rejected with 0 remaining.
        allowed, info = await algo.check("test_key")
        assert not allowed
        assert info.remaining == 0
|
||||
|
||||
|
||||
class TestLeakyBucketAlgorithm:
    """Tests for LeakyBucketAlgorithm."""

    async def test_allows_requests_within_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests within limit are allowed."""
        algo = LeakyBucketAlgorithm(5, 60.0, backend)

        # Exactly at capacity — all 5 hits should pass.
        for _ in range(5):
            allowed, _ = await algo.check("test_key")
            assert allowed

    async def test_blocks_requests_over_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Test that requests over limit are blocked."""
        algo = LeakyBucketAlgorithm(3, 60.0, backend)

        for _ in range(3):
            allowed, _ = await algo.check("test_key")
            assert allowed

        # The overflowing request must be rejected.
        allowed, _ = await algo.check("test_key")
        assert not allowed
|
||||
|
||||
|
||||
class TestSlidingWindowCounterAlgorithm:
    """Behavioural tests for the sliding-window-counter algorithm."""

    async def test_allows_requests_within_limit(
        self, backend: MemoryBackend
    ) -> None:
        """Every request up to the configured limit passes."""
        algo = SlidingWindowCounterAlgorithm(5, 60.0, backend)

        results = [await algo.check("test_key") for _ in range(5)]
        assert all(ok for ok, _ in results)

    async def test_blocks_requests_over_limit(
        self, backend: MemoryBackend
    ) -> None:
        """A request arriving after the limit is used up is denied."""
        limit = 3
        algo = SlidingWindowCounterAlgorithm(limit, 60.0, backend)

        # Use up the entire allowance.
        for _ in range(limit):
            ok, _ = await algo.check("test_key")
            assert ok

        ok, _ = await algo.check("test_key")
        assert not ok
|
||||
|
||||
|
||||
class TestGetAlgorithm:
    """Tests that get_algorithm maps each Algorithm member to its class."""

    async def test_get_token_bucket(self, backend: MemoryBackend) -> None:
        """TOKEN_BUCKET yields a TokenBucketAlgorithm instance."""
        created = get_algorithm(Algorithm.TOKEN_BUCKET, 10, 60.0, backend)
        assert isinstance(created, TokenBucketAlgorithm)

    async def test_get_sliding_window(self, backend: MemoryBackend) -> None:
        """SLIDING_WINDOW yields a SlidingWindowAlgorithm instance."""
        created = get_algorithm(Algorithm.SLIDING_WINDOW, 10, 60.0, backend)
        assert isinstance(created, SlidingWindowAlgorithm)

    async def test_get_fixed_window(self, backend: MemoryBackend) -> None:
        """FIXED_WINDOW yields a FixedWindowAlgorithm instance."""
        created = get_algorithm(Algorithm.FIXED_WINDOW, 10, 60.0, backend)
        assert isinstance(created, FixedWindowAlgorithm)

    async def test_get_leaky_bucket(self, backend: MemoryBackend) -> None:
        """LEAKY_BUCKET yields a LeakyBucketAlgorithm instance."""
        created = get_algorithm(Algorithm.LEAKY_BUCKET, 10, 60.0, backend)
        assert isinstance(created, LeakyBucketAlgorithm)

    async def test_get_sliding_window_counter(
        self, backend: MemoryBackend
    ) -> None:
        """SLIDING_WINDOW_COUNTER yields a SlidingWindowCounterAlgorithm."""
        created = get_algorithm(
            Algorithm.SLIDING_WINDOW_COUNTER, 10, 60.0, backend
        )
        assert isinstance(created, SlidingWindowCounterAlgorithm)
|
||||
143
tests/test_backends.py
Normal file
143
tests/test_backends.py
Normal file
@@ -0,0 +1,143 @@
|
||||
"""Tests for rate limit backends."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import AsyncGenerator
|
||||
|
||||
import pytest
|
||||
|
||||
from fastapi_traffic.backends.memory import MemoryBackend
|
||||
from fastapi_traffic.backends.sqlite import SQLiteBackend
|
||||
|
||||
|
||||
class TestMemoryBackend:
    """Behavioural tests for the in-process MemoryBackend."""

    @pytest.fixture
    async def backend(self) -> AsyncGenerator[MemoryBackend, None]:
        """Provide a small memory backend, closed once the test finishes."""
        store = MemoryBackend(max_size=100, cleanup_interval=1.0)
        yield store
        await store.close()

    async def test_set_and_get(self, backend: MemoryBackend) -> None:
        """A stored entry can be read back intact."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        stored = await backend.get("test_key")
        assert stored is not None
        assert stored["count"] == 5

    async def test_get_nonexistent(self, backend: MemoryBackend) -> None:
        """Reading an unknown key yields None."""
        assert await backend.get("nonexistent") is None

    async def test_delete(self, backend: MemoryBackend) -> None:
        """A deleted key is no longer readable."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        await backend.delete("test_key")
        assert await backend.get("test_key") is None

    async def test_exists(self, backend: MemoryBackend) -> None:
        """exists() flips from False to True once the key is written."""
        assert not await backend.exists("test_key")
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        assert await backend.exists("test_key")

    async def test_increment(self, backend: MemoryBackend) -> None:
        """increment() adds the delta and returns the new total."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        assert await backend.increment("test_key", 3) == 8

    async def test_clear(self, backend: MemoryBackend) -> None:
        """clear() removes every stored key."""
        for name, value in (("key1", {"count": 1}), ("key2", {"count": 2})):
            await backend.set(name, value, ttl=60.0)
        await backend.clear()
        for name in ("key1", "key2"):
            assert not await backend.exists(name)

    async def test_ttl_expiration(self, backend: MemoryBackend) -> None:
        """An entry becomes unreadable once its TTL has elapsed."""
        await backend.set("test_key", {"count": 5}, ttl=0.1)
        await asyncio.sleep(0.2)
        assert await backend.get("test_key") is None

    async def test_lru_eviction(self) -> None:
        """Writing past max_size evicts the least recently used entry."""
        store = MemoryBackend(max_size=3)
        try:
            for index in range(1, 5):
                await store.set(f"key{index}", {"v": index}, ttl=60.0)

            # "key1" was the oldest untouched entry, so it is the one evicted.
            assert not await store.exists("key1")
            for index in range(2, 5):
                assert await store.exists(f"key{index}")
        finally:
            await store.close()
|
||||
|
||||
|
||||
class TestSQLiteBackend:
    """Behavioural tests for the SQLite-backed storage."""

    @pytest.fixture
    async def backend(self) -> AsyncGenerator[SQLiteBackend, None]:
        """Provide an initialized in-memory SQLite backend, closed afterwards."""
        store = SQLiteBackend(":memory:", cleanup_interval=1.0)
        await store.initialize()
        yield store
        await store.close()

    async def test_set_and_get(self, backend: SQLiteBackend) -> None:
        """A stored entry can be read back intact."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        stored = await backend.get("test_key")
        assert stored is not None
        assert stored["count"] == 5

    async def test_get_nonexistent(self, backend: SQLiteBackend) -> None:
        """Reading an unknown key yields None."""
        assert await backend.get("nonexistent") is None

    async def test_delete(self, backend: SQLiteBackend) -> None:
        """A deleted key is no longer readable."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        await backend.delete("test_key")
        assert await backend.get("test_key") is None

    async def test_exists(self, backend: SQLiteBackend) -> None:
        """exists() flips from False to True once the key is written."""
        assert not await backend.exists("test_key")
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        assert await backend.exists("test_key")

    async def test_increment(self, backend: SQLiteBackend) -> None:
        """increment() adds the delta and returns the new total."""
        await backend.set("test_key", {"count": 5}, ttl=60.0)
        assert await backend.increment("test_key", 3) == 8

    async def test_clear(self, backend: SQLiteBackend) -> None:
        """clear() removes every stored key."""
        for name, value in (("key1", {"count": 1}), ("key2", {"count": 2})):
            await backend.set(name, value, ttl=60.0)
        await backend.clear()
        for name in ("key1", "key2"):
            assert not await backend.exists(name)

    async def test_get_stats(self, backend: SQLiteBackend) -> None:
        """get_stats() reports total and active entry counts."""
        await backend.set("key1", {"count": 1}, ttl=60.0)
        await backend.set("key2", {"count": 2}, ttl=60.0)
        stats = await backend.get_stats()
        assert stats["total_entries"] == 2
        assert stats["active_entries"] == 2
|
||||
696
uv.lock
generated
Normal file
696
uv.lock
generated
Normal file
@@ -0,0 +1,696 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.10"
|
||||
|
||||
[[package]]
|
||||
name = "annotated-doc"
|
||||
version = "0.0.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
version = "0.7.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.12.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "idna" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-timeout"
|
||||
version = "5.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "backports-asyncio-runner"
|
||||
version = "1.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2026.1.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "click"
|
||||
version = "8.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.13.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
toml = [
|
||||
{ name = "tomli", marker = "python_full_version <= '3.11'" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.128.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi-traffic"
|
||||
version = "0.1.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "starlette" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
all = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "redis" },
|
||||
]
|
||||
dev = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "httpx" },
|
||||
{ name = "pyright" },
|
||||
{ name = "pytest" },
|
||||
{ name = "pytest-asyncio" },
|
||||
{ name = "pytest-cov" },
|
||||
{ name = "redis" },
|
||||
{ name = "ruff" },
|
||||
{ name = "uvicorn" },
|
||||
]
|
||||
fastapi = [
|
||||
{ name = "fastapi" },
|
||||
]
|
||||
redis = [
|
||||
{ name = "redis" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "pytest" },
|
||||
{ name = "uvicorn" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "fastapi", marker = "extra == 'all'", specifier = ">=0.100.0" },
|
||||
{ name = "fastapi", marker = "extra == 'dev'", specifier = ">=0.100.0" },
|
||||
{ name = "fastapi", marker = "extra == 'fastapi'", specifier = ">=0.100.0" },
|
||||
{ name = "httpx", marker = "extra == 'dev'", specifier = ">=0.27.0" },
|
||||
{ name = "pyright", marker = "extra == 'dev'", specifier = ">=1.1.350" },
|
||||
{ name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0.0" },
|
||||
{ name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23.0" },
|
||||
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0.0" },
|
||||
{ name = "redis", marker = "extra == 'all'", specifier = ">=5.0.0" },
|
||||
{ name = "redis", marker = "extra == 'dev'", specifier = ">=5.0.0" },
|
||||
{ name = "redis", marker = "extra == 'redis'", specifier = ">=5.0.0" },
|
||||
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.4.0" },
|
||||
{ name = "starlette", specifier = ">=0.27.0" },
|
||||
{ name = "uvicorn", marker = "extra == 'dev'", specifier = ">=0.29.0" },
|
||||
]
|
||||
provides-extras = ["redis", "fastapi", "all", "dev"]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "fastapi", specifier = ">=0.128.0" },
|
||||
{ name = "pytest", specifier = ">=9.0.2" },
|
||||
{ name = "uvicorn", specifier = ">=0.40.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.16.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "certifi" },
|
||||
{ name = "httpcore" },
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodeenv"
|
||||
version = "1.10.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "25.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.12.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-types" },
|
||||
{ name = "pydantic-core" },
|
||||
{ name = "typing-extensions" },
|
||||
{ name = "typing-inspection" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-core"
|
||||
version = "2.41.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/90/32c9941e728d564b411d574d8ee0cf09b12ec978cb22b294995bae5549a5/pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146", size = 2107298, upload-time = "2025-11-04T13:39:04.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/a8/61c96a77fe28993d9a6fb0f4127e05430a267b235a124545d79fea46dd65/pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2", size = 1901475, upload-time = "2025-11-04T13:39:06.055Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/b6/338abf60225acc18cdc08b4faef592d0310923d19a87fba1faf05af5346e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97", size = 1918815, upload-time = "2025-11-04T13:39:10.41Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/1c/2ed0433e682983d8e8cba9c8d8ef274d4791ec6a6f24c58935b90e780e0a/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9", size = 2065567, upload-time = "2025-11-04T13:39:12.244Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/24/cf84974ee7d6eae06b9e63289b7b8f6549d416b5c199ca2d7ce13bbcf619/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52", size = 2230442, upload-time = "2025-11-04T13:39:13.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/21/4e287865504b3edc0136c89c9c09431be326168b1eb7841911cbc877a995/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941", size = 2350956, upload-time = "2025-11-04T13:39:15.889Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/76/7727ef2ffa4b62fcab916686a68a0426b9b790139720e1934e8ba797e238/pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a", size = 2068253, upload-time = "2025-11-04T13:39:17.403Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/8c/a4abfc79604bcb4c748e18975c44f94f756f08fb04218d5cb87eb0d3a63e/pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c", size = 2177050, upload-time = "2025-11-04T13:39:19.351Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/b1/de2e9a9a79b480f9cb0b6e8b6ba4c50b18d4e89852426364c66aa82bb7b3/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2", size = 2147178, upload-time = "2025-11-04T13:39:21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/c1/dfb33f837a47b20417500efaa0378adc6635b3c79e8369ff7a03c494b4ac/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556", size = 2341833, upload-time = "2025-11-04T13:39:22.606Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/36/00f398642a0f4b815a9a558c4f1dca1b4020a7d49562807d7bc9ff279a6c/pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49", size = 2321156, upload-time = "2025-11-04T13:39:25.843Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/70/cad3acd89fde2010807354d978725ae111ddf6d0ea46d1ea1775b5c1bd0c/pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba", size = 1989378, upload-time = "2025-11-04T13:39:27.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/92/d338652464c6c367e5608e4488201702cd1cbb0f33f7b6a85a60fe5f3720/pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9", size = 2013622, upload-time = "2025-11-04T13:39:29.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/b0/1a2aa41e3b5a4ba11420aba2d091b2d17959c8d1519ece3627c371951e73/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8", size = 2103351, upload-time = "2025-11-04T13:43:02.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/ee/31b1f0020baaf6d091c87900ae05c6aeae101fa4e188e1613c80e4f1ea31/pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a", size = 1925363, upload-time = "2025-11-04T13:43:05.159Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/89/ab8e86208467e467a80deaca4e434adac37b10a9d134cd2f99b28a01e483/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b", size = 2135615, upload-time = "2025-11-04T13:43:08.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/0a/99a53d06dd0348b2008f2f30884b34719c323f16c3be4e6cc1203b74a91d/pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2", size = 2175369, upload-time = "2025-11-04T13:43:12.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/94/30ca3b73c6d485b9bb0bc66e611cff4a7138ff9736b7e66bcf0852151636/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093", size = 2144218, upload-time = "2025-11-04T13:43:15.431Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/57/31b4f8e12680b739a91f472b5671294236b82586889ef764b5fbc6669238/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a", size = 2329951, upload-time = "2025-11-04T13:43:18.062Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/73/3c2c8edef77b8f7310e6fb012dbc4b8551386ed575b9eb6fb2506e28a7eb/pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963", size = 2318428, upload-time = "2025-11-04T13:43:20.679Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/02/8559b1f26ee0d502c74f9cca5c0d2fd97e967e083e006bbbb4e97f3a043a/pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a", size = 2147009, upload-time = "2025-11-04T13:43:23.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyright"
|
||||
version = "1.1.408"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "nodeenv" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "9.0.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "iniconfig" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pygments" },
|
||||
{ name = "tomli", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "1.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" },
|
||||
{ name = "pytest" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
version = "7.0.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "coverage", extra = ["toml"] },
|
||||
{ name = "pluggy" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
version = "7.1.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "async-timeout", marker = "python_full_version < '3.11.3'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = "2026-01-08T19:11:40.069Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.50.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-inspection"
|
||||
version = "0.4.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.40.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
|
||||
]
|
||||
Reference in New Issue
Block a user