# reddit_scraper/models.py
"""Pydantic models for API request/response validation."""
from pydantic import BaseModel, Field
from typing import Optional, Dict, Any
class SubredditQuery(BaseModel):
    """Validation model for subreddit scraping queries.

    Constrains the parameters for scraping a subreddit listing: which
    subreddit, how many posts, which time window, and whether/how deep
    to walk comment trees.
    """

    # Bare subreddit name, e.g. "python" — the "r/" prefix is NOT included.
    subreddit: str = Field(..., min_length=1, description="Subreddit name (without 'r/')")
    # Number of posts to retrieve; bounded to 1..100, defaults to 10.
    limit: int = Field(default=10, ge=1, le=100, description="Number of posts to retrieve")
    # Time window used when sorting by "top"; the regex restricts input to
    # the six values the downstream API accepts (hour/day/week/month/year/all).
    time_range: str = Field(default="week", pattern=r"^(hour|day|week|month|year|all)$", description="Time filter for top posts")
    # Maximum comment-tree nesting depth to traverse; bounded to 1..10.
    depth: int = Field(default=3, ge=1, le=10, description="Maximum comment nesting depth")
    # Set to False to skip comment scraping entirely for faster results.
    include_comments: bool = Field(default=True, description="Whether to scrape comments (set False for faster results)")
class PostQuery(BaseModel):
    """Validation model for post scraping queries.

    Identifies a single post to scrape and how deeply to traverse its
    comment tree.
    """

    # Reddit post ID; length is constrained to 6..10 characters
    # (presumably the base-36 ID without any "t3_" prefix — TODO confirm
    # against the scraper that consumes this).
    post_id: str = Field(..., min_length=6, max_length=10, description="Reddit post ID")
    # Maximum comment-tree nesting depth to traverse; bounded to 1..10.
    depth: int = Field(default=3, ge=1, le=10, description="Maximum comment nesting depth")
class CustomQuery(BaseModel):
    """Validation model for custom scraping queries.

    A discriminated request shape: ``type`` selects subreddit- vs
    post-style scraping, and ``target`` carries the corresponding
    identifier. ``limit`` and ``time_range`` only apply when
    ``type == "subreddit"``.
    """

    # Kind of scrape to perform; restricted to "subreddit" or "post".
    type: str = Field(..., pattern=r"^(subreddit|post)$", description="Type of scrape to perform")
    # Subreddit name (when type == "subreddit") or post ID (when type == "post").
    # NOTE(review): unlike SubredditQuery/PostQuery, only non-emptiness is
    # enforced here — no per-type length/format validation.
    target: str = Field(..., min_length=1, description="Target subreddit or post ID")
    # Number of posts to retrieve; bounded to 1..100. Ignored for post scrapes.
    limit: int = Field(default=10, ge=1, le=100, description="Number of posts (for subreddit type)")
    # Time window for "top" sorting; same accepted values as SubredditQuery.
    # Ignored for post scrapes.
    time_range: str = Field(default="week", pattern=r"^(hour|day|week|month|year|all)$", description="Time filter (for subreddit type)")
    # Maximum comment-tree nesting depth to traverse; bounded to 1..10.
    depth: int = Field(default=3, ge=1, le=10, description="Maximum comment nesting depth")
    # Set to False to skip comment scraping entirely for faster results.
    include_comments: bool = Field(default=True, description="Whether to scrape comments (set False for faster results)")
class ErrorResponse(BaseModel):
    """Standard error response model.

    Wraps a human-readable error message for API error payloads.
    """

    # NOTE(review): field name is PascalCase ("Error"), violating PEP 8
    # snake_case. Renaming it would change the serialized JSON key and
    # break existing consumers, so it is left as-is; consider migrating
    # via a pydantic alias in a coordinated change.
    Error: str