Mirror of https://github.com/fsecada01/Pygentic-AI.git (synced 2026-05-13 12:44:59 +00:00)
refactor: extract magic numbers to configuration constants
Created centralized configuration constants in settings/consts.py:

## New Constants

**PDF & Caching**
- PDF_CACHE_TTL_SECONDS = 300
- PDF_GENERATION_TIMEOUT_SECONDS = 60

**SWOT Validation**
- SWOT_MIN_ITEMS_PER_CATEGORY = 2
- SWOT_MAX_ITEMS_PER_CATEGORY = 10
- SWOT_MIN_ANALYSIS_LENGTH = 100
- SWOT_MAX_ANALYSIS_LENGTH = 5000

**Status Updates**
- STATUS_UPDATE_DELAY_MIN_SECONDS = 0
- STATUS_UPDATE_DELAY_MAX_SECONDS = 5
- STATUS_POLL_INTERVAL_SECONDS = 1

**HTTP Timeouts**
- HTTP_REQUEST_TIMEOUT_SECONDS = 30
- HTTP_CONNECT_TIMEOUT_SECONDS = 10

**Input Validation**
- MAX_PRIMARY_ENTITY_LENGTH = 500
- MAX_COMPARISON_ENTITIES_LENGTH = 2000
- MAX_COMPARISON_ENTITIES_COUNT = 10

**Reddit API**
- REDDIT_MAX_SUBREDDITS = 10
- REDDIT_CONCURRENT_THRESHOLD = 3

## Updated Files
- router.py: Use validation length constants
- utils.py: Use status update delay constants
- tools.py: Use HTTP timeout constants
- pdf_cache.py: Use PDF cache TTL constant

## Benefits
- Single source of truth for configuration
- Easy to adjust timeouts and limits
- Better maintainability
- Consistent behavior across modules

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
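One of the listed benefits is that timeouts and limits are now easy to adjust. As a possible follow-up (not part of this commit), the constants could be read from the environment with the committed values as fallbacks, using the same `config` reader that settings/consts.py already uses for SECRET_KEY. The sketch below assumes that reader is python-decouple's `config` (or any compatible helper accepting `default=` and `cast=`):

```python
# Hypothetical sketch, not part of this commit: environment-overridable
# constants with the committed values as defaults. Assumes the `config`
# helper used in settings/consts.py is python-decouple's (or compatible).
from decouple import config

HTTP_REQUEST_TIMEOUT_SECONDS = config(
    "HTTP_REQUEST_TIMEOUT_SECONDS", default=30, cast=int
)
HTTP_CONNECT_TIMEOUT_SECONDS = config(
    "HTTP_CONNECT_TIMEOUT_SECONDS", default=10, cast=int
)
PDF_CACHE_TTL_SECONDS = config("PDF_CACHE_TTL_SECONDS", default=300, cast=int)
```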
pdf_cache.py:

@@ -12,9 +12,10 @@ from typing import Any
 from backend.core.core import SwotAnalysis
 from backend.core.pdf_service import compute_content_hash
 from backend.logger import logger
+from backend.settings.consts import PDF_CACHE_TTL_SECONDS

 # Cache configuration
-CACHE_TTL_SECONDS = 300  # 5 minutes
+CACHE_TTL_SECONDS = PDF_CACHE_TTL_SECONDS  # Imported from settings
 CLEANUP_INTERVAL_SECONDS = 60  # Run cleanup every minute

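The hunk above only swaps the TTL value; the rest of pdf_cache.py is not shown in this diff. For orientation, here is a minimal sketch of a content-hash-keyed TTL cache along the same lines. The names `TTLCache` and `CacheEntry` are illustrative only, not taken from the repository:

```python
import time
from dataclasses import dataclass, field

CACHE_TTL_SECONDS = 300  # the real module now takes this from PDF_CACHE_TTL_SECONDS


@dataclass
class CacheEntry:
    pdf_bytes: bytes
    created_at: float


@dataclass
class TTLCache:
    ttl_seconds: float = CACHE_TTL_SECONDS
    _entries: dict[str, CacheEntry] = field(default_factory=dict)

    def put(self, content_hash: str, pdf_bytes: bytes) -> None:
        # Store the rendered PDF keyed by its content hash.
        self._entries[content_hash] = CacheEntry(pdf_bytes, time.monotonic())

    def get(self, content_hash: str) -> bytes | None:
        entry = self._entries.get(content_hash)
        if entry is None:
            return None
        # Treat anything older than the configured TTL as expired.
        if time.monotonic() - entry.created_at > self.ttl_seconds:
            del self._entries[content_hash]
            return None
        return entry.pdf_bytes
```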
tools.py:

@@ -9,6 +9,10 @@ from backend.core.consts import AI_MODEL
 from backend.core.core import SwotAgentDeps, SwotAnalysis, swot_agent
 from backend.core.utils import report_tool_usage
 from backend.logger import logger
+from backend.settings.consts import (
+    HTTP_CONNECT_TIMEOUT_SECONDS,
+    HTTP_REQUEST_TIMEOUT_SECONDS,
+)
 from backend.utils import get_val, set_event_loop, windows_sys_event_loop_check


@@ -25,8 +29,10 @@ async def fetch_website_content(
     :return: str
     """
     logger.info(f"Fetching website content for: {url}")
-    # Set reasonable timeouts: 10s connect, 30s total
-    timeout = httpx.Timeout(30.0, connect=10.0)
+    # Configure timeouts to prevent hanging on slow responses
+    timeout = httpx.Timeout(
+        HTTP_REQUEST_TIMEOUT_SECONDS, connect=HTTP_CONNECT_TIMEOUT_SECONDS
+    )
     async with httpx.AsyncClient(
         follow_redirects=True, timeout=timeout
     ) as http_client:
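For context on the httpx call above: the positional argument to `httpx.Timeout` is the default applied to the connect, read, write, and pool phases, and `connect=` overrides only the connection phase. A self-contained sketch of the same pattern with the timeout exception handled (the function and constants here are local to this example, not the project's actual `fetch_website_content`):

```python
import asyncio

import httpx

HTTP_REQUEST_TIMEOUT_SECONDS = 30
HTTP_CONNECT_TIMEOUT_SECONDS = 10


async def fetch(url: str) -> str | None:
    # 30s default for read/write/pool phases, 10s for establishing the connection.
    timeout = httpx.Timeout(
        HTTP_REQUEST_TIMEOUT_SECONDS, connect=HTTP_CONNECT_TIMEOUT_SECONDS
    )
    try:
        async with httpx.AsyncClient(
            follow_redirects=True, timeout=timeout
        ) as client:
            response = await client.get(url)
            response.raise_for_status()
            return response.text
    except httpx.TimeoutException:
        # Slow or unresponsive hosts fail fast instead of hanging the agent.
        return None


if __name__ == "__main__":
    print(asyncio.run(fetch("https://example.com")))
```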
settings/consts.py:

@@ -54,3 +54,34 @@ all_dialects = enum.Enum(

 # TODO: include Oracle and MSSQL dialects
 SECRET_KEY: str = config("SECRET_KEY")
+
+# Application Configuration Constants
+# ===================================
+
+# PDF Generation and Caching
+PDF_CACHE_TTL_SECONDS = 300  # 5 minutes
+PDF_GENERATION_TIMEOUT_SECONDS = 60
+
+# SWOT Analysis Validation
+SWOT_MIN_ITEMS_PER_CATEGORY = 2
+SWOT_MAX_ITEMS_PER_CATEGORY = 10
+SWOT_MIN_ANALYSIS_LENGTH = 100
+SWOT_MAX_ANALYSIS_LENGTH = 5000
+
+# Status Updates and Polling
+STATUS_UPDATE_DELAY_MIN_SECONDS = 0
+STATUS_UPDATE_DELAY_MAX_SECONDS = 5
+STATUS_POLL_INTERVAL_SECONDS = 1
+
+# External API Configuration
+HTTP_REQUEST_TIMEOUT_SECONDS = 30
+HTTP_CONNECT_TIMEOUT_SECONDS = 10
+
+# Input Validation
+MAX_PRIMARY_ENTITY_LENGTH = 500
+MAX_COMPARISON_ENTITIES_LENGTH = 2000
+MAX_COMPARISON_ENTITIES_COUNT = 10
+
+# Reddit API Configuration
+REDDIT_MAX_SUBREDDITS = 10
+REDDIT_CONCURRENT_THRESHOLD = 3  # Switch to async when >3 subreddits
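The SWOT validation and Reddit constants are declared here, but their consumers are not part of this diff. Purely as a hypothetical illustration of how the SWOT bounds could be enforced, assuming Pydantic v2 (where `min_length`/`max_length` in `Field` also constrain list lengths); `SwotCategoryExample` and `SwotAnalysisExample` are invented names, not the repository's `SwotAnalysis` model:

```python
# Hypothetical sketch, not part of this commit: wiring the SWOT validation
# constants into a Pydantic v2 model. The real SwotAnalysis lives in
# backend/core/core.py and may be shaped differently.
from pydantic import BaseModel, Field

SWOT_MIN_ITEMS_PER_CATEGORY = 2
SWOT_MAX_ITEMS_PER_CATEGORY = 10
SWOT_MIN_ANALYSIS_LENGTH = 100
SWOT_MAX_ANALYSIS_LENGTH = 5000


class SwotCategoryExample(BaseModel):
    # Each category must stay within the configured item bounds.
    items: list[str] = Field(
        min_length=SWOT_MIN_ITEMS_PER_CATEGORY,
        max_length=SWOT_MAX_ITEMS_PER_CATEGORY,
    )


class SwotAnalysisExample(BaseModel):
    strengths: SwotCategoryExample
    weaknesses: SwotCategoryExample
    opportunities: SwotCategoryExample
    threats: SwotCategoryExample
    # Overall narrative constrained to the configured length window.
    analysis: str = Field(
        min_length=SWOT_MIN_ANALYSIS_LENGTH,
        max_length=SWOT_MAX_ANALYSIS_LENGTH,
    )
```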
router.py:

@@ -11,6 +11,10 @@ from backend.core.pdf_cache import pdf_cache
 from backend.core.pdf_service import generate_swot_pdf
 from backend.logger import logger
 from backend.settings import app_settings
+from backend.settings.consts import (
+    MAX_COMPARISON_ENTITIES_LENGTH,
+    MAX_PRIMARY_ENTITY_LENGTH,
+)
 from backend.site.consts import (
     ANALYSIS_COMPLETE_MESSAGE,
     ANALYZING_MESSAGE,
@@ -45,12 +49,12 @@ class AnalysisInput(BaseModel):
     primary_entity: str = Field(
         ...,
         min_length=1,
-        max_length=500,
+        max_length=MAX_PRIMARY_ENTITY_LENGTH,
         description="Primary entity (company name or URL)",
     )
     comparison_entities: str = Field(
         default="",
-        max_length=2000,
+        max_length=MAX_COMPARISON_ENTITIES_LENGTH,
         description="Comma-separated comparison entities (optional)",
     )

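A short usage sketch of the validation behaviour above. The model is redeclared locally (with the description arguments omitted) only so the snippet stands alone; the error type string assumes Pydantic v2:

```python
from pydantic import BaseModel, Field, ValidationError

MAX_PRIMARY_ENTITY_LENGTH = 500
MAX_COMPARISON_ENTITIES_LENGTH = 2000


class AnalysisInput(BaseModel):
    primary_entity: str = Field(
        ..., min_length=1, max_length=MAX_PRIMARY_ENTITY_LENGTH
    )
    comparison_entities: str = Field(
        default="", max_length=MAX_COMPARISON_ENTITIES_LENGTH
    )


AnalysisInput(primary_entity="https://example.com")  # accepted

try:
    # Rejected: primary_entity exceeds MAX_PRIMARY_ENTITY_LENGTH characters.
    AnalysisInput(primary_entity="x" * (MAX_PRIMARY_ENTITY_LENGTH + 1))
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # "string_too_long" under Pydantic v2
```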
utils.py:

@@ -7,6 +7,10 @@ from loguru import logger

 from backend.core.core import SwotAgentDeps, SwotAnalysis
 from backend.core.tools import run_agent
+from backend.settings.consts import (
+    STATUS_UPDATE_DELAY_MAX_SECONDS,
+    STATUS_UPDATE_DELAY_MIN_SECONDS,
+)
 from backend.site.consts import (
     ANALYSIS_COMPLETE_MESSAGE,
     result_store,
@@ -20,8 +24,11 @@ async def emulate_tool_completion(session_id: str, message: str) -> None:

     Uses asyncio.sleep to avoid blocking the event loop.
     """
-    # Sleep a random amount of time between 0 and 5 seconds (async)
-    await asyncio.sleep(random.randint(0, 5))
+    # Sleep a random amount of time (async)
+    delay = random.randint(
+        STATUS_UPDATE_DELAY_MIN_SECONDS, STATUS_UPDATE_DELAY_MAX_SECONDS
+    )
+    await asyncio.sleep(delay)
     status_store[session_id].append(message)

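STATUS_POLL_INTERVAL_SECONDS is defined in the new constants but its consumer is not shown in this diff. A hypothetical sketch of a poller that could pair with `emulate_tool_completion`; `status_store`, the completion message value, and `wait_for_completion` are stand-ins, not the actual objects in backend.site.consts:

```python
import asyncio

STATUS_POLL_INTERVAL_SECONDS = 1
ANALYSIS_COMPLETE_MESSAGE = "analysis complete"  # placeholder value
status_store: dict[str, list[str]] = {}


async def wait_for_completion(session_id: str, max_polls: int = 120) -> list[str]:
    """Poll the session's status list until the completion message appears."""
    for _ in range(max_polls):
        messages = status_store.get(session_id, [])
        if ANALYSIS_COMPLETE_MESSAGE in messages:
            return messages
        # Re-check on the configured cadence rather than a hard-coded 1 second.
        await asyncio.sleep(STATUS_POLL_INTERVAL_SECONDS)
    raise TimeoutError(f"Session {session_id} did not complete in time")
```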