diff --git a/src/backend/core/pdf_cache.py b/src/backend/core/pdf_cache.py
index b1674d7..0a518b2 100644
--- a/src/backend/core/pdf_cache.py
+++ b/src/backend/core/pdf_cache.py
@@ -12,9 +12,10 @@ from typing import Any
 from backend.core.core import SwotAnalysis
 from backend.core.pdf_service import compute_content_hash
 from backend.logger import logger
+from backend.settings.consts import PDF_CACHE_TTL_SECONDS
 
 # Cache configuration
-CACHE_TTL_SECONDS = 300  # 5 minutes
+CACHE_TTL_SECONDS = PDF_CACHE_TTL_SECONDS  # Imported from settings
 CLEANUP_INTERVAL_SECONDS = 60  # Run cleanup every minute
 
diff --git a/src/backend/core/tools.py b/src/backend/core/tools.py
index 0e6fabe..6e81104 100644
--- a/src/backend/core/tools.py
+++ b/src/backend/core/tools.py
@@ -9,6 +9,10 @@ from backend.core.consts import AI_MODEL
 from backend.core.core import SwotAgentDeps, SwotAnalysis, swot_agent
 from backend.core.utils import report_tool_usage
 from backend.logger import logger
+from backend.settings.consts import (
+    HTTP_CONNECT_TIMEOUT_SECONDS,
+    HTTP_REQUEST_TIMEOUT_SECONDS,
+)
 from backend.utils import get_val, set_event_loop, windows_sys_event_loop_check
 
 
@@ -25,8 +29,10 @@ async def fetch_website_content(
     :return: str
     """
     logger.info(f"Fetching website content for: {url}")
-    # Set reasonable timeouts: 10s connect, 30s total
-    timeout = httpx.Timeout(30.0, connect=10.0)
+    # Configure timeouts to prevent hanging on slow responses
+    timeout = httpx.Timeout(
+        HTTP_REQUEST_TIMEOUT_SECONDS, connect=HTTP_CONNECT_TIMEOUT_SECONDS
+    )
     async with httpx.AsyncClient(
         follow_redirects=True, timeout=timeout
     ) as http_client:
diff --git a/src/backend/settings/consts.py b/src/backend/settings/consts.py
index deb49db..1bd7068 100644
--- a/src/backend/settings/consts.py
+++ b/src/backend/settings/consts.py
@@ -54,3 +54,34 @@ all_dialects = enum.Enum(
 # TODO: include Oracle and MSSQL dialects
 
 SECRET_KEY: str = config("SECRET_KEY")
+
+# Application Configuration Constants
+# ===================================
+
+# PDF Generation and Caching
+PDF_CACHE_TTL_SECONDS = 300  # 5 minutes
+PDF_GENERATION_TIMEOUT_SECONDS = 60
+
+# SWOT Analysis Validation
+SWOT_MIN_ITEMS_PER_CATEGORY = 2
+SWOT_MAX_ITEMS_PER_CATEGORY = 10
+SWOT_MIN_ANALYSIS_LENGTH = 100
+SWOT_MAX_ANALYSIS_LENGTH = 5000
+
+# Status Updates and Polling
+STATUS_UPDATE_DELAY_MIN_SECONDS = 0
+STATUS_UPDATE_DELAY_MAX_SECONDS = 5
+STATUS_POLL_INTERVAL_SECONDS = 1
+
+# External API Configuration
+HTTP_REQUEST_TIMEOUT_SECONDS = 30
+HTTP_CONNECT_TIMEOUT_SECONDS = 10
+
+# Input Validation
+MAX_PRIMARY_ENTITY_LENGTH = 500
+MAX_COMPARISON_ENTITIES_LENGTH = 2000
+MAX_COMPARISON_ENTITIES_COUNT = 10
+
+# Reddit API Configuration
+REDDIT_MAX_SUBREDDITS = 10
+REDDIT_CONCURRENT_THRESHOLD = 3  # Switch to async when >3 subreddits
diff --git a/src/backend/site/router.py b/src/backend/site/router.py
index 9683c63..52e185c 100644
--- a/src/backend/site/router.py
+++ b/src/backend/site/router.py
@@ -11,6 +11,10 @@ from backend.core.pdf_cache import pdf_cache
 from backend.core.pdf_service import generate_swot_pdf
 from backend.logger import logger
 from backend.settings import app_settings
+from backend.settings.consts import (
+    MAX_COMPARISON_ENTITIES_LENGTH,
+    MAX_PRIMARY_ENTITY_LENGTH,
+)
 from backend.site.consts import (
     ANALYSIS_COMPLETE_MESSAGE,
     ANALYZING_MESSAGE,
@@ -45,12 +49,12 @@ class AnalysisInput(BaseModel):
     primary_entity: str = Field(
         ...,
         min_length=1,
-        max_length=500,
+        max_length=MAX_PRIMARY_ENTITY_LENGTH,
         description="Primary entity (company name or URL)",
     )
     comparison_entities: str = Field(
         default="",
-        max_length=2000,
+        max_length=MAX_COMPARISON_ENTITIES_LENGTH,
         description="Comma-separated comparison entities (optional)",
     )
 
diff --git a/src/backend/site/utils.py b/src/backend/site/utils.py
index 2e60c85..6a7e8d3 100644
--- a/src/backend/site/utils.py
+++ b/src/backend/site/utils.py
@@ -7,6 +7,10 @@ from loguru import logger
 
 from backend.core.core import SwotAgentDeps, SwotAnalysis
 from backend.core.tools import run_agent
+from backend.settings.consts import (
+    STATUS_UPDATE_DELAY_MAX_SECONDS,
+    STATUS_UPDATE_DELAY_MIN_SECONDS,
+)
 from backend.site.consts import (
     ANALYSIS_COMPLETE_MESSAGE,
     result_store,
@@ -20,8 +24,11 @@ async def emulate_tool_completion(session_id: str, message: str) -> None:
 
     Uses asyncio.sleep to avoid blocking the event loop.
     """
-    # Sleep a random amount of time between 0 and 5 seconds (async)
-    await asyncio.sleep(random.randint(0, 5))
+    # Sleep a random amount of time (async)
+    delay = random.randint(
+        STATUS_UPDATE_DELAY_MIN_SECONDS, STATUS_UPDATE_DELAY_MAX_SECONDS
+    )
+    await asyncio.sleep(delay)
     status_store[session_id].append(message)