Spaces:
Sleeping
Sleeping
| import os | |
| import logging | |
| from datetime import datetime | |
| from contextlib import asynccontextmanager | |
| from fastapi import FastAPI, WebSocket, HTTPException | |
| from fastapi.middleware.cors import CORSMiddleware | |
| from fastapi.responses import JSONResponse | |
| from websocket_handler import handle_websocket_connection | |
| from enhanced_websocket_handler import handle_enhanced_websocket_connection | |
| from hybrid_llm_service import HybridLLMService | |
| from voice_service import VoiceService | |
| from rag_service import search_documents | |
| from lancedb_service import LanceDBService | |
| from scenario_analysis_service import ScenarioAnalysisService | |
| import config | |
| from dotenv import load_dotenv | |
| # MCP and Authentication imports | |
| from fastapi import Depends | |
| from pydantic import BaseModel | |
| from typing import Optional | |
| from auth import get_current_user | |
# Load environment variables from a local .env file (if present) into os.environ.
load_dotenv()

# Configure root logging: timestamped INFO-level messages.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S'
)
logger = logging.getLogger(__name__)

# Get configuration
# Snapshot of the configuration values this service cares about.
# NOTE(review): config_dict is built here but never referenced in the
# visible code below — confirm whether a consumer elsewhere still needs it.
config_dict = {
    "ALLOWED_ORIGINS": config.ALLOWED_ORIGINS,
    "ENABLE_VOICE_FEATURES": config.ENABLE_VOICE_FEATURES
}
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan handler.

    Runs startup work before the app begins serving (best-effort seeding of
    sample documents) and logs a message on shutdown.

    NOTE(review): the original definition was not decorated even though
    ``asynccontextmanager`` is imported at the top of the file; FastAPI's
    ``lifespan=`` argument requires an async context manager, so the
    decorator is restored here. The original log prefixes were mojibake
    ("π", "β οΈ", "β"); replaced with the plausibly intended emoji —
    confirm against the project's history.
    """
    # --- Startup ---
    logger.info("🚀 Starting Voice Bot Application...")

    # Setup sample documents if database is empty. This is deliberately
    # best-effort: a failure here must not prevent the app from starting.
    try:
        from setup_documents import setup_sample_documents
        await setup_sample_documents()
    except Exception as e:
        logger.warning(f"⚠️ Could not setup sample documents: {e}")

    logger.info("✅ Application started successfully")
    yield
    # --- Shutdown ---
    logger.info("👋 Shutting down Voice Bot Application...")
# Create FastAPI application
app = FastAPI(
    title="Voice Bot Government Assistant",
    description="AI-powered voice assistant for government policies and services",
    version="1.0.0",
    lifespan=lifespan
)

# Configure CORS
# Allow the browser front-end (origins listed in config.ALLOWED_ORIGINS)
# to call the API with credentials; all methods and headers are permitted.
app.add_middleware(
    CORSMiddleware,
    allow_origins=config.ALLOWED_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Initialize services (lazy loading for HF Spaces)
# Module-level singletons, created on first use by the get_* accessors
# below to keep cold-start time and memory low on Hugging Face Spaces.
llm_service = None
voice_service = None
lancedb_service = None
scenario_service = None
def get_llm_service():
    """Return the shared HybridLLMService, creating it on first use."""
    global llm_service
    if llm_service is not None:
        return llm_service
    llm_service = HybridLLMService()
    return llm_service
def get_voice_service():
    """Return the shared VoiceService, creating it on first use."""
    global voice_service
    if voice_service is not None:
        return voice_service
    voice_service = VoiceService()
    return voice_service
def get_lancedb_service():
    """Return the shared LanceDBService, creating it on first use."""
    global lancedb_service
    if lancedb_service is not None:
        return lancedb_service
    lancedb_service = LanceDBService()
    return lancedb_service
def get_scenario_service():
    """Return the shared ScenarioAnalysisService, creating it on first use."""
    global scenario_service
    if scenario_service is not None:
        return scenario_service
    scenario_service = ScenarioAnalysisService()
    return scenario_service
# Health check endpoint
@app.get("/health")
async def health_check():
    """Liveness probe: report service identity, version and current time.

    NOTE(review): the route decorator was missing in the original even
    though the comment and the root endpoint's listing name "/health"
    (and the imported-but-unused ``asynccontextmanager`` suggests
    decorator lines were stripped from this paste); restored so the
    handler is actually registered — confirm the path.
    """
    return {
        "status": "healthy",
        "service": "voice-bot-api",
        "timestamp": datetime.now().isoformat(),
        "version": "1.0.0"
    }
# Root endpoint
@app.get("/")
async def root():
    """Service landing endpoint: name, status and a map of main routes.

    NOTE(review): route decorator restored (missing in the original paste,
    which also dropped the required decorator on ``lifespan``) — confirm
    the intended path is "/".
    """
    return {
        "message": "Voice Bot Government Assistant API",
        "status": "running",
        "version": "1.0.0",
        "endpoints": {
            "health": "/health",
            "chat": "/chat",
            "websocket": "/ws",
            "docs": "/docs"
        }
    }
# Chat endpoint
@app.post("/chat")
async def chat_endpoint(request: dict):
    """Text-based chat endpoint.

    Expects a JSON body with a non-empty "message" field and returns the
    LLM response together with an ISO-format timestamp.

    Raises:
        HTTPException 400: when "message" is missing or empty.
        HTTPException 500: when the LLM call fails.

    NOTE(review): route decorator restored ("/chat" per the root listing).
    """
    message = request.get("message", "")
    # Validate before the try block: in the original, the 400 raised here
    # was caught by the generic ``except Exception`` below and re-wrapped
    # as a 500, hiding the client error.
    if not message:
        raise HTTPException(status_code=400, detail="Message is required")

    try:
        llm = get_llm_service()
        response = await llm.get_response(message)
        return {
            "response": response,
            "timestamp": datetime.now().isoformat()
        }
    except HTTPException:
        # Preserve deliberate HTTP errors raised downstream.
        raise
    except Exception as e:
        logger.error(f"Chat error: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
# WebSocket endpoint
@app.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
    """WebSocket endpoint for real-time communication.

    Delegates the entire connection lifecycle (accept, message loop,
    close) to the enhanced handler.

    NOTE(review): route decorator restored ("/ws" per the root listing).
    The plain ``handle_websocket_connection`` import at the top of the
    file appears unused — confirm before removing it.
    """
    await handle_enhanced_websocket_connection(websocket)
if __name__ == "__main__":
    # Run a local development server; port 7860 is the port Hugging Face
    # Spaces expects an app to listen on.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=7860)