ChAbhishek28 commited on
Commit
3f1265d
·
1 Parent(s): a174809

Fix critical NoneType errors in hybrid LLM service

Browse files

- Remove duplicate and broken method definitions
- Properly implement analyze_task_complexity method with keywords
- Add null checks for provider.value and complexity.value access
- Fix method logic confusion between task complexity and provider selection
- Ensure all methods return valid enum values

Files changed (1) hide show
  1. hybrid_llm_service.py +11 -7
hybrid_llm_service.py CHANGED
@@ -85,10 +85,6 @@ class HybridLLMService:
85
 
86
  def analyze_task_complexity(self, message: str) -> TaskComplexity:
87
  """Analyze if a task requires complex reasoning or simple response"""
88
-
89
- def determine_task_complexity(self, message: str, context: str = "") -> TaskComplexity:
90
- """Determine task complexity - alias for analyze_task_complexity for compatibility"""
91
- return self.analyze_task_complexity(message)
92
  complex_keywords = [
93
  'analyze', 'compare', 'evaluate', 'scenario', 'chart', 'graph',
94
  'visualization', 'complex', 'detailed analysis', 'multi-step',
@@ -111,6 +107,10 @@ class HybridLLMService:
111
  return TaskComplexity.COMPLEX
112
 
113
  return TaskComplexity.SIMPLE
 
 
 
 
114
 
115
  def choose_llm_provider(self, message: str) -> LLMProvider:
116
  """Choose the best LLM provider based on task complexity and availability"""
@@ -146,7 +146,9 @@ At least one of these API keys is required for the Voice Bot to function properl
146
  provider = self.choose_llm_provider(message)
147
  complexity = self.analyze_task_complexity(message)
148
 
149
- logger.info(f"🎯 Using {provider.value} for {complexity.value} task")
 
 
150
 
151
  # If system_prompt is provided, prepend it to the context
152
  if system_prompt:
@@ -169,7 +171,8 @@ At least one of these API keys is required for the Voice Bot to function properl
169
  return "I apologize, but no AI providers are currently available. Please check your API keys."
170
 
171
  except Exception as e:
172
- logger.error(f"❌ Error with {provider.value}: {e}")
 
173
 
174
  # Try fallback provider
175
  if provider == LLMProvider.GROQ and self.gemini_llm:
@@ -260,7 +263,8 @@ At least one of these API keys is required for the Voice Bot to function properl
260
  yield "No AI providers are currently available."
261
 
262
  except Exception as e:
263
- logger.error(f"❌ Streaming error with {provider.value}: {e}")
 
264
 
265
  # Try fallback
266
  if provider == LLMProvider.GROQ and self.gemini_llm:
 
85
 
86
  def analyze_task_complexity(self, message: str) -> TaskComplexity:
87
  """Analyze if a task requires complex reasoning or simple response"""
 
 
 
 
88
  complex_keywords = [
89
  'analyze', 'compare', 'evaluate', 'scenario', 'chart', 'graph',
90
  'visualization', 'complex', 'detailed analysis', 'multi-step',
 
107
  return TaskComplexity.COMPLEX
108
 
109
  return TaskComplexity.SIMPLE
110
+
111
+ def determine_task_complexity(self, message: str, context: str = "") -> TaskComplexity:
112
+ """Determine task complexity - alias for analyze_task_complexity for compatibility"""
113
+ return self.analyze_task_complexity(message)
114
 
115
  def choose_llm_provider(self, message: str) -> LLMProvider:
116
  """Choose the best LLM provider based on task complexity and availability"""
 
146
  provider = self.choose_llm_provider(message)
147
  complexity = self.analyze_task_complexity(message)
148
 
149
+ provider_name = provider.value if provider else "unknown"
150
+ complexity_name = complexity.value if complexity else "unknown"
151
+ logger.info(f"🎯 Using {provider_name} for {complexity_name} task")
152
 
153
  # If system_prompt is provided, prepend it to the context
154
  if system_prompt:
 
171
  return "I apologize, but no AI providers are currently available. Please check your API keys."
172
 
173
  except Exception as e:
174
+ provider_name = provider.value if provider else "unknown"
175
+ logger.error(f"❌ Error with {provider_name}: {e}")
176
 
177
  # Try fallback provider
178
  if provider == LLMProvider.GROQ and self.gemini_llm:
 
263
  yield "No AI providers are currently available."
264
 
265
  except Exception as e:
266
+ provider_name = provider.value if provider else "unknown"
267
+ logger.error(f"❌ Streaming error with {provider_name}: {e}")
268
 
269
  # Try fallback
270
  if provider == LLMProvider.GROQ and self.gemini_llm: