GitHub Actions committed on
Commit
bc9bee1
·
1 Parent(s): 09b0800

Deploy backend from GitHub Actions

Browse files

🚀 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <[email protected]>

src/agents/personalization_agent.py CHANGED
@@ -40,20 +40,7 @@ class PersonalizationAgent:
40
  except Exception as e:
41
  print(f"[WARNING] Failed to disable tracing: {e}")
42
 
43
- # Initialize primary Gemini client
44
- try:
45
- self.gemini_client = AsyncOpenAI(
46
- api_key=os.getenv("GEMINI_API_KEY"),
47
- base_url="https://generativelanguage.googleapis.com/v1beta/openai/",
48
- timeout=120.0,
49
- max_retries=3
50
- )
51
- print("[DEBUG] Gemini client initialized successfully")
52
- except Exception as e:
53
- print(f"[ERROR] Failed to initialize Gemini client: {e}")
54
- self.gemini_client = None
55
-
56
- # Initialize fallback OpenRouter client
57
  self.openrouter_client = None
58
  if os.getenv("OPENROUTER_API_KEY"):
59
  try:
@@ -72,14 +59,15 @@ class PersonalizationAgent:
72
  print(f"[ERROR] Failed to initialize OpenRouter client: {e}")
73
  self.openrouter_client = None
74
 
75
- # Configure primary model (Gemini)
76
  self.primary_model = None
77
- if self.gemini_client:
78
  try:
79
- model_name = os.getenv("GEMINI_MODEL", "gemini-2.0-flash")
 
80
  self.primary_model = OpenAIChatCompletionsModel(
81
  model=model_name,
82
- openai_client=self.gemini_client
83
  )
84
  print(f"[DEBUG] Primary model initialized successfully: {model_name}")
85
  except Exception as e:
@@ -87,25 +75,9 @@ class PersonalizationAgent:
87
  traceback.print_exc()
88
  self.primary_model = None
89
 
90
- # Configure fallback model (OpenRouter)
91
- self.fallback_model = None
92
- if self.openrouter_client:
93
- try:
94
- # Use a working free model with fewer restrictions
95
- fallback_model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
96
- self.fallback_model = OpenAIChatCompletionsModel(
97
- model=fallback_model_name,
98
- openai_client=self.openrouter_client
99
- )
100
- print(f"[DEBUG] OpenRouter fallback configured with model: {fallback_model_name}")
101
- except Exception as e:
102
- print(f"[ERROR] Failed to initialize fallback model: {e}")
103
- traceback.print_exc()
104
- self.fallback_model = None
105
-
106
- # Initialize 3rd fallback (OpenAI)
107
  self.openai_client = None
108
- self.openai_fallback_model = None
109
  if os.getenv("OPENAI_API_KEY"):
110
  try:
111
  self.openai_client = AsyncOpenAI(
@@ -114,14 +86,14 @@ class PersonalizationAgent:
114
  max_retries=3
115
  )
116
  # User requested 'gpt-5-nano' (Hardcoded)
117
- self.openai_fallback_model = OpenAIChatCompletionsModel(
118
  model="gpt-5-nano",
119
  openai_client=self.openai_client
120
  )
121
- print(f"[DEBUG] OpenAI 3rd fallback configured with model: gpt-5-nano")
122
  except Exception as e:
123
- print(f"[ERROR] Failed to initialize OpenAI 3rd fallback: {e}")
124
- self.openai_fallback_model = None
125
 
126
  # Initialize the agent with primary model
127
  self.agent = None
@@ -249,35 +221,30 @@ class PersonalizationAgent:
249
  user_profile
250
  )
251
 
252
- # Try primary model first (Gemini)
253
- result = await self._try_personalize_with_model(
254
- model_name="gemini-2.0-flash",
255
- model=self.primary_model,
256
- input_text=personalized_input,
257
- content=content,
258
- user_profile=user_profile
259
- )
260
-
261
- # If primary model failed due to quota/rate limits and fallback is available, try OpenRouter
262
- if not result["success"] and self._should_use_fallback(result["error_message"]) and self.fallback_model:
263
- print(f"Primary model failed, attempting fallback to OpenRouter...")
264
- # Get the fallback model name from environment
265
- fallback_model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
266
  result = await self._try_personalize_with_model(
267
- model_name=fallback_model_name,
268
- model=self.fallback_model,
269
  input_text=personalized_input,
270
  content=content,
271
- user_profile=user_profile,
272
- is_fallback=True
273
  )
 
 
 
 
 
 
 
274
 
275
- # If OpenRouter failed, try 3rd fallback (OpenAI)
276
- if not result["success"] and self._should_use_fallback(result["error_message"]) and self.openai_fallback_model:
277
- print(f"OpenRouter fallback failed, attempting 3rd fallback to OpenAI...")
278
  result = await self._try_personalize_with_model(
279
  model_name="gpt-5-nano",
280
- model=self.openai_fallback_model,
281
  input_text=personalized_input,
282
  content=content,
283
  user_profile=user_profile,
 
40
  except Exception as e:
41
  print(f"[WARNING] Failed to disable tracing: {e}")
42
 
43
+ # Initialize primary OpenRouter client
 
 
 
 
 
 
 
 
 
 
 
 
 
44
  self.openrouter_client = None
45
  if os.getenv("OPENROUTER_API_KEY"):
46
  try:
 
59
  print(f"[ERROR] Failed to initialize OpenRouter client: {e}")
60
  self.openrouter_client = None
61
 
62
+ # Configure primary model (OpenRouter)
63
  self.primary_model = None
64
+ if self.openrouter_client:
65
  try:
66
+ # Use a working free model with fewer restrictions
67
+ model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
68
  self.primary_model = OpenAIChatCompletionsModel(
69
  model=model_name,
70
+ openai_client=self.openrouter_client
71
  )
72
  print(f"[DEBUG] Primary model initialized successfully: {model_name}")
73
  except Exception as e:
 
75
  traceback.print_exc()
76
  self.primary_model = None
77
 
78
+ # Initialize fallback (OpenAI)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
79
  self.openai_client = None
80
+ self.fallback_model = None
81
  if os.getenv("OPENAI_API_KEY"):
82
  try:
83
  self.openai_client = AsyncOpenAI(
 
86
  max_retries=3
87
  )
88
  # User requested 'gpt-5-nano' (Hardcoded)
89
+ self.fallback_model = OpenAIChatCompletionsModel(
90
  model="gpt-5-nano",
91
  openai_client=self.openai_client
92
  )
93
+ print(f"[DEBUG] OpenAI fallback configured with model: gpt-5-nano")
94
  except Exception as e:
95
+ print(f"[ERROR] Failed to initialize OpenAI fallback: {e}")
96
+ self.fallback_model = None
97
 
98
  # Initialize the agent with primary model
99
  self.agent = None
 
221
  user_profile
222
  )
223
 
224
+ # Try primary model first (OpenRouter)
225
+ if self.primary_model:
226
+ primary_model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
 
 
 
 
 
 
 
 
 
 
 
227
  result = await self._try_personalize_with_model(
228
+ model_name=primary_model_name,
229
+ model=self.primary_model,
230
  input_text=personalized_input,
231
  content=content,
232
+ user_profile=user_profile
 
233
  )
234
+ else:
235
+ # If no primary model, set result to failure
236
+ result = {
237
+ "success": False,
238
+ "error_message": "Primary model (OpenRouter) not available",
239
+ "model_name": "OpenRouter"
240
+ }
241
 
242
+ # If primary model failed due to quota/rate limits and fallback is available, try OpenAI
243
+ if not result["success"] and self._should_use_fallback(result["error_message"]) and self.fallback_model:
244
+ print(f"Primary model (OpenRouter) failed, attempting fallback to OpenAI...")
245
  result = await self._try_personalize_with_model(
246
  model_name="gpt-5-nano",
247
+ model=self.fallback_model,
248
  input_text=personalized_input,
249
  content=content,
250
  user_profile=user_profile,
src/services/openai_translation/translation_agent.py CHANGED
@@ -8,7 +8,6 @@ from typing import Dict, Optional, Any
8
  from dataclasses import dataclass
9
 
10
  from agents import Agent, Runner, AsyncOpenAI, OpenAIChatCompletionsModel
11
- from src.services.openai_translation.client import GeminiOpenAIClient, get_gemini_client
12
  from src.utils.translation_logger import get_translation_logger
13
 
14
  logger = get_translation_logger(__name__)
@@ -25,27 +24,15 @@ class TranslationContext:
25
 
26
  class OpenAITranslationAgent:
27
  """
28
- OpenAI Agents SDK-based translation agent using proper Runner.run pattern.
29
  """
30
 
31
- def __init__(
32
- self,
33
- gemini_client: Optional[GeminiOpenAIClient] = None,
34
- model: str = "gemini-2.0-flash-lite"
35
- ):
36
- """Initialize translation agent."""
37
- self.client = gemini_client or get_gemini_client()
38
  self.model = model
39
 
40
- # Create the agent with translation instructions
41
- self.agent = Agent(
42
- name="Translation Agent",
43
- instructions=self._get_translation_instructions(),
44
- model=self.client.get_model()
45
- )
46
-
47
- # Initialize fallback agent (OpenRouter)
48
- self.fallback_agent = None
49
  if os.getenv("OPENROUTER_API_KEY"):
50
  try:
51
  openrouter_client = AsyncOpenAI(
@@ -58,24 +45,24 @@ class OpenAITranslationAgent:
58
  "X-Title": "AI Book Translation Agent"
59
  }
60
  )
61
-
62
- fallback_model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
63
- fallback_model = OpenAIChatCompletionsModel(
64
- model=fallback_model_name,
65
  openai_client=openrouter_client
66
  )
67
-
68
- self.fallback_agent = Agent(
69
- name="Translation Agent (Fallback)",
70
  instructions=self._get_translation_instructions(),
71
- model=fallback_model
72
  )
73
- logger.info(f"Fallback translation agent initialized with {fallback_model_name}")
74
  except Exception as e:
75
- logger.error(f"Failed to initialize fallback agent: {e}")
76
 
77
- # Initialize 3rd fallback agent (OpenAI)
78
- self.openai_fallback_agent = None
79
  if os.getenv("OPENAI_API_KEY"):
80
  try:
81
  openai_client = AsyncOpenAI(
@@ -83,22 +70,23 @@ class OpenAITranslationAgent:
83
  timeout=120.0,
84
  max_retries=3
85
  )
86
-
87
- openai_model_name = "gpt-5-nano"
88
- openai_model = OpenAIChatCompletionsModel(
89
- model=openai_model_name,
90
  openai_client=openai_client
91
  )
92
-
93
- self.openai_fallback_agent = Agent(
94
- name="Translation Agent (3rd Fallback)",
95
  instructions=self._get_translation_instructions(),
96
- model=openai_model
97
  )
98
- logger.info(f"3rd fallback translation agent initialized with {openai_model_name}")
99
  except Exception as e:
100
- logger.error(f"Failed to initialize 3rd fallback agent: {e}")
101
 
 
102
  def _get_translation_instructions(self) -> str:
103
  """Get the base translation instructions for the agent."""
104
  return """
@@ -169,32 +157,22 @@ Additional context will be provided as needed for specific domains.
169
  prompt,
170
  max_turns=1 # Single turn for simple translation
171
  )
172
- model_used = self.model
 
173
  except Exception as e:
174
  # Check for fallback
175
  if self.fallback_agent and self._should_use_fallback(str(e)):
176
- logger.warning(f"Primary agent failed: {e}. Attempting fallback...")
177
  try:
178
  result = await Runner.run(
179
  self.fallback_agent,
180
  prompt,
181
  max_turns=1
182
  )
183
- model_used = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
184
- logger.info(f"Fallback translation successful with {model_used}")
185
  except Exception as fallback_e:
186
- # Check for 3rd fallback
187
- if self.openai_fallback_agent and self._should_use_fallback(str(fallback_e)):
188
- logger.warning(f"OpenRouter fallback failed: {fallback_e}. Attempting 3rd fallback...")
189
- result = await Runner.run(
190
- self.openai_fallback_agent,
191
- prompt,
192
- max_turns=1
193
- )
194
- model_used = "gpt-5-nano"
195
- logger.info(f"3rd fallback translation successful with {model_used}")
196
- else:
197
- raise fallback_e
198
  else:
199
  raise e
200
 
@@ -292,6 +270,6 @@ Additional context will be provided as needed for specific domains.
292
 
293
 
294
  # Factory function
295
- def create_translation_agent(model: str = "gemini-2.0-flash-lite") -> OpenAITranslationAgent:
296
  """Create a translation agent instance."""
297
  return OpenAITranslationAgent(model=model)
 
8
  from dataclasses import dataclass
9
 
10
  from agents import Agent, Runner, AsyncOpenAI, OpenAIChatCompletionsModel
 
11
  from src.utils.translation_logger import get_translation_logger
12
 
13
  logger = get_translation_logger(__name__)
 
24
 
25
  class OpenAITranslationAgent:
26
  """
27
+ OpenAI Agents SDK-based translation agent using OpenRouter as primary.
28
  """
29
 
30
+ def __init__(self, model: str = "meta-llama/llama-3.2-3b-instruct:free"):
31
+ """Initialize translation agent with OpenRouter as primary."""
 
 
 
 
 
32
  self.model = model
33
 
34
+ # Initialize primary agent (OpenRouter)
35
+ self.agent = None
 
 
 
 
 
 
 
36
  if os.getenv("OPENROUTER_API_KEY"):
37
  try:
38
  openrouter_client = AsyncOpenAI(
 
45
  "X-Title": "AI Book Translation Agent"
46
  }
47
  )
48
+
49
+ primary_model_name = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
50
+ primary_model = OpenAIChatCompletionsModel(
51
+ model=primary_model_name,
52
  openai_client=openrouter_client
53
  )
54
+
55
+ self.agent = Agent(
56
+ name="Translation Agent (OpenRouter)",
57
  instructions=self._get_translation_instructions(),
58
+ model=primary_model
59
  )
60
+ logger.info(f"Primary translation agent initialized with {primary_model_name}")
61
  except Exception as e:
62
+ logger.error(f"Failed to initialize primary agent: {e}")
63
 
64
+ # Initialize fallback agent (OpenAI)
65
+ self.fallback_agent = None
66
  if os.getenv("OPENAI_API_KEY"):
67
  try:
68
  openai_client = AsyncOpenAI(
 
70
  timeout=120.0,
71
  max_retries=3
72
  )
73
+
74
+ fallback_model_name = "gpt-5-nano"
75
+ fallback_model = OpenAIChatCompletionsModel(
76
+ model=fallback_model_name,
77
  openai_client=openai_client
78
  )
79
+
80
+ self.fallback_agent = Agent(
81
+ name="Translation Agent (OpenAI Fallback)",
82
  instructions=self._get_translation_instructions(),
83
+ model=fallback_model
84
  )
85
+ logger.info(f"Fallback translation agent initialized with {fallback_model_name}")
86
  except Exception as e:
87
+ logger.error(f"Failed to initialize fallback agent: {e}")
88
 
89
+
90
  def _get_translation_instructions(self) -> str:
91
  """Get the base translation instructions for the agent."""
92
  return """
 
157
  prompt,
158
  max_turns=1 # Single turn for simple translation
159
  )
160
+ model_used = os.getenv("OPENROUTER_MODEL", "meta-llama/llama-3.2-3b-instruct:free")
161
+ logger.info(f"Primary translation successful with {model_used}")
162
  except Exception as e:
163
  # Check for fallback
164
  if self.fallback_agent and self._should_use_fallback(str(e)):
165
+ logger.warning(f"Primary agent (OpenRouter) failed: {e}. Attempting OpenAI fallback...")
166
  try:
167
  result = await Runner.run(
168
  self.fallback_agent,
169
  prompt,
170
  max_turns=1
171
  )
172
+ model_used = "gpt-5-nano"
173
+ logger.info(f"OpenAI fallback translation successful with {model_used}")
174
  except Exception as fallback_e:
175
+ raise fallback_e
 
 
 
 
 
 
 
 
 
 
 
176
  else:
177
  raise e
178
 
 
270
 
271
 
272
  # Factory function
273
+ def create_translation_agent(model: str = "meta-llama/llama-3.2-3b-instruct:free") -> OpenAITranslationAgent:
274
  """Create a translation agent instance."""
275
  return OpenAITranslationAgent(model=model)