@@ -113,6 +113,7 @@ export async function generateText({
113
113
) ;
114
114
115
115
switch ( provider ) {
116
+ // OPENAI & LLAMACLOUD share the same structure.
116
117
case ModelProviderName . OPENAI :
117
118
case ModelProviderName . LLAMACLOUD : {
118
119
elizaLogger . debug ( "Initializing OpenAI model." ) ;
@@ -139,7 +140,7 @@ export async function generateText({
139
140
case ModelProviderName . GOOGLE : {
140
141
const google = createGoogleGenerativeAI ( ) ;
141
142
142
- const { text : anthropicResponse } = await aiGenerateText ( {
143
+ const { text : googleResponse } = await aiGenerateText ( {
143
144
model : google ( model ) ,
144
145
prompt : context ,
145
146
system :
@@ -152,7 +153,8 @@ export async function generateText({
152
153
presencePenalty : presence_penalty ,
153
154
} ) ;
154
155
155
- response = anthropicResponse ;
156
+ response = googleResponse ;
157
+ elizaLogger . debug ( "Received response from Google model." ) ;
156
158
break ;
157
159
}
158
160
@@ -280,7 +282,7 @@ export async function generateText({
280
282
const serverUrl = models [ provider ] . endpoint ;
281
283
const openai = createOpenAI ( { apiKey, baseURL : serverUrl } ) ;
282
284
283
- const { text : openaiResponse } = await aiGenerateText ( {
285
+ const { text : redpillResponse } = await aiGenerateText ( {
284
286
model : openai . languageModel ( model ) ,
285
287
prompt : context ,
286
288
temperature : temperature ,
@@ -293,8 +295,8 @@ export async function generateText({
293
295
presencePenalty : presence_penalty ,
294
296
} ) ;
295
297
296
- response = openaiResponse ;
297
- elizaLogger . debug ( "Received response from OpenAI model." ) ;
298
+ response = redpillResponse ;
299
+ elizaLogger . debug ( "Received response from redpill model." ) ;
298
300
break ;
299
301
}
300
302
0 commit comments