@@ -11,6 +11,7 @@ import { default as tiktoken, TiktokenModel } from "tiktoken";
 import Together from "together-ai";
 import { elizaLogger } from "./index.ts";
 import models from "./models.ts";
+import { createGoogleGenerativeAI } from "@ai-sdk/google";
 import {
     parseBooleanFromText,
     parseJsonArrayFromText,
@@ -104,6 +105,25 @@ export async function generateText({
             break;
         }

+        case ModelProviderName.GOOGLE:
+            const google = createGoogleGenerativeAI();
+
+            const { text: anthropicResponse } = await aiGenerateText({
+                model: google(model),
+                prompt: context,
+                system:
+                    runtime.character.system ??
+                    settings.SYSTEM_PROMPT ??
+                    undefined,
+                temperature: temperature,
+                maxTokens: max_response_length,
+                frequencyPenalty: frequency_penalty,
+                presencePenalty: presence_penalty,
+            });
+
+            response = anthropicResponse;
+            break;
+
         case ModelProviderName.ANTHROPIC: {
             elizaLogger.log("Initializing Anthropic model.");

@@ -214,7 +234,6 @@ export async function generateText({
             break;
         }

-
         case ModelProviderName.OPENROUTER: {
             elizaLogger.log("Initializing OpenRouter model.");
             const serverUrl = models[provider].endpoint;
@@ -238,7 +257,6 @@ export async function generateText({
             break;
         }

-
         case ModelProviderName.OLLAMA:
             {
                 console.log("Initializing Ollama model.");
@@ -425,10 +443,13 @@ export async function generateTrueOrFalse({
     modelClass: string;
 }): Promise<boolean> {
     let retryDelay = 1000;
-    console.log("modelClass", modelClass)
+    console.log("modelClass", modelClass);

     const stop = Array.from(
-        new Set([...(models[runtime.modelProvider].settings.stop || []), ["\n"]])
+        new Set([
+            ...(models[runtime.modelProvider].settings.stop || []),
+            ["\n"],
+        ])
     ) as string[];

     while (true) {
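For reference, a minimal standalone sketch of the call pattern the new GOOGLE branch uses, outside the Eliza runtime. The model id, prompt, and sampling values here are illustrative assumptions (the diff reads them from models.ts and runtime settings), and the provider is assumed to pick up its API key from the GOOGLE_GENERATIVE_AI_API_KEY environment variable, which @ai-sdk/google reads by default.

// sketch.ts - hedged usage example, not the PR's actual code path
import { generateText } from "ai";
import { createGoogleGenerativeAI } from "@ai-sdk/google";

async function main() {
    // Provider factory; with no options it reads GOOGLE_GENERATIVE_AI_API_KEY from the env.
    const google = createGoogleGenerativeAI();

    // Mirrors the options the new switch case passes to aiGenerateText.
    const { text } = await generateText({
        model: google("gemini-1.5-flash"), // assumed model id for illustration
        prompt: "Say hello in one sentence.",
        system: "You are a terse assistant.",
        temperature: 0.7,
        maxTokens: 256,
        frequencyPenalty: 0,
        presencePenalty: 0,
    });

    console.log(text);
}

main();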