@@ -2,15 +2,25 @@ import dotenv from "dotenv";
2
2
// Load environment variables from .env into process.env.
const de = dotenv.config();

// SECURITY: never print `de` itself — `de.parsed` contains every value
// from .env (API keys, tokens). Log only whether loading succeeded.
console.log("DEBUG", de.error ? "dotenv.config() failed" : "dotenv loaded");
4
4
5
+ const logFetch = async ( url : string , options : any ) => {
6
+ console . debug ( `Fetching ${ url } ` ) ;
7
+ // Disabled to avoid disclosure of sensitive information such as API keys
8
+ // elizaLogger.debug(JSON.stringify(options, null, 2));
9
+ return fetch ( url , options ) ;
10
+ } ;
11
+
12
+ import { bootstrapPlugin } from "@elizaos/plugin-bootstrap" ;
13
+
5
14
// Fail fast: if the .env file could not be read/parsed, abort startup
// rather than running with a partially configured environment.
if (de.error) {
    throw de.error;
}
8
17
9
18
import { defaultCharacter } from "../defaultCharacter.js" ;
10
19
import {
11
20
12
- // type Adapter,
21
+
13
22
AgentRuntime ,
23
+
14
24
// CacheManager,
15
25
// CacheStore,
16
26
composeContext ,
@@ -38,6 +48,7 @@ import {
38
48
// settings,
39
49
stringToUuid ,
40
50
getEmbeddingZeroVector ,
51
+ composeActionExamples ,
41
52
// validateCharacterConfig,
42
53
} from "@elizaos/core" ;
43
54
//import { defaultCharacter } from "./defaultCharacter.ts";
@@ -166,6 +177,7 @@ export class AIService {
166
177
private chatModelFAQ : ChatWrapper ; // <ChatOpenAICallOptions>
167
178
//runtime: AgentRuntime;
168
179
private agents : Map < string , IAgentRuntime > ; // container management
180
+ runtime : AgentRuntime ;
169
181
170
182
/**
171
183
* Constructor for initializing the ChatOpenAI instance.
@@ -202,7 +214,18 @@ export class AIService {
202
214
this . codeFormatter = new CodeFormatter ( ) ;
203
215
204
216
this . agents = new Map ( ) ;
205
- // this.runtime = {
217
+ const token = "token" ;
218
+ const character = defaultCharacter ;
219
+ this . runtime = new AgentRuntime ( { token,
220
+ modelProvider : character . modelProvider ,
221
+ evaluators : [ ] ,
222
+ character,
223
+ // character.plugins are handled when clients are added
224
+ plugins : [ bootstrapPlugin ] . flat ( ) . filter ( Boolean ) ,
225
+ providers : [ ] ,
226
+ managers : [ ] ,
227
+ fetch : logFetch , } )
228
+ //{
206
229
// agentId: "00000000-0000-0000-0000-000000000000",
207
230
// serverUrl: "http://localhost",
208
231
// databaseAdapter: {} as IDatabaseAdapter,
@@ -294,55 +317,7 @@ export class AIService {
294
317
this . handleAPIError ( error as Error ) ;
295
318
return "" ;
296
319
}
297
- return "" ;
298
- // try {
299
- // try {
300
- // let response;
301
- // if (isFAQ) {
302
- // response = await this.chatModelFAQ.invoke(finalPrompt);
303
- // } else {
304
- // response = await this.chatModel.invoke(finalPrompt);
305
- // }
306
- // return response.content as string;
307
- // } catch (error) {
308
- // if (
309
- // error instanceof Error &&
310
- // error.message.includes("maximum context length")
311
- // ) {
312
- // console.warn(
313
- // "Token limit exceeded, attempting with further truncation..."
314
- // );
315
- // // Try with more aggressive truncation
316
- // finalPrompt = this.codeFormatter.truncateCodeBlock(prompt, 4000);
317
- // try {
318
- // const response =
319
- // await this.chatModel.invoke(finalPrompt);
320
- // return response.content as string;
321
- // } catch (retryError) {
322
- // if (
323
- // retryError instanceof Error &&
324
- // retryError.message.includes(
325
- // "maximum context length"
326
- // )
327
- // ) {
328
- // console.warn(
329
- // "Still exceeding token limit, using minimal context..."
330
- // );
331
- // // Final attempt with minimal context
332
- // finalPrompt = this.codeFormatter.truncateCodeBlock(prompt, 2000);
333
- // const response =
334
- // await this.chatModel.invoke(finalPrompt);
335
- // return response.content as string;
336
- // }
337
- // throw retryError;
338
- // }
339
- // }
340
- // throw error;
341
- // }
342
- // } catch (error) {
343
- // this.handleAPIError(error as Error);
344
- // return "";
345
- // }
320
+ return "" ;
346
321
}
347
322
348
323
/**
@@ -357,16 +332,9 @@ export class AIService {
357
332
throw error ;
358
333
}
359
334
}
360
- //function processChunk(finalPrompt: string, arg1: string, runtime: any) {
361
- //throw new Error("Function not implemented.");
362
- //}
363
-
364
-
365
-
366
-
367
- //import getTokenForProvider from "../index.ts"
368
335
369
336
async function processChunk ( prompt : string , manager : string , runtime : IAgentRuntime ) : Promise < string > {
337
+ console . log ( "process chunk" ) ;
370
338
try {
371
339
const mm = runtime . getMemoryManager ( manager ) ;
372
340
if ( ! mm ) {
0 commit comments