
Commit 62c45a7

removed temp logging in generation.ts
1 parent: 2e0b7b9

File tree

1 file changed: +0, -234 lines

packages/core/src/generation.ts (-234 lines)
@@ -198,24 +198,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: openaiResponse?.slice(0, 200) + "...",
-                    responseType: typeof openaiResponse,
-                    responseLength: openaiResponse?.length,
-                },
-            });
-
             response = openaiResponse;
             elizaLogger.debug("Received response from OpenAI model.");
             break;
@@ -239,24 +221,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: googleResponse?.slice(0, 200) + "...",
-                    responseType: typeof googleResponse,
-                    responseLength: googleResponse?.length,
-                },
-            });
-
             response = googleResponse;
             elizaLogger.debug("Received response from Google model.");
             break;
@@ -283,24 +247,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: anthropicResponse?.slice(0, 200) + "...",
-                    responseType: typeof anthropicResponse,
-                    responseLength: anthropicResponse?.length,
-                },
-            });
-
             response = anthropicResponse;
             elizaLogger.debug("Received response from Anthropic model.");
             break;
@@ -327,24 +273,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: anthropicResponse?.slice(0, 200) + "...",
-                    responseType: typeof anthropicResponse,
-                    responseLength: anthropicResponse?.length,
-                },
-            });
-
             response = anthropicResponse;
             elizaLogger.debug(
                 "Received response from Claude Vertex model."
@@ -375,24 +303,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: grokResponse?.slice(0, 200) + "...",
-                    responseType: typeof grokResponse,
-                    responseLength: grokResponse?.length,
-                },
-            });
-
             response = grokResponse;
             elizaLogger.debug("Received response from Grok model.");
             break;
@@ -414,24 +324,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: groqResponse?.slice(0, 200) + "...",
-                    responseType: typeof groqResponse,
-                    responseLength: groqResponse?.length,
-                },
-            });
-
             response = groqResponse;
             break;
         }
@@ -483,24 +375,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: redpillResponse?.slice(0, 200) + "...",
-                    responseType: typeof redpillResponse,
-                    responseLength: redpillResponse?.length,
-                },
-            });
-
             response = redpillResponse;
             elizaLogger.debug("Received response from redpill model.");
             break;
@@ -528,24 +402,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
            });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: openrouterResponse?.slice(0, 200) + "...",
-                    responseType: typeof openrouterResponse,
-                    responseLength: openrouterResponse?.length,
-                },
-            });
-
             response = openrouterResponse;
             elizaLogger.debug("Received response from OpenRouter model.");
             break;
@@ -572,24 +428,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: ollamaResponse?.slice(0, 200) + "...",
-                    responseType: typeof ollamaResponse,
-                    responseLength: ollamaResponse?.length,
-                },
-            });
-
             response = ollamaResponse;
         }
         elizaLogger.debug("Received response from Ollama model.");
@@ -616,24 +454,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: heuristResponse?.slice(0, 200) + "...",
-                    responseType: typeof heuristResponse,
-                    responseLength: heuristResponse?.length,
-                },
-            });
-
             response = heuristResponse;
             elizaLogger.debug("Received response from Heurist model.");
             break;
@@ -683,24 +503,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: openaiResponse?.slice(0, 200) + "...",
-                    responseType: typeof openaiResponse,
-                    responseLength: openaiResponse?.length,
-                },
-            });
-
             response = openaiResponse;
             elizaLogger.debug("Received response from GAIANET model.");
             break;
@@ -727,24 +529,6 @@ export async function generateText({
                 presencePenalty: presence_penalty,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: galadrielResponse?.slice(0, 200) + "...",
-                    responseType: typeof galadrielResponse,
-                    responseLength: galadrielResponse?.length,
-                },
-            });
-
             response = galadrielResponse;
             elizaLogger.debug("Received response from Galadriel model.");
             break;
@@ -768,24 +552,6 @@ export async function generateText({
                 maxTokens: max_response_length,
             });

-            console.log("\n[LLM Debug]", {
-                sent: {
-                    prompt: context.slice(0, 200) + "...",
-                    system:
-                        runtime.character.system ??
-                        settings.SYSTEM_PROMPT ??
-                        undefined,
-                    model,
-                    temperature,
-                    maxTokens: max_response_length,
-                },
-                received: {
-                    response: veniceResponse?.slice(0, 200) + "...",
-                    responseType: typeof veniceResponse,
-                    responseLength: veniceResponse?.length,
-                },
-            });
-
             response = veniceResponse;
             elizaLogger.debug("Received response from Venice model.");
             break;
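Each deleted block printed the same [LLM Debug] payload straight to stdout via console.log, regardless of log level. A minimal sketch of how an equivalent summary could be kept behind the project's logger instead: the helper name logLlmExchange and the import path are assumptions for illustration, not part of this commit, and it assumes elizaLogger.debug accepts an extra structured argument in console style.

import { elizaLogger } from "@ai16z/eliza"; // assumed path; generation.ts imports elizaLogger locally

// Hypothetical helper, not part of this commit: emits the same truncated
// prompt/response summary the removed console.log calls produced, but through
// elizaLogger.debug so it is gated by the logger's configured level.
function logLlmExchange(
    context: string,
    model: string,
    response: string | undefined
): void {
    elizaLogger.debug("[LLM Debug]", {
        sent: {
            prompt: context.slice(0, 200) + "...",
            model,
        },
        received: {
            response: response?.slice(0, 200) + "...",
            responseType: typeof response,
            responseLength: response?.length,
        },
    });
}

Called as logLlmExchange(context, model, openaiResponse); in each provider branch, this would preserve the diagnostics without the unconditional stdout noise this commit removes.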

0 commit comments