Skip to content

Commit 5f88013

Browse files
committed
fixes and summarize
1 parent b387fab commit 5f88013

File tree

13 files changed

+97
-99
lines changed

13 files changed

+97
-99
lines changed

core/package.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
"start:degen": "node --loader ts-node/esm src/index.ts --characters=\"../characters/degenspartan.json\"",
2121
"start:service:all": "pm2 start npm --name=\"all\" --restart-delay=3000 --max-restarts=10 -- run start:degen",
2222
"stop:service:all": "pm2 stop all",
23-
"start:all": "node --loader ts-node/esm src/index.ts --characters=\"../characters/degenspartan.json\",\"../characters/ruby.character.json\"",
23+
"start:all": "node --loader ts-node/esm src/index.ts --characters=\"../characters/degenspartan.json\",\"../characters/ruby.character.json\",\"../characters/pmairca.character.json\"",
2424
"start:trump": "node --loader ts-node/esm src/index.ts --characters=\"../characters/trump.character.json\"",
2525
"start:service:tate": "pm2 start npm --name=\"tate\" --restart-delay=3000 --max-restarts=10 -- run start:tate",
2626
"stop:service:tate": "pm2 stop tate",

core/src/adapters/postgres.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ export class PostgresDatabaseAdapter extends DatabaseAdapter {
110110
const placeholders = params.roomIds
111111
.map((_, i) => `$${i + 2}`)
112112
.join(", ");
113-
113+
114114
let query = `SELECT * FROM memories WHERE type = $1 AND "roomId" IN (${placeholders})`;
115115
let queryParams = [params.tableName, ...params.roomIds];
116116

@@ -635,7 +635,7 @@ export class PostgresDatabaseAdapter extends DatabaseAdapter {
635635

636636
if (params.unique) {
637637
sql += ` AND "unique" = true`;
638-
}
638+
}
639639

640640
// TODO: Test this
641641
if (params.agentId) {

core/src/adapters/sqlite.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ export class SqliteDatabaseAdapter extends DatabaseAdapter {
160160
const placeholders = params.roomIds.map(() => "?").join(", ");
161161
let sql = `SELECT * FROM memories WHERE type = ? AND roomId IN (${placeholders})`;
162162
let queryParams = [params.tableName, ...params.roomIds];
163-
163+
164164
if (params.agentId) {
165165
sql += ` AND userId = ?`;
166166
queryParams.push(params.agentId);
@@ -171,9 +171,9 @@ export class SqliteDatabaseAdapter extends DatabaseAdapter {
171171
content: string;
172172
})[];
173173

174-
return rows.map(row => ({
174+
return rows.map((row) => ({
175175
...row,
176-
content: JSON.parse(row.content)
176+
content: JSON.parse(row.content),
177177
}));
178178
}
179179

core/src/adapters/supabase.ts

+1-2
Original file line numberDiff line numberDiff line change
@@ -318,15 +318,14 @@ export class SupabaseDatabaseAdapter extends DatabaseAdapter {
318318
tableName: string;
319319
}
320320
): Promise<Memory[]> {
321-
322321
const queryParams = {
323322
query_table_name: params.tableName,
324323
query_roomId: params.roomId,
325324
query_embedding: embedding,
326325
query_match_threshold: params.match_threshold,
327326
query_match_count: params.count,
328327
query_unique: !!params.unique,
329-
}
328+
};
330329
if (params.agentId) {
331330
(queryParams as any).query_agentId = params.agentId;
332331
}

core/src/cli/index.ts

+2
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,8 @@ export async function createAgentRuntime(
123123
const actionConfigs = loadActionConfigs(configPath);
124124
const customActions = await loadCustomActions(actionConfigs);
125125

126+
console.log("Creating runtime for character", character.name);
127+
126128
return new AgentRuntime({
127129
databaseAdapter: db,
128130
token,

core/src/clients/discord/actions/summarize_conversation.ts

+5-1
Original file line numberDiff line numberDiff line change
@@ -145,7 +145,11 @@ const summarizeAction = {
145145
"CONVERSATION_SUMMARY",
146146
],
147147
description: "Summarizes the conversation and attachments.",
148-
validate: async (runtime: IAgentRuntime, message: Memory, _state: State) => {
148+
validate: async (
149+
runtime: IAgentRuntime,
150+
message: Memory,
151+
_state: State
152+
) => {
149153
if (message.content.source !== "discord") {
150154
return false;
151155
}

core/src/clients/discord/messages.ts

+11-16
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
1-
import { ChannelType, Client, Message as DiscordMessage, TextChannel } from "discord.js";
1+
import {
2+
ChannelType,
3+
Client,
4+
Message as DiscordMessage,
5+
TextChannel,
6+
} from "discord.js";
27
import { composeContext } from "../../core/context.ts";
38
import {
49
generateMessageResponse,
@@ -153,17 +158,6 @@ export class MessageManager {
153158

154159
const messageId = stringToUuid(message.id);
155160

156-
// Check if the message already exists in the cache or database
157-
const existingMessage =
158-
await this.runtime.messageManager.getMemoryById(messageId);
159-
160-
if (existingMessage) {
161-
// If the message content is the same, return early
162-
if (existingMessage.content.text === message.content) {
163-
return;
164-
}
165-
}
166-
167161
let shouldIgnore = false;
168162
let shouldRespond = true;
169163

@@ -271,10 +265,11 @@ export class MessageManager {
271265
}
272266
if (message.channel.type === ChannelType.GuildVoice) {
273267
// For voice channels, use text-to-speech
274-
const audioStream = await this.runtime.speechService.generate(
275-
this.runtime,
276-
content.text
277-
);
268+
const audioStream =
269+
await this.runtime.speechService.generate(
270+
this.runtime,
271+
content.text
272+
);
278273
await this.voiceManager.playAudioStream(
279274
userId,
280275
audioStream

core/src/clients/twitter/base.ts

+5-2
Original file line numberDiff line numberDiff line change
@@ -404,7 +404,8 @@ export class ClientBase extends EventEmitter {
404404
// Save the missing tweets as memories
405405
for (const tweet of tweetsToSave) {
406406
const roomId = stringToUuid(
407-
tweet.conversationId ?? "default-room-" + this.runtime.agentId
407+
tweet.conversationId ??
408+
"default-room-" + this.runtime.agentId
408409
);
409410
const tweetuserId =
410411
tweet.userId === this.twitterUserId
@@ -508,7 +509,9 @@ export class ClientBase extends EventEmitter {
508509

509510
// Save the new tweets as memories
510511
for (const tweet of tweetsToSave) {
511-
const roomId = stringToUuid(tweet.conversationId ?? "default-room-" + this.runtime.agentId);
512+
const roomId = stringToUuid(
513+
tweet.conversationId ?? "default-room-" + this.runtime.agentId
514+
);
512515
const tweetuserId =
513516
tweet.userId === this.twitterUserId
514517
? this.runtime.agentId

core/src/core/generation.ts

+60-64
Original file line numberDiff line numberDiff line change
@@ -68,64 +68,61 @@ export async function generateText({
6868

6969
switch (provider) {
7070
case ModelProvider.OPENAI:
71-
case ModelProvider.LLAMACLOUD:
72-
{
73-
console.log("Initializing OpenAI model.");
74-
const openai = createOpenAI({ apiKey });
75-
76-
const { text: openaiResponse } = await aiGenerateText({
77-
model: openai.languageModel(model),
78-
prompt: context,
79-
temperature: temperature,
80-
maxTokens: max_response_length,
81-
frequencyPenalty: frequency_penalty,
82-
presencePenalty: presence_penalty,
83-
});
84-
85-
response = openaiResponse;
86-
console.log("Received response from OpenAI model.");
87-
break;
88-
}
89-
90-
case ModelProvider.ANTHROPIC:
91-
{
92-
console.log("Initializing Anthropic model.");
93-
const anthropicVertex = createAnthropicVertex();
94-
95-
const { text: anthropicResponse } = await aiGenerateText({
96-
model: anthropicVertex(model),
97-
prompt: context,
98-
temperature: temperature,
99-
maxTokens: max_response_length,
100-
frequencyPenalty: frequency_penalty,
101-
presencePenalty: presence_penalty,
102-
});
103-
104-
response = anthropicResponse;
105-
console.log("Received response from Anthropic model.");
106-
break;
107-
}
108-
109-
case ModelProvider.GROK:
110-
{
111-
console.log("Initializing Grok model.");
112-
const grok = createGroq({ apiKey });
113-
114-
const { text: grokResponse } = await aiGenerateText({
115-
model: grok.languageModel(model, {
116-
parallelToolCalls: false,
117-
}),
118-
prompt: context,
119-
temperature: temperature,
120-
maxTokens: max_response_length,
121-
frequencyPenalty: frequency_penalty,
122-
presencePenalty: presence_penalty,
123-
});
124-
125-
response = grokResponse;
126-
console.log("Received response from Grok model.");
127-
break;
128-
}
71+
case ModelProvider.LLAMACLOUD: {
72+
console.log("Initializing OpenAI model.");
73+
const openai = createOpenAI({ apiKey });
74+
75+
const { text: openaiResponse } = await aiGenerateText({
76+
model: openai.languageModel(model),
77+
prompt: context,
78+
temperature: temperature,
79+
maxTokens: max_response_length,
80+
frequencyPenalty: frequency_penalty,
81+
presencePenalty: presence_penalty,
82+
});
83+
84+
response = openaiResponse;
85+
console.log("Received response from OpenAI model.");
86+
break;
87+
}
88+
89+
case ModelProvider.ANTHROPIC: {
90+
console.log("Initializing Anthropic model.");
91+
const anthropicVertex = createAnthropicVertex();
92+
93+
const { text: anthropicResponse } = await aiGenerateText({
94+
model: anthropicVertex(model),
95+
prompt: context,
96+
temperature: temperature,
97+
maxTokens: max_response_length,
98+
frequencyPenalty: frequency_penalty,
99+
presencePenalty: presence_penalty,
100+
});
101+
102+
response = anthropicResponse;
103+
console.log("Received response from Anthropic model.");
104+
break;
105+
}
106+
107+
case ModelProvider.GROK: {
108+
console.log("Initializing Grok model.");
109+
const grok = createGroq({ apiKey });
110+
111+
const { text: grokResponse } = await aiGenerateText({
112+
model: grok.languageModel(model, {
113+
parallelToolCalls: false,
114+
}),
115+
prompt: context,
116+
temperature: temperature,
117+
maxTokens: max_response_length,
118+
frequencyPenalty: frequency_penalty,
119+
presencePenalty: presence_penalty,
120+
});
121+
122+
response = grokResponse;
123+
console.log("Received response from Grok model.");
124+
break;
125+
}
129126

130127
case ModelProvider.LLAMALOCAL:
131128
console.log("Using local Llama model for text completion.");
@@ -140,12 +137,11 @@ export async function generateText({
140137
console.log("Received response from local Llama model.");
141138
break;
142139

143-
default:
144-
{
145-
const errorMessage = `Unsupported provider: ${provider}`;
146-
console.error(errorMessage);
147-
throw new Error(errorMessage);
148-
}
140+
default: {
141+
const errorMessage = `Unsupported provider: ${provider}`;
142+
console.error(errorMessage);
143+
throw new Error(errorMessage);
144+
}
149145
}
150146

151147
return response;

core/src/core/models.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ const models: Models = {
2525
model: {
2626
[ModelClass.SMALL]: "gpt-4o-mini",
2727
[ModelClass.MEDIUM]: "gpt-4o",
28-
[ModelClass.LARGE]: "gpt-4-turbo",
28+
[ModelClass.LARGE]: "gpt-4o",
2929
[ModelClass.EMBEDDING]: "text-embedding-3-small",
3030
},
3131
},

core/src/core/runtime.ts

+5-2
Original file line numberDiff line numberDiff line change
@@ -273,9 +273,12 @@ export class AgentRuntime implements IAgentRuntime {
273273
this.registerContextProvider(provider);
274274
});
275275

276-
if (!this.getSetting("OPENAI_API_KEY") && !this.llamaService) {
276+
if (
277+
this.modelProvider === ModelProvider.LLAMALOCAL &&
278+
!this.llamaService
279+
) {
277280
console.log(
278-
"No OpenAI key found, using LlamaLocal for agent",
281+
"Initializing LlamaLocal service for agent",
279282
this.agentId,
280283
this.character.name
281284
);

core/src/services/speech.ts

+1-4
Original file line numberDiff line numberDiff line change
@@ -107,10 +107,7 @@ async function textToSpeech(runtime: IAgentRuntime, text: string) {
107107
}
108108

109109
export class SpeechService implements ISpeechService {
110-
async generate(
111-
runtime: IAgentRuntime,
112-
text: string
113-
): Promise<Readable> {
110+
async generate(runtime: IAgentRuntime, text: string): Promise<Readable> {
114111
// check for elevenlabs API key
115112
if (runtime.getSetting("ELEVENLABS_XI_API_KEY")) {
116113
return textToSpeech(runtime, text);

core/tsconfig.json

-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
"target": "es2022",
44
"module": "es2022",
55
"lib": ["es2023", "dom"],
6-
"types": ["node"],
76
"moduleResolution": "bundler",
87
"outDir": "./dist",
98
"rootDir": "./src",

0 commit comments

Comments (0)