chore: rearchitect models to account for whether a GPU is present #1257

Closed · wants to merge 17 commits
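The models.ts rearchitecture itself is not part of this excerpt, but the call sites below show the new contract: the static models map export is replaced by an async getModelProviderData(provider) lookup, so provider settings can be resolved per machine (folding in whether a GPU is present) rather than fixed at module load. A minimal sketch of such a lookup, assuming it probes the GPU through the systeminformation dependency this PR adds; the actual implementation in packages/core/src/models.ts may differ:

import si from "systeminformation";
import type { ModelProviderName } from "@ai16z/eliza";

// Hypothetical sketch only; the real implementation lives in
// packages/core/src/models.ts and is not shown in this diff.
declare const staticModels: Record<
    ModelProviderName,
    { settings: Record<string, unknown>; endpoint?: string }
>;

let gpuProbe: Promise<boolean> | undefined;

function detectGpu(): Promise<boolean> {
    // Probe the hardware once and cache the promise; si.graphics()
    // enumerates GPU controllers with vendor/model/vram fields.
    gpuProbe ??= si
        .graphics()
        .then(({ controllers }) => controllers.some((c) => (c.vram ?? 0) > 0))
        .catch(() => false); // headless or unsupported platforms
    return gpuProbe;
}

export async function getModelProviderData(provider: ModelProviderName) {
    const base = staticModels[provider]; // the old per-provider table
    const gpu = await detectGpu();
    // Settings are now computed at request time, so they can branch on
    // GPU availability instead of being baked in at import.
    return { ...base, gpu };
}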
6 changes: 3 additions & 3 deletions packages/client-discord/src/actions/chat_with_attachments.ts
@@ -1,6 +1,6 @@
import { composeContext } from "@ai16z/eliza";
import { generateText, trimTokens } from "@ai16z/eliza";
-import { models } from "@ai16z/eliza";
+import { getModelProviderData } from "@ai16z/eliza";
import { parseJSONObjectFromText } from "@ai16z/eliza";
import {
Action,
@@ -22,7 +22,7 @@ Summarization objective: {{objective}}

# Instructions: Summarize the attachments. Return the summary. Do not acknowledge this request, just summarize and continue the existing summary if there is one. Capture any important details based on the objective. Only respond with the new summary text.`;

-export const attachmentIdsTemplate = `# Messages we are summarizing
+export const attachmentIdsTemplate = `# Messages we are summarizing
{{recentMessages}}

# Instructions: {{senderName}} is requesting a summary of specific attachments. Your goal is to determine their objective, along with the list of attachment IDs to summarize.
@@ -183,7 +183,7 @@ const summarizeAction = {

let currentSummary = "";

-const model = models[runtime.character.modelProvider];
+const model = await getModelProviderData(runtime.character.modelProvider);
const chunkSize = model.settings.maxOutputTokens;

state.attachmentsWithText = attachmentsWithText;
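The same two-line migration repeats in every client below: the synchronous map index becomes an awaited call (each of these handlers is already async, so nothing else changes), and the resolved settings are read exactly as before. The pattern, extracted from the hunk above:

// before
const model = models[runtime.character.modelProvider];
// after
const model = await getModelProviderData(runtime.character.modelProvider);
const chunkSize = model.settings.maxOutputTokens;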
5 changes: 3 additions & 2 deletions packages/client-discord/src/actions/summarize_conversation.ts
@@ -1,7 +1,7 @@
import { composeContext } from "@ai16z/eliza";
import { generateText, splitChunks, trimTokens } from "@ai16z/eliza";
import { getActorDetails } from "@ai16z/eliza";
-import { models } from "@ai16z/eliza";
+import { getModelProviderData } from "@ai16z/eliza";
import { parseJSONObjectFromText } from "@ai16z/eliza";
import {
Action,
@@ -247,7 +247,7 @@ const summarizeAction = {

let currentSummary = "";

-const model = models[runtime.character.settings.model];
+
+const model = await getModelProviderData(runtime.character.settings.model);
const chunkSize = model.settings.maxContextLength - 1000;

const chunks = await splitChunks(formattedMemories, chunkSize, 0);
4 changes: 2 additions & 2 deletions packages/client-slack/src/actions/chat_with_attachments.ts
@@ -4,7 +4,7 @@ import {
trimTokens,
parseJSONObjectFromText,
} from "@ai16z/eliza";
-import { models } from "@ai16z/eliza";
+import { getModelProviderData } from "@ai16z/eliza";
import {
Action,
ActionExample,
@@ -194,7 +194,7 @@ const summarizeAction: Action = {

let currentSummary = "";

-const model = models[runtime.character.modelProvider];
+const model = await getModelProviderData(runtime.character.modelProvider);
const chunkSize = model.settings.maxOutputTokens;

currentState.attachmentsWithText = attachmentsWithText;
4 changes: 2 additions & 2 deletions packages/client-slack/src/actions/summarize_conversation.ts
@@ -5,7 +5,7 @@ import {
trimTokens,
parseJSONObjectFromText,
} from "@ai16z/eliza";
-import { models } from "@ai16z/eliza";
+import { getModelProviderData } from "@ai16z/eliza";
import { getActorDetails } from "@ai16z/eliza";
import {
Action,
@@ -265,7 +265,7 @@ const summarizeAction: Action = {

let currentSummary = "";

-const model = models[runtime.character.modelProvider];
+const model = await getModelProviderData(runtime.character.modelProvider);
const chunkSize = model.settings.maxOutputTokens;

const chunks = await splitChunks(formattedMemories, chunkSize, 0);
17 changes: 8 additions & 9 deletions packages/client-telegram/src/messageManager.ts
@@ -333,7 +333,6 @@ export class MessageManager {
return isReplyToBot || isMentioned || (!this.runtime.character.clientConfig?.telegram?.shouldRespondOnlyToMentions && hasUsername);
}


private _checkInterest(chatId: string): boolean {
const chatState = this.interestChats[chatId];
if (!chatState) return false;
@@ -633,13 +632,13 @@
}
);
} else {
-// Handle local file paths
+// Handle local file paths
if (!fs.existsSync(imagePath)) {
throw new Error(`File not found: ${imagePath}`);
}

const fileStream = fs.createReadStream(imagePath);

await ctx.telegram.sendPhoto(
ctx.chat.id,
{
@@ -650,7 +649,7 @@
}
);
}

elizaLogger.info(`Image sent successfully: ${imagePath}`);
} catch (error) {
elizaLogger.error("Error sending image:", error);
@@ -968,7 +967,7 @@
for (let i = 0; i < sentMessages.length; i++) {
const sentMessage = sentMessages[i];
const isLastMessage = i === sentMessages.length - 1;

const memory: Memory = {
id: stringToUuid(
sentMessage.message_id.toString() +
@@ -986,17 +985,17 @@
createdAt: sentMessage.date * 1000,
embedding: getEmbeddingZeroVector(),
};

// Set action to CONTINUE for all messages except the last one
// For the last message, use the original action from the response content
memory.content.action = !isLastMessage
? "CONTINUE"
: content.action;

await this.runtime.messageManager.createMemory(memory);
memories.push(memory);
}

return memories;
}
};
17 changes: 17 additions & 0 deletions packages/client-whatsapp/eslint.config.mjs
@@ -0,0 +1,17 @@
import eslintGlobalConfig from "../../eslint.config.mjs";

export default [
{
ignores: [
"**/node_modules/*",
"**/coverage/*",
"**/dist/*",
"**/types/*",
"**/scripts/concatenated-output.ts",
"rollup.config.js",
"jest.config.js",
"docs/",
],
},
...eslintGlobalConfig,
];
10 changes: 10 additions & 0 deletions packages/client-whatsapp/tsconfig.json
@@ -0,0 +1,10 @@
{
"extends": "../core/tsconfig.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src"
},
"include": [
"src/**/*.ts"
]
}
20 changes: 20 additions & 0 deletions packages/client-whatsapp/tsup.config.ts
@@ -0,0 +1,20 @@
import { defineConfig } from "tsup";

export default defineConfig({
entry: ["src/index.ts"],
outDir: "dist",
sourcemap: true,
clean: true,
format: ["esm"], // Ensure you're targeting CommonJS
external: [
"dotenv", // Externalize dotenv to prevent bundling
"fs", // Externalize fs to use Node.js built-in module
"path", // Externalize other built-ins if necessary
"@reflink/reflink",
"@node-llama-cpp",
"https",
"http",
"agentkeepalive",
// Add other modules you want to externalize
],
});
1 change: 1 addition & 0 deletions packages/core/package.json
@@ -68,6 +68,7 @@
"js-sha1": "0.7.0",
"js-tiktoken": "1.0.15",
"langchain": "0.3.6",
"systeminformation": "5.23.5",
"ollama-ai-provider": "0.16.1",
"openai": "4.73.0",
"tinyld": "1.3.4",
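systeminformation is the only new runtime dependency and presumably backs the GPU probe behind getModelProviderData. A minimal example of its graphics API (vram is reported in MB and may be missing on headless hosts):

import si from "systeminformation";

const { controllers } = await si.graphics();
for (const c of controllers) {
    console.log(`${c.vendor} ${c.model}: ${c.vram ?? "unknown"} MB VRAM`);
}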
15 changes: 10 additions & 5 deletions packages/core/src/embedding.ts
@@ -1,5 +1,5 @@
import path from "node:path";
-import { models } from "./models.ts";
+import { getModelProviderData } from "./models.ts";
import { IAgentRuntime, ModelProviderName } from "./types.ts";
import settings from "./settings.ts";
import elizaLogger from "./logger.ts";
@@ -181,22 +181,24 @@ export async function embed(runtime: IAgentRuntime, input: string) {
}

if (config.provider === "Ollama") {
+const ollamaSettings = await getModelProviderData(ModelProviderName.OLLAMA)
return await getRemoteEmbedding(input, {
model: config.model,
endpoint:
runtime.character.modelEndpointOverride ||
-models[ModelProviderName.OLLAMA].endpoint,
+ollamaSettings.endpoint,
isOllama: true,
dimensions: config.dimensions,
});
}

if (config.provider=="GaiaNet") {
+const gaianetSettings = await getModelProviderData(ModelProviderName.GAIANET)
return await getRemoteEmbedding(input, {
model: config.model,
endpoint:
runtime.character.modelEndpointOverride ||
-models[ModelProviderName.GAIANET].endpoint ||
+gaianetSettings.endpoint ||
settings.SMALL_GAIANET_SERVER_URL ||
settings.MEDIUM_GAIANET_SERVER_URL ||
settings.LARGE_GAIANET_SERVER_URL,
@@ -218,11 +220,12 @@ export async function embed(runtime: IAgentRuntime, input: string) {
}

// Fallback to remote override
+const modelSettings = await getModelProviderData(runtime.character.modelProvider)
return await getRemoteEmbedding(input, {
model: config.model,
endpoint:
runtime.character.modelEndpointOverride ||
-models[runtime.character.modelProvider].endpoint,
+modelSettings.endpoint,
apiKey: runtime.token,
dimensions: config.dimensions,
});
@@ -304,7 +307,9 @@ export async function embed(runtime: IAgentRuntime, input: string) {
: "not an array",
sample: Array.isArray(embedding)
? embedding.slice(0, 5)
-: embedding,
+: typeof embedding === 'object' && embedding !== null ?
+Object.values(embedding).slice(0, 5)
+: embedding, // not an array or object
});

// Process the embedding into the correct format
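The widened debug preview above samples object-shaped embeddings (a Float32Array, or an index-keyed object from some providers) instead of printing them opaquely. If downstream code expects a plain number[], the same idea extends to a small normalizer; a sketch, not part of this PR:

function toNumberArray(embedding: unknown): number[] {
    if (Array.isArray(embedding)) return embedding.map(Number);
    if (embedding && typeof embedding === "object") {
        // Float32Array and {0: n, 1: n, ...} objects both flatten this way
        return Object.values(embedding).map(Number);
    }
    throw new Error(`Unexpected embedding shape: ${typeof embedding}`);
}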