Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: ollama local and llama local #521

Merged
merged 13 commits into from
Nov 23, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -47,7 +47,8 @@
"dependencies": {
"ollama-ai-provider": "^0.16.1",
"optional": "^0.1.4",
"sharp": "^0.33.5"
"sharp": "^0.33.5",
"tslog": "^4.9.3"
},
"packageManager": "pnpm@9.12.3+sha512.cce0f9de9c5a7c95bef944169cc5dfe8741abfb145078c0d508b868056848a87c81e626246cb60967cbd7fd29a6c062ef73ff840d96b3c86c40ac92cf4a813ee"
}
2 changes: 1 addition & 1 deletion packages/core/src/defaultCharacter.ts
Original file line number Diff line number Diff line change
@@ -5,7 +5,7 @@ export const defaultCharacter: Character = {
username: "eliza",
plugins: [],
clients: [],
modelProvider: ModelProviderName.OPENAI,
modelProvider: ModelProviderName.OLLAMA,
settings: {
secrets: {},
voice: {
6 changes: 4 additions & 2 deletions packages/core/src/embedding.ts
Original file line number Diff line number Diff line change
@@ -86,8 +86,10 @@ export async function embed(runtime: IAgentRuntime, input: string) {
// 3. Fallback to OpenAI embedding model
const embeddingModel = settings.USE_OPENAI_EMBEDDING
? "text-embedding-3-small"
: modelProvider.model?.[ModelClass.EMBEDDING] ||
models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING];
: runtime.character.modelProvider === ModelProviderName.OLLAMA
? settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large"
: modelProvider.model?.[ModelClass.EMBEDDING] ||
models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING];

if (!embeddingModel) {
throw new Error("No embedding model configured");
11 changes: 9 additions & 2 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
@@ -62,7 +62,12 @@ export async function generateText({
return "";
}

elizaLogger.log("Genarating text...");
elizaLogger.log("Generating text...");

elizaLogger.info("Generating text with options:", {
modelProvider: runtime.modelProvider,
model: modelClass,
});

const provider = runtime.modelProvider;
const endpoint =
@@ -84,6 +89,8 @@ export async function generateText({
model = runtime.getSetting("LLAMACLOUD_MODEL_SMALL");
}

elizaLogger.info("Selected model:", model);

const temperature = models[provider].settings.temperature;
const frequency_penalty = models[provider].settings.frequency_penalty;
const presence_penalty = models[provider].settings.presence_penalty;
@@ -709,7 +716,7 @@ export async function generateMessageResponse({
let retryLength = 1000; // exponential backoff
while (true) {
try {
elizaLogger.log("Genarating message response..");
elizaLogger.log("Generating message response..");

const response = await generateText({
runtime,
45 changes: 34 additions & 11 deletions packages/core/src/logger.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,11 @@
class ElizaLogger {
import settings from "./settings.ts";
import { Logger, ILogObjMeta, ILogObj } from "tslog";

interface IElizaLogger extends Logger<IElizaLogger> {
progress(message: string): void;
}

class ElizaLogger implements IElizaLogger {
constructor() {
// Check if we're in Node.js environment
this.isNode =
@@ -7,7 +14,7 @@ class ElizaLogger {
process.versions.node != null;

// Set verbose based on environment
this.verbose = this.isNode ? process.env.verbose === "true" : false;
this.verbose = this.isNode ? settings.VERBOSE === "true" : false;
}

private isNode: boolean;
@@ -173,6 +180,7 @@ class ElizaLogger {
}
}

// @ts-ignore - custom implementation
log(...strings) {
this.#logWithStyle(strings, {
fg: "white",
@@ -182,6 +190,7 @@ class ElizaLogger {
});
}

// @ts-ignore - custom implementation
warn(...strings) {
this.#logWithStyle(strings, {
fg: "yellow",
@@ -191,6 +200,7 @@ class ElizaLogger {
});
}

// @ts-ignore - custom implementation
error(...strings) {
this.#logWithStyle(strings, {
fg: "red",
@@ -200,6 +210,7 @@ class ElizaLogger {
});
}

// @ts-ignore - custom implementation
info(...strings) {
this.#logWithStyle(strings, {
fg: "blue",
@@ -209,15 +220,7 @@ class ElizaLogger {
});
}

success(...strings) {
this.#logWithStyle(strings, {
fg: "green",
bg: "",
icon: "\u2713",
groupTitle: ` ${this.successesTitle}`,
});
}

// @ts-ignore - custom implementation
debug(...strings) {
if (!this.verbose) return;
this.#logWithStyle(strings, {
@@ -228,6 +231,15 @@ class ElizaLogger {
});
}

// Logs a success message: green text with a ✓ check-mark icon,
// grouped under the logger's configured successes title.
// Accepts any number of values, forwarded to the shared styled-log helper.
success(...strings) {
this.#logWithStyle(strings, {
fg: "green",
bg: "",
icon: "\u2713",
groupTitle: ` ${this.successesTitle}`,
});
}

assert(...strings) {
this.#logWithStyle(strings, {
fg: "cyan",
@@ -236,6 +248,17 @@ class ElizaLogger {
groupTitle: ` ${this.assertsTitle}`,
});
}

// Renders an in-place progress line (e.g. for download percentage updates).
// In Node, it overwrites the current terminal line via clearLine + cursorTo(0)
// so repeated calls animate on one line instead of scrolling; note the message
// is written without a trailing newline, so a later log call is expected to
// move past it. Outside Node (browser), it falls back to console.log since
// stdout cursor control is unavailable.
// NOTE(review): process.stdout.clearLine/cursorTo only exist when stdout is a
// TTY — presumably fine for interactive use, but may throw under piped
// output; verify against non-TTY environments.
progress(message: string) {
if (this.isNode) {
// Clear the current line and move cursor to beginning
process.stdout.clearLine(0);
process.stdout.cursorTo(0);
process.stdout.write(message);
} else {
console.log(message);
}
}
}

export const elizaLogger = new ElizaLogger();
30 changes: 30 additions & 0 deletions packages/core/src/runtime.ts
Original file line number Diff line number Diff line change
@@ -176,7 +176,9 @@ export class AgentRuntime implements IAgentRuntime {
return;
}

// Add the service to the services map
this.services.set(serviceType, service);
elizaLogger.success(`Service ${serviceType} registered successfully`);
}

/**
@@ -217,6 +219,12 @@ export class AgentRuntime implements IAgentRuntime {
cacheManager: ICacheManager;
logging?: boolean;
}) {
elizaLogger.info("Initializing AgentRuntime with options:", {
character: opts.character?.name,
modelProvider: opts.modelProvider,
characterModelProvider: opts.character?.modelProvider,
});

this.#conversationLength =
opts.conversationLength ?? this.#conversationLength;
this.databaseAdapter = opts.databaseAdapter;
@@ -280,10 +288,32 @@ export class AgentRuntime implements IAgentRuntime {
});

this.serverUrl = opts.serverUrl ?? this.serverUrl;

elizaLogger.info("Setting model provider...");
elizaLogger.info(
"- Character model provider:",
this.character.modelProvider
);
elizaLogger.info("- Opts model provider:", opts.modelProvider);
elizaLogger.info("- Current model provider:", this.modelProvider);

this.modelProvider =
this.character.modelProvider ??
opts.modelProvider ??
this.modelProvider;

elizaLogger.info("Selected model provider:", this.modelProvider);

// Validate model provider
if (!Object.values(ModelProviderName).includes(this.modelProvider)) {
elizaLogger.error("Invalid model provider:", this.modelProvider);
elizaLogger.error(
"Available providers:",
Object.values(ModelProviderName)
);
throw new Error(`Invalid model provider: ${this.modelProvider}`);
}

if (!this.serverUrl) {
elizaLogger.warn("No serverUrl provided, defaulting to localhost");
}
17 changes: 14 additions & 3 deletions packages/plugin-node/src/services/image.ts
Original file line number Diff line number Diff line change
@@ -63,16 +63,23 @@ export class ImageDescriptionService
env.backends.onnx.wasm.proxy = false;
env.backends.onnx.wasm.numThreads = 1;

elizaLogger.log("Downloading Florence model...");
elizaLogger.info("Downloading Florence model...");

this.model = await Florence2ForConditionalGeneration.from_pretrained(
this.modelId,
{
device: "gpu",
progress_callback: (progress) => {
if (progress.status === "downloading") {
elizaLogger.log(
`Model download progress: ${JSON.stringify(progress)}`
const percent = (
(progress.loaded / progress.total) *
100
).toFixed(1);
const dots = ".".repeat(
Math.floor(Number(percent) / 5)
);
elizaLogger.info(
`Downloading Florence model: [${dots.padEnd(20, " ")}] ${percent}%`
);
}
},
@@ -81,10 +88,14 @@ export class ImageDescriptionService

elizaLogger.success("Florence model downloaded successfully");

elizaLogger.info("Downloading processor...");
this.processor = (await AutoProcessor.from_pretrained(
this.modelId
)) as Florence2Processor;

elizaLogger.info("Downloading tokenizer...");
this.tokenizer = await AutoTokenizer.from_pretrained(this.modelId);
elizaLogger.success("Image service initialization complete");
}

async describeImage(
423 changes: 377 additions & 46 deletions packages/plugin-node/src/services/llama.ts

Large diffs are not rendered by default.

9 changes: 9 additions & 0 deletions pnpm-lock.yaml