
Commit b2a947b

Merge pull request #1 from yodamaster726/ollama-fix
Ollama fix
2 parents: ea52d23 + 644ebb2

File tree
  • packages/plugin-node/src/services/llama.ts

1 file changed: +26 −3 lines
packages/plugin-node/src/services/llama.ts (+26 −3)
@@ -671,9 +671,32 @@ export class LlamaService extends Service {
             throw new Error("Sequence not initialized");
         }
 
-        const embeddingContext = await this.model.createEmbeddingContext();
-        const embedding = await embeddingContext.getEmbeddingFor(input);
-        return embedding?.vector ? [...embedding.vector] : undefined;
+        const ollamaModel = process.env.OLLAMA_MODEL;
+        const ollamaUrl =
+            process.env.OLLAMA_SERVER_URL || "http://localhost:11434";
+        const embeddingModel =
+            process.env.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large";
+        elizaLogger.info(
+            `Using Ollama API for embeddings with model ${embeddingModel} (base: ${ollamaModel})`
+        );
+
+        const response = await fetch(`${ollamaUrl}/api/embeddings`, {
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json",
+            },
+            body: JSON.stringify({
+                input: input,
+                model: embeddingModel,
+            }),
+        });
+
+        if (!response.ok) {
+            throw new Error(`Failed to get embedding: ${response.statusText}`);
+        }
+
+        const embedding = await response.json();
+        return embedding.vector;
     }
 
     private async ollamaCompletion(prompt: string): Promise<string> {
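
For reference, the request the new code issues can be exercised on its own. The sketch below is not part of the commit: it reuses the env-var names, default URL, default model, and request body shown in the diff above, and simply prints whatever JSON the local Ollama server returns (the patched code expects a vector field on that response). The helper name fetchOllamaEmbedding is made up for illustration; it assumes Node 18+ with a global fetch.

// Standalone sketch, not part of the commit: send the same request the
// patched embedding path sends and inspect the raw response from Ollama.
// Env-var names and defaults mirror the diff above; fetchOllamaEmbedding
// is a hypothetical helper name.
const ollamaUrl = process.env.OLLAMA_SERVER_URL || "http://localhost:11434";
const embeddingModel = process.env.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large";

async function fetchOllamaEmbedding(input: string): Promise<unknown> {
    const response = await fetch(`${ollamaUrl}/api/embeddings`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ input: input, model: embeddingModel }),
    });
    if (!response.ok) {
        throw new Error(`Failed to get embedding: ${response.statusText}`);
    }
    return response.json();
}

// Usage: requires a running Ollama server with the embedding model pulled.
fetchOllamaEmbedding("hello world").then((body) =>
    console.log(JSON.stringify(body, null, 2))
);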
