Commit a246942

Merge pull request elizaOS#2093 from tsubasakong/develop
feat: add Heurist embedding model
2 parents e0396cf + 0491451 commit a246942

2 files changed: +31 -5 lines changed

packages/core/src/embedding.ts (+25 -3)

@@ -18,6 +18,7 @@ export const EmbeddingProvider = {
     OpenAI: "OpenAI",
     Ollama: "Ollama",
     GaiaNet: "GaiaNet",
+    Heurist: "Heurist",
     BGE: "BGE",
 } as const;

@@ -39,23 +40,30 @@ export const getEmbeddingConfig = (): EmbeddingConfig => ({
               : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true"
                 ? getEmbeddingModelSettings(ModelProviderName.GAIANET)
                       .dimensions
-                : 384, // BGE
+                : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true"
+                  ? getEmbeddingModelSettings(ModelProviderName.HEURIST)
+                        .dimensions
+                  : 384, // BGE
     model:
         settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true"
             ? getEmbeddingModelSettings(ModelProviderName.OPENAI).name
             : settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true"
               ? getEmbeddingModelSettings(ModelProviderName.OLLAMA).name
               : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true"
                 ? getEmbeddingModelSettings(ModelProviderName.GAIANET).name
-                : "BGE-small-en-v1.5",
+                : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true"
+                  ? getEmbeddingModelSettings(ModelProviderName.HEURIST).name
+                  : "BGE-small-en-v1.5",
     provider:
         settings.USE_OPENAI_EMBEDDING?.toLowerCase() === "true"
             ? "OpenAI"
             : settings.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true"
               ? "Ollama"
               : settings.USE_GAIANET_EMBEDDING?.toLowerCase() === "true"
                 ? "GaiaNet"
-                : "BGE",
+                : settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true"
+                  ? "Heurist"
+                  : "BGE",
 });

 async function getRemoteEmbedding(

@@ -126,6 +134,7 @@ export function getEmbeddingType(runtime: IAgentRuntime): "local" | "remote" {
         isNode &&
         runtime.character.modelProvider !== ModelProviderName.OPENAI &&
         runtime.character.modelProvider !== ModelProviderName.GAIANET &&
+        runtime.character.modelProvider !== ModelProviderName.HEURIST &&
         !settings.USE_OPENAI_EMBEDDING;

     return isLocal ? "local" : "remote";

@@ -146,6 +155,10 @@ export function getEmbeddingZeroVector(): number[] {
         embeddingDimension = getEmbeddingModelSettings(
             ModelProviderName.GAIANET
         ).dimensions; // GaiaNet dimension
+    } else if (settings.USE_HEURIST_EMBEDDING?.toLowerCase() === "true") {
+        embeddingDimension = getEmbeddingModelSettings(
+            ModelProviderName.HEURIST
+        ).dimensions; // Heurist dimension
     }

     return Array(embeddingDimension).fill(0);

@@ -229,6 +242,15 @@ export async function embed(runtime: IAgentRuntime, input: string) {
         });
     }

+    if (config.provider === EmbeddingProvider.Heurist) {
+        return await getRemoteEmbedding(input, {
+            model: config.model,
+            endpoint: getEndpoint(ModelProviderName.HEURIST),
+            apiKey: runtime.token,
+            dimensions: config.dimensions,
+        });
+    }
+
     // BGE - try local first if in Node
     if (isNode) {
         try {
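
To make the wiring easier to follow: the commit threads one more branch through the existing USE_*_EMBEDDING cascade, so Heurist is checked after GaiaNet and local BGE remains the final fallback. Below is a minimal, standalone TypeScript sketch of that resolution order; the function and type names are illustrative rather than actual exports of packages/core, and only the flag names, provider labels, and the 384-dimension BGE default are taken from the diff.

// Standalone sketch (not library code) of how the cascading USE_*_EMBEDDING
// flags resolve to an embedding provider after this commit.

type EmbeddingProviderName = "OpenAI" | "Ollama" | "GaiaNet" | "Heurist" | "BGE";

interface EmbeddingFlags {
    USE_OPENAI_EMBEDDING?: string;
    USE_OLLAMA_EMBEDDING?: string;
    USE_GAIANET_EMBEDDING?: string;
    USE_HEURIST_EMBEDDING?: string;
}

function resolveEmbeddingProvider(flags: EmbeddingFlags): EmbeddingProviderName {
    // Flags are env-style strings, so compare case-insensitively against "true",
    // mirroring the ternary chain in getEmbeddingConfig().
    if (flags.USE_OPENAI_EMBEDDING?.toLowerCase() === "true") return "OpenAI";
    if (flags.USE_OLLAMA_EMBEDDING?.toLowerCase() === "true") return "Ollama";
    if (flags.USE_GAIANET_EMBEDDING?.toLowerCase() === "true") return "GaiaNet";
    if (flags.USE_HEURIST_EMBEDDING?.toLowerCase() === "true") return "Heurist";
    return "BGE"; // local 384-dimension BGE-small-en-v1.5 fallback
}

// Example: only the Heurist flag is set, so the remote Heurist path is chosen.
console.log(resolveEmbeddingProvider({ USE_HEURIST_EMBEDDING: "true" })); // "Heurist"

When the Heurist branch wins, embed() takes the new EmbeddingProvider.Heurist block above and calls getRemoteEmbedding() with getEndpoint(ModelProviderName.HEURIST) as the endpoint and runtime.token as the API key.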

packages/core/src/models.ts (+6 -2)

@@ -545,17 +545,21 @@ export const models: Models = {
             [ModelClass.LARGE]: {
                 name:
                     settings.LARGE_HEURIST_MODEL ||
-                    "meta-llama/llama-3.1-405b-instruct",
+                    "meta-llama/llama-3.3-70b-instruct",
                 stop: [],
                 maxInputTokens: 128000,
                 maxOutputTokens: 8192,
                 repetition_penalty: 0.4,
                 temperature: 0.7,
             },
             [ModelClass.IMAGE]: {
-                name: settings.HEURIST_IMAGE_MODEL || "PepeXL",
+                name: settings.HEURIST_IMAGE_MODEL || "FLUX.1-dev",
                 steps: 20,
             },
+            [ModelClass.EMBEDDING]: {
+                name: "BAAI/bge-large-en-v1.5",
+                dimensions: 1024,
+            },
         },
     },
     [ModelProviderName.GALADRIEL]: {
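
The new [ModelClass.EMBEDDING] entry is what getEmbeddingModelSettings(ModelProviderName.HEURIST) hands back to embedding.ts, which is how both config.dimensions and the zero vector end up at 1024 instead of the 384-dimension BGE default. A rough sketch of that shape and the sizing it drives (the EmbeddingModelEntry interface name is an assumption; the model name and dimension count come from the diff):

// Hypothetical shape for the new Heurist embedding entry; only the values
// ("BAAI/bge-large-en-v1.5", 1024) come from the commit.

interface EmbeddingModelEntry {
    name: string;
    dimensions: number;
}

const heuristEmbedding: EmbeddingModelEntry = {
    name: "BAAI/bge-large-en-v1.5",
    dimensions: 1024,
};

// With USE_HEURIST_EMBEDDING enabled, getEmbeddingZeroVector() sizes its
// placeholder vector from these dimensions rather than the 384-dim default.
const zeroVector: number[] = Array(heuristEmbedding.dimensions).fill(0);
console.log(zeroVector.length); // 1024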
