
Commit d800539

feat: support fetching the on-chain system prompt for the EternalAI provider
1 parent 89b6a19 commit d800539

File tree

2 files changed: +100 -3 lines

.env.example (+5 -2)

@@ -86,8 +86,11 @@ USE_OPENAI_EMBEDDING= # Set to TRUE for OpenAI/1536, leave blank for l

 # Eternal AI's Decentralized Inference API
 ETERNALAI_URL=
-ETERNALAI_MODEL= # Default: "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16"
-ETERNALAI_CHAIN_ID=45762 #Default: "45762"
+ETERNALAI_MODEL= # Default: "neuralmagic/Meta-Llama-3.1-405B-Instruct-quantized.w4a16"
+ETERNALAI_CHAIN_ID=45762 # Default: "45762"
+ETERNALAI_RPC_URL= # Ex: https://rpc.symbiosis.eternalai.org
+ETERNALAI_AGENT_CONTRACT_ADDRESS= # Ex: 0x5799F6349D7E9DAeD0d5c7f90F5467eC929cc89e
+ETERNALAI_AGENT_ID= # Ex: 1
 ETERNALAI_API_KEY=
 ETERNALAI_LOG=false #Default: false
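
For reference, a configuration that enables the on-chain lookup might look like the following (the values are the placeholder examples from the diff, not a real deployment; all three of ETERNALAI_RPC_URL, ETERNALAI_AGENT_CONTRACT_ADDRESS, and ETERNALAI_AGENT_ID must be set or the lookup is skipped):

    ETERNALAI_RPC_URL=https://rpc.symbiosis.eternalai.org
    ETERNALAI_AGENT_CONTRACT_ADDRESS=0x5799F6349D7E9DAeD0d5c7f90F5467eC929cc89e
    ETERNALAI_AGENT_ID=1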

packages/core/src/generation.ts (+95 -1)
@@ -52,6 +52,8 @@ import {
 } from "./types.ts";
 import { fal } from "@fal-ai/client";
 import { tavily } from "@tavily/core";
+import BigNumber from "bignumber.js";
+import { createPublicClient, http } from "viem";

 type Tool = CoreTool<any, any>;
 type StepResult = AIStepResult<any>;
@@ -164,6 +166,85 @@ async function truncateTiktoken(
     }
 }

+/**
+ * Get the on-chain EternalAI system prompt
+ * @returns System Prompt
+ */
+async function getOnChainEternalAISystemPrompt(
+    runtime: IAgentRuntime
+): Promise<string | undefined> {
+    const agentId = runtime.getSetting("ETERNALAI_AGENT_ID");
+    const providerUrl = runtime.getSetting("ETERNALAI_RPC_URL");
+    const contractAddress = runtime.getSetting("ETERNALAI_AGENT_CONTRACT_ADDRESS");
+    if (agentId && providerUrl && contractAddress) {
+        // Read the agent's system prompt from the on-chain contract.
+        const contractABI = [
+            {
+                inputs: [
+                    { internalType: "uint256", name: "_agentId", type: "uint256" },
+                ],
+                name: "getAgentSystemPrompt",
+                outputs: [{ internalType: "bytes[]", name: "", type: "bytes[]" }],
+                stateMutability: "view",
+                type: "function",
+            },
+        ];
+
+        const publicClient = createPublicClient({
+            transport: http(providerUrl),
+        });
+
+        try {
+            const validAddress: `0x${string}` = contractAddress as `0x${string}`;
+            const result = await publicClient.readContract({
+                address: validAddress,
+                abi: contractABI,
+                functionName: "getAgentSystemPrompt",
+                args: [new BigNumber(agentId)],
+            });
+            if (result) {
+                elizaLogger.info("on-chain system-prompt response", result[0]);
+                // The contract returns ABI-encoded bytes; strip the 0x prefix
+                // and decode the hex payload as UTF-8.
+                const value = result[0].toString().replace("0x", "");
+                const content = Buffer.from(value, "hex").toString("utf-8");
+                elizaLogger.info("on-chain system-prompt", content);
+                return await fetchEternalAISystemPrompt(runtime, content);
+            }
+            return undefined;
+        } catch (error) {
+            elizaLogger.error("failed to read on-chain system prompt", error);
+        }
+    }
+    return undefined;
+}
+
+/**
+ * Fetch the EternalAI system prompt, resolving ipfs:// URIs if present
+ * @returns System Prompt
+ */
+async function fetchEternalAISystemPrompt(
+    runtime: IAgentRuntime,
+    content: string
+): Promise<string | undefined> {
+    const IPFS = "ipfs://";
+    if (content.includes(IPFS)) {
+        // Try the Lighthouse IPFS gateway first.
+        const lightHouse = content.replace(
+            IPFS,
+            "https://gateway.lighthouse.storage/ipfs/"
+        );
+        elizaLogger.info("fetch lightHouse", lightHouse);
+        const responseLH = await fetch(lightHouse, { method: "GET" });
+        elizaLogger.info("fetch lightHouse resp", responseLH);
+        if (responseLH.ok) {
+            return await responseLH.text();
+        }
+        // Fall back to the EternalAI CDN mirror of the same object.
+        const gcs = content.replace(IPFS, "https://cdn.eternalai.org/upload/");
+        elizaLogger.info("fetch gcs", gcs);
+        const responseGCS = await fetch(gcs, { method: "GET" });
+        elizaLogger.info("fetch gcs resp", responseGCS);
+        if (responseGCS.ok) {
+            return await responseGCS.text();
+        }
+        throw new Error("invalid on-chain system prompt");
+    }
+    // Not an IPFS URI: the on-chain bytes are the prompt itself.
+    return content;
+}
+
 /**
  * Gets the Cloudflare Gateway base URL for a specific provider if enabled
  * @param runtime The runtime environment
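
To make the decoding step above concrete: the contract hands back hex-encoded bytes, and the function strips the 0x prefix and reads the remainder as UTF-8, yielding either the prompt text itself or an ipfs:// URI for fetchEternalAISystemPrompt to resolve. A standalone round-trip sketch (the prompt string here is made up for illustration):

    // Encode a prompt the way it would sit on-chain, then decode it
    // exactly as getOnChainEternalAISystemPrompt does.
    const stored = "0x" + Buffer.from("You are a helpful agent.", "utf-8").toString("hex");
    const decoded = Buffer.from(stored.replace("0x", ""), "hex").toString("utf-8");
    console.log(decoded); // "You are a helpful agent."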
@@ -485,10 +566,23 @@ export async function generateText({
                 },
             });

+            let system_prompt =
+                runtime.character.system ?? settings.SYSTEM_PROMPT ?? undefined;
+            try {
+                const on_chain_system_prompt =
+                    await getOnChainEternalAISystemPrompt(runtime);
+                if (!on_chain_system_prompt) {
+                    elizaLogger.error(new Error("invalid on_chain_system_prompt"));
+                } else {
+                    system_prompt = on_chain_system_prompt;
+                    elizaLogger.info("new on-chain system prompt", system_prompt);
+                }
+            } catch (e) {
+                elizaLogger.error(e);
+            }
+
             const { text: openaiResponse } = await aiGenerateText({
                 model: openai.languageModel(model),
                 prompt: context,
-                system: runtime.character.system ?? settings.SYSTEM_PROMPT ?? undefined,
+                system: system_prompt,
                 temperature: temperature,
                 maxTokens: max_response_length,
                 frequencyPenalty: frequency_penalty,
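
The net effect of this hunk is a precedence order for the system prompt: a successfully fetched on-chain prompt wins, otherwise the existing character/settings fallback applies. A hypothetical helper expressing the same order (not part of the commit, shown only for clarity):

    // Resolution order introduced by this change:
    // on-chain prompt > runtime.character.system > settings.SYSTEM_PROMPT > undefined
    async function resolveSystemPrompt(runtime: IAgentRuntime): Promise<string | undefined> {
        try {
            const onChain = await getOnChainEternalAISystemPrompt(runtime);
            if (onChain) return onChain;
        } catch (e) {
            elizaLogger.error(e); // fall back to local configuration on failure
        }
        return runtime.character.system ?? settings.SYSTEM_PROMPT ?? undefined;
    }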
