Skip to content

Commit f061ad0

Browse files
committed Jan 12, 2025
improvement: using strict types to avoid errors like issue 2164
1 parent 9777ad9 commit f061ad0

File tree

1 file changed

+10
-8
lines changed

1 file changed

+10
-8
lines changed
 

‎packages/core/src/generation.ts

+10-8
Original file line numberDiff line numberDiff line change
@@ -451,20 +451,23 @@ export async function generateText({
451451
const openai = createOpenAI({
452452
apiKey,
453453
baseURL: endpoint,
454-
fetch: async (url: string, options: any) => {
454+
fetch: async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
455+
const url = typeof input === 'string' ? input : input.toString();
455456
const chain_id =
456457
runtime.getSetting("ETERNALAI_CHAIN_ID") || "45762";
458+
459+
const options: RequestInit = { ...init };
457460
if (options?.body) {
458-
const body = JSON.parse(options.body);
461+
const body = JSON.parse(options.body as string);
459462
body.chain_id = chain_id;
460463
options.body = JSON.stringify(body);
461464
}
465+
462466
const fetching = await runtime.fetch(url, options);
463-
if (
464-
parseBooleanFromText(
465-
runtime.getSetting("ETERNALAI_LOG")
466-
)
467-
) {
467+
468+
if (parseBooleanFromText(
469+
runtime.getSetting("ETERNALAI_LOG")
470+
)) {
468471
elizaLogger.info(
469472
"Request data: ",
470473
JSON.stringify(options, null, 2)
@@ -1102,7 +1105,6 @@ export async function splitChunks(
11021105
* @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0)
11031106
* @param opts.temperature The temperature to control randomness (0.0 to 2.0)
11041107
* @param opts.serverUrl The URL of the API server
1105-
* @param opts.token The API token for authentication
11061108
* @param opts.max_context_length Maximum allowed context length in tokens
11071109
* @param opts.max_response_length Maximum allowed response length in tokens
11081110
* @returns Promise resolving to a boolean value parsed from the model's response

0 commit comments

Comments (0)