
Commit 7331246

Merge branch 'develop' into feature/rag-knowledge-improvements-v2
2 parents 0b59d6d + 9cc6c51

File tree

    packages/client-github/README.md
    packages/core/src/generation.ts
    packages/core/src/runtime.ts
    packages/plugin-node/src/services/image.ts

4 files changed: +66 −15 lines

packages/client-github/README.md

-5
@@ -47,7 +47,6 @@ const client = await GitHubClientInterface.start(runtime);
 // Convert repository files to agent memories
 await client.createMemoriesFromFiles();
 
-typescript
 // Convert repository files to agent memories
 await client.createMemoriesFromFiles();
 ```
@@ -67,8 +66,6 @@ await client.createPullRequest(
     "Implements new functionality with tests"
 );
 
-
-typescript
 await client.createPullRequest(
     "Feature: Add new functionality",
     "feature/new-feature",
@@ -94,8 +91,6 @@ await client.createCommit(
         }
     ]
 );
-
-
 ```
 
 ## API Reference

packages/core/src/generation.ts

+10-8
@@ -575,20 +575,23 @@ export async function generateText({
             const openai = createOpenAI({
                 apiKey,
                 baseURL: endpoint,
-                fetch: async (url: string, options: any) => {
+                fetch: async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
+                    const url = typeof input === 'string' ? input : input.toString();
                     const chain_id =
                         runtime.getSetting("ETERNALAI_CHAIN_ID") || "45762";
+
+                    const options: RequestInit = { ...init };
                     if (options?.body) {
-                        const body = JSON.parse(options.body);
+                        const body = JSON.parse(options.body as string);
                         body.chain_id = chain_id;
                         options.body = JSON.stringify(body);
                     }
+
                     const fetching = await runtime.fetch(url, options);
-                    if (
-                        parseBooleanFromText(
-                            runtime.getSetting("ETERNALAI_LOG")
-                        )
-                    ) {
+
+                    if (parseBooleanFromText(
+                        runtime.getSetting("ETERNALAI_LOG")
+                    )) {
                         elizaLogger.info(
                             "Request data: ",
                             JSON.stringify(options, null, 2)
@@ -1272,7 +1275,6 @@ export async function splitChunks(
  * @param opts.presence_penalty The presence penalty to apply (0.0 to 2.0)
  * @param opts.temperature The temperature to control randomness (0.0 to 2.0)
  * @param opts.serverUrl The URL of the API server
- * @param opts.token The API token for authentication
  * @param opts.max_context_length Maximum allowed context length in tokens
  * @param opts.max_response_length Maximum allowed response length in tokens
  * @returns Promise resolving to a boolean value parsed from the model's response
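
The reworked fetch signature matches the WHATWG fetch type that createOpenAI expects, while still letting the wrapper inject the EternalAI chain_id into every JSON request body before forwarding it. Below is a minimal sketch of that pattern outside the eliza runtime, assuming Node 18+'s global fetch; fetchWithChainId and CHAIN_ID are illustrative names, not identifiers from the codebase:

```ts
// Sketch only: wraps fetch so every JSON body gains a chain_id field,
// mirroring the ETERNALAI fetch override in the diff above.
const CHAIN_ID = process.env.ETERNALAI_CHAIN_ID || "45762";

const fetchWithChainId = async (
    input: RequestInfo | URL,
    init?: RequestInit
): Promise<Response> => {
    const url = typeof input === "string" ? input : input.toString();
    const options: RequestInit = { ...init };

    // Rewrite the outgoing JSON body to carry the chain id.
    if (options.body) {
        const body = JSON.parse(options.body as string);
        body.chain_id = CHAIN_ID;
        options.body = JSON.stringify(body);
    }

    return fetch(url, options);
};

// Usage: pass it anywhere a fetch implementation is accepted, e.g.
// createOpenAI({ apiKey, baseURL: endpoint, fetch: fetchWithChainId }).
```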

packages/core/src/runtime.ts

+2-2
@@ -380,9 +380,9 @@ export class AgentRuntime implements IAgentRuntime {
         this.imageVisionModelProvider =
             this.character.imageVisionModelProvider ?? this.modelProvider;
 
-        elizaLogger.info("Selected model provider:", this.modelProvider);
+        // elizaLogger.info("Selected model provider:", this.modelProvider); duplicated log ln: 343
         elizaLogger.info(
-            "Selected image model provider:",
+            "Selected image vision model provider:",
             this.imageVisionModelProvider
         );

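The commented-out line drops a message already logged earlier in the file (line 343, per the inline note), and the remaining log now names the image vision provider explicitly. The value it reports comes from a nullish-coalescing fallback: a character-level imageVisionModelProvider wins, otherwise the text model provider is reused. A small sketch of that fallback, with illustrative types and values:

```ts
// Sketch only: same ?? fallback the runtime uses when picking the vision provider.
type ProviderName = "openai" | "google" | "groq";

interface CharacterLike {
    modelProvider: ProviderName;
    imageVisionModelProvider?: ProviderName;
}

function resolveVisionProvider(character: CharacterLike): ProviderName {
    return character.imageVisionModelProvider ?? character.modelProvider;
}

console.log(resolveVisionProvider({ modelProvider: "openai" })); // "openai"
console.log(
    resolveVisionProvider({
        modelProvider: "openai",
        imageVisionModelProvider: "groq",
    })
); // "groq"
```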
packages/plugin-node/src/services/image.ts

+54
@@ -189,6 +189,51 @@ class OpenAIImageProvider implements ImageProvider {
     }
 }
 
+
+class GroqImageProvider implements ImageProvider {
+    constructor(private runtime: IAgentRuntime) {}
+
+    async initialize(): Promise<void> {}
+
+    async describeImage(
+        imageData: Buffer,
+        mimeType: string
+    ): Promise<{ title: string; description: string }> {
+        const imageUrl = convertToBase64DataUrl(imageData, mimeType);
+
+        const content = [
+            { type: "text", text: IMAGE_DESCRIPTION_PROMPT },
+            { type: "image_url", image_url: { url: imageUrl } },
+        ];
+
+        const endpoint =
+            this.runtime.imageVisionModelProvider === ModelProviderName.GROQ
+                ? getEndpoint(this.runtime.imageVisionModelProvider)
+                : "https://api.groq.com/openai/v1/";
+
+        const response = await fetch(endpoint + "/chat/completions", {
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json",
+                Authorization: `Bearer ${this.runtime.getSetting("GROQ_API_KEY")}`,
+            },
+            body: JSON.stringify({
+                model: /*this.runtime.imageVisionModelName ||*/ "llama-3.2-90b-vision-preview",
+                messages: [{ role: "user", content }],
+                max_tokens: 1024,
+            }),
+        });
+
+        if (!response.ok) {
+            await handleApiError(response, "Groq");
+        }
+
+        const data = await response.json();
+        return parseImageResponse(data.choices[0].message.content);
+    }
+}
+
+
 class GoogleImageProvider implements ImageProvider {
     constructor(private runtime: IAgentRuntime) {}
 
@@ -280,6 +325,12 @@ export class ImageDescriptionService
                 ) {
                     this.provider = new OpenAIImageProvider(this.runtime);
                     elizaLogger.debug("Using openai for vision model");
+                } else if (
+                    this.runtime.imageVisionModelProvider ===
+                    ModelProviderName.GROQ
+                ) {
+                    this.provider = new GroqImageProvider(this.runtime);
+                    elizaLogger.debug("Using Groq for vision model");
                 } else {
                     elizaLogger.error(
                         `Unsupported image vision model provider: ${this.runtime.imageVisionModelProvider}`
@@ -291,6 +342,9 @@ export class ImageDescriptionService
             } else if (model === models[ModelProviderName.GOOGLE]) {
                 this.provider = new GoogleImageProvider(this.runtime);
                 elizaLogger.debug("Using google for vision model");
+            } else if (model === models[ModelProviderName.GROQ]) {
+                this.provider = new GroqImageProvider(this.runtime);
+                elizaLogger.debug("Using groq for vision model");
             } else {
                 elizaLogger.debug("Using default openai for vision model");
                 this.provider = new OpenAIImageProvider(this.runtime);
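
The new GroqImageProvider sends an OpenAI-compatible chat completion to Groq with the image inlined as a base64 data URL. Below is a standalone sketch of that request without the file's runtime helpers (convertToBase64DataUrl, handleApiError, parseImageResponse); it assumes Node 18+ for the global fetch and a GROQ_API_KEY environment variable, and uses a placeholder prompt where the provider uses IMAGE_DESCRIPTION_PROMPT:

```ts
// Sketch only: mirrors the request built by GroqImageProvider.describeImage.
import { readFile } from "node:fs/promises";

async function describeImageWithGroq(
    path: string,
    mimeType: string
): Promise<string> {
    // Inline the image as a base64 data URL, as the provider does.
    const imageData = await readFile(path);
    const imageUrl = `data:${mimeType};base64,${imageData.toString("base64")}`;

    const response = await fetch(
        "https://api.groq.com/openai/v1/chat/completions",
        {
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                Authorization: `Bearer ${process.env.GROQ_API_KEY}`,
            },
            body: JSON.stringify({
                model: "llama-3.2-90b-vision-preview",
                messages: [
                    {
                        role: "user",
                        content: [
                            {
                                type: "text",
                                text: "Describe this image with a short title and a one-paragraph description.",
                            },
                            { type: "image_url", image_url: { url: imageUrl } },
                        ],
                    },
                ],
                max_tokens: 1024,
            }),
        }
    );

    if (!response.ok) {
        throw new Error(`Groq request failed: ${response.status}`);
    }

    const data = await response.json();
    return data.choices[0].message.content;
}
```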
