Commit cae6a5c

Merge pull request #198 from HashWarlock/add-redpill
Add RedPill API Support
2 parents: 5fe429a + 9e7a3da

6 files changed: +10854 -13459 lines

core/.env.example (+1)
@@ -2,6 +2,7 @@
 DISCORD_APPLICATION_ID=
 DISCORD_API_TOKEN= # Bot token
 OPENAI_API_KEY=sk-* # OpenAI API key, starting with sk-
+REDPILL_API_KEY= # REDPILL API Key
 GROQ_API_KEY=gsk_*

 ELEVENLABS_XI_API_KEY= # API key from elevenlabs
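
The new key is read through the project's settings layer like the other provider keys. As a minimal sketch, assuming dotenv-style loading of core/.env (the settings module itself is not part of this diff):

import { config } from "dotenv";

config(); // load core/.env into process.env

// REDPILL_API_KEY is then available alongside the other provider keys
const redpillApiKey = process.env.REDPILL_API_KEY;
if (!redpillApiKey) {
    console.warn("REDPILL_API_KEY is not set; the RedPill provider cannot authenticate.");
}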

core/src/cli/index.ts (+5)
@@ -103,6 +103,11 @@ export function getTokenForProvider(
                 character.settings?.secrets?.CLAUDE_API_KEY ||
                 settings.CLAUDE_API_KEY
             );
+        case ModelProvider.REDPILL:
+            return (
+                character.settings?.secrets?.REDPILL_API_KEY ||
+                settings.REDPILL_API_KEY
+            );
     }
 }
 export function initializeDatabase() {
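
For context, a hypothetical call site; the (provider, character) argument order and the Character shape are assumptions, since only this fragment of getTokenForProvider appears in the diff:

// Per-character secret takes precedence; otherwise the global
// settings value (populated from .env) is returned.
const token = getTokenForProvider(ModelProvider.REDPILL, character);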

core/src/core/generation.ts (+35 -10)
@@ -134,7 +134,7 @@ export async function generateText({
         case ModelProvider.GROQ: {
             console.log("Initializing Groq model.");
             const groq = createGroq({ apiKey });
-
+
             const { text: groqResponse } = await aiGenerateText({
                 model: groq.languageModel(model),
                 prompt: context,
@@ -143,26 +143,51 @@
                 frequencyPenalty: frequency_penalty,
                 presencePenalty: presence_penalty,
             });
-
+
             response = groqResponse;
             console.log("Received response from Groq model.");
             break;
         }

-        case ModelProvider.LLAMALOCAL:
+        case ModelProvider.LLAMALOCAL: {
             prettyConsole.log(
-                    "Using local Llama model for text completion."
+                "Using local Llama model for text completion."
             );
             response = await runtime.llamaService.queueTextCompletion(
-                    context,
-                    temperature,
-                    _stop,
-                    frequency_penalty,
-                    presence_penalty,
-                    max_response_length
+                context,
+                temperature,
+                _stop,
+                frequency_penalty,
+                presence_penalty,
+                max_response_length
             );
             prettyConsole.log("Received response from local Llama model.");
             break;
+        }
+
+        case ModelProvider.REDPILL: {
+            prettyConsole.log("Initializing RedPill model.");
+            const serverUrl = models[provider].endpoint;
+            const openai = createOpenAI({ apiKey, baseURL: serverUrl });
+
+            console.log('****** MODEL\n', model)
+            console.log('****** CONTEXT\n', context)
+
+            const { text: openaiResponse } = await aiGenerateText({
+                model: openai.languageModel(model),
+                prompt: context,
+                temperature: temperature,
+                maxTokens: max_response_length,
+                frequencyPenalty: frequency_penalty,
+                presencePenalty: presence_penalty,
+            });
+
+            console.log("****** RESPONSE\n", openaiResponse);
+
+            response = openaiResponse;
+            prettyConsole.log("Received response from OpenAI model.");
+            break;
+        }

         default: {
             const errorMessage = `Unsupported provider: ${provider}`;
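
The key idea in this hunk is that RedPill exposes an OpenAI-compatible API, so the existing OpenAI client from the Vercel AI SDK is simply pointed at a different baseURL. A self-contained sketch of the same pattern (model name and prompt are placeholders):

import { createOpenAI } from "@ai-sdk/openai";
import { generateText } from "ai";

async function main() {
    // Reuse the OpenAI-compatible client against RedPill's endpoint.
    const redpill = createOpenAI({
        apiKey: process.env.REDPILL_API_KEY,
        baseURL: "https://api.red-pill.ai/v1",
    });

    const { text } = await generateText({
        model: redpill.languageModel("gpt-4o-mini"), // any model RedPill routes
        prompt: "Say hello.",
        temperature: 0.6,
        maxTokens: 256,
    });

    console.log(text);
}

main().catch(console.error);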

core/src/core/models.ts (+20)
@@ -9,6 +9,7 @@ type Models = {
     [ModelProvider.LLAMALOCAL]: Model;
     [ModelProvider.GOOGLE]: Model;
     [ModelProvider.CLAUDE_VERTEX]: Model;
+    [ModelProvider.REDPILL]: Model;
     // TODO: add OpenRouter - feel free to do this :)
 };

@@ -149,6 +150,25 @@ const models: Models = {
             [ModelClass.EMBEDDING]: "text-embedding-004",
         },
     },
+    [ModelProvider.REDPILL]: {
+        endpoint: "https://api.red-pill.ai/v1",
+        settings: {
+            stop: [],
+            maxInputTokens: 128000,
+            maxOutputTokens: 8192,
+            frequency_penalty: 0.0,
+            presence_penalty: 0.0,
+            temperature: 0.6,
+        },
+        // Available models: https://docs.red-pill.ai/get-started/supported-models
+        // To test other models, change the models below
+        model: {
+            [ModelClass.SMALL]: "gpt-4o-mini", // [ModelClass.SMALL]: "claude-3-5-sonnet-20241022",
+            [ModelClass.MEDIUM]: "gpt-4o", // [ModelClass.MEDIUM]: "claude-3-5-sonnet-20241022",
+            [ModelClass.LARGE]: "gpt-4o", // [ModelClass.LARGE]: "claude-3-opus-20240229",
+            [ModelClass.EMBEDDING]: "text-embedding-3-small",
+        },
+    },
 };

 export function getModel(provider: ModelProvider, type: ModelClass) {
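
Assuming getModel simply indexes this table by provider and class (its body is outside the diff), the new entries resolve as expected:

// Hypothetical lookup; the body of getModel is not shown in this diff.
const small = getModel(ModelProvider.REDPILL, ModelClass.SMALL);
console.log(small); // "gpt-4o-mini"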

core/src/core/types.ts (+1)
@@ -107,6 +107,7 @@ export enum ModelProvider {
     LLAMALOCAL = "llama_local",
     GOOGLE = "google",
     CLAUDE_VERTEX = "claude_vertex",
+    REDPILL = "redpill"
 }

 /**
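
Because the enum member carries the string "redpill", a character definition can select the provider by that name. A hypothetical snippet, assuming characters carry a modelProvider field as elsewhere in the codebase (that field is not part of this diff):

// Hypothetical character config; modelProvider is assumed, not shown in this diff.
const character = {
    name: "Agent",
    modelProvider: ModelProvider.REDPILL, // serializes to "redpill"
};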
