
Commit ff78d29

Merge pull request #1803 from elizaOS/fix/tests
fix: generation tests for trimTokens
2 parents 92ae29d + e49f2cd commit ff78d29

1 file changed (+12, -15 lines)

packages/core/src/tests/generation.test.ts (+12, -15)
@@ -7,7 +7,6 @@ import {
     splitChunks,
     trimTokens,
 } from "../generation";
-import type { TiktokenModel } from "js-tiktoken";
 
 // Mock the elizaLogger
 vi.mock("../index.ts", () => ({
@@ -130,30 +129,28 @@ describe("Generation", () => {
     });
 
     describe("trimTokens", () => {
-        const model = "gpt-4" as TiktokenModel;
-
-        it("should return empty string for empty input", () => {
-            const result = trimTokens("", 100, model);
+        it("should return empty string for empty input", async () => {
+            const result = await trimTokens("", 100, mockRuntime);
             expect(result).toBe("");
         });
 
-        it("should throw error for negative maxTokens", () => {
-            expect(() => trimTokens("test", -1, model)).toThrow(
+        it("should throw error for negative maxTokens", async () => {
+            await expect(trimTokens("test", -1, mockRuntime)).rejects.toThrow(
                 "maxTokens must be positive"
             );
         });
 
-        it("should return unchanged text if within token limit", () => {
+        it("should return unchanged text if within token limit", async () => {
             const shortText = "This is a short text";
-            const result = trimTokens(shortText, 10, model);
+            const result = await trimTokens(shortText, 10, mockRuntime);
             expect(result).toBe(shortText);
         });
 
-        it("should truncate text to specified token limit", () => {
+        it("should truncate text to specified token limit", async () => {
             // Using a longer text that we know will exceed the token limit
             const longText =
                 "This is a much longer text that will definitely exceed our very small token limit and need to be truncated to fit within the specified constraints.";
-            const result = trimTokens(longText, 5, model);
+            const result = await trimTokens(longText, 5, mockRuntime);
 
             // The exact result will depend on the tokenizer, but we can verify:
             // 1. Result is shorter than original
@@ -164,19 +161,19 @@ describe("Generation", () => {
             expect(longText.includes(result)).toBe(true);
         });
 
-        it("should handle non-ASCII characters", () => {
+        it("should handle non-ASCII characters", async () => {
             const unicodeText = "Hello 👋 World 🌍";
-            const result = trimTokens(unicodeText, 5, model);
+            const result = await trimTokens(unicodeText, 5, mockRuntime);
             expect(result.length).toBeGreaterThan(0);
         });
 
-        it("should handle multiline text", () => {
+        it("should handle multiline text", async () => {
             const multilineText = `Line 1
 Line 2
 Line 3
 Line 4
 Line 5`;
-            const result = trimTokens(multilineText, 5, model);
+            const result = await trimTokens(multilineText, 5, mockRuntime);
             expect(result.length).toBeGreaterThan(0);
             expect(result.length).toBeLessThan(multilineText.length);
         });
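
For readers skimming the diff: the tests now treat trimTokens as an asynchronous function that takes the agent runtime instead of a tiktoken model name. A minimal usage sketch under that assumption follows; the helper name fitToContextWindow, the 4000-token budget, and the loosely typed runtime parameter are illustrative and not part of this commit.

import { trimTokens } from "../generation";

// Illustrative helper only; "runtime" stands in for the agent runtime object
// that the updated tests mock as mockRuntime. Its concrete type is not shown
// in this diff.
async function fitToContextWindow(text: string, runtime: any): Promise<string> {
    // trimTokens now resolves to the (possibly truncated) text and rejects
    // when maxTokens is not positive, so callers must await it.
    return await trimTokens(text, 4000, runtime);
}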
