Skip to content

Commit 1d551e2

Browse files
committed
tsconfig: add a project reference to @eliza/core, set rootDir/include to src, add node types, and switch to ESM module resolution; store downloaded .gguf model files in the models folder
1 parent 5764572 commit 1d551e2

File tree

6 files changed

+39
-10
lines changed

6 files changed

+39
-10
lines changed

.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@ embedding-cache.json
88
.DS_Store
99

1010
dist/
11+
# Allow models directory but ignore model files
12+
models/*.gguf
1113

1214
cookies.json
1315

package.json

+5
Original file line numberDiff line numberDiff line change
@@ -31,5 +31,10 @@
3131
},
3232
"engines": {
3333
"node": ">=22"
34+
},
35+
"dependencies": {
36+
"ollama-ai-provider": "^0.16.1",
37+
"optional": "^0.1.4",
38+
"sharp": "^0.33.5"
3439
}
3540
}

packages/agent/src/index.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ import {
2121
walletProvider,
2222
} from "@eliza/core";
2323
import readline from "readline";
24-
24+
console.log("Program starting")
2525
const args = parseArguments();
2626

2727
let charactersArg = args.characters || args.character;

packages/agent/tsconfig.json

+10-3
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,14 @@
11
{
22
"extends": "../../tsconfig.json",
33
"compilerOptions": {
4-
"outDir": "dist"
4+
"outDir": "dist",
5+
"rootDir": ".",
6+
"module": "ESNext",
7+
"moduleResolution": "Bundler",
8+
"types": ["node"]
59
},
6-
"include": ["."]
7-
}
10+
"include": ["src"],
11+
"references": [
12+
{ "path": "../core" }
13+
]
14+
}

packages/core/src/services/LlamaCppService.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,8 @@ class LlamaCppService {
7373
this.modelUrl =
7474
"https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true";
7575
const modelName = "model.gguf";
76-
this.modelPath = path.join(__dirname, modelName);
76+
// in packages/core/src/services/LlamaCppService.ts
77+
this.modelPath = path.join(__dirname, '../models/model.gguf');
7778
}
7879

7980
public static getInstance(): LlamaCppService {
@@ -148,11 +149,10 @@ class LlamaCppService {
148149
}
149150

150151
async checkModel() {
151-
console.log("Checking model");
152+
console.log("Checking model")
152153
if (!fs.existsSync(this.modelPath)) {
153154
console.log("this.modelPath", this.modelPath);
154155
console.log("Model not found. Downloading...");
155-
156156
await new Promise<void>((resolve, reject) => {
157157
const file = fs.createWriteStream(this.modelPath);
158158
let downloadedSize = 0;

pnpm-lock.yaml

+18-3
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)