From 76b9331cec0439b51338d230fc02c5eb1394c141 Mon Sep 17 00:00:00 2001 From: AIFlow_ML Date: Fri, 10 Jan 2025 19:25:33 +0700 Subject: [PATCH 1/3] feat: Add Akash Network plugin with autonomous deployment capabilities --- agent/package.json | 1 + agent/src/index.ts | 5 + packages/plugin-akash/.eslintrc.js | 29 + packages/plugin-akash/.npmignore | 6 + packages/plugin-akash/assets/akash.jpg | Bin 0 -> 16063 bytes packages/plugin-akash/eslint.config.mjs | 3 + packages/plugin-akash/jest.config.js | 31 + packages/plugin-akash/package.json | 51 + packages/plugin-akash/readme.md | 133 ++ .../src/actions/closeDeployment.ts | 521 ++++++ .../src/actions/createCertificate.ts | 456 +++++ .../src/actions/createDeployment.ts | 1470 +++++++++++++++++ .../plugin-akash/src/actions/estimateGas.ts | 354 ++++ .../src/actions/getDeploymentApi.ts | 495 ++++++ .../src/actions/getDeploymentStatus.ts | 493 ++++++ .../plugin-akash/src/actions/getGPUPricing.ts | 225 +++ .../plugin-akash/src/actions/getManifest.ts | 361 ++++ .../src/actions/getProviderInfo.ts | 369 +++++ .../src/actions/getProvidersList.ts | 333 ++++ packages/plugin-akash/src/environment.ts | 259 +++ packages/plugin-akash/src/error/error.ts | 126 ++ packages/plugin-akash/src/index.ts | 68 + packages/plugin-akash/src/providers/wallet.ts | 109 ++ packages/plugin-akash/src/runtime_inspect.ts | 90 + packages/plugin-akash/src/sdl/example.sdl.yml | 33 + packages/plugin-akash/src/types.ts | 167 ++ packages/plugin-akash/src/utils/paths.ts | 134 ++ packages/plugin-akash/tsconfig.json | 39 + packages/plugin-akash/tsup.config.ts | 10 + packages/plugin-akash/vitest.config.ts | 27 + 30 files changed, 6398 insertions(+) create mode 100644 packages/plugin-akash/.eslintrc.js create mode 100644 packages/plugin-akash/.npmignore create mode 100644 packages/plugin-akash/assets/akash.jpg create mode 100644 packages/plugin-akash/eslint.config.mjs create mode 100644 packages/plugin-akash/jest.config.js create mode 100644 
packages/plugin-akash/package.json create mode 100644 packages/plugin-akash/readme.md create mode 100644 packages/plugin-akash/src/actions/closeDeployment.ts create mode 100644 packages/plugin-akash/src/actions/createCertificate.ts create mode 100644 packages/plugin-akash/src/actions/createDeployment.ts create mode 100644 packages/plugin-akash/src/actions/estimateGas.ts create mode 100644 packages/plugin-akash/src/actions/getDeploymentApi.ts create mode 100644 packages/plugin-akash/src/actions/getDeploymentStatus.ts create mode 100644 packages/plugin-akash/src/actions/getGPUPricing.ts create mode 100644 packages/plugin-akash/src/actions/getManifest.ts create mode 100644 packages/plugin-akash/src/actions/getProviderInfo.ts create mode 100644 packages/plugin-akash/src/actions/getProvidersList.ts create mode 100644 packages/plugin-akash/src/environment.ts create mode 100644 packages/plugin-akash/src/error/error.ts create mode 100644 packages/plugin-akash/src/index.ts create mode 100644 packages/plugin-akash/src/providers/wallet.ts create mode 100644 packages/plugin-akash/src/runtime_inspect.ts create mode 100644 packages/plugin-akash/src/sdl/example.sdl.yml create mode 100644 packages/plugin-akash/src/types.ts create mode 100644 packages/plugin-akash/src/utils/paths.ts create mode 100644 packages/plugin-akash/tsconfig.json create mode 100644 packages/plugin-akash/tsup.config.ts create mode 100644 packages/plugin-akash/vitest.config.ts diff --git a/agent/package.json b/agent/package.json index f4fa0f33e03..56028d6c7bc 100644 --- a/agent/package.json +++ b/agent/package.json @@ -61,6 +61,7 @@ "@elizaos/plugin-fuel": "workspace:*", "@elizaos/plugin-avalanche": "workspace:*", "@elizaos/plugin-web-search": "workspace:*", + "@elizaos/plugin-akash": "workspace:*", "readline": "1.3.0", "ws": "8.18.0", "yargs": "17.7.2" diff --git a/agent/src/index.ts b/agent/src/index.ts index 53058cf4ece..a945631ef4f 100644 --- a/agent/src/index.ts +++ b/agent/src/index.ts @@ -64,6 +64,7 @@ 
import { abstractPlugin } from "@elizaos/plugin-abstract"; import { avalanchePlugin } from "@elizaos/plugin-avalanche"; import { webSearchPlugin } from "@elizaos/plugin-web-search"; import { echoChamberPlugin } from "@elizaos/plugin-echochambers"; +import { akashPlugin } from "@elizaos/plugin-akash"; import Database from "better-sqlite3"; import fs from "fs"; import path from "path"; @@ -609,6 +610,10 @@ export async function createAgent( getSecret(character, "ECHOCHAMBERS_API_KEY") ? echoChamberPlugin : null, + getSecret(character, "AKASH_MNEMONIC") && + getSecret(character, "AKASH_WALLET_ADDRESS") + ? akashPlugin + : null, ].filter(Boolean), providers: [], actions: [], diff --git a/packages/plugin-akash/.eslintrc.js b/packages/plugin-akash/.eslintrc.js new file mode 100644 index 00000000000..e476cac57e6 --- /dev/null +++ b/packages/plugin-akash/.eslintrc.js @@ -0,0 +1,29 @@ +module.exports = { + root: true, + parser: '@typescript-eslint/parser', + parserOptions: { + project: './tsconfig.json', + tsconfigRootDir: __dirname, + ecmaVersion: 2020, + sourceType: 'module', + }, + plugins: ['@typescript-eslint'], + extends: [ + 'eslint:recommended', + 'plugin:@typescript-eslint/recommended', + 'plugin:@typescript-eslint/recommended-requiring-type-checking', + ], + rules: { + '@typescript-eslint/no-explicit-any': 'warn', + '@typescript-eslint/no-unused-vars': ['error', { + argsIgnorePattern: '^_', + varsIgnorePattern: '^_', + ignoreRestSiblings: true, + }], + '@typescript-eslint/explicit-function-return-type': 'off', + '@typescript-eslint/explicit-module-boundary-types': 'off', + '@typescript-eslint/no-non-null-assertion': 'warn', + 'no-console': ['error', { allow: ['warn', 'error'] }], + }, + ignorePatterns: ['dist/', 'node_modules/', '*.js', '*.mjs', '*.cjs'], +}; \ No newline at end of file diff --git a/packages/plugin-akash/.npmignore b/packages/plugin-akash/.npmignore new file mode 100644 index 00000000000..078562eceab --- /dev/null +++ 
b/packages/plugin-akash/.npmignore @@ -0,0 +1,6 @@ +* + +!dist/** +!package.json +!readme.md +!tsup.config.ts \ No newline at end of file diff --git a/packages/plugin-akash/assets/akash.jpg b/packages/plugin-akash/assets/akash.jpg new file mode 100644 index 0000000000000000000000000000000000000000..dd08e0e57057fa1703a6bdf2e3e3428f5e448713 GIT binary patch literal 16063 zcmeHu1y>zS*6tw$cXxLU?(XjH?(Ptr;O;JggS$HfcL?t89^8{lUYnV_=HB)FfUj9} zb?@q~v-h*LtE+!4{n`Q`N{LB|0YE?i0Fd`Tz^_%n7XbLjj~~H4f`fs9LxI2l!$5+A zL&Cs9L&HEr!y>}|u_403BOoHc!=j*|qN1SS;^5%m68y7)KtMpi!NOr8B4T2rA)#Ua z)A2t&_|*$Qf&_8@Z~+Q}1o(gi0*VCks~^Dn&fo`7kl(!iy@5l11cLzm@SFC#=Wp_R z3kDAQ5#j^LuN43+=zAw3DB?SDdahmI->&)J!~aF#F9gQ#LW?C;1wc_IeGPFe|04S4 z+LP=*?BD)_lC6`K*W1mbWv!k6KeTtpF)Ds)xby-5XtMnZ1sjV0>WedvZD_KSU7*bEKL&%aPAj-8)$#y1Qd%!kL^l694ivb@i?rF^ZjOb<=^29PpTze) zdc@c$@H0{lpU)h0vFShL-v_48eO9!uxu%IKAmO{LY89^8?$L1o(DMVoZv6~9!F{iR zuRYx$)3p*!B;dv9d;Cgff;oIM%dH%T8R%Rupwn)etnVZH3*V7<{Paq{5RzxNeu2$c z$?m989<9eJSV*OQu@CYffmVyuORUe50q=z@B+-#u*kA1zt;Lx}&I zrunN$miCX0B}09y7ut&5GxJ;|p_gX1H{P#w<~2AtE~d)G-$E}7cJtP2R- zZ048aNolrxip=?|$%3=SpDsR^ygW%?2 zY=fGf^6|qkXy2q%t>!6?cbl8u(Ac*R1f|Scr=9#iK=UpaZAc$Daeo1X2J{KNK8+n6 zwcoXa@J{wai1q0aa`6Q5_!QPVIp+6tw@(z@Jsb{AsphoKF?#R6`+)+9){JX@>zrek z39?UGIjiyhMy~hKkIQ+hAl4rS+JMy}i%vbId>UEj;(k^$0rbh);diSv;9|qR0-4^!+C8VsO!SAq3T=0SEKik< zFMkN1^ySbYdq@>q`GzjmH-3;A=f%T)r&YV)-r7LBpO|9yxfdcdD;`cWM;*f^&{959 z)O25oH*AW4?*vS_E|SdfK$&+~P~qg=eh&oxV)KJ%JBX1D?h=6I(cNmoyu!o#o^9Ec zjyE&W#A^^RXS7z%8dLi&)w_c1YN-^^7P@B)GL*NxcviL53CAA-K?@A45%KWZKZsd{cbj@6QbAw0YB`>X-HpXPl$FVgS9kg)05n)g zzt!8e>@Q4*rrH8j&hkezJd>uJ5LLKxO$MLh{^Awl*s@|R*C&SuK(huRcDTYu2w|8~ zTMbesUSq&#pDXtDfZLZnke<9}2JwE9}=u9@QYsWOo8(IFiIE}s7ik-Hf)J{d*{EG_P3ch+SOnr#ST zag_-CIr%Fy%UwStn1?|AV!D0eJC{qc#(VJp+UERu{27l=s*Lbid49Be=C44(C zCO1l(9GWikKm;aSdM{(2{dn4vgpGvjtdax3^tQcfI{V>M zC5nGp6Q7@;xR3tR2tQo-bu$3=-bGUzKDj z>gwhqPnDapP7U9bdF=}teDN}4m&xCJLZ~E`K46t$?#HK0;=p)6dtcskT#MIY18}ld zIPq&ErgTn_=rsKSFWyHucgrbz6Z{=m%QI=!*kZ?hsa9~SJ3%G|4NzwfKQhe%qQv<)A_Rb 
znEIeyj*WxwHHojM=X)UMr-PSlp*03sn9BoZXkGR1aNvdL_BbuXl|OTzkJP2mrjKFpy{IIjXANGI^yKJG9_BV`Jw?4!0S^=*Re zaNYUm&#Y`~O5xG3_{1$wcOF=6+hZ=5gtb_+_Mx#)J22kI!`s(aQ^dVCFF&#@_%?fH z&Q2F9=zz0U`w+4TPMd+3>Oy0B&e-m7i#FS8lYHp>hhcO##U!C)Lp+gC^_RVlc`trY zL3X?^sx8nT0} zsb>TAsInT2(+*QG*&USW^Rut=BgQI36sirSb>h&}&wij;GKEI*K^CU*&(cjl3Ka2( z6!A3nZ5wy+++`i2d&W$3ijQQcl+w(W zaj;JW2S?&ga7&BzdYeX=(9A0s&@72VqhX0eW;8F*flur$&kmmwF$EQy*p z_cUa40FkWUs2_!VC5bswtpiIfDdrkNK2}f*{$N|vH>-@hPVk z>+BY^UcE*Ct0_hOQf9lxYTK7-xnYQ^d7q`)Hdh&;5I@~8?gyr3gOB4sKowN7YW7oD z+s1DSQTtF%9F8=hoLWS2aPCXlcLPzkarG(6*lUi*qr8)IG<-FV|tnztfAe5qOgnG&@O(C=}e0Dk?Koq^tKMs%-jObv1q8_qAv0 z-WQ8Tu0}bo)Vo;PgUu!~r+@%|SUuRU@-Sq!LV1LQ#GqX$fuzyxgXK+HUFEtla(oB4&@}Kv9`F|s7j*5#3`w|=tq=oPv|F&MGj-g z!5=lpTx>kQ0YFPw{aQ%W{1q>Nr1-;~s;B2A>Y?+RM$~E-NDhH^h<~W^=>C}SrYWdN zx_^c_VN|wn*!ZOY6cd-UV0dV_g+Rw1`)=^6w4%`_Ey5Y|M&mHvkj>2)sceWcEKQ`Z zLZozV=*p)P2>2kn1Tow{cX|ALtnS>Gb-`L?oQqPnD5577NJB$!BX_rL`d5sx z4ehqybx(McJRI&b8i-3BxBL()4kyls<1n4#rtkopFC^#h1Nq{xHcB&+tO{SD zszh4(Ruz$5xdYlP0U0c~r_fMm7R&L3DLFKYb0%L1 zTFtE8P;*9X4J5x^-%^&&q4)bu9P$r)nMq~IqWm&6(poWkXZ&r>r}cll=VBpY!5Bx9 za=jN`LADoLl{7`-m?nXWdcGJ|q@+Fo!X`SN0}>JtQlS7#vArhR)ihH~g8&=(=6Z@{ z)LbWzl$!P81FD`QleN+wyhpx`HAXBKTgR`r(n*k;Op=yI)2!iDa1FP>U*4zgJf0y< z9`$Ta{Du0x&W%v1-Hn;)m0bdLqv!hSc)-*r&e!!^FDYt3k-h zGMFoXa0Fw*haY%4`z+V_XOlitW|K{PUf>#0C(7Yq)*hQKodQ)fLS053Z&G_V(OCq> zPE!B4bc_ntepidmuPVs*wTv}Q@m$RQA7iu6=EP+U{^YaVTTRs1RW&*W-eH_BF;wdW zPm*i6+CxqZ1N~qQx7sQ)%;6b9NuP`+k04Mif?HO*aWj&bY0yqAx1}1SZ0$x|wWN!8 zZRO$}cgeq6y_Kd$pXf{t0N@E$46#JHlFd5)S20CkXNM(2>Y}zym&Zj}6V2rb?*wYerj68 z7RrCi@vWj%xUx2naD-V%s+K@WXQY)$ex9WuUcdPT(6fEMJJ>9jQ?Ah@mPLP2sEZUC zt4|bP1B=z$qV;Otm;5Xup4>jKHASJ7N}A=@0)yZAP2=wLEmkR3*@tj@$E(V8%$o`_ zV9CWlgj0YZlY17BZ5n_4@jD7)blsk{>6c>3U7c-fn|bt+=NtREDvJimL9Q5m6Aaz9 zeqBh|b*mUEq&U-D8tNn-^C}ftt|?a3v0nguYuJZ*wB!^Q`n~(marBn7rs+x_ zm$qY>U1vq+BHQw1ttn9CC~X2=`nJ27)52t?G0NJXLNv62BURDx3(*-Z>yL8d^}%7T zl-G8Kd*k(`;+e{{W9mgS9*`9gM@4NsaJbMkAPpZr;2EZ0@F{{L!SOdR>h@bVF(D1w zwR$`etEc}k!ZEiA(sf_rYlm1gcs?W04LYv>E}Aih>ab{^TR1s 
zk#EwoJ;x_I4$SqOnX5U|7qexKL_|cyUmgA4OH4ULz=!t=^CJ`lBm@}5?;`VeiHQUt z0z*V*L?I*=R8%o^3}6yc3XK0cLn4rnSX)=$gQ{%g6x2ISs^FZIk47eJV4OG0z%0_Y z{a5V?#s6M=Hc0^$2NVgT=9$jR(1+0hRqE%1nUSyU-X61#eyR4c*mj?;&#!i62Y%{y z@mRKWk2McPli-;&T&06i_YO~DFrl8axvi&82$EiaGKb|JHQ)Bj-)cV_Vu?#CL3AH_ zkylxe;V6#b(vOaM_JHzOXl<4xJ{t?G1%KE}Bdr1Ldr6S3q$m)Gu+C8)`UQZOHxqAg zOf(m>B%bLrOpxr7cq_JW^Pa5|e>y#l^JAZU2Kt#*&v&o$uca~s_00G4JZg{P?RuQ| zg%f6@O8|ApE9fDA>@1s;d;umsCDL1cj@e!Q!Z-!f;54{8Kerrk^^~gwWv%OH>leYFYkbXPc8jk*bY9F#c^pY_vaT| z+8MmG9Rba>9D@SpzzpiFLrYI*q@UPXoo1NT;Pjqog$s~Ol0 zW9@eQp{Qnzs?@@8O4%(i+%}ba+Gm%CQz(i)Ho1>itQZU3KY1i&=1z{Wkhqz3CqhKk z)&G*zR^=eS*krhOTUFIP#W-#-R=zrN=im1|%A_)iqYSQXnK%r?G1Jk9gpBVmV4{sE z%GzonvW;=;cWo@W(pM)Dzl3|!lX4_95J<>0KNPW4)beK}Q`UWoB=31JO1+I>*;JR2 z34@S^0oO+gFL~sQH<2EGsQ2%Ejaicwy{rBOh((^kq$2j*hayR#!A@ZV19!uo>xgkv zL`Ck;F#tEm#g8bq8A5PNi~X_aULi(PcA}qa0F*vkHoojvrFcAlV^r0YJ9J!?MD<|W z+>97*2&6Z{z=Z|6>C_Qe4Dcj1QtgXUC-d&S3|TDBlf1CPB;xpk{{oQiDCUnDP!Q&JyD*xgT)*dbGQ; zDnahz_P#9LQ;zRStDb9f(+cRBF-$nomb=52jQ+GkSAW=Tv35 zAww2He&DT;It$o`(T_^4m>#QFLN&YH+xP|O<9yJM#b2u&O;8)mxDW?%$el%orQPn;hfn5JM z!~k)|&n`GtaOn8jsK99PqmMP@wUyV2mK(wzTNyuTh8_CEW?Be=Zb*<%>habfcJ}xj z%n4zE^140Csih2`9ZXlwAdprNQ~4S8AtQL|B<+d)SU438Hc_gFM08_(@EiP%KX%t= zNqvqRAjt^Cv~1t%R8?_s1;HzqW3LqdXYNlGj$h3zE;|oI*)E_r?a*~GXfnaGz$4!3 z$j)~db|luo#g(Ar+!4jze9Uq$Pfp{1dOhzEOoTm)MTUn4$>-MkbYh>)z>CsiOlry} z6x3(k8wt~?N~e=K&v=FkfD$qiyv$1EnZk8Z!dnI9H79L=>T30$eZu3qKJZ%?%ncvI zCiYao&5=arj@cqWO-2O?!=+ay?B-nB2RYesDJ_cAB@Yjxy25^r7T;zoT*2ldjN8o6 ztYYgxl5f_r*9N8wXIO+OM7U!(x4mIySxDU5x6HAgZ}#k9Og{_he2o#p=Y*Ux#RbVV z1l9Tlu$)ey2|e5b65;s(3E*^<`0I5TWp4L|6t!8GIRxW1=8$}{`x7HP$}H0T6EskV zs6h|GTGg7xNRekJBT1F$57aO|dRPI%w+*pwO+YPidi=lD!-gG^91=!+{?S+$%Y?>L zjead|*$cT9EF_A(ho+(dvc^$GQdA_CqRum#e~5w0tOXhS%8^Pc57ONXXN#w8K_kxa z@tX7wq3w|!f zD#|N?v%zs8g|N|C6Ub4Z5T73jriu=L_h;pZ!A$B%V60J9bdo&7By%yP+szBxAL(*J zI_B+6!PG6mmP5~T#>~gDLGY=J@?T>G+$H2dhoH0;l6bx)V`a(Aqd8}nnmRkl??Esu zOpr&V`&SiqiMrdC+%9gm!;^-XY7)DyjrMNET$Pv1`J!0y(+)!Uzr|t6ck3Kkk%?*^ 
z9dNT#vI&Zhn{mcAW8)W%b?)e4dBc9z5DP5I5J_PqP9};EqU)*MV6fx^zG|>^EBVs; zMp;@V8tH~bG*-cjTijZm#J^=krz?i>=HcQbrc-!QD0Y2|$7nUNwqB!1g5M^38nrp* zB}TW(ycB6388``YUh$ZUTh5e3<74diz5226q9rUwsY>yA_CoB=J8^qZHk+UjvmU@C z6@Agwoy6mjqmxX9#T!4=z>lK2H=DK<=0Z)#cTai+7dDR6fr~SuC=DCqXBmh7aB|A> z3*h%V!R$&7E5Rov%iVMu{@B=<%+^yuHKDj7Rq(y2^T4E%DeG8uYW?_v=qS6A4lMro z%-Fj83m}Qz_S@$3^oTr8zJjlj((S`vS;tI=r+8@&OfPOg9*|@?Qtg(+eosalstp(* zn!d?g(GsKlG-J#zeAe@B>qm2~mPSfsqQ@RLV~zIJ?;*(QP5W)Ox=4S(*2l=c-#&|c zA$mI4G-Ws8#y*DsfdA)J#UR;oAhB())G{&rO?N%KNolEDiaF2O;GymmU?GF?uz#7$ zBYM5Xk&IP}HlE{94Fgvj9?Kp!bUfKv9B*j-3*+1@w(CWzkKBZxMvR*7(chFdijSt*Wq6PJ{Ipn z5Eq~nd-NDX(E=azF#m5SDyo}jF1#ny9RWi zX=}$9iRPSXZe1Quep^qs+Ao01>2d0nw!TK@70$sP%)WE`5NsWCpymmNOPvIWIYdE0 zebmGB3SEQ_E-=?6`s9gZDB=mPw85G2DuL)BG!{!>g+^YlfF^r-M?@#15k?b-f*O>`&)RKr8jv^b5Kqc71DBc|V(rQq_VRFg!betcO zb1b5Ut~%Pc!}?rFzSxc$Kc}1kIBmNMk%dRFWS{ZzHort>3K<*ewT_Ui8>Mx#k+M}S;KD*0R8Pp~4-nhe6 z&ByhiIV>8|Po<<~UEh|bzbi|&2dj4E%2sXsq|hvWd2ZYD%js%`xG|CwMqZ&8G40 z-;!XVqq!3=OamWSw6!ZrYU50d?lJdV_RUzdH<8|&IR>ovu;3&X2pPbQD9qaz0!CV85E}DP(&38a%{|G zh98SE0pBjIZL zv#N5Z7>)we_sW^)#Ir%Be6L@Z!BjCwA7GIwcb~8^r!t|lqT6(?WANwhWVFJ=N6ffq zTc6rLKMuwH-0e4f_7UCsD*qhC)9ymJZy3pqJU+xE$xeDjx*YGx)W5j3FWiai;*%C9 z2t?KMf`UYbPsqu*kHx0}(+M7_nIh`e|97Mf$DIayc3s)1N9`ba3AY1}rKH7mF-H&9 z(gC;n=UpwAz!K&CX)$L3X71)Xd%ScPBeTFVa0Jxy4uWxf$G{{mvjs-R250=Vw3}R! z!_5IUP$s!kgOK-1zzmY>j}TqT!IcI#@S38$PXh01*bPrO1Igc@z=>n!Ir?n9!s9Uy z^O0t+bYi^|RrkyRmL>i}l1g(+^rw#7de0$smDscS=jNAUeVOivmFND=uuaZ{x%n?6 zLK=JZ+DSPr7i{pDMB#p_`-WcK2Lst@(MB4?cIbSkOMr33>JgSoewN*mYl+$JtlbV63n+gkbuVHw#U4w)Ip!bz zK0%$HuDCFH!zPxo&}Oc0%&yL^-40Kcm?0O!F0Th^8zbERY#)uG2>LXLE~&oRlj}xY zVRq|5hJU>@jm(o{JvUrZFfUu@(^A1O-6a;2xwlJr!`do)imR-cwj~-3rQR6_hOzHv zloES1a%GeeZ&(q&6(6r}K^vj6a^^gx6Ak;0+kRBCDM=Pm4mvN*hy6y1kqP@NRE@{? 
z;Cq~zYeHJSC8&JSH}50tr=zT5Mzg-~s$QETKtWtJWg0nc`No;f!M1rco)h z!@F@Oi(BJ{;8^9Cj@29}wko($@3F3znmkbthh`&jzbtz6y@x*G!P7KTLkDq;5l)7) z!QFObo5f=R)zPe(ER*xo_vH3d(I*=V2;HadGrai3>7!)x8J@J03<^PG)X8&r#4iM) z;oea{CDjLaDBHb54`1nVTfQFJCr$aRH`fgHg#k0d#S&6M8cp#t$fTo#nYA$1FF+{r zUicRK{V%|%ND60yGeyKXZkS!nmvYb<$Z=x{RUhwebe3TC#j&gZ_ldz;)(9{E{qFC{ zA9kcaW_uvO!9KpPko>*9PyPTtBK@BGK_et$7F2XZVG>X>3_v9oivP->U{p(DFrD|u zEYJIb%ips+;|a*9c~C%$`h!=oNIv`i|2Pm%u}Vk~tPc4afms|0OB>;DyuEgv+u+Te zqkR9CAZCnB>5M%)^N67s|0AJ&?yhTQ{qtk1?2MOAMeJF^CHevy6k$ZPW{7YYRUjk_ zAH2F(PVw9(rVIC^R*_E5mz$EtoanC?zQ+5CfSkn19LR? zA<$xxm*|+aDN8I>>QZ^(McVb!S(eHy9LIPIZ~rOQ-#zt!4`f3>jW{Hgm3Ze3qJn%( z9@$vz_b@{;sPGq7d*5+o$UBtjb05fC$?D$}$ zJTo>O&$vay&F|Cxj&2i3@B*#r)c!1E#y9J_(JWOIM2uJr+-i*fM6qWxG!W`V5JN9$S{< zs`I#ky(YWHR&YP$9DTm#wFb_r$2V3q44I4h@9U=J9iKAeY1Bx2 z3w7iy8K5Mqk+4NKC1&WKg%c;~$jFn}rR)-C_D#LRe*q9XHFLVi#q{e%8ud+b7Y z2VRfiSl-s9!`6I1Y{A4(^9z*IJ5!~tpb>+?prc~S&mj<%3?->pI3v&MuS>i(E!cx+ zJWl|Uc_FjGgiaWLTb64jlzqkLUx3w$zMn$8pBiyi(9X-rH_-jrf5Wjc-kZrP!)i(R z(1a-*t|r#j^rc2}Fe`fIKlv~VvH)1ij~nYDCAiuw$L76QYsvT+Z@lo1(<@JSkGHt8 z?DpbXO0X!jK=&u5OAb~%!kptRwzT~Y2jjfUK$>6!L{ZUxHS+xzGC!0gY0iLe_O5vi zNol^ivKhFon-rufjx_CWo6PO%!p>@*T0#MnU@6|;ZR!uA^O0C~oD@SUS>Yf3?XqOG z)TImAC6Arz*oWV=@)$9yda+RS{Ers$RAJty-`Ovy4V}tESNw&4G z!(`eG=HP1JTy3GEqqS~eo%;_rm`s00XnyUy z7Y^?=w4i!dl!VN#9Lfep9DE;ig2j)JeQhyeC3Cl`kW)nNEeuSH)gw1qA7-_2cDEUwJE#gq|%UhhaBm3OzEST%P4Gd;#N19Q8K_zx{vX$ zkau{TlFsRK`~p~Xro%P{6e-N-?>IkwWy|0Q0-v>Cz_9WZ-@ssLM3fw5$;|p<$<7Js zitwM+;|LJdXa5)lrQfuiUQa$4-R3l11 z=8Wuf@44V@5q_rQ_~fu`{uXLpH2EPBOanSFPGHr8)E4pYUxzmco1yc1`>K}d4H8Pn zihl&88=##RYi{P-Q9`DwV@b@3v&G>3WaVlhha@GI;5_r2A}b#bo+%>Dze0%5V>t8y zS;a#*2Cbyzs!R7`LUkK3r(y-1ITd_`>Sb2H`&mHx3t-mSIg1`ONaAn8YNG#)&{U)P zk>@=QiNEP!i(6Egaf>=LuF#~%OzVuhMI0D6s53wO0!;p1`6Kc>GlPJ^ynlTG3id~8 z{{33!{gVbnBxFJ&MkW?PMW^X)6m;VEmoxESYf%M+loEPKn3V&6pef8Sh!_~<<^PeU z-@l0XBTXj>2g40F2(=azqIU5lONSBG+LB`ET(LOyijmwD46o80qFe{PY& zPiEN-AOj=Jq-(}VZP$e}QWw^UQ{eJBcRV9ZDz*IR8t$hQ@+RUVB;@qF5q=>f%IwTO 
z1;zyah$#T-D?SaaV6`e97l52%Becma7i5CB(PjS}_`G(L-P&MfP3iUvV3L(5B288w zD{hk#7>rsDf>Cec80^i&w43$f(BCh?IDET%n>?{Pv*2ZSx({ zM)r_*TK4w3pc0zxa6>c)SK(xduJjL_{l6MezFOo%a{uzG+VZZ_f0!xD-^B?PT@C3U z%X08ze3oe)g18|7{&f;p0}N!D$C4mQ>#<#Fr0}KUme%EjlsIp=`0Iydl;&f{1$fig zN1?q!!q~{vXq0Lk5{`M8RFs6yArv<0U4>5c{Wr;}NV&_UkkPE7(?W8dM*v!mQcMYb z4K|`x_!WBGz;jyfJu<2*rb8aRhDXUZb!4VM1lL?op_w8z8%HuckHIIVYdqN*NF}fG zKatyqUlvz#(_*KKQ8?|-VbZ0Fpa2`bNMxz};bkcm`laaUDlE|nZuE|zcJ?wJp)h51 zJ|Uh&I{OLbklvm9nqtbkb?tIEm-qV6xtrMgVJv&{HT-6iYSNj}3R+1>g;hJ`Ickz8 zGb8aF7@)rWCD>em&+F{#^rrd&7sk(TPWFb&CSh0(<&L;}I z;SvgNTEpbY4-Y8hYCAr!Q)byEVM>0>qCTHr_YpVy{?crX-W1DReo3`s&rqEd6%@>y z^+}L$g^YRf1C@V43Ik1Z^hpvj6vvlh&k;p=G;h!k;_BF2W)@*tl;{+i-#A8CMBNM> zJfx_jM4ISiBk?p|Nr7=T9H#|XOUvqo%1ssrJyntBezi)#+3FV;yy~k>opnG-u@Fk6 z`r4`%L@7#OqNDeWH{%$btQp6E_@m8qG zPm&92D-mkYDy)mFhqC9z3uUUP3ZvQtSba+zi)=D>1Ce<`iWP5PTJv+~_~o(WR6LhV z)`Q#vVl^(RqC`tfZ!F%;(8;d1t?d z8i6r6)Qih_jC$PNv#CfYv;ggwYD*S%3OFam7MqJ%U4je|snNwJiyN-yVE|ZQn;;8B zCVieSvEMV!e*k6|!%$8mr#b@KaGa2&V#_fhA*l#3Bvp^mwM}o`>$f`ejD+!ER4?ADIj0S}hmXea^{oKVxss%i=~Z7FIo})jUbal{6Qnw>2Oyj776(_V6?I zcia6uBWRMzvVZvu&IvQC2Jcs6^VeooX><`TvBqcUqf7y&Moa4YZ;qWn*^gq>QxSbT z)EPHL_p_%e`ybMZq|NbzN=AwCY%0DQ4ZsYpYr&P0;+G05Q5Myfo%yZ$a>~vHrv)Nc z1wLQl_v!o{(HAA^2OhlCCWYL@i#wFChL)FGC6B8i3}$>6XT6d0^`wpE_`U*1iV{|Y zlxU?y16v+h`k9L6i!l_xo7_m*nnI&fk&i8RF)B=v^%M|j%#hBOIucp-Cc}ujv=W4^ zGy-Em2iw>Yaf6{|0>#(#=nM%&_~dp0vj*~9(lsa?gV=sfyd4h68+GSZ#RG#KUZD*;-Rl-`7KjJ-O6|OQkN{4zE2m{ zg6+Yw$}(#n^89;mViH$#xX?h;Gkqqz@Ka2_0D6{E?qPom3~i?J6^*{aNmZ1&v8i72 zA9=L34q_#XE@-Be)GhqCb#4&@OM)y0$(ZGgsLJIJC}{k=izW2*@GLOTdR7<~A<$dg zLBj_5Q>5&(Rw86ecc9vIP?cptG76n&da;nH zs6=T6`TbmcWHw4`WG{|c-#zs^sD=?;? 
z)o9NVsdpaLb1L2C1HSo%Z+bL$XcQEb!N5XkG78+3%tX_3tjt#OCsX)@D_4qWD+vX{ z9j5GqS?DYIZw|mahsxrG&4mN zT(r60B9h57D)GMuH2%~+%`8ZUK62#U)ks7roCC{SNtTMAFiquDiS5%*Xjup@C7;WL z&CK0Jq=lhq>Z1%_A=@@uu$hmXxwf9nT$Qg>DT4gEbu^-1vaPczp&u$CSZjd|AJa!9 z9dCv{(b++UA(ib&Mh7Hb=a_JD!fZuNwh%+PzbW&aTw7sfP2#o&YVN#r(wGx*lgw@q zZzsH1*=i*5hbS_fnD(yJ0vD2!5y6?MiU2}2NLR+{_OMDo6rUAOlU+G=v#v|&!+49P zYqQL8ybmLZVIr9izDT-#x|? zc;mXl=@OIR)Xw^#E=GRZczuG0x_AHMuEbw7M7aVa#H}aJq1fH^rlge>get-A-VDHL z)myo~I4Yq~6DgRaixuh)O=ojwh>z?P-o>PnxI>yjgY1|D{}VYhUud&me}?bSB|XD% zMNkOE^m|P&FrFzG|0i+@;6l4RF<@-#eI59#+&L< zWtZMHnyh@4e4Bc>WO0FJ&5~DdLerWc#iC1LPyaw-6a$;pdF3RLpp(^+Q|-Q#bD*Dc%qaGC#gPU~4H#4)?)q-ye@KcM>inmqxF+W)-X^8gSI7q)dcZd& zy9kl2{D4jS6@@uP;I^Zvt}tMeoM_dQoeoIqs*WUXxZzN#t}ey4`<{q|PW5A<$Rm4$ zY|E~;bxeBug@Xsyb@9Y>7J?VL@sST2=o=q@Bq$qzs&|wU2?Tw?lB)qnyN(UT{!9h~ zTf8Q1Iaolc;ZnDvEk_!0NJ)e|Ek6+Dc9oEzB*6jM>4+gz#?J0E`5yH?dX|fa@IOYq zv456EFQ&s3Uz3WGUCZEn_uE>x_guq(j<3t#?p=x8AT>=RQi-gsE`{!`/test'], + testMatch: [ + "**/__tests__/**/*.+(ts|tsx|js)", + "**/?(*.)+(spec|test).+(ts|tsx|js)" + ], + transform: { + "^.+\\.(ts|tsx)$": "ts-jest" + }, + moduleNameMapper: { + '^@/(.*)$': '/src/$1' + }, + setupFilesAfterEnv: ['/test/setup/jest.setup.ts'], + globals: { + 'ts-jest': { + tsconfig: 'tsconfig.json' + } + }, + testTimeout: 30000, + verbose: true, + collectCoverage: true, + coverageDirectory: "coverage", + coverageReporters: ["text", "lcov"], + coveragePathIgnorePatterns: [ + "/node_modules/", + "/test/fixtures/", + "/test/setup/" + ] +}; \ No newline at end of file diff --git a/packages/plugin-akash/package.json b/packages/plugin-akash/package.json new file mode 100644 index 00000000000..6c2bbab527f --- /dev/null +++ b/packages/plugin-akash/package.json @@ -0,0 +1,51 @@ +{ + "name": "@elizaos/plugin-akash", + "version": "0.1.0", + "description": "Akash Network Plugin for Eliza", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "clean": "rm -rf 
dist", + "lint": "eslint .", + "lint:fix": "eslint . --fix", + "test": "vitest", + "test:watch": "vitest watch", + "test:coverage": "vitest run --coverage", + "test:ui": "vitest --ui" + }, + "dependencies": { + "@akashnetwork/akash-api": "^1.4.0", + "@akashnetwork/akashjs": "0.10.1", + "@cosmjs/proto-signing": "^0.31.3", + "@cosmjs/stargate": "0.31.3", + "@elizaos/core": "workspace:*", + "axios": "^1.7.9", + "dotenv": "^16.4.1", + "jsrsasign": "^11.1.0", + "node-fetch": "^2.7.0", + "zod": "^3.22.4", + "@types/js-yaml": "^4.0.9" + }, + "devDependencies": { + "@types/dotenv": "^8.2.0", + "@types/jest": "^29.5.11", + "@types/js-yaml": "^4.0.9", + "@types/node": "^20.10.5", + "@typescript-eslint/eslint-plugin": "^6.15.0", + "@typescript-eslint/parser": "^6.15.0", + "@vitest/coverage-v8": "^0.34.6", + "@vitest/ui": "^0.34.6", + "eslint": "^8.56.0", + "tsup": "^8.0.1", + "typescript": "^5.3.3", + "vite": "^5.0.10", + "vite-tsconfig-paths": "^4.2.2", + "vitest": "^0.34.6" + }, + "peerDependencies": { + "@elizaos/core": "workspace:*" + } +} diff --git a/packages/plugin-akash/readme.md b/packages/plugin-akash/readme.md new file mode 100644 index 00000000000..081f353f26b --- /dev/null +++ b/packages/plugin-akash/readme.md @@ -0,0 +1,133 @@ +# Akash Network Plugin for Eliza + +A powerful plugin for interacting with the Akash Network, enabling deployment management and cloud compute operations through Eliza. 
+ +## Table of Contents +- [Installation](#installation) +- [Configuration](#configuration) +- [Directory Structure](#directory-structure) +- [Available Actions](#available-actions) + +## Installation + +```bash +pnpm add @elizaos/plugin-akash +``` + +## Configuration + +### Environment Variables +Create a `.env` file in your project root with the following configuration: + +```env +# Network Configuration +AKASH_ENV=mainnet +AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet +RPC_ENDPOINT=https://rpc.akashnet.net:443 + +# Transaction Settings +AKASH_GAS_PRICES=0.025uakt +AKASH_GAS_ADJUSTMENT=1.5 +AKASH_KEYRING_BACKEND=os +AKASH_FROM=default +AKASH_FEES=20000uakt + +# Authentication +AKASH_MNEMONIC=your_12_word_mnemonic_here + +# Manifest Settings +AKASH_MANIFEST_MODE=auto # Options: auto, validate_only +AKASH_MANIFEST_VALIDATION_LEVEL=strict # Options: strict, basic, none +AKASH_MANIFEST_PATH=/path/to/manifests # Optional: Path to save generated manifests + +# Deployment Settings +AKASH_DEPOSIT=5000000uakt # Default deployment deposit +AKASH_SDL=deployment.yml # Default SDL file name +``` + +**Important Notes:** +- `AKASH_MNEMONIC`: Your 12-word wallet mnemonic phrase (required) +- `AKASH_MANIFEST_MODE`: Controls manifest generation behavior +- `AKASH_MANIFEST_VALIDATION_LEVEL`: Sets SDL validation strictness +- `AKASH_DEPOSIT`: Default deposit amount for deployments + +⚠️ Never commit your `.env` file with real credentials to version control! + + +#### SDL (Stack Definition Language) +``` +src/sdl/example.sdl.yml +``` +Place your SDL configuration files here. The plugin looks for SDL files in this directory by default. + +#### Certificates +``` +src/.certificates/ +``` +SSL certificates for secure provider communication are stored here. 
+ +## Available Actions + +| Action | Description | Parameters | +|---------------------|------------------------------------------------|---------------------------------------------| +| CREATE_DEPLOYMENT | Create a new deployment | `sdl`, `sdlFile`, `deposit` | +| CLOSE_DEPLOYMENT | Close an existing deployment | `dseq`, `owner` | +| GET_PROVIDER_INFO | Get provider information | `provider` | +| GET_DEPLOYMENT_STATUS| Check deployment status | `dseq`, `owner` | +| GET_GPU_PRICING | Get GPU pricing comparison | `cpu`, `memory`, `storage` | +| GET_MANIFEST | Generate deployment manifest | `sdl`, `sdlFile` | +| GET_PROVIDERS_LIST | List available providers | `filter: { active, hasGPU, region }` | + + +Each action returns a structured response with: +```typescript +{ + text: string; // Human-readable response + content: { + success: boolean; // Operation success status + data?: any; // Action-specific data + error?: { // Present only on failure + code: string; + message: string; + }; + metadata: { // Operation metadata + timestamp: string; + source: string; + action: string; + version: string; + actionId: string; + } + } +} +``` + +## Error Handling + +The plugin includes comprehensive error handling with specific error codes: + +- `VALIDATION_SDL_FAILED`: SDL validation errors +- `WALLET_NOT_INITIALIZED`: Wallet setup issues +- `DEPLOYMENT_CREATION_FAILED`: Deployment failures +- `API_REQUEST_FAILED`: Network/API issues +- `MANIFEST_PARSING_FAILED`: Manifest generation errors +- `PROVIDER_FILTER_ERROR`: Provider filtering issues + +## Development + +### Running Tests +```bash +pnpm test +``` + +### Building +```bash +pnpm run build +``` + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. + +## Support + +For support and questions, please open an issue in the repository or contact the maintainers. 
diff --git a/packages/plugin-akash/src/actions/closeDeployment.ts b/packages/plugin-akash/src/actions/closeDeployment.ts new file mode 100644 index 00000000000..f245c689044 --- /dev/null +++ b/packages/plugin-akash/src/actions/closeDeployment.ts @@ -0,0 +1,521 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; +import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { validateAkashConfig } from "../environment"; +import { fetchDeployments } from "./getDeploymentApi"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { getCertificatePath } from "../utils/paths"; +import { inspectRuntime, isPluginLoaded } from "../runtime_inspect"; + +interface CloseDeploymentContent extends Content { + dseq?: string; + closeAll?: boolean; +} + +// Certificate file path +const CERTIFICATE_PATH = getCertificatePath(import.meta.url); + +// Initialize wallet and client +async function initializeClient(runtime: IAgentRuntime) { + elizaLogger.info("=== Initializing Client for Deployment Closure ==="); + const config = await validateAkashConfig(runtime); + + if (!config.AKASH_MNEMONIC) { + throw new AkashError( + "AKASH_MNEMONIC is required for closing deployments", + AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + elizaLogger.debug("Initializing wallet", { + rpcEndpoint: config.RPC_ENDPOINT, + chainId: config.AKASH_CHAIN_ID, + version: config.AKASH_VERSION, + hasMnemonic: !!config.AKASH_MNEMONIC + }); + + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash" + }); + + const [account] = await wallet.getAccounts(); + elizaLogger.debug("Wallet initialized 
successfully", { + address: account.address, + prefix: "akash" + }); + + // Initialize registry and client + const myRegistry = new Registry(getAkashTypeRegistry()); + const client = await SigningStargateClient.connectWithSigner( + config.AKASH_NODE || "https://rpc.akash.forbole.com:443", + wallet, + { registry: myRegistry } + ); + + elizaLogger.info("Client initialization complete", { + nodeUrl: config.AKASH_NODE || "https://rpc.akash.forbole.com:443", + address: account.address + }); + + return { client, account, wallet }; +} + +// Verify deployment status before closing +async function verifyDeploymentStatus(runtime: IAgentRuntime, dseq: string): Promise { + elizaLogger.info("Verifying deployment status", { dseq }); + + try { + const deployments = await fetchDeployments(runtime, undefined, 0, 100); + const deployment = deployments.results.find(d => d.dseq === dseq); + + if (!deployment) { + throw new AkashError( + `Deployment not found with DSEQ: ${dseq}`, + AkashErrorCode.DEPLOYMENT_NOT_FOUND + ); + } + + if (deployment.status.toLowerCase() !== 'active') { + throw new AkashError( + `Deployment ${dseq} is not active (current status: ${deployment.status})`, + AkashErrorCode.DEPLOYMENT_CLOSE_FAILED + ); + } + + return true; + } catch (error) { + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to verify deployment status: ${error instanceof Error ? 
error.message : String(error)}`, + AkashErrorCode.DEPLOYMENT_NOT_FOUND + ); + } +} + +// Close a single deployment by DSEQ +async function closeSingleDeployment( + runtime: IAgentRuntime, + dseq: string +): Promise { + elizaLogger.info("Closing single deployment", { dseq }); + + try { + // Verify deployment exists and is active + await verifyDeploymentStatus(runtime, dseq); + + const { client, account } = await initializeClient(runtime); + + // Create close deployment message + const message = MsgCloseDeployment.fromPartial({ + id: { + dseq: dseq, + owner: account.address + } + }); + + const msgAny = { + typeUrl: getTypeUrl(MsgCloseDeployment), + value: message + }; + + // Set fee + const fee = { + amount: [{ denom: "uakt", amount: "20000" }], + gas: "800000" + }; + + // Send transaction + elizaLogger.info("Sending close deployment transaction", { dseq }); + const result = await client.signAndBroadcast( + account.address, + [msgAny], + fee, + `close deployment ${dseq}` + ); + + if (result.code !== 0) { + throw new AkashError( + `Transaction failed: ${result.rawLog}`, + AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + { rawLog: result.rawLog } + ); + } + + elizaLogger.info("Deployment closed successfully", { + dseq, + transactionHash: result.transactionHash + }); + + return true; + } catch (error) { + elizaLogger.error("Failed to close deployment", { + dseq, + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + stack: error instanceof Error ? 
error.stack : undefined + }); + throw error; + } +} + +// Close all active deployments +async function closeAllDeployments( + runtime: IAgentRuntime +): Promise<{ success: string[], failed: string[] }> { + elizaLogger.info("Closing all active deployments"); + + try { + // Fetch active deployments + const deployments = await fetchDeployments(runtime, undefined, 0, 100); + const activeDeployments = deployments.results.filter(d => + d.status.toLowerCase() === 'active' + ); + + if (activeDeployments.length === 0) { + elizaLogger.info("No active deployments found to close"); + return { success: [], failed: [] }; + } + + elizaLogger.info("Found active deployments to close", { + count: activeDeployments.length, + dseqs: activeDeployments.map(d => d.dseq) + }); + + // Close each deployment + const results = { success: [] as string[], failed: [] as string[] }; + for (const deployment of activeDeployments) { + try { + await closeSingleDeployment(runtime, deployment.dseq); + results.success.push(deployment.dseq); + } catch (error) { + elizaLogger.error("Failed to close deployment", { + dseq: deployment.dseq, + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED + }); + results.failed.push(deployment.dseq); + } + } + + elizaLogger.info("Finished closing deployments", results); + return results; + } catch (error) { + elizaLogger.error("Failed to close deployments", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + stack: error instanceof Error ? 
error.stack : undefined + }); + throw error; + } +} + +export const closeDeploymentAction: Action = { + name: "CLOSE_DEPLOYMENT", + similes: ["CLOSE_AKASH_DEPLOYMENT", "STOP_DEPLOYMENT", "TERMINATE_DEPLOYMENT"], + description: "Close an active deployment on the Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Close deployment with DSEQ 123456", + dseq: "123456" + } as CloseDeploymentContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Closing deployment with DSEQ 123456..." + } as CloseDeploymentContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Close all active deployments", + closeAll: true + } as CloseDeploymentContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Closing all active deployments..." + } as CloseDeploymentContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("=== Starting Close Deployment Validation ==="); + elizaLogger.debug("Validating close deployment request", { message }); + + // Check if plugin is properly loaded + if (!isPluginLoaded(runtime, "akash")) { + elizaLogger.error("Akash plugin not properly loaded during validation"); + return false; + } + + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + elizaLogger.debug("Validating parameters", { params }); + + // If no parameters provided, use environment defaults + if (!params.dseq && !params.closeAll) { + if (config.AKASH_CLOSE_DEP === "closeAll") { + params.closeAll = true; + } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { + params.dseq = config.AKASH_CLOSE_DSEQ; + } else { + throw new AkashError( + "Either dseq or closeAll parameter is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameters: ["dseq", "closeAll"] } + ); + } + } + + if (params.dseq && params.closeAll) { + throw new AkashError( + "Cannot specify both 
dseq and closeAll parameters", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameters: ["dseq", "closeAll"] } + ); + } + + if (params.dseq && !/^\d+$/.test(params.dseq)) { + throw new AkashError( + "DSEQ must be a numeric string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "dseq", value: params.dseq } + ); + } + + elizaLogger.debug("Validation completed successfully"); + return true; + } catch (error) { + elizaLogger.error("Close deployment validation failed", { + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("=== Starting Close Deployment Request ===", { + actionId, + messageId: message.id, + userId: message.userId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // If no parameters provided, use environment defaults + if (!params.dseq && !params.closeAll) { + if (config.AKASH_CLOSE_DEP === "closeAll") { + params.closeAll = true; + } else if (config.AKASH_CLOSE_DEP === "dseq" && config.AKASH_CLOSE_DSEQ) { + params.dseq = config.AKASH_CLOSE_DSEQ; + } else { + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: 'AkashError' + }); + + const errorResponse = { + text: "Either DSEQ or closeAll parameter is required", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Either dseq or closeAll parameter is required" + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + 
callback(errorResponse); + } + return false; + } + } + + if (params.closeAll) { + const results = await closeAllDeployments(runtime); + + if (callback) { + elizaLogger.info("=== Preparing callback response for bulk closure ===", { + hasCallback: true, + actionId, + successCount: results.success.length, + failedCount: results.failed.length + }); + + const callbackResponse = { + text: `Deployment Closure Results:\n\nSuccessfully closed: ${results.success.length} deployments${ + results.success.length > 0 ? `\nDSEQs: ${results.success.join(', ')}` : '' + }${ + results.failed.length > 0 ? `\n\nFailed to close: ${results.failed.length} deployments\nDSEQs: ${results.failed.join(', ')}` : '' + }`, + content: { + success: results.failed.length === 0, + data: { + successful: results.success, + failed: results.failed, + totalClosed: results.success.length, + totalFailed: results.failed.length + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + return results.failed.length === 0; + + } else if (params.dseq) { + const success = await closeSingleDeployment(runtime, params.dseq); + + if (callback) { + elizaLogger.info("=== Preparing callback response for single closure ===", { + hasCallback: true, + actionId, + dseq: params.dseq, + success + }); + + const callbackResponse = { + text: success ? 
+ `Successfully closed deployment DSEQ: ${params.dseq}` : + `Failed to close deployment DSEQ: ${params.dseq}`, + content: { + success, + data: { + dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + return success; + } + + return false; + + } catch (error) { + elizaLogger.error("Close deployment request failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to close deployment: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CLOSE_FAILED, + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'closeDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default closeDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/createCertificate.ts b/packages/plugin-akash/src/actions/createCertificate.ts new file mode 100644 index 00000000000..67058e2d168 --- /dev/null +++ b/packages/plugin-akash/src/actions/createCertificate.ts @@ -0,0 +1,456 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import * as cert from "@akashnetwork/akashjs/build/certificates"; +import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; +import { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode, withRetry } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { Registry } from "@cosmjs/proto-signing"; +import type { SigningStargateClient as AkashSigningStargateClient } from "@akashnetwork/akashjs/node_modules/@cosmjs/stargate"; +import { getCertificatePath } from "../utils/paths"; + +interface 
CreateCertificateContent extends Content { + overwrite?: boolean; +} + +// Certificate file path +const CERTIFICATE_PATH = getCertificatePath(import.meta.url); + +// Save certificate to file +async function saveCertificate(certificate: CertificatePem): Promise { + elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH }); + try { + // Ensure directory exists + const dir = path.dirname(CERTIFICATE_PATH); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + const json = JSON.stringify(certificate); + fs.writeFileSync(CERTIFICATE_PATH, json); + elizaLogger.debug("Certificate saved successfully"); + } catch (error) { + elizaLogger.error("Failed to save certificate", { + error: error instanceof Error ? error.message : String(error), + path: CERTIFICATE_PATH + }); + throw new AkashError( + "Failed to save certificate", + AkashErrorCode.FILE_WRITE_ERROR, + { path: CERTIFICATE_PATH, error } + ); + } +} + +// Load certificate from file +function loadCertificate(): CertificatePem { + elizaLogger.debug("Loading certificate from file", { path: CERTIFICATE_PATH }); + try { + if (!fs.existsSync(CERTIFICATE_PATH)) { + throw new AkashError( + "Certificate file not found", + AkashErrorCode.CERTIFICATE_NOT_FOUND, + { path: CERTIFICATE_PATH } + ); + } + const json = fs.readFileSync(CERTIFICATE_PATH, "utf8"); + const certificate = JSON.parse(json); + elizaLogger.debug("Certificate loaded successfully", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + return certificate; + } catch (error) { + elizaLogger.error("Failed to load certificate", { + error: error instanceof Error ? 
error.message : String(error), + path: CERTIFICATE_PATH + }); + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + "Failed to load certificate", + AkashErrorCode.FILE_READ_ERROR, + { path: CERTIFICATE_PATH, error } + ); + } +} + +// Initialize wallet with proper error handling +async function initializeWallet(mnemonic: string): Promise { + elizaLogger.debug("=== Initializing Wallet ===", { + mnemonicLength: mnemonic.split(' ').length, + hasMnemonic: !!mnemonic, + mnemonicFirstWord: mnemonic.split(' ')[0] + }); + + // Validate mnemonic format + const words = mnemonic.trim().split(/\s+/); + if (words.length !== 12 && words.length !== 24) { + const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`; + elizaLogger.error("Mnemonic validation failed", { + error, + wordCount: words.length, + expectedCounts: [12, 24], + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + throw new AkashError( + error, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + wordCount: words.length, + expectedCounts: [12, 24] + } + ); + } + + try { + elizaLogger.debug("Creating wallet with mnemonic", { + wordCount: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { + prefix: "akash" + }); + const accounts = await wallet.getAccounts(); + + elizaLogger.debug("Wallet initialized successfully", { + accountCount: accounts.length, + firstAccountAddress: accounts[0]?.address, + addressPrefix: accounts[0]?.address?.substring(0, 6) + }); + + if (!accounts.length) { + throw new AkashError( + "No accounts found in wallet", + AkashErrorCode.WALLET_INITIALIZATION_FAILED + ); + } + + return wallet; + } catch (error) { + elizaLogger.error("Wallet initialization failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + mnemonicLength: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + + if (error instanceof AkashError) { + throw error; + } + + throw new AkashError( + `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + mnemonicLength: words.length, + error: error instanceof Error ? error.message : String(error) + } + ); + } +} + +// Setup client with proper error handling and fallback RPC endpoints +async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string): Promise { + // Try alternative RPC endpoints if the main one fails + const rpcEndpoints = [ + rpcEndpoint, + "https://rpc.akashnet.net:443", + "https://akash-rpc.polkachu.com:443", + "https://akash-rpc.europlots.com:443" + ]; + + elizaLogger.info("=== Setting up Stargate Client ===", { + primaryRpcEndpoint: rpcEndpoint, + allEndpoints: rpcEndpoints, + walletType: wallet.constructor.name + }); + + let lastError: Error | undefined; + for (const endpoint of rpcEndpoints) { + try { + elizaLogger.debug("Attempting to connect to RPC endpoint", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + totalEndpoints: rpcEndpoints.length + }); + + const registry = new Registry(getAkashTypeRegistry()); + elizaLogger.debug("Registry created for endpoint", { + endpoint, + registryType: registry.constructor.name + }); + + const client = await SigningStargateClient.connectWithSigner( + endpoint, + wallet, + { registry } + ); + + elizaLogger.debug("Client setup completed successfully", { + endpoint, + clientType: client.constructor.name + }); + + return client; + } catch (error) { + lastError = error as Error; + elizaLogger.warn("Failed to connect to RPC endpoint", { + endpoint, + error: error instanceof Error ? 
error.message : String(error), + remainingEndpoints: rpcEndpoints.slice(rpcEndpoints.indexOf(endpoint) + 1).length + }); + } + } + + throw new AkashError( + `Failed to connect to any RPC endpoint: ${lastError?.message}`, + AkashErrorCode.CLIENT_SETUP_FAILED, + { lastError } + ); +} + +export const createCertificateAction: Action = { + name: "CREATE_CERTIFICATE", + similes: ["GENERATE_CERTIFICATE", "SETUP_CERTIFICATE", "INIT_CERTIFICATE"], + description: "Create or load Akash certificate for provider interactions", + examples: [[ + { + user: "user", + content: { + text: "Create a new certificate", + overwrite: true + } as CreateCertificateContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Creating new certificate..." + } as CreateCertificateContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("=== Starting Certificate Validation ==="); + try { + const params = message.content as Partial; + + // Validate Akash configuration + await validateAkashConfig(runtime); + + // If overwrite is specified, it must be a boolean + if (params.overwrite !== undefined && typeof params.overwrite !== 'boolean') { + throw new AkashError( + "Overwrite parameter must be a boolean", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "overwrite", value: params.overwrite } + ); + } + + elizaLogger.debug("Certificate validation completed successfully"); + return true; + } catch (error) { + elizaLogger.error("Certificate validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { callback?: HandlerCallback } = {} + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("=== Starting Certificate Creation/Loading ===", { actionId }); + + try { + // First validate the parameters + if (!await createCertificateAction.validate(runtime, message)) { + const error = new AkashError( + "Invalid parameters provided", + AkashErrorCode.VALIDATION_PARAMETER_INVALID + ); + if (options.callback) { + options.callback({ + text: `Failed to validate parameters: ${error.message}`, + error: error.message, + content: { + success: false, + error: { + code: error.code, + message: error.message + } + } + }); + } + return false; + } + + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + try { + // Check if certificate exists and overwrite is not true + if (fs.existsSync(CERTIFICATE_PATH) && !params.overwrite) { + elizaLogger.info("Loading existing certificate"); + const certificate = loadCertificate(); + + if (options.callback) { + options.callback({ + text: "Loaded existing certificate", + content: { + success: true, + certificate: { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + } + } + }); + } + return true; + } + + // Initialize wallet + elizaLogger.info("Initializing wallet for certificate creation"); + const wallet = await initializeWallet(config.AKASH_MNEMONIC); + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + elizaLogger.debug("Wallet initialized", { + address, + accountCount: accounts.length + }); + + // Setup client + elizaLogger.debug("Setting up Stargate client"); + const client = await setupClient(wallet, config.RPC_ENDPOINT); + 
elizaLogger.debug("Client setup completed"); + + // Generate new certificate + elizaLogger.info("Generating new certificate"); + const certificate = certificateManager.generatePEM(address); + elizaLogger.debug("Certificate generated", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + + // Broadcast certificate + elizaLogger.info("Broadcasting certificate to network"); + const result = await withRetry(async () => { + return await cert.broadcastCertificate( + certificate, + address, + client as unknown as AkashSigningStargateClient + ); + }); + + if (result.code !== 0) { + throw new AkashError( + `Could not create certificate: ${result.rawLog}`, + AkashErrorCode.CERTIFICATE_CREATION_FAILED, + { rawLog: result.rawLog } + ); + } + + elizaLogger.info("Certificate broadcast successful", { + code: result.code, + txHash: result.transactionHash, + height: result.height, + gasUsed: result.gasUsed + }); + + // Save certificate + await saveCertificate(certificate); + elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH }); + + if (options.callback) { + options.callback({ + text: "Certificate created and saved successfully", + content: { + success: true, + certificate: { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }, + transaction: { + hash: result.transactionHash, + height: result.height, + gasUsed: result.gasUsed + } + } + }); + } + + return true; + } catch (error) { + elizaLogger.error("Failed to create/load certificate", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined + }); + + if (options.callback) { + options.callback({ + text: `Failed to create/load certificate: ${error instanceof Error ? error.message : String(error)}`, + error: error instanceof Error ? 
error.message : String(error), + content: { + success: false, + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, + message: String(error) + } + } + }); + } + return false; + } + } catch (error) { + elizaLogger.error("Certificate operation failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : AkashErrorCode.CERTIFICATE_CREATION_FAILED, + actionId + }); + + if (options.callback) { + options.callback({ + text: `Certificate operation failed: ${error instanceof Error ? error.message : String(error)}`, + error: error instanceof Error ? error.message : String(error), + content: { + success: false, + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.CERTIFICATE_CREATION_FAILED, + message: String(error) + } + } + }); + } + + return false; + } + } +}; + +export default createCertificateAction; diff --git a/packages/plugin-akash/src/actions/createDeployment.ts b/packages/plugin-akash/src/actions/createDeployment.ts new file mode 100644 index 00000000000..f8adfed0e19 --- /dev/null +++ b/packages/plugin-akash/src/actions/createDeployment.ts @@ -0,0 +1,1470 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { QueryClientImpl as QueryProviderClient, QueryProviderRequest } from "@akashnetwork/akash-api/akash/provider/v1beta3"; +import { QueryBidsRequest, QueryClientImpl as QueryMarketClient, MsgCreateLease, BidID } from "@akashnetwork/akash-api/akash/market/v1beta4"; +import * as cert from "@akashnetwork/akashjs/build/certificates"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import 
{ SDL } from "@akashnetwork/akashjs/build/sdl"; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { CertificatePem } from "@akashnetwork/akashjs/build/certificates/certificate-manager/CertificateManager"; +import { certificateManager } from "@akashnetwork/akashjs/build/certificates/certificate-manager"; +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode, withRetry } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { getCertificatePath, getDefaultSDLPath } from "../utils/paths"; +import { fileURLToPath } from 'url'; +import { inspectRuntime, isPluginLoaded } from "../runtime_inspect"; +import https from 'node:https'; +import axios from 'axios'; + +interface CreateDeploymentContent extends Content { + sdl?: string; + sdlFile?: string; + deposit?: string; +} + +// Certificate file path +const CERTIFICATE_PATH = getCertificatePath(import.meta.url); + +// Save certificate to file +function saveCertificate(certificate: CertificatePem) { + elizaLogger.debug("Saving certificate to file", { path: CERTIFICATE_PATH }); + try { + // Ensure directory exists + const dir = path.dirname(CERTIFICATE_PATH); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + const json = JSON.stringify(certificate); + fs.writeFileSync(CERTIFICATE_PATH, json); + elizaLogger.debug("Certificate saved successfully"); + } catch (error) { + elizaLogger.error("Failed to save certificate", { + error: error instanceof Error ? 
error.message : String(error), + path: CERTIFICATE_PATH + }); + throw error; + } +} + +// Load certificate from file +function loadCertificate(path: string): CertificatePem { + elizaLogger.debug("Loading certificate from file", { path }); + try { + const json = fs.readFileSync(path, "utf8"); + const certificate = JSON.parse(json); + elizaLogger.debug("Certificate loaded successfully", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + return certificate; + } catch (error) { + elizaLogger.error("Failed to load certificate", { + error: error instanceof Error ? error.message : String(error), + path + }); + throw error; + } +} + +const DEFAULT_SDL_PATH = (() => { + const currentFileUrl = import.meta.url; + // elizaLogger.info("=== Starting SDL Path Resolution in createDeployment ===", { + // currentFileUrl, + // cwd: process.cwd(), + // importMetaUrl: import.meta.url + // }); + + // Use the utility function from paths.ts instead of manual resolution + const sdlPath = getDefaultSDLPath(currentFileUrl); + + // Only log if file doesn't exist + if (!fs.existsSync(sdlPath)) { + elizaLogger.warn("Default SDL path not found", { + sdlPath, + exists: false + }); + } + + return sdlPath; +})(); + +const validateDeposit = (deposit: string): boolean => { + const pattern = /^\d+uakt$/; + return pattern.test(deposit); +}; + +const loadSDLFromFile = (filePath: string): string => { + // elizaLogger.info("=== Loading SDL File ===", { + // requestedPath: filePath, + // resolvedPath: path.resolve(filePath), + // defaultSdlPath: DEFAULT_SDL_PATH, + // cwd: process.cwd(), + // exists: fs.existsSync(filePath), + // defaultExists: fs.existsSync(DEFAULT_SDL_PATH) + // }); + + try { + // If path doesn't contain plugin-akash and it's not the default path, adjust it + if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { + const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), 
path.basename(filePath)); + // elizaLogger.info("Adjusting SDL path", { + // originalPath: filePath, + // adjustedPath, + // exists: fs.existsSync(adjustedPath), + // dirExists: fs.existsSync(path.dirname(adjustedPath)), + // dirContents: fs.existsSync(path.dirname(adjustedPath)) ? fs.readdirSync(path.dirname(adjustedPath)) : [] + // }); + filePath = adjustedPath; + } + + // Try multiple possible locations + const possiblePaths = [ + filePath, + path.join(process.cwd(), filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), + path.join(path.dirname(DEFAULT_SDL_PATH), filePath) + ]; + + // elizaLogger.info("Attempting to load SDL from possible paths", { + // possiblePaths, + // existsMap: possiblePaths.map(p => ({ path: p, exists: fs.existsSync(p) })) + // }); + + for (const tryPath of possiblePaths) { + if (fs.existsSync(tryPath)) { + const content = fs.readFileSync(tryPath, "utf8"); + elizaLogger.info("SDL file loaded successfully from", { + path: tryPath + }); + return content; + } + } + + // If we get here, none of the paths worked + throw new Error(`SDL file not found in any of the possible locations`); + } catch (error) { + elizaLogger.error("Failed to read SDL file", { + filePath, + error: error instanceof Error ? error.message : String(error) + }); + throw new AkashError( + `Failed to read SDL file: ${error instanceof Error ? 
error.message : String(error)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { filePath } + ); + } +}; + +const formatErrorMessage = (error: unknown): string => { + if (error instanceof AkashError) { + if (error.code === AkashErrorCode.WALLET_NOT_INITIALIZED) { + return "Akash wallet not initialized"; + } + if (error.code === AkashErrorCode.DEPLOYMENT_CREATION_FAILED) { + return `Transaction failed: ${error.details?.rawLog || 'Unknown error'}`; + } + if (error.code === AkashErrorCode.MANIFEST_PARSING_FAILED) { + return "Failed to parse SDL"; + } + if (error.code === AkashErrorCode.VALIDATION_PARAMETER_MISSING) { + return `${error.message}`; + } + if (error.code === AkashErrorCode.VALIDATION_SDL_FAILED) { + return `Failed to parse SDL: ${error.details?.error || error.message}`; + } + if (error.code === AkashErrorCode.VALIDATION_PARAMETER_INVALID) { + return `Invalid deposit format. Must be in format: uakt`; + } + return error.message; + } + + const message = error instanceof Error ? error.message : String(error); + if (message.toLowerCase().includes("insufficient funds")) { + return "Insufficient funds"; + } + if (message.toLowerCase().includes("invalid deposit")) { + return "Invalid deposit amount"; + } + if (message.toLowerCase().includes("cannot read properties")) { + return "Failed to parse SDL: Invalid format"; + } + return message; +}; + +async function initializeWallet(mnemonic: string) { + elizaLogger.debug("=== Initializing Wallet ===", { + mnemonicLength: mnemonic.split(' ').length, + hasMnemonic: !!mnemonic, + mnemonicFirstWord: mnemonic.split(' ')[0] + }); + + // Validate mnemonic format + const words = mnemonic.trim().split(/\s+/); + if (words.length !== 12 && words.length !== 24) { + const error = `Invalid mnemonic length: got ${words.length} words, expected 12 or 24 words`; + elizaLogger.error("Mnemonic validation failed", { + error, + wordCount: words.length, + expectedCounts: [12, 24], + mnemonicPreview: words.slice(0, 3).join(' ') + '...' 
+ }); + throw new AkashError( + error, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + wordCount: words.length, + expectedCounts: [12, 24] + } + ); + } + + try { + elizaLogger.debug("Creating wallet with mnemonic", { + wordCount: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(mnemonic, { + prefix: "akash" + }); + const accounts = await wallet.getAccounts(); + + elizaLogger.debug("Wallet initialized successfully", { + accountCount: accounts.length, + firstAccountAddress: accounts[0]?.address, + addressPrefix: accounts[0]?.address?.substring(0, 6) + }); + + if (!accounts.length) { + throw new AkashError( + "No accounts found in wallet", + AkashErrorCode.WALLET_INITIALIZATION_FAILED + ); + } + + return wallet; + } catch (error) { + elizaLogger.error("Wallet initialization failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + mnemonicLength: words.length, + mnemonicPreview: words.slice(0, 3).join(' ') + '...' + }); + + // Provide more specific error messages + const errorMessage = error instanceof Error ? 
error.message : String(error); + if (errorMessage.includes("Invalid mnemonic")) { + throw new AkashError( + "Invalid mnemonic format: The mnemonic phrase contains invalid words or is malformed", + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + mnemonicLength: words.length, + error: errorMessage + } + ); + } + + throw new AkashError( + `Failed to initialize wallet: ${errorMessage}`, + AkashErrorCode.WALLET_INITIALIZATION_FAILED, + { + mnemonicLength: words.length, + error: errorMessage + } + ); + } +} + +async function setupClient(wallet: DirectSecp256k1HdWallet, rpcEndpoint: string) { + // Try alternative RPC endpoints if the main one fails + const rpcEndpoints = [ + "https://akash-rpc.europlots.com:443", // New endpoint first + rpcEndpoint, + "https://rpc.akashnet.net:443", + "https://rpc.akash.forbole.com:443", + "https://rpc-akash.ecostake.com:443", + "https://akash-rpc.polkachu.com:443", + "https://akash.c29r3.xyz:443/rpc" + ]; + + elizaLogger.info("=== Setting up Stargate Client ===", { + primaryRpcEndpoint: rpcEndpoint, + allEndpoints: rpcEndpoints, + walletType: wallet.constructor.name, + preferredEndpoint: rpcEndpoints[0] + }); + + let lastError: Error | undefined; + for (const endpoint of rpcEndpoints) { + try { + elizaLogger.debug("Attempting to connect to RPC endpoint", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + totalEndpoints: rpcEndpoints.length + }); + + const registry = new Registry(getAkashTypeRegistry()); + elizaLogger.debug("Registry created for endpoint", { + endpoint, + registryType: registry.constructor.name + }); + + const client = await SigningStargateClient.connectWithSigner( + endpoint, + wallet, + { registry } + ); + + // Check if client is connected by attempting to get the height + try { + const height = await client.getHeight(); + elizaLogger.info("Stargate client setup successful", { + endpoint, + height, + clientType: client.constructor.name, + attempt: rpcEndpoints.indexOf(endpoint) + 1 + }); + return client; 
+ } catch (heightError) { + elizaLogger.error("Failed to get chain height", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + error: heightError instanceof Error ? heightError.message : String(heightError) + }); + lastError = heightError instanceof Error ? heightError : new Error(String(heightError)); + continue; + } + } catch (error) { + elizaLogger.error("Failed to connect to RPC endpoint", { + endpoint, + attempt: rpcEndpoints.indexOf(endpoint) + 1, + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined + }); + lastError = error instanceof Error ? error : new Error(String(error)); + continue; + } + } + + // If we get here, all endpoints failed + elizaLogger.error("All RPC endpoints failed", { + endpoints: rpcEndpoints, + lastError: lastError?.message, + totalAttempts: rpcEndpoints.length + }); + throw new AkashError( + `Failed to setup client: ${lastError?.message}`, + AkashErrorCode.CLIENT_SETUP_FAILED, + { rpcEndpoint: rpcEndpoints.join(", ") } + ); +} + +async function fetchBid(dseq: number, owner: string, rpcEndpoint: string) { + elizaLogger.info("=== Starting Bid Fetch Process ===", { + dseq, + owner, + ownerPrefix: owner.substring(0, 6) + }); + + const maxRetries = 3; + let lastError: Error | undefined; + + for (let retry = 0; retry < maxRetries; retry++) { + try { + elizaLogger.debug("Connecting to RPC for bid fetch", { + rpcEndpoint, + attempt: retry + 1, + maxRetries + }); + + const rpc = await getRpc(rpcEndpoint); + elizaLogger.debug("RPC connection established", { + rpcType: rpc.constructor.name, + attempt: retry + 1 + }); + + const client = new QueryMarketClient(rpc); + const request = QueryBidsRequest.fromPartial({ + filters: { + owner: owner, + dseq: dseq + } + }); + + const startTime = Date.now(); + const timeout = 1000 * 60 * 5; // 5 minutes timeout + elizaLogger.debug("Starting bid polling loop", { + timeout: "5 minutes", + pollInterval: "5 seconds", + attempt: 
retry + 1 + }); + + while (Date.now() - startTime < timeout) { + const elapsedTime = Math.round((Date.now() - startTime) / 1000); + elizaLogger.debug("Polling for bids", { + dseq, + owner: owner.substring(0, 6), + elapsedSeconds: elapsedTime, + remainingSeconds: Math.round(timeout/1000 - elapsedTime), + attempt: retry + 1 + }); + + try { + await new Promise(resolve => setTimeout(resolve, 5000)); + const bids = await client.Bids(request); + + if (bids.bids.length > 0 && bids.bids[0].bid !== undefined) { + elizaLogger.info("Bid found successfully", { + dseq, + owner: owner.substring(0, 6), + bidCount: bids.bids.length, + elapsedSeconds: elapsedTime, + attempt: retry + 1 + }); + elizaLogger.debug("Bid details", { + bid: bids.bids[0].bid, + provider: bids.bids[0].bid?.bidId?.provider + }); + return bids.bids[0].bid; + } + } catch (pollError) { + // Log but continue polling if it's a temporary error + elizaLogger.warn("Temporary error during bid polling", { + error: pollError instanceof Error ? pollError.message : String(pollError), + dseq, + attempt: retry + 1, + willRetry: true + }); + continue; + } + } + + elizaLogger.error("Bid fetch timeout", { + dseq, + owner: owner.substring(0, 6), + timeout: "5 minutes", + attempt: retry + 1 + }); + throw new AkashError( + `Could not fetch bid for deployment ${dseq}. Timeout reached.`, + AkashErrorCode.BID_FETCH_TIMEOUT, + { dseq, owner } + ); + } catch (error) { + lastError = error instanceof Error ? error : new Error(String(error)); + elizaLogger.error("Error during bid fetch", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + dseq, + owner: owner.substring(0, 6), + attempt: retry + 1, + hasMoreRetries: retry < maxRetries - 1 + }); + + if (retry < maxRetries - 1) { + // Wait before retrying (exponential backoff) + const delay = Math.pow(2, retry) * 1000; + elizaLogger.info("Retrying bid fetch after delay", { + delay, + nextAttempt: retry + 2, + maxRetries + }); + await new Promise(resolve => setTimeout(resolve, delay)); + continue; + } + } + } + + // If we get here, all retries failed + elizaLogger.error("All bid fetch attempts failed", { + dseq, + owner: owner.substring(0, 6), + attempts: maxRetries, + finalError: lastError?.message + }); + throw lastError || new Error("Failed to fetch bid after all retries"); +} + +async function createLease(deployment: any, wallet: DirectSecp256k1HdWallet, client: SigningStargateClient, rpcEndpoint: string): Promise { + const { dseq, owner } = deployment.id; + elizaLogger.info("Starting lease creation", { dseq, owner }); + + try { + elizaLogger.debug("Fetching bid for lease creation"); + const bid = await fetchBid(dseq, owner, rpcEndpoint); + const accounts = await wallet.getAccounts(); + + if (bid.bidId === undefined) { + elizaLogger.error("Invalid bid - missing bidId", { dseq, owner }); + throw new AkashError("Bid ID is undefined", AkashErrorCode.INVALID_BID); + } + + elizaLogger.debug("Creating lease message", { + dseq, + owner, + bidId: bid.bidId + }); + + const lease = { + bidId: bid.bidId + }; + + const fee = { + amount: [{ denom: "uakt", amount: "50000" }], + gas: "2000000" + }; + + const msg = { + typeUrl: `/${MsgCreateLease.$type}`, + value: MsgCreateLease.fromPartial(lease) + }; + + elizaLogger.info("Broadcasting lease creation transaction"); + const tx = await client.signAndBroadcast(accounts[0].address, [msg], fee, "create lease"); + + if (tx.code !== 0) { + elizaLogger.error("Lease creation failed", { + dseq, + owner, + code: tx.code, + rawLog: tx.rawLog + }); + throw new AkashError( + `Could not create lease: 
${tx.rawLog}`, + AkashErrorCode.LEASE_CREATION_FAILED, + { rawLog: tx.rawLog } + ); + } + + elizaLogger.info("Lease created successfully", { + dseq, + owner, + txHash: tx.transactionHash + }); + + return { + id: BidID.toJSON(bid.bidId) + }; + } catch (error) { + elizaLogger.error("Error during lease creation", { + error, + dseq, + owner + }); + throw error; + } +} + +interface LeaseStatus { + services: Record; +} + +async function queryLeaseStatus(lease: any, providerUri: string, certificate: CertificatePem): Promise { + const id = lease.id; + elizaLogger.info("Querying lease status", { + dseq: id?.dseq, + gseq: id?.gseq, + oseq: id?.oseq, + providerUri + }); + + if (id === undefined) { + elizaLogger.error("Invalid lease - missing ID"); + throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE); + } + + const leasePath = `/lease/${id.dseq}/${id.gseq}/${id.oseq}/status`; + elizaLogger.debug("Setting up request", { + providerUri, + leasePath, + hasCert: !!certificate.cert, + hasKey: !!certificate.privateKey + }); + + const MAX_RETRIES = 3; + const INITIAL_RETRY_DELAY = 3000; + let retryCount = 0; + + while (retryCount < MAX_RETRIES) { + try { + const url = new URL(providerUri); + const fullUrl = `${url.protocol}//${url.hostname}${url.port ? 
':' + url.port : ''}${leasePath}`; + + elizaLogger.debug("Making request", { + url: fullUrl, + method: 'GET', + hasCertificate: !!certificate, + retryCount + }); + + const agent = new https.Agent({ + cert: certificate.cert, + key: certificate.privateKey, + rejectUnauthorized: false, + keepAlive: false, + timeout: 10000 + }); + + try { + const response = await fetch(fullUrl, { + method: 'GET', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + // @ts-ignore - Node's fetch has agent support + agent, + signal: AbortSignal.timeout(10000) + }); + + if (response.status !== 200) { + elizaLogger.warn("Non-OK response from lease status query", { + statusCode: response.status, + statusText: response.statusText, + dseq: id.dseq, + url: fullUrl, + retryCount + }); + + if (response.status === 404) { + elizaLogger.debug("Deployment not ready yet (404)", { + dseq: id.dseq, + retryCount + }); + return undefined; + } + throw new Error(`Could not query lease status: ${response.status}`); + } + + const data = await response.json() as LeaseStatus; + elizaLogger.debug("Lease status received", { + dseq: id.dseq, + dataLength: JSON.stringify(data).length, + hasServices: !!data.services, + serviceCount: Object.keys(data.services || {}).length + }); + return data; + } finally { + agent.destroy(); + } + } catch (error) { + elizaLogger.warn("Error during lease status query", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + dseq: id.dseq, + providerUri, + retryCount + }); + + if (retryCount < MAX_RETRIES - 1) { + const delay = INITIAL_RETRY_DELAY * Math.pow(2, retryCount); + elizaLogger.debug("Retrying after error", { + delay, + nextRetry: retryCount + 1, + maxRetries: MAX_RETRIES + }); + await new Promise(r => setTimeout(r, delay)); + retryCount++; + continue; + } + + // On final retry, if it's a network error or 404, return undefined + if (error instanceof Error && + ((error as any).code === 'ECONNABORTED' || + (error as any).code === 'ETIMEDOUT' || + ((error as any).response && (error as any).response.status === 404))) { + elizaLogger.info("Returning undefined after max retries", { + dseq: id.dseq, + error: error.message + }); + return undefined; + } + + throw error; + } + } + + elizaLogger.info("Max retries reached, returning undefined", { + dseq: id.dseq, + maxRetries: MAX_RETRIES + }); + return undefined; +} + +async function sendManifest(sdl: SDL, lease: any, certificate: CertificatePem, rpcEndpoint: string) { + elizaLogger.info("Starting manifest send process"); + if (lease.id === undefined) { + elizaLogger.error("Invalid lease - missing ID"); + throw new AkashError("Lease ID is undefined", AkashErrorCode.INVALID_LEASE); + } + + try { + const { dseq, provider } = lease.id; + elizaLogger.debug("Getting provider info", { provider }); + + const rpc = await getRpc(rpcEndpoint); + const client = new QueryProviderClient(rpc); + const request = QueryProviderRequest.fromPartial({ + owner: provider + }); + + const tx = await client.Provider(request); + + if (tx.provider === undefined) { + elizaLogger.error("Provider not found", { provider }); + throw new AkashError( + `Could not find provider ${provider}`, + AkashErrorCode.PROVIDER_NOT_FOUND + ); + } + + const providerInfo = tx.provider; + elizaLogger.debug("Provider info retrieved", { + provider, + hostUri: providerInfo.hostUri + }); + + const manifest = sdl.manifestSortedJSON(); + const path = 
`/deployment/${dseq}/manifest`; + + elizaLogger.info("Sending manifest to provider", { + dseq, + provider, + manifestLength: manifest.length + }); + + const uri = new URL(providerInfo.hostUri); + + const httpsAgent = new https.Agent({ + cert: certificate.cert, + key: certificate.privateKey, + rejectUnauthorized: false, + keepAlive: false, + timeout: 10000 + }); + + try { + const fullUrl = `${uri.protocol}//${uri.hostname}${uri.port ? ':' + uri.port : ''}${path}`; + elizaLogger.debug("Making manifest request", { + url: fullUrl, + method: 'PUT', + manifestLength: manifest.length + }); + + const response = await axios.put(fullUrl, manifest, { + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + httpsAgent, + timeout: 10000, + validateStatus: null // Don't throw on any status code + }); + + if (response.status !== 200) { + elizaLogger.error("Failed to send manifest", { + statusCode: response.status, + statusText: response.statusText, + dseq + }); + throw new Error(`Failed to send manifest: ${response.status} ${response.statusText}`); + } + + elizaLogger.info("Manifest sent successfully", { dseq }); + } finally { + httpsAgent.destroy(); + } + + // Wait for deployment to start + elizaLogger.info("Waiting for deployment to start", { dseq }); + const startTime = Date.now(); + const timeout = 1000 * 60 * 10; // 10 minutes timeout + let consecutiveErrors = 0; + const MAX_CONSECUTIVE_ERRORS = 5; + + while (Date.now() - startTime < timeout) { + const elapsedTime = Math.round((Date.now() - startTime) / 1000); + elizaLogger.debug("Checking deployment status", { + dseq, + elapsedTime: `${elapsedTime}s`, + remainingTime: `${Math.round(timeout/1000 - elapsedTime)}s`, + consecutiveErrors + }); + + try { + const status = await queryLeaseStatus(lease, providerInfo.hostUri, certificate); + + if (status === undefined) { + consecutiveErrors++; + elizaLogger.debug("Status check returned undefined", { + dseq, + consecutiveErrors, + maxConsecutiveErrors: 
MAX_CONSECUTIVE_ERRORS + }); + + if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { + elizaLogger.warn("Too many consecutive undefined status responses", { + dseq, + consecutiveErrors + }); + // Don't throw, just continue waiting + consecutiveErrors = 0; + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + continue; + } + + // Reset error counter on successful status check + consecutiveErrors = 0; + + for (const [name, service] of Object.entries<{ uris?: string[] }>(status.services)) { + if (service.uris) { + const rawUrl = service.uris[0]; + // Ensure URL has protocol + const serviceUrl = rawUrl.startsWith('http') ? rawUrl : `http://${rawUrl}`; + elizaLogger.info("Service is available", { + name, + rawUrl, + serviceUrl, + dseq + }); + return serviceUrl; + } + } + } catch (error) { + consecutiveErrors++; + const errorMessage = error instanceof Error ? error.message : String(error); + elizaLogger.warn("Error checking deployment status", { + error: errorMessage, + dseq, + consecutiveErrors, + maxConsecutiveErrors: MAX_CONSECUTIVE_ERRORS + }); + + if (consecutiveErrors >= MAX_CONSECUTIVE_ERRORS) { + elizaLogger.error("Too many consecutive errors checking deployment status", { + dseq, + consecutiveErrors, + error: errorMessage + }); + throw new AkashError( + "Too many consecutive errors checking deployment status", + AkashErrorCode.DEPLOYMENT_START_TIMEOUT, + { dseq, error: errorMessage } + ); + } + } + + await new Promise(resolve => setTimeout(resolve, 3000)); + } + + elizaLogger.error("Deployment start timeout", { + dseq, + timeout: "10 minutes" + }); + throw new AkashError( + "Could not start deployment. Timeout reached.", + AkashErrorCode.DEPLOYMENT_START_TIMEOUT + ); + } catch (error) { + elizaLogger.error("Error during manifest send process", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + dseq: lease.id.dseq + }); + throw error; + } +} + +async function loadOrCreateCertificate(wallet: DirectSecp256k1HdWallet, client: SigningStargateClient): Promise { + elizaLogger.info("=== Starting Certificate Creation/Loading Process ==="); + try { + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + elizaLogger.debug("Got wallet address for certificate", { + address, + addressLength: address.length, + addressPrefix: address.substring(0, 6) + }); + + // Check if certificate exists + if (fs.existsSync(CERTIFICATE_PATH)) { + elizaLogger.info("Found existing certificate file", { path: CERTIFICATE_PATH }); + const cert = loadCertificate(CERTIFICATE_PATH); + elizaLogger.debug("Loaded existing certificate", { + hasCert: !!cert.cert, + hasPrivateKey: !!cert.privateKey, + hasPublicKey: !!cert.publicKey, + certLength: cert.cert?.length, + privateKeyLength: cert.privateKey?.length, + publicKeyLength: cert.publicKey?.length + }); + return cert; + } + + // Create new certificate exactly like the example + elizaLogger.info("No existing certificate found, creating new one", { address }); + const certificate = certificateManager.generatePEM(address); + elizaLogger.debug("Certificate generated", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey, + certLength: certificate.cert?.length, + privateKeyLength: certificate.privateKey?.length, + publicKeyLength: certificate.publicKey?.length + }); + + // Broadcast certificate + elizaLogger.info("Broadcasting certificate to network", { + address, + certLength: certificate.cert?.length, + publicKeyLength: certificate.publicKey?.length + }); + + const result = await cert.broadcastCertificate( + certificate, + address, + client as any + ).catch(error => { + elizaLogger.error("Certificate broadcast failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + address, + certLength: certificate.cert?.length + }); + throw error; + }); + + if (result.code !== 0) { + const error = `Could not create certificate: ${result.rawLog}`; + elizaLogger.error("Certificate broadcast returned error code", { + code: result.code, + rawLog: result.rawLog, + address, + txHash: result.transactionHash + }); + throw new AkashError( + error, + AkashErrorCode.CERTIFICATE_CREATION_FAILED, + { rawLog: result.rawLog } + ); + } + + elizaLogger.info("Certificate broadcast successful", { + code: result.code, + txHash: result.transactionHash, + height: result.height, + gasUsed: result.gasUsed + }); + + // Save certificate + saveCertificate(certificate); + elizaLogger.info("Certificate saved to file", { path: CERTIFICATE_PATH }); + + elizaLogger.info("Certificate process completed successfully", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey, + path: CERTIFICATE_PATH + }); + + return certificate; + } catch (error) { + elizaLogger.error("Certificate creation/broadcast process failed", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + path: CERTIFICATE_PATH + }); + throw error; + } +} + +async function parseSDL(sdlContent: string): Promise { + try { + // Clean up SDL content by taking only the part after the YAML document separator + const yamlSeparatorIndex = sdlContent.indexOf('---'); + if (yamlSeparatorIndex === -1) { + throw new Error("No YAML document separator (---) found in SDL"); + } + + // Extract only the actual YAML content + const cleanSDL = sdlContent.substring(yamlSeparatorIndex); + + elizaLogger.info("Starting SDL parsing process", { + originalLength: sdlContent.length, + cleanLength: cleanSDL.length, + yamlSeparatorIndex, + cleanContent: cleanSDL.substring(0, 200) + '...', + firstLine: cleanSDL.split('\n')[0], + lastLine: cleanSDL.split('\n').slice(-1)[0], + lineCount: cleanSDL.split('\n').length, + hasVersion: cleanSDL.includes('version: "2.0"'), + hasServices: cleanSDL.includes('services:'), + hasProfiles: cleanSDL.includes('profiles:'), + hasDeployment: cleanSDL.includes('deployment:'), + charCodes: cleanSDL.substring(0, 50).split('').map(c => c.charCodeAt(0)) + }); + + // Try to parse SDL with clean content - exactly like the example + const parsedSDL = SDL.fromString(cleanSDL, "beta3"); + elizaLogger.debug("Initial SDL parsing successful", { + hasVersion: !!parsedSDL.version, + hasServices: !!parsedSDL.services, + hasProfiles: !!parsedSDL.profiles, + hasDeployment: !!parsedSDL.deployments, + serviceCount: Object.keys(parsedSDL.services || {}).length, + profileCount: Object.keys(parsedSDL.profiles || {}).length + }); + + // Get groups and version like the example + const groups = parsedSDL.groups(); + const version = await parsedSDL.manifestVersion(); + + elizaLogger.info("SDL validation completed", { + groupCount: groups.length, + version, + groups: JSON.stringify(groups) + }); + + return parsedSDL; + } catch (error) { + elizaLogger.error("Failed to parse SDL", { + error: error instanceof Error ? 
error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + sdlContent: sdlContent.substring(0, 200) + '...', + sdlLength: sdlContent.length + }); + throw error; + } +} + +export const createDeploymentAction: Action = { + name: "CREATE_DEPLOYMENT", + similes: ["DEPLOY", "START_DEPLOYMENT", "LAUNCH"], + description: "Create a new deployment on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Deploy SDL on Akash Network", + sdl: "version: \"2.0\"\n\nservices:\n web:\n image: nginx\n expose:\n - port: 80\n as: 80\n to:\n - global: true" + } as CreateDeploymentContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("=== Starting Deployment Validation ==="); + elizaLogger.debug("Validating deployment request", { message }); + + // Check if plugin is properly loaded + if (!isPluginLoaded(runtime, "akash")) { + elizaLogger.error("Akash plugin not properly loaded during validation"); + return false; + } + + try { + const params = message.content as Partial; + elizaLogger.debug("Checking SDL content", { params }); + + // Get SDL content either from direct string, specified file, or default file + let sdlContent: string; + if (params.sdl) { + sdlContent = params.sdl; + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + } else { + sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); + } + + if (params.deposit && !validateDeposit(params.deposit)) { + throw new AkashError( + "Invalid deposit format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "deposit", value: params.deposit } + ); + } + + elizaLogger.debug("Validating SDL format"); + try { + // Clean up SDL content by taking only the part after the YAML document separator + const yamlSeparatorIndex = sdlContent.indexOf('---'); + if (yamlSeparatorIndex === -1) { + throw new Error("No YAML document separator (---) found in SDL"); + } + + // Extract only the 
actual YAML content + const cleanSDL = sdlContent.substring(yamlSeparatorIndex); + + // Use exact same approach as example for validation + const sdl = SDL.fromString(cleanSDL, "beta3"); + await sdl.manifestVersion(); // Verify we can get the version + elizaLogger.debug("SDL format validation successful", { + groups: sdl.groups(), + groupCount: sdl.groups().length + }); + } catch (sdlError) { + elizaLogger.error("SDL format validation failed", { error: sdlError }); + throw new AkashError( + `Invalid SDL format: ${sdlError instanceof Error ? sdlError.message : String(sdlError)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { sdl: sdlContent } + ); + } + + elizaLogger.debug("Validation completed successfully"); + return true; + } catch (error) { + elizaLogger.error("Deployment validation failed", { + error: error instanceof AkashError ? { + category: error.category, + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("=== Starting Deployment Creation ===", { + actionId, + messageId: message.id, + userId: message.userId + }); + + // Inspect runtime to verify plugin and action registration + inspectRuntime(runtime); + + try { + elizaLogger.debug("=== Validating Akash Configuration ==="); + const config = await validateAkashConfig(runtime); + elizaLogger.debug("Configuration validated successfully", { + rpcEndpoint: config.RPC_ENDPOINT, + chainId: config.AKASH_CHAIN_ID, + version: config.AKASH_VERSION, + hasMnemonic: !!config.AKASH_MNEMONIC + }); + + const params = message.content as CreateDeploymentContent; + elizaLogger.debug("=== Processing Deployment Parameters ===", { + hasSDL: !!params.sdl, + hasSDLFile: !!params.sdlFile, + hasDeposit: !!params.deposit + 
}); + + // Get SDL content either from direct string, specified file, or default file + let sdlContent: string; + let sdlSource: string; + if (params.sdl) { + sdlContent = params.sdl; + sdlSource = 'direct'; + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + sdlSource = 'file'; + } else { + sdlContent = loadSDLFromFile(DEFAULT_SDL_PATH); + sdlSource = 'default'; + } + elizaLogger.debug("SDL content loaded", { + source: sdlSource, + contentLength: sdlContent.length + }); + + if (params.deposit && !validateDeposit(params.deposit)) { + elizaLogger.error("Invalid deposit format", { + deposit: params.deposit + }); + throw new AkashError( + "Invalid deposit format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "deposit", value: params.deposit } + ); + } + + // Initialize wallet from mnemonic + elizaLogger.info("=== Initializing Wallet and Client ==="); + const wallet = await initializeWallet(config.AKASH_MNEMONIC); + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + elizaLogger.debug("Wallet initialized", { + address, + accountCount: accounts.length + }); + + // Setup client + elizaLogger.debug("Setting up Stargate client"); + const client = await setupClient(wallet, config.RPC_ENDPOINT); + elizaLogger.debug("Client setup completed", { + rpcEndpoint: config.RPC_ENDPOINT + }); + + // Load or create certificate + elizaLogger.info("=== Setting up Certificate ==="); + const certificate = await loadOrCreateCertificate(wallet, client); + elizaLogger.debug("Certificate setup completed", { + hasCert: !!certificate.cert, + hasPrivateKey: !!certificate.privateKey, + hasPublicKey: !!certificate.publicKey + }); + + // Parse SDL + elizaLogger.info("=== Parsing SDL Configuration ==="); + let sdl: SDL; + try { + sdl = await parseSDL(sdlContent); + elizaLogger.debug("SDL parsed successfully", { + groupCount: sdl.groups().length, + groups: sdl.groups(), + version: await sdl.manifestVersion() + }); + } 
catch (sdlError) { + elizaLogger.error("SDL parsing failed", { + error: sdlError instanceof Error ? sdlError.message : String(sdlError), + sdlContent + }); + throw new AkashError( + `SDL parsing failed: ${sdlError instanceof Error ? sdlError.message : String(sdlError)}`, + AkashErrorCode.MANIFEST_PARSING_FAILED, + { + sdl: sdlContent, + actionId + } + ); + } + + elizaLogger.info("=== Creating Deployment Message ==="); + const blockHeight = await client.getHeight(); + elizaLogger.debug("Current block height", { blockHeight }); + + const deployment = { + id: { + owner: address, + dseq: blockHeight + }, + groups: sdl.groups(), + deposit: { + denom: "uakt", + amount: params.deposit?.replace("uakt", "") || config.AKASH_DEPOSIT.replace("uakt", "") + }, + version: await sdl.manifestVersion(), + depositor: address + }; + + elizaLogger.debug("Deployment object created", { + owner: deployment.id.owner, + dseq: deployment.id.dseq, + groupCount: deployment.groups.length, + groups: deployment.groups, + deposit: deployment.deposit, + version: deployment.version + }); + + const msg = { + typeUrl: "/akash.deployment.v1beta3.MsgCreateDeployment", + value: MsgCreateDeployment.fromPartial(deployment) + }; + + // Broadcast transaction with retry for network issues + elizaLogger.info("=== Broadcasting Deployment Transaction ===", { + owner: address, + dseq: blockHeight, + deposit: params.deposit || config.AKASH_DEPOSIT, + groups: deployment.groups + }); + + const result = await withRetry(async () => { + elizaLogger.debug("Attempting to sign and broadcast transaction", { + attempt: 'current', + fees: config.AKASH_DEPOSIT, + gas: "800000", + groups: deployment.groups + }); + + const txResult = await client.signAndBroadcast( + address, + [msg], + { + amount: [{ denom: "uakt", amount: config.AKASH_DEPOSIT.replace("uakt", "") }], + gas: "800000", + } + ); + + elizaLogger.debug("Transaction broadcast result", { + code: txResult.code, + height: txResult.height, + transactionHash: 
txResult.transactionHash, + gasUsed: txResult.gasUsed, + gasWanted: txResult.gasWanted, + rawLog: txResult.rawLog + }); + + if (txResult.code !== 0) { + elizaLogger.error("Transaction failed", { + code: txResult.code, + rawLog: txResult.rawLog, + groups: deployment.groups + }); + throw new AkashError( + `Transaction failed: ${txResult.rawLog}`, + AkashErrorCode.DEPLOYMENT_CREATION_FAILED, + { + rawLog: txResult.rawLog, + dseq: blockHeight, + owner: address, + actionId, + groups: deployment.groups + } + ); + } + + return txResult; + }); + + elizaLogger.info("=== Deployment Created Successfully ===", { + txHash: result.transactionHash, + owner: address, + dseq: blockHeight, + actionId, + height: result.height, + gasUsed: result.gasUsed + }); + + // Create lease + elizaLogger.debug("=== Creating Lease ==="); + const lease = await createLease(deployment, wallet, client, config.RPC_ENDPOINT); + elizaLogger.debug("Lease created", { + leaseId: lease.id, + dseq: deployment.id.dseq + }); + + // Send manifest + elizaLogger.debug("=== Sending Manifest ==="); + const serviceUrl = await sendManifest(sdl, lease, certificate, config.RPC_ENDPOINT); + elizaLogger.debug("Manifest sent successfully", { + serviceUrl + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for deployment creation ===", { + hasCallback: true, + actionId, + dseq: String(blockHeight) + }); + + const callbackResponse = { + text: `Deployment created and started successfully\nDSEQ: ${blockHeight}\nOwner: ${address}\nTx Hash: ${result.transactionHash}\nService URL: ${serviceUrl}`, + content: { + success: true, + data: { + txHash: result.transactionHash, + owner: address, + dseq: String(blockHeight), + serviceUrl + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'createDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + 
hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + elizaLogger.info("=== Deployment Process Completed Successfully ===", { + actionId, + txHash: result.transactionHash, + dseq: blockHeight + }); + + return true; + } catch (error) { + elizaLogger.error("=== Deployment Creation Failed ===", { + error: error instanceof AkashError ? { + category: error.category, + code: error.code, + message: error.message, + details: error.details + } : String(error), + actionId, + stack: error instanceof Error ? error.stack : undefined + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: "Failed to create deployment", + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.DEPLOYMENT_CREATION_FAILED, + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'createDeployment', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + }, +}; + +export default createDeploymentAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/estimateGas.ts b/packages/plugin-akash/src/actions/estimateGas.ts new file mode 100644 index 00000000000..e83ccc8fa53 --- /dev/null +++ b/packages/plugin-akash/src/actions/estimateGas.ts @@ -0,0 +1,354 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet, Registry, EncodeObject } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { MsgCloseDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { getAkashTypeRegistry, getTypeUrl } from "@akashnetwork/akashjs/build/stargate"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { encodingForModel } from "js-tiktoken"; + +interface AkashMessage { + typeUrl: string; + value: { + id?: { + owner: string; + dseq: string; + }; + [key: string]: unknown; + }; +} + +interface EstimateGasContent extends Content { + text: string; + dseq?: string; + operation: "close" | "create" | "update"; + message?: EncodeObject; +} + +function getTotalTokensFromString(str: string): number { + try { + const encoding = encodingForModel("gpt-3.5-turbo"); + return 
encoding.encode(str).length; + } catch (error) { + elizaLogger.warn("Failed to count tokens", { error }); + return 0; + } +} + +export const estimateGas: Action = { + name: "ESTIMATE_GAS", + similes: ["CALCULATE_GAS", "GET_GAS_ESTIMATE", "CHECK_GAS"], + description: "Estimate gas for a transaction on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you estimate gas for closing deployment with DSEQ 123456?", + operation: "close" + } as EstimateGasContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating gas estimation request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract DSEQ from text if present + if (params.text && !params.dseq) { + const dseqMatch = params.text.match(/dseq\s*(?::|=|\s)\s*(\d+)/i) || + params.text.match(/deployment\s+(?:number|sequence|#)?\s*(\d+)/i) || + params.text.match(/(\d{6,})/); // Matches standalone numbers of 6+ digits + if (dseqMatch) { + params.dseq = dseqMatch[1]; + elizaLogger.debug("Extracted DSEQ from text", { + text: params.text, + extractedDseq: params.dseq + }); + } + } + + // If no operation provided, check environment configuration + if (!params.operation) { + if (config.AKASH_GAS_OPERATION) { + params.operation = config.AKASH_GAS_OPERATION as "close" | "create" | "update"; + elizaLogger.info("Using operation from environment", { operation: params.operation }); + } else { + throw new AkashError( + "Operation type is required (close, create, or update)", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "operation" } + ); + } + } + + // For close operations, check DSEQ from various sources + if (params.operation === "close") { + if (!params.dseq) { + if (config.AKASH_GAS_DSEQ) { + params.dseq = config.AKASH_GAS_DSEQ; + elizaLogger.info("Using DSEQ from environment", { dseq: params.dseq }); + } else { + throw new 
AkashError( + "Deployment sequence (dseq) is required for close operation", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "dseq" } + ); + } + } + } + + // For create/update operations, check message + if ((params.operation === "create" || params.operation === "update") && !params.message) { + throw new AkashError( + "Message is required for create/update operations", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "message" } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Gas estimation validation failed", { + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting gas estimation", { actionId }); + + elizaLogger.debug("=== Handler Parameters ===", { + hasRuntime: !!runtime, + hasMessage: !!message, + hasState: !!state, + hasOptions: !!options, + hasCallback: !!callback, + actionId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // Initialize wallet and get address + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); + const [account] = await wallet.getAccounts(); + + // Initialize client with Akash registry + const myRegistry = new Registry(getAkashTypeRegistry()); + const client = await SigningStargateClient.connectWithSigner( + config.RPC_ENDPOINT, + wallet, + { registry: myRegistry } + ); + + let msg: EncodeObject; + switch (params.operation) { + case "close": + msg = { + typeUrl: getTypeUrl(MsgCloseDeployment), + value: MsgCloseDeployment.fromPartial({ + id: { + owner: account.address, + dseq: params.dseq + } + }) + }; + break; + case 
"create": + case "update": + if (!params.message) { + if (callback) { + callback({ + text: `Message is required for ${params.operation} operations.`, + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing message", + help: `Please provide a message object for the ${params.operation} operation.` + } + } + }); + } + return false; + } + msg = params.message; + break; + default: + if (callback) { + callback({ + text: `Invalid operation type: ${params.operation}. Must be one of: close, create, or update.`, + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_INVALID, + message: "Invalid operation", + help: "Specify a valid operation type: 'close', 'create', or 'update'." + } + } + }); + } + return false; + } + + // Estimate gas + elizaLogger.info("Estimating gas for operation", { + operation: params.operation, + dseq: params.dseq, + owner: account.address + }); + + const gasEstimate = await client.simulate( + account.address, + [msg], + `Estimate gas for ${params.operation} operation` + ); + + elizaLogger.info("Gas estimation completed", { + gasEstimate, + operation: params.operation, + dseq: params.dseq, + owner: account.address, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for gas estimation ===", { + hasCallback: true, + actionId, + operation: params.operation, + dseq: params.dseq + }); + + const operationText = params.operation === "close" ? 
`closing deployment ${params.dseq}` : params.operation; + const estimateData = { + gasEstimate, + operation: params.operation, + dseq: params.dseq, + owner: account.address, + message: msg + }; + + let responseText = `I've estimated the gas for ${operationText}:\n`; + responseText += `• Gas Required: ${gasEstimate} units\n`; + responseText += `• Operation: ${params.operation}\n`; + if (params.dseq) { + responseText += `• DSEQ: ${params.dseq}\n`; + } + responseText += `• Owner: ${account.address}`; + + const response = { + text: responseText, + content: { + success: true, + data: estimateData, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'estimateGas', + version: '1.0.0', + actionId, + tokenCount: getTotalTokensFromString(responseText) + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: response.text, + hasContent: !!response.content, + contentKeys: Object.keys(response.content), + metadata: response.content.metadata + }); + + callback(response); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } else { + elizaLogger.warn("=== No callback provided for gas estimation ===", { + actionId, + operation: params.operation, + dseq: params.dseq + }); + } + + return true; + } catch (error) { + elizaLogger.error("Gas estimation failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error), + details: error instanceof AkashError ? 
error.details : undefined + }; + + const response = { + text: `Failed to estimate gas: ${errorResponse.message}`, + content: { + success: false, + error: errorResponse, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'estimateGas', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + errorResponse, + hasContent: !!response.content, + contentKeys: Object.keys(response.content) + }); + + callback(response); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } else { + elizaLogger.warn("=== No callback provided for error handling ===", { + actionId, + errorMessage: error instanceof Error ? error.message : String(error) + }); + } + + return false; + } + } +}; + +export default estimateGas; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentApi.ts b/packages/plugin-akash/src/actions/getDeploymentApi.ts new file mode 100644 index 00000000000..eea16727b6d --- /dev/null +++ b/packages/plugin-akash/src/actions/getDeploymentApi.ts @@ -0,0 +1,495 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import { getCertificatePath, getDeploymentsPath } from "../utils/paths"; + +export interface DeploymentInfo { + owner: string; + dseq: string; + status: string; + createdHeight: number; + cpuUnits: number; + gpuUnits: number; + memoryQuantity: number; + storageQuantity: number; +} + +export interface DeploymentListResponse { + count: number; + results: DeploymentInfo[]; +} + +interface GetDeploymentsContent extends Content { + status?: 
'active' | 'closed'; + skip?: number; + limit?: number; +} + +async function sleep(ms: number) { + return new Promise(resolve => setTimeout(resolve, ms)); +} + +async function fetchWithRetry(url: string, options: RequestInit, retries = 3, delay = 1000): Promise { + for (let i = 0; i < retries; i++) { + try { + const response = await fetch(url, options); + if (response.ok) { + return response; + } + + const error = await response.text(); + elizaLogger.warn(`API request failed (attempt ${i + 1}/${retries})`, { + status: response.status, + error + }); + + if (i < retries - 1) { + await sleep(delay * Math.pow(2, i)); // Exponential backoff + continue; + } + + throw new AkashError( + `API request failed after ${retries} attempts: ${response.status} - ${error}`, + AkashErrorCode.API_ERROR + ); + } catch (error) { + if (i === retries - 1) { + throw error; + } + elizaLogger.warn(`API request error (attempt ${i + 1}/${retries})`, { + error: error instanceof Error ? error.message : String(error) + }); + await sleep(delay * Math.pow(2, i)); + } + } + throw new AkashError( + `Failed to fetch after ${retries} retries`, + AkashErrorCode.API_ERROR + ); +} + +export async function initializeWallet(runtime: IAgentRuntime): Promise<{wallet: DirectSecp256k1HdWallet | null, address: string}> { + try { + // Validate configuration and get mnemonic + const config = await validateAkashConfig(runtime); + + elizaLogger.info("Initializing wallet with config", { + hasMnemonic: !!config.AKASH_MNEMONIC, + hasWalletAddress: !!config.AKASH_WALLET_ADDRESS + }); + + // First try to get the wallet address directly + if (config.AKASH_WALLET_ADDRESS) { + elizaLogger.info("Using provided wallet address", { + address: config.AKASH_WALLET_ADDRESS + }); + return { + wallet: null, + address: config.AKASH_WALLET_ADDRESS + }; + } + + // If no wallet address, create wallet from mnemonic + if (!config.AKASH_MNEMONIC) { + throw new AkashError( + "Neither AKASH_WALLET_ADDRESS nor AKASH_MNEMONIC provided", + 
AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + try { + elizaLogger.info("Creating wallet from mnemonic"); + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash" + }); + + // Get account address + const accounts = await wallet.getAccounts(); + const address = accounts[0].address; + + elizaLogger.info("Wallet initialized from mnemonic", { + address, + accountCount: accounts.length + }); + + return { wallet, address }; + } catch (error) { + throw new AkashError( + `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.WALLET_NOT_INITIALIZED, + { originalError: error instanceof Error ? error.message : String(error) } + ); + } + } catch (error) { + // Ensure all errors are properly wrapped as AkashError + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to initialize wallet: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.WALLET_NOT_INITIALIZED, + { originalError: error instanceof Error ? error.message : String(error) } + ); + } +} + +export async function fetchDeployments( + runtime: IAgentRuntime, + status?: 'active' | 'closed', + skip = 0, + limit = 10 +): Promise { + elizaLogger.info("Initializing deployment fetch", { + status: status || 'all', + skip, + limit + }); + + try { + // Initialize wallet and get address + const { address } = await initializeWallet(runtime); + + if (!address) { + throw new AkashError( + "Failed to get wallet address", + AkashErrorCode.WALLET_NOT_INITIALIZED + ); + } + + elizaLogger.info("Fetching deployments from API", { + address, + status: status || 'all', + skip, + limit + }); + + // Map status for API compatibility + const apiStatus = status; + + // Don't include status in URL if not specified + const url = `https://console-api.akash.network/v1/addresses/${address}/deployments/${skip}/${limit}${apiStatus ? 
`?status=${apiStatus}` : ''}&reverseSorting=true`; + elizaLogger.debug("Making API request", { url }); + + const response = await fetchWithRetry(url, { + headers: { + 'accept': 'application/json' + } + }); + + const data = await response.json() as DeploymentListResponse; + elizaLogger.info("Deployments fetched successfully", { + count: data.count, + resultCount: data.results.length, + status: status || 'all' + }); + + // Keep status as-is from API + data.results = data.results.map(deployment => ({ + ...deployment, + status: deployment.status.toLowerCase() + })); + + // Save deployments to files, organized by their actual status + const deploymentDir = getDeploymentsPath(import.meta.url); + elizaLogger.info("Using deployments directory", { deploymentDir }); + + // Create base deployments directory if it doesn't exist + if (!fs.existsSync(deploymentDir)) { + elizaLogger.info("Creating deployments directory", { deploymentDir }); + fs.mkdirSync(deploymentDir, { recursive: true }); + } + + // Group deployments by status + const deploymentsByStatus = data.results.reduce((acc, deployment) => { + const status = deployment.status.toLowerCase(); + if (!acc[status]) { + acc[status] = []; + } + acc[status].push(deployment); + return acc; + }, {} as Record); + + // Save deployments by status + for (const [status, deployments] of Object.entries(deploymentsByStatus)) { + const statusDir = path.join(deploymentDir, status); + elizaLogger.info("Processing status directory", { statusDir, status, deploymentCount: deployments.length }); + + // Ensure status directory exists + if (!fs.existsSync(statusDir)) { + elizaLogger.info("Creating status directory", { statusDir }); + fs.mkdirSync(statusDir, { recursive: true }); + } + + // Save all deployments for this status in parallel + await Promise.all(deployments.map(async (deployment) => { + const filePath = path.join(statusDir, `${deployment.dseq}.json`); + elizaLogger.debug("Saving deployment file", { filePath, dseq: deployment.dseq }); 
+ await saveDeploymentInfo(deployment, filePath); + })); + } + + return data; + } catch (error) { + elizaLogger.error("Failed to fetch deployments", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined + }); + throw error; + } +} + +export async function saveDeploymentInfo(deploymentInfo: DeploymentInfo, filePath: string): Promise { + elizaLogger.info("Saving deployment info", { + dseq: deploymentInfo.dseq, + owner: deploymentInfo.owner, + filePath + }); + + try { + // Ensure directory exists + const dir = path.dirname(filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + + // Save deployment info + fs.writeFileSync(filePath, JSON.stringify(deploymentInfo, null, 2), 'utf8'); + elizaLogger.debug("Deployment info saved successfully"); + } catch (error) { + elizaLogger.error("Failed to save deployment info", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? error.stack : undefined, + filePath + }); + throw error; + } +} + +export async function loadDeploymentInfo(filePath: string): Promise { + elizaLogger.info("Loading deployment info", { filePath }); + + try { + const fs = require('fs'); + if (!fs.existsSync(filePath)) { + throw new AkashError( + `Deployment info file not found: ${filePath}`, + AkashErrorCode.FILE_NOT_FOUND + ); + } + + const data = fs.readFileSync(filePath, 'utf8'); + const deploymentInfo = JSON.parse(data) as DeploymentInfo; + elizaLogger.debug("Deployment info loaded successfully", { + dseq: deploymentInfo.dseq, + owner: deploymentInfo.owner + }); + + return deploymentInfo; + } catch (error) { + elizaLogger.error("Failed to load deployment info", { + error: error instanceof Error ? error.message : String(error), + stack: error instanceof Error ? 
error.stack : undefined, + filePath + }); + throw error; + } +} + +export const getDeploymentApiAction: Action = { + name: "GET_DEPLOYMENTS", + similes: ["LIST_DEPLOYMENTS", "FETCH_DEPLOYMENTS", "SHOW_DEPLOYMENTS"], + description: "Fetch deployments from Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Get all deployments", + } as GetDeploymentsContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching all deployments..." + } as GetDeploymentsContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Get active deployments", + status: "active" + } as GetDeploymentsContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching active deployments..." + } as GetDeploymentsContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get deployments request", { message }); + try { + const params = message.content as Partial; + + if (params.status && !['active', 'closed'].includes(params.status)) { + throw new AkashError( + "Status must be either 'active' or 'closed'", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "status", value: params.status } + ); + } + + if (params.skip !== undefined && (typeof params.skip !== 'number' || params.skip < 0)) { + throw new AkashError( + "Skip must be a non-negative number", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "skip", value: params.skip } + ); + } + + if (params.limit !== undefined && (typeof params.limit !== 'number' || params.limit <= 0)) { + throw new AkashError( + "Limit must be a positive number", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "limit", value: params.limit } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get deployments validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting deployment API request", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // Fetch deployments + const deployments = await fetchDeployments( + runtime, + params.status, + params.skip, + params.limit + ); + + if (callback) { + elizaLogger.info("=== Preparing callback response for deployments ===", { + hasCallback: true, + actionId, + deploymentCount: deployments.count + }); + + const callbackResponse = { + text: `Found ${deployments.count} deployment${deployments.count !== 1 ? 's' : ''}${params.status ? ` with status: ${params.status}` : ''}\n\nDeployments:\n${deployments.results.map(dep => + `- DSEQ: ${dep.dseq}\n Status: ${dep.status}\n CPU: ${dep.cpuUnits} units\n Memory: ${dep.memoryQuantity} units\n Storage: ${dep.storageQuantity} units` + ).join('\n\n')}`, + content: { + success: true, + data: { + deployments: deployments.results, + total: deployments.count, + status: params.status || 'all' + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentApi', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (error) { + 
elizaLogger.error("Get deployments request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployments: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentApi', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getDeploymentApiAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getDeploymentStatus.ts b/packages/plugin-akash/src/actions/getDeploymentStatus.ts new file mode 100644 index 00000000000..48413f7b4d8 --- /dev/null +++ b/packages/plugin-akash/src/actions/getDeploymentStatus.ts @@ -0,0 +1,493 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { QueryDeploymentRequest, QueryClientImpl as DeploymentQueryClient } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import { validateAkashConfig } from "../environment"; +import 
{ AkashError, AkashErrorCode } from "../error/error"; + +interface GetDeploymentStatusContent extends Content { + text: string; + dseq?: string; +} + +interface DeploymentGroup { + groupId?: { + owner: string; + dseq: string; + gseq: number; + }; + state: string; + resources: Array<{ + resources: { + cpu: { + units: { + val: string; + }; + }; + memory: { + quantity: { + val: string; + }; + }; + storage: Array<{ + quantity: { + val: string; + }; + }>; + }; + count: number; + price: { + denom: string; + amount: string; + }; + }>; +} + +interface DeploymentResponse { + deploymentId?: { + owner: string; + dseq: string; + }; + state: string; + version: string; + createdAt: string; + escrowAccount?: { + balance?: { + denom: string; + amount: string; + }; + }; + groups?: DeploymentGroup[]; +} + +enum DeploymentState { + UNKNOWN = 0, + ACTIVE = 1, + CLOSED = 2, + INSUFFICIENT_FUNDS = 3, +} + +export const getDeploymentStatusAction: Action = { + name: "GET_DEPLOYMENT_STATUS", + similes: ["CHECK_DEPLOYMENT", "DEPLOYMENT_STATUS", "DEPLOYMENT_STATE", "CHECK DSEQ"], + description: "Get the current status of a deployment on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you check the deployment status of the DSEQ 123456?", + } as GetDeploymentStatusContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get deployment status request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract DSEQ from text if present + if (params.text && !params.dseq) { + // Pattern to match DSEQ followed by numbers + const dseqMatch = params.text.match(/(?:DSEQ|dseq)\s*(\d+)/i); + if (dseqMatch) { + params.dseq = dseqMatch[1]; + elizaLogger.debug("Extracted DSEQ from text", { + text: params.text, + extractedDseq: params.dseq + }); + } + } + + // If no dseq provided, check environment configuration + if 
(!params.dseq) { + if (config.AKASH_DEP_STATUS === "dseq" && config.AKASH_DEP_DSEQ) { + params.dseq = config.AKASH_DEP_DSEQ; + } else if (config.AKASH_DEP_STATUS === "param_passed") { + elizaLogger.info("DSEQ parameter is required when AKASH_DEP_STATUS is set to param_passed", { + current_status: config.AKASH_DEP_STATUS + }); + return true; // Allow validation to pass, we'll handle the missing parameter in the handler + } else { + elizaLogger.info("No DSEQ provided and no valid environment configuration found", { + dep_status: config.AKASH_DEP_STATUS, + dep_dseq: config.AKASH_DEP_DSEQ + }); + return true; // Allow validation to pass, we'll handle the missing configuration in the handler + } + } + + // If dseq is provided, validate its format + if (params.dseq && !/^\d+$/.test(params.dseq)) { + throw new AkashError( + "Invalid DSEQ format. Must be a numeric string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "dseq", value: params.dseq } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get deployment status validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting deployment status request", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + let dseqSource = "parameter"; // Track where the DSEQ came from + + // Handle missing dseq parameter based on environment configuration + if (!params.dseq) { + if (config.AKASH_DEP_STATUS === "dseq") { + if (config.AKASH_DEP_DSEQ) { + params.dseq = config.AKASH_DEP_DSEQ; + dseqSource = "environment"; + } else { + if (callback) { + callback({ + text: "AKASH_DEP_DSEQ is not set in your environment. Please set a valid deployment sequence number.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing AKASH_DEP_DSEQ", + help: "When AKASH_DEP_STATUS is set to 'dseq', you must also set AKASH_DEP_DSEQ in your .env file." + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } + } else if (config.AKASH_DEP_STATUS === "param_passed") { + if (callback) { + callback({ + text: "DSEQ parameter is required. Please provide a deployment sequence number.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing required parameter: dseq", + help: "You need to provide a deployment sequence number (dseq) to check its status." 
+ }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } else { + if (callback) { + callback({ + text: "No deployment configuration found. Please set AKASH_DEP_STATUS and AKASH_DEP_DSEQ in your environment or provide a dseq parameter.", + content: { + success: false, + error: { + code: AkashErrorCode.VALIDATION_PARAMETER_MISSING, + message: "Missing configuration", + help: "Set AKASH_DEP_STATUS='dseq' and AKASH_DEP_DSEQ in your .env file, or set AKASH_DEP_STATUS='param_passed' and provide dseq parameter in your request." + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + return false; + } + } + + // Initialize wallet from mnemonic + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { prefix: "akash" }); + const [account] = await wallet.getAccounts(); + + // Initialize query client + const queryClient = new DeploymentQueryClient(await getRpc(config.RPC_ENDPOINT)); + + // Query deployment + elizaLogger.info("Querying deployment status", { + dseq: params.dseq, + owner: account.address + }); + + try { + const request = QueryDeploymentRequest.fromPartial({ + id: { + owner: account.address, + dseq: params.dseq + } + }); + + const response = await queryClient.Deployment(request); + + if (!response.deployment) { + // Different messages based on DSEQ source + if (dseqSource === "environment") { + if (callback) { + callback({ + text: "The deployment sequence number in your environment configuration was not found. 
Please check AKASH_DEP_DSEQ value.", + content: { + success: false, + error: { + code: AkashErrorCode.DEPLOYMENT_NOT_FOUND, + message: "Invalid AKASH_DEP_DSEQ", + help: "Update AKASH_DEP_DSEQ in your .env file with a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", + current_dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + } else { + throw new AkashError( + "Deployment not found", + AkashErrorCode.DEPLOYMENT_NOT_FOUND, + { + dseq: params.dseq, + owner: account.address, + actionId + } + ); + } + return false; + } + + // Format deployment status + const deployment = response.deployment as unknown as DeploymentResponse; + const status = { + owner: deployment.deploymentId?.owner, + dseq: deployment.deploymentId?.dseq, + state: deployment.state, + version: deployment.version, + createdAt: deployment.createdAt, + balance: deployment.escrowAccount?.balance, + groups: deployment.groups?.map((group: DeploymentGroup) => ({ + groupId: group.groupId, + state: group.state, + resources: group.resources + })) + }; + + elizaLogger.info("Deployment status retrieved successfully", { + dseq: params.dseq, + state: status.state, + owner: status.owner, + actionId + }); + + if (callback) { + // Convert numeric state to readable string + const stateString = DeploymentState[status.state as keyof typeof DeploymentState] || 'UNKNOWN'; + + const formattedBalance = deployment.escrowAccount?.balance + ? 
`${deployment.escrowAccount.balance.amount}${deployment.escrowAccount.balance.denom}` + : 'No balance information'; + + elizaLogger.info("=== Preparing callback response for deployment status ===", { + hasCallback: true, + actionId, + dseq: params.dseq + }); + + const callbackResponse = { + text: `Deployment ${params.dseq} Status:\nState: ${stateString}\nBalance: ${formattedBalance}\nCreated At: ${status.createdAt}`, + content: { + success: true, + data: { + deployment: status, + queryResponse: response.deployment + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (queryError) { + // Handle query errors differently based on DSEQ source + if (dseqSource === "environment") { + elizaLogger.warn("Failed to query deployment from environment configuration", { + dseq: params.dseq, + error: queryError instanceof Error ? queryError.message : String(queryError) + }); + if (callback) { + callback({ + text: "Could not find deployment with the configured DSEQ. 
Please check your environment settings.", + content: { + success: false, + error: { + code: AkashErrorCode.API_ERROR, + message: "Invalid AKASH_DEP_DSEQ configuration", + help: "Verify that AKASH_DEP_DSEQ contains a valid deployment sequence number, or switch to AKASH_DEP_STATUS='param_passed' to provide DSEQ as a parameter.", + current_dseq: params.dseq + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }); + } + } else { + elizaLogger.error("Failed to query deployment", { + error: queryError instanceof Error ? queryError.message : String(queryError), + actionId + }); + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: queryError instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployment status: ${queryError instanceof Error ? queryError.message : String(queryError)}`, + content: { + success: false, + error: { + code: queryError instanceof AkashError ? queryError.code : AkashErrorCode.API_ERROR, + message: queryError instanceof Error ? queryError.message : String(queryError) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + } + return false; + } + } catch (error) { + elizaLogger.error("Get deployment status request failed", { + error: error instanceof Error ? 
error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + text: `Failed to get deployment status: ${error instanceof Error ? error.message : String(error)}`, + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getDeploymentStatus', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + responseText: errorResponse.text, + hasContent: !!errorResponse.content, + contentKeys: Object.keys(errorResponse.content) + }); + + callback(errorResponse); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getDeploymentStatusAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getGPUPricing.ts b/packages/plugin-akash/src/actions/getGPUPricing.ts new file mode 100644 index 00000000000..562c0871984 --- /dev/null +++ b/packages/plugin-akash/src/actions/getGPUPricing.ts @@ -0,0 +1,225 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { getConfig } from "../environment"; + +interface GetGPUPricingContent extends Content { + cpu?: number; // CPU units in millicores (e.g., 1000 = 1 CPU) + memory?: number; // Memory in bytes (e.g., 1000000000 = 1GB) + storage?: number; // Storage in bytes (e.g., 1000000000 = 1GB) +} + +interface PricingResponse { + spec: { + cpu: number; + memory: number; + storage: number; + }; + akash: number; + aws: 
number; + gcp: number; + azure: number; +} + +// Get configuration with defaults +const config = getConfig(process.env.AKASH_ENV); +const PRICING_API_URL = config.AKASH_PRICING_API_URL; +const DEFAULT_CPU = parseInt(config.AKASH_DEFAULT_CPU || "1000"); +const DEFAULT_MEMORY = parseInt(config.AKASH_DEFAULT_MEMORY || "1000000000"); +const DEFAULT_STORAGE = parseInt(config.AKASH_DEFAULT_STORAGE || "1000000000"); + +// Custom error class for GPU pricing errors +class GPUPricingError extends Error { + constructor(message: string, public code: string) { + super(message); + this.name = 'GPUPricingError'; + } +} + +export const getGPUPricingAction: Action = { + name: "GET_GPU_PRICING", + similes: ["GET_PRICING", "COMPARE_PRICES", "CHECK_PRICING"], + description: "Get GPU pricing comparison between Akash and major cloud providers", + examples: [[ + { + user: "user", + content: { + text: "Get GPU pricing for 2 CPUs, 2GB memory, and 10GB storage", + cpu: 2000, + memory: 2000000000, + storage: 10000000000 + } as GetGPUPricingContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Compare GPU prices across providers" + } as GetGPUPricingContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating GPU pricing request", { message }); + try { + const params = message.content as Partial; + + // Validate CPU if provided + if (params.cpu !== undefined && (isNaN(params.cpu) || params.cpu <= 0)) { + throw new GPUPricingError("CPU units must be a positive number", "INVALID_CPU"); + } + + // Validate memory if provided + if (params.memory !== undefined && (isNaN(params.memory) || params.memory <= 0)) { + throw new GPUPricingError("Memory must be a positive number", "INVALID_MEMORY"); + } + + // Validate storage if provided + if (params.storage !== undefined && (isNaN(params.storage) || params.storage <= 0)) { + throw new GPUPricingError("Storage must be a positive number", 
"INVALID_STORAGE"); + } + + return true; + } catch (error) { + elizaLogger.error("GPU pricing validation failed", { + error: error instanceof GPUPricingError ? { + code: error.code, + message: error.message + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting GPU pricing request", { actionId }); + + try { + const params = message.content as GetGPUPricingContent; + + // Use provided values or defaults + const requestBody = { + cpu: params.cpu || DEFAULT_CPU, + memory: params.memory || DEFAULT_MEMORY, + storage: params.storage || DEFAULT_STORAGE + }; + + elizaLogger.info("Fetching pricing information", { + specs: requestBody, + apiUrl: PRICING_API_URL + }); + + // Make API request using fetch + const response = await fetch(PRICING_API_URL, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Accept': 'application/json' + }, + body: JSON.stringify(requestBody) + }); + + if (!response.ok) { + throw new GPUPricingError( + `API request failed with status ${response.status}: ${response.statusText}`, + "API_ERROR" + ); + } + + const data = await response.json() as PricingResponse; + + // Calculate savings percentages + const savings = { + vs_aws: ((data.aws - data.akash) / data.aws * 100).toFixed(2), + vs_gcp: ((data.gcp - data.akash) / data.gcp * 100).toFixed(2), + vs_azure: ((data.azure - data.akash) / data.azure * 100).toFixed(2) + }; + + elizaLogger.info("Pricing information retrieved successfully", { + specs: data.spec, + pricing: { + akash: data.akash, + aws: data.aws, + gcp: data.gcp, + azure: data.azure + }, + savings + }); + + if (callback) { + const callbackResponse = { + text: `GPU Pricing Comparison\nAkash: $${data.akash}\nAWS: $${data.aws} (${savings.vs_aws}% savings)\nGCP: $${data.gcp} 
(${savings.vs_gcp}% savings)\nAzure: $${data.azure} (${savings.vs_azure}% savings)`, + content: { + success: true, + data: { + specs: { + cpu: data.spec.cpu, + memory: data.spec.memory, + storage: data.spec.storage + }, + pricing: { + akash: data.akash, + aws: data.aws, + gcp: data.gcp, + azure: data.azure + }, + savings: { + vs_aws: `${savings.vs_aws}%`, + vs_gcp: `${savings.vs_gcp}%`, + vs_azure: `${savings.vs_azure}%` + } + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getGPUPricing', + version: '1.0.0', + actionId + } + } + }; + + callback(callbackResponse); + } + + return true; + } catch (error) { + elizaLogger.error("GPU pricing request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + const errorResponse = { + text: "Failed to get GPU pricing information", + content: { + success: false, + error: { + code: error instanceof GPUPricingError ? error.code : 'UNKNOWN_ERROR', + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getGPUPricing', + version: '1.0.0', + actionId + } + } + }; + + callback(errorResponse); + } + + return false; + } + } +}; + +export default getGPUPricingAction; diff --git a/packages/plugin-akash/src/actions/getManifest.ts b/packages/plugin-akash/src/actions/getManifest.ts new file mode 100644 index 00000000000..981cf9c65e0 --- /dev/null +++ b/packages/plugin-akash/src/actions/getManifest.ts @@ -0,0 +1,361 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { SDL } from "@akashnetwork/akashjs/build/sdl"; +import { validateAkashConfig } from "../environment"; +import { AkashError, AkashErrorCode } from "../error/error"; +import * as fs from 'fs'; +import * as path from 'path'; +import yaml from 'js-yaml'; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { getCertificatePath, getDefaultSDLPath } from "../utils/paths"; + +interface GetManifestContent extends Content { + sdl?: string; + sdlFile?: string; +} + +// elizaLogger.info("Default SDL path initialized", { DEFAULT_SDL_PATH }); +// elizaLogger.info("Loading SDL from file", { filePath }); +// elizaLogger.info("Resolved SDL file path", { resolvedPath }); +// elizaLogger.error("SDL file not found", { resolvedPath }); +// elizaLogger.info("SDL file loaded successfully", { content }); +// elizaLogger.error("Failed to read SDL file", { error }); +// elizaLogger.error("SDL validation failed", { error }); +// elizaLogger.info("Using provided SDL content"); +// elizaLogger.info("Loading SDL from file", { path: params.sdlFile }); +// elizaLogger.info("Loading default SDL", { path: DEFAULT_SDL_PATH }); +// elizaLogger.debug("Parsing SDL content and generating manifest"); + +const DEFAULT_SDL_PATH = (() => { + const currentFileUrl = import.meta.url; 
+ const sdlPath = getDefaultSDLPath(currentFileUrl); + + // Only log if file doesn't exist + if (!fs.existsSync(sdlPath)) { + elizaLogger.warn("Default SDL path not found", { + sdlPath, + exists: false + }); + } + + return sdlPath; +})(); + +const loadSDLFromFile = (filePath: string): string => { + try { + // If path doesn't contain plugin-akash and it's not the default path, adjust it + if (!filePath.includes('plugin-akash') && filePath !== DEFAULT_SDL_PATH) { + const adjustedPath = path.join(path.dirname(DEFAULT_SDL_PATH), path.basename(filePath)); + filePath = adjustedPath; + } + + // Try multiple possible locations + const possiblePaths = [ + filePath, + path.join(process.cwd(), filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', filePath), + path.join(process.cwd(), 'packages', 'plugin-akash', 'src', filePath), + path.join(path.dirname(DEFAULT_SDL_PATH), filePath) + ]; + + for (const tryPath of possiblePaths) { + if (fs.existsSync(tryPath)) { + const content = fs.readFileSync(tryPath, "utf8"); + elizaLogger.info("SDL file loaded successfully from", { + path: tryPath + }); + return content; + } + } + + // If we get here, none of the paths worked + throw new AkashError( + `SDL file not found in any of the possible locations`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { + filePath, + triedPaths: possiblePaths + } + ); + } catch (error) { + elizaLogger.error("Failed to read SDL file", { + filePath, + error: error instanceof Error ? error.message : String(error) + }); + throw new AkashError( + `Failed to read SDL file: ${error instanceof Error ? 
error.message : String(error)}`, + AkashErrorCode.VALIDATION_SDL_FAILED, + { filePath } + ); + } +}; + +const validateSDL = (sdlContent: string, validationLevel: string = "strict"): boolean => { + try { + // First try to parse as YAML + const parsed = yaml.load(sdlContent); + if (!parsed || typeof parsed !== 'object') { + throw new Error('Invalid SDL format: not a valid YAML object'); + } + + if (validationLevel === "none") { + // elizaLogger.debug("Skipping SDL validation (validation level: none)"); + return true; + } + + // Required sections based on validation level + const requiredSections = ['version', 'services']; + const sectionsToCheck = validationLevel === "strict" ? + [...requiredSections, 'profiles', 'deployment'] : + requiredSections; + + for (const section of sectionsToCheck) { + if (!(section in parsed)) { + throw new Error(`Invalid SDL format: missing required section '${section}'`); + } + } + + // elizaLogger.debug("SDL validation successful", { + // validationLevel, + // checkedSections: sectionsToCheck + // }); + return true; + } catch (error) { + // elizaLogger.error("SDL validation failed", { + // error: error instanceof Error ? 
error.message : String(error), + // validationLevel + // }); + return false; + } +}; + +export const getManifestAction: Action = { + name: "GET_MANIFEST", + similes: ["LOAD_MANIFEST", "READ_MANIFEST", "PARSE_MANIFEST"], + description: "Load and validate SDL to generate a manifest for Akash deployments", + examples: [[ + { + user: "user", + content: { + text: "Get manifest from SDL file", + sdlFile: "deployment.yml" + } as GetManifestContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating manifest request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Either SDL content or file path must be provided + if (!params.sdl && !params.sdlFile && !config.AKASH_SDL) { + throw new AkashError( + "Either SDL content, file path, or AKASH_SDL environment variable must be provided", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameters: ["sdl", "sdlFile", "AKASH_SDL"] } + ); + } + + // If SDL content is provided, validate it + if (params.sdl) { + const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; + if (!validateSDL(params.sdl, validationLevel)) { + throw new AkashError( + "Invalid SDL format", + AkashErrorCode.VALIDATION_SDL_FAILED + ); + } + } + + return true; + } catch (error) { + elizaLogger.error("Manifest validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting manifest operation", { actionId }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + let sdlContent: string; + try { + // Load SDL content based on priority: params.sdl > params.sdlFile > config.AKASH_SDL + if (params.sdl) { + sdlContent = params.sdl; + elizaLogger.info("Using provided SDL content"); + } else if (params.sdlFile) { + sdlContent = loadSDLFromFile(params.sdlFile); + elizaLogger.info("Loaded SDL from file", { path: params.sdlFile }); + } else { + const sdlPath = config.AKASH_SDL || DEFAULT_SDL_PATH; + sdlContent = loadSDLFromFile(sdlPath); + elizaLogger.info("Using SDL from environment", { path: sdlPath }); + } + + // Validate based on environment settings + const validationLevel = config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict"; + const isValid = validateSDL(sdlContent, validationLevel); + + if (!isValid) { + throw new AkashError( + "SDL validation failed", + AkashErrorCode.VALIDATION_SDL_FAILED + ); + } + + // Check manifest mode + const manifestMode = config.AKASH_MANIFEST_MODE || "auto"; + if (manifestMode === "validate_only") { + elizaLogger.info("Validation successful (validate_only mode)"); + if (callback) { + const callbackResponse = { + text: "SDL validation successful", + content: { + success: true, + data: { + validationLevel, + mode: manifestMode + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(callbackResponse); + } + return true; + } + + // Generate manifest + const sdl = new 
SDL(yaml.load(sdlContent) as any); + const manifest = sdl.manifest(); + + // Save manifest if path is specified + if (config.AKASH_MANIFEST_PATH) { + const manifestPath = path.join( + config.AKASH_MANIFEST_PATH, + `manifest-${Date.now()}.yaml` + ); + fs.writeFileSync(manifestPath, yaml.dump(manifest), 'utf8'); + elizaLogger.info("Manifest saved", { path: manifestPath }); + } + + if (callback) { + const callbackResponse = { + text: "Manifest generated successfully", + content: { + success: true, + data: { + manifest, + settings: { + mode: manifestMode, + validationLevel, + outputPath: config.AKASH_MANIFEST_PATH + } + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(callbackResponse); + } + + return true; + } catch (error) { + const formattedError = error instanceof Error ? error.message : String(error); + elizaLogger.error("Manifest operation failed", { + error: formattedError, + settings: { + mode: config.AKASH_MANIFEST_MODE || "auto", + validationLevel: config.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + outputPath: config.AKASH_MANIFEST_PATH + } + }); + + if (callback) { + const errorResponse = { + text: "Failed to process manifest", + content: { + success: false, + error: error instanceof AkashError ? { + code: error.code, + message: error.message, + details: error.details + } : { + code: AkashErrorCode.MANIFEST_PARSING_FAILED, + message: formattedError + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(errorResponse); + } + return false; + } + } catch (error) { + elizaLogger.error("Manifest operation failed", { + error: error instanceof Error ? 
error.message : String(error), + actionId + }); + + if (callback) { + const errorResponse = { + text: "Manifest operation failed", + content: { + success: false, + error: { + code: AkashErrorCode.MANIFEST_PARSING_FAILED, + message: error instanceof Error ? error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getManifest', + version: '1.0.0', + actionId + } + } + }; + callback(errorResponse); + } + + return false; + } + } +}; + +export default getManifestAction; diff --git a/packages/plugin-akash/src/actions/getProviderInfo.ts b/packages/plugin-akash/src/actions/getProviderInfo.ts new file mode 100644 index 00000000000..0203a4a62f9 --- /dev/null +++ b/packages/plugin-akash/src/actions/getProviderInfo.ts @@ -0,0 +1,369 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { QueryProviderRequest, QueryClientImpl as ProviderQueryClient } from "@akashnetwork/akash-api/akash/provider/v1beta3"; +import { getRpc } from "@akashnetwork/akashjs/build/rpc"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { validateAkashConfig } from "../environment"; + +interface GetProviderInfoContent extends Content { + text: string; + provider?: string; +} + +interface ProviderResponse { + provider?: { + owner: string; + hostUri: string; + attributes: Array<{ + key: string; + value: string; + }>; + info?: { + email: string; + website: string; + capabilities: string[]; + }; + status?: ProviderStatus; + }; +} + +interface ProviderStatus { + cluster?: { + nodes: Array<{ + name: string; + capacity: { + cpu: string; + memory: string; + storage: string; + }; + allocatable: { + cpu: string; + memory: string; + storage: string; + }; + }>; + }; + leases?: { + active: number; + pending: number; + available: number; + }; +} + +const sleep = (ms: number) => new Promise(resolve => 
setTimeout(resolve, ms)); + +export const getProviderInfoAction: Action = { + name: "GET_PROVIDER_INFO", + similes: ["CHECK_PROVIDER", "PROVIDER_INFO", "PROVIDER_STATUS", "CHECK PROVIDER"], + description: "Get detailed information about a provider on Akash Network", + examples: [[ + { + user: "user", + content: { + text: "Can you check the provider info for akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz?", + } as GetProviderInfoContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get provider info request", { message }); + try { + const params = message.content as Partial; + const config = await validateAkashConfig(runtime); + + // Extract provider address from text if present + if (params.text && !params.provider) { + // Pattern to match akash1 followed by address characters + const providerMatch = params.text.match(/akash1[a-zA-Z0-9]{38}/); + if (providerMatch) { + params.provider = providerMatch[0]; + elizaLogger.debug("Extracted provider address from text", { + text: params.text, + extractedProvider: params.provider + }); + } + } + + // If still no provider specified, use environment default + if (!params.provider && config.AKASH_PROVIDER_INFO) { + params.provider = config.AKASH_PROVIDER_INFO; + } + + if (!params.provider) { + throw new AkashError( + "Provider address is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "provider" } + ); + } + + // Validate provider address format + if (!params.provider.startsWith("akash1")) { + throw new AkashError( + "Invalid provider address format. Must start with 'akash1'", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "provider", value: params.provider } + ); + } + + return true; + } catch (error) { + elizaLogger.error("Get provider info validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting provider info request", { actionId }); + + elizaLogger.debug("=== Handler Parameters ===", { + hasRuntime: !!runtime, + hasMessage: !!message, + hasState: !!state, + hasOptions: !!options, + hasCallback: !!callback, + actionId + }); + + try { + const config = await validateAkashConfig(runtime); + const params = message.content as Partial; + + // If no provider specified, use environment default + if (!params.provider && config.AKASH_PROVIDER_INFO) { + params.provider = config.AKASH_PROVIDER_INFO; + } + + if (!params.provider) { + throw new AkashError( + "Provider address is required", + AkashErrorCode.VALIDATION_PARAMETER_MISSING, + { parameter: "provider" } + ); + } + + // Query provider information + elizaLogger.info("Querying provider information", { + provider: params.provider, + actionId + }); + + const queryClient = new ProviderQueryClient(await getRpc(config.RPC_ENDPOINT)); + const request = QueryProviderRequest.fromPartial({ + owner: params.provider + }); + + try { + const response = await queryClient.Provider(request) as ProviderResponse; + + if (!response.provider) { + throw new AkashError( + "Failed to query provider: Provider not found", + AkashErrorCode.PROVIDER_NOT_FOUND, + { + provider: params.provider, + actionId + } + ); + } + + // Add a delay before querying status + await sleep(2000); // 2 second delay + + // Query provider status from their API + elizaLogger.info("Querying provider status", { + hostUri: response.provider.hostUri, + actionId + }); + + const hostUri = response.provider.hostUri.replace(/^https?:\/\//, ''); + elizaLogger.debug("Making provider status request", 
{ url: `https://${hostUri}/status` }); + + try { + const statusResponse = await fetch(`https://${hostUri}/status`, { + headers: { + 'Accept': 'application/json' + }, + signal: AbortSignal.timeout(5000) + }); + + if (!statusResponse.ok) { + elizaLogger.debug("Provider status not available", { + status: statusResponse.status, + provider: params.provider, + hostUri: response.provider.hostUri, + actionId + }); + } else { + const statusData = await statusResponse.json(); + response.provider.status = statusData; + } + } catch (statusError) { + elizaLogger.debug("Provider status fetch failed", { + error: statusError instanceof Error ? statusError.message : String(statusError), + provider: params.provider, + hostUri: response.provider.hostUri, + actionId + }); + } + + // Format provider information + const info = { + owner: response.provider.owner, + hostUri: response.provider.hostUri, + attributes: response.provider.attributes, + info: response.provider.info, + status: response.provider.status ? { + nodes: response.provider.status.cluster?.nodes.map(node => ({ + name: node.name, + capacity: node.capacity, + allocatable: node.allocatable + })), + leases: response.provider.status.leases + } : undefined + }; + + elizaLogger.info("Provider information retrieved successfully", { + provider: params.provider, + hostUri: response.provider.hostUri, + hasStatus: !!response.provider.status, + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing callback response for provider info ===", { + hasCallback: true, + actionId, + provider: params.provider + }); + + const callbackResponse = { + text: `Provider ${params.provider} information:\nHost URI: ${info.hostUri}\nOwner: ${info.owner}${info.info ? 
`\nEmail: ${info.info.email}\nWebsite: ${info.info.website}` : ''}\nAttributes: ${info.attributes.map(attr => `${attr.key}: ${attr.value}`).join(', ')}`, + content: { + success: true, + data: { + provider: info, + queryResponse: response.provider + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProviderInfo', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing callback with response ===", { + actionId, + responseText: callbackResponse.text, + hasContent: !!callbackResponse.content, + contentKeys: Object.keys(callbackResponse.content), + metadata: callbackResponse.content.metadata + }); + + callback(callbackResponse); + + elizaLogger.info("=== Callback executed successfully ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return true; + } catch (queryError) { + // Handle specific error cases + const errorMessage = queryError instanceof Error ? queryError.message : String(queryError); + + if (errorMessage.toLowerCase().includes("invalid address")) { + throw new AkashError( + "Failed to query provider: Invalid address format", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { + provider: params.provider, + error: errorMessage, + actionId + } + ); + } + + // For all other query errors, treat as provider not found + throw new AkashError( + "Failed to query provider: Provider not found or not accessible", + AkashErrorCode.PROVIDER_NOT_FOUND, + { + provider: params.provider, + error: errorMessage, + actionId + } + ); + } + } catch (error) { + elizaLogger.error("Get provider info request failed", { + error: error instanceof Error ? error.message : String(error), + actionId + }); + + if (callback) { + elizaLogger.info("=== Preparing error callback response ===", { + actionId, + hasCallback: true, + errorType: error instanceof AkashError ? 'AkashError' : 'Error' + }); + + const errorResponse = { + code: error instanceof AkashError ? 
error.code : AkashErrorCode.API_ERROR, + message: error instanceof Error ? error.message : String(error), + details: error instanceof AkashError ? error.details : undefined + }; + + const response = { + text: `Failed to get provider information: ${errorResponse.message}`, + content: { + success: false, + error: errorResponse, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProviderInfo', + version: '1.0.0', + actionId + } + } + }; + + elizaLogger.info("=== Executing error callback ===", { + actionId, + errorResponse, + hasContent: !!response.content, + contentKeys: Object.keys(response.content) + }); + + callback(response); + + elizaLogger.info("=== Error callback executed ===", { + actionId, + timestamp: new Date().toISOString() + }); + } + + return false; + } + } +}; + +export default getProviderInfoAction; \ No newline at end of file diff --git a/packages/plugin-akash/src/actions/getProvidersList.ts b/packages/plugin-akash/src/actions/getProvidersList.ts new file mode 100644 index 00000000000..3944e9c507d --- /dev/null +++ b/packages/plugin-akash/src/actions/getProvidersList.ts @@ -0,0 +1,333 @@ +import { Action, elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory, State, HandlerCallback, Content, ActionExample } from "@elizaos/core"; +import { AkashError, AkashErrorCode } from "../error/error"; +import { validateAkashConfig } from "../environment"; + +interface GetProvidersListContent extends Content { + filter?: { + active?: boolean; + hasGPU?: boolean; + region?: string; + }; +} + +interface ProviderAttributes { + key: string; + value: string; +} + +interface ProviderInfo { + owner: string; + hostUri: string; + attributes: ProviderAttributes[]; + active: boolean; + uptime: number; + leaseCount: number; + info?: { + email?: string; + website?: string; + capabilities?: string[]; + }; + status?: { + available: boolean; + error?: string; + lastCheckTime: string; + resources?: { + cpu: { + total: number; 
+ available: number; + }; + memory: { + total: number; + available: number; + }; + storage: { + total: number; + available: number; + }; + }; + }; +} + +const API_BASE_URL = "https://console-api.akash.network/v1"; + +async function fetchProviders(): Promise { + try { + const response = await fetch(`${API_BASE_URL}/providers`, { + headers: { + 'Accept': 'application/json' + } + }); + + if (!response.ok) { + throw new AkashError( + "Failed to fetch providers list: Invalid response from API", + AkashErrorCode.API_RESPONSE_INVALID, + { + status: response.status, + statusText: response.statusText + } + ); + } + + const data = await response.json(); + return data; + } catch (error) { + if (error instanceof AkashError) { + throw error; + } + throw new AkashError( + `Failed to fetch providers list: ${error instanceof Error ? error.message : String(error)}`, + AkashErrorCode.API_REQUEST_FAILED, + { + error: error instanceof Error ? error.message : String(error) + } + ); + } +} + +function filterProviders(providers: ProviderInfo[], filter?: GetProvidersListContent['filter']): ProviderInfo[] { + if (!filter) return providers; + + try { + let filtered = [...providers]; + + if (filter.active !== undefined) { + filtered = filtered.filter(p => { + const isActive = p.active && p.status?.available !== false; + return isActive === filter.active; + }); + } + + if (filter.hasGPU) { + filtered = filtered.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase().includes('gpu') && + attr.value.toLowerCase() !== 'false' && + attr.value !== '0' + ) + ); + } + + if (filter.region) { + const regionFilter = filter.region.toLowerCase(); + filtered = filtered.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase() === 'region' && + attr.value.toLowerCase().includes(regionFilter) + ) + ); + } + + return filtered; + } catch (error) { + throw new AkashError( + "Failed to apply provider filters", + AkashErrorCode.PROVIDER_FILTER_ERROR, + { filter, error: error instanceof Error ? 
error.message : String(error) } + ); + } +} + +export const getProvidersListAction: Action = { + name: "GET_PROVIDERS_LIST", + similes: ["LIST_PROVIDERS", "FETCH_PROVIDERS", "GET_ALL_PROVIDERS"], + description: "Get a list of all available providers on the Akash Network with their details and status", + examples: [[ + { + user: "user", + content: { + text: "Get a list of all active providers" + } as GetProvidersListContent + } as ActionExample, + { + user: "assistant", + content: { + text: "Fetching list of active Akash providers...", + filter: { + active: true + } + } as GetProvidersListContent + } as ActionExample + ], [ + { + user: "user", + content: { + text: "Show me all GPU providers in the US region", + filter: { + hasGPU: true, + region: "us" + } + } as GetProvidersListContent + } as ActionExample + ]], + + validate: async (runtime: IAgentRuntime, message: Memory): Promise => { + elizaLogger.debug("Validating get providers list request", { message }); + try { + const params = message.content as Partial; + + // Validate filter parameters if provided + if (params.filter) { + if (params.filter.region && typeof params.filter.region !== 'string') { + throw new AkashError( + "Region filter must be a string", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.region" } + ); + } + + if (params.filter.active !== undefined && typeof params.filter.active !== 'boolean') { + throw new AkashError( + "Active filter must be a boolean", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.active" } + ); + } + + if (params.filter.hasGPU !== undefined && typeof params.filter.hasGPU !== 'boolean') { + throw new AkashError( + "HasGPU filter must be a boolean", + AkashErrorCode.VALIDATION_PARAMETER_INVALID, + { parameter: "filter.hasGPU" } + ); + } + } + + return true; + } catch (error) { + elizaLogger.error("Get providers list validation failed", { + error: error instanceof AkashError ? 
{ + code: error.code, + message: error.message, + details: error.details + } : String(error) + }); + return false; + } + }, + + handler: async ( + runtime: IAgentRuntime, + message: Memory, + state: State | undefined, + options: { [key: string]: unknown; } = {}, + callback?: HandlerCallback + ): Promise => { + const actionId = Date.now().toString(); + elizaLogger.info("Starting providers list request", { actionId }); + + try { + await validateAkashConfig(runtime); + const params = message.content as GetProvidersListContent; + + elizaLogger.info("Fetching providers list", { + filter: params.filter, + actionId + }); + + // Fetch providers + const allProviders = await fetchProviders(); + + // Apply filters + const filteredProviders = filterProviders(allProviders, params.filter); + + elizaLogger.info("Providers list retrieved successfully", { + totalProviders: allProviders.length, + filteredProviders: filteredProviders.length, + filter: params.filter, + actionId + }); + + if (callback) { + const callbackResponse = { + text: `Retrieved ${filteredProviders.length} providers${params.filter ? 
' (filtered)' : ''} from total ${allProviders.length}`, + content: { + success: true, + data: { + summary: { + total: allProviders.length, + filtered: filteredProviders.length, + activeCount: filteredProviders.filter(p => p.active && p.status?.available !== false).length, + gpuCount: filteredProviders.filter(p => + p.attributes.some(attr => + attr.key.toLowerCase().includes('gpu') && + attr.value.toLowerCase() !== 'false' && + attr.value !== '0' + ) + ).length + }, + providers: filteredProviders.map(p => ({ + owner: p.owner, + hostUri: p.hostUri, + active: p.active && p.status?.available !== false, + uptime: p.uptime, + leaseCount: p.leaseCount, + attributes: p.attributes, + info: { + ...p.info, + capabilities: p.info?.capabilities || [], + region: p.attributes.find(a => a.key.toLowerCase() === 'region')?.value || 'unknown' + }, + resources: p.status?.resources || { + cpu: { total: 0, available: 0 }, + memory: { total: 0, available: 0 }, + storage: { total: 0, available: 0 } + }, + status: { + available: p.status?.available || false, + lastCheckTime: p.status?.lastCheckTime || new Date().toISOString(), + error: p.status?.error + } + })) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProvidersList', + version: '1.0.0', + actionId, + filters: params.filter || {} + } + } + }; + + callback(callbackResponse); + } + + return true; + } catch (error) { + elizaLogger.error("Get providers list request failed", { + error: error instanceof Error ? error.message : String(error), + code: error instanceof AkashError ? error.code : undefined, + actionId + }); + + if (callback) { + const errorResponse = { + text: "Failed to get providers list", + content: { + success: false, + error: { + code: error instanceof AkashError ? error.code : AkashErrorCode.API_REQUEST_FAILED, + message: error instanceof Error ? 
error.message : String(error) + }, + metadata: { + timestamp: new Date().toISOString(), + source: 'akash-plugin', + action: 'getProvidersList', + version: '1.0.0', + actionId + } + } + }; + + callback(errorResponse); + } + + return false; + } + } +}; +export default getProvidersListAction; + diff --git a/packages/plugin-akash/src/environment.ts b/packages/plugin-akash/src/environment.ts new file mode 100644 index 00000000000..12a8332b087 --- /dev/null +++ b/packages/plugin-akash/src/environment.ts @@ -0,0 +1,259 @@ +import { IAgentRuntime, elizaLogger } from "@elizaos/core"; +import { z } from "zod"; + +// Add ENV variable at the top +let ENV: string = "mainnet"; + +// Log environment information +elizaLogger.info("Environment sources", { + shellVars: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), +}); + +export const akashEnvSchema = z.object({ + AKASH_MNEMONIC: z.string() + .min(1, "Wallet mnemonic is required") + .refine( + (mnemonic) => { + const words = mnemonic.trim().split(/\s+/); + return words.length === 12 || words.length === 24; + }, + { + message: "Mnemonic must be 12 or 24 words", + path: ["AKASH_MNEMONIC"] + } + ), + AKASH_WALLET_ADDRESS: z.string() + .min(1, "Wallet address is required") + .regex(/^akash[a-zA-Z0-9]{39}$/, "Invalid Akash wallet address format") + .optional(), + AKASH_NET: z.string().min(1, "Network configuration URL is required"), + AKASH_VERSION: z.string().min(1, "Akash version is required"), + AKASH_CHAIN_ID: z.string().min(1, "Chain ID is required"), + AKASH_NODE: z.string().min(1, "Node URL is required"), + RPC_ENDPOINT: z.string().min(1, "RPC endpoint is required"), + AKASH_GAS_PRICES: z.string().min(1, "Gas prices are required"), + AKASH_GAS_ADJUSTMENT: z.string().min(1, "Gas adjustment is required"), + AKASH_KEYRING_BACKEND: z.string().min(1, "Keyring backend is required"), + AKASH_FROM: z.string().min(1, "Key name is required"), + AKASH_FEES: z.string().min(1, "Transaction fees are required"), + 
AKASH_DEPOSIT: z.string().min(1, "Deposit is required be careful with the value not too low generally around 500000uakt"), + AKASH_PRICING_API_URL: z.string().optional(), + AKASH_DEFAULT_CPU: z.string().optional(), + AKASH_DEFAULT_MEMORY: z.string().optional(), + AKASH_DEFAULT_STORAGE: z.string().optional(), + AKASH_SDL: z.string().optional(), + AKASH_CLOSE_DEP: z.string().optional(), + AKASH_CLOSE_DSEQ: z.string().optional(), + AKASH_PROVIDER_INFO: z.string().optional(), + AKASH_DEP_STATUS: z.string().optional(), + AKASH_DEP_DSEQ: z.string().optional(), + AKASH_GAS_OPERATION: z.string().optional(), + AKASH_GAS_DSEQ: z.string().optional(), + // Manifest Configuration + AKASH_MANIFEST_MODE: z.string() + .optional() + .refine( + (mode) => !mode || ["auto", "manual", "validate_only"].includes(mode), + { + message: "AKASH_MANIFEST_MODE must be one of: auto, manual, validate_only" + } + ), + AKASH_MANIFEST_PATH: z.string() + .optional(), + AKASH_MANIFEST_VALIDATION_LEVEL: z.string() + .optional() + .refine( + (level) => !level || ["strict", "lenient", "none"].includes(level), + { + message: "AKASH_MANIFEST_VALIDATION_LEVEL must be one of: strict, lenient, none" + } + ), +}); + +export type AkashConfig = z.infer; + +export function getConfig( + env: string | undefined | null = ENV || + process.env.AKASH_ENV +) { + ENV = env || "mainnet"; + switch (env) { + case "mainnet": + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", + RPC_ENDPOINT: "https://rpc.akashnet.net:443", + AKASH_GAS_PRICES: "0.025uakt", + AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "os", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + 
AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + case "testnet": + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/testnet", + RPC_ENDPOINT: "https://rpc.sandbox-01.aksh.pw", + AKASH_GAS_PRICES: "0.025uakt", + AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "test", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", 
+ AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + default: + return { + AKASH_NET: "https://raw.githubusercontent.com/ovrclk/net/master/mainnet", + RPC_ENDPOINT: "https://rpc.akashnet.net:443", + AKASH_GAS_PRICES: "0.025uakt", + AKASH_GAS_ADJUSTMENT: "1.5", + AKASH_KEYRING_BACKEND: "os", + AKASH_FROM: "default", + AKASH_FEES: "20000uakt", + AKASH_WALLET_ADDRESS: process.env.AKASH_WALLET_ADDRESS || "", + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || "https://console-api.akash.network/v1/pricing", + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || "1000", + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || "1000000000", + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || "1000000000", + AKASH_SDL: process.env.AKASH_SDL || "example.sdl.yml", + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || "closeAll", + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || "", + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || "", + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || "param_passed", + AKASH_DEP_DSEQ: process.env.AKASH_DEP_DSEQ || "", + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || "close", + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || "", + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || "auto", + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || "", + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || "strict", + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || "500000uakt" + }; + } +} + +export async function validateAkashConfig( + runtime: IAgentRuntime +): Promise { + try { + // Log environment information + // elizaLogger.info("Environment configuration details", { + // shellMnemonic: process.env.AKASH_MNEMONIC, + // runtimeMnemonic: 
runtime.getSetting("AKASH_MNEMONIC"), + // envVars: { + // fromShell: Object.keys(process.env).filter(key => key.startsWith('AKASH_')), + // fromRuntime: Object.keys(runtime) + // .filter(key => typeof runtime.getSetting === 'function' && runtime.getSetting(key)) + // .filter(key => key.startsWith('AKASH_')) + // } + // }); + + const envConfig = getConfig( + runtime.getSetting("AKASH_ENV") ?? undefined + ); + + // Fetch dynamic values from the network configuration + const akashNet = process.env.AKASH_NET || runtime.getSetting("AKASH_NET") || envConfig.AKASH_NET; + const version = await fetch(`${akashNet}/version.txt`).then(res => res.text()); + const chainId = await fetch(`${akashNet}/chain-id.txt`).then(res => res.text()); + const node = await fetch(`${akashNet}/rpc-nodes.txt`).then(res => res.text().then(text => text.split('\n')[0])); + + // Prioritize shell environment variables over runtime settings + const mnemonic = process.env.AKASH_MNEMONIC || runtime.getSetting("AKASH_MNEMONIC"); + + // elizaLogger.debug("SDL configuration", { + // fromShell: process.env.AKASH_SDL, + // fromRuntime: runtime.getSetting("AKASH_SDL"), + // fromConfig: envConfig.AKASH_SDL + // }); + + if (!mnemonic) { + throw new Error( + "AKASH_MNEMONIC not found in environment variables or runtime settings.\n" + + "Please ensure AKASH_MNEMONIC is set in your shell environment or runtime settings" + ); + } + + // Clean the mnemonic string - handle quotes and whitespace + const cleanMnemonic = mnemonic + .trim() + .replace(/^["']|["']$/g, '') // Remove surrounding quotes + .replace(/\n/g, ' ') + .replace(/\r/g, ' ') + .replace(/\s+/g, ' '); + + const mnemonicWords = cleanMnemonic.split(' ').filter(word => word.length > 0); + + if (mnemonicWords.length !== 12 && mnemonicWords.length !== 24) { + throw new Error( + `Invalid AKASH_MNEMONIC length: got ${mnemonicWords.length} words, expected 12 or 24 words.\n` + + `Words found: ${mnemonicWords.join(', ')}` + ); + } + + const config = { + 
AKASH_MNEMONIC: cleanMnemonic, + AKASH_NET: akashNet, + AKASH_VERSION: version, + AKASH_CHAIN_ID: chainId, + AKASH_NODE: node, + RPC_ENDPOINT: process.env.RPC_ENDPOINT || runtime.getSetting("RPC_ENDPOINT") || envConfig.RPC_ENDPOINT, + AKASH_GAS_PRICES: process.env.AKASH_GAS_PRICES || runtime.getSetting("AKASH_GAS_PRICES") || envConfig.AKASH_GAS_PRICES, + AKASH_GAS_ADJUSTMENT: process.env.AKASH_GAS_ADJUSTMENT || runtime.getSetting("AKASH_GAS_ADJUSTMENT") || envConfig.AKASH_GAS_ADJUSTMENT, + AKASH_KEYRING_BACKEND: process.env.AKASH_KEYRING_BACKEND || runtime.getSetting("AKASH_KEYRING_BACKEND") || envConfig.AKASH_KEYRING_BACKEND, + AKASH_FROM: process.env.AKASH_FROM || runtime.getSetting("AKASH_FROM") || envConfig.AKASH_FROM, + AKASH_FEES: process.env.AKASH_FEES || runtime.getSetting("AKASH_FEES") || envConfig.AKASH_FEES, + AKASH_PRICING_API_URL: process.env.AKASH_PRICING_API_URL || runtime.getSetting("AKASH_PRICING_API_URL") || envConfig.AKASH_PRICING_API_URL, + AKASH_DEFAULT_CPU: process.env.AKASH_DEFAULT_CPU || runtime.getSetting("AKASH_DEFAULT_CPU") || envConfig.AKASH_DEFAULT_CPU, + AKASH_DEFAULT_MEMORY: process.env.AKASH_DEFAULT_MEMORY || runtime.getSetting("AKASH_DEFAULT_MEMORY") || envConfig.AKASH_DEFAULT_MEMORY, + AKASH_DEFAULT_STORAGE: process.env.AKASH_DEFAULT_STORAGE || runtime.getSetting("AKASH_DEFAULT_STORAGE") || envConfig.AKASH_DEFAULT_STORAGE, + AKASH_SDL: process.env.AKASH_SDL || runtime.getSetting("AKASH_SDL") || envConfig.AKASH_SDL, + AKASH_CLOSE_DEP: process.env.AKASH_CLOSE_DEP || runtime.getSetting("AKASH_CLOSE_DEP") || envConfig.AKASH_CLOSE_DEP, + AKASH_CLOSE_DSEQ: process.env.AKASH_CLOSE_DSEQ || runtime.getSetting("AKASH_CLOSE_DSEQ") || envConfig.AKASH_CLOSE_DSEQ, + AKASH_PROVIDER_INFO: process.env.AKASH_PROVIDER_INFO || runtime.getSetting("AKASH_PROVIDER_INFO") || envConfig.AKASH_PROVIDER_INFO, + AKASH_DEP_STATUS: process.env.AKASH_DEP_STATUS || runtime.getSetting("AKASH_DEP_STATUS") || envConfig.AKASH_DEP_STATUS, + AKASH_DEP_DSEQ: 
process.env.AKASH_DEP_DSEQ || runtime.getSetting("AKASH_DEP_DSEQ") || envConfig.AKASH_DEP_DSEQ, + AKASH_GAS_OPERATION: process.env.AKASH_GAS_OPERATION || runtime.getSetting("AKASH_GAS_OPERATION") || envConfig.AKASH_GAS_OPERATION, + AKASH_GAS_DSEQ: process.env.AKASH_GAS_DSEQ || runtime.getSetting("AKASH_GAS_DSEQ") || envConfig.AKASH_GAS_DSEQ, + AKASH_MANIFEST_MODE: process.env.AKASH_MANIFEST_MODE || runtime.getSetting("AKASH_MANIFEST_MODE") || envConfig.AKASH_MANIFEST_MODE, + AKASH_MANIFEST_PATH: process.env.AKASH_MANIFEST_PATH || runtime.getSetting("AKASH_MANIFEST_PATH") || envConfig.AKASH_MANIFEST_PATH, + AKASH_MANIFEST_VALIDATION_LEVEL: process.env.AKASH_MANIFEST_VALIDATION_LEVEL || runtime.getSetting("AKASH_MANIFEST_VALIDATION_LEVEL") || envConfig.AKASH_MANIFEST_VALIDATION_LEVEL, + AKASH_DEPOSIT: process.env.AKASH_DEPOSIT || runtime.getSetting("AKASH_DEPOSIT") || envConfig.AKASH_DEPOSIT + }; + + return akashEnvSchema.parse(config); + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + throw new Error(`Failed to validate Akash configuration: ${errorMessage}`); + } +} diff --git a/packages/plugin-akash/src/error/error.ts b/packages/plugin-akash/src/error/error.ts new file mode 100644 index 00000000000..3211d8f8283 --- /dev/null +++ b/packages/plugin-akash/src/error/error.ts @@ -0,0 +1,126 @@ +import { elizaLogger } from "@elizaos/core"; + +export enum AkashErrorCategory { + WALLET = 'WALLET', + DEPLOYMENT = 'DEPLOYMENT', + LEASE = 'LEASE', + PROVIDER = 'PROVIDER', + MANIFEST = 'MANIFEST', + NETWORK = 'NETWORK', + TRANSACTION = 'TRANSACTION', + VALIDATION = 'VALIDATION', + SDK = 'SDK', + API = 'API', + FILE = 'FILE' +} + +export enum AkashErrorCode { + // Wallet Errors (1000-1999) + WALLET_NOT_INITIALIZED = 1000, + WALLET_CONNECTION_FAILED = 1001, + WALLET_INSUFFICIENT_FUNDS = 1002, + WALLET_UNAUTHORIZED = 1003, + WALLET_SIGNATURE_FAILED = 1004, + WALLET_MESSAGE_INVALID = 1005, + WALLET_INITIALIZATION_FAILED = "WALLET_INITIALIZATION_FAILED", + CLIENT_SETUP_FAILED = "CLIENT_SETUP_FAILED", + + // Certificate Errors (1500-1599) + CERTIFICATE_CREATION_FAILED = 1500, + CERTIFICATE_BROADCAST_FAILED = 1501, + CERTIFICATE_NOT_FOUND = 1502, + + // Deployment Errors (2000-2999) + DEPLOYMENT_NOT_FOUND = 2000, + DEPLOYMENT_CREATION_FAILED = 2001, + DEPLOYMENT_UPDATE_FAILED = 2002, + DEPLOYMENT_CLOSE_FAILED = 2003, + DEPLOYMENT_START_TIMEOUT = 2004, + + // Lease Errors (3000-3999) + LEASE_NOT_FOUND = 3000, + LEASE_CREATION_FAILED = 3001, + LEASE_CLOSE_FAILED = 3002, + LEASE_INVALID_STATE = 3003, + LEASE_BID_NOT_FOUND = 3004, + LEASE_QUERY_FAILED = 3005, + LEASE_STATUS_ERROR = 3006, + LEASE_VALIDATION_FAILED = 3007, + INVALID_LEASE = 3008, + + // Provider Errors (4000-4999) + PROVIDER_NOT_FOUND = 4000, + PROVIDER_UNREACHABLE = 4001, + PROVIDER_RESPONSE_ERROR = 4002, + PROVIDER_LIST_ERROR = 4003, + PROVIDER_FILTER_ERROR = 4004, + + // Manifest Errors (5000-5999) + MANIFEST_INVALID = 5000, + MANIFEST_PARSING_FAILED = 5001, + 
MANIFEST_DEPLOYMENT_FAILED = 5002, + MANIFEST_VALIDATION_FAILED = 5003, + + // Bid Errors (6000-6999) + BID_FETCH_TIMEOUT = 6000, + INVALID_BID = 6001, + + // SDL Errors (7000-7999) + SDL_PARSING_FAILED = 7000, + + // Validation Errors (8000-8999) + VALIDATION_PARAMETER_MISSING = 8000, + VALIDATION_PARAMETER_INVALID = 8001, + VALIDATION_STATE_INVALID = 8002, + VALIDATION_SDL_FAILED = 8003, + VALIDATION_CONFIG_INVALID = 8004, + + // Generic Errors (9000-9999) + INSUFFICIENT_FUNDS = 9000, + + // API Errors (10000-10999) + API_ERROR = 10000, + API_RESPONSE_INVALID = 10001, + API_REQUEST_FAILED = 10002, + API_TIMEOUT = 10003, + + // File System Errors (11000-11999) + FILE_NOT_FOUND = 11000, + FILE_READ_ERROR = 11001, + FILE_WRITE_ERROR = 11002, + FILE_PERMISSION_ERROR = 11003, + + // Network Errors (12000-12999) + RPC_CONNECTION_FAILED = 12000 +} + +export class AkashError extends Error { + constructor( + message: string, + public code: AkashErrorCode, + public details?: Record, + public category: string = "akash" + ) { + super(message); + this.name = "AkashError"; + } +} + +export async function withRetry( + fn: () => Promise, + maxRetries: number = 3, + delay: number = 1000 +): Promise { + let lastError: Error | undefined; + for (let i = 0; i < maxRetries; i++) { + try { + return await fn(); + } catch (error) { + lastError = error as Error; + if (i < maxRetries - 1) { + await new Promise(resolve => setTimeout(resolve, delay * Math.pow(2, i))); + } + } + } + throw lastError; +} diff --git a/packages/plugin-akash/src/index.ts b/packages/plugin-akash/src/index.ts new file mode 100644 index 00000000000..e8c8955204c --- /dev/null +++ b/packages/plugin-akash/src/index.ts @@ -0,0 +1,68 @@ +import { Plugin, elizaLogger } from "@elizaos/core"; +import { createDeploymentAction } from "./actions/createDeployment"; +import { closeDeploymentAction } from "./actions/closeDeployment"; +import { getProviderInfoAction } from "./actions/getProviderInfo"; +import { 
getDeploymentStatusAction } from "./actions/getDeploymentStatus"; +import { estimateGas } from "./actions/estimateGas"; +import { getDeploymentApiAction } from "./actions/getDeploymentApi"; +import { getGPUPricingAction } from "./actions/getGPUPricing"; +import { getManifestAction } from "./actions/getManifest"; +import { getProvidersListAction } from "./actions/getProvidersList"; + +const actions = [ + createDeploymentAction, + closeDeploymentAction, + getProviderInfoAction, + getDeploymentStatusAction, + estimateGas, + getDeploymentApiAction, + getGPUPricingAction, + getManifestAction, + getProvidersListAction, +]; + +// Initial banner +console.log("\n┌════════════════════════════════════════┐"); +console.log("│ AKASH NETWORK PLUGIN │"); +console.log("├────────────────────────────────────────┤"); +console.log("│ Initializing Akash Network Plugin... │"); +console.log("│ Version: 0.1.0 │"); +console.log("└════════════════════════════════════════┘"); + +// Format action registration message +const formatActionInfo = (action: any) => { + const name = action.name.padEnd(25); + const similes = (action.similes?.join(", ") || "none").padEnd(60); + const hasHandler = action.handler ? "✓" : "✗"; + const hasValidator = action.validate ? "✓" : "✗"; + const hasExamples = action.examples?.length > 0 ? 
"✓" : "✗"; + + return `│ ${name} │ ${hasHandler} │ ${hasValidator} │ ${hasExamples} │ ${similes} │`; +}; + +// Log registered actions +console.log("\n┌───────────────────────────┬───┬───┬───┬───────────────────────────────────────────────────────────┐"); +console.log("│ Action │ H │ V │ E │ Similes │"); +console.log("├───────────────────────────┼───┼───┼───┼────────────────────────────────────────────────────────────┤"); +actions.forEach(action => { + console.log(formatActionInfo(action)); +}); +console.log("└───────────────────────────┴───┴───┴───┴──────────────────────────────────────────────────────────┘"); + +// Plugin status +console.log("\n┌─────────────────────────────────────┐"); +console.log("│ Plugin Status │"); +console.log("├─────────────────────────────────────┤"); +console.log(`│ Name : akash │`); +console.log(`│ Actions : ${actions.length.toString().padEnd(24)} │`); +console.log(`│ Status : Loaded & Ready │`); +console.log("└─────────────────────────────────────┘\n"); + +export const akashPlugin: Plugin = { + name: "akash", + description: "Akash Network Plugin for deploying and managing cloud compute", + actions: actions, + evaluators: [] +}; + +export default akashPlugin; \ No newline at end of file diff --git a/packages/plugin-akash/src/providers/wallet.ts b/packages/plugin-akash/src/providers/wallet.ts new file mode 100644 index 00000000000..100b8f6bdca --- /dev/null +++ b/packages/plugin-akash/src/providers/wallet.ts @@ -0,0 +1,109 @@ +import { DirectSecp256k1HdWallet } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { elizaLogger } from "@elizaos/core"; +import { IAgentRuntime, Memory } from "@elizaos/core/src/types"; +import { validateAkashConfig } from "../environment"; +import { getAkashTypeRegistry } from "@akashnetwork/akashjs/build/stargate"; +import { + AkashProvider, + AkashWalletState, + AkashError, + AKASH_ERROR_CODES, + AkashRegistryTypes +} from "../types"; + +// Use a proper UUID for 
the wallet room +const WALLET_ROOM_ID = "00000000-0000-0000-0000-000000000001"; + +export const walletProvider: AkashProvider = { + type: "AKASH_WALLET", + version: "1.0.0", + name: "wallet", + description: "Akash wallet provider", + + initialize: async (runtime: IAgentRuntime): Promise => { + elizaLogger.info("Initializing Akash wallet provider"); + try { + const mnemonic = runtime.getSetting("AKASH_MNEMONIC"); + if (!mnemonic) { + throw new Error("AKASH_MNEMONIC not found in environment variables"); + } + + const config = await validateAkashConfig(runtime); + + // Create wallet from mnemonic + const wallet = await DirectSecp256k1HdWallet.fromMnemonic(config.AKASH_MNEMONIC, { + prefix: "akash", + }); + + // Get the wallet address + const [account] = await wallet.getAccounts(); + const address = account.address; + + // Create signing client with registry + const client = await SigningStargateClient.connectWithSigner( + config.RPC_ENDPOINT, + wallet, + { registry: getAkashTypeRegistry() as any } + ); + + // Store wallet info in memory manager + const state: AkashWalletState = { + wallet, + client, + address, + }; + + // Create memory object + const memory: Memory = { + id: WALLET_ROOM_ID, + userId: runtime.agentId, + agentId: runtime.agentId, + roomId: WALLET_ROOM_ID, + content: { + type: "wallet_state", + text: `Akash wallet initialized with address: ${address}`, + data: state, + }, + createdAt: Date.now(), + }; + + await runtime.messageManager.createMemory(memory); + + elizaLogger.info("Akash wallet provider initialized successfully", { + address, + }); + } catch (error) { + elizaLogger.error("Failed to initialize Akash wallet provider", { + error: error instanceof Error ? 
error.message : String(error) + }); + throw error; + } + }, + + get: async (runtime: IAgentRuntime, _message?: Memory): Promise => { + const memories = await runtime.messageManager.getMemories({ + roomId: WALLET_ROOM_ID, + count: 1, + }); + + const state = memories[0]?.content?.data; + if (!state) { + throw new AkashError( + "Akash wallet not initialized", + AKASH_ERROR_CODES.WALLET_NOT_INITIALIZED + ); + } + return state as AkashWalletState; + }, + + validate: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { + return true; + }, + + process: async (_runtime: IAgentRuntime, _message?: Memory): Promise => { + // No processing needed for wallet provider + } +}; + +export default walletProvider; diff --git a/packages/plugin-akash/src/runtime_inspect.ts b/packages/plugin-akash/src/runtime_inspect.ts new file mode 100644 index 00000000000..25b5aee39fd --- /dev/null +++ b/packages/plugin-akash/src/runtime_inspect.ts @@ -0,0 +1,90 @@ +import { elizaLogger } from "@elizaos/core"; +import type { IAgentRuntime, Plugin, Action } from "@elizaos/core"; + +/** + * Utility to inspect runtime plugin loading + */ +export function inspectRuntime(runtime: IAgentRuntime) { + elizaLogger.info("=== Runtime Plugin Inspection ==="); + + // Check if runtime has plugins array + const hasPlugins = !!(runtime as any).plugins; + elizaLogger.info("Runtime plugins status:", { + hasPluginsArray: hasPlugins, + pluginCount: hasPlugins ? (runtime as any).plugins.length : 0 + }); + + // If plugins exist, check for our plugin + if (hasPlugins) { + const plugins = (runtime as any).plugins as Plugin[]; + const akashPlugin = plugins.find(p => p.name === "akash"); + + elizaLogger.info("Akash plugin status:", { + isLoaded: !!akashPlugin, + pluginDetails: akashPlugin ? 
{ + name: akashPlugin.name, + actionCount: akashPlugin.actions?.length || 0, + actions: akashPlugin.actions?.map(a => a.name) || [] + } : null + }); + } + + // Check registered actions + const hasActions = !!(runtime as any).actions; + if (hasActions) { + const actions = (runtime as any).actions as Action[]; + const akashActions = actions.filter((action: Action) => + action.name === "CREATE_DEPLOYMENT" || + (action.similes || []).includes("CREATE_DEPLOYMENT") + ); + + elizaLogger.info("Akash actions status:", { + totalActions: actions.length, + akashActionsCount: akashActions.length, + akashActions: akashActions.map((action: Action) => ({ + name: action.name, + similes: action.similes + })) + }); + } +} + +/** + * Helper to check if a plugin is properly loaded + */ +export function isPluginLoaded(runtime: IAgentRuntime, pluginName: string): boolean { + // Check plugins array + const plugins = (runtime as any).plugins as Plugin[]; + if (!plugins) { + elizaLogger.warn(`No plugins array found in runtime`); + return false; + } + + // Look for our plugin + const plugin = plugins.find(p => p.name === pluginName); + if (!plugin) { + elizaLogger.warn(`Plugin ${pluginName} not found in runtime plugins`); + return false; + } + + // Check if actions are registered + const actions = (runtime as any).actions as Action[]; + if (!actions || !actions.length) { + elizaLogger.warn(`No actions found in runtime`); + return false; + } + + // Check if plugin's actions are registered + const pluginActions = plugin.actions || []; + const registeredActions = pluginActions.every(pluginAction => + actions.some((action: Action) => action.name === pluginAction.name) + ); + + if (!registeredActions) { + elizaLogger.warn(`Not all ${pluginName} actions are registered in runtime`); + return false; + } + + elizaLogger.info(`Plugin ${pluginName} is properly loaded and registered`); + return true; +} \ No newline at end of file diff --git a/packages/plugin-akash/src/sdl/example.sdl.yml 
b/packages/plugin-akash/src/sdl/example.sdl.yml new file mode 100644 index 00000000000..6e6ac836886 --- /dev/null +++ b/packages/plugin-akash/src/sdl/example.sdl.yml @@ -0,0 +1,33 @@ +--- +version: "2.0" +services: + web: + image: baktun/hello-akash-world:1.0.0 + expose: + - port: 3000 + as: 80 + to: + - global: true +profiles: + compute: + web: + resources: + cpu: + units: 0.5 + memory: + size: 512Mi + storage: + size: 512Mi + placement: + dcloud: + pricing: + web: + denom: uakt + amount: 20000 + + +deployment: + web: + dcloud: + profile: web + count: 1 diff --git a/packages/plugin-akash/src/types.ts b/packages/plugin-akash/src/types.ts new file mode 100644 index 00000000000..8d5c94a9880 --- /dev/null +++ b/packages/plugin-akash/src/types.ts @@ -0,0 +1,167 @@ +import { DirectSecp256k1HdWallet, Registry } from "@cosmjs/proto-signing"; +import { SigningStargateClient } from "@cosmjs/stargate"; +import { Provider } from "@elizaos/core"; +import { IAgentRuntime, Memory } from "@elizaos/core"; +import { SDL } from "@akashnetwork/akashjs/build/sdl"; +import { MsgCreateDeployment } from "@akashnetwork/akash-api/akash/deployment/v1beta3"; +import { QueryBidsRequest, MsgCreateLease, BidID } from "@akashnetwork/akash-api/akash/market/v1beta4"; + +// Core wallet state type +export interface AkashWalletState { + wallet: DirectSecp256k1HdWallet; + client: SigningStargateClient; + address: string; + certificate?: { + cert: string; + privateKey: string; + publicKey: string; + }; +} + +// Provider type extending core Provider +export interface AkashProvider { + type: string; + version: string; + name: string; + description: string; + initialize: (runtime: IAgentRuntime) => Promise; + get: (runtime: IAgentRuntime, message?: Memory) => Promise; + validate: (runtime: IAgentRuntime, message?: Memory) => Promise; + process: (runtime: IAgentRuntime, message?: Memory) => Promise; +} + +// Registry type for Akash +export type AkashRegistryTypes = [string, any][]; + +// Deployment related 
types +export interface AkashDeploymentId { + owner: string; + dseq: string; +} + +export interface AkashDeployment { + id: AkashDeploymentId; + sdl: SDL; + deposit: string; + msg?: MsgCreateDeployment; +} + +// Lease related types +export interface AkashLeaseId { + owner: string; + dseq: string; + provider: string; + gseq: number; + oseq: number; +} + +export interface AkashLease { + id: AkashLeaseId; + state?: string; + manifestData?: any; + msg?: MsgCreateLease; +} + +// Provider types +export interface AkashProviderInfo { + owner: string; + hostUri: string; + attributes: Array<{ + key: string; + value: string; + }>; +} + +// Bid types +export interface AkashBidId { + owner: string; + dseq: string; + gseq: number; + oseq: number; + provider: string; +} + +export interface AkashBid { + id: AkashBidId; + state: string; + price: { + denom: string; + amount: string; + }; +} + +// Error handling types +export enum AKASH_ERROR_CODES { + WALLET_NOT_INITIALIZED = "WALLET_NOT_INITIALIZED", + INVALID_MNEMONIC = "INVALID_MNEMONIC", + INVALID_ADDRESS = "INVALID_ADDRESS", + INSUFFICIENT_FUNDS = "INSUFFICIENT_FUNDS", + DEPLOYMENT_FAILED = "DEPLOYMENT_FAILED", + LEASE_FAILED = "LEASE_FAILED", + PROVIDER_NOT_FOUND = "PROVIDER_NOT_FOUND", + NETWORK_ERROR = "NETWORK_ERROR", + CERTIFICATE_ERROR = "CERTIFICATE_ERROR", + MANIFEST_ERROR = "MANIFEST_ERROR", + BID_ERROR = "BID_ERROR", + MANIFEST_FAILED = "MANIFEST_FAILED", + PROVIDER_ERROR = "PROVIDER_ERROR" +} + +export class AkashError extends Error { + constructor( + message: string, + public code: AKASH_ERROR_CODES, + public originalError?: Error + ) { + super(message); + this.name = "AkashError"; + } +} + +// Provider configuration +export interface AkashConfig { + AKASH_MNEMONIC: string; + RPC_ENDPOINT: string; + CHAIN_ID?: string; + GAS_PRICE?: string; + GAS_ADJUSTMENT?: number; + CERTIFICATE_PATH?: string; +} + +// Message types +export interface AkashMessage { + type: string; + value: any; +} + +// Response types +export 
interface AkashTxResponse { + code: number; + height: number; + txhash: string; + rawLog: string; + data?: string; + gasUsed: number; + gasWanted: number; +} + +// Provider state types +export interface AkashProviderState { + isInitialized: boolean; + lastSync: number; + balance?: string; + address?: string; + certificate?: { + cert: string; + privateKey: string; + publicKey: string; + }; +} + +// Memory room constants +export const AKASH_MEMORY_ROOMS = { + WALLET: "00000000-0000-0000-0000-000000000001", + DEPLOYMENT: "00000000-0000-0000-0000-000000000002", + LEASE: "00000000-0000-0000-0000-000000000003", + CERTIFICATE: "00000000-0000-0000-0000-000000000004" +} as const; diff --git a/packages/plugin-akash/src/utils/paths.ts b/packages/plugin-akash/src/utils/paths.ts new file mode 100644 index 00000000000..3cbb1cd1e07 --- /dev/null +++ b/packages/plugin-akash/src/utils/paths.ts @@ -0,0 +1,134 @@ +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import { elizaLogger } from "@elizaos/core"; +import { existsSync } from 'fs'; +import fs from 'fs'; +import { getConfig } from '../environment'; + +export const getPluginRoot = (importMetaUrl: string) => { + // elizaLogger.info("=== Starting Plugin Root Resolution ===", { + // importMetaUrl, + // isFileProtocol: importMetaUrl.startsWith('file://'), + // urlSegments: importMetaUrl.split('/') + // }); + + const currentFileUrl = importMetaUrl; + const currentFilePath = fileURLToPath(currentFileUrl); + const currentDir = path.dirname(currentFilePath); + + // Find plugin-akash directory by walking up until we find it + let dir = currentDir; + while (dir && path.basename(dir) !== 'plugin-akash' && dir !== '/') { + dir = path.dirname(dir); + } + + if (!dir || dir === '/') { + elizaLogger.error("Could not find plugin-akash directory", { + currentFilePath, + currentDir, + searchPath: dir + }); + throw new Error("Could not find plugin-akash directory"); + } + + // elizaLogger.info("Plugin Root Path Details", { + // 
currentFilePath, + // currentDir, + // pluginRoot: dir, + // exists: existsSync(dir), + // parentDir: path.dirname(dir), + // parentExists: existsSync(path.dirname(dir)), + // parentContents: existsSync(path.dirname(dir)) ? fs.readdirSync(path.dirname(dir)) : [] + // }); + + return dir; +}; + +export const getSrcPath = (importMetaUrl: string) => { + // elizaLogger.info("=== Resolving Src Path ==="); + const pluginRoot = getPluginRoot(importMetaUrl); + const srcPath = path.join(pluginRoot, 'src'); + + // elizaLogger.info("Src Path Details", { + // pluginRoot, + // srcPath, + // exists: existsSync(srcPath), + // contents: existsSync(srcPath) ? fs.readdirSync(srcPath) : [], + // absolutePath: path.resolve(srcPath), + // relativeToCwd: path.relative(process.cwd(), srcPath) + // }); + + return srcPath; +}; + +export const getCertificatePath = (importMetaUrl: string) => { + const srcPath = getSrcPath(importMetaUrl); + const certPath = path.join(srcPath, '.certificates', 'cert.json'); + + // elizaLogger.debug("Certificate Path Resolution", { + // srcPath, + // certPath, + // exists: existsSync(certPath) + // }); + + return certPath; +}; + +export const getDefaultSDLPath = (importMetaUrl: string) => { + // elizaLogger.info("=== Resolving SDL Path ==="); + const pluginRoot = getPluginRoot(importMetaUrl); + const srcPath = getSrcPath(importMetaUrl); + const config = getConfig(process.env.AKASH_ENV); + const sdlFileName = config.AKASH_SDL; + const sdlPath = path.join(srcPath, 'sdl', sdlFileName); + const sdlDir = path.dirname(sdlPath); + + // Only log if file doesn't exist as a warning + if (!existsSync(sdlPath)) { + // elizaLogger.warn("SDL file not found at expected path", { + // sdlPath, + // exists: false + // }); + } + + // Try to find SDL file in nearby directories + const searchPaths = [ + sdlPath, + path.join(srcPath, sdlFileName), + path.join(pluginRoot, sdlFileName), + path.join(pluginRoot, 'sdl', sdlFileName), + path.join(pluginRoot, 'src', 'sdl', sdlFileName) + ]; 
+ + // Only log if we find the file + for (const searchPath of searchPaths) { + if (existsSync(searchPath)) { + // elizaLogger.info("Found SDL file at", { path: searchPath }); + return searchPath; + } + } + + return sdlPath; +}; + +// Helper function to ensure a path includes plugin-akash +export const ensurePluginPath = (filePath: string, importMetaUrl: string) => { + if (!filePath.includes('plugin-akash')) { + const srcPath = getSrcPath(importMetaUrl); + return path.join(srcPath, path.basename(filePath)); + } + return filePath; +}; + +export function getDeploymentsPath(importMetaUrl: string): string { + const srcPath = getSrcPath(importMetaUrl); + const deploymentsPath = path.join(srcPath, 'deployments'); + + // elizaLogger.debug("Deployments Path Resolution", { + // srcPath, + // deploymentsPath, + // exists: existsSync(deploymentsPath) + // }); + + return deploymentsPath; +} \ No newline at end of file diff --git a/packages/plugin-akash/tsconfig.json b/packages/plugin-akash/tsconfig.json new file mode 100644 index 00000000000..e535bee0d71 --- /dev/null +++ b/packages/plugin-akash/tsconfig.json @@ -0,0 +1,39 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src", + "module": "ESNext", + "target": "ESNext", + "lib": [ + "ESNext", + "DOM" + ], + "moduleResolution": "Bundler", + "allowImportingTsExtensions": true, + "emitDeclarationOnly": true, + "isolatedModules": true, + "esModuleInterop": true, + "skipLibCheck": true, + "strict": true, + "declaration": true, + "sourceMap": true, + "types": [ + "vitest/globals", + "node", + "jest" + ], + "baseUrl": ".", + "preserveSymlinks": true + }, + "include": [ + "src/**/*", + "test/actions/getDeploymentApi.test.ts" + ], + "exclude": [ + "node_modules", + "dist", + "test", + "../../packages/core/**/*" + ] +} \ No newline at end of file diff --git a/packages/plugin-akash/tsup.config.ts b/packages/plugin-akash/tsup.config.ts new file mode 100644 index 00000000000..a2b714de910 
--- /dev/null +++ b/packages/plugin-akash/tsup.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm"], + dts: true, + splitting: false, + sourcemap: true, + clean: true, +}); diff --git a/packages/plugin-akash/vitest.config.ts b/packages/plugin-akash/vitest.config.ts new file mode 100644 index 00000000000..2b76c168780 --- /dev/null +++ b/packages/plugin-akash/vitest.config.ts @@ -0,0 +1,27 @@ +import { defineConfig } from 'vitest/config'; +import path from 'path'; + +export default defineConfig({ + test: { + globals: true, + environment: 'node', + include: ['test/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}'], + exclude: ['node_modules', 'dist', '.idea', '.git', '.cache'], + root: '.', + reporters: ['verbose'], + coverage: { + reporter: ['text', 'json', 'html'], + exclude: [ + 'node_modules/', + 'test/fixtures/', + 'test/setup/' + ] + }, + setupFiles: ['./test/setup/vitest.setup.ts'] + }, + resolve: { + alias: { + '@': path.resolve(__dirname, './src') + } + } +}); \ No newline at end of file From 8e3d2328b6f10222906981a990d3fe63f105deae Mon Sep 17 00:00:00 2001 From: AIFlow_ML Date: Fri, 10 Jan 2025 20:19:45 +0700 Subject: [PATCH 2/3] adding all the missing environments for Akash --- .env.example | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/.env.example b/.env.example index 60f1a00d860..e2f48c6f505 100644 --- a/.env.example +++ b/.env.example @@ -467,3 +467,48 @@ TAVILY_API_KEY= # Verifiable Inference Configuration VERIFIABLE_INFERENCE_ENABLED=false # Set to false to disable verifiable inference VERIFIABLE_INFERENCE_PROVIDER=opacity # Options: opacity + +#################################### +#### Akash Network Configuration #### +#################################### +AKASH_ENV=mainnet +AKASH_NET=https://raw.githubusercontent.com/ovrclk/net/master/mainnet +RPC_ENDPOINT=https://rpc.akashnet.net:443 +AKASH_GAS_PRICES=0.025uakt
+AKASH_GAS_ADJUSTMENT=1.5 +AKASH_KEYRING_BACKEND=os +AKASH_FROM=default +AKASH_FEES=20000uakt +AKASH_DEPOSIT=500000uakt +AKASH_MNEMONIC= +AKASH_WALLET_ADDRESS= +# Akash Pricing API +AKASH_PRICING_API_URL=https://console-api.akash.network/v1/pricing +# Default values: 1 CPU = 1000, 1GB memory = 1000000000, 1GB storage = 1000000000 +AKASH_DEFAULT_CPU=1000 +AKASH_DEFAULT_MEMORY=1000000000 +AKASH_DEFAULT_STORAGE=1000000000 +AKASH_SDL=example.sdl.yml +# Close deployment +# Close all deployments = closeAll +# Close a single deployment = dseq and add the value in AKASH_CLOSE_DSEQ +AKASH_CLOSE_DEP=closeAll +AKASH_CLOSE_DSEQ=19729929 +# Provider Info: we added one to check; you will have to pass this into the action +AKASH_PROVIDER_INFO=akash1ccktptfkvdc67msasmesuy5m7gpc76z75kukpz +# Deployment Status +# AKASH_DEP_STATUS = dseq or param_passed; when you are building you will pass the dseq dynamically. To test, +# you can pass the dseq using AKASH_DEP_DSEQ; 19729929 is an example of a dseq we tested while building. +AKASH_DEP_STATUS=dseq +AKASH_DEP_DSEQ=19729929 +# Gas Estimation Options: close, create, or update +# dseq is required when operation is "close"; 19729929 is an example of a dseq we tested while building.
+AKASH_GAS_OPERATION=close +AKASH_GAS_DSEQ=19729929 +# Manifest +# Values: "auto" | "manual" | "validate_only" Default: "auto" +AKASH_MANIFEST_MODE=auto +# Default: Will use the SDL directory +AKASH_MANIFEST_PATH= +# Values: "strict" | "lenient" | "none" - Default: "strict" +AKASH_MANIFEST_VALIDATION_LEVEL=strict \ No newline at end of file From 909baf814b5b6877a16cbd67b96c8f27f081f452 Mon Sep 17 00:00:00 2001 From: Sayo Date: Sat, 11 Jan 2025 02:56:43 +0530 Subject: [PATCH 3/3] Update packages/plugin-akash/src/actions/getDeploymentApi.ts Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- packages/plugin-akash/src/actions/getDeploymentApi.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/plugin-akash/src/actions/getDeploymentApi.ts b/packages/plugin-akash/src/actions/getDeploymentApi.ts index eea16727b6d..a279dff3b96 100644 --- a/packages/plugin-akash/src/actions/getDeploymentApi.ts +++ b/packages/plugin-akash/src/actions/getDeploymentApi.ts @@ -171,7 +171,12 @@ export async function fetchDeployments( const apiStatus = status; // Don't include status in URL if not specified - const url = `https://console-api.akash.network/v1/addresses/${address}/deployments/${skip}/${limit}${apiStatus ? `?status=${apiStatus}` : ''}&reverseSorting=true`; + const params = new URLSearchParams(); + if (apiStatus) { + params.append('status', apiStatus); + } + params.append('reverseSorting', 'true'); + const url = `https://console-api.akash.network/v1/addresses/${address}/deployments/${skip}/${limit}?${params.toString()}`; elizaLogger.debug("Making API request", { url }); const response = await fetchWithRetry(url, {