From c38d894f365ec84f467f8818fa7f63d01749ef3c Mon Sep 17 00:00:00 2001 From: Andres David Jimenez Sulbaran Date: Sat, 28 Mar 2026 08:55:56 -0600 Subject: [PATCH 01/24] feat: add specification and implementation tasks for @effectify/hatchet package - Created hatchet-spec.md to define requirements and scenarios for the @effectify/hatchet package, covering core, effectifier, workflow, logging, schema, testing, monorepo setup, and integration specs. - Added hatchet-tasks.md to outline implementation tasks for the package, detailing phases from monorepo setup to documentation and release, with a total of 47 tasks. --- .engram/chunks/a5edc791.jsonl.gz | Bin 0 -> 13845 bytes .engram/manifest.json | 8 + docs/design/hatchet-design.md | 764 +++++++++ .../CHAT-original-idea-effectify-hatchet.md | 709 +++++++++ docs/prds/PRD-effectify-hatchet.md | 1392 +++++++++++++++++ docs/proposals/hatchet-integration.md | 82 + docs/specs/hatchet-spec.md | 719 +++++++++ docs/tasks/hatchet-tasks.md | 392 +++++ 8 files changed, 4066 insertions(+) create mode 100644 .engram/chunks/a5edc791.jsonl.gz create mode 100644 docs/design/hatchet-design.md create mode 100644 docs/prds/CHAT-original-idea-effectify-hatchet.md create mode 100644 docs/prds/PRD-effectify-hatchet.md create mode 100644 docs/proposals/hatchet-integration.md create mode 100644 docs/specs/hatchet-spec.md create mode 100644 docs/tasks/hatchet-tasks.md diff --git a/.engram/chunks/a5edc791.jsonl.gz b/.engram/chunks/a5edc791.jsonl.gz new file mode 100644 index 0000000000000000000000000000000000000000..b1e669da1bb7cdc8fd9c486c45b0171863d9b417 GIT binary patch literal 13845 zcmV+wHtNYAiwFP!00000|Lnb8ZzD;TDE3#*RWDGIhNPJMW)2oJT1nMi+x0b)disu= z?Pf6Zkc?6?A}b=361%F5!5VA$!hW&QzE5-E?H2?4&@gcO!G;ao3vB&?@n0yw4}SAc zFycf+W|E>*QdRfN8tB68wn#>th&XZLe8q`#UhQbqnoFT~!mnQK@OURQ-+a@;U>e86 zPBfk3-~Zj^>}>wB|NCyUGq~8P?W9V+z$n`ZcW^qzDC5)BPHiXV3ZqP_)lRr`FqaEF zU?NtiW47e+L5$0*x=i&!wTsS}$}s+h8Cb1mt5#YtnJ7KdOHrqRIYSlT#(?Ku| zn$77H&mVnx)(p~T&GdJ@Ve*%lQ_v0D{V?cm&Z*5htcC6A6x)+WX`<)+^77GAzWn*; z)Pq(l>~_NT;M$ye{Q#%k=@eT+{_b$~_tVeg7r~_U^UtXd&1TpP!p?AGPEp+LOgddN zr^`J3-RIx7UbOm;Kf5^l`R6n+^b`!kPV?HFTEQ?PJe`00a@G1>=VANzr=JIZ0U94b zvl|B8u(i2&Q5;0ANyp6T5B;P5$)Q@ttEgGWVS-F!J(@Ey!$i*NGoH=!38YGzh`1CCBB{&9hTPn4v($Ad$M9mj=NhQpo(c8c?`}G~h&M5@LM8z;?@Z&J>p#G#8+7#&xDl1BlpS zV!mIwUmU2ZR121Dz)V0+USP(T5>P`V7ioeSQ-BFXJW6B*3BYCwXS7VznbZ(V4Jp%7 z18NizH^3VXv1gG?(f)0)>_6Axkbo$+{)oT*QJ68Rdjly`%*;SZnL&&REToVMQ+a0q zQbELG=J&&IQA`ky6f$*TaKA)ljunfgr^?+(iX{qj zMp^@0CJAFLg;#%hs_ zi8w7OLxdtzEHNaCFEGjtg^tBlL7BxKjUh&z=5PO5(^fd_!6aX#_Vh84q5aKa!VV)B zhOWbr2paGh5fXF8#gxzT3W<%yRpW)0Vhk^HG#Jk~3WTvau^Bo-3SqQkS2x-jk3_2h zpW2gN($R!zv!5u6-HTDjXi)U_|41^vkcMfDM7u$xx`i2BBi51VG~kd3gnw(<12NfU zmAyE&x4(a6muvQ#x$R%yp>ThHB*x?Mgz5Q6d;-TeB6S4{Cpk~zkw^{4+qNlCuRAfO z|MqX5pFaEhlcV#qk(lue(p=9$Dn8=^K07%)hI)M}lSE$DGd9yS2Ty4vMlgm$g-n|- zU(=uP8XhfD7G?0-OfynEHAe$pkHqUb{d51R{9!*C2zR1Hcia%IoLl~hqX4FGjx1`e@nm&&k;GA zefMJEriGrK93DS8sXGSdK->8p%P6OlYiXbO@j9b>?Y8BV>kGFV=1+JHkNE{Ad@iM# z-V>hj8M|piu75CajtYb>S@iY?37UhDFlVdcyl6fX#NINZ@YnEInrZ(Ym1BD5fMQ}! z8cq}w(cGQYHJm(TNn($o!t#`R@-;FfHC$fbIKroo&OiI&@4h*I_W8+EH$c%NMz~bm z@8*#d(}YKv{b7M>hGyV3q4gS`V9fIcyoS$MgoY)X`O}$mi}2j5m}IdHGD#*Zx`5{i z6Xv_10jIb`Wkgg`A%^vHPW6hwWIh|66v}s<6$)gChh9mQx;>=FuEBG1?6PVK;5n>p`xFnE% z(adqHnh}J>0o_D|)o?t_>QOfM*=T%=LgRXq=9r5@6E4NYHGODPx2Zr$b2R(o#cWk> z)6p-LBdgm~nZu0+i#9sWPFVa}+sS0gqi-&7^&Z8)UQzrNE;$()k<5+$AJlf@JT-X8 zu<~8PboPx#6kDAP-LTs?Mg$4%?S2T?Bv0epn|L#>H5_bONKvmHu^3yOUc{!oN$;K( z(j=cv`BlX>I*enCE3(EM8YW`M+pv|`1IuI$>|{p-|OeC_?_%VnDQ$~ z4Ry)Ff;&DwIfhewnR7+fn)c>t<-2ejx}0HSFz!&Lxz2FG;P~Y4PCPb@Kw4V_3S(U< z_V$NwH}GOqDup^3Bg~ZO@-rmU{^>EP1&*7oJ2RNUSSmHoq~h|9NHb&S!ZL9;(k%!J3Oe=Z z`fFj!Bg-28<;1U%`T7AO^ZO?Hj>wCYi_Jba`<`lE?aJ89Qk~IPV@}d!%MLie9xvKy zc;2b#b@YuLsH%7*VvSuhZZ^7&AaFvnIsOSY0@eg)byvI|x`7xJFrH^ws>6c=%gGy& zTzGGF>8$pQUms*_cJSNk_bkflFLP9@-(FkKHSC=mOUCR+uII>vCHRG0#}_s7emz|k z;%k1SH9h$n-rn!XDA99XoK&*3P-kzy7ZuB;-B-?J))?8@j6Dl4wb3fRmu}tH*HK{i z#Q^#H<(o^hi(*%zURhy z^88?v;rQnM09f#XrcZYOnS^oWSh;S$b^7>tv;VaNv?(!d^OH5tx4lQ83kXz7D%3X` z_TsAKQ?2H&U{}jjsth;iu-J(`OGp*h%k9Se&ta5074#?3(}!WZ`yM^rHMvbsrzT!c zZ#SEpdU~_do&-_9IqgodJL$Ln0(yGJ^o8C~)X&X#*Y)#l>gBIC)mcYD&*!FR5SDz8 zj@mPo~<(p*7XFLNGU7^`cKthw{Kk?xjQ$`#XSj!G&Dy4ylRSKACA z2ro-JeKXK1n{zR~e3M%whiy_yj$l)Av|BAU zZ1;Kt){lcp`@X>*zHNzAFFeldFCN_?RC`y1#wYNpWXVX}<>r64WJsmCz6Y!8*Zs+j zZcY~&qBE68H_=G(bYd2?A?_hDC zQ{I&;)8p})<70vXG%AR>c>Dc=NA#tz!5#^)$a7SqU2z>r!y(MM2pX`z|6IvQVhjOp z`p{fsx#`1wzgHIOFl9-`Cah`9D(i`WVjNbcne`f*V?2fxj2jEgSj;lkc%d7P9)4da zx*aphkrX!wd;{zM{?GsT|9t;{0?){qrR7BVn;18s>C2Zq&Yk5r65TbtE7UnAc4A(o zHYZkWL=myMR19`+R?G{JT}0|N*K;H>{q9fy2#4bB_X*cruScL*gVdZI*UX)u`w0N3 z*WDEU_HTZFa(edY*;6MHGB$I68D(JXhk0}}veq_H=Ow`urf9`Q;|xa&^i$_gED!iP z8!v*BHl1UewhA5T@=|+H51oJCp6mtPYNp1JzwRsq%@=uMEPgD# zjYeqWRR(7nrUdjcU!F;6o+;y#UC)Bru(s;Vt^;b9jUOi~`_Wq)G zB;7C=gze#N60~e`qXhL$N(tJ!7B1P2dQl6zy#cl`7*2m0+v~_FtF}j-1m;y%UogRD z=5Sr`B)Q=h@JQj?e};tFMljwE!g+#bMtn(8-UVk0(QKmYr57?G2U5d*p>T={MTBD; zZd6_xx{yU1%&E+=dHW;eMv;`6j3k8{W-Liq?5Nhx#Kj}p*-_BV35^8H4y+*MF@O6b z-5!unmX!;$A&k& z?8oCGO7a!j$Vk5?4Uu~LLuTZ52)n@^oMD!yMJV`SlT0BfwqP1o2CYt+sqAQC_>R>n z=7w)5&Wu>)BH}5#Io|$fIGI5=Y`@1VQfYG2$t;=_C$n|UE7ECp2h;AL&4OufFdW8z zHQm-(@D=GlXIV7ItgvL?{qZ}P=1D?|XPPi!G!*N~cz$|Zct^=F&E`txvpIQSV4{#+ z#PU+ypxJ(*Dc0y^j@qyjmSQYn1^@73f)2re}v<8lzNQl&6$&I+HL8h%Ru>!B) z0u#(w4L)-yJ3IaymO&#JHk#Y?;!4L5&Z0SK$J!XQ3&w@#hGUQU8a#b=UU*CoG<6sQ z(}Z`Xb-u8QSOx0X{@qZy$oK+nq|~X6liJ-I!|}6|GXi_^Zy%kV69#jQFn&UeCqA{^ z8Vh!Ty9ryN+8b|<@L0}fs2U0>Q1@n#ZYIWrwq344IH1h0I{wFt+*bp5drbcGch_%`^9t`RsLzN9Oz8y_;cwDb)qh zEycZCid%99V|Q){5~^Ln7FdHzo&7RrXEJG!#(4E9EB+Rl+`QU(-0s-bs!=vMq z!^e;Qi<4tGeE!J#8)814qSE$UDrDxkeog`7=8Oi7Cd^q3gI|>~cGQ=sh_GpoljCXT zoR}3Cbi7UXw~jT8E63sR`6CT%X4J%Ni(Eb!!){fCXql@B_dLK;)M#`LjJKhl=1Wdf z1Dz!a#>QC8!0<#3u?ro=xPbSCTZ};g;l<#g(JU8d{X0v)4x0ULEF*o8s#qU9KRqs) zZ{6?jq8z|$Lc|%12O(nz!G13Gu+Bw>GfOd6nqLQ^Q;f8SKp%?U=93NS?G3Em>%t~# zFfa;no;YoM6K0H>&x8kRKNV2zM+>*A?$tb{>YByxR?wMss$0nPp#V&uEZk1tHmpI= zgmlhy1=<-~VhlY%F;i>-)ptO!Yc{+pUTUgvF6sjx7F17 zfB)z2{-c*aMo{RD-r9Gs*{Q>>F#Vm_-77$x<^sk#j_WKq4b! zW|pF*6gJL;dEYp#`XJnN4ev(M?4BoEj;dC$pc*7{HsfMe11n5wK7R6jBMn1(vE-0xCU%>}pbe9VuV5N(J^%ODI6_9RnW3W#AGT5`>-9CRg zMSlRzUf8-d9(S$Dc16F^k83F}g*jSDt5Xh>z#*|HXBctdK22*42(%V6)j14R3c^z9= z6LQI8q>w98yo~9LN)or!XqwPN2LmQ$nxL%6>@z=MLg9YqWKXAHjxkLP=@+_`r!NoH zj085wDTx?(V71l58l2R?>#Q1+Y3MYJEx+%<-`YQ}HG$XgJYku!#81qaH6J#JqZAk6wQ=rtDeg2k>D9;3ICQ}+Z-j_E#gEhxsrlzd~d-(E-r+;6g4RR5V%C;Cfyhb zfODGGPcWtJc)b-r*|o9nFkwIC?c?1#62W@#3)%fbc7Fx3Yh-%2)$VlnM#5b+dza!( zw7~^ra|FCf?bAbNU>Tqdr`qik?l5`G^CXjDn2(P(Tx0^3`S#B;j^8sbU_8U@REhbrVm6@)?!QUUW6(lAzvUueT# zLFtBpH`b&cIaO`6L;XaCYRV#nW0o;`Mfl2`p<;I^h&F^>{W1wb zXUfnZ5xSLZ_?CEM@!c^&Q zEwnBHHhx&iJVRA?ScVk{8DpL0*}V9UuEPt>s!-4pqh%?I4Tg%QMi-75o0$J>1o9>f z+!IcSSthkr+?Z4K%uZSMdX#V!+2@AyomxjM9lQ`m;*nUggvT(p>u6*$mW?SF@e?Li zyL)5cVkxa%9*)Sxdp<{+4#IfBxM-ZqSdJl%Avbi!_hwt_9R)#`OS(?Kul zM7?I*j3*yQTN+(b11U(ZN8AU9TdK=Y%%s)$tx9begJSd?70WjQX4ed6Cur<6qBdg+ zcN#(SR}FaXZk2NZ52`H|+!(_6007?oPyfS6*z@`ByMK81PydS%h}UA{{oVk)`|iI6 za6x6sssZ=>N5jFp@BZOm{x|dSOU5aJ#aN!kT5U86h%}Jh(2%$aoSi&AIXbt*H*SoH z{2F6UmuTzX-G3muUbzm)ZQ1o7eDn&yyMO=R(m>z6cj;yv+%j98u`Jiuzc*VY)uVU} z4~uc&-JibOx9Z&6;NZ*8PEJoC_}c&;KYH@$9DW1O9zHxfF@LO2^Y(Ep zC4`gp4YDM9KT5c19AcXVwYqV(aw~hDH+a>Z$62Fnzn%~OcjGt?MI_S0^rsrjzpbI*WBE%SD_^Ly+W~nD)(Hy@f1C4C0Oh(LK0T^z<7lnbDNQmY)KYY7|C$-vP zyUh2{Z*Mc+C!GQIS_9T@_HY;vK7uE`%z0*COj9xhV_1=ytuSH6VIuHFFy%MqQS{ZI91!uv{1n_!J}dpnrrhb}!rIV{Ng!+Ag=)V({>01M1$k z%lj;@ThxVn*)BhWEw(f3+bM^*v~)8(>zJ88(@YncngeU66+9lOQV19 z-lG^htUV07!$#2Sly=zQc026JJx8(E5-xE-{ClIo{9=dQI?7f%Y|#J34!d!be}#5f ziM4xwTkXGg{<2h^t+v2 zjNN{}*Xn+(R$*UWaBdy=kc)~+RZ%d0N&&W2!T6b-iQ&)aOx&tq{0!r4RWLq6jNMDY zFoLYp3Ohlg-S6Jnq4?yU3dTnWvU{pAzX-Bl1lccw>~5p@$1o~7pkM>R5;us!;BeRu zJN@k#`~Zyk5sp}r+y!$Vv=+~{?~J*7j^e>qF!x2q6K(L->a@b3b@S`j+w_BW7_>_L zptnUoXk)wC#HiJcJKbr}VK?XpHy4m8^@C`Rl{nQj$5>VqA;0?%|0`@}L!0s|e=6;L zHfa&(lq1IFfD5nXfNM|!8TdEwmBJSkeD$?Q<~DdxcfR4RKkiO3i{>!pN@sf`fmV7r zp_EBGTP+s|3a1ni^x%DSdoU7nF0#m&Ia{JnfYY$qRJbklkt8c1m@!ONz=X`^~uM6U@NzTFvGP!FCONNgf=BlmSy zpGtMfRIDEW7ap3PL%lx7Bz-^*))+I!Dg5cK1VrieRD|_Qp3UnNKJgc(DV?zdDGkL> zM^g7)eKD|{kb{~YuNhbe_6vOwu>;m`1|4j7daN1u+wCZbhQmo`FqpKO!|pI1wprBd zb_|{2BF$F+|A9GW8B0Q#u-LhV*~@4N>*_4>|rrecOr6;vgpEXY)ENB zuIiPq?nQJ2v825ZVPrJuPepL|6gOMyeD(X=04xo!eLmQNkU@-t zUb{Ky1Z>*vc5jMEyAwi;eU3VlD?kAYrZZHqvMFIHvnXm=R$eejTjVQPu*4#daw9Hm z*y4n_^h5W*DY6S6gh81CzrXF7&5*^d)^IS0d(CFE-@lgk;vNgv+}`&X&124H(kAw; zW@o32E~u|z$F`nhbYVo2!bOf$?5fiDHw1RUQN_%*|ILSBBryLgFf!N{_}ZVu?Li0Q z{*c9kHoKOE@qQRtE9{||y}oi}4kek}-4=uszAd){MY1Mq$sj132Ai?iAbrjwx?Ef_ z;)~1mk!fF6%mA^`kP8kGQz=q1pDoAEWkD{+swRvj@=OR4XCF$MK~Rkn9c+sdjR*a9 zt2Y_ou-BgsJ2&0L-DM+IbbpAp(3W?eQK}7#n)M)JOyKB|3#iRjDH3JY%#tIi6tAWr zUah_CqC5Qm@ZZDx0w{37ue_NY79$ulrT9M!;r6b-b#Jsi0I>F!%7s8h8G{h}mehoBa@um=Mxv5h8Y06+GC! zmDRg7_Mz{dl*u=O;p(m9>|Lan{Nt?5>kC{^}Znn?OLaE%MkRv{`g^rwRY>4VPZnk zM-CDMgIc@y^Ww{l(QW_y2n$-B+c9>Jao+v$-<>>u^zh{94@Zwr-u?04!I3Lmn(cPKIT#e-4e;@0Oevftu9_%MG2sGj9=|PdaHIl}0x6X(~s<}6$R zVRXIRX^hTA@F7d=48u<8-yLj=Dw_oDDMmxq8M0xg^|4EIQF3Pg>@l(_Ic>P@4L`cX z z{BX46?nynpk9Ib6ZpynFCs+PeV$skwd#>b6q1K_#CJ{qW1BK-HZPp-`mm-lYwx4Vc zkLC>>`$fn0TF2J=I`+yTyS=rpo%fE}F1q$QYhC;6U0pts5Pmfb%oo4{6$PMp77n8wT1caD&}bZNrl5(r zwpUhB!p)L)yi3&2!1N7^n?S$H=!HI@*{CpR1x0FU-#mSG{>{T@Upzg2P_sX~BuZx)i!MU=>TC1u8^4K9WsHp%8kVhQ zqt$E#{`)VP5|rKFSFRlG+{g@fNF(&xp^&2ft&uu5=-19JL=n%F%+oL&iFbec$NRZO zwleBpoGZULS3Y1GAEf%X!)7yVR#ew*_x3&38MgY9Ni<=tUKg7mq07vfQSwPa)96Z8 z+DyzwSNGa|a^H6kb|r(bTPE2ZY`YRd97Nsjq%#SI!Jygu7_CfmO1()1{Pi4G6(RH=IQS~|F-p_ z)qnii#o79Myt%O;7gUA6$P@7V^q5p+vn!-YD`W|eSxixwJVt9+SwDO3O1&2e##*vu0Wv=6?q7lntyE6CVl!YSpU&^$)vvwg6XlG%B~ zC1H^8iJ}o~au1fKV2fohQc7NK4Pvl%0jy}(DDiNXn{#HTI-#@~kg6OfOhYbs#uTT- z+%fvdjAC;Z=t`PRITPP9pM1n-9`VyzGO*5wMZSV5H$&81z0b^LeA9fEbAPW;L3}Dz zamnho0Fz=17EIY>h0jlq8)hS@UVe@uW4_2fjbXP5mU0CKElgAKWSH{^uhA0I_N2O! z{4(Yr6IA;@uzryTh0H+aK({ zX<}DO$|=Yx97$nF8+M<|qdfxm_QwT1&0C^+`vcp;ni}v}0##1o%H}H+kSVI0p)z9` zCjQ8mMP7<5h`YpRZeHYGjqrM@Sel};)q@%F3N8ha;M*T4tv=H=dmdQK(u~DYmF_FQ zmuZeMD=Xp~9aLwS>VhNb3WKwV3L0>{G50e+_m$N`P*cn2)+1H}6iZ`?7)&J0=2oV? zs1OxW^^}2Bo_q#nDV!wU8de=QP7TT)`@%%z85J|G~zW?@=z#p>6R=4W(3!2;X z%gJ=YqDgbeI{j{Y!ulVvNb?sLxz))klJJA3-VeJ$W$N34gW~28yZt_EHHR$j#=Vc2 zx{yUJdP8wGE|(=U620t2uD#Fw`RQ@ZzPfgbOKPj8=t3vi&_hbMumze6I9cQgu}dRA zy|P)kF+c%2ha{g-B_kK$K|NziS6z>6x@tRir3LyH!2DS@oQv7iYqw+~i>TZNX?9$C z7=Dx$$pg1u?Vf&{b)we@%MxrcyCI;=b2G0m#yBDE%iRKb*>3BS(o{3GO38y`&`#z( zX&rVIB+X)}%<_^PaVA|#Ik^Ijv)t(qCyv-ChxlVV*&^$j(f3lOFyb+@YItRyq)Amo z(}&P-yBD_l6&l_a=f%3M_Au@Z25Ua$c(6!DOgxjj|jGM_mFF+(^BsF zQmcAy(_lmF%6+U1;ZR{PU<75I3Y^lK+7x|GlO=kz2?C0WL%SrB3j!f}NQ|9RXfDzS zTeoP~vzWheEG>y~mMZV6Tuil{Rfd?mZqZRbQRa-YOpy{}F~KU96_zJ}Rm=OA7Q+U= zE-wDemqkF z>N=HwQxPglXq0Nu*b9||{vqeO(+S&^a~*7pR%93rhpl+pZpXo-IoR%iy4$&y7sdd! z?hZQJ#>6&sjxncVkvuYDOBn1s@o~F6@~#s!a!3^-sPL$i0)?v7qOo)+pnF5No|n3H zAu_JXsNE`8NTtqZW)nBOXb#fennPhBZ(4(4;WSs$&{ZOxdhcbj%T7`{8ny{NE5@bK zhN)YL{v*cQM!X%1Vxerp)@`mKIE{1{K1;V?^-MZ~N-b+|>tDH)7+ zN-VjNQ!L3NF8q0RO-dn6l#CJA5=J}T0wVaCR1N2X27phfR;*zf^88d-DWR}+YxI|22qd2+nK3N z`uy9MtJd#258J;#{XDqXtnc>a9E~9KHQE`R898rcQSptH{FieS;Yifs(3kVDkH0WB zqMTNgtSHv$@DT{A>oLoD%tSrINf>i2qs8*$yt<9 zT63tz4+qg6lSJjx9C^Kl$&WGvn8RPm)Oe`Uvid!RG=}L z){UfL!koI5Y56F7V1$JfSeMf&#fDL2SiSD5-q2pbnWJFJ*gPJON5Yi_dqw}P3%0;F z2P5(ByFb1A?vL-j`y==|Ve9WeKLP)Y9qP8$Tv>Fi+M8*O89ifN?``X?u>|Un8Rp9} z)V_w0u7Q&Y+JO28dUomo`qqtfRsQ&$`_aV)*yMTT*9C6ZCbBk(go_J$Yh=|(WBl72 z;u4DaZXFMw0NrU*KUa5$%4MUf8*;{j{l+4`a(G_b3y;%tw;Hz_-WTE8n#~BIx-%mY zJ2w;Mitzd10bV^5Sa+dpMFWsixHOz5n{(|g42k-UEt{bDY=$a?SN2@b(NMdd%RGrG z@S@Vq^hkJwLSqPrDU0ShJO-F@~STFs;Nku&+^znYr#qqzW6;$W<95Am?+0h18i#ZpcIi zif&Kp7BPp9;K>(f=Wrq9WdTVVE36>Ek$Cll3%+1U4W=?8Cg#br>_UM6+HK>6>G?>! zGT%z!qXbN2M+j7d%2uk=Hhgn`-&2C#-?uMNzj8s|N99>t2js_gF@+jWh@6x}OC z|MhO=g9TG1^K0b!l$i}P#=qyf=7-m|YQ^h93DA3 zd=2AQRD?<iKRZOWgH_72nj9&x*Ed4IguHVU8$CH-lC~V81CFu1&XWoT2(4w{bTNx?yWj(Sf$b zl(J#F8N}Eg_J@OL+8lm_4n!(4)JXwS6ambwkzD$1ti`v?FYp5{WxTpNefqmi3p_!D-@d_+ zZau(+PA#b>o+~q0_@LL3`@Z;wT4|-mHmrSxQAUm#Uuu|bsWfcwu&Jzhak*Y`^p+*t zti4wh+skc@HE{*2wfe+2_={a7zG7*V0W;b`l@X;bm#tJ6*zQ7z#z_J2J`~ch{eLK;oyPVT`7(JVwK1t-IVPI6$H-uNC9i!PkXW4wT6OMN7lp*N-dz}JV}-!D9!mh>nD z6BF3qzvkQYefRfmTvbuCpE(zD%GP{ZZ+{fd|LLmDug5-oW!d)EyYnndb$H;@1zqAq z^9&F6V3#yq1IEU5?7<4Yaw4GY<~K{_;$W{vwGqqciqdjj!GuK@GetSFU&sm7=E)SF z80VC$b1Qu?}v}yl~yoeX{20=00BLMlW_uVPmKm{9pg@ z=l}iB|M}1V>CgZ1&;O~e*%V<_Br{ITCK2Ta-ExyCX3IeMJxin-iGYqn&=27?9MYqr zUCl))$>09KZcpU(8jcOOef!TEcFkh0pPAeGzaEKTV8*WB|KEr8DmsruFf=1mF=ZfC zKa(yN$>ER!T~;E3-SKlJ7hF5Wb!hq1cn@yqKVC1V_AFcUlp4ZpgiLxAlE>et7mdJbiZl=;5QI!}CYao_;+NkB&n~jBQj`7}ujY zU!+WYi#QTz6fGZC>NtO4uQ_o>Ht#nIS-i{RJzE4gQyiDpVK>c_1QhB#`H(B)ZrE;C zUo{57wy*~rwxedV(;Dg?g_& zh~r_q6C;~;SpO#L>jSX^hjeM~m#PaS{1oeHZM#UEG;tUVf(Z_B5_J08 zj4%9Kdyt=>9>WT#=!MM>H1%RO#)?IW#a;QTxYY6023Rb?r5f6}&L*im&9huF4H`A^ z)(=8fYwgyxwe4oBIhl6)t^Tyn7>n89V{AUh>8gy*nZ24Z`>ofSn#X{DUWes?d+HWbrO4jzu?*hKlsojcV$k74PEb$ zE8Ru*Nth(^W$uEIy`5;MWkZiB`e(`U(&!M(=%Kl*F{o3H$`-gb3J}HF=` computations. The architecture follows a layered pattern where each layer handles a specific concern: + +``` +┌─────────────────────────────────────────────────────────────────────────────┐ +│ Hatchet Engine (External) │ +│ (Workflow execution, retries, UI) │ +└─────────────────────────────────────────────────────────────────────────────┘ + ▲ + │ HTTP/WebSocket + ▼ +┌─────────────────────────────────────────────────────────────────────────────┐ +│ @effectify/hatchet │ +├─────────────────────────────────────────────────────────────────────────────┤ +│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │ +│ │ Core │ │ Effectifier │ │ Workflow │ │ Logging │ │ +│ │ Config │ │ execute() │ │ Builder │ │ HatchetLog │ │ +│ │ Client │ │ ManagedRt │ │ task() │ │ withLogger │ │ +│ │ Context │ │ │ │ register │ │ │ │ +│ └─────────────┘ └─────────────┘ └─────────────┘ └─────────────┘ │ +│ │ │ │ │ │ +│ └────────────────┴────────────────┴────────────────┘ │ +│ │ │ +│ ▼ │ +│ ┌─────────────────────────────────────────────────────────────────────────┐ │ +│ │ ServiceMap Layer Composition │ │ +│ │ ┌──────────────┐ ┌────────────────┐ ┌────────────────────┐ │ │ +│ │ │HatchetConfig │ │HatchetClient │ │HatchetStepContext │ │ │ +│ │ │ Service │ │ Service │ │ Service │ │ │ +│ │ └──────────────┘ └────────────────┘ └────────────────────┘ │ │ +│ └─────────────────────────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────────┘ + ▲ + │ Effect.provide() + ▼ +┌─────────────────────────────────────────────────────────────────────────────┐ +│ User Application Code │ +│ ┌───────────────────────────────────────────────────────────────────────┐ │ +│ │ workflow({ name: "user-workflow" }) │ │ +│ │ .task({ name: "task1" }, Effect.gen(function*() { ... })) │ │ +│ │ .task({ name: "task2", parents: ["task1"] }, Effect.gen(...)) │ │ +│ └───────────────────────────────────────────────────────────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────────┘ +``` + +### 1.2 Data Flow + +The execution flow follows this sequence: + +``` +1. User defines workflow with EffectWorkflow builder +2. registerWorkflow() creates Hatchet workflow + tasks +3. Hatchet worker starts, listening for workflow triggers +4. Hatchet calls task function with (input, context) +5. effectifyTask converts Promise → Effect execution: + a. Inject HatchetStepContext as service + b. Run Effect with ManagedRuntime + c. Convert Success → return value + d. Convert Failure → throw (triggers Hatchet retry) +6. Logs via HatchetLogger sync to Hatchet UI +7. Parent task outputs accessible via ctx.parentOutput() +``` + +--- + +## 2. Effectifier Design + +### 2.1 Core Concept + +The **effectifier** is the bridge that converts `Effect` into a Hatchet-compatible Promise function `(input: unknown, ctx: HatchetContext) => Promise`. This is necessary because Hatchet expects async functions, not Effect computations. + +### 2.2 Effectifier Implementation + +```typescript +// packages/hatchet/src/effectifier/execute.ts + +import { Cause, Effect, ManagedRuntime, ServiceMap } from "effect" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import { HatchetStepContext } from "../core/context" + +/** + * effectifyTask: Converts Effect to Hatchet-compatible Promise function + * + * Key behaviors: + * - Injects HatchetStepContext as a service + * - Uses ManagedRuntime to execute the Effect + * - Converts Success → return value + * - Converts Failure → throw Error (triggers Hatchet retries) + */ +export const effectifyTask = ( + effect: Effect.Effect, + runtime: ManagedRuntime.ManagedRuntime, +) => { + return async (input: unknown, ctx: HatchetContext): Promise => { + // Step 1: Provide HatchetStepContext as a service + const effectWithContext = Effect.provideService( + effect, + HatchetStepContext, + ctx, + ) + + // Step 2: Execute with ManagedRuntime + const exit = await runtime.runPromiseExit(effectWithContext) + + // Step 3: Convert result + if (exit._tag === "Success") { + return exit.value + } else { + // Failure → throw for Hatchet retry mechanism + const error = Cause.squash(exit.cause) + throw error instanceof Error ? error : new Error(String(error)) + } + } +} + +/** + * createEffectifierFromLayer: Factory that creates an effectifier + * from a Layer + * + * The layer provides all dependencies needed by the Effects + */ +export const createEffectifierFromLayer = ( + layer: Layer.Layer, +) => { + // Create persistent runtime for all tasks + const runtime = ManagedRuntime.make(layer) + + return (effect: Effect.Effect) => effectifyTask(effect, runtime) +} +``` + +### 2.3 Error Propagation Strategy + +The effectifier must convert Effect failures to thrown exceptions because Hatchet's retry mechanism works on thrown errors: + +| Effect Result | Hatchet Behavior | +| --------------------- | --------------------------------------------- | +| `Exit.success(value)` | Return `value` to workflow | +| `Exit.failure(cause)` | Throw error → Hatchet retries (if configured) | + +**Critical**: Not all errors should trigger retries. The design allows users to: + +- Use `Effect.fail()` for recoverable errors (triggers retry) +- Use `Effect.die()` for fatal errors (no retry) +- Configure retries per-task in the workflow definition + +### 2.4 Runtime Lifecycle + +The ManagedRuntime is created once per workflow registration and reused across all task executions: + +```typescript +// In registerWorkflow() +const runtime = ManagedRuntime.make(layer) + +// Each task gets the same runtime +wf.tasks.forEach((taskDef) => { + hatchetWorkflow.task({ + name: taskDef.options.name, + fn: effectifyTask(taskDef.effect, runtime), // Same runtime instance + }) +}) +``` + +**Important**: The runtime should be cleaned up when the worker stops. This will be handled in the worker lifecycle management. + +--- + +## 3. Workflow Builder Design + +### 3.1 EffectWorkflow Class + +The `EffectWorkflow` class provides a builder pattern for defining workflows: + +```typescript +// packages/hatchet/src/workflow/workflow.ts + +import { Effect } from "effect" +import type { TaskDefinition, TaskOptions, WorkflowOptions } from "./types" + +export class EffectWorkflow { + readonly tasks: TaskDefinition[] = [] + + constructor( + readonly options: WorkflowOptions, + readonly dependencies: R = undefined as R, + ) {} + + /** + * Adds a task to the workflow + * Accumulates dependencies via union types + */ + task( + options: TaskOptions, + effect: Effect.Effect, + ): EffectWorkflow { + this.tasks.push({ options, effect } as TaskDefinition) + return this as any + } +} + +export const workflow = (options: WorkflowOptions) => new EffectWorkflow(options) +``` + +### 3.2 Type Inference Flow + +The type system tracks accumulated dependencies through the builder: + +``` +workflow({ name: "my-workflow" }) + .task({ name: "task1" }, Effect<..., ..., Database>) // R = Database + .task({ name: "task2" }, Effect<..., ..., EmailService>) // R = Database | EmailService + .task({ name: "task3" }, Effect<..., ..., Logger>) // R = Database | EmailService | Logger +``` + +This enables: + +- Full type inference for all dependencies +- Compile-time error if a required service is missing +- Layer construction from accumulated types + +### 3.3 Task Options Mapping + +The SDK uses different terminology than the internal types: + +```typescript +// packages/hatchet/src/workflow/types.ts + +import type { RetryOpts, TaskConcurrency } from "@hatchet-dev/typescript-sdk" + +export interface TaskOptions { + readonly name: string + readonly timeout?: string // SDK: execution_timeout + readonly retries?: number // SDK: retries (RetryOpts) + readonly rateLimits?: Array<{ key: string; limit: number; duration: string }> + readonly concurrency?: TaskConcurrency[] + readonly parents?: string[] // DAG dependencies +} + +export interface WorkflowOptions { + readonly name: string + readonly description?: string + readonly version?: string + readonly sticky?: boolean + readonly concurrency?: TaskConcurrency[] +} +``` + +### 3.4 Registration Flow + +The `registerWorkflow` function orchestrates the entire registration: + +```typescript +// packages/hatchet/src/workflow/register.ts + +export const registerWorkflow = ( + workerName: string, + wf: EffectWorkflow, + layer: Layer.Layer, +): Effect.Effect => + Effect.gen(function*() { + // 1. Get Hatchet client + const hatchet = yield* HatchetClientService + + // 2. Create runtime for effect execution + const runtime = ManagedRuntime.make(layer) + + // 3. Create Hatchet workflow + const hatchetWorkflow = hatchet.workflow({ + name: wf.options.name, + ...(wf.options.description && { description: wf.options.description }), + ...(wf.options.version && { version: wf.options.version }), + }) + + // 4. Register each task + wf.tasks.forEach((taskDef) => { + hatchetWorkflow.task({ + name: taskDef.options.name, + fn: effectifyTask(taskDef.effect, runtime), + ...mapTaskOptions(taskDef.options), // Convert to SDK format + }) + }) + + // 5. Create and start worker + const worker = yield* Effect.tryPromise({ + try: () => hatchet.worker(workerName, { workflows: [hatchetWorkflow] }), + catch: (e) => new HatchetError({ message: "Failed to create worker", cause: e }), + }) + + yield* Effect.log( + `Workflow '${wf.options.name}' registered on worker '${workerName}'`, + ) + + yield* Effect.tryPromise({ + try: () => worker.start(), + catch: (e) => new HatchetError({ message: "Failed to start worker", cause: e }), + }) + }) +``` + +--- + +## 4. ServiceMap Integration + +### 4.1 Service Hierarchy + +Three core services form the foundation: + +``` +ServiceMap +├── HatchetConfig (static configuration) +│ └── { token, host, namespace } +├── HatchetClientService (Hatchet SDK client) +│ └── HatchetClient instance +└── HatchetStepContext (per-task execution context) + └── HatchetContext from SDK +``` + +### 4.2 HatchetConfig Service + +```typescript +// packages/hatchet/src/core/config.ts + +import { Config, Effect, Layer, Schema, ServiceMap } from "effect" + +const HatchetConfigSchema = Schema.Struct({ + token: Schema.String, + host: Schema.String.pipe(Schema.defaultTo("http://localhost:8080")), + namespace: Schema.optional(Schema.String), +}) + +type HatchetConfigType = Schema.Schema.Type + +/** + * HatchetConfig: Static configuration for Hatchet connection + * Uses ServiceMap.Service (NOT Context.Tag from v3) + */ +export class HatchetConfig extends ServiceMap.Service< + HatchetConfig, + HatchetConfigType +>()("HatchetConfig") {} + +export const HatchetConfigLayer = (config: HatchetConfigType) => Layer.succeed(HatchetConfig, config) + +export const HatchetConfigLayerFromEnv = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(HatchetConfig)(Effect.map(Config.unwrap(config), (c) => c)) +``` + +### 4.3 HatchetClientService + +```typescript +// packages/hatchet/src/core/client.ts + +import { Data, Effect, Layer, ServiceMap } from "effect" +import { HatchetClient } from "@hatchet-dev/typescript-sdk" + +/** + * HatchetError: Tagged error for Hatchet-specific failures + */ +export class HatchetError extends Data.TaggedError( + "@effectify/hatchet/HatchetError", +)<{ + readonly message: string + readonly cause?: unknown +}> {} + +/** + * HatchetClientService: The SDK client instance + */ +export class HatchetClientService extends ServiceMap.Service< + HatchetClientService, + HatchetClient +>()("HatchetClient") {} + +/** + * HatchetClientLive: Creates client from config + */ +export const HatchetClientLive = Layer.effect(HatchetClientService)( + Effect.gen(function*() { + const config = yield* HatchetConfig + const hatchet = HatchetClient.init({ + token: config.token, + host_port: config.host, + }) + return hatchet + }), +) +``` + +### 4.4 HatchetStepContext + +```typescript +// packages/hatchet/src/core/context.ts + +import { Effect, ServiceMap } from "effect" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" + +/** + * HatchetStepContext: Per-task execution context from Hatchet SDK + * + * Provides access to: + * - input: workflow input (property, not method in SDK v1) + * - parentOutput(taskRef): output from parent tasks + * - log(): write to Hatchet UI + * - logger: structured logging + */ +export class HatchetStepContext extends ServiceMap.Service< + HatchetStepContext, + HatchetContext +>()("HatchetStepContext") {} + +/** + * getHatchetInput: Helper to extract typed input from context + */ +export const getHatchetInput = () => Effect.map(HatchetStepContext, (ctx) => ctx.input as T) +``` + +--- + +## 5. Logging Architecture + +### 5.1 HatchetLogger Design + +The custom logger intercepts Effect.log() calls and forwards them to Hatchet: + +```typescript +// packages/hatchet/src/logging/hatchet-logger.ts + +import { Effect, Logger, Option, ServiceMap } from "effect" +import { HatchetStepContext } from "../core/context" + +/** + * HatchetLogger: Custom Effect Logger that forwards logs to Hatchet UI + * + * Flow: + * 1. Effect.log() is called in user code + * 2. Logger.make() receives the log entry + * 3. Check if HatchetStepContext exists in the fiber + * 4. If yes → forward to ctx.log() + * 5. Always output to console for development + */ +export const HatchetLogger = Logger.make(({ logLevel, message, context }) => { + const msg = typeof message === "string" ? message : String(message) + const formatted = `[${logLevel.label}] ${msg}` + + // Check if we're in a Hatchet task context + const hatchetCtxOpt = ServiceMap.getOption(context, HatchetStepContext) + + if (Option.isSome(hatchetCtxOpt)) { + // We're inside a task → forward to Hatchet + hatchetCtxOpt.value.log(formatted) + } + + // Always log to console + console.log(formatted) +}) + +/** + * withHatchetLogger: Wraps an Effect with the Hatchet logger + * + * Note: Logger.replace doesn't exist in v4 → use Effect.withLogger + */ +export const withHatchetLogger = ( + effect: Effect.Effect, +): Effect.Effect => Effect.withLogger(effect, HatchetLogger) +``` + +### 5.2 Log Level Mapping + +Effect log levels map to Hatchet: + +| Effect LogLevel | Hatchet Method | +| --------------- | --------------------------------- | +| Debug | `ctx.logger.debug()` | +| Info | `ctx.log()` / `ctx.logger.info()` | +| Warning | `ctx.logger.warn()` | +| Error | `ctx.logger.error()` | +| Fatal | `ctx.logger.error()` | + +--- + +## 6. Schema Integration + +### 6.1 Input Validation + +```typescript +// packages/hatchet/src/schema/get-validated-input.ts + +import { Effect, Schema } from "effect" +import { HatchetStepContext } from "../core/context" + +/** + * getValidatedInput: Extract and validate workflow input + * + * Uses Schema.decodeUnknown from the effect package + * (NOT @effect/schema - it's all in one package in v4) + */ +export const getValidatedInput = ( + schema: Schema.Schema, +): Effect.Effect => + Effect.gen(function*() { + const ctx = yield* HatchetStepContext + const rawInput = ctx.input + const decode = Schema.decodeUnknown(schema) + return yield* decode(rawInput) + }) +``` + +--- + +## 7. Testing Strategy + +### 7.1 Unit Testing Utilities + +```typescript +// packages/hatchet/src/testing/mock-context.ts + +import { Effect, Exit, ServiceMap } from "effect" +import { HatchetStepContext } from "../core/context" + +/** + * createMockStepContext: Creates a mock Hatchet context + * Matches SDK v1.19.0 interface + */ +export const createMockStepContext = (input?: unknown) => ({ + input: input ?? {}, + parentOutput: async () => null, + log: async (msg: string) => console.log(`[HATCHET] ${msg}`), + logger: { + info: async (msg: string) => console.info(`[INFO] ${msg}`), + debug: async (msg: string) => console.debug(`[DEBUG] ${msg}`), + warn: async (msg: string) => console.warn(`[WARN] ${msg}`), + error: async (msg: string) => console.error(`[ERROR] ${msg}`), + }, + workflowRunId: () => "test-run-id", + workflowName: () => "test-workflow", + taskName: () => "test-task", + retryCount: () => 0, +}) + +/** + * runTestTask: Execute an Effect task with mock context + * Returns Exit for detailed result inspection + */ +export const runTestTask = ( + effect: Effect.Effect, + layer: Layer.Layer, + mockContext?: any, +): Effect.Effect, never, R> => { + const ctx = mockContext ?? createMockStepContext() + + return Effect.gen(function*() { + const runtime = yield* Effect.runtime() + + return yield* Effect.provideService(effect, HatchetStepContext, ctx).pipe( + Effect.exit, + Effect.provideLayer(layer), + ) + }) +} +``` + +### 7.2 Integration Testing + +Integration tests require Docker Compose: + +```yaml +# packages/hatchet/tests/integration/docker-compose.yml +version: "3.8" + +services: + postgres-test: + image: postgres:16-alpine + environment: + POSTGRES_USER: hatchet + POSTGRES_PASSWORD: hatchet + POSTGRES_DB: hatchet + tmpfs: + - /var/lib/postgresql/data + + hatchet-test: + image: ghcr.io/hatchet-dev/hatchet:latest + environment: + - HATCHET_SERVER_TOKEN=test-token + - DATABASE_URL=postgresql://hatchet:hatchet@postgres-test:5432/hatchet + depends_on: + postgres-test: + condition: service_healthy + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/health"] +``` + +--- + +## 8. Type Flow + +### 8.1 Input Type Propagation + +``` +User defines Schema + │ + ▼ +getValidatedInput(Schema) + │ + ▼ +Effect + │ + ▼ +Effect.gen yields validated input (type-safe) +``` + +### 8.2 ServiceMap Type Inference + +``` +workflow({ name: "wf" }) + .task(opts, Effect<..., ..., Database>) + .task(opts, Effect<..., ..., EmailService>) + │ + ▼ +EffectWorkflow + │ + ▼ +Layer + │ + ▼ +ManagedRuntime.make(layer) + │ + ▼ +Effectifier: (input, ctx) => Promise +``` + +--- + +## 9. Package Structure + +``` +packages/hatchet/ +├── project.json # Nx project config (follows prisma pattern) +├── package.json # Package manifest +├── tsconfig.json # TypeScript config +├── vitest.config.ts # Test config +├── src/ +│ ├── index.ts # Public exports +│ ├── core/ +│ │ ├── config.ts # HatchetConfig + Layers +│ │ ├── client.ts # HatchetClientService + HatchetError +│ │ └── context.ts # HatchetStepContext +│ ├── effectifier/ +│ │ ├── execute.ts # effectifyTask + createEffectifierFromLayer +│ │ └── types.ts # Internal types +│ ├── workflow/ +│ │ ├── workflow.ts # EffectWorkflow class + workflow() +│ │ ├── task.ts # task() function +│ │ ├── register.ts # registerWorkflow() +│ │ └── types.ts # TaskOptions, WorkflowOptions +│ ├── logging/ +│ │ └── hatchet-logger.ts # HatchetLogger + withHatchetLogger +│ ├── schema/ +│ │ └── get-validated-input.ts # getValidatedInput +│ └── testing/ +│ └── mock-context.ts # Test utilities +├── tests/ +│ ├── unit/ +│ │ ├── client.test.ts +│ │ ├── effectifier.test.ts +│ │ ├── logger.test.ts +│ │ └── workflow.test.ts +│ └── integration/ +│ ├── docker-compose.yml +│ └── workflow.test.ts +└── README.md +``` + +--- + +## 10. Key Design Decisions + +### 10.1 Effect v4 API Choices + +| Decision | Rationale | +| --------------------------------------- | ----------------------------------------- | +| `ServiceMap.Service` over `Context.Tag` | Context module doesn't exist in v4 | +| `ManagedRuntime.make(layer)` | `Effect.runtime()` doesn't exist in v4 | +| `Effect.withLogger(effect, logger)` | `Logger.replace` doesn't exist in v4 | +| `ServiceMap.getOption` | Context module doesn't exist | +| Schema from `effect` package | `@effect/schema` is deprecated/merged | + +### 10.2 Hatchet SDK v1.19.0 API Choices + +| Decision | Rationale | +| --------------------------- | ---------------------------------------- | +| `.task()` not `.step()` | SDK v1 uses task terminology | +| `ctx.input` property | SDK v1 has input as property, not method | +| `ctx.parentOutput(taskRef)` | Replaces deprecated `stepOutput()` | + +### 10.3 Error Handling Strategy + +- **Recoverable errors**: Use `Effect.fail()` → converted to thrown Error → Hatchet retries +- **Fatal errors**: Use `Effect.die()` or `Effect.exit()` → no retry +- **Tagged errors**: Use `HatchetError` for Hatchet-specific failures + +--- + +## 11. Risks and Mitigations + +| Risk | Likelihood | Impact | Mitigation | +| ---------------------------- | ---------- | ------ | -------------------------------------------- | +| SDK API changes | Medium | High | Pin to v1.19.0, verify on updates | +| Effect v4 breaking changes | Low | High | Use only verified APIs from effect-reference | +| Runtime memory leaks | Medium | Medium | Ensure runtime disposal in worker stop | +| Retry loop on startup errors | Medium | High | Use Effect.die() for fatal startup errors | + +--- + +## 12. Dependencies + +```json +{ + "dependencies": { + "@hatchet-dev/typescript-sdk": "^1.19.0" + }, + "peerDependencies": { + "effect": "catalog:" + }, + "devDependencies": { + "@effect/vitest": "catalog:", + "@types/node": "catalog:", + "typescript": "catalog:", + "vitest": "catalog:" + } +} +``` + +--- + +## 13. Next Steps + +1. **Initialize package structure** following `@effectify/prisma` patterns +2. **Implement core services**: HatchetConfig, HatchetClientService, HatchetStepContext +3. **Build effectifier**: effectifyTask with ManagedRuntime integration +4. **Create workflow builder**: EffectWorkflow with task chaining +5. **Add logging**: HatchetLogger with Effect.withLogger +6. **Schema utilities**: getValidatedInput +7. **Testing utilities**: createMockStepContext, runTestTask +8. **Write unit tests** (>90% coverage) +9. **Docker Compose setup** for integration tests +10. **Integration tests** against real Hatchet + +--- + +_Document created: March 2026_ +_Version: 1.0.0_ diff --git a/docs/prds/CHAT-original-idea-effectify-hatchet.md b/docs/prds/CHAT-original-idea-effectify-hatchet.md new file mode 100644 index 0000000..c7b5ef7 --- /dev/null +++ b/docs/prds/CHAT-original-idea-effectify-hatchet.md @@ -0,0 +1,709 @@ +¡Totalmente viable y de hecho es una excelente idea! Integrar un SDK basado en Promesas como el de Hatchet dentro del ecosistema de Effect es uno de los casos de uso más comunes e ideales. Te permite ganar tipado estricto de errores, inyección de dependencias limpia y un manejo de concurrencia mucho más robusto. +​Aquí tienes un Proof of Concept (PoC) de cómo se vería @effectify/hatchet. Lo dividiremos en tres partes: la definición del servicio (para inyección de dependencias), los wrappers de las acciones asíncronas, y el adaptador para que Hatchet pueda ejecutar tus steps definidos con Effect. +​1. Definición del Servicio y Errores +​Primero, envolvemos la instancia de Hatchet en un Context.Tag para poder inyectarla en nuestra aplicación, y definimos un tipo de error personalizado. + +import { Effect, Context, Layer, Data } from "effect"; +import { Hatchet } from "@hatchet-dev/typescript-sdk"; + +// Definimos un error tipado para nuestro wrapper +export class HatchetError extends Data.TaggedError("HatchetError")<{ +  readonly message: string; +  readonly cause?: unknown; +}> {} + +// Creamos el Tag para inyectar el cliente de Hatchet +export class HatchetClient extends Context.Tag("@effectify/hatchet/Client")< +  HatchetClient, +  Hatchet + +> () {} + +// Capa (Layer) para proveer el cliente en tu aplicación +export const HatchetLive = Layer.succeed( +  HatchetClient, +  Hatchet.init() // Inicialización real del SDK de Hatchet +); + +2. Wrapper de Acciones (Llamadas a la API) + ​Aquí envolvemos las llamadas a la API de Hatchet (que devuelven Promesas) usando Effect.tryPromise. Esto convierte las excepciones no controladas de las Promesas en errores tipados (HatchetError) en el canal de errores de Effect. + +// Wrapper para ejecutar un workflow desde tu app Effect +export const runWorkflow = (workflowName: string, input: any) => +  Effect.gen(function* () { +    // Obtenemos el cliente inyectado +    const hatchet = yield* HatchetClient; + +    // Envolvemos la Promesa en un Effect +    return yield* Effect.tryPromise({ +      try: () => hatchet.admin.runWorkflow(workflowName, input), +      catch: (error) => new HatchetError({ +        message: `Error al ejecutar workflow ${workflowName}`, +        cause: error +      }), +    }); +  }); + +3. Adaptador para Workflows (El "Effectifier") + ​Hatchet espera que los steps de un workflow sean funciones asíncronas (Promise). Para que tu app sea "nativa en Effect", quieres escribir tus steps devolviendo Effect. Necesitamos una función que traduzca tu Effect de vuelta a una Promise que Hatchet pueda entender al registrar el worker. + +import { Context as HatchetContext } from "@hatchet-dev/typescript-sdk"; + +/** + * Convierte un step escrito en Effect en una función async compatible con Hatchet. + * Nota: Para este PoC asumimos que el Effect no tiene requerimientos (never). +*/ +export const effectifyStep = ( +  effectStep: (ctx: HatchetContext) => Effect.Effect +) => { +  return async (ctx: HatchetContext): Promise => { +    // Convertimos el Effect en una Promesa para que Hatchet lo ejecute +    return await Effect.runPromise(effectStep(ctx)); +  }; +};[10:06 PM]¡Totalmente viable y de hecho es una excelente idea! Integrar un SDK basado en Promesas como el de Hatchet dentro del ecosistema de Effect es uno de los casos de uso más comunes e ideales. Te permite ganar tipado estricto de errores, inyección de dependencias limpia y un manejo de concurrencia mucho más robusto. +Aquí tienes un Proof of Concept (PoC) de cómo se vería @effectify/hatchet. Lo dividiremos en tres partes: la definición del servicio (para inyección de dependencias), los wrappers de las acciones asíncronas, y el adaptador para que Hatchet pueda ejecutar tus steps definidos con Effect. + +1. Definición del Servicio y Errores + Primero, envolvemos la instancia de Hatchet en un Context.Tag para poder inyectarla en nuestra aplicación, y definimos un tipo de error personalizado. + import { Effect, Context, Layer, Data } from "effect"; + import { Hatchet } from "@hatchet-dev/typescript-sdk"; + +// Definimos un error tipado para nuestro wrapper +export class HatchetError extends Data.TaggedError("HatchetError")<{ +  readonly message: string; +  readonly cause?: unknown; +}> {} + +// Creamos el Tag para inyectar el cliente de Hatchet +export class HatchetClient extends Context.Tag("@effectify/hatchet/Client")< +  HatchetClient, +  Hatchet + +> () {} + +// Capa (Layer) para proveer el cliente en tu aplicación +export const HatchetLive = Layer.succeed( +  HatchetClient, +  Hatchet.init() // Inicialización real del SDK de Hatchet +); + +2. Wrapper de Acciones (Llamadas a la API) + Aquí envolvemos las llamadas a la API de Hatchet (que devuelven Promesas) usando Effect.tryPromise. Esto convierte las excepciones no controladas de las Promesas en errores tipados (HatchetError) en el canal de errores de Effect. + // Wrapper para ejecutar un workflow desde tu app Effect + export const runWorkflow = (workflowName: string, input: any) => +   Effect.gen(function* () { +     // Obtenemos el cliente inyectado +     const hatchet = yield* HatchetClient; + +    // Envolvemos la Promesa en un Effect +    return yield* Effect.tryPromise({ +      try: () => hatchet.admin.runWorkflow(workflowName, input), +      catch: (error) => new HatchetError({ +        message: `Error al ejecutar workflow ${workflowName}`, +        cause: error +      }), +    }); +  }); + +3. Adaptador para Workflows (El "Effectifier") + Hatchet espera que los steps de un workflow sean funciones asíncronas (Promise). Para que tu app sea "nativa en Effect", quieres escribir tus steps devolviendo Effect. Necesitamos una función que traduzca tu Effect de vuelta a una Promise que Hatchet pueda entender al registrar el worker. + import { Context as HatchetContext } from "@hatchet-dev/typescript-sdk"; + +/** + * Convierte un step escrito en Effect en una función async compatible con Hatchet. + * Nota: Para este PoC asumimos que el Effect no tiene requerimientos (never). +*/ +export const effectifyStep = ( +  effectStep: (ctx: HatchetContext) => Effect.Effect +) => { +  return async (ctx: HatchetContext): Promise => { +    // Convertimos el Effect en una Promesa para que Hatchet lo ejecute +    return await Effect.runPromise(effectStep(ctx)); +  }; +}; + +4. Ejemplo de Uso: Tu App Nativa en Effect + Así es como se vería el código de tu aplicación consumiendo tu librería @effectify/hatchet. + import { Effect } from "effect"; + // Importaríamos desde "@effectify/hatchet" + // import { HatchetClient, HatchetLive, runWorkflow, effectifyStep } from "@effectify/hatchet"; + +// 1. Definimos un step nativo en Effect +const myEffectStep = (ctx: any) => +  Effect.gen(function* () { +    yield* Effect.log(`Iniciando step con input: ${JSON.stringify(ctx.workflowInput())}`); + +    // Aquí puedes usar otras bondades de Effect, como delays, retries, etc. +    yield* Effect.sleep("2 seconds"); + +    return { success: true, processedAt: new Date().toISOString() }; +  }); + +// 2. Registramos el worker (generalmente esto se hace al inicio de la app) +const startWorker = Effect.gen(function* () { +  const hatchet = yield* HatchetClient; +  const worker = hatchet.worker("effect-worker"); + +  // Usamos nuestro wrapper para adaptar el step +  yield* Effect.sync(() => { +    worker.registerWorkflow({ +      id: "my-effect-workflow", +      description: "Workflow de prueba nativo en Effect", +      on: { event: "trigger:effect-workflow" }, +      steps: [ +        { +          name: "step-1", +          // Adaptamos nuestro Effect a la firma que espera el SDK de Hatchet +          run: effectifyStep(myEffectStep), +        }, +], +    }); +  }); + +  yield* Effect.log("Iniciando worker de Hatchet..."); +  yield* Effect.tryPromise({ +    try: () => worker.start(), +    catch: (err) => new HatchetError({ message: "Error al iniciar worker", cause: err }) +  }); +}); + +// 3. Ejecutamos un workflow desde algún lugar de tu app +const mainProgram = Effect.gen(function* () { +  yield* Effect.log("Disparando workflow..."); + +  const result = yield* runWorkflow("my-effect-workflow", { +    message: "Hola desde Effect!" +  }); + +  yield* Effect.log(`Workflow disparado con ID: ${result.workflowRunId}`); +}); + +// 4. Unimos todo y proveemos las dependencias +const runnable = Effect.provide( +  Effect.all([startWorker, mainProgram], { concurrency: "unbounded" }), +  HatchetLive +); + +// Ejecutamos la aplicación +// Effect.runPromise(runnable); + +Puntos clave de este PoC: + * Aislamiento de Promesas: Tu lógica de negocio principal se mantiene pura trabajando con Effect, y solo tocas Promesas en los bordes (cuando llamas a Hatchet o cuando Hatchet llama a tu worker). + * Manejo de Errores Tipados: Si Hatchet falla al disparar un workflow, obtienes un HatchetError manejable mediante Effect.catchAll o Effect.catchTag. + * Inyección Limpia: Gracias a HatchetClient (el Context.Tag), puedes simular fácilmente el SDK en tus tests proporcionando un Layer mockeado en lugar del HatchetLive. +¿Te gustaría que profundicemos en cómo pasar un Runtime de Effect dentro de effectifyStep para que tus steps puedan acceder a otros servicios inyectados (como una base de datos o un logger personalizado) en lugar de estar limitados a dependencias never?[10:06 PM]Si podrías profundizar más y como Hatchet sabría si hay un error y hacer un retry com hatchet si hacemos el catch del error con Effect de lado de la app ? Hatcher creo que detecta los errores throw quiero manejarlo nativamente con Effect +[10:06 PM]Es una excelente pregunta y toca el corazón de cómo integrar dos paradigmas distintos: el manejo de errores funcional de Effect y el sistema de reintentos basado en excepciones de Hatchet. +Para responder a tu duda: "Manejarlo nativamente en Effect" no significa ocultarle el error a Hatchet. +Si dentro de tu step usas Effect.catchAll y devuelves un resultado exitoso, Hatchet dirá "¡Genial, el step funcionó!" y pasará al siguiente paso sin hacer retry. Para que Hatchet sepa que debe reintentar, el Effect debe terminar en un estado de fallo (Failure). +La magia ocurre en nuestro wrapper. Necesitamos hacer dos cosas: + * Extraer tu Effect usando un Runtime (para que puedas inyectar dependencias como bases de datos en tus steps). + * Evaluar el resultado del Effect: si es un éxito, devolver la Promesa resuelta; si es un fallo, traducirlo a un throw clásico que Hatchet entienda para disparar su retry. +Aquí tienes cómo evolucionaríamos el código para lograr exactamente esto. + +1. El "Effectifier" Avanzado (Manejando Errores y Dependencias) + En lugar de usar Effect.runPromise a secas, usaremos Runtime.runPromiseExit. Esto nos da el control absoluto sobre el ciclo de vida del Effect antes de entregárselo a Hatchet. + import { Effect, Runtime, Cause } from "effect"; + import { Context as HatchetContext } from "@hatchet-dev/typescript-sdk"; + +/** + * Fábrica para crear nuestro adaptador de steps, inyectándole el Runtime de tu app. + * Esto permite que tus steps tengan dependencias (R) y no estén limitados a 'never'. +*/ +export const createHatchetEffectifier = (runtime: Runtime.Runtime) => { +  // Retornamos la función que adaptará cada step individual +  return ( +    effectStep: (ctx: HatchetContext) => Effect.Effect +  ) => { +    return async (ctx: HatchetContext): Promise => { + +      // Ejecutamos el Effect y capturamos su "Exit" (Éxito o Fallo) +      const exit = await Runtime.runPromiseExit(runtime)(effectStep(ctx)); + +      if (exit._tag === "Success") { +        // Todo salió bien en Effect, le pasamos el valor a Hatchet +        return exit.value; +      } else { +        // El Effect falló (ya sea un error tipado E o un defecto inesperado). +        // Usamos Cause.squash para aplanar el error de Effect a algo manejable. +        const error = Cause.squash(exit.cause); + +        // ¡Aquí está la magia para Hatchet! Lanzamos un throw nativo. +        // Esto le dice a Hatchet: "Este step falló, aplica tu política de retries". +        throw error instanceof Error ? error : new Error(String(error)); +      } +    }; +  }; +}; + +2. Creando un Step con Errores y Dependencias + Ahora vamos a crear un step que simule interactuar con una base de datos. Usaremos los canales de Effect para fallar a propósito y ver cómo el wrapper hace su trabajo. + import { Context, Layer } from "effect"; + +// Simulamos un servicio de Base de Datos +export class Database extends Context.Tag("Database")< +  Database, +  { queryUser: (id: string) => Effect.Effect } + +> () {} + +export const DatabaseLive = Layer.succeed(Database, { +  queryUser: (id) => Effect.gen(function* () { +    // Simulamos un fallo aleatorio de red para provocar un retry en Hatchet +    const success = Math.random() > 0.5; +    if (!success) { +      yield* Effect.fail(new Error("Timeout en la base de datos")); +    } +    return { id, name: "Usuario Effect" }; +  }) +}); + +// Nuestro Step 100% nativo en Effect +const fetchUserStep = (ctx: HatchetContext) => +  Effect.gen(function* () { +    const input = ctx.workflowInput(); +    const db = yield* Database; // Usamos nuestra dependencia inyectada + +    yield* Effect.log("Intentando obtener usuario..."); + +    // Si queryUser falla, el flujo se interrumpe aquí y el Effect +    // termina en Failure. El "Effectifier" lo atrapará y hará el throw para Hatchet. +    const user = yield* db.queryUser(input.userId); + +    yield* Effect.log("Usuario obtenido con éxito"); +    return user; +  }); + +3. Uniendo Todo: Registrando el Worker con el Runtime + Para que tu fetchUserStep pueda acceder a la Database, necesitamos crear el Runtime de Effect y pasárselo a nuestro createHatchetEffectifier. + const startApp = Effect.gen(function* () { +   const hatchet = yield* HatchetClient; // Asumiendo el Tag del ejemplo anterior + +  // 1. Obtenemos el runtime actual que ya tiene inyectadas todas tus capas (Database, etc.) +  const runtime = yield* Effect.runtime(); + +  // 2. Creamos nuestra función adaptadora inyectándole el runtime +  const effectifyStep = createHatchetEffectifier(runtime); + +  const worker = hatchet.worker("effect-worker"); + +  yield* Effect.sync(() => { +    worker.registerWorkflow({ +      id: "fetch-user-workflow", +      description: "Workflow con retries y dependencias", +      on: { event: "trigger:fetch-user" }, +      steps: [ +        { +          name: "fetch-user", +          // Adaptamos nuestro Effect. ¡Tendrá acceso a la Database! +          // Si falla, Hatchet lo reintentará según su configuración por defecto. +          run: effectifyStep(fetchUserStep), +        }, +], +    }); +  }); + +  yield* Effect.tryPromise(() => worker.start()); +}); + +// Proveemos las capas e iniciamos +const runnable = Effect.provide(startApp, DatabaseLive); +// Effect.runPromise(runnable); + +Resumen del Flujo de Errores + * Si ocurre un error de red (Effect.fail): Tu código Effect se detiene. El exit._tag será "Failure". Nuestro wrapper lanza un throw. Hatchet atrapa el throw, marca el intento como fallido y programa el retry. + * Si el error es crítico y NO quieres que Hatchet reintente: Podrías manejarlo dentro de tu Effect usando Effect.catchAll y devolviendo un objeto que indique fallo en la lógica de negocio (ej. return { status: "FATAL", message: "..." }). Como no hay un throw, Hatchet lo tomará como un step "exitoso" y pasará al siguiente step, donde puedes evaluar ese status. +¿Te gustaría que diseñemos un sistema para tipar y extraer el contexto de Hatchet (por ejemplo, el input o los resultados de steps anteriores) usando el Context nativo de Effect en lugar de pasar la variable ctx como parámetro a la función?[10:06 PM]Buenísima idea procede +[10:07 PM]¡Excelente! Vamos a llevar el PoC al siguiente nivel. +El problema de pasar la variable ctx como parámetro a la función es que rompe un poco la filosofía de Effect. En el ecosistema de Effect, si una pieza de código necesita acceder a información contextual (como el input de un request, o en este caso, el contexto del workflow de Hatchet), lo ideal es declararlo como un requerimiento en el entorno (R) del Effect. +De esta manera, la firma de tu step queda completamente limpia de parámetros de infraestructura. +Aquí tienes cómo construiríamos este sistema de inyección dinámica. + +1. El Tag del Contexto de Hatchet + Primero, creamos un Context.Tag específico para almacenar el contexto que Hatchet nos entrega en cada ejecución de un step. + import { Context, Effect } from "effect"; + import { Context as HatchetContext } from "@hatchet-dev/typescript-sdk"; + +// Definimos el Tag. +// Nota: En una librería real, usaríamos genéricos +// para tipar fuertemente lo que devuelve ctx.workflowInput(). +export class HatchetStepContext extends Context.Tag("@effectify/hatchet/StepContext")< +  HatchetStepContext, +  HatchetContext + +> () {} + +2. El "Effectifier" de Inyección Dinámica + Ahora, actualizamos nuestra función fábrica. En lugar de aceptar una función que recibe ctx, ahora aceptará directamente un Effect que requiere el HatchetStepContext. + El "Effectifier" se encargará de atrapar el ctx que envía Hatchet en tiempo de ejecución y lo inyectará (Effect.provideService) en tu Effect justo antes de ejecutarlo. + import { Runtime, Cause } from "effect"; + +export const createHatchetEffectifier = (runtime: Runtime.Runtime) => { +  return ( +    // El step ya no es una función. Es un Effect puro que requiere +    // el contexto de Hatchet además de las dependencias de tu app (R) +    effectStep: Effect.Effect +  ) => { +    return async (ctx: HatchetContext): Promise => { + +      // 1. Inyectamos el contexto específico de esta ejecución de Hatchet +      const stepWithContext = Effect.provideService( +        effectStep, +        HatchetStepContext, +        ctx +      ); + +      // 2. Ejecutamos el Effect fusionado con el Runtime de tu app +      const exit = await Runtime.runPromiseExit(runtime)(stepWithContext); + +      // 3. Manejamos el resultado (el mismo mecanismo de retries de antes) +      if (exit._tag === "Success") { +        return exit.value; +      } else { +        const error = Cause.squash(exit.cause); +        throw error instanceof Error ? error : new Error(String(error)); +      } +    }; +  }; +}; + +3. El Step 100% Nativo y Limpio + Mira cómo queda tu lógica de negocio ahora. Tu step ya no es una función que recibe parámetros raros; es simplemente una descripción de un programa (Effect) que pide lo que necesita. + // Simulamos el Tag de la base de datos del ejemplo anterior + // import { Database } from "./mis-servicios"; + +export const fetchUserStep = Effect.gen(function* () { +  // Extraemos el contexto de Hatchet de forma nativa +  const ctx = yield* HatchetStepContext; + +  // Extraemos otras dependencias +  const db = yield* Database; + +  // Obtenemos el input tipado del workflow +  const input = ctx.workflowInput(); +  yield* Effect.log(`Buscando usuario con ID: ${input.userId}`); + +  // Ejecutamos la lógica que puede fallar (y disparar el retry de Hatchet) +  const user = yield* db.queryUser(input.userId); + +  return user; +}); + +Por qué este enfoque es superior: + * Firmas Limpias: El tipado te dice exactamente qué necesita tu step para funcionar (Database | HatchetStepContext). No hay argumentos ocultos. + * Composición: Puedes extraer lógica común a funciones auxiliares. Por ejemplo, podrías crear una función const getUserId = Effect.map(HatchetStepContext, ctx => ctx.workflowInput().userId) y usarla en cualquier parte de tu pipeline sin tener que pasar ctx en cascada. + * Testing Perfecto: Para hacer un test unitario de fetchUserStep, no necesitas levantar un worker de Hatchet. Solo usas Effect.provideService(HatchetStepContext, mockCtx) y listo. +El siguiente nivel: Hatchet tiene una característica excelente donde puedes hacer ctx.log("mensaje") para que los logs aparezcan en su UI en tiempo real. ¿Te gustaría que hagamos un Logger personalizado de Effect para que cuando tú escribas yield* Effect.log("Hola"), este se envíe automáticamente a la UI de Hatchet usando ese mismo ctx inyectado?[10:07 PM]También excelente idea procede +[10:07 PM]¡Manos a la obra! Esta es una de las integraciones más elegantes que puedes hacer, porque te permite mantener tu código completamente agnóstico de Hatchet, mientras aprovechas su UI de observabilidad. +En Effect, cuando usas Effect.log("Hola"), por debajo el sistema envía ese mensaje a todos los Loggers que estén registrados. Los Loggers en Effect tienen acceso al Context (las dependencias) del Fiber (el hilo ligero) que ejecutó el log. +Sabiendo esto, podemos crear un Logger personalizado que intercepte el mensaje, busque si existe el HatchetStepContext en el entorno, y si lo encuentra, lo envíe a la UI de Hatchet. + +1. Creando el Logger de Hatchet + Vamos a usar Logger.make para definir cómo queremos procesar los logs. + import { Logger, Context, Option, Layer } from "effect"; + // Importamos el tag que creamos en el paso anterior + // import { HatchetStepContext } from "./context"; + +export const HatchetLogger = Logger.make(({ logLevel, message, context }) => { +  // 1. Extraemos el mensaje base +  const msg = typeof message === "string" ? message : String(message); + +  // 2. Buscamos el contexto de Hatchet dentro del Fiber actual +  const hatchetCtxOpt = Context.getOption(context, HatchetStepContext); + +  if (Option.isSome(hatchetCtxOpt)) { +    // 3. ¡Bingo! Estamos dentro de un step de Hatchet. +    // Usamos el método nativo de Hatchet para enviar el log a su UI. +    const ctx = hatchetCtxOpt.value; + +    // Hatchet soporta ctx.log(). Le añadimos el nivel de severidad (INFO, ERROR, etc.) +    ctx.log(`[${logLevel.label}] ${msg}`); +  } + +  // 4. Mantenemos el log local en consola para el desarrollo +  // (En producción podrías omitir esto o usar un logger JSON) +  console.log(`[${logLevel.label}] ${msg}`); +}); + +// Creamos una Capa (Layer) que reemplaza el logger por defecto de Effect por el nuestro +export const HatchetLoggerLive = Logger.replace( +  Logger.defaultLogger, +  HatchetLogger +); + +2. Actualizando tu Step (¡Cero cambios!) + Lo hermoso de este enfoque es que tu lógica de negocio no cambia en absoluto. Usas Effect.log como lo harías normalmente en cualquier aplicación Effect. + export const processPaymentStep = Effect.gen(function* () { +   const ctx = yield* HatchetStepContext; +   const input = ctx.workflowInput(); + +  // Este log aparecerá automáticamente en tu terminal +  // ¡Y EN LA UI DE HATCHET EN TIEMPO REAL! +  yield* Effect.log(`Iniciando procesamiento de pago para la orden: ${input.orderId}`); + +  // ... lógica compleja ... + +  if (input.amount > 10000) { +    // También soporta distintos niveles de log +    yield* Effect.logWarning("El monto es inusualmente alto, marcando para revisión"); +  } + +  yield* Effect.log("Pago procesado con éxito"); +  return { status: "success" }; +}); + +3. Inyectando el Logger en tu Aplicación + Para que el Logger empiece a funcionar, solo tienes que proveer su Layer al momento de arrancar tu aplicación o tu Worker. + import { Effect } from "effect"; + +const startWorkerApp = Effect.gen(function* () { +  const hatchet = yield* HatchetClient; +  const runtime = yield* Effect.runtime(); // + Otras deps +  const effectifyStep = createHatchetEffectifier(runtime); + +  const worker = hatchet.worker("payment-worker"); + +  yield* Effect.sync(() => { +    worker.registerWorkflow({ +      id: "payment-workflow", +      description: "Workflow de pagos con logs sincronizados", +      on: { event: "trigger:payment" }, +      steps: [ +        { +          name: "process-payment", +          run: effectifyStep(processPaymentStep), +        }, +], +    }); +  }); + +  yield* Effect.tryPromise(() => worker.start()); +}); + +// Aquí es donde ocurre la magia final. +// Proveemos el Logger a toda la aplicación. +const runnable = Effect.provide(startWorkerApp, HatchetLoggerLive); + +// Effect.runPromise(runnable); + +El resultado +Con esta arquitectura lograste: + * Separación de responsabilidades: Tus steps no saben que están enviando logs a internet; solo usan herramientas nativas de Effect. + * Observabilidad dual: Tienes logs en la consola local (para debuggear rápido) y en el dashboard de Hatchet (para monitoreo del sistema). + * Código limpio: Sin pasar ctx a funciones anidadas solo para poder loggear un evento. +Para cerrar el círculo de esta arquitectura Effect-native, el único punto débil que nos queda es que ctx.workflowInput() devuelve any, lo cual es peligroso. +¿Te gustaría que te muestre cómo integrar @effect/schema en nuestro "Effectifier" para que el input del workflow sea validado y parseado estrictamente antes de que tu step empiece a ejecutarse?[10:07 PM]Si adelante muestrame +[10:08 PM]¡Perfecto! Este es el toque final para hacer que la librería sea un wrapper robusto, de nivel empresarial. +El principal problema de ctx.workflowInput() es que te devuelve un tipo any. Si cambias la estructura del evento que dispara el workflow, pero olvidas actualizar el step, el código fallará en tiempo de ejecución (o peor, causará bugs silenciosos). +Con @effect/schema, validamos la estructura exacta de los datos antes de que empiece a correr tu lógica de negocio. Si los datos vienen mal formados, el step falla inmediatamente y rechaza la ejecución. + +1. El Extractor Validado (Tu nueva herramienta) + Vamos a crear una función de utilidad en tu librería @effectify/hatchet que se encargue de leer el contexto de Hatchet y pasarlo por un esquema de validación. + import { Effect } from "effect"; + import * as Schema from "@effect/schema/Schema"; + // import { HatchetStepContext } from "./context"; + +/** + * Lee el input del workflow desde el contexto inyectado + * y lo valida contra un esquema de @effect/schema. +_/ +export const getValidatedInput = (schema: Schema.Schema) => +  Effect.gen(function_ () { +    // 1. Obtenemos el contexto (si no está, Effect pedirá inyectarlo) +    const ctx = yield* HatchetStepContext; +    const rawInput = ctx.workflowInput(); + +    // 2. Creamos el decodificador basado en el esquema +    const decode = Schema.decodeUnknown(schema); + +    // 3. Ejecutamos la validación. +    // Si falla, el Effect se interrumpe automáticamente con un ParseError. +    return yield* decode(rawInput); +  }); + +2. Definiendo el Esquema de tu Input + En tu aplicación, en lugar de confiar ciegamente en lo que llega, describes exactamente cómo debe ser la forma de los datos usando @effect/schema. + // schemas.ts + import * as Schema from "@effect/schema/Schema"; + +// Definimos un esquema estricto para el pago +export const PaymentInputSchema = Schema.Struct({ +  orderId: Schema.String, +  // Podemos añadir validaciones de negocio directamente al esquema: +  amount: Schema.Number.pipe(Schema.positive()), +  currency: Schema.Literal("USD", "EUR", "MXN"), +  customerId: Schema.String +}); + +// Opcional: Extraemos el tipo de TypeScript para usarlo en otras funciones +export type PaymentInput = Schema.Schema.Type; + +3. El Step Final: Seguro y Tipado + Mira cómo se transforma tu lógica de negocio. Es increíblemente limpio y 100% Type-Safe. + // import { getValidatedInput } from "@effectify/hatchet"; + // import { PaymentInputSchema } from "./schemas"; + +export const processPaymentStep = Effect.gen(function* () { +  // Magia pura: Extrae, valida y tipa el input en una sola línea. +  const input = yield* getValidatedInput(PaymentInputSchema); + +  // ¡A partir de aquí, TypeScript sabe exactamente qué es 'input'! +  // Si intentas hacer input.monto (incorrecto) en lugar de input.amount, el editor se quejará. +  yield* Effect.log( +`Procesando cobro de ${input.amount} ${input.currency} para la orden ${input.orderId}` +  ); + +  // Si llegamos a esta línea, garantizamos que amount > 0 y currency es válido. +  // ... tu lógica de cobro con base de datos o API externa ... + +  return { status: "success", receiptId: "rec_12345" }; +}); + +¿Qué pasa si el Input es inválido? (Manejo de Errores) +Si disparas el workflow y omites el customerId, el Schema.decodeUnknown lanzará un error tipo ParseError. +Como vimos en los pasos anteriores, nuestro "Effectifier" atrapa cualquier error en la capa del Runtime. Así que tomará ese ParseError, lo convertirá en una excepción y se la enviará a Hatchet. En tu panel de Hatchet verás exactamente por qué falló el step (ej. "Missing property customerId"), deteniendo la ejecución antes de causar problemas de consistencia en tu base de datos. +Resumen del Stack @effectify/hatchet +Hasta este punto, hemos construido las bases de una librería increíblemente poderosa: + * HatchetClient: Inyección limpia del SDK de Hatchet. + * createHatchetEffectifier: El puente que traduce Effect a Promise y maneja los fallos para que Hatchet haga los retries. + * HatchetStepContext: Inyección dinámica del contexto sin pasar variables por parámetros. + * HatchetLogger: Sincronización transparente de los logs nativos de Effect con la UI de Hatchet. + * getValidatedInput: Parseo estricto y seguro en tiempo de ejecución de los eventos entrantes. +¿Te gustaría que diseñemos un patrón "Builder" o "Router"? Esto nos serviría para que registrar los workflows en tu worker no requiera tanto "boilerplate" de Hatchet, permitiéndote encadenar steps de manera más declarativa y puramente al estilo de Effect.[10:08 PM]Me parece bien el patrón builder +[10:08 PM]¡Excelente! Llegamos a la cereza del pastel. +En Effect, cuando construyes una librería, quieres que la API pública sea declarativa y que maneje la composición de dependencias (R) de forma automática. El problema actual con el registro de Hatchet es que te obliga a mezclar la configuración del worker con la definición de tus steps, y además tienes que llamar a effectifyStep manualmente para cada uno. +Vamos a crear un módulo Workflow usando el patrón Builder (muy similar a cómo funciona Router en @effect/platform o @effect/rpc). Esto nos permitirá encadenar métodos usando pipe e inferir todas las dependencias requeridas. + +1. El Módulo Builder (Workflow) + Primero, definimos una estructura inmutable que irá acumulando la configuración y los steps, manteniendo un registro estricto de los tipos y dependencias (R). + import { Effect, pipe } from "effect"; + // import { HatchetStepContext } from "./context"; + +// Definición interna de un step no evaluado +interface StepDefinition { +  name: string; +  effect: Effect.Effect; +} + +// Nuestra estructura principal inmutable +export class HatchetWorkflow { +  constructor( +    readonly id: string, +    readonly desc: string = "", +    readonly triggerEvent: string = "", +    readonly steps: StepDefinition[] = [] +  ) {} +} + +// --- API del Builder --- + +export const Workflow = { +  /** Inicia la definición de un nuevo workflow */ +  make: (id: string): HatchetWorkflow => +    new HatchetWorkflow(id), + +  /** Añade una descripción */ +  description: (desc: string) => (wf: HatchetWorkflow) => +    new HatchetWorkflow(wf.id, desc, wf.triggerEvent, wf.steps), + +  /** Define el evento que lo dispara */ +  onEvent: (event: string) => (wf: HatchetWorkflow) => +    new HatchetWorkflow(wf.id, wf.desc, event, wf.steps), + +  /** * Añade un step. +   * ¡Magia de TS!: Combina las dependencias previas (R) con las del nuevo step (R2) +*/ +  addStep: (name: string, effect: Effect.Effect) => +(wf: HatchetWorkflow): HatchetWorkflow => +      new HatchetWorkflow(wf.id, wf.desc, wf.triggerEvent, [ +        ...wf.steps, +        { name, effect } as any +]) +}; + +2. El Registrador de Workflows (Worker Wrapper) + Ahora necesitamos una función que tome este HatchetWorkflow declarativo y haga todo el "trabajo sucio": obtener el Runtime, crear el Effectifier y registrar todo en el SDK nativo de Hatchet. + import { Effect } from "effect"; + // import { createHatchetEffectifier } from "./effectifier"; + // import { HatchetClient } from "./client"; + +export const registerWorkflow = ( +  workerName: string, +  workflow: HatchetWorkflow +): Effect.Effect => +  Effect.gen(function* () { +    // 1. Obtenemos el cliente inyectado +    const hatchet = yield* HatchetClient; +    const worker = hatchet.worker(workerName); + +    // 2. Extraemos el Runtime con TODAS las dependencias (R) acumuladas en el builder +    const runtime = yield* Effect.runtime(); + +    // 3. Creamos nuestra fábrica de adaptadores con ese runtime +    const effectify = createHatchetEffectifier(runtime); + +    // 4. Transformamos nuestro builder al formato que espera Hatchet +    yield* Effect.sync(() => { +      worker.registerWorkflow({ +        id: workflow.id, +        description: workflow.desc, +        on: { event: workflow.triggerEvent }, +        steps: workflow.steps.map(step => ({ +          name: step.name, +          // Convertimos cada Effect en una Promise de Hatchet aquí mismo +          run: effectify(step.effect) +        })) +      }); +    }); + +    yield* Effect.log(`Workflow '${workflow.id}' registrado en worker '${workerName}'`); + +    // 5. Iniciamos el worker +    yield* Effect.tryPromise(() => worker.start()).pipe( +      Effect.catchAllCause(cause => Effect.logError("Error iniciando worker", cause)) +    ); +  }); + +3. El Resultado Final: Tu App Nativa en Effect + Mira cómo queda la experiencia de desarrollo de quien usa tu librería. Es código puro, declarativo, funcional y altamente tipado. + import { Effect, pipe } from "effect"; + // Importaciones de tu app + // import { processPaymentStep, sendEmailStep, Database, EmailService } from "./mis-servicios"; + // Importaciones de tu librería + // import { Workflow, registerWorkflow, HatchetLive, HatchetLoggerLive } from "@effectify/hatchet"; + +// 1. Construimos el Workflow declarativamente +const paymentWorkflow = pipe( +  Workflow.make("payment-workflow"), +  Workflow.description("Procesa el pago y envía un recibo"), +  Workflow.onEvent("trigger:payment"), +  Workflow.addStep("process-payment", processPaymentStep), // Requiere Database +  Workflow.addStep("send-email", sendEmailStep)            // Requiere EmailService +); + +// En este punto, TypeScript sabe que paymentWorkflow es de tipo: +// HatchetWorkflow + +// 2. Creamos el programa principal +const mainProgram = registerWorkflow("pagos-worker", paymentWorkflow); + +// 3. Proveemos las dependencias y ejecutamos +const runnable = Effect.provide( +  mainProgram, +  // Proveemos todas las capas necesarias +  Effect.all([ +    HatchetLive, +    HatchetLoggerLive, +    DatabaseLive, +    EmailServiceLive +]) +); + +// Boom. +// Effect.runPromise(runnable); + +¿Por qué este patrón es brillante? + * Inferencia de Dependencias Perfecta: Si en el futuro añades un tercer paso Workflow.addStep("audit", auditStep) que requiere un AuditService, TypeScript inmediatamente te marcará un error en Effect.provide exigiéndote que proveas el AuditServiceLive. Cero bugs en producción por dependencias faltantes. + * Cero Boilerplate: El desarrollador de la app no tiene que lidiar con Promesas, contextos oscuros, ni adaptadores. Solo define sus pasos como Effect y los une con el Workflow.make. + * Escalabilidad: Puedes tener un archivo separado que exporte un array de HatchetWorkflow y luego iterarlos en tu programa principal para registrarlos todos de un golpe en el worker. +Con esto, tienes esencialmente una librería completa, moderna y lista para producción. Combina la robustez de Hatchet (concurrencia, retries, observabilidad) con la seguridad y composición de Effect (tipado estricto, inyección de dependencias, manejo de errores funcional). +¿Te gustaría repasar algún punto en particular de este PoC, o crees que tienes lo suficiente para empezar a codificar la librería real? diff --git a/docs/prds/PRD-effectify-hatchet.md b/docs/prds/PRD-effectify-hatchet.md new file mode 100644 index 0000000..dbf8af2 --- /dev/null +++ b/docs/prds/PRD-effectify-hatchet.md @@ -0,0 +1,1392 @@ +# PRD: @effectify/hatchet + +**Product Requirements Document** + +--- + +## 1. Resumen Ejecutivo + +**@effectify/hatchet** es un package npm del **monorepositorio effectify** que proporciona una capa de integración nativa entre el ecosistema [Effect](https://effect.website/) (versión 4 beta) y [Hatchet](https://hatchet.run/), un sistema de workflows y background jobs distribuido. + +### Propósito en el Monorepositorio + +Este package sigue la filosofía del monorepositorio de crear **bindings nativos de Effect** para herramientas de terceros. Al igual que otros packages del monorepositorio (ej. `@effectify/react-router`, `@effectify/sql`), este módulo permite que aplicaciones nativas con Effect puedan integrar dependencias de terceros de manera типово-safe y funcional. + +### Propuesta de Valor + +| Antes (sin la librería) | Después (con @effectify/hatchet) | +| --------------------------------------------------- | ---------------------------------------------------------- | +| Steps escritos como funciones async (`Promise`) | Steps escritos como `Effect` puro | +| Manejo de errores con try/catch y thrown exceptions | Errores tipados en el canal `E` de Effect | +| Dependencias pasadas como parámetros | Inyección de dependencias via ServiceMap | +| Logging manual a Hatchet con `ctx.log()` | Uso nativo de `Effect.log()` con sincronización automática | +| Input no tipado (`any`) | Validación automática con `@effect/schema` | +| Configuración dispersa en múltiples lugares | Configuración centralizada via Effect Config | +| Tests contra servicios externos complejos | Tests contra Hatchet real en Docker Compose | + +--- + +## 2. Contexto Histórico: Los Ejemplos Originales (Effect v3) + +> **Nota importante**: Los siguientes ejemplos fueron extraídos de una conversación inicial con un LLM realizada hace unos meses. Estos códigos están escritos en **Effect v3** y contenían varios patrones que **NO son válidos en Effect v4**. Se incluyen aquí como referencia histórica del pensamiento inicial. + +### 2.1 Primera Versión del Effectifier (v3 - Obsoleto) + +```typescript +// ⚠️ CÓDIGO V3 - REFERENCIA HISTÓRICA SOLAMENTE +// NO USAR - contiene patrones incorrectos para v4 + +import { Context, Data, Effect, Layer } from "effect" +import { Hatchet } from "@hatchet-dev/typescript-sdk" + +// Este código usa Context.Tag que en v4 es ServiceMap.Service +export class HatchetClient extends Context.Tag("@effectify/hatchet/Client")< + HatchetClient, + Hatchet +>() {} + +// ❌ PROBLEMA: Runtime.runPromiseExit NO existe así en v4 +export const effectifyStep = ( + effectStep: (ctx: HatchetContext) => Effect.Effect, +) => { + return async (ctx: HatchetContext): Promise => { + return await Effect.runPromise(effectStep(ctx)) // ❌ Incorrecto en v4 + } +} +``` + +### 2.2 Problema del Runtime (v3 - Obsoleto) + +```typescript +// ⚠️ CÓDIGO V3 - REFERENCIA HISTÓRICA SOLAMENTE +// ❌ PROBLEMA: Runtime fue eliminado en v4 +export const createHatchetEffectifier = (runtime: Runtime.Runtime) => { + return (effectStep) => { + return async (ctx) => { + // ❌ Runtime.runPromiseExit NO existe en v4 + const exit = await Runtime.runPromiseExit(runtime)(effectStep(ctx)) + // ... + } + } +} +``` + +--- + +## 3. Goals (Objetivos) + +Los siguientes goals están basados en las features discutidas en el chat original. + +### Goal 1: Integración Nativa con Effect v4 + +**Descripción**: Cada step de un workflow Hatchet debe poder escribirse como un `Effect` puro, aprovechando todas las bondades del ecosistema Effect. + +**Detalles Técnicos (v4)**: + +- Usar `ServiceMap.Service` en lugar de `Context.Tag` +- El tipo `R` (dependencias) debe inferirse automáticamente +- Errores propagados correctamente para que Hatchet aplique retries +- Contexto de Hatchet disponible vía inyección de dependencias + +**Criterio de Éxito**: Un developer puede escribir un step que requiera una dependencia y se inyecte automáticamente. + +--- + +### Goal 2: Bidireccionalidad Effect ↔ Promise (Effectifier) + +**Descripción**: La librería debe actuar como puente bidireccional: + +1. **Effect → Promise**: Ejecutar un `Effect` dentro del runtime de Hatchet (que espera `Promise`) +2. **Promise → Effect**: Llamar a funciones del SDK de Hatchet desde Effect + +**Detalles Técnicos (v4)**: + +- Usar `Effect.runForkWith` junto con `Effect.services` para ejecutar Effects +- Convertir Failures a excepciones nativas para que Hatchet detecte errores +- Envolver llamadas SDK con `Effect.tryPromise` para tipar errores +- El Effectifier debe permitir que Hatchet ejecute steps definidos como Effects puros + +**Criterio de Éxito**: Un workflow que falla con `Effect.fail` debe aparecer como "failed" en el dashboard de Hatchet y triggear los retries configurados. + +--- + +### Goal 3: Inyección de Contexto de Hatchet (HatchetStepContext) + +**Descripción**: El contexto de Hatchet (input, output de steps anteriores, logger) debe estar disponible vía inyección de dependencias, NO como parámetros pasados a funciones. + +**Detalles Técnicos (v4)**: + +- Crear `HatchetStepContext` como ServiceMap.Service +- El step NO recibe ctx como parámetro, lo obtiene con `yield* HatchetStepContext` +- Permite acceder a: `workflowInput()`, `stepOutput()`, `log()` + +**Criterio de Éxito**: `yield* HatchetStepContext` devuelve el contexto con acceso a input (property `input`) y output de tasks padre (`parentOutput(taskRef)`). + +--- + +### Goal 4: Observabilidad Integrada (HatchetLogger) + +**Descripción**: Los logs generados con `Effect.log()` deben aparecer automáticamente en el dashboard de Hatchet sin necesidad de invocar `ctx.log()` manualmente. + +**Detalles Técnicos (v4)**: + +- Crear un Logger personalizado de Effect +- Detectar si existe `HatchetStepContext` en el Fiber actual +- Si existe, reenviar el log a `ctx.log()` de Hatchet +- Si no existe, comportarse como logger por defecto + +**Criterio de Éxito**: `yield* Effect.log("mensaje")` dentro de un step aparece en la UI de Hatchet. + +--- + +### Goal 5: Validación de Input con Schema + +**Descripción**: El input de un workflow debe validarse automáticamente contra un schema de `effect` (Schema) antes de ejecutar la lógica de negocio. + +**Detalles Técnicos (v4)**: + +- Usar `Schema.decodeUnknown` del paquete principal `effect` +- Proveer utilidad `getValidatedInput(schema)` +- Si la validación falla, el step falla con `Schema.ParseError` +- El tipo TypeScript debe inferirse del schema + +**Criterio de Éxito**: Input inválido falla con ParseError antes de ejecutar lógica. + +--- + +### Goal 6: API Declarativa Estilo Hatchet (Workflow Builder) + +**Descripción**: La API de definición de workflows debe ser muy similar a la de Hatchet, pero usando Effect. Los usuarios de Hatchet deben sentirse familiarizados. + +**Detalles Técnicos (v4)**: + +- Métodos similares a Hatchet: `workflow()`, `task()`, reemplazando el handler `fn` por un Effect +- Tasks definidos como Effects puros +- Inferencia automática de dependencias acumuladas +- Soporte para opciones de Hatchet: timeout, retry, parents (DAG), etc. + +**Criterio de Éxito**: Un workflow completo se registra en <10 líneas de código. + +--- + +### Goal 7: Configuración Centralizada (Effect Config) + +**Descripción**: Toda la configuración de Hatchet (token, host, namespace, etc.) debe estar centralizada usando Effect Config. + +**Detalles Técnicos (v4)**: + +- Usar `Config.Wrap<>` para definir configuración +- Crear servicio `HatchetConfig` via `ServiceMap.Service` +- El cliente se inicializa desde la configuración +- Permite sobrescribir en testing + +**Criterio de Éxito**: Un solo lugar para configurar Hatchet, usado por todos los servicios de la librería. + +--- + +### Goal 8: Testing Robusto con Docker Compose + +**Descripción**: Tests de integración contra Hatchet real en Docker, incluyendo PostgreSQL (necesario para Hatchet). + +**Detalles Técnicos**: + +- Docker Compose con Hatchet + PostgreSQL (no emulador) +- Tests de integración que ejecutan workflows reales +- Verificación de: registro, ejecución, retries, logs +- Helpers de testing para tests unitarios sin dependencias externas + +**Criterio de Éxito**: >90% coverage en tests unitarios, tests de integración passing contra Hatchet real. + +--- + +### Goal 9: Ejemplo en React Router Example + +**Descripción**: La app `react-router-example` del monorepo debe incluir un ejemplo funcional de la librería. + +**Detalles Técnicos**: + +- Worker de Hatchet corriendo como proceso separado +- Routes para dispara y monitorear workflows +- docker-compose con Hatchet + PostgreSQL + App + Worker + +**Criterio de Éxito**: La app ejemplo demuestra workflows funcionales. + +--- + +### Goal 10: Documentación del Package + +**Descripción**: El package debe incluir un README.md completo con setup, instalación y ejemplos. + +**Detalles Técnicos**: + +- Sección de instalación (npm/pnpm) +- Sección de configuración +- Ejemplos de uso básicos y avanzados +- API reference resumida + +**Criterio de Éxito**: Un developer puede usar la librería siguiendo solo el README. + +--- + +## 4. Arquitectura de Módulos + +``` +@effectify/hatchet/ +├── src/ +│ ├── index.ts # Exports públicos +│ │ +│ ├── core/ +│ │ ├── config.ts # HatchetConfig + Effect Config +│ │ ├── client.ts # HatchetClientService + HatchetClientLive +│ │ ├── error.ts # HatchetError (TaggedError) +│ │ └── context.ts # HatchetStepContext + getHatchetInput +│ │ +│ ├── effectifier/ +│ │ ├── execute.ts # effectifyTask + createEffectifierFromLayer +│ │ └── types.ts # Tipos internos +│ │ +│ ├── workflow/ +│ │ ├── workflow.ts # workflow() + EffectWorkflow class +│ │ ├── task.ts # task() function (replaces step()) +│ │ ├── register.ts # registerWorkflow() +│ │ └── types.ts # TaskOptions, WorkflowOptions +│ │ +│ ├── logging/ +│ │ ├── hatchet-logger.ts # HatchetLogger + withHatchetLogger +│ │ └── index.ts # Exports +│ │ +│ ├── schema/ +│ │ ├── get-validated-input.ts # getValidatedInput +│ │ └── index.ts # Exports +│ │ +│ └── testing/ +│ ├── mock-context.ts # createMockStepContext + runTestTask +│ └── index.ts # Exports +│ +├── tests/ +│ ├── unit/ +│ │ ├── client.test.ts +│ │ ├── effectifier.test.ts +│ │ ├── logger.test.ts +│ │ ├── schema.test.ts +│ │ └── workflow.test.ts +│ │ +│ └── integration/ +│ ├── docker-compose.yml # Hatchet + PostgreSQL +│ └── workflow.test.ts # Tests contra Hatchet real +│ +├── package.json +├── tsconfig.json +├── vitest.config.ts +└── README.md +``` + +--- + +## 5. API Propuesta - Effect v4 + +### 5.1 Módulo: Core - Configuración (v4 Pattern) + +```typescript +// src/core/config.ts +import { Config, Effect, Layer, Schema, ServiceMap } from "effect" + +// Esquema de configuración +const HatchetConfigSchema = Schema.Struct({ + token: Schema.String, + host: Schema.String.pipe(Schema.defaultTo("http://localhost:8080")), + namespace: Schema.optional(Schema.String), +}) + +type HatchetConfigType = Schema.Schema.Type + +// ✅ ServiceMap.Service en lugar de Context.Tag +export class HatchetConfig extends ServiceMap.Service< + HatchetConfig, + HatchetConfigType +>()("HatchetConfig") {} + +// Layer que provee la configuración +export const HatchetConfigLayer = ( + config: HatchetConfigType, +): Layer.Layer => Layer.succeed(HatchetConfig, config) + +// Layer desde Config.Wrap +export const HatchetConfigLayerFromEnv = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(HatchetConfig)(Effect.map(Config.unwrap(config), (c) => c)) +``` + +### 5.2 Módulo: Core - Cliente (v4 Pattern) + +```typescript +// src/core/client.ts +import { Data, Effect, Layer, ServiceMap } from "effect" +import { HatchetClient } from "@hatchet-dev/typescript-sdk" + +// ✅ Errores usando Data.TaggedError +export class HatchetError extends Data.TaggedError( + "@effectify/hatchet/HatchetError", +)<{ + readonly message: string + readonly cause?: unknown +}> {} + +// ✅ ServiceMap.Service para el cliente +export class HatchetClientService extends ServiceMap.Service< + HatchetClientService, + HatchetClient +>()("HatchetClient") {} + +// Layer para inicializar el cliente +export const HatchetClientLive = Layer.effect(HatchetClientService)( + Effect.gen(function*() { + const config = yield* HatchetConfig + // ✅ SDK real: HatchetClient.init() + const hatchet = HatchetClient.init({ + token: config.token, + host_port: config.host, + }) + return hatchet + }), +) +``` + +### 5.3 Módulo: Core - Contexto del Step (v4 Pattern) + +```typescript +// src/core/context.ts +import { Effect, ServiceMap } from "effect" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" + +export class HatchetStepContext extends ServiceMap.Service< + HatchetStepContext, + HatchetContext +>()("HatchetStepContext") {} + +// ✅ Utility to access input (SDK v1: input is a property, not a method) +// Usage: const input = yield* getHatchetInput() +export const getHatchetInput = () => Effect.map(HatchetStepContext, (ctx) => ctx.input as T) +``` + +### 5.4 Módulo: Effectifier + +```typescript +// src/effectifier/execute.ts +import { Cause, Effect, ManagedRuntime } from "effect" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import { HatchetStepContext } from "../core/context" + +// ✅ Effectifier: ejecuta un Effect en el contexto de Hatchet +// Convierte Effect → Promise para que Hatchet ejecute el task +// Si el Effect falla, hace throw para que Hatchet aplique retries + +export const effectifyTask = ( + effect: Effect.Effect, + runtime: ManagedRuntime.ManagedRuntime, +) => { + return async (input: unknown, ctx: HatchetContext): Promise => { + // 1. Inyectamos el contexto de Hatchet como servicio + const effectWithContext = Effect.provideService( + effect, + HatchetStepContext, + ctx, + ) + + // 2. Ejecutamos con ManagedRuntime (no Effect.runtime() que no existe en v4) + const exit = await runtime.runPromiseExit(effectWithContext) + + // 3. Convertimos el resultado + if (exit._tag === "Success") { + return exit.value + } else { + // ✅ Convertir failure a excepción para que Hatchet haga retry + const error = Cause.squash(exit.cause) + throw error instanceof Error ? error : new Error(String(error)) + } + } +} + +// ✅ Fábrica: crea un Effectifier desde un Layer +// Usage: const effectify = createEffectifierFromLayer(MyAppLayer) +export const createEffectifierFromLayer = ( + layer: Layer.Layer, +) => { + const runtime = ManagedRuntime.make(layer) + return (effect: Effect.Effect) => effectifyTask(effect, runtime) +} +``` + +### 5.5 Módulo: Workflow (API Estilo Hatchet) + +```typescript +// src/workflow/types.ts +import type { RetryOpts, TaskConcurrency } from "@hatchet-dev/typescript-sdk" + +export interface TaskOptions { + readonly name: string + readonly timeout?: string + readonly retries?: number + readonly rateLimits?: Array<{ key: string; limit: number; duration: string }> + readonly concurrency?: TaskConcurrency[] + readonly parents?: string[] // DAG: parent task names +} + +export interface WorkflowOptions { + readonly name: string + readonly description?: string + readonly version?: string + readonly sticky?: boolean + readonly concurrency?: TaskConcurrency[] +} + +export interface TaskDefinition { + readonly options: TaskOptions + readonly effect: Effect.Effect +} +``` + +```typescript +// src/workflow/workflow.ts +import { Effect } from "effect" +import type { TaskDefinition, TaskOptions, WorkflowOptions } from "./types" + +export class EffectWorkflow { + readonly tasks: TaskDefinition[] = [] + + constructor( + readonly options: WorkflowOptions, + readonly dependencies: R = undefined as R, + ) {} + + // ✅ Adds a task (replaces step() — SDK uses .task()) + task( + options: TaskOptions, + effect: Effect.Effect, + ): EffectWorkflow { + this.tasks.push({ options, effect } as TaskDefinition) + return this as any + } +} + +export const workflow = (options: WorkflowOptions) => new EffectWorkflow(options) +``` + +```typescript +// src/workflow/register.ts +import { Effect, ManagedRuntime } from "effect" +import { HatchetClientService } from "../core/client" +import { HatchetStepContext } from "../core/context" +import { effectifyTask } from "../effectifier/execute" +import type { EffectWorkflow } from "./workflow" + +// ✅ registerWorkflow: registra un EffectWorkflow en Hatchet +// Reemplaza el boilerplate manual de crear tasks con effectifyStep +export const registerWorkflow = ( + workerName: string, + wf: EffectWorkflow, + layer: Layer.Layer, +): Effect.Effect => + Effect.gen(function*() { + const hatchet = yield* HatchetClientService + const runtime = ManagedRuntime.make(layer) + + // ✅ SDK real: hatchet.workflow({ name }) + const hatchetWorkflow = hatchet.workflow({ + name: wf.options.name, + ...(wf.options.description && { description: wf.options.description }), + ...(wf.options.version && { version: wf.options.version }), + }) + + // ✅ SDK real: workflow.task({ name, fn }) + // Convertimos cada Effect.Task a un task de Hatchet + wf.tasks.forEach((taskDef) => { + hatchetWorkflow.task({ + name: taskDef.options.name, + fn: effectifyTask(taskDef.effect, runtime), + ...(taskDef.options.retries && { retries: taskDef.options.retries }), + ...(taskDef.options.timeout && { + execution_timeout: taskDef.options.timeout, + }), + ...(taskDef.options.parents && { parents: taskDef.options.parents }), + }) + }) + + // ✅ SDK real: hatchet.worker(name, { workflows: [wf] }) + const worker = yield* Effect.tryPromise({ + try: () => hatchet.worker(workerName, { workflows: [hatchetWorkflow] }), + catch: (e) => new HatchetError({ message: "Failed to create worker", cause: e }), + }) + + yield* Effect.log( + `Workflow '${wf.options.name}' registered on worker '${workerName}'`, + ) + + // Iniciar el worker + yield* Effect.tryPromise({ + try: () => worker.start(), + catch: (e) => new HatchetError({ message: "Failed to start worker", cause: e }), + }) + }) +``` + +### 5.6 Módulo: Logging + +```typescript +// src/logging/hatchet-logger.ts +import { Effect, Logger, Option, ServiceMap } from "effect" + +export const HatchetLogger = Logger.make(({ logLevel, message, context }) => { + const msg = typeof message === "string" ? message : String(message) + + // ✅ Buscamos el contexto de Hatchet dentro del Fiber actual + const hatchetCtxOpt = ServiceMap.getOption(context, HatchetStepContext) + + if (Option.isSome(hatchetCtxOpt)) { + // Estamos dentro de un task de Hatchet — enviamos log a su UI + hatchetCtxOpt.value.log(`[${logLevel.label}] ${msg}`) + } + + // Mantenemos el log local en consola + console.log(`[${logLevel.label}] ${msg}`) +}) + +// ✅ Logger.replace NO existe en v4 — usar Effect.withLogger +export const withHatchetLogger = ( + effect: Effect.Effect, +): Effect.Effect => Effect.withLogger(effect, HatchetLogger) +``` + +### 5.7 Módulo: Schema + +```typescript +// src/schema/get-validated-input.ts +import { Effect, Schema } from "effect" +import { HatchetStepContext } from "../core/context" + +// ✅ Extrae y valida el input del workflow contra un schema +// ✅ Schema es parte del paquete principal 'effect', NO '@effect/schema' +export const getValidatedInput = ( + schema: Schema.Schema, +): Effect.Effect => + Effect.gen(function*() { + const ctx = yield* HatchetStepContext + // ✅ SDK v1: input es una property, no ctx.workflowInput() + const rawInput = ctx.input + const decode = Schema.decodeUnknown(schema) + return yield* decode(rawInput) + }) +``` + +### 5.8 Módulo: Testing + +```typescript +// src/testing/mock-context.ts +import { Effect, Exit, ServiceMap } from "effect" +import { HatchetStepContext } from "../core/context" + +export const createMockStepContext = (input?: unknown): any => ({ + input: input ?? {}, // ✅ SDK v1: input es property + parentOutput: async () => null, + log: async () => {}, + logger: { + info: async () => {}, + debug: async () => {}, + warn: async () => {}, + error: async () => {}, + }, + workflowRunId: () => "test-run-id", + workflowName: () => "test-workflow", + taskName: () => "test-task", + retryCount: () => 0, +}) + +export const runTestTask = ( + effect: Effect.Effect, + mockContext?: any, +): Effect.Effect, never, R> => { + const ctx = mockContext ?? createMockStepContext() + return effect.pipe( + Effect.provideService(HatchetStepContext, ctx), + Effect.exit, + ) as any +} +``` + +--- + +## 6. Ejemplo Completo de Uso - Effect v4 (API Real del SDK) + +### 6.1 Definición de Workflow + +```typescript +// workflows/user-onboarding.ts +import { Effect, Schema } from "effect" +import { getValidatedInput, HatchetStepContext, task, workflow } from "@effectify/hatchet" + +const UserInputSchema = Schema.Struct({ + userId: Schema.String, + email: Schema.String.pipe(Schema.email()), +}) + +const fetchUserTask = task( + { name: "fetch-user", timeout: "30s" }, + Effect.gen(function*() { + const input = yield* getValidatedInput(UserInputSchema) + const db = yield* Database + yield* Effect.log(`Fetching user ${input.userId}`) + return yield* db.findUser(input.userId) + }), +) + +const sendEmailTask = task( + { name: "send-email", retries: 3, parents: ["fetch-user"] }, + Effect.gen(function*() { + const ctx = yield* HatchetStepContext + // ✅ SDK v1: parentOutput(taskRef) replaces deprecated stepOutput + const user = yield* Effect.tryPromise({ + try: () => ctx.parentOutput<{ email: string }>(fetchUserTask), + catch: (e) => new HatchetError({ message: "Failed to get user", cause: e }), + }) + const emailService = yield* EmailService + yield* Effect.log(`Sending email to ${user.email}`) + return yield* emailService.send(user.email, "Welcome!") + }), +) + +export const userOnboardingWorkflow = workflow({ + name: "user-onboarding", + description: "Onboarding workflow", +}) + .task(fetchUserTask) + .task(sendEmailTask) +``` + +### 6.2 Registro del Worker + +```typescript +// worker/index.ts +import { Effect, Layer } from "effect" +import { + HatchetClientLive, + HatchetConfig, + HatchetConfigLayer, + registerWorkflow, + withHatchetLogger, +} from "@effectify/hatchet" +import { userOnboardingWorkflow } from "./workflows/user-onboarding" + +const mainProgram = Effect.gen(function*() { + const cfg = yield* HatchetConfig + yield* Effect.log(`Conectando a Hatchet en ${cfg.host}`) + yield* registerWorkflow("main-worker", userOnboardingWorkflow, DatabaseLive) + yield* Effect.log("Worker iniciado") +}) + +const runnable = withHatchetLogger( + Effect.provide( + mainProgram, + Layer.mergeAll( + HatchetConfigLayer({ + token: process.env.HATCHET_TOKEN!, + host: process.env.HATCHET_HOST ?? "http://localhost:8080", + }), + HatchetClientLive, + DatabaseLive, + EmailServiceLive, + ), + ), +) + +Effect.runPromise(runnable) +``` + +--- + +## 7. Docker Compose para Desarrollo + +### 7.1 docker-compose.yml (Development) + +```yaml +version: "3.8" + +services: + # PostgreSQL requerido por Hatchet + postgres: + image: postgres:16-alpine + ports: + - "5432:5432" + environment: + POSTGRES_USER: hatchet + POSTGRES_PASSWORD: hatchet + POSTGRES_DB: hatchet + volumes: + - hatchet_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U hatchet"] + interval: 5s + timeout: 3s + retries: 5 + + # Hatchet Engine + hatchet: + image: ghcr.io/hatchet-dev/hatchet:latest + ports: + - "8080:8080" + environment: + - HATCHET_SERVER_TOKEN=${HATCHET_TOKEN:-test-token} + - HATCHET_SERVER_URL=http://localhost:8080 + - DATABASE_URL=postgresql://hatchet:hatchet@postgres:5432/hatchet + - HATCHET_PG_MIN_IDLE_CONNS=1 + - HATCHET_PG_MAX_IDLE_CONNS=2 + depends_on: + postgres: + condition: service_healthy + +volumes: + hatchet_data: +``` + +--- + +## 8. React Router Example + +### 8.1 Estructura + +``` +apps/react-router-example/ +├── src/ +│ ├── routes/ +│ │ ├── _index.tsx # Dashboard de workflows +│ │ ├── api.workflows.trigger.tsx # Endpoint para dispara +│ │ └── api.workflows.status.tsx # Status del workflow +│ ├── services/ +│ │ └── hatchet.ts # Cliente de Hatchet +│ ├── worker/ +│ │ ├── index.ts # Entry point del worker +│ │ └── workflows/ +│ │ ├── hello.ts # Ejemplo simple +│ │ └── user-onboarding.ts # Ejemplo completo +│ └── lib/ +│ └── hatchet-setup.ts # Setup de layers +├── docker-compose.yml # Hatchet + App + Worker +├── Dockerfile.worker # Worker单独 +└── package.json +``` + +### 8.2 docker-compose.yml del Ejemplo + +```yaml +version: "3.8" + +services: + # PostgreSQL + postgres: + image: postgres:16-alpine + ports: + - "5432:5432" + environment: + POSTGRES_USER: hatchet + POSTGRES_PASSWORD: hatchet + POSTGRES_DB: hatchet + volumes: + - hatchet_data:/var/lib/postgresql/data + + # Hatchet Engine + hatchet: + image: ghcr.io/hatchet-dev/hatchet:latest + ports: + - "8080:8080" + environment: + - HATCHET_SERVER_TOKEN=${HATCHET_TOKEN:-test-token} + - DATABASE_URL=postgresql://hatchet:hatchet@postgres:5432/hatchet + depends_on: + postgres: + condition: service_healthy + + # React Router App + app: + build: . + ports: + - "3000:3000" + environment: + - DATABASE_URL=file:./data.db + - HATCHET_TOKEN=${HATCHET_TOKEN:-test-token} + - HATCHET_HOST=http://hatchet:8080 + depends_on: + - hatchet + volumes: + - ./data:/app/data + + # Worker de Hatchet + worker: + build: + context: . + dockerfile: Dockerfile.worker + environment: + - HATCHET_TOKEN=${HATCHET_TOKEN:-test-token} + - HATCHET_HOST=http://hatchet:8080 + - DATABASE_URL=postgresql://hatchet:hatchet@postgres:5432/hatchet + depends_on: + - hatchet + +volumes: + hatchet_data: +``` + +### 8.3 Ejemplo Simple: Hello World + +```typescript +// src/worker/workflows/hello.ts +import { Effect } from "effect" +import { task, workflow } from "@effectify/hatchet" + +// Task simple que retorna un mensaje +const helloTask = task( + { name: "hello" }, + Effect.gen(function*() { + yield* Effect.log("Ejecutando hello task") + return { + message: "Hello from Effect v4 + Hatchet!", + timestamp: new Date().toISOString(), + } + }), +) + +export const helloWorkflow = workflow({ + name: "hello-world", + description: "Ejemplo simple de workflow con Effect v4", +}).task(helloTask) +``` + +### 8.4 Ejemplo Completo: User Onboarding + +```typescript +// src/worker/workflows/user-onboarding.ts +import { Effect, Schema } from "effect" +import { getValidatedInput, HatchetError, HatchetStepContext, task, workflow } from "@effectify/hatchet" + +// Schema de validación +const UserInputSchema = Schema.Struct({ + userId: Schema.String, + email: Schema.String.pipe(Schema.email()), + name: Schema.String, +}) + +// Task 1: Validar y crear usuario +const createUserTask = task( + { name: "create-user", timeout: "30s" }, + Effect.gen(function*() { + const input = yield* getValidatedInput(UserInputSchema) + const db = yield* Database + + yield* Effect.log(`Creando usuario: ${input.name} (${input.email})`) + + const user = yield* db.createUser({ + id: input.userId, + name: input.name, + email: input.email, + }) + + return user + }), +) + +// Task 2: Enviar email de bienvenida (depends on createUserTask) +const sendWelcomeEmailTask = task( + { name: "send-welcome-email", retries: 3, parents: ["create-user"] }, + Effect.gen(function*() { + const ctx = yield* HatchetStepContext + // ✅ SDK v1: parentOutput(taskRef) replaces deprecated stepOutput + const user = yield* Effect.tryPromise({ + try: () => ctx.parentOutput<{ email: string; name: string }>(createUserTask), + catch: (e) => new HatchetError({ message: "Failed to get parent output", cause: e }), + }) + const emailService = yield* EmailService + + yield* Effect.log(`Enviando email a ${user.email}`) + + yield* emailService.send(user.email, "Bienvenido a la plataforma!") + + return { emailSent: true } + }), +) + +// Task 3: Logging final (depends on sendWelcomeEmailTask) +const notifyAdminTask = task( + { name: "notify-admin", parents: ["create-user"] }, + Effect.gen(function*() { + const ctx = yield* HatchetStepContext + const user = yield* Effect.tryPromise({ + try: () => ctx.parentOutput<{ email: string; name: string }>(createUserTask), + catch: (e) => new HatchetError({ message: "Failed to get parent output", cause: e }), + }) + + yield* Effect.log(`Nuevo usuario registrado: ${user.name} <${user.email}>`) + + return { notified: true } + }), +) + +// Workflow completo (DAG con tasks) +export const userOnboardingWorkflow = workflow({ + name: "user-onboarding", + description: "Workflow de onboarding de nuevos usuarios", +}) + .task(createUserTask) + .task(sendWelcomeEmailTask) + .task(notifyAdminTask) +``` + +### 8.5 Routes del Ejemplo + +```typescript +// src/routes/api.workflows.trigger.tsx +import { type ActionFunctionArgs, json } from "react-router" +import { Effect } from "effect" +import { AppLayers, triggerWorkflow } from "~/lib/hatchet-setup" + +export async function action({ request }: ActionFunctionArgs) { + const formData = await request.formData() + const userId = formData.get("userId") as string + const email = formData.get("email") as string + const name = formData.get("name") as string + + const program = Effect.gen(function*() { + yield* Effect.log(`Triggering onboarding para ${email}`) + const result = yield* triggerWorkflow("user-onboarding", { + userId, + email, + name, + }) + yield* Effect.log(`Workflow iniciado: ${result.workflowRunId}`) + return { workflowRunId: result.workflowRunId } + }) + + const result = await Effect.runPromise(Effect.provide(program, AppLayers)) + return json({ success: true, workflowRunId: result.workflowRunId }) +} + +// src/routes/api.workflows.status.tsx +import { json, type LoaderFunctionArgs } from "react-router" +import { Effect } from "effect" +import { AppLayers, getWorkflowStatus } from "~/lib/hatchet-setup" + +export async function loader({ request }: LoaderFunctionArgs) { + const url = new URL(request.url) + const workflowRunId = url.searchParams.get("workflowRunId") + + if (!workflowRunId) { + return json({ error: "workflowRunId requerido" }, { status: 400 }) + } + + const program = getWorkflowStatus(workflowRunId) + const result = await Effect.runPromise(Effect.provide(program, AppLayers)) + + return json(result) +} +``` + +--- + +## 9. README.md del Package + +El package debe incluir un `README.md` completo: + +````markdown +# @effectify/hatchet + +> Integración nativa entre Effect v4 y Hatchet + +## Instalación + +```bash +npm install @effectify/hatchet +# o +pnpm add @effectify/hatchet +``` + +## Requisitos + +- Effect v4 (`effect` package) +- `@hatchet-dev/typescript-sdk` v1+ +- Hatchet Engine corriendo (ver docker-compose) + +## Configuración rápida + +### 1. Docker Compose + +```yaml +# docker-compose.yml +version: "3.8" +services: + postgres: + image: postgres:16-alpine + environment: + POSTGRES_USER: hatchet + POSTGRES_PASSWORD: hatchet + POSTGRES_DB: hatchet + + hatchet: + image: ghcr.io/hatchet-dev/hatchet:latest + environment: + - HATCHET_SERVER_TOKEN=tu-token + - DATABASE_URL=postgresql://hatchet:hatchet@postgres:5432/hatchet + depends_on: + - postgres +``` + +### 2. Definir un Workflow + +```typescript +import { Effect, Schema } from "effect" +import { getValidatedInput, HatchetStepContext, task, workflow } from "@effectify/hatchet" + +// Schema de validación del input +const InputSchema = Schema.Struct({ + name: Schema.String, + email: Schema.String.pipe(Schema.email()), +}) + +// Task como Effect puro +const greetTask = task( + { name: "greet", timeout: "30s" }, + Effect.gen(function*() { + const input = yield* getValidatedInput(InputSchema) + yield* Effect.log(`Hola, ${input.name}!`) + return { greeting: `Hola, ${input.name}!` } + }), +) + +// Definir workflow +export const greetWorkflow = workflow({ + name: "greet-user", + description: "Saluda a un usuario", +}).task(greetTask) +``` + +### 3. Iniciar el Worker + +```typescript +import { Effect, Layer } from "effect" +import { HatchetClientLive, HatchetConfigLayer, registerWorkflow, withHatchetLogger } from "@effectify/hatchet" +import { greetWorkflow } from "./workflows/greet" + +const main = Effect.gen(function*() { + yield* registerWorkflow("my-worker", greetWorkflow, Layer.empty) + yield* Effect.log("Worker iniciado") +}) + +Effect.runPromise( + withHatchetLogger( + Effect.provide( + main, + Layer.mergeAll( + HatchetConfigLayer({ + token: process.env.HATCHET_TOKEN!, + host: process.env.HATCHET_HOST ?? "http://localhost:8080", + }), + HatchetClientLive, + ), + ), + ), +) +``` + +### 4. Disparar desde tu App + +```typescript +import { Effect } from "effect" +import { HatchetClientService, HatchetError } from "@effectify/hatchet" + +const triggerGreeting = (name: string, email: string) => + Effect.gen(function*() { + const hatchet = yield* HatchetClientService + const result = yield* Effect.tryPromise({ + try: () => hatchet.admin.runWorkflow("greet-user", { name, email }), + catch: (e) => new HatchetError({ message: "Error al ejecutar", cause: e }), + }) + return result + }) +``` + +## API + +### `workflow(options)` + +Define un workflow estilo Hatchet. + +```typescript +const myWorkflow = workflow({ + name: "my-workflow", + description: "Descripción", +}) +``` + +### `task(options, effect)` + +Define un task como Effect puro (replaces step). + +```typescript +task( + { + name: "my-task", + timeout: "30s", // timeout opcional + retries: 3, // retries opcional + parents: ["other-task"], // DAG: dependencias opcionales + }, + Effect.gen(function*() { + // Tu lógica como Effect + return { result: "ok" } + }), +) +``` + +### `getValidatedInput(schema)` + +Extrae y valida el input del workflow (SDK v1: usa `ctx.input`). + +```typescript +const myTask = task( + { name: "process" }, + Effect.gen(function*() { + const input = yield* getValidatedInput(MySchema) + // input está tipado y validado + return input + }), +) +``` + +## Errores + +Todos los errores usan `Data.TaggedError`: + +```typescript +import { HatchetError } from "@effectify/hatchet" + +Effect.gen(function*() { + // ... +}).pipe( + Effect.catchTag("HatchetError", (e) => Effect.log(`Error: ${e.message}`)), +) +``` +```` + +## Testing + +```typescript +import { createMockStepContext, runTestStep } from "@effectify/hatchet/testing" + +it("should process step", async () => { + const mockCtx = createMockStepContext({ name: "Test" }) + const result = await Effect.runPromiseExit( + runTestStep(myStep, Layer.empty, mockCtx), + ) + expect(Exit.isSuccess(result)).toBe(true) +}) +``` + +## Licencia + +MIT + +```` +--- + +## 10. Estrategia de Testing + +### 10.1 Tests Unitarios + +- Sin dependencias externas +- Coverage >90% en módulos core +- Uso de `createMockStepContext` y `runTestTask` + +### 10.2 Tests de Integración + +**Docker Compose con PostgreSQL**: + +```yaml +# tests/integration/docker-compose.yml +version: '3.8' + +services: + postgres-test: + image: postgres:16-alpine + environment: + POSTGRES_USER: hatchet + POSTGRES_PASSWORD: hatchet + POSTGRES_DB: hatchet + tmpfs: + - /var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U hatchet"] + + hatchet-test: + image: ghcr.io/hatchet-dev/hatchet:latest + environment: + - HATCHET_SERVER_TOKEN=test-token + - DATABASE_URL=postgresql://hatchet:hatchet@postgres-test:5432/hatchet + depends_on: + postgres-test: + condition: service_healthy + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/health"] + interval: 5s + timeout: 3s + retries: 15 +```` + +**Tests de integración**: + +```typescript +// tests/integration/workflow.test.ts +describe("Workflow Integration", () => { + beforeAll(async () => { + // Esperar a que Hatchet esté listo + await waitForHatchet("http://localhost:8080/health") + }) + + it("should register and execute workflow", async () => { + // Arrange + const testTask = task({ name: "test" }, Effect.succeed({ ok: true })) + const testWorkflow = workflow({ + name: "integration-test", + }).task(testTask) + + // Act + const program = Effect.gen(function*() { + yield* registerWorkflow("test-worker", testWorkflow, Layer.empty) + yield* Effect.sleep("1s") // Esperar registro + + const hatchet = yield* HatchetClientService + return yield* Effect.tryPromise(() => hatchet.admin.runWorkflow("integration-test", {})) + }) + + const result = await Effect.runPromise(Effect.provide(program, TestLayers)) + + // Assert + expect(result.workflowRunId).toBeDefined() + }) +}) +``` + +--- + +## 11. Patrones Obligatorios de Effect v4 + +### ✅ Patrones OBLIGATORIOS + +1. **NUNCA usar try-catch en Effect.gen** +2. **NUNCA usar type assertions (`as any`)** +3. **SIEMPRE usar `return yield*` para Effects terminal** +4. **Usar `ServiceMap.Service` en lugar de `Context.Tag`** +5. **Usar `ManagedRuntime.make(layer)` en lugar de `Effect.runtime()` (NO existe)** +6. **Usar `Schema` del paquete `effect`, NO de `@effect/schema` (paquete separado NO existe)** +7. **Usar `Effect.withLogger(effect, logger)` en lugar de `Logger.replace` (NO existe)** +8. **Usar `ServiceMap.getOption()` en lugar de `Context.getOption()` (módulo Context NO existe)** + +### ✅ APIs Verificadas (Existentes) + +- `ServiceMap.Service` — reemplaza `Context.Tag` +- `Effect.runForkWith(services)` — ejecutar Effect con services +- `Effect.provideService(key, value)` — inyectar un service +- `Config.Wrap` / `Config.unwrap(wrapped)` — configuración type-safe +- `Layer.succeed(key)(value)` — Layer estático +- `Layer.effect(key)(effect)` — Layer desde Effect +- `Layer.mergeAll(layers...)` — merge de layers +- `Data.TaggedError(tag)` — errores estructurados +- `Logger.make(fn)` — crear logger custom +- `Effect.runPromiseExit` / `Effect.runFork` — ejecución + +--- + +## 12. Roadmap + +### Milestone 1: Core + Config (Semana 1-2) + +- [ ] Setup del proyecto +- [ ] `HatchetConfig` con ServiceMap +- [ ] `HatchetClient` +- [ ] Tests unitarios + +### Milestone 2: Effectifier + Context (Semana 3) + +- [ ] `HatchetStepContext` +- [ ] `effectifyTask` + `createEffectifierFromLayer` +- [ ] Manejo de errores correcto (Failure → throw para Hatchet retries) + +### Milestone 3: Workflow API (Semana 4) + +- [ ] `workflow()` y `task()` (SDK usa `.task()`, no `.step()`) +- [ ] Inferencia de dependencias +- [ ] `registerWorkflow` + +### Milestone 4: Logging + Schema (Semana 5) + +- [ ] Logger personalizado +- [ ] Validación de input + +### Milestone 5: Docker + Testing (Semana 6) + +- [ ] Docker Compose con PostgreSQL +- [ ] Tests de integración + +### Milestone 6: React Router Example + README + Release (Semana 7-8) + +- [ ] Ejemplo completo en react-router-example +- [ ] README.md del package +- [ ] Release v0.1.0 + +--- + +## 13. Dependencias + +```json +{ + "dependencies": { + "@hatchet-dev/typescript-sdk": "^1.19.0" + }, + "peerDependencies": { + "effect": "catalog:" + }, + "devDependencies": { + "@effect/vitest": "catalog:", + "@types/node": "catalog:", + "typescript": "catalog:", + "vitest": "catalog:" + } +} +``` + +**Nota**: `@effect/schema` NO es necesario — Schema está incluido en el paquete principal `effect`. + +--- + +## 14. Glosario + +| Término | Definición | +| -------------------- | ------------------------------------------------ | +| **Effect** | Framework de programación funcional v4 | +| **ServiceMap** | Sistema de inyección de dependencias en v4 | +| **Hatchet** | Sistema de workflows y background jobs | +| **Effectifier** | Adaptador que convierte Effect a Promise | +| **ManagedRuntime** | Ejecución de Effects con dependencias (v4) | +| **Layer** | Composición de dependencias en Effect | +| **Data.TaggedError** | Errores estructurados con discriminación | +| **Task** | Unidad de trabajo en Hatchet SDK v1 (era "step") | + +--- + +## 15. Referencias + +- **Effect v4 Patterns**: `.effect-reference/.patterns/` +- **Effect v4 Migration**: `.effect-reference/migration/` +- **Skill**: `.agent/skills/effect-pattern-discovery/SKILL.md` + +--- + +_Documento creado: Marzo 2026_ +_Versión: 1.1.0 (APIs verificadas)_ +_Estado: Draft - APIs verificadas contra Effect v4 + Hatchet SDK v1.19.0_ +_Cambios v1.1.0_: API verification — step→task, Schema de effect, ManagedRuntime, ctx.input, Effect.withLogger diff --git a/docs/proposals/hatchet-integration.md b/docs/proposals/hatchet-integration.md new file mode 100644 index 0000000..8a6cb10 --- /dev/null +++ b/docs/proposals/hatchet-integration.md @@ -0,0 +1,82 @@ +# Proposal: @effectify/hatchet Integration + +## Intent + +Create `@effectify/hatchet` — a native Effect v4 integration with Hatchet SDK v1.19.0 for defining workflows as pure Effects. This enables users to write Hatchet tasks using `Effect` instead of async functions, leveraging typed errors, dependency injection via ServiceMap, and automatic logging sync. + +## Scope + +### In Scope + +- `@effectify/hatchet` package with core, effectifier, workflow, logging, schema, and testing modules +- Effectifier bridge converting Effect → Promise using ManagedRuntime +- Workflow builder API: `workflow({ name }).task(task1).task(task2)` pattern +- HatchetStepContext service for injecting Hatchet context into Effects +- Custom Effect logger that syncs to Hatchet UI +- Schema validation utilities for workflow input +- Unit tests with mock context (>90% coverage) +- Integration tests with Docker Compose (Hatchet + PostgreSQL) +- README.md with usage examples +- Release configuration in nx.json + +### Out of Scope + +- React Router example app integration (deferred to future change) +- Support for older Hatchet SDK versions (v0.x) + +## Approach + +1. **Setup Package Structure** — Create `packages/hatchet/` following `@effectify/prisma` patterns (project.json, package.json, tsconfig.json, vitest.config.ts) + +2. **Implement Core Modules** — Build HatchetConfig, HatchetClientService, HatchetStepContext, and HatchetError using ServiceMap.Service (NOT Context.Tag) + +3. **Build Effectifier** — Create `effectifyTask` using ManagedRuntime.make(layer) to convert Effect → Promise with proper error propagation for Hatchet retries + +4. **Create Workflow API** — Implement `workflow()` and `task()` functions matching Hatchet SDK v1 patterns (uses `.task()` not `.step()`) + +5. **Add Logging & Schema** — Implement HatchetLogger using Effect.withLogger and getValidatedInput using Schema.decodeUnknown + +6. **Write Tests** — Create unit tests with mock context and integration tests against Docker Compose + +## Dependencies + +- `@hatchet-dev/typescript-sdk: ^1.19.0` — Hatchet SDK (NOT yet installed) +- `effect: catalog:` — Effect v4 (already in monorepo) +- Docker Compose with Hatchet + PostgreSQL for integration tests + +## Risks + +| Risk | Likelihood | Mitigation | +| -------------------------------------------- | ---------- | --------------------------------------------------------------- | +| Hatchet SDK API differs from PRD assumptions | Medium | Use verified APIs: `.task()`, `ctx.input`, `ctx.parentOutput()` | +| Effect v4 APIs break during beta | Low | Use only verified APIs from `.effect-reference/` | +| ManagedRuntime leak in effectifier | Medium | Ensure runtime disposal in worker lifecycle | + +## Success Criteria + +- [ ] Package builds without errors using `nx build @effectify/hatchet` +- [ ] Unit tests pass (>90% coverage) via `nx test @effectify/hatchet` +- [ ] Integration tests pass against Docker Compose Hatchet +- [ ] README.md provides complete usage documentation +- [ ] Workflow fails correctly trigger Hatchet retries +- [ ] Effect.log() output appears in Hatchet dashboard + +## Affected Files + +| Area | Impact | Description | +| ---------------------------------------------- | -------- | --------------------------------------------- | +| `packages/hatchet/project.json` | New | Nx project configuration | +| `packages/hatchet/package.json` | New | Package manifest with Hatchet SDK dependency | +| `packages/hatchet/tsconfig.json` | New | TypeScript configuration | +| `packages/hatchet/vitest.config.ts` | New | Test configuration | +| `packages/hatchet/src/index.ts` | New | Public exports | +| `packages/hatchet/src/core/*.ts` | New | Config, client, context, error modules | +| `packages/hatchet/src/effectifier/*.ts` | New | Effect → Promise bridge | +| `packages/hatchet/src/workflow/*.ts` | New | Workflow builder API | +| `packages/hatchet/src/logging/*.ts` | New | Hatchet logger | +| `packages/hatchet/src/schema/*.ts` | New | Input validation | +| `packages/hatchet/src/testing/*.ts` | New | Test utilities | +| `packages/hatchet/tests/unit/*.test.ts` | New | Unit tests | +| `packages/hatchet/tests/integration/*.test.ts` | New | Integration tests | +| `packages/hatchet/README.md` | New | Package documentation | +| `nx.json` | Modified | Add release configuration for hatchet package | diff --git a/docs/specs/hatchet-spec.md b/docs/specs/hatchet-spec.md new file mode 100644 index 0000000..fc499fe --- /dev/null +++ b/docs/specs/hatchet-spec.md @@ -0,0 +1,719 @@ +# @effectify/hatchet Specification + +This specification defines the requirements and scenarios for the `@effectify/hatchet` package, which provides a native Effect v4 integration with Hatchet SDK v1.19.0. + +--- + +## Table of Contents + +1. [Core Module Specs](#core-module-specs) +2. [Effectifier Module Specs](#effectifier-module-specs) +3. [Workflow Module Specs](#workflow-module-specs) +4. [Logging Module Specs](#logging-module-specs) +5. [Schema Module Specs](#schema-module-specs) +6. [Testing Module Specs](#testing-module-specs) +7. [Monorepo Setup Specs](#monorepo-setup-specs) +8. [Integration Specs](#integration-specs) + +--- + +## Core Module Specs + +### HatchetConfig Spec + +The HatchetConfig module provides centralized configuration management for the Hatchet integration using Effect's Config system. + +#### Requirements + +- [REQ-CORE-01] HatchetConfig MUST be defined as a ServiceMap.Service with the configuration schema type as its payload +- [REQ-CORE-02] HatchetConfig MUST support token, host, and namespace properties +- [REQ-CORE-03] HatchetConfig MUST provide a Layer for static configuration via HatchetConfigLayer +- [REQ-CORE-04] HatchetConfig MUST support loading from environment variables via Config.Wrap +- [REQ-CORE-05] Host MUST default to "http://localhost:8080" if not provided +- [REQ-CORE-06] Namespace MUST be optional + +#### Scenarios + +##### Scenario: Static Configuration Layer Creation + +Given a configuration object with token and host +When HatchetConfigLayer is called with that configuration +Then it returns a Layer that provides the configuration as a service + +##### Scenario: Configuration from Environment + +Given environment variables HATCHET_TOKEN and HATCHET_HOST +When HatchetConfigLayerFromEnv is called with wrapped config +Then it returns a Layer that reads from environment and provides the configuration + +##### Scenario: Default Host Value + +Given a configuration object with only a token +When HatchetConfigLayer processes that configuration +Then the host defaults to "http://localhost:8080" + +##### Scenario: Optional Namespace + +Given a configuration object without namespace +When HatchetConfigLayer processes that configuration +Then namespace is undefined in the provided service + +--- + +### HatchetClientService Spec + +The HatchetClientService module provides the Hatchet SDK client as an injectable Effect service. + +#### Requirements + +- [REQ-CLIENT-01] HatchetClientService MUST be defined as a ServiceMap.Service wrapping HatchetClient from @hatchet-dev/typescript-sdk +- [REQ-CLIENT-02] HatchetClientService MUST be initialized using HatchetClient.init() with token and host_port +- [REQ-CLIENT-03] HatchetClientLive MUST be a Layer that initializes the client from HatchetConfig +- [REQ-CLIENT-04] Initialization MUST handle Config errors and convert to HatchetError +- [REQ-CLIENT-05] The layer MUST depend on HatchetConfig to obtain connection parameters + +#### Scenarios + +##### Scenario: Client Initialization with Valid Config + +Given a HatchetConfig with valid token and host +When HatchetClientLive layer is built +Then it initializes HatchetClient with token and host_port + +##### Scenario: Client Initialization Fails + +Given a HatchetConfig with invalid token +When HatchetClientLive layer is executed +Then it fails with HatchetError containing the cause + +##### Scenario: Client Depends on Config Service + +Given HatchetClientLive is used without HatchetConfig +When the layer is built +Then it fails with a missing dependency error + +--- + +### HatchetError Spec + +The HatchetError module provides structured error handling using Data.TaggedError. + +#### Requirements + +- [REQ-ERROR-01] HatchetError MUST be defined using Data.TaggedError +- [REQ-ERROR-02] HatchetError MUST have a message property describing the error +- [REQ-ERROR-03] HatchetError MUST have an optional cause property for underlying errors +- [REQ-ERROR-04] HatchetError MUST be catchable using Effect.catchTag + +#### Scenarios + +##### Scenario: Creating HatchetError + +Given an error message and optional cause +When HatchetError is constructed +Then it creates a TaggedError with those properties + +##### Scenario: Catching HatchetError in Effect + +Given an Effect that fails with HatchetError +When the Effect is caught using Effect.catchTag("HatchetError") +Then the error handler receives the HatchetError instance + +--- + +### HatchetStepContext Spec + +The HatchetStepContext module provides the Hatchet step context as an injectable Effect service. + +#### Requirements + +- [REQ-CONTEXT-01] HatchetStepContext MUST be defined as a ServiceMap.Service +- [REQ-CONTEXT-02] HatchetStepContext MUST wrap the Hatchet SDK Context type +- [REQ-CONTEXT-03] getHatchetInput MUST extract and type the input property from the context +- [REQ-CONTEXT-04] The context MUST provide access to input, parentOutput, log, and logger properties +- [REQ-CONTEXT-05] Input access MUST work with the SDK v1.19.0 ctx.input property (not a method) + +#### Scenarios + +##### Scenario: Accessing Step Input + +Given a HatchetStepContext with input { userId: "123" } +When getHatchetInput is called and yielded +Then it returns the input typed as the generic type parameter + +##### Scenario: Accessing Parent Output + +Given a task with a parent task +When HatchetStepContext.parentOutput is called with a task reference +Then it returns the output of the parent task + +##### Scenario: Using Logger in Context + +Given a HatchetStepContext +When ctx.logger.info is called +Then it logs to the Hatchet dashboard + +--- + +## Effectifier Module Specs + +### effectifyTask Spec + +The effectifier module bridges Effect execution with Hatchet's Promise-based task system. + +#### Requirements + +- [REQ-EFFECT-01] effectifyTask MUST convert an Effect to a function compatible with Hatchet's task API +- [REQ-EFFECT-02] effectifyTask MUST accept an Effect with HatchetStepContext in its dependencies +- [REQ-EFFECT-03] effectifyTask MUST inject the Hatchet context as a service before execution +- [REQ-EFFECT-04] Success results MUST be returned as-is +- [REQ-EFFECT-05] Failure causes MUST be thrown as Errors for Hatchet retry detection +- [REQ-EFFECT-06] The function signature MUST match (input: unknown, ctx: HatchetContext) => Promise + +#### Scenarios + +##### Scenario: Successful Effect Execution + +Given an Effect that succeeds with a value +When effectifyTask executes the Effect +Then it returns the success value as a Promise + +##### Scenario: Failed Effect Throws Error + +Given an Effect that fails with an error +When effectifyTask executes the Effect +Then it throws an Error for Hatchet to detect and potentially retry + +##### Scenario: Context Injection + +Given an Effect that requires HatchetStepContext +When effectifyTask runs the Effect +Then HatchetStepContext is provided with the Hatchet context + +##### Scenario: ManagedRuntime Execution + +Given a ManagedRuntime with dependencies +When effectifyTask runs with that runtime +Then the Effect is executed using runtime.runPromiseExit + +--- + +### createEffectifierFromLayer Spec + +The factory function creates an effectifier with pre-configured dependencies. + +#### Requirements + +- [REQ-FACTORY-01] createEffectifierFromLayer MUST accept a Layer defining dependencies +- [REQ-FACTORY-02] createEffectifierFromLayer MUST create a ManagedRuntime from the layer +- [REQ-FACTORY-03] The returned function MUST accept an Effect with those dependencies +- [REQ-FACTORY-04] The runtime MUST be disposed when the worker shuts down + +#### Scenarios + +##### Scenario: Creating Effectifier with Dependencies + +Given a Layer with Database and EmailService +When createEffectifierFromLayer is called with that layer +Then it returns a function that can execute Effects requiring those services + +##### Scenario: Effectifier Inherits Dependencies + +Given an Effect that requires Database service +When the effectified function is called +Then it uses the dependencies from the layer + +--- + +## Workflow Module Specs + +### workflow() Spec + +The workflow function creates a declarative workflow builder matching Hatchet's API style. + +#### Requirements + +- [REQ-WF-01] workflow() MUST accept WorkflowOptions with name, description, version, sticky, and concurrency +- [REQ-WF-02] workflow() MUST return an EffectWorkflow instance +- [REQ-WF-03] The workflow name MUST be required +- [REQ-WF-04] Description and version MUST be optional + +#### Scenarios + +##### Scenario: Creating a Basic Workflow + +Given workflow options with a name +When workflow() is called +Then it returns an EffectWorkflow with empty tasks + +##### Scenario: Workflow with All Options + +Given workflow options with name, description, version, sticky, and concurrency +When workflow() is called +Then all options are stored in the EffectWorkflow + +--- + +### task() Spec + +The task function defines a single task within a workflow. + +#### Requirements + +- [REQ-TASK-01] task() MUST accept TaskOptions and an Effect +- [REQ-TASK-02] TaskOptions MUST include name (required), timeout, retries, rateLimits, concurrency, and parents +- [REQ-TASK-03] task() MUST return a TaskDefinition that can be added to a workflow +- [REQ-TASK-04] Parents MUST define DAG dependencies between tasks + +#### Scenarios + +##### Scenario: Creating a Basic Task + +Given task options with a name and an Effect +When task() is called +Then it returns a TaskDefinition with those options + +##### Scenario: Task with Retry Configuration + +Given task options with retries: 3 +When task() is called +Then the retry configuration is stored + +##### Scenario: Task with Parent Dependencies + +Given task options with parents: ["fetch-user", "validate-input"] +When task() is called +Then the parents are stored for DAG execution order + +--- + +### EffectWorkflow.task() Spec + +The task method adds a task to the workflow builder chain. + +#### Requirements + +- [REQ-WFTASK-01] EffectWorkflow.task() MUST accept a TaskDefinition +- [REQ-WFTASK-01] EffectWorkflow.task() MUST return a new EffectWorkflow with updated dependencies +- [REQ-WFTASK-02] Multiple calls to task() MUST accumulate tasks in the workflow +- [REQ-WFTASK-03] Dependencies from all tasks MUST be merged + +#### Scenarios + +##### Scenario: Adding Single Task to Workflow + +Given an EffectWorkflow with no tasks +When .task() is called with a TaskDefinition +Then the workflow contains one task + +##### Scenario: Chaining Multiple Tasks + +Given an EffectWorkflow +When .task() is called multiple times +Then all tasks are accumulated in the workflow + +##### Scenario: Dependency Inference Across Tasks + +Given tasks with different dependency requirements +When they are added to the workflow +Then the workflow dependencies are the union of all task dependencies + +--- + +### registerWorkflow() Spec + +The registerWorkflow function registers an EffectWorkflow with Hatchet. + +#### Requirements + +- [REQ-REG-01] registerWorkflow MUST accept worker name, EffectWorkflow, and a Layer +- [REQ-REG-02] registerWorkflow MUST return an Effect that registers the workflow +- [REQ-REG-03] The function MUST use hatchet.workflow() to create the workflow +- [REQ-REG-04] The function MUST use workflow.task() for each task (not step()) +- [REQ-REG-05] The function MUST create a worker with hatchet.worker() +- [REQ-REG-06] The function MUST start the worker +- [REQ-REG-07] Errors during registration MUST fail with HatchetError + +#### Scenarios + +##### Scenario: Registering a Simple Workflow + +Given a workflow with one task and a layer +When registerWorkflow is executed +Then the workflow is registered with Hatchet and worker starts + +##### Scenario: Registration Fails with Invalid Workflow + +Given a workflow with no tasks +When registerWorkflow is executed +Then it fails with HatchetError + +##### Scenario: Worker Creation Error + +Given a Hatchet client that fails to create a worker +When registerWorkflow is executed +Then it fails with HatchetError containing the cause + +--- + +## Logging Module Specs + +### HatchetLogger Spec + +The HatchetLogger provides automatic log synchronization between Effect.log() and Hatchet UI. + +#### Requirements + +- [REQ-LOG-01] HatchetLogger MUST be created using Logger.make +- [REQ-LOG-02] HatchetLogger MUST detect if HatchetStepContext exists in the fiber +- [REQ-LOG-03] If context exists, logs MUST be sent to ctx.log() +- [REQ-LOG-04] Logs MUST always be printed to console regardless of context +- [REQ-LOG-05] Log level MUST be included in the Hatchet log message + +#### Scenarios + +##### Scenario: Log Within Hatchet Task + +Given an Effect running within a Hatchet task with HatchetStepContext +When Effect.log("message") is called +Then the message appears in Hatchet dashboard via ctx.log() + +##### Scenario: Log Outside Hatchet Task + +Given an Effect running outside a Hatchet task (no HatchetStepContext) +When Effect.log("message") is called +Then the message goes to console only + +##### Scenario: Log Level Included + +Given a log with level "debug" +When HatchetLogger formats the message +Then the output includes the log level label + +--- + +### withHatchetLogger Spec + +The withHatchetLogger function applies the HatchetLogger to an Effect. + +#### Requirements + +- [REQ-WLOG-01] withHatchetLogger MUST accept an Effect and return a new Effect +- [REQ-WLOG-02] withHatchetLogger MUST use Effect.withLogger (not Logger.replace) +- [REQ-WLOG-03] The returned Effect MUST have the same type signature as input + +#### Scenarios + +##### Scenario: Applying Logger to Effect + +Given an Effect +When withHatchetLogger is called +Then it returns an Effect with the HatchetLogger applied + +--- + +## Schema Module Specs + +### getValidatedInput Spec + +The getValidatedInput function validates workflow input against an Effect Schema. + +#### Requirements + +- [REQ-SCHEMA-01] getValidatedInput MUST accept a Schema as parameter +- [REQ-SCHEMA-02] getValidatedInput MUST extract input from HatchetStepContext +- [REQ-SCHEMA-03] getValidatedInput MUST use Schema.decodeUnknown for validation +- [REQ-SCHEMA-04] On validation failure, it MUST fail with Schema.ParseError +- [REQ-SCHEMA-05] On success, it MUST return the parsed and typed input +- [REQ-SCHEMA-06] The Schema type parameter MUST infer the return type + +#### Scenarios + +##### Scenario: Valid Input Passes Validation + +Given a Schema and valid input data +When getValidatedInput is executed +Then it returns the parsed input + +##### Scenario: Invalid Input Fails with ParseError + +Given a Schema and invalid input data +When getValidatedInput is executed +Then it fails with Schema.ParseError + +##### Scenario: Type Inference from Schema + +Given a Schema.Struct with { name: Schema.String } +When getValidatedInput is used +Then the return type includes name: string + +--- + +## Testing Module Specs + +### createMockStepContext Spec + +The createMockStepContext function creates a mock Hatchet context for testing. + +#### Requirements + +- [REQ-MOCK-01] createMockStepContext MUST accept optional input data +- [REQ-MOCK-02] The mock MUST include input property with the provided data +- [REQ-MOCK-03] The mock MUST include parentOutput that returns null +- [REQ-MOCK-04] The mock MUST include log and logger methods (no-op) +- [REQ-MOCK-05] The mock MUST include workflowRunId, workflowName, taskName, retryCount + +#### Scenarios + +##### Scenario: Creating Mock Context with Input + +Given input data { userId: "123" } +When createMockStepContext is called with that input +Then ctx.input returns { userId: "123" } + +##### Scenario: Creating Default Mock Context + +Given no input +When createMockStepContext is called +Then ctx.input returns empty object + +--- + +### runTestTask Spec + +The runTestTask function executes an Effect with a mock context. + +#### Requirements + +- [REQ-RUNTEST-01] runTestTask MUST accept an Effect with HatchetStepContext dependency +- [REQ-RUNTEST-02] runTestTask MUST accept optional mock context +- [REQ-RUNTEST-03] runTestTask MUST provide HatchetStepContext as a service +- [REQ-RUNTEST-04] runTestTask MUST return Exit.Exit for result inspection + +#### Scenarios + +##### Scenario: Running Task with Mock Context + +Given an Effect that yields HatchetStepContext +When runTestTask is executed with mock context +Then the Effect has access to the mock context + +##### Scenario: Test Returns Exit + +Given an Effect that succeeds or fails +When runTestTask is executed +Then the result is wrapped in Exit for assertion + +--- + +## Monorepo Setup Specs + +### project.json Spec + +The Nx project configuration for the hatchet package. + +#### Requirements + +- [REQ-NX-01] The project MUST be named @effectify/hatchet +- [REQ-NX-02] The source root MUST be packages/hatchet/src +- [REQ-NX-03] The build target MUST use @nx/js:tsc +- [REQ-NX-04] The test target MUST run vitest +- [REQ-NX-05] The lint target MUST use nx-oxlint:lint + +#### Scenarios + +##### Scenario: Build Target Executes + +Given nx build @effectify/hatchet +When the command is run +Then it produces output in packages/hatchet/dist + +##### Scenario: Test Target Executes + +Given nx test @effectify/hatchet +When the command is run +Then vitest runs the test suite + +--- + +### package.json Spec + +The package manifest for @effectify/hatchet. + +#### Requirements + +- [REQ-PKG-01] Package name MUST be @effectify/hatchet +- [REQ-PKG-02] Effect MUST be a peerDependency using catalog: +- [REQ-PKG-03] @hatchet-dev/typescript-sdk MUST be a dependency with version ^1.19.0 +- [REQ-PKG-04] @effect/vitest MUST be a devDependency +- [REQ-PKG-05] Type MUST be module (ESM) +- [REQ-PKG-06] Exports MUST include "." for main entry + +#### Scenarios + +##### Scenario: Package.json Validates Dependencies + +Given the package.json +When npm or pnpm installs dependencies +Then effect is installed as peerDependency and hatchet-sdk as dependency + +--- + +### tsconfig.json Spec + +TypeScript configuration for the hatchet package. + +#### Requirements + +- [REQ-TS-01] tsconfig.json MUST extend ../../tsconfig.base.json +- [REQ-TS-02] tsconfig.lib.json MUST use composite builds +- [REQ-TS-03] tsconfig.spec.json MUST include test files +- [REQ-TS-04] Paths MUST include @effectify/hatchet for self-references + +#### Scenarios + +##### Scenario: TypeScript Compiles with Composite + +Given tsconfig.lib.json +When tsc builds the package +Then it produces declaration files and build info + +--- + +### vitest.config.ts Spec + +Test configuration for the hatchet package. + +#### Requirements + +- [REQ-VITEST-01] vitest.config.ts MUST use @effect/vitest for equality testers +- [REQ-VITEST-02] setupFiles MUST include setup-tests.ts +- [REQ-VITEST-03] Test include pattern MUST match \*_/_.test.ts +- [REQ-VITEST-04] Aliases MUST resolve @effectify/hatchet to src + +#### Scenarios + +##### Scenario: Tests Use Effect Equality Testers + +Given vitest runs a test with Effect comparisons +When assertions are made +Then @effect/vitest equality testers are applied + +--- + +### nx.json Integration Spec + +The hatchet package must be added to the release configuration. + +#### Requirements + +- [REQ-RELEASE-01] nx.json MUST include hatchet in release.projects array +- [REQ-RELEASE-02] The package MUST be releasable as npm package + +#### Scenarios + +##### Scenario: Release Includes Hatchet Package + +Given nx release is run +When the hatchet project is included +Then it publishes to npm registry + +--- + +## Integration Specs + +### Docker Compose Spec + +Integration tests require a Docker Compose setup with Hatchet and PostgreSQL. + +#### Requirements + +- [REQ-DOCKER-01] docker-compose.yml MUST include postgres service with correct credentials +- [REQ-DOCKER-02] docker-compose.yml MUST include hatchet service +- [REQ-DOCKER-03] Hatchet MUST depend on postgres with health check +- [REQ-DOCKER-04] postgres MUST use healthcheck for dependency conditions +- [REQ-DOCKER-05] DATABASE_URL MUST be configured for postgres connection + +#### Scenarios + +##### Scenario: Docker Compose Starts Successfully + +Given docker-compose.yml +When docker compose up -d is run +Then both postgres and hatchet services start + +##### Scenario: Health Checks Pass + +Given running containers +When health checks are queried +Then both services return healthy status + +--- + +### Integration Tests Spec + +Tests against real Hatchet engine. + +#### Requirements + +- [REQ-INT-01] Integration tests MUST wait for Hatchet to be ready before running +- [REQ-INT-02] Integration tests MUST test workflow registration +- [REQ-INT-03] Integration tests MUST test workflow execution +- [REQ-INT-04] Integration tests MUST verify logs appear in Hatchet +- [REQ-INT-05] Integration tests MUST verify error handling triggers retries + +#### Scenarios + +##### Scenario: Workflow Registration + +Given a defined EffectWorkflow +When registerWorkflow is executed against real Hatchet +Then the workflow appears in Hatchet dashboard + +##### Scenario: Workflow Execution + +Given a registered workflow +When triggered via hatchet.admin.runWorkflow +Then it executes and returns a workflowRunId + +##### Scenario: Task Retries on Error + +Given a task that fails with Effect.fail +When the workflow is executed +Then Hatchet retries the task according to retry configuration + +--- + +## Error Handling Scenarios + +### Error Propagation + +#### Scenario: Effect Failure Becomes Hatchet Error + +Given an Effect that fails with an error +When effectifyTask converts the failure +Then Hatchet receives an exception that triggers its error handling + +#### Scenario: Network Errors During Client Init + +Given network is unavailable +When HatchetClientLive attempts initialization +Then it fails with HatchetError containing the network error + +--- + +## Performance and Resource Management + +### Runtime Disposal + +#### Scenario: ManagedRuntime Cleanup + +Given createEffectifierFromLayer creates a runtime +When the worker shuts down +Then the runtime is disposed to prevent leaks + +--- + +## Summary + +This specification defines 68 requirements across 8 major module categories. Each requirement is testable through the defined scenarios. The package uses verified Effect v4 APIs (ServiceMap.Service, ManagedRuntime.make, Effect.withLogger) and Hatchet SDK v1.19.0 APIs (workflow.task(), ctx.input, ctx.parentOutput()). diff --git a/docs/tasks/hatchet-tasks.md b/docs/tasks/hatchet-tasks.md new file mode 100644 index 0000000..4f477b1 --- /dev/null +++ b/docs/tasks/hatchet-tasks.md @@ -0,0 +1,392 @@ +# @effectify/hatchet Implementation Tasks + +## Executive Summary + +This document defines the implementation tasks for `@effectify/hatchet`, an Effect v4 integration with Hatchet SDK v1.19.0. The package enables users to write Hatchet workflows as pure `Effect` computations with dependency injection, typed errors, and automatic logging sync. + +**Total Tasks**: 47 tasks across 9 phases + +--- + +## Phase 1: Monorepo Setup + +Set up the Nx package structure following `@effectify/prisma` patterns. + +- [ ] [TASK-001] Create `packages/hatchet/` directory structure + + - **Files**: `packages/hatchet/` + - **Depends on**: None + - **Spec ref**: REQ-NX-01, REQ-NX-02 + +- [ ] [TASK-002] Create `packages/hatchet/project.json` with Nx configuration + + - **Files**: `packages/hatchet/project.json` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-NX-01, REQ-NX-02, REQ-NX-03, REQ-NX-04, REQ-NX-05 + +- [ ] [TASK-003] Create `packages/hatchet/package.json` with dependencies + + - **Files**: `packages/hatchet/package.json` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-PKG-01, REQ-PKG-02, REQ-PKG-03, REQ-PKG-04, REQ-PKG-05, REQ-PKG-06 + +- [ ] [TASK-004] Create `packages/hatchet/tsconfig.json` + + - **Files**: `packages/hatchet/tsconfig.json` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-TS-01, REQ-TS-04 + +- [ ] [TASK-005] Create `packages/hatchet/tsconfig.lib.json` + + - **Files**: `packages/hatchet/tsconfig.lib.json` + - **Depends on**: TASK-001, TASK-004 + - **Spec ref**: REQ-TS-02 + +- [ ] [TASK-006] Create `packages/hatchet/tsconfig.spec.json` + + - **Files**: `packages/hatchet/tsconfig.spec.json` + - **Depends on**: TASK-001, TASK-004 + - **Spec ref**: REQ-TS-03 + +- [ ] [TASK-007] Create `packages/hatchet/vitest.config.ts` + + - **Files**: `packages/hatchet/vitest.config.ts` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-VITEST-01, REQ-VITEST-02, REQ-VITEST-03, REQ-VITEST-04 + +- [ ] [TASK-008] Create `packages/hatchet/setup-tests.ts` + + - **Files**: `packages/hatchet/setup-tests.ts` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-VITEST-02 + +- [ ] [TASK-009] Create `packages/hatchet/src/` directory structure + + - **Files**: `packages/hatchet/src/` + - **Depends on**: TASK-001 + +- [ ] [TASK-010] Add `packages/hatchet` to `nx.json` release.projects + + - **Files**: `nx.json` + - **Depends on**: TASK-001 + - **Spec ref**: REQ-RELEASE-01, REQ-RELEASE-02 + +- [ ] [TASK-011] Install dependencies with pnpm + - **Files**: N/A + - **Depends on**: TASK-002, TASK-003 + - **Spec ref**: REQ-PKG-02, REQ-PKG-03 + +--- + +## Phase 2: Core Module + +Implement the core services: HatchetError, HatchetConfig, HatchetClientService, and HatchetStepContext. + +- [ ] [TASK-012] Create `packages/hatchet/src/core/error.ts` — HatchetError + + - **Files**: `packages/hatchet/src/core/error.ts` + - **Depends on**: TASK-009 + - **Spec ref**: REQ-ERROR-01, REQ-ERROR-02, REQ-ERROR-03, REQ-ERROR-04 + +- [ ] [TASK-013] Create `packages/hatchet/src/core/config.ts` — HatchetConfig + + - **Files**: `packages/hatchet/src/core/config.ts` + - **Depends on**: TASK-012 + - **Spec ref**: REQ-CORE-01, REQ-CORE-02, REQ-CORE-03, REQ-CORE-04, REQ-CORE-05, REQ-CORE-06 + +- [ ] [TASK-014] Create `packages/hatchet/src/core/client.ts` — HatchetClientService + + - **Files**: `packages/hatchet/src/core/client.ts` + - **Depends on**: TASK-012, TASK-013 + - **Spec ref**: REQ-CLIENT-01, REQ-CLIENT-02, REQ-CLIENT-03, REQ-CLIENT-04, REQ-CLIENT-05 + +- [ ] [TASK-015] Create `packages/hatchet/src/core/context.ts` — HatchetStepContext + + - **Files**: `packages/hatchet/src/core/context.ts` + - **Depends on**: TASK-014 + - **Spec ref**: REQ-CONTEXT-01, REQ-CONTEXT-02, REQ-CONTEXT-03, REQ-CONTEXT-04, REQ-CONTEXT-05 + +- [ ] [TASK-016] Create `packages/hatchet/src/core/index.ts` — Core exports + + - **Files**: `packages/hatchet/src/core/index.ts` + - **Depends on**: TASK-012, TASK-013, TASK-014, TASK-015 + +- [ ] [TASK-017] Create `packages/hatchet/tests/unit/core.test.ts` — Core unit tests + - **Files**: `packages/hatchet/tests/unit/core.test.ts` + - **Depends on**: TASK-016 + - **Spec ref**: REQ-ERROR-01, REQ-ERROR-02, REQ-ERROR-03, REQ-ERROR-04, REQ-CORE-01, REQ-CORE-02, REQ-CORE-03, REQ-CORE-04, REQ-CORE-05, REQ-CORE-06, REQ-CLIENT-01, REQ-CLIENT-02, REQ-CLIENT-03, REQ-CLIENT-04, REQ-CLIENT-05, REQ-CONTEXT-01, REQ-CONTEXT-02, REQ-CONTEXT-03, REQ-CONTEXT-04, REQ-CONTEXT-05 + +--- + +## Phase 3: Effectifier Module + +Implement the bridge that converts Effect → Promise for Hatchet task execution. + +- [ ] [TASK-018] Create `packages/hatchet/src/effectifier/types.ts` — Internal types + + - **Files**: `packages/hatchet/src/effectifier/types.ts` + - **Depends on**: TASK-015 + +- [ ] [TASK-019] Create `packages/hatchet/src/effectifier/execute.ts` — effectifyTask + + - **Files**: `packages/hatchet/src/effectifier/execute.ts` + - **Depends on**: TASK-018, TASK-015 + - **Spec ref**: REQ-EFFECT-01, REQ-EFFECT-02, REQ-EFFECT-03, REQ-EFFECT-04, REQ-EFFECT-05, REQ-EFFECT-06, REQ-FACTORY-01, REQ-FACTORY-02, REQ-FACTORY-03, REQ-FACTORY-04 + +- [ ] [TASK-020] Create `packages/hatchet/src/effectifier/index.ts` — Effectifier exports + + - **Files**: `packages/hatchet/src/effectifier/index.ts` + - **Depends on**: TASK-018, TASK-019 + +- [ ] [TASK-021] Create `packages/hatchet/tests/unit/effectifier.test.ts` — Effectifier tests + - **Files**: `packages/hatchet/tests/unit/effectifier.test.ts` + - **Depends on**: TASK-020 + - **Spec ref**: REQ-EFFECT-01, REQ-EFFECT-02, REQ-EFFECT-03, REQ-EFFECT-04, REQ-EFFECT-05, REQ-EFFECT-06, REQ-FACTORY-01, REQ-FACTORY-02, REQ-FACTORY-03, REQ-FACTORY-04 + +--- + +## Phase 4: Workflow Module + +Implement the declarative workflow builder matching Hatchet's API style. + +- [ ] [TASK-022] Create `packages/hatchet/src/workflow/types.ts` — TaskOptions, WorkflowOptions + + - **Files**: `packages/hatchet/src/workflow/types.ts` + - **Depends on**: TASK-019 + - **Spec ref**: REQ-TASK-02, REQ-WF-01, REQ-WF-02, REQ-WF-03, REQ-WF-04 + +- [ ] [TASK-023] Create `packages/hatchet/src/workflow/workflow.ts` — EffectWorkflow class + + - **Files**: `packages/hatchet/src/workflow/workflow.ts` + - **Depends on**: TASK-022 + - **Spec ref**: REQ-WF-01, REQ-WF-02, REQ-WFTASK-01, REQ-WFTASK-02, REQ-WFTASK-03 + +- [ ] [TASK-024] Create `packages/hatchet/src/workflow/task.ts` — task() function + + - **Files**: `packages/hatchet/src/workflow/task.ts` + - **Depends on**: TASK-022, TASK-023 + - **Spec ref**: REQ-TASK-01, REQ-TASK-02, REQ-TASK-03, REQ-TASK-04 + +- [ ] [TASK-025] Create `packages/hatchet/src/workflow/register.ts` — registerWorkflow() + + - **Files**: `packages/hatchet/src/workflow/register.ts` + - **Depends on**: TASK-019, TASK-022, TASK-023, TASK-024 + - **Spec ref**: REQ-REG-01, REQ-REG-02, REQ-REG-03, REQ-REG-04, REQ-REG-05, REQ-REG-06, REQ-REG-07 + +- [ ] [TASK-026] Create `packages/hatchet/src/workflow/index.ts` — Workflow exports + + - **Files**: `packages/hatchet/src/workflow/index.ts` + - **Depends on**: TASK-022, TASK-023, TASK-024, TASK-025 + +- [ ] [TASK-027] Create `packages/hatchet/tests/unit/workflow.test.ts` — Workflow tests + - **Files**: `packages/hatchet/tests/unit/workflow.test.ts` + - **Depends on**: TASK-026 + - **Spec ref**: REQ-WF-01, REQ-WF-02, REQ-WF-03, REQ-WF-04, REQ-TASK-01, REQ-TASK-02, REQ-TASK-03, REQ-TASK-04, REQ-WFTASK-01, REQ-WFTASK-02, REQ-WFTASK-03, REQ-REG-01, REQ-REG-02, REQ-REG-03, REQ-REG-04, REQ-REG-05, REQ-REG-06, REQ-REG-07 + +--- + +## Phase 5: Logging Module + +Implement automatic log synchronization between Effect.log() and Hatchet UI. + +- [ ] [TASK-028] Create `packages/hatchet/src/logging/hatchet-logger.ts` — HatchetLogger + + - **Files**: `packages/hatchet/src/logging/hatchet-logger.ts` + - **Depends on**: TASK-015 + - **Spec ref**: REQ-LOG-01, REQ-LOG-02, REQ-LOG-03, REQ-LOG-04, REQ-LOG-05, REQ-WLOG-01, REQ-WLOG-02, REQ-WLOG-03 + +- [ ] [TASK-029] Create `packages/hatchet/src/logging/index.ts` — Logging exports + + - **Files**: `packages/hatchet/src/logging/index.ts` + - **Depends on**: TASK-028 + +- [ ] [TASK-030] Create `packages/hatchet/tests/unit/logger.test.ts` — Logger tests + - **Files**: `packages/hatchet/tests/unit/logger.test.ts` + - **Depends on**: TASK-029 + - **Spec ref**: REQ-LOG-01, REQ-LOG-02, REQ-LOG-03, REQ-LOG-04, REQ-LOG-05, REQ-WLOG-01, REQ-WLOG-02, REQ-WLOG-03 + +--- + +## Phase 6: Schema Module + +Implement input validation utilities using Effect Schema. + +- [ ] [TASK-031] Create `packages/hatchet/src/schema/get-validated-input.ts` — getValidatedInput + + - **Files**: `packages/hatchet/src/schema/get-validated-input.ts` + - **Depends on**: TASK-015 + - **Spec ref**: REQ-SCHEMA-01, REQ-SCHEMA-02, REQ-SCHEMA-03, REQ-SCHEMA-04, REQ-SCHEMA-05, REQ-SCHEMA-06 + +- [ ] [TASK-032] Create `packages/hatchet/src/schema/index.ts` — Schema exports + + - **Files**: `packages/hatchet/src/schema/index.ts` + - **Depends on**: TASK-031 + +- [ ] [TASK-033] Create `packages/hatchet/tests/unit/schema.test.ts` — Schema tests + - **Files**: `packages/hatchet/tests/unit/schema.test.ts` + - **Depends on**: TASK-032 + - **Spec ref**: REQ-SCHEMA-01, REQ-SCHEMA-02, REQ-SCHEMA-03, REQ-SCHEMA-04, REQ-SCHEMA-05, REQ-SCHEMA-06 + +--- + +## Phase 7: Testing Module + +Implement testing utilities for unit testing tasks. + +- [ ] [TASK-034] Create `packages/hatchet/src/testing/mock-context.ts` — Test utilities + + - **Files**: `packages/hatchet/src/testing/mock-context.ts` + - **Depends on**: TASK-015 + - **Spec ref**: REQ-MOCK-01, REQ-MOCK-02, REQ-MOCK-03, REQ-MOCK-04, REQ-MOCK-05, REQ-RUNTEST-01, REQ-RUNTEST-02, REQ-RUNTEST-03, REQ-RUNTEST-04 + +- [ ] [TASK-035] Create `packages/hatchet/src/testing/index.ts` — Testing exports + - **Files**: `packages/hatchet/src/testing/index.ts` + - **Depends on**: TASK-034 + +--- + +## Phase 8: Public API + +Create the main public exports for the package. + +- [ ] [TASK-036] Create `packages/hatchet/src/index.ts` — Main public exports + + - **Files**: `packages/hatchet/src/index.ts` + - **Depends on**: TASK-016, TASK-020, TASK-026, TASK-029, TASK-032, TASK-035 + +- [ ] [TASK-037] Verify build passes with `nx build @effectify/hatchet` + + - **Files**: N/A + - **Depends on**: TASK-036, TASK-010, TASK-011 + +- [ ] [TASK-038] Verify tests pass with `nx test @effectify/hatchet` + + - **Files**: N/A + - **Depends on**: TASK-017, TASK-021, TASK-027, TASK-030, TASK-033, TASK-035, TASK-036 + +- [ ] [TASK-039] Verify lint passes with `nx lint @effectify/hatchet` + - **Files**: N/A + - **Depends on**: TASK-036 + +--- + +## Phase 9: Integration Tests + +Create integration tests against real Hatchet engine. + +- [ ] [TASK-040] Create `packages/hatchet/tests/integration/docker-compose.yml` + + - **Files**: `packages/hatchet/tests/integration/docker-compose.yml` + - **Depends on**: TASK-036 + - **Spec ref**: REQ-DOCKER-01, REQ-DOCKER-02, REQ-DOCKER-03, REQ-DOCKER-04, REQ-DOCKER-05 + +- [ ] [TASK-041] Create `packages/hatchet/tests/integration/workflow.test.ts` — Integration tests + + - **Files**: `packages/hatchet/tests/integration/workflow.test.ts` + - **Depends on**: TASK-040 + - **Spec ref**: REQ-INT-01, REQ-INT-02, REQ-INT-03, REQ-INT-04, REQ-INT-05 + +- [ ] [TASK-042] Run integration tests with Docker Compose + - **Files**: N/A + - **Depends on**: TASK-040, TASK-041 + +--- + +## Phase 10: Documentation & Release + +Finalize package for release. + +- [ ] [TASK-043] Create `packages/hatchet/README.md` — Package documentation + + - **Files**: `packages/hatchet/README.md` + - **Depends on**: TASK-036, TASK-038 + +- [ ] [TASK-044] Verify package.json exports are correct + + - **Files**: `packages/hatchet/package.json` + - **Depends on**: TASK-003, TASK-036 + +- [ ] [TASK-045] Create TypeScript declaration files verification + + - **Files**: N/A + - **Depends on**: TASK-037 + +- [ ] [TASK-046] Verify release configuration in nx.json + + - **Files**: `nx.json` + - **Depends on**: TASK-010 + +- [ ] [TASK-047] Test nx release dry-run for hatchet package + - **Files**: N/A + - **Depends on**: TASK-046 + +--- + +## Task Dependencies Graph + +``` +Phase 1: Monorepo Setup +├── TASK-001 ─┬─► TASK-002 ─┬─► TASK-003 ─┬─► TASK-011 +│ │ │ │ +│ │ │ └─► TASK-010 ──► TASK-046 +│ │ │ +│ │ ├─► TASK-004 ─┬─► TASK-005 +│ │ │ └─► TASK-006 +│ │ │ +│ │ └─► TASK-007 +│ │ +│ └─► TASK-008 +│ +└─► TASK-009 ─┬─► TASK-012 ──► TASK-013 ──► TASK-014 ──► TASK-015 ──► TASK-016 + │ │ + ├─► TASK-018 ──► TASK-019 ──┬─► TASK-020 ──► TASK-021 ──┤ + │ │ │ + │ ├─► TASK-022 ──► TASK-023 ──► TASK-024 ──► TASK-025 ──► TASK-026 ──► TASK-027 + │ │ │ + │ └─► TASK-028 ──► TASK-029 ──► TASK-030 + │ + ├─► TASK-031 ──► TASK-032 ──► TASK-033 + │ + └─► TASK-034 ──► TASK-035 + +Phase 8: Public API +TASK-016, TASK-020, TASK-026, TASK-029, TASK-032, TASK-035 ──► TASK-036 ──┬─► TASK-037 + ├─► TASK-038 + └─► TASK-039 + +Phase 9: Integration Tests +TASK-036 ──► TASK-040 ──► TASK-041 ──► TASK-042 + +Phase 10: Documentation +TASK-036, TASK-038 ──► TASK-043 ──► TASK-044 ──► TASK-045 ──► TASK-047 +``` + +--- + +## Completion Criteria + +- [ ] All 47 tasks completed +- [ ] Build passes: `nx build @effectify/hatchet` +- [ ] Unit tests pass (>90% coverage): `nx test @effectify/hatchet` +- [ ] Lint passes: `nx lint @effectify/hatchet` +- [ ] Integration tests pass with Docker Compose +- [ ] Release configuration verified in nx.json +- [ ] README.md complete with usage examples + +--- + +## Key Technical Decisions + +| Decision | Rationale | +| --------------------------------------- | --------------------------------------------- | +| `ServiceMap.Service` over `Context.Tag` | Context module doesn't exist in Effect v4 | +| `ManagedRuntime.make(layer)` | `Effect.runtime()` doesn't exist in v4 | +| `Effect.withLogger(effect, logger)` | `Logger.replace` doesn't exist in v4 | +| `workflow.task()` not `workflow.step()` | SDK v1.19.0 uses task terminology | +| `ctx.input` property | SDK v1.19.0 has input as property, not method | + +--- + +**Document Version**: 1.0.0\ +**Created**: March 2026\ +**Phase**: Tasks Breakdown From 107a54bf078a5c728097be808ee3e4a8fb6e600d Mon Sep 17 00:00:00 2001 From: Andres David Jimenez Sulbaran Date: Sun, 29 Mar 2026 00:07:45 -0600 Subject: [PATCH 02/24] feat: add workflow module with declarative builder and task management - Introduced `workflow` module for defining Hatchet workflows using Effect. - Implemented `registerWorkflow` and `registerWorkflowWithConfig` for workflow registration with Hatchet workers. - Added task helper functions to create tasks with options and effects. - Defined types for workflow and task options to enhance type safety. - Created integration and unit tests for workflow registration, task creation, and error handling. - Set up Docker Compose for local development and testing of Hatchet Lite. - Added TypeScript configuration files for building and testing the package. --- .vscode/settings.json | 6 +- .../app/workflows/example.workflow.ts | 194 ++++++ apps/react-router-example/package.json | 1 + docs/tasks/hatchet-tasks.md | 32 +- nx.json | 52 +- packages/hatchet/README.md | 228 +++++++ packages/hatchet/package.json | 44 ++ packages/hatchet/project.json | 41 ++ packages/hatchet/scripts/test-workflow.ts | 63 ++ packages/hatchet/setup-tests.ts | 9 + packages/hatchet/src/core/client.ts | 57 ++ packages/hatchet/src/core/config.ts | 69 ++ packages/hatchet/src/core/context.ts | 17 + packages/hatchet/src/core/error.ts | 90 +++ packages/hatchet/src/core/index.ts | 23 + packages/hatchet/src/effectifier/execute.ts | 93 +++ packages/hatchet/src/effectifier/index.ts | 10 + packages/hatchet/src/effectifier/types.ts | 47 ++ packages/hatchet/src/index.ts | 45 ++ .../hatchet/src/logging/hatchet-logger.ts | 106 ++++ packages/hatchet/src/logging/index.ts | 8 + .../hatchet/src/schema/get-validated-input.ts | 86 +++ packages/hatchet/src/schema/index.ts | 7 + packages/hatchet/src/testing/index.ts | 15 + packages/hatchet/src/testing/mock-context.ts | 136 ++++ packages/hatchet/src/workflow/index.ts | 16 + packages/hatchet/src/workflow/register.ts | 226 +++++++ packages/hatchet/src/workflow/task.ts | 66 ++ packages/hatchet/src/workflow/types.ts | 106 ++++ packages/hatchet/src/workflow/workflow.ts | 105 ++++ .../tests/integration/docker-compose.yml | 70 +++ .../tests/integration/workflow.test.ts | 132 ++++ packages/hatchet/tests/unit/errors.test.ts | 87 +++ packages/hatchet/tests/unit/logging.test.ts | 72 +++ packages/hatchet/tests/unit/schema.test.ts | 116 ++++ packages/hatchet/tests/unit/workflow.test.ts | 139 +++++ packages/hatchet/tsconfig.json | 18 + packages/hatchet/tsconfig.lib.json | 20 + packages/hatchet/tsconfig.spec.json | 14 + packages/hatchet/vitest.config.ts | 19 + pnpm-lock.yaml | 587 ++++++++++++++++-- pnpm-workspace.yaml | 302 ++++----- 42 files changed, 3302 insertions(+), 272 deletions(-) create mode 100644 apps/react-router-example/app/workflows/example.workflow.ts create mode 100644 packages/hatchet/README.md create mode 100644 packages/hatchet/package.json create mode 100644 packages/hatchet/project.json create mode 100644 packages/hatchet/scripts/test-workflow.ts create mode 100644 packages/hatchet/setup-tests.ts create mode 100644 packages/hatchet/src/core/client.ts create mode 100644 packages/hatchet/src/core/config.ts create mode 100644 packages/hatchet/src/core/context.ts create mode 100644 packages/hatchet/src/core/error.ts create mode 100644 packages/hatchet/src/core/index.ts create mode 100644 packages/hatchet/src/effectifier/execute.ts create mode 100644 packages/hatchet/src/effectifier/index.ts create mode 100644 packages/hatchet/src/effectifier/types.ts create mode 100644 packages/hatchet/src/index.ts create mode 100644 packages/hatchet/src/logging/hatchet-logger.ts create mode 100644 packages/hatchet/src/logging/index.ts create mode 100644 packages/hatchet/src/schema/get-validated-input.ts create mode 100644 packages/hatchet/src/schema/index.ts create mode 100644 packages/hatchet/src/testing/index.ts create mode 100644 packages/hatchet/src/testing/mock-context.ts create mode 100644 packages/hatchet/src/workflow/index.ts create mode 100644 packages/hatchet/src/workflow/register.ts create mode 100644 packages/hatchet/src/workflow/task.ts create mode 100644 packages/hatchet/src/workflow/types.ts create mode 100644 packages/hatchet/src/workflow/workflow.ts create mode 100644 packages/hatchet/tests/integration/docker-compose.yml create mode 100644 packages/hatchet/tests/integration/workflow.test.ts create mode 100644 packages/hatchet/tests/unit/errors.test.ts create mode 100644 packages/hatchet/tests/unit/logging.test.ts create mode 100644 packages/hatchet/tests/unit/schema.test.ts create mode 100644 packages/hatchet/tests/unit/workflow.test.ts create mode 100644 packages/hatchet/tsconfig.json create mode 100644 packages/hatchet/tsconfig.lib.json create mode 100644 packages/hatchet/tsconfig.spec.json create mode 100644 packages/hatchet/vitest.config.ts diff --git a/.vscode/settings.json b/.vscode/settings.json index a849230..17160fa 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,8 +3,6 @@ "terminal.integrated.env.linux": { "PATH": "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin:${env:PATH}" }, - // Para que la extensión Nx encuentre pnpm: abre Cursor desde una terminal donde pnpm esté en PATH, p. ej. `cursor .` - "typescript.tsdk": "node_modules/typescript/lib", "[javascript]": { "editor.defaultFormatter": "dprint.dprint" }, @@ -64,6 +62,6 @@ }, "editor.formatOnPaste": true, "emmet.showExpandedAbbreviation": "never", - "typescript.experimental.useTsgo": true, - "typescript.native-preview.tsdk": "node_modules/@typescript/native-preview" + "typescript.native-preview.tsdk": "node_modules/@typescript/native-preview", + "js/ts.experimental.useTsgo": true } \ No newline at end of file diff --git a/apps/react-router-example/app/workflows/example.workflow.ts b/apps/react-router-example/app/workflows/example.workflow.ts new file mode 100644 index 0000000..93a91f8 --- /dev/null +++ b/apps/react-router-example/app/workflows/example.workflow.ts @@ -0,0 +1,194 @@ +/** + * @effectify/hatchet - Example Workflow + * + * This is a simple example of how to use @effectify/hatchet to create + * workflows that can be registered with Hatchet. + * + * NOTE: This requires a running Hatchet instance to execute. + * See docker-compose.yml in packages/hatchet/tests/integration/ + */ + +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import * as Schema from "effect/Schema" +import { + getValidatedInput, + HatchetClientLive, + HatchetConfigLayer, + HatchetStepContext, + registerWorkflow, + task, + workflow, +} from "@effectify/hatchet" + +// ============================================================================ +// Schema Definitions +// ============================================================================ + +/** Input schema for the user workflow */ +const UserWorkflowInput = Schema.Struct({ + userId: Schema.String, + action: Schema.Literals(["welcome", "reminder", "notification"]), +}) + +type UserWorkflowInputType = Schema.Schema.Type + +// ============================================================================ +// Task Definitions +// ============================================================================ + +/** + * Task 1: Validate and process user input + * This task validates the input and prepares the data for the next step + */ +const processUserTask = task( + { + name: "process-user", + timeout: "30s", + }, + Effect.gen(function*() { + // Get and validate input using Effect Schema + const input: UserWorkflowInputType = yield* getValidatedInput( + UserWorkflowInput, + ) + + // Process the input (in a real app, this might query a database) + const processed = { + userId: input.userId, + action: input.action, + timestamp: new Date().toISOString(), + } + + yield* Effect.log( + `Processed user ${input.userId} for action ${input.action}`, + ) + + return processed + }), +) + +/** + * Task 2: Send notification (simulated) + * This task depends on process-user task + */ +const sendNotificationTask = task( + { + name: "send-notification", + timeout: "60s", + parents: ["process-user"], // Depends on process-user task + }, + Effect.gen(function*() { + // In a real app, this would send an actual notification + // For now, we simulate a delay and return success + yield* Effect.log("Sending notification...") + + // Simulate some processing time + const result = { + success: true, + messageId: `msg-${Date.now()}`, + } + + yield* Effect.log(`Notification sent successfully`) + + return result + }), +) + +// ============================================================================ +// Workflow Definition +// ============================================================================ + +/** + * Example workflow that demonstrates: + * - Task chaining with dependencies (parents) + * - Input validation using Effect Schema + * - Type-safe task outputs + */ +export const exampleWorkflow = workflow({ + name: "user-notification-workflow", + description: "Example workflow demonstrating @effectify/hatchet", + version: "1.0.0", +}) + .task(processUserTask) + .task(sendNotificationTask) + +// ============================================================================ +// Registration (for running the workflow) +// ============================================================================ + +/** + * Environment configuration for Hatchet + * + * Hatchet Lite runs on: + * - UI: http://localhost:8888 + * - gRPC: localhost:7077 + * + * IMPORTANT: Use port 7077 for gRPC connections! + */ +const hatchetConfigLayer = HatchetConfigLayer({ + token: process.env.HATCHET_TOKEN || "test-token", + host: process.env.HATCHET_HOST || "http://localhost:7077", +}) + +/** + * Full layer with all dependencies + */ +const appLayer = Layer.merge(hatchetConfigLayer, HatchetClientLive) + +/** + * Start the worker and register the workflow + * + * Usage: + * ```typescript + * // In your server startup code: + * Effect.runPromise( + * registerWorkflow("my-worker", exampleWorkflow, appLayer) + * ) + * ``` + * + * Or with custom config: + * ```typescript + * Effect.runPromise( + * registerWorkflowWithConfig({ + * workerName: "my-worker", + * workflow: exampleWorkflow, + * layer: appLayer, + * onStart: () => { + * console.log("Worker starting..."); + * }, + * }) + * ) + * ``` + */ +export const startWorker = () => { + return registerWorkflow("example-worker", exampleWorkflow, appLayer) +} + +// ============================================================================ +// Usage Example (for documentation) +// ============================================================================ + +/** + * This is how you would trigger this workflow from your application: + * + * ```typescript + * import { Hatchet } from "@hatchet-dev/typescript-sdk"; + * + * const hatchet = new Hatchet({ + * token: process.env.HATCHET_TOKEN, + * host: process.env.HATCHET_HOST, + * }); + * + * // Trigger the workflow + * const result = await hatchet.client.workflow.run( + * "user-notification-workflow", + * { + * * userId: "user-123", + * * action: "welcome", + * * }, + * { + * * token: process.env.HATCHET_TOKEN, + * * } + * ); + * ``` + */ diff --git a/apps/react-router-example/package.json b/apps/react-router-example/package.json index c6ae90c..3762a1b 100644 --- a/apps/react-router-example/package.json +++ b/apps/react-router-example/package.json @@ -9,6 +9,7 @@ }, "sideEffects": false, "dependencies": { + "@effectify/hatchet": "workspace:*", "@effectify/node-better-auth": "workspace:*", "@effectify/react-query": "workspace:*", "@effectify/react-router": "workspace:*", diff --git a/docs/tasks/hatchet-tasks.md b/docs/tasks/hatchet-tasks.md index 4f477b1..45dfbcb 100644 --- a/docs/tasks/hatchet-tasks.md +++ b/docs/tasks/hatchet-tasks.md @@ -12,66 +12,66 @@ This document defines the implementation tasks for `@effectify/hatchet`, an Effe Set up the Nx package structure following `@effectify/prisma` patterns. -- [ ] [TASK-001] Create `packages/hatchet/` directory structure +- [x] [TASK-001] Create `packages/hatchet/` directory structure - **Files**: `packages/hatchet/` - **Depends on**: None - **Spec ref**: REQ-NX-01, REQ-NX-02 -- [ ] [TASK-002] Create `packages/hatchet/project.json` with Nx configuration +- [x] [TASK-002] Create `packages/hatchet/project.json` with Nx configuration - **Files**: `packages/hatchet/project.json` - **Depends on**: TASK-001 - **Spec ref**: REQ-NX-01, REQ-NX-02, REQ-NX-03, REQ-NX-04, REQ-NX-05 -- [ ] [TASK-003] Create `packages/hatchet/package.json` with dependencies +- [x] [TASK-003] Create `packages/hatchet/package.json` with dependencies - **Files**: `packages/hatchet/package.json` - **Depends on**: TASK-001 - **Spec ref**: REQ-PKG-01, REQ-PKG-02, REQ-PKG-03, REQ-PKG-04, REQ-PKG-05, REQ-PKG-06 -- [ ] [TASK-004] Create `packages/hatchet/tsconfig.json` +- [x] [TASK-004] Create `packages/hatchet/tsconfig.json` - **Files**: `packages/hatchet/tsconfig.json` - **Depends on**: TASK-001 - **Spec ref**: REQ-TS-01, REQ-TS-04 -- [ ] [TASK-005] Create `packages/hatchet/tsconfig.lib.json` +- [x] [TASK-005] Create `packages/hatchet/tsconfig.lib.json` - **Files**: `packages/hatchet/tsconfig.lib.json` - **Depends on**: TASK-001, TASK-004 - **Spec ref**: REQ-TS-02 -- [ ] [TASK-006] Create `packages/hatchet/tsconfig.spec.json` +- [x] [TASK-006] Create `packages/hatchet/tsconfig.spec.json` - **Files**: `packages/hatchet/tsconfig.spec.json` - **Depends on**: TASK-001, TASK-004 - **Spec ref**: REQ-TS-03 -- [ ] [TASK-007] Create `packages/hatchet/vitest.config.ts` +- [x] [TASK-007] Create `packages/hatchet/vitest.config.ts` - **Files**: `packages/hatchet/vitest.config.ts` - **Depends on**: TASK-001 - **Spec ref**: REQ-VITEST-01, REQ-VITEST-02, REQ-VITEST-03, REQ-VITEST-04 -- [ ] [TASK-008] Create `packages/hatchet/setup-tests.ts` +- [x] [TASK-008] Create `packages/hatchet/setup-tests.ts` - **Files**: `packages/hatchet/setup-tests.ts` - **Depends on**: TASK-001 - **Spec ref**: REQ-VITEST-02 -- [ ] [TASK-009] Create `packages/hatchet/src/` directory structure +- [x] [TASK-009] Create `packages/hatchet/src/` directory structure - **Files**: `packages/hatchet/src/` - **Depends on**: TASK-001 -- [ ] [TASK-010] Add `packages/hatchet` to `nx.json` release.projects +- [x] [TASK-010] Add `packages/hatchet` to `nx.json` release.projects - **Files**: `nx.json` - **Depends on**: TASK-001 - **Spec ref**: REQ-RELEASE-01, REQ-RELEASE-02 -- [ ] [TASK-011] Install dependencies with pnpm +- [x] [TASK-011] Install dependencies with pnpm - **Files**: N/A - **Depends on**: TASK-002, TASK-003 - **Spec ref**: REQ-PKG-02, REQ-PKG-03 @@ -82,31 +82,31 @@ Set up the Nx package structure following `@effectify/prisma` patterns. Implement the core services: HatchetError, HatchetConfig, HatchetClientService, and HatchetStepContext. -- [ ] [TASK-012] Create `packages/hatchet/src/core/error.ts` — HatchetError +- [x] [TASK-012] Create `packages/hatchet/src/core/error.ts` — HatchetError - **Files**: `packages/hatchet/src/core/error.ts` - **Depends on**: TASK-009 - **Spec ref**: REQ-ERROR-01, REQ-ERROR-02, REQ-ERROR-03, REQ-ERROR-04 -- [ ] [TASK-013] Create `packages/hatchet/src/core/config.ts` — HatchetConfig +- [x] [TASK-013] Create `packages/hatchet/src/core/config.ts` — HatchetConfig - **Files**: `packages/hatchet/src/core/config.ts` - **Depends on**: TASK-012 - **Spec ref**: REQ-CORE-01, REQ-CORE-02, REQ-CORE-03, REQ-CORE-04, REQ-CORE-05, REQ-CORE-06 -- [ ] [TASK-014] Create `packages/hatchet/src/core/client.ts` — HatchetClientService +- [x] [TASK-014] Create `packages/hatchet/src/core/client.ts` — HatchetClientService - **Files**: `packages/hatchet/src/core/client.ts` - **Depends on**: TASK-012, TASK-013 - **Spec ref**: REQ-CLIENT-01, REQ-CLIENT-02, REQ-CLIENT-03, REQ-CLIENT-04, REQ-CLIENT-05 -- [ ] [TASK-015] Create `packages/hatchet/src/core/context.ts` — HatchetStepContext +- [x] [TASK-015] Create `packages/hatchet/src/core/context.ts` — HatchetStepContext - **Files**: `packages/hatchet/src/core/context.ts` - **Depends on**: TASK-014 - **Spec ref**: REQ-CONTEXT-01, REQ-CONTEXT-02, REQ-CONTEXT-03, REQ-CONTEXT-04, REQ-CONTEXT-05 -- [ ] [TASK-016] Create `packages/hatchet/src/core/index.ts` — Core exports +- [x] [TASK-016] Create `packages/hatchet/src/core/index.ts` — Core exports - **Files**: `packages/hatchet/src/core/index.ts` - **Depends on**: TASK-012, TASK-013, TASK-014, TASK-015 diff --git a/nx.json b/nx.json index b70b4c5..d2ab8d7 100644 --- a/nx.json +++ b/nx.json @@ -5,10 +5,7 @@ "packageManager": "pnpm" }, "namedInputs": { - "default": [ - "{projectRoot}/**/*", - "sharedGlobals" - ], + "default": ["{projectRoot}/**/*", "sharedGlobals"], "production": [ "default", "!{projectRoot}/**/?(*.)+(spec|test).[jt]s?(x)?(.snap)", @@ -35,10 +32,7 @@ }, { "plugin": "@nx/vite/plugin", - "exclude": [ - "apps/solid-app-start/**/*", - "apps/react-app-remix/**/*" - ], + "exclude": ["apps/solid-app-start/**/*", "apps/react-app-remix/**/*"], "options": { "buildTargetName": "build", "serveTargetName": "serve", @@ -88,7 +82,8 @@ "packages/node/better-auth", "packages/solid/query", "packages/react/router-better-auth", - "packages/prisma" + "packages/prisma", + "packages/hatchet" ], "changelog": { "automaticFromRef": "HEAD~100", @@ -111,9 +106,7 @@ } }, "sync": { - "disabledTaskSyncGenerators": [ - "@nx/js:typescript-sync" - ] + "disabledTaskSyncGenerators": ["@nx/js:typescript-sync"] }, "targets": { "local-registry": { @@ -149,45 +142,26 @@ "targetDefaults": { "dev": { "cache": false, - "dependsOn": [ - "^build" - ] + "dependsOn": ["^build"] }, "build": { "cache": true, - "inputs": [ - "production", - "^production" - ], - "dependsOn": [ - "^build" - ] + "inputs": ["production", "^production"], + "dependsOn": ["^build"] }, "serve": { "cache": false, - "dependsOn": [ - "^build" - ] + "dependsOn": ["^build"] }, "@nx/esbuild:esbuild": { "cache": true, - "dependsOn": [ - "^build" - ], - "inputs": [ - "production", - "^production" - ] + "dependsOn": ["^build"], + "inputs": ["production", "^production"] }, "@nx/js:tsc": { "cache": true, - "dependsOn": [ - "^build" - ], - "inputs": [ - "production", - "^production" - ] + "dependsOn": ["^build"], + "inputs": ["production", "^production"] } }, "generators": { diff --git a/packages/hatchet/README.md b/packages/hatchet/README.md new file mode 100644 index 0000000..ca2d0cc --- /dev/null +++ b/packages/hatchet/README.md @@ -0,0 +1,228 @@ +# @effectify/hatchet + +> Native Effect v4 integration with Hatchet workflows + +`@effectify/hatchet` provides a native Effect v4 API for creating and running Hatchet workflows. Write your tasks as `Effect`s and let the package handle the conversion to Hatchet's step functions. + +## Features + +- **Native Effect API** — Write tasks using `Effect` instead of async functions +- **Type-safe inputs** — Built-in schema validation for task inputs using Effect's Schema +- **Automatic logging** — Logs are automatically synced to the Hatchet UI +- **Familiar API** — API mirrors Hatchet's native API for easy migration +- **Full TypeScript** — Complete type inference and IntelliSense + +## Installation + +```bash +pnpm add @effectify/hatchet effect +``` + +## Quick Start + +```typescript +import { registerWorkflow, task, workflow } from "@effectify/hatchet" +import * as Effect from "effect/Effect" + +// Define your task as an Effect +const myTask = task( + { name: "hello-task" }, + Effect.succeed({ message: "Hello from Effect!" }), +) + +// Create a workflow +const myWorkflow = workflow({ + name: "hello-workflow", + description: "My first Effect-powered workflow", +}).task(myTask) + +// Register and start the worker +import * as Layer from "effect/Layer" + +const configLayer = HatchetConfigLayer({ + token: process.env.HATCHET_TOKEN!, + host: process.env.HATCHET_HOST || "http://localhost:8080", +}) + +Effect.runPromise(registerWorkflow("my-worker", myWorkflow, configLayer)) +``` + +## API Reference + +### `workflow(options)` + +Creates a new workflow definition. + +```typescript +const wf = workflow({ + name: "my-workflow", + description: "Optional description", + version: "1.0.0", +}) +``` + +**Options:** + +- `name` (required) — Workflow name +- `description` — Workflow description +- `version` — Workflow version (default: "1.0.0") + +**Returns:** `EffectWorkflow` — Chainable workflow builder + +### `.task(taskDefinition)` + +Adds a task to the workflow. Returns the workflow for chaining. + +```typescript +workflow({ name: "my-workflow" }).task(task1).task(task2) // Can depend on task1 via parents +``` + +### `task(options, effect)` + +Defines a task as an Effect. + +```typescript +const myTask = task( + { + name: "my-task", + timeout: "30s", // Optional timeout + parents: ["previous-task"], // Optional dependencies + }, + Effect.gen(function*() { + // Your Effect code here + return { result: "done" } + }), +) +``` + +**Options:** + +- `name` (required) — Task name +- `timeout` — Task timeout (e.g., "30s", "1m") +- `parents` — Array of parent task names + +**Effect:** Any `Effect` — The task logic + +### `registerWorkflow(workerName, workflow, layer)` + +Registers a workflow and starts a worker. + +```typescript +Effect.runPromise(registerWorkflow("my-worker", myWorkflow, configLayer)) +``` + +### `registerWorkflowWithConfig(workerName, workflow, config)` + +Alternative registration with inline config. + +```typescript +Effect.runPromise( + registerWorkflowWithConfig("my-worker", myWorkflow, { + token: "...", + host: "http://localhost:7077", // gRPC port + }), +) +``` + +## Input Validation + +Use `getValidatedInput` to validate and parse task inputs with type safety: + +```typescript +import { Schema } from "effect" +import { getValidatedInput } from "@effectify/hatchet" + +const UserInputSchema = Schema.Struct({ + userId: Schema.String, + action: Schema.Literal("create", "update", "delete"), +}) + +const myTask = task( + { name: "process-user" }, + Effect.gen(function*() { + const input = yield* getValidatedInput(UserInputSchema) + // input is typed as { userId: string, action: "create" | "update" | "delete" } + return { processed: input.userId } + }), +) +``` + +## Logging + +The `withHatchetLogger` function adds automatic log syncing to any Effect: + +```typescript +import { task, withHatchetLogger, workflow } from "@effectify/hatchet" + +const loggedTask = task( + { name: "logged-task" }, + withHatchetLogger( + Effect.gen(function*() { + yield* Effect.log("Starting work...") + const result = yield* Effect.succeed({ data: "done" }) + yield* Effect.log("Work complete!") + return result + }), + ), +) +``` + +## Testing + +Use the testing utilities to create mock contexts and test tasks in isolation: + +```typescript +import { createMockContext, testTask } from "@effectify/hatchet/testing" + +const mockContext = createMockContext<{ userId: string }>({ + workflow: { runId: "test-run", workflowId: "test-wf" }, + step: { name: "test-step" }, + input: { userId: "test-user" }, +}) + +const result = await testTask(myTask, mockContext) +``` + +## Error Handling + +The package provides typed errors using Effect's `Data.TaggedError`: + +```typescript +import { HatchetError, HatchetRetryError, HatchetTimeoutError } from "@effectify/hatchet" + +const myTask = task( + { name: "faulty-task" }, + Effect.gen(function*() { + yield* new HatchetError({ message: "Something went wrong" }) + // Or use specific error types: + // yield* new HatchetTimeoutError({ message: "Task timed out" }) + }), +) +``` + +## Effect Patterns + +This package follows Effect v4 best practices: + +- Use `Effect.gen` for async task logic +- Use `yield*` for direct error yielding (not `Effect.fail`) +- Use `Schema` from the `effect` package for validation +- Use `Layer` for dependency injection + +```typescript +// ✅ Correct +yield * new HatchetError({ message: "Failed" }) + +// ❌ Don't use Effect.fail +yield * Effect.fail(new Error("Failed")) // Avoid this +``` + +## Requirements + +- Effect v4 (v4.0.0 or later) +- Hatchet v1.19.0+ +- TypeScript 5.0+ + +## License + +MIT diff --git a/packages/hatchet/package.json b/packages/hatchet/package.json new file mode 100644 index 0000000..c9b295f --- /dev/null +++ b/packages/hatchet/package.json @@ -0,0 +1,44 @@ +{ + "name": "@effectify/hatchet", + "version": "0.1.0-alpha.1", + "description": "Native Effect v4 integration with Hatchet workflows", + "repository": { + "type": "git", + "url": "https://github.com/devx-op/effectify", + "directory": "packages/hatchet" + }, + "type": "module", + "license": "MIT", + "main": "./dist/src/index.js", + "exports": { + ".": "./dist/src/index.js" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "nx build @effectify/hatchet", + "test": "vitest", + "lint": "nx lint @effectify/hatchet" + }, + "keywords": [], + "author": "", + "packageManager": "pnpm@10.14.0", + "dependencies": { + "@hatchet-dev/typescript-sdk": "^1.19.0" + }, + "peerDependencies": { + "effect": "catalog:" + }, + "devDependencies": { + "@effect/vitest": "catalog:", + "@types/node": "catalog:", + "@vitest/coverage-v8": "catalog:", + "typescript": "catalog:", + "vite-tsconfig-paths": "catalog:", + "vitest": "catalog:" + } +} diff --git a/packages/hatchet/project.json b/packages/hatchet/project.json new file mode 100644 index 0000000..edf8622 --- /dev/null +++ b/packages/hatchet/project.json @@ -0,0 +1,41 @@ +{ + "name": "@effectify/hatchet", + "$schema": "../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "packages/hatchet/src", + "projectType": "library", + "targets": { + "build": { + "executor": "@nx/js:tsc", + "outputs": ["{options.outputPath}"], + "defaultConfiguration": "production", + "options": { + "outputPath": "packages/hatchet/dist", + "main": "packages/hatchet/src/index.ts", + "tsConfig": "packages/hatchet/tsconfig.lib.json", + "format": ["esm"], + "generatePackageJson": false, + "updateBuildableProjectDepsInPackageJson": false, + "additionalEntryPoints": [] + }, + "configurations": { + "development": {}, + "production": {} + } + }, + "test": { + "executor": "nx:run-commands", + "options": { + "command": "vitest run", + "cwd": "packages/hatchet" + } + }, + "lint": { + "executor": "nx-oxlint:lint", + "outputs": ["{options.outputFile}"], + "options": { + "lintFilePatterns": ["packages/hatchet/**/*.{ts,tsx,js,jsx}"] + } + } + }, + "tags": ["node", "react", "hatchet"] +} diff --git a/packages/hatchet/scripts/test-workflow.ts b/packages/hatchet/scripts/test-workflow.ts new file mode 100644 index 0000000..2427d1f --- /dev/null +++ b/packages/hatchet/scripts/test-workflow.ts @@ -0,0 +1,63 @@ +/** + * Example: Running a worker with @effectify/hatchet + * + * This demonstrates how to integrate @effectify/hatchet with Hatchet Lite. + * + * Usage: + * HATCHET_TOKEN="your-token" pnpm tsx packages/hatchet/scripts/test-workflow.ts + */ + +import { Hatchet } from "@hatchet-dev/typescript-sdk" + +const token = process.env.HATCHET_TOKEN +if (!token) { + console.error("❌ Please set HATCHET_TOKEN") + console.log(" Get it from: http://localhost:8888 → Settings → API Tokens") + process.exit(1) +} + +// For Hatchet Lite: +// - UI Dashboard: http://localhost:8888 +// - gRPC API: localhost:7077 (use this!) +const hatchet = Hatchet.init({ + token, + host_port: "localhost:7077", // gRPC port, NOT UI port + tls_config: { + tls_strategy: "none", + }, +}) + +console.log("✅ Hatchet client initialized") +console.log(" gRPC: localhost:7077") +console.log(" UI: http://localhost:8888\n") + +// Define your Effect-based tasks +const myTask = hatchet.task({ + name: "my-effect-task", + fn: async (input: { message: string }) => { + // Your Effect logic would go here + console.log(" 📝 Processing:", input.message) + return { result: "done", message: input.message } + }, +}) + +async function main() { + const worker = await hatchet.worker("my-worker", { + workflows: [myTask], + slots: 10, + }) + + console.log("✅ Worker registered") + await worker.start() + console.log("✅ Worker started!\n") + + console.log("📋 To trigger:") + console.log(" 1. Go to http://localhost:8888") + console.log(" 2. Find 'my-effect-task' → Run") + console.log(' 3. Input: { "message": "Hello!" }') + console.log("") + + await new Promise(() => {}) +} + +main().catch(console.error) diff --git a/packages/hatchet/setup-tests.ts b/packages/hatchet/setup-tests.ts new file mode 100644 index 0000000..41dcb88 --- /dev/null +++ b/packages/hatchet/setup-tests.ts @@ -0,0 +1,9 @@ +import * as it from "@effect/vitest" +import path from "node:path" +import { fileURLToPath } from "node:url" + +it.addEqualityTesters() + +if (typeof __dirname === "undefined") { + ;(globalThis as any).__dirname = path.dirname(fileURLToPath(import.meta.url)) +} diff --git a/packages/hatchet/src/core/client.ts b/packages/hatchet/src/core/client.ts new file mode 100644 index 0000000..b38f3d2 --- /dev/null +++ b/packages/hatchet/src/core/client.ts @@ -0,0 +1,57 @@ +/** + * @effectify/hatchet - Hatchet Client + * + * Hatchet SDK client as a ServiceMap.Service using Effect v4 + */ + +import * as Effect from "effect/Effect" +import * as ServiceMap from "effect/ServiceMap" +import * as Layer from "effect/Layer" +import type { HatchetClient as HatchetClientType } from "@hatchet-dev/typescript-sdk" +import { HatchetClient as HatchetClientSDK } from "@hatchet-dev/typescript-sdk" +import { HatchetConfig } from "./config.js" +import { HatchetInitError } from "./error.js" + +/** + * ServiceMap.Service for the Hatchet SDK client + * Renamed to HatchetClientService to avoid conflict with SDK class name + */ +export class HatchetClientService extends ServiceMap.Service< + HatchetClientService, + HatchetClientType +>()("HatchetClient") {} + +/** + * Create a Layer that provides the HatchetClientService + * The client is initialized from HatchetConfig + */ +export const HatchetClientLive = Layer.effect(HatchetClientService)( + Effect.gen(function*() { + const config = yield* HatchetConfig + + // Initialize Hatchet client with token and host + // SDK v1.19.0 API: HatchetClient.init({ token, host_port }) + const hatchet = HatchetClientSDK.init({ + token: config.token, + host_port: config.host, + }) + + if (!hatchet) { + return yield* HatchetInitError.of( + "Hatchet client initialization returned undefined", + ) + } + + return hatchet + }), +) + +/** + * Convenience function to get the Hatchet client from context + * Usage: yield* getHatchetClient() + */ +export const getHatchetClient = (): Effect.Effect< + HatchetClientType, + never, + HatchetClientService +> => Effect.service(HatchetClientService) diff --git a/packages/hatchet/src/core/config.ts b/packages/hatchet/src/core/config.ts new file mode 100644 index 0000000..965d85c --- /dev/null +++ b/packages/hatchet/src/core/config.ts @@ -0,0 +1,69 @@ +/** + * @effectify/hatchet - Configuration + * + * Hatchet configuration using Effect v4 Config and ServiceMap.Service + */ + +import * as Effect from "effect/Effect" +import * as ServiceMap from "effect/ServiceMap" +import * as Layer from "effect/Layer" +import * as Config from "effect/Config" +import * as Schema from "effect/Schema" + +/** + * Configuration schema for Hatchet + * Uses Schema from the main 'effect' package + */ +const HatchetConfigSchema = Schema.Struct({ + token: Schema.String, + host: Schema.String, + namespace: Schema.optional(Schema.String), +}) + +/** + * Type extracted from the schema + */ +export type HatchetConfigType = Schema.Schema.Type + +/** + * ServiceMap.Service for Hatchet configuration + * This allows injecting config via Effect's dependency injection + */ +export class HatchetConfig extends ServiceMap.Service< + HatchetConfig, + HatchetConfigType +>()("HatchetConfig") {} + +/** + * Create a Layer that provides the HatchetConfig service + * from a config object + */ +export const HatchetConfigLayer = ( + config: HatchetConfigType, +): Layer.Layer => Layer.succeed(HatchetConfig, config) + +/** + * Default configuration values + */ +export const defaultHatchetConfig = { + host: "http://localhost:8080" as const, +} + +/** + * Create a Layer from environment variables + * Uses Config.Wrap for type-safe environment config + * and applies default values + */ +export const HatchetConfigLayerFromEnv = ( + config: Config.Wrap, +): Layer.Layer => + Layer.effect(HatchetConfig)( + Effect.gen(function*() { + const unwrapped = yield* Config.unwrap(config) + return { + host: unwrapped.host ?? defaultHatchetConfig.host, + token: unwrapped.token, + namespace: unwrapped.namespace, + } + }), + ) diff --git a/packages/hatchet/src/core/context.ts b/packages/hatchet/src/core/context.ts new file mode 100644 index 0000000..d08e83e --- /dev/null +++ b/packages/hatchet/src/core/context.ts @@ -0,0 +1,17 @@ +/** + * @effectify/hatchet - Hatchet Step Context + * + * ServiceMap.Service for injecting Hatchet context into Effect tasks + */ + +import * as ServiceMap from "effect/ServiceMap" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" + +/** + * ServiceMap.Service for the Hatchet step context + * This is injected at runtime by the effectifier when executing a task + */ +export class HatchetStepContext extends ServiceMap.Service< + HatchetStepContext, + HatchetContext +>()("HatchetStepContext") {} diff --git a/packages/hatchet/src/core/error.ts b/packages/hatchet/src/core/error.ts new file mode 100644 index 0000000..1ab630c --- /dev/null +++ b/packages/hatchet/src/core/error.ts @@ -0,0 +1,90 @@ +/** + * @effectify/hatchet - Core Error Types + * + * Error types for Hatchet integration using Effect v4 Data.TaggedError + */ + +import * as Data from "effect/Data" + +/** + * Base error class for all Hatchet-related errors + * Uses Data.TaggedError for discriminated error handling + */ +export class HatchetError extends Data.TaggedError("HatchetError")<{ + readonly message: string + readonly cause?: unknown +}> { + /** + * Create a new HatchetError + */ + static of(message: string, cause?: unknown): HatchetError { + return new HatchetError({ message, cause }) + } +} + +/** + * Error when Hatchet SDK initialization fails + */ +export class HatchetInitError extends Data.TaggedError("HatchetInitError")<{ + readonly message: string + readonly cause?: unknown +}> { + static of(message: string, cause?: unknown): HatchetInitError { + return new HatchetInitError({ message, cause }) + } +} + +/** + * Error when workflow/task execution fails + */ +export class HatchetExecutionError extends Data.TaggedError( + "HatchetExecutionError", +)<{ + readonly message: string + readonly taskName?: string + readonly cause?: unknown +}> { + static of( + message: string, + taskName?: string, + cause?: unknown, + ): HatchetExecutionError { + return new HatchetExecutionError({ message, taskName, cause }) + } +} + +/** + * Error when worker registration fails + */ +export class HatchetWorkerError extends Data.TaggedError("HatchetWorkerError")<{ + readonly message: string + readonly workerName?: string + readonly cause?: unknown +}> { + static of( + message: string, + workerName?: string, + cause?: unknown, + ): HatchetWorkerError { + return new HatchetWorkerError({ message, workerName, cause }) + } +} + +/** + * Error when context operations fail + */ +export class HatchetContextError extends Data.TaggedError( + "HatchetContextError", +)<{ + readonly message: string + readonly operation: "input" | "parentOutput" | "log" + readonly cause?: unknown +}> { + static of( + operation: "input" | "parentOutput" | "log", + message: string, + cause?: unknown, + ): HatchetContextError { + return new HatchetContextError({ message, operation, cause }) + } +} diff --git a/packages/hatchet/src/core/index.ts b/packages/hatchet/src/core/index.ts new file mode 100644 index 0000000..30e9bbf --- /dev/null +++ b/packages/hatchet/src/core/index.ts @@ -0,0 +1,23 @@ +/** + * @effectify/hatchet - Core Module Exports + */ + +export { + HatchetContextError, + HatchetError, + HatchetExecutionError, + HatchetInitError, + HatchetWorkerError, +} from "./error.js" + +export { + defaultHatchetConfig, + HatchetConfig, + HatchetConfigLayer, + HatchetConfigLayerFromEnv, + type HatchetConfigType, +} from "./config.js" + +export { HatchetClientLive, HatchetClientService } from "./client.js" + +export { HatchetStepContext } from "./context.js" diff --git a/packages/hatchet/src/effectifier/execute.ts b/packages/hatchet/src/effectifier/execute.ts new file mode 100644 index 0000000..2fdfb4a --- /dev/null +++ b/packages/hatchet/src/effectifier/execute.ts @@ -0,0 +1,93 @@ +/** + * @effectify/hatchet - Effectifier Execute + * + * Core function that converts Effect to a Hatchet-compatible Promise function + * Uses ManagedRuntime to execute Effects with their dependencies + */ + +import * as Effect from "effect/Effect" +import * as Cause from "effect/Cause" +import * as ManagedRuntime from "effect/ManagedRuntime" +import * as Layer from "effect/Layer" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import type { HatchetTaskFn } from "./types.js" +import { HatchetStepContext } from "../core/context.js" + +/** + * effectifyTask converts an Effect into a Hatchet-compatible task function. + * + * This is the core "Effectifier" - it bridges Effect's pure functional world + * with Hatchet's Promise-based task execution. + * + * @param effect - The Effect to execute + * @param runtime - ManagedRuntime with the Effect's dependencies + * @returns A function compatible with Hatchet's task signature + * + * Usage: + * ```typescript + * const taskFn = effectifyTask(myEffect, runtime); + * workflow.task({ name: "my-task", fn: taskFn }); + * ``` + */ +export const effectifyTask = ( + effect: Effect.Effect, + runtime: ManagedRuntime.ManagedRuntime, +): HatchetTaskFn => { + return async (input: unknown, ctx: HatchetContext): Promise => { + // 1. Inject the Hatchet context as a service + const effectWithContext = Effect.provideService( + effect, + HatchetStepContext, + ctx, + ) + + // 2. Execute with the ManagedRuntime + const exit = await runtime.runPromiseExit(effectWithContext) + + // 3. Convert result + if (exit._tag === "Success") { + return exit.value + } else { + // Convert Failure to thrown error so Hatchet can apply retries + const error = Cause.squash(exit.cause) + throw error instanceof Error ? error : new Error(String(error)) + } + } +} + +/** + * createEffectifierFromLayer creates an effectifier from a Layer. + * + * This is a convenience factory for common use cases where you have + * a Layer describing all your dependencies. + * + * @param layer - The Layer containing all dependencies + * @returns An effectifyTask function bound to that runtime + * + * Usage: + * ```typescript + * const effectify = createEffectifierFromLayer(MyAppLayer); + * const taskFn = effectify(myEffect); + * ``` + */ +export const createEffectifierFromLayer = ( + layer: Layer.Layer, +) => { + const runtime = ManagedRuntime.make(layer) + + return ( + effect: Effect.Effect, + ): HatchetTaskFn => effectifyTask(effect, runtime) +} + +/** + * createEffectifierFromServices creates an effectifier from a ServiceMap. + * + * @param services - The ServiceMap with dependencies + * @returns An effectifyTask function bound to that runtime + */ +export const createEffectifierFromServiceMap = ( + services: Layer.Layer, +) => { + return createEffectifierFromLayer(services) +} diff --git a/packages/hatchet/src/effectifier/index.ts b/packages/hatchet/src/effectifier/index.ts new file mode 100644 index 0000000..5720fd7 --- /dev/null +++ b/packages/hatchet/src/effectifier/index.ts @@ -0,0 +1,10 @@ +/** + * @effectify/hatchet - Effectifier Module + * + * Converts Effect to Hatchet-compatible Promise functions + * using ManagedRuntime for dependency injection. + */ + +export { createEffectifierFromLayer, createEffectifierFromServiceMap, effectifyTask } from "./execute.js" + +export type { EffectifiedTask, EffectifyOptions, HatchetEffect, HatchetTaskFn } from "./types.js" diff --git a/packages/hatchet/src/effectifier/types.ts b/packages/hatchet/src/effectifier/types.ts new file mode 100644 index 0000000..0230c47 --- /dev/null +++ b/packages/hatchet/src/effectifier/types.ts @@ -0,0 +1,47 @@ +/** + * @effectify/hatchet - Effectifier Module + * + * Types for the effectifier that converts Effect → Promise for Hatchet + */ + +import type { Effect, ManagedRuntime } from "effect" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import type { HatchetStepContext } from "../core/context.js" + +/** + * Signature of a function that Hatchet calls to execute a task + * SDK v1: (input: unknown, ctx: HatchetContext) => Promise + */ +export type HatchetTaskFn = ( + input: unknown, + ctx: HatchetContext, +) => Promise + +/** + * Options for effectifyTask + */ +export interface EffectifyOptions { + /** + * Whether to sync logs to Hatchet UI + * @default true + */ + syncLogs?: boolean +} + +/** + * Internal type for the effectified task with its runtime + */ +export interface EffectifiedTask { + readonly taskFn: HatchetTaskFn + readonly runtime: ManagedRuntime.ManagedRuntime +} + +/** + * Type for a Effect that can be converted to a Hatchet task + * Must require HatchetStepContext in its dependencies + */ +export type HatchetEffect = Effect.Effect< + A, + E, + R | HatchetStepContext +> diff --git a/packages/hatchet/src/index.ts b/packages/hatchet/src/index.ts new file mode 100644 index 0000000..a4348a7 --- /dev/null +++ b/packages/hatchet/src/index.ts @@ -0,0 +1,45 @@ +// @effectify/hatchet - Native Effect v4 integration with Hatchet workflows +// +// This package provides an API that mirrors Hatchet native API but uses Effect: +// - workflow(): Create a workflow definition +// - task(): Define tasks as Effect instead of async functions +// - registerWorkflow(): Register and start a worker +// +// Usage: +// +// import { workflow, task, registerWorkflow, getValidatedInput, HatchetStepContext } from "@effectify/hatchet" +// import * as Effect from "effect/Effect" +// +// const myTask = task( +// { name: "my-task", timeout: "30s" }, +// Effect.gen(function*() { +// const input = yield* getValidatedInput(MySchema) +// return { result: "ok" } +// }) +// ) +// +// const myWorkflow = workflow({ name: "my-workflow" }).task(myTask) +// +// // Register and start worker +// Effect.runPromise( +// registerWorkflow("my-worker", myWorkflow, MyLayer) +// ) + +// Core - Configuration, Client, Context, Errors +export * from "./core/index.js" + +// Workflow - API that mirrors Hatchet +export { registerWorkflow, registerWorkflowWithConfig, task, workflow } from "./workflow/index.js" +export type { TaskOptions, WorkflowOptions } from "./workflow/index.js" + +// Logging - Automatic log sync to Hatchet UI +export * from "./logging/index.js" + +// Schema - Input validation +export * from "./schema/index.js" + +// Testing utilities +export * from "./testing/index.js" + +// Internal - not exported publicly (use workflow/task instead) +// export * from "./effectifier/index.js"; diff --git a/packages/hatchet/src/logging/hatchet-logger.ts b/packages/hatchet/src/logging/hatchet-logger.ts new file mode 100644 index 0000000..6cd3800 --- /dev/null +++ b/packages/hatchet/src/logging/hatchet-logger.ts @@ -0,0 +1,106 @@ +/** + * @effectify/hatchet - Hatchet Logger + * + * Custom Effect Logger that automatically syncs logs to Hatchet UI. + * When Effect.log() is called inside a Hatchet task, logs appear in Hatchet dashboard. + */ + +import * as Effect from "effect/Effect" +import * as Logger from "effect/Logger" +import * as Option from "effect/Option" +import { HatchetStepContext } from "../core/context.js" + +/** + * Creates a Hatchet-aware logger that: + * - If inside a Hatchet task: forwards logs to Hatchet UI via ctx.log() + * - Otherwise: behaves as the default console logger + */ +export const makeHatchetLogger = (): Logger.Logger => + Logger.make(({ logLevel, message }) => { + const msg = typeof message === "string" ? message : String(message) + const formatted = `[${String(logLevel)}] ${msg}` + + // Try to get HatchetStepContext from current fiber + // If we're inside a Hatchet task, this will succeed + const hatchetCtx = Effect.serviceOption(HatchetStepContext) + + Effect.runPromise(hatchetCtx).then((ctxOpt) => { + if (Option.isSome(ctxOpt)) { + // We're inside a Hatchet task - send log to Hatchet UI + try { + ctxOpt.value.log(formatted) + } catch { + // If ctx.log fails, still output to console + console.log(formatted) + } + } else { + // Not inside a Hatchet task, just console + console.log(formatted) + } + }) + }) + +/** + * Default Hatchet logger instance + */ +export const HatchetLogger: Logger.Logger = makeHatchetLogger() + +/** + * Runs an Effect with the Hatchet logger enabled. + * + * @example + * ```typescript + * const result = await Effect.runPromise( + * withHatchetLogger( + * Effect.gen(function*() { + * yield* Effect.log("Hello from Effect!") // Appears in Hatchet UI + * }) + * ) + * ) + * ``` + */ +export const withHatchetLogger = ( + effect: Effect.Effect, +): Effect.Effect => Effect.withLogger(effect, HatchetLogger) + +/** + * Creates a custom Hatchet logger with additional options. + */ +export const createHatchetLogger = ( + options: { + /** + * Custom format for log messages + */ + format?: (level: string, message: string) => string + /** + * Whether to also log to console (default: true) + */ + console?: boolean + } = {}, +): Logger.Logger => { + const { + format = (level, msg) => `[${level}] ${msg}`, + console: shouldConsole = true, + } = options + + return Logger.make(({ logLevel, message }) => { + const msg = typeof message === "string" ? message : String(message) + const formatted = format(String(logLevel), msg) + + const hatchetCtx = Effect.serviceOption(HatchetStepContext) + + Effect.runPromise(hatchetCtx).then((ctxOpt) => { + if (Option.isSome(ctxOpt)) { + try { + ctxOpt.value.log(formatted) + } catch { + // ignore + } + } + + if (shouldConsole) { + console.log(formatted) + } + }) + }) +} diff --git a/packages/hatchet/src/logging/index.ts b/packages/hatchet/src/logging/index.ts new file mode 100644 index 0000000..fcaba7f --- /dev/null +++ b/packages/hatchet/src/logging/index.ts @@ -0,0 +1,8 @@ +/** + * @effectify/hatchet - Logging Module + * + * Automatic log synchronization between Effect.log() and Hatchet UI. + * When Effect.log() is called inside a Hatchet task, logs appear in Hatchet dashboard. + */ + +export { createHatchetLogger, HatchetLogger, makeHatchetLogger, withHatchetLogger } from "./hatchet-logger.js" diff --git a/packages/hatchet/src/schema/get-validated-input.ts b/packages/hatchet/src/schema/get-validated-input.ts new file mode 100644 index 0000000..66ce440 --- /dev/null +++ b/packages/hatchet/src/schema/get-validated-input.ts @@ -0,0 +1,86 @@ +/** + * @effectify/hatchet - Schema Validation + * + * Utilities for validating workflow input using Effect Schema. + */ + +import * as Effect from "effect/Effect" +import * as Option from "effect/Option" +import * as Schema from "effect/Schema" +import { HatchetStepContext } from "../core/context.js" + +/** + * Extracts and validates the workflow input against a Schema. + * + * This is the main way to get type-safe input in your tasks. + * + * @param schema - The Schema to validate against + * @returns Effect that resolves to the parsed input + * + * @example + * ```typescript + * const UserSchema = Schema.Struct({ + * userId: Schema.String, + * email: Schema.String.pipe(Schema.email()), + * }) + * + * const myTask = task( + * { name: "process-user" }, + * Effect.gen(function*() { + * const input = yield* getValidatedInput(UserSchema) + * // input is now typed as { userId: string, email: string } + * return yield* processUser(input) + * }) + * ) + * ``` + */ +export const getValidatedInput = ( + schema: Schema.Schema, +): Effect.Effect => + Effect.flatMap(Effect.service(HatchetStepContext), (ctx) => { + // SDK v1: input is a property, not ctx.workflowInput() + const rawInput = ctx.input + + // Use decodeUnknownOption - returns Option, no Effect context needed + // We cast schema to any because the Schema type system is complex + // and decodeUnknownOption has constraints we don't need + const decodeOption = Schema.decodeUnknownOption as ( + s: Schema.Schema, + ) => (input: unknown) => Option.Option + const option = decodeOption(schema)(rawInput) + + // Convert Option to Effect - None becomes SchemaError failure + return Option.match(option, { + onNone: () => { + // Create a simple error message - SchemaError expects specific types + const error = new Error("Input validation failed") + return Effect.fail( + Object.assign(error, { _tag: "SchemaError" }) as Schema.SchemaError, + ) + }, + onSome: (a) => Effect.succeed(a), + }) + }) + +/** + * Extracts raw input without validation. + * Use this if you want to validate manually or don't need validation. + * + * @returns The raw input from the Hatchet context + */ +export const getRawInput = (): Effect.Effect< + unknown, + never, + HatchetStepContext +> => Effect.map(Effect.service(HatchetStepContext), (ctx) => ctx.input) + +/** + * Creates a decoder Effect from a schema. + * Useful for composing with other Effects. + * + * @param schema - The Schema to decode with + * @returns Effect that decodes the input + */ +export const decodeInput = ( + schema: Schema.Schema, +): Effect.Effect => getValidatedInput(schema) diff --git a/packages/hatchet/src/schema/index.ts b/packages/hatchet/src/schema/index.ts new file mode 100644 index 0000000..a304f0c --- /dev/null +++ b/packages/hatchet/src/schema/index.ts @@ -0,0 +1,7 @@ +/** + * @effectify/hatchet - Schema Module + * + * Input validation using Effect Schema. + */ + +export { decodeInput, getRawInput, getValidatedInput } from "./get-validated-input.js" diff --git a/packages/hatchet/src/testing/index.ts b/packages/hatchet/src/testing/index.ts new file mode 100644 index 0000000..fa508bb --- /dev/null +++ b/packages/hatchet/src/testing/index.ts @@ -0,0 +1,15 @@ +/** + * @effectify/hatchet - Testing Module + * + * Utilities for testing workflows without external dependencies. + */ + +export { + createDefaultMockLayer, + createMockContext, + createMockLayer, + createMockStepContext, + runWithMockContext, + testTask, + testTaskExit, +} from "./mock-context.js" diff --git a/packages/hatchet/src/testing/mock-context.ts b/packages/hatchet/src/testing/mock-context.ts new file mode 100644 index 0000000..7cec4cf --- /dev/null +++ b/packages/hatchet/src/testing/mock-context.ts @@ -0,0 +1,136 @@ +/** + * @effectify/hatchet - Testing Utilities + * + * Utilities for testing workflows without external dependencies. + */ + +import * as Effect from "effect/Effect" +import * as Exit from "effect/Exit" +import * as Layer from "effect/Layer" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import { HatchetStepContext } from "../core/context.js" + +/** + * Creates a mock HatchetStepContext for testing. + * + * @param options - Optional configuration for the mock context + * @returns A mock context object + */ +export const createMockContext = ( + options: { + readonly input?: unknown + readonly taskName?: string + readonly workflowName?: string + readonly workflowRunId?: string + readonly retryCount?: number + } = {}, +): HatchetContext => { + const { + input = {}, + taskName = "test-task", + workflowName = "test-workflow", + workflowRunId = "test-run-id", + retryCount = 0, + } = options + + // Return as any - the actual Hatchet SDK context has many internal properties + // eslint-disable-next-line @typescript-eslint/no-explicit-any + return { + input, + taskName: () => taskName, + workflowName: () => workflowName, + workflowRunId: () => workflowRunId, + retryCount: () => retryCount, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + parentOutput: async () => null as any, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + log: async (_msg: any) => { + // no-op for testing + }, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + logger: {} as any, + } as any +} + +/** + * Creates a Layer that provides a mock HatchetStepContext. + * + * @param mockContext - The mock context to provide + * @returns A Layer that provides the mock context + */ +export const createMockLayer = ( + mockContext: HatchetContext, +): Layer.Layer => + // eslint-disable-next-line @typescript-eslint/no-explicit-any + Layer.succeed(HatchetStepContext, mockContext as any) + +/** + * Creates a Layer with a default mock context. + * + * @returns A Layer with default mock context + */ +export const createDefaultMockLayer = (): Layer.Layer< + HatchetStepContext, + never, + never +> => + // eslint-disable-next-line @typescript-eslint/no-explicit-any + Layer.succeed(HatchetStepContext, createMockContext() as any) + +/** + * Runs an Effect with a mock HatchetStepContext using provide. + * + * @param effect - The Effect to run + * @param mockContext - Optional mock context (creates default if not provided) + * @returns Effect with the context provided + */ +export const runWithMockContext = ( + effect: Effect.Effect, + mockContext?: HatchetContext, +): Effect.Effect => { + const ctx = mockContext ?? createMockContext() + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const mockLayer = Layer.succeed(HatchetStepContext, ctx as any) + return Effect.provide(effect, mockLayer) +} + +/** + * A simple test runner that executes an Effect with mock context. + * + * @param effect - The Effect to test + * @param mockContext - Optional mock context + * @returns Promise with the result + */ +export const testTask = async ( + effect: Effect.Effect, + mockContext?: HatchetContext, +): Promise => { + const ctx = mockContext ?? createMockContext() + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const mockLayer = Layer.succeed(HatchetStepContext, ctx as any) + const effectWithContext = Effect.provide(effect, mockLayer) + return Effect.runPromise(effectWithContext) +} + +/** + * A test runner that returns the Exit for more detailed assertions. + * + * @param effect - The Effect to test + * @param mockContext - Optional mock context + * @returns Promise with the Exit + */ +export const testTaskExit = async ( + effect: Effect.Effect, + mockContext?: HatchetContext, +): Promise> => { + const ctx = mockContext ?? createMockContext() + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const mockLayer = Layer.succeed(HatchetStepContext, ctx as any) + const effectWithContext = Effect.provide(effect, mockLayer) + return Effect.runPromiseExit(effectWithContext) +} + +/** + * @deprecated Use createMockContext instead + */ +export const createMockStepContext = createMockContext diff --git a/packages/hatchet/src/workflow/index.ts b/packages/hatchet/src/workflow/index.ts new file mode 100644 index 0000000..752e7ae --- /dev/null +++ b/packages/hatchet/src/workflow/index.ts @@ -0,0 +1,16 @@ +/** + * @effectify/hatchet - Workflow Module + * + * Declarative workflow builder for defining Hatchet workflows with Effect. + * Mirrors Hatchet API but uses Effect instead of async functions. + */ + +// Main exports - same names as Hatchet API +export { EffectWorkflow, workflow } from "./workflow.js" +export { task } from "./task.js" +export { registerWorkflow, registerWorkflowWithConfig } from "./register.js" + +// Types +export type { TaskOptions, TaskResult, WorkflowOptions } from "./types.js" +export type { TaskDefinition } from "./task.js" +export type { RegisterWorkflowConfig } from "./register.js" diff --git a/packages/hatchet/src/workflow/register.ts b/packages/hatchet/src/workflow/register.ts new file mode 100644 index 0000000..212df71 --- /dev/null +++ b/packages/hatchet/src/workflow/register.ts @@ -0,0 +1,226 @@ +/** + * @effectify/hatchet - Workflow Registration + * + * Registers EffectWorkflows with Hatchet workers. + */ + +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import * as ManagedRuntime from "effect/ManagedRuntime" +import type { Worker } from "@hatchet-dev/typescript-sdk" +import type { StickyStrategyInput } from "@hatchet-dev/typescript-sdk" +import type { Concurrency } from "@hatchet-dev/typescript-sdk" +import type { Duration } from "@hatchet-dev/typescript-sdk" +import type { RateLimitDuration } from "@hatchet-dev/typescript-sdk" +import type { WorkflowDeclaration } from "@hatchet-dev/typescript-sdk" +import type { HatchetClientService } from "../core/client.js" +import { getHatchetClient } from "../core/client.js" +import type { EffectWorkflow } from "./workflow.js" +import { effectifyTask } from "../effectifier/execute.js" +import { HatchetError } from "../core/error.js" + +/** + * Registers an EffectWorkflow with a Hatchet worker. + * + * This function: + * 1. Gets the Hatchet client from the Effect context + * 2. Creates a workflow using hatchet.workflow() + * 3. Converts each Effect task to a Hatchet-compatible function + * 4. Registers the tasks with the workflow + * 5. Creates a worker and registers the workflow + * 6. Starts the worker + * + * @param workerName - Name for the Hatchet worker + * @param wf - The EffectWorkflow to register + * @param layer - The Layer containing all Dependencies + * @returns Effect that succeeds when worker starts (or fails with HatchetError) + * + * @example + * ```typescript + * const myWorkflow = workflow({ name: "my-workflow" }) + * .task({ name: "task1" }, EffectGen.task1) + * .task({ name: "task2", parents: ["task1"] }, EffectGen.task2); + * + * const register = registerWorkflow("my-worker", myWorkflow, MyAppLayer); + * + * // Run it + * await Effect.runPromise(register); + * ``` + */ +export const registerWorkflow = ( + workerName: string, + wf: EffectWorkflow, + layer: Layer.Layer, +): Effect.Effect => + Effect.gen(function*() { + // 1. Get the Hatchet client + const hatchet = yield* getHatchetClient() + + // 2. Create the ManagedRuntime + const runtime = ManagedRuntime.make(layer) + + // 3. Convert sticky option to proper type + const sticky: StickyStrategyInput | undefined = wf.options.sticky + ? "soft" + : undefined + + // 4. Create the workflow using Hatchet SDK + const workflowDeclaration = hatchet.workflow({ + name: wf.options.name, + description: wf.options.description, + version: wf.options.version, + sticky, + concurrency: convertConcurrency(wf.options.concurrency), + }) + + // 5. Convert each Effect task to Hatchet task + const taskMap = new Map>() + + for (const taskDef of wf.tasks) { + // Create the effectified task function + const taskFn = effectifyTask(taskDef.effect, runtime) + + // Get parent tasks if specified + const parents = taskDef.options.parents + ? taskDef.options.parents + .map((parentName) => taskMap.get(parentName)) + .filter((p): p is NonNullable => p !== undefined) + : undefined + + // Create the Hatchet task + const hatchetTask = workflowDeclaration.task({ + name: taskDef.options.name, + fn: taskFn, + executionTimeout: taskDef.options.timeout as Duration | undefined, + retries: taskDef.options.retries, + backoff: taskDef.options.backoff, + rateLimits: convertRateLimits(taskDef.options.rateLimits), + concurrency: convertConcurrency(taskDef.options.concurrency), + parents: parents, + }) + + taskMap.set(taskDef.options.name, hatchetTask) + } + + // 6. Create the worker (returns a Promise) + const worker: Worker = yield* Effect.tryPromise({ + try: async () => + await hatchet.worker(workerName, { + labels: {}, + }), + catch: (error) => + new HatchetError({ + message: `Failed to create worker: ${error}`, + cause: error, + }), + }) + + // 7. Register the workflow + yield* Effect.tryPromise({ + try: () => worker.registerWorkflow(workflowDeclaration), + catch: (error) => + new HatchetError({ + message: `Failed to register workflow: ${error}`, + cause: error, + }), + }) + + // 8. Start the worker (this is blocking) + yield* Effect.tryPromise({ + try: () => worker.start(), + catch: (error) => + new HatchetError({ + message: `Failed to start worker: ${error}`, + cause: error, + }), + }) + }) + +/** + * Converts concurrency options to the SDK format + */ +function convertConcurrency( + concurrency: Concurrency | readonly Concurrency[] | undefined, +): Concurrency | Concurrency[] | undefined { + if (concurrency === undefined) { + return undefined + } + // Check if it's an array by checking for 'expression' property (single object) vs 'length' (array) + if ("expression" in concurrency) { + // Single Concurrency object + return concurrency + } + // It's an array - make a mutable copy + return [...concurrency] as Concurrency[] +} + +/** + * Converts rate limits to the SDK format + */ +function convertRateLimits( + rateLimits: + | ReadonlyArray<{ + readonly units: string | number + readonly key?: string + readonly staticKey?: string + readonly dynamicKey?: string + readonly limit?: string | number + readonly duration?: string + }> + | undefined, +): + | { + units: string | number + key?: string + staticKey?: string + dynamicKey?: string + limit?: string | number + duration?: RateLimitDuration + }[] + | undefined +{ + if (rateLimits === undefined) { + return undefined + } + return rateLimits.map((rl) => ({ + units: rl.units, + key: rl.key, + staticKey: rl.staticKey, + dynamicKey: rl.dynamicKey, + limit: rl.limit, + duration: rl.duration as RateLimitDuration | undefined, + })) +} + +/** + * Creates a workflow registration effect with custom configuration. + * + * @param config - Configuration for the registration + * @returns Effect that registers and starts the worker + */ +export interface RegisterWorkflowConfig { + workerName: string + workflow: EffectWorkflow + layer: Layer.Layer + onStart?: () => void | Effect.Effect + onStop?: () => void | Effect.Effect +} + +export const registerWorkflowWithConfig = ( + config: RegisterWorkflowConfig, +): Effect.Effect => + Effect.gen(function*() { + // Run onStart callback if provided + if (config.onStart) { + const result = config.onStart() + if (Effect.isEffect(result)) { + yield* result + } + } + + // Register and start the workflow + yield* registerWorkflow(config.workerName, config.workflow, config.layer) + + // Note: onStop would need to be handled by signal handlers + // This is typically done at the application level + }) diff --git a/packages/hatchet/src/workflow/task.ts b/packages/hatchet/src/workflow/task.ts new file mode 100644 index 0000000..0af44eb --- /dev/null +++ b/packages/hatchet/src/workflow/task.ts @@ -0,0 +1,66 @@ +/** + * @effectify/hatchet - Task Helper + * + * Helper functions for creating tasks. + * Mirrors Hatchet API but uses Effect instead of async functions. + */ + +import * as Effect from "effect/Effect" +import type { TaskOptions } from "./types.js" +import type { HatchetStepContext } from "../core/context.js" + +/** + * Creates a task that can be added to a workflow. + * + * This is the main way to define tasks in @effectify/hatchet, + * mirroring Hatchet's API but using Effect instead of async functions. + * + * @param options - Task configuration (name, timeout, retries, parents, etc.) + * @param effect - The Effect to execute for this task + * @returns A TaskDefinition that can be added to a workflow + * + * @example + * ```typescript + * // Define a task + * const fetchUserTask = task( + * { name: "fetch-user", timeout: "30s" }, + * Effect.gen(function*() { + * const input = yield* getValidatedInput(UserSchema) + * const user = yield* Database.findUser(input.userId) + * return user + * }) + * ) + * + * // Add to workflow + * const myWorkflow = workflow({ name: "my-workflow" }) + * .task(fetchUserTask) + * ``` + */ +export const task = ( + options: TaskOptions, + effect: Effect.Effect, +): { + options: TaskOptions + effect: Effect.Effect +} => ({ + options, + effect, +}) + +/** + * Type for a task definition that can be added to a workflow. + * Preserves the original Effect types A (value), E (error), and R (requirements). + */ +export type TaskDefinition = { + readonly options: TaskOptions + // Use a type that preserves the effect's type parameters + // eslint-disable-next-line @typescript-eslint/no-explicit-any + readonly effect: Effect.Effect +} + +/** + * Type for mapping task names to their results. + */ +export type TaskResults = { + [K in T["name"]]: unknown +} diff --git a/packages/hatchet/src/workflow/types.ts b/packages/hatchet/src/workflow/types.ts new file mode 100644 index 0000000..a96c71e --- /dev/null +++ b/packages/hatchet/src/workflow/types.ts @@ -0,0 +1,106 @@ +/** + * @effectify/hatchet - Workflow Types + * + * Type definitions for the declarative workflow builder. + */ + +import type { Effect } from "effect" +import type { Concurrency } from "@hatchet-dev/typescript-sdk" +import type { HatchetStepContext } from "../core/context.js" + +/** + * Options for defining a task in a workflow. + */ +export interface TaskOptions { + /** + * Unique name for this task within the workflow. + */ + readonly name: string + + /** + * Timeout for task execution (e.g., "30s", "5m"). + * SDK field: executionTimeout + */ + readonly timeout?: string + + /** + * Number of retry attempts on failure. + */ + readonly retries?: number + + /** + * Backoff strategy for retries. + */ + readonly backoff?: { + readonly factor?: number + readonly maxSeconds?: number + } + + /** + * Rate limits for this task. + */ + readonly rateLimits?: ReadonlyArray<{ + readonly units: string | number + readonly key?: string + readonly staticKey?: string + readonly dynamicKey?: string + readonly limit?: string | number + readonly duration?: string + }> + + /** + * Concurrency settings. + */ + readonly concurrency?: Concurrency | readonly Concurrency[] + + /** + * Parent task names for DAG dependencies. + * This task will wait for all parents to complete before running. + */ + readonly parents?: readonly string[] +} + +/** + * Options for defining a workflow. + */ +export interface WorkflowOptions { + /** + * Unique name for this workflow. + */ + readonly name: string + + /** + * Human-readable description. + */ + readonly description?: string + + /** + * Version string (e.g., "1.0.0"). + */ + readonly version?: string + + /** + * Whether to use sticky execution (same worker). + */ + readonly sticky?: boolean + + /** + * Concurrency settings for the workflow. + */ + readonly concurrency?: Concurrency | readonly Concurrency[] +} + +/** + * Internal type for a task definition in the builder. + */ +export interface TaskDefinition { + readonly options: TaskOptions + readonly effect: Effect.Effect +} + +/** + * Result from calling workflow.task() - can be chained. + */ +export interface TaskResult { + readonly name: string +} diff --git a/packages/hatchet/src/workflow/workflow.ts b/packages/hatchet/src/workflow/workflow.ts new file mode 100644 index 0000000..ce6aff4 --- /dev/null +++ b/packages/hatchet/src/workflow/workflow.ts @@ -0,0 +1,105 @@ +/** + * @effectify/hatchet - Workflow Builder + * + * Declarative workflow builder for defining Hatchet workflows with Effect. + * Mirrors Hatchet API but uses Effect instead of async functions. + */ + +import * as Effect from "effect/Effect" +import type { TaskOptions, WorkflowOptions } from "./types.js" +import type { HatchetStepContext } from "../core/context.js" +import type { TaskDefinition } from "./task.js" + +/** + * EffectWorkflow - Builder class for defining Hatchet workflows with Effect + * + * Provides a fluent API for defining workflows with tasks that return Effect. + * Tracks accumulated dependencies through the type system. + * + * @example + * ```typescript + * // Style 1: Pass task definition + * const myWorkflow = workflow({ name: "my-workflow" }) + * .task(task({ name: "task1" }, myEffect1)) + * .task(task({ name: "task2", parents: ["task1"] }, myEffect2)); + * + * // Style 2: Pass options + effect directly + * const myWorkflow = workflow({ name: "my-workflow" }) + * .task({ name: "task1" }, myEffect1) + * .task({ name: "task2", parents: ["task1"] }, myEffect2); + * ``` + */ +export class EffectWorkflow { + readonly tasks: TaskDefinition[] = [] + + constructor( + readonly options: WorkflowOptions, + readonly dependencies: R = undefined as unknown as R, + ) {} + + /** + * Adds a task to the workflow. + * + * Supports two styles: + * 1. .task(taskDefinition) - from task() function + * 2. .task(taskOptions, effect) - direct options + effect + */ + task( + optionsOrDefinition: TaskOptions | TaskDefinition, + effect?: Effect.Effect, + ): EffectWorkflow { + let taskDef: TaskDefinition + + if (effect === undefined) { + // Style 1: Passed a task definition from task() + const def = optionsOrDefinition as TaskDefinition + taskDef = def as TaskDefinition + } else { + // Style 2: Passed options + effect directly + const opts = optionsOrDefinition as TaskOptions + // Create task definition preserving original effect types + taskDef = { + options: opts, + effect, + } as TaskDefinition + } + + const newWorkflow = new EffectWorkflow( + this.options, + undefined as unknown as R | R2, + ) + + // Copy existing tasks + newWorkflow.tasks.push(...this.tasks) + + // Add the new task + newWorkflow.tasks.push(taskDef) + + return newWorkflow + } + + /** + * Returns the accumulated dependencies from all tasks. + */ + getDependencies(): R { + return this.dependencies + } +} + +/** + * Creates a new EffectWorkflow builder. + * + * Mirrors Hatchet's workflow() function but returns EffectWorkflow. + * + * @param options - Workflow configuration + * @returns A new EffectWorkflow instance + * + * @example + * ```typescript + * const myWorkflow = workflow({ + * name: "my-workflow", + * description: "My first Effect-powered workflow", + * }); + * ``` + */ +export const workflow = (options: WorkflowOptions): EffectWorkflow => new EffectWorkflow(options) diff --git a/packages/hatchet/tests/integration/docker-compose.yml b/packages/hatchet/tests/integration/docker-compose.yml new file mode 100644 index 0000000..0848403 --- /dev/null +++ b/packages/hatchet/tests/integration/docker-compose.yml @@ -0,0 +1,70 @@ +# Hatchet Lite - For development and testing +# Based on official docs: https://docs.hatchet.run/self-hosting/hatchet-lite +# +# Usage: +# docker-compose -f docker-compose.yml up -d +# +# Access UI: http://localhost:8888 +# Access gRPC: localhost:7077 +# +# Credentials: +# Email: admin@example.com +# Password: Admin123!! + +version: "3.8" +name: hatchet-lite + +services: + postgres: + image: postgres:15.6 + command: postgres -c 'max_connections=200' + restart: always + environment: + - POSTGRES_USER=hatchet + - POSTGRES_PASSWORD=hatchet + - POSTGRES_DB=hatchet + ports: + - "5432:5432" + volumes: + - hatchet_lite_postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -d hatchet -U hatchet"] + interval: 10s + timeout: 10s + retries: 5 + start_period: 10s + networks: + - hatchet-network + + hatchet-lite: + image: ghcr.io/hatchet-dev/hatchet/hatchet-lite:latest + ports: + - "8888:8888" + - "7077:7077" + depends_on: + postgres: + condition: service_healthy + environment: + DATABASE_URL: "postgresql://hatchet:hatchet@postgres:5432/hatchet?sslmode=disable" + SERVER_AUTH_COOKIE_DOMAIN: localhost + SERVER_AUTH_COOKIE_INSECURE: "t" + SERVER_GRPC_BIND_ADDRESS: "0.0.0.0" + SERVER_GRPC_INSECURE: "t" + SERVER_GRPC_BROADCAST_ADDRESS: localhost:7077 + SERVER_GRPC_PORT: "7077" + SERVER_URL: http://localhost:8888 + SERVER_AUTH_SET_EMAIL_VERIFIED: "t" + SERVER_DEFAULT_ENGINE_VERSION: "V1" + SERVER_INTERNAL_CLIENT_INTERNAL_GRPC_BROADCAST_ADDRESS: localhost:7077 + networks: + - hatchet-network + volumes: + - "hatchet_lite_config:/config" + +volumes: + hatchet_lite_postgres_data: + hatchet_lite_config: + +networks: + hatchet-network: + driver: bridge diff --git a/packages/hatchet/tests/integration/workflow.test.ts b/packages/hatchet/tests/integration/workflow.test.ts new file mode 100644 index 0000000..fbbe271 --- /dev/null +++ b/packages/hatchet/tests/integration/workflow.test.ts @@ -0,0 +1,132 @@ +/** + * @effectify/hatchet - Integration Tests + * + * These tests require Hatchet to be running (via docker-compose). + * Run: docker-compose up -d before running these tests. + */ + +import { afterAll, beforeAll, describe, expect, it } from "vitest" +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import { + getValidatedInput, + HatchetClientLive, + HatchetConfigLayer, + registerWorkflow, + task, + withHatchetLogger, + workflow, +} from "@effectify/hatchet" + +const HATCHET_HOST = process.env.HATCHET_HOST || "http://localhost:7077" +const HATCHET_TOKEN = process.env.HATCHET_TOKEN || "test-token" + +describe("Integration: Workflow Registration", () => { + // Skip all tests if Hatchet is not available + const skipIfNoHatchet = async () => { + try { + const response = await fetch(`${HATCHET_HOST}/health`) + if (!response.ok) { + throw new Error("Hatchet not healthy") + } + } catch { + return true + } + return false + } + + it("should create a simple workflow", async () => { + const shouldSkip = await skipIfNoHatchet() + if (shouldSkip) { + expect(true).toBe(true) // Placeholder + return + } + + // Create a simple task + const simpleTask = task( + { name: "simple-task" }, + Effect.succeed({ message: "Hello from integration test!" }), + ) + + // Create workflow + const wf = workflow({ + name: "integration-test-simple", + description: "Simple integration test workflow", + }).task(simpleTask) + + // Verify workflow structure + expect(wf.tasks).toHaveLength(1) + expect(wf.tasks[0]?.options.name).toBe("simple-task") + }) + + it("should create workflow with multiple tasks", async () => { + const shouldSkip = await skipIfNoHatchet() + if (shouldSkip) { + expect(true).toBe(true) + return + } + + const task1 = task({ name: "task-1" }, Effect.succeed({ step: 1 })) + + const task2 = task( + { name: "task-2", parents: ["task-1"] }, + Effect.succeed({ step: 2 }), + ) + + const wf = workflow({ + name: "integration-test-multi", + }) + .task(task1) + .task(task2) + + expect(wf.tasks).toHaveLength(2) + expect(wf.tasks[1]?.options.parents).toEqual(["task-1"]) + }) + + it("should support task with input validation", async () => { + const shouldSkip = await skipIfNoHatchet() + if (shouldSkip) { + expect(true).toBe(true) + return + } + + // This test verifies the schema validation works + const UserSchema = Effect.succeed({ + decode: (input: unknown) => { + if (typeof input === "object" && input !== null && "userId" in input) { + return input as { userId: string } + } + throw new Error("Invalid input") + }, + }) + + // Just verify the workflow can be created with input + const wf = workflow({ + name: "integration-test-validation", + }) + + expect(wf).toBeDefined() + }) +}) + +describe("Integration: Layer Composition", () => { + it("should create config layer from environment", () => { + const configLayer = HatchetConfigLayer({ + token: HATCHET_TOKEN, + host: HATCHET_HOST, + }) + + expect(configLayer).toBeDefined() + }) + + it("should compose layers correctly", async () => { + const configLayer = HatchetConfigLayer({ + token: HATCHET_TOKEN, + host: HATCHET_HOST, + }) + + const fullLayer = Layer.merge(configLayer, HatchetClientLive) + + expect(fullLayer).toBeDefined() + }) +}) diff --git a/packages/hatchet/tests/unit/errors.test.ts b/packages/hatchet/tests/unit/errors.test.ts new file mode 100644 index 0000000..dd55a3c --- /dev/null +++ b/packages/hatchet/tests/unit/errors.test.ts @@ -0,0 +1,87 @@ +/** + * @effectify/hatchet - Error Tests + */ + +import { describe, expect, it } from "vitest" +import * as Effect from "effect/Effect" +import * as Data from "effect/Data" +import { + HatchetContextError, + HatchetError, + HatchetExecutionError, + HatchetInitError, + HatchetWorkerError, +} from "@effectify/hatchet" + +describe("HatchetError", () => { + it("should create a basic error", () => { + const error = new HatchetError({ message: "Something went wrong" }) + expect(error.message).toBe("Something went wrong") + }) + + it("should create error with cause", () => { + const cause = new Error("Original error") + const error = new HatchetError({ message: "Failed", cause }) + expect(error.cause).toBe(cause) + }) + + it("should be a Data.TaggedError", () => { + const error = new HatchetError({ message: "Test" }) + expect(error._tag).toBe("HatchetError") + }) +}) + +describe("HatchetInitError", () => { + it("should create initialization error", () => { + const error = new HatchetInitError({ message: "Failed to initialize" }) + expect(error.message).toBe("Failed to initialize") + expect(error._tag).toBe("HatchetInitError") + }) +}) + +describe("HatchetExecutionError", () => { + it("should create execution error", () => { + const error = new HatchetExecutionError({ message: "Execution failed" }) + expect(error.message).toBe("Execution failed") + expect(error._tag).toBe("HatchetExecutionError") + }) +}) + +describe("HatchetWorkerError", () => { + it("should create worker error", () => { + const error = new HatchetWorkerError({ message: "Worker failed" }) + expect(error.message).toBe("Worker failed") + expect(error._tag).toBe("HatchetWorkerError") + }) +}) + +describe("HatchetContextError", () => { + it("should create context error", () => { + const error = new HatchetContextError({ + message: "Context error", + operation: "input", + }) + expect(error.message).toBe("Context error") + expect(error._tag).toBe("HatchetContextError") + }) +}) + +describe("Error yielding in Effects", () => { + it("should yield HatchetError directly", async () => { + const effect = Effect.gen(function*() { + yield* new HatchetError({ message: "Error in effect" }) + }) + + const exit = await Effect.runPromiseExit(effect) + expect(exit._tag).toBe("Failure") + }) + + it("should yield specific error types", async () => { + const effect = Effect.gen(function*() { + yield* new HatchetExecutionError({ message: "Task failed" }) + }) + + const exit = await Effect.runPromiseExit(effect) + expect(exit._tag).toBe("Failure") + }) +}) diff --git a/packages/hatchet/tests/unit/logging.test.ts b/packages/hatchet/tests/unit/logging.test.ts new file mode 100644 index 0000000..b8994cb --- /dev/null +++ b/packages/hatchet/tests/unit/logging.test.ts @@ -0,0 +1,72 @@ +/** + * @effectify/hatchet - Logging Tests + */ + +import { describe, expect, it, vi } from "vitest" +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import { createHatchetLogger, HatchetLogger, makeHatchetLogger, withHatchetLogger } from "@effectify/hatchet" +import { createMockContext, testTask } from "@effectify/hatchet" +import { HatchetStepContext } from "@effectify/hatchet" + +describe("HatchetLogger", () => { + it("should be defined", () => { + expect(HatchetLogger).toBeDefined() + }) +}) + +describe("makeHatchetLogger", () => { + it("should create a logger instance", () => { + const logger = makeHatchetLogger() + expect(logger).toBeDefined() + }) +}) + +describe("withHatchetLogger", () => { + it("should wrap an effect with the Hatchet logger", () => { + const effect = Effect.succeed("test") + const wrapped = withHatchetLogger(effect) + expect(wrapped).toBeDefined() + }) + + it("should preserve the effect result", async () => { + const effect = withHatchetLogger(Effect.succeed("hello")) + const result = await Effect.runPromise(effect) + expect(result).toBe("hello") + }) + + it("should work with Effect.gen", async () => { + const effect = withHatchetLogger( + Effect.gen(function*() { + return yield* Effect.succeed("world") + }), + ) + const result = await Effect.runPromise(effect) + expect(result).toBe("world") + }) + + it("should propagate failures", async () => { + const error = new Error("test error") + const effect = withHatchetLogger(Effect.fail(error)) + const exit = await Effect.runPromiseExit(effect) + expect(exit._tag).toBe("Failure") + }) +}) + +describe("createHatchetLogger", () => { + it("should create logger with default options", () => { + const logger = createHatchetLogger() + expect(logger).toBeDefined() + }) + + it("should create logger with custom format", () => { + const formatFn = vi.fn((level, msg) => `[${level.toUpperCase()}] ${msg}`) + const logger = createHatchetLogger({ format: formatFn }) + expect(logger).toBeDefined() + }) + + it("should create logger without console output", () => { + const logger = createHatchetLogger({ console: false }) + expect(logger).toBeDefined() + }) +}) diff --git a/packages/hatchet/tests/unit/schema.test.ts b/packages/hatchet/tests/unit/schema.test.ts new file mode 100644 index 0000000..a2ffbdb --- /dev/null +++ b/packages/hatchet/tests/unit/schema.test.ts @@ -0,0 +1,116 @@ +/** + * @effectify/hatchet - Schema Tests + */ + +import { describe, expect, it } from "vitest" +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import * as Schema from "effect/Schema" +import { getRawInput, getValidatedInput, HatchetStepContext } from "@effectify/hatchet" +import { createMockContext } from "@effectify/hatchet" + +describe("getValidatedInput", () => { + it("should validate input against a valid schema", async () => { + const UserSchema = Schema.Struct({ + userId: Schema.String, + name: Schema.String, + }) + + const mockCtx = createMockContext({ + input: { userId: "123", name: "John" }, + }) + + const mockLayer = Layer.succeed(HatchetStepContext, mockCtx as any) + const effect = getValidatedInput(UserSchema) + const result = await Effect.runPromiseExit( + Effect.provide(effect, mockLayer), + ) + + expect(result._tag).toBe("Success") + if (result._tag === "Success") { + expect(result.value.userId).toBe("123") + expect(result.value.name).toBe("John") + } + }) + + it("should fail with invalid input", async () => { + const UserSchema = Schema.Struct({ + userId: Schema.String, + }) + + // Pass number instead of string + const mockCtx = createMockContext({ + input: { userId: 123 }, + }) + + const mockLayer = Layer.succeed(HatchetStepContext, mockCtx as any) + const effect = getValidatedInput(UserSchema) + const result = await Effect.runPromiseExit( + Effect.provide(effect, mockLayer), + ) + + expect(result._tag).toBe("Failure") + }) + + it("should fail with missing required field", async () => { + const UserSchema = Schema.Struct({ + userId: Schema.String, + email: Schema.String, + }) + + // Missing email + const mockCtx = createMockContext({ + input: { userId: "123" }, + }) + + const mockLayer = Layer.succeed(HatchetStepContext, mockCtx as any) + const effect = getValidatedInput(UserSchema) + const result = await Effect.runPromiseExit( + Effect.provide(effect, mockLayer), + ) + + expect(result._tag).toBe("Failure") + }) + + it("should validate optional fields", async () => { + const UserSchema = Schema.Struct({ + userId: Schema.String, + nickname: Schema.optional(Schema.String), + }) + + // Without optional field + const mockCtx = createMockContext({ + input: { userId: "123" }, + }) + + const mockLayer = Layer.succeed(HatchetStepContext, mockCtx as any) + const effect = getValidatedInput(UserSchema) + const result = await Effect.runPromiseExit( + Effect.provide(effect, mockLayer), + ) + + expect(result._tag).toBe("Success") + if (result._tag === "Success") { + expect(result.value.nickname).toBeUndefined() + } + }) +}) + +describe("getRawInput", () => { + it("should return raw input without validation", async () => { + const mockCtx = createMockContext({ + input: { anything: "goes" }, + }) + + const mockLayer = Layer.succeed(HatchetStepContext, mockCtx as any) + const effect = getRawInput() + const result = await Effect.runPromiseExit( + Effect.provide(effect, mockLayer), + ) + + expect(result._tag).toBe("Success") + if (result._tag === "Success") { + expect(result.value).toEqual({ anything: "goes" }) + } + }) +}) diff --git a/packages/hatchet/tests/unit/workflow.test.ts b/packages/hatchet/tests/unit/workflow.test.ts new file mode 100644 index 0000000..230d6b5 --- /dev/null +++ b/packages/hatchet/tests/unit/workflow.test.ts @@ -0,0 +1,139 @@ +/** + * @effectify/hatchet - Workflow Tests + */ + +import { describe, expect, it } from "vitest" +import * as Effect from "effect/Effect" +import { createMockContext, task, testTask, testTaskExit, workflow } from "@effectify/hatchet" + +describe("workflow", () => { + it("should create a workflow with options", () => { + const wf = workflow({ name: "test-workflow" }) + expect(wf.options.name).toBe("test-workflow") + }) + + it("should create a workflow with description", () => { + const wf = workflow({ + name: "test-workflow", + description: "A test workflow", + }) + expect(wf.options.name).toBe("test-workflow") + expect(wf.options.description).toBe("A test workflow") + }) + + it("should create a workflow with version", () => { + const wf = workflow({ + name: "test-workflow", + version: "1.0.0", + }) + expect(wf.options.version).toBe("1.0.0") + }) + + it("should add tasks using task() function", () => { + const myTask = task({ name: "task-1" }, Effect.succeed({ result: "ok" })) + + const wf = workflow({ name: "test" }).task(myTask) + + expect(wf.tasks).toHaveLength(1) + expect(wf.tasks[0]?.options.name).toBe("task-1") + }) + + it("should add tasks directly with options and effect", () => { + const wf = workflow({ name: "test" }).task( + { name: "task-1" }, + Effect.succeed({ result: "ok" }), + ) + + expect(wf.tasks).toHaveLength(1) + }) + + it("should chain multiple tasks", () => { + const wf = workflow({ name: "test" }) + .task({ name: "task-1" }, Effect.succeed(1)) + .task({ name: "task-2" }, Effect.succeed(2)) + + expect(wf.tasks).toHaveLength(2) + expect(wf.tasks[0]?.options.name).toBe("task-1") + expect(wf.tasks[1]?.options.name).toBe("task-2") + }) + + it("should support parents for DAG dependencies", () => { + const wf = workflow({ name: "test" }) + .task({ name: "task-1" }, Effect.succeed(1)) + .task({ name: "task-2", parents: ["task-1"] }, Effect.succeed(2)) + + expect(wf.tasks[1]?.options.parents).toEqual(["task-1"]) + }) + + it("should support task timeout", () => { + const wf = workflow({ name: "test" }).task( + { name: "task-1", timeout: "30s" }, + Effect.succeed({ result: "ok" }), + ) + + expect(wf.tasks[0]?.options.timeout).toBe("30s") + }) + + it("should support task retries", () => { + const wf = workflow({ name: "test" }).task( + { name: "task-1", retries: 3 }, + Effect.succeed({ result: "ok" }), + ) + + expect(wf.tasks[0]?.options.retries).toBe(3) + }) +}) + +describe("task()", () => { + it("should create a task definition", () => { + const taskDef = task({ name: "my-task" }, Effect.succeed({ data: "test" })) + + expect(taskDef.options.name).toBe("my-task") + }) + + it("should preserve effect in task definition", () => { + const taskDef = task({ name: "my-task" }, Effect.succeed({ data: "test" })) + + expect(taskDef.effect).toBeDefined() + }) +}) + +describe("with mock context", () => { + it("should create mock context with default values", () => { + const ctx = createMockContext() + expect(ctx.taskName()).toBe("test-task") + expect(ctx.workflowName()).toBe("test-workflow") + }) + + it("should create mock context with custom input", () => { + const ctx = createMockContext({ + input: { userId: "123" }, + }) + expect(ctx.input).toEqual({ userId: "123" }) + }) + + it("should run effect with mock context", async () => { + const effect = Effect.succeed("hello") + const result = await testTask(effect) + expect(result).toBe("hello") + }) + + it("should handle failures in testTaskExit", async () => { + const effect = Effect.fail(new Error("test error")) + const exit = await testTaskExit(effect) + + expect(exit._tag).toBe("Failure") + if (exit._tag === "Failure") { + expect(exit.cause).toBeDefined() + } + }) + + it("should provide custom mock context", async () => { + const effect = Effect.succeed("custom") + const result = await testTask( + effect, + createMockContext({ input: "custom" }), + ) + expect(result).toBe("custom") + }) +}) diff --git a/packages/hatchet/tsconfig.json b/packages/hatchet/tsconfig.json new file mode 100644 index 0000000..cdfd80a --- /dev/null +++ b/packages/hatchet/tsconfig.json @@ -0,0 +1,18 @@ +{ + "extends": "../../tsconfig.base.json", + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.lib.json" + }, + { + "path": "./tsconfig.spec.json" + } + ], + "compilerOptions": { + "paths": { + "@effectify/hatchet": ["./src/index.ts"] + } + } +} diff --git a/packages/hatchet/tsconfig.lib.json b/packages/hatchet/tsconfig.lib.json new file mode 100644 index 0000000..a8afe64 --- /dev/null +++ b/packages/hatchet/tsconfig.lib.json @@ -0,0 +1,20 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "composite": true, + "rootDir": "src", + "outDir": "dist", + "target": "es2022", + "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo", + "emitDeclarationOnly": false, + "moduleResolution": "nodenext", + "forceConsistentCasingInFileNames": true, + "types": ["node"], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["src/**/*.ts", "src/**/*.tsx"], + "exclude": ["src/**/*.test.ts", "src/**/*.test.tsx"], + "references": [] +} diff --git a/packages/hatchet/tsconfig.spec.json b/packages/hatchet/tsconfig.spec.json new file mode 100644 index 0000000..87477b6 --- /dev/null +++ b/packages/hatchet/tsconfig.spec.json @@ -0,0 +1,14 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "composite": true, + "outDir": "./dist/out-tsc", + "types": ["vitest/globals", "node"], + "module": "esnext", + "moduleResolution": "bundler", + "paths": { + "@effectify/hatchet": ["./src/index.ts"] + } + }, + "include": ["test/**/*.ts", "src/**/*.ts"] +} diff --git a/packages/hatchet/vitest.config.ts b/packages/hatchet/vitest.config.ts new file mode 100644 index 0000000..204ceb9 --- /dev/null +++ b/packages/hatchet/vitest.config.ts @@ -0,0 +1,19 @@ +import path from "node:path" +import { defineConfig } from "vitest/config" + +export default defineConfig({ + test: { + setupFiles: [path.join(__dirname, "setup-tests.ts")], + include: ["**/*.test.ts"], + exclude: ["**/node_modules/**", "**/dist/**"], + globals: true, + }, + resolve: { + alias: { + "@effectify/hatchet": path.join(__dirname, "src/index.ts"), + }, + }, + esbuild: { + target: "node22", + }, +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ae0f3fe..c2e168f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -163,7 +163,7 @@ catalogs: specifier: 5.1.1 version: 5.1.1 '@vitest/coverage-v8': - specifier: ^4.1.0 + specifier: 4.1.2 version: 4.1.2 '@vitest/ui': specifier: ^4.1.0 @@ -449,7 +449,7 @@ importers: version: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) vitest: specifier: 'catalog:' - version: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + version: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) apps/docs: dependencies: @@ -593,6 +593,9 @@ importers: apps/react-router-example: dependencies: + '@effectify/hatchet': + specifier: workspace:* + version: link:../../packages/hatchet '@effectify/node-better-auth': specifier: workspace:* version: link:../../packages/node/better-auth @@ -674,7 +677,7 @@ importers: dependencies: '@effect-atom/atom': specifier: 'catalog:' - version: 0.5.0(@effect/experimental@0.59.0(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1))(@effect/platform@0.94.5(effect@4.0.0-beta.33))(@effect/rpc@0.73.2(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33))(effect@4.0.0-beta.33) + version: 0.5.0(@effect/experimental@0.59.0(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1))(@effect/platform@0.95.0(effect@4.0.0-beta.33))(@effect/rpc@0.73.2(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33))(effect@4.0.0-beta.33) '@effect/atom-solid': specifier: 'catalog:' version: 4.0.0-beta.33(effect@4.0.0-beta.33)(solid-js@1.9.12) @@ -819,6 +822,34 @@ importers: specifier: 'catalog:' version: 1.9.12 + packages/hatchet: + dependencies: + '@hatchet-dev/typescript-sdk': + specifier: ^1.19.0 + version: 1.19.0 + effect: + specifier: 'catalog:' + version: 4.0.0-beta.33 + devDependencies: + '@effect/vitest': + specifier: 'catalog:' + version: 4.0.0-beta.33(effect@4.0.0-beta.33)(vitest@4.1.2) + '@types/node': + specifier: 'catalog:' + version: 20.19.25 + '@vitest/coverage-v8': + specifier: 'catalog:' + version: 4.1.2(vitest@4.1.2) + typescript: + specifier: 'catalog:' + version: 5.9.3 + vite-tsconfig-paths: + specifier: 'catalog:' + version: 6.1.1(typescript@5.9.3)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: + specifier: 'catalog:' + version: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + packages/monorepo-tools: {} packages/node/better-auth: @@ -850,7 +881,7 @@ importers: version: 4.0.0-beta.33 vitest: specifier: 'catalog:' - version: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + version: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) packages/prisma: dependencies: @@ -938,7 +969,7 @@ importers: version: 6.1.1(typescript@5.9.3)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)) vitest: specifier: 'catalog:' - version: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)) + version: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)) packages/react/query: dependencies: @@ -1904,6 +1935,9 @@ packages: '@better-fetch/fetch@1.1.21': resolution: {integrity: sha512-/ImESw0sskqlVR94jB+5+Pxjf+xBwDZF/N5+y2/q4EqD7IARUTSpPfIo8uf39SYpCxyOCtbyYpUrZ3F/k0zT4A==} + '@bufbuild/protobuf@2.11.0': + resolution: {integrity: sha512-sBXGT13cpmPR5BMgHE6UEEfEaShh5Ror6rfN3yEK5si7QVrtZg8LEPQb0VVhiLRUslD2yLnXtnRzG035J/mZXQ==} + '@capsizecss/unpack@4.0.0': resolution: {integrity: sha512-VERIM64vtTP1C4mxQ5thVT9fK0apjPFobqybMtA1UdUujWka24ERHbRHFGmpbbhp73MhV+KSsHQH9C6uOTdEQA==} engines: {node: '>=18'} @@ -2085,11 +2119,6 @@ packages: effect: ^4.0.0-beta.33 ioredis: ^5.7.0 - '@effect/platform@0.94.5': - resolution: {integrity: sha512-z05APUiDDPbodhTkH/RJqOLoCU11bU2IZLfcwLFrld03+ob1VeqRnELQlmueLIYm6NZifHAtjl32V+GRt34y4A==} - peerDependencies: - effect: ^3.19.17 - '@effect/platform@0.95.0': resolution: {integrity: sha512-WDlRiWRSWlmhCPq09bvAofK0qr5vM4yNklXjoJdZHmugKRRTpN/Okn3ODnjgM/Kb/4hjMrRyrsUeH/Brieq7KA==} peerDependencies: @@ -2793,6 +2822,18 @@ packages: '@floating-ui/utils@0.2.11': resolution: {integrity: sha512-RiB/yIh78pcIxl6lLMG0CgBXAZ2Y0eVHqMPYugu+9U0AeT6YBeiJpf7lbdJNIugFP5SIjwNRgo4DhR1Qxi26Gg==} + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.8.0': + resolution: {integrity: sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==} + engines: {node: '>=6'} + hasBin: true + + '@hatchet-dev/typescript-sdk@1.19.0': + resolution: {integrity: sha512-O9O30qqWpQDUspBn2krGLWMJIhVvxofJPOwsFTXrWNJ0AL5kIznW3LQWFMbc0HyYijlXxAHP+M5R9NVTlM19ew==} + '@hono/node-server@1.19.9': resolution: {integrity: sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==} engines: {node: '>=18.14.1'} @@ -3066,6 +3107,9 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@jspm/core@2.1.0': resolution: {integrity: sha512-3sRl+pkyFY/kLmHl0cgHiFp2xEqErA8N3ECjMs7serSUBmoJ70lBa0PG5t0IM6WJgdZNyyI0R8YFfi5wM8+mzg==} @@ -3431,6 +3475,96 @@ packages: resolution: {integrity: sha512-hAX0pT/73190NLqBPPWSdBVGtbY6VOhWYK3qqHqtXQ1gK7kS2yz4+ivsN07hpJ6I3aeMtKP6J6npsEKOAzuTLA==} engines: {node: '>=20.0'} + '@opentelemetry/api-logs@0.208.0': + resolution: {integrity: sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/api@1.9.1': + resolution: {integrity: sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==} + engines: {node: '>=8.0.0'} + + '@opentelemetry/core@2.2.0': + resolution: {integrity: sha512-FuabnnUm8LflnieVxs6eP7Z383hgQU4W1e3KJS6aOG3RxWxcHyBxH8fDMHNgu/gFx/M2jvTOW/4/PHhLz6bjWw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/core@2.6.1': + resolution: {integrity: sha512-8xHSGWpJP9wBxgBpnqGL0R3PbdWQndL1Qp50qrg71+B28zK5OQmUgcDKLJgzyAAV38t4tOyLMGDD60LneR5W8g==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + + '@opentelemetry/exporter-trace-otlp-grpc@0.208.0': + resolution: {integrity: sha512-E/eNdcqVUTAT7BC+e8VOw/krqb+5rjzYkztMZ/o+eyJl+iEY6PfczPXpwWuICwvsm0SIhBoh9hmYED5Vh5RwIw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/instrumentation@0.208.0': + resolution: {integrity: sha512-Eju0L4qWcQS+oXxi6pgh7zvE2byogAkcsVv0OjHF/97iOz1N/aKE6etSGowYkie+YA1uo6DNwdSxaaNnLvcRlA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-exporter-base@0.208.0': + resolution: {integrity: sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-grpc-exporter-base@0.208.0': + resolution: {integrity: sha512-fGvAg3zb8fC0oJAzfz7PQppADI2HYB7TSt/XoCaBJFi1mSquNUjtHXEoviMgObLAa1NRIgOC1lsV1OUKi+9+lQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/otlp-transformer@0.208.0': + resolution: {integrity: sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/resources@2.2.0': + resolution: {integrity: sha512-1pNQf/JazQTMA0BiO5NINUzH0cbLbbl7mntLa4aJNmCCXSj0q03T5ZXXL0zw4G55TjdL9Tz32cznGClf+8zr5A==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/resources@2.6.1': + resolution: {integrity: sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-logs@0.208.0': + resolution: {integrity: sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + + '@opentelemetry/sdk-metrics@2.2.0': + resolution: {integrity: sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.9.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@2.2.0': + resolution: {integrity: sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-base@2.6.1': + resolution: {integrity: sha512-r86ut4T1e8vNwB35CqCcKd45yzqH6/6Wzvpk2/cZB8PsPLlZFTvrh8yfOS3CYZYcUmAx4hHTZJ8AO8Dj8nrdhw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/semantic-conventions@1.40.0': + resolution: {integrity: sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw==} + engines: {node: '>=14'} + '@oslojs/encoding@1.1.0': resolution: {integrity: sha512-70wQhgYmndg4GCPxPPxPGevRKqTIJ2Nh4OkiMWmDAVYsTQ+Ta7Sq+rPevXyXGdzr30/qZBnyOalCszoMxlyldQ==} @@ -3811,6 +3945,36 @@ packages: react: ^18.0.0 || ^19.0.0 react-dom: ^18.0.0 || ^19.0.0 + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + '@radix-ui/primitive@1.1.3': resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} @@ -5190,6 +5354,9 @@ packages: '@types/parse-json@4.0.2': resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} + '@types/qs@6.15.0': + resolution: {integrity: sha512-JawvT8iBVWpzTrz3EGw9BTQFg3BQNmwERdKE22vlTxawwtbyUSlMppvZYKLZzB5zgACXdXxbD3m1bXaMqP/9ow==} + '@types/react-dom@19.1.9': resolution: {integrity: sha512-qXRuZaOsAdXKFyOhRBg6Lqqc0yay13vN7KrIg4L7N4aaHN68ma9OK3NE1BoDFgFOTfM7zg+3/8+2n8rLUH3OKQ==} peerDependencies: @@ -5591,6 +5758,9 @@ packages: resolution: {integrity: sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==} hasBin: true + abort-controller-x@0.4.3: + resolution: {integrity: sha512-VtUwTNU8fpMwvWGn4xE93ywbogTYsuT+AUxAXOeelbXuQVIwNmC5YLeho9sH4vZ4ITW8414TTAOG1nW6uIVHCA==} + abort-controller@3.0.0: resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} engines: {node: '>=6.5'} @@ -5599,6 +5769,11 @@ packages: resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} engines: {node: '>= 0.6'} + acorn-import-attributes@1.9.5: + resolution: {integrity: sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ==} + peerDependencies: + acorn: ^8 + acorn-import-phases@1.0.4: resolution: {integrity: sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==} engines: {node: '>=10.13.0'} @@ -6022,6 +6197,9 @@ packages: bindings@1.5.0: resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + bintrees@1.0.2: + resolution: {integrity: sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==} + bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} @@ -7630,6 +7808,9 @@ packages: resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} engines: {node: '>=6'} + import-in-the-middle@2.0.6: + resolution: {integrity: sha512-3vZV3jX0XRFW3EJDTwzWoZa+RH1b8eTTx6YOCjglrLyPuepwoBti1k3L2dKwdCUrnVEfc5CuRuGstaC/uQJJaw==} + import-meta-resolve@4.2.0: resolution: {integrity: sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==} @@ -8928,6 +9109,9 @@ packages: modern-ahocorasick@1.1.0: resolution: {integrity: sha512-sEKPVl2rM+MNVkGQt3ChdmD8YsigmXdn5NifZn6jiwn9LRJpWm8F3guhaqrJT/JOat6pwpbXEk6kv+b9DMIjsQ==} + module-details-from-path@1.0.4: + resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} + morgan@1.10.1: resolution: {integrity: sha512-223dMRJtI/l25dJKWpgij2cMtywuG/WiUKXdvwfbhGKBhy1puASqXwFzmWZ7+K73vUPoR7SS2Qz2cI/g9MKw0A==} engines: {node: '>= 0.8.0'} @@ -9015,6 +9199,12 @@ packages: resolution: {integrity: sha512-Z4SmBUweYa09+o6pG+eASabEpP6QkQ70yHj351pQoEXIs8uHbaU2DWVmzBANKgflPa47A50PtB2+NgRpQvr7vA==} engines: {node: '>= 10'} + nice-grpc-common@2.0.2: + resolution: {integrity: sha512-7RNWbls5kAL1QVUOXvBsv1uO0wPQK3lHv+cY1gwkTzirnG1Nop4cBJZubpgziNbaVc/bl9QJcyvsf/NQxa3rjQ==} + + nice-grpc@2.1.14: + resolution: {integrity: sha512-GK9pKNxlvnU5FAdaw7i2FFuR9CqBspcE+if2tqnKXBcE0R8525wj4BZvfcwj7FjvqbssqKxRHt2nwedalbJlww==} + nlcst-to-string@4.0.0: resolution: {integrity: sha512-YKLBCcUYKAg0FNlOBT6aI91qFmSiFKiluk655WzPF+DDMA02qIyy8uiRqI8QXtcFpEvll12LpL5MXqEmAZ+dcA==} @@ -9554,6 +9744,10 @@ packages: resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} engines: {node: '>= 0.6.0'} + prom-client@15.1.3: + resolution: {integrity: sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==} + engines: {node: ^16 || ^18 || >=20} + promise-inflight@1.0.1: resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} peerDependencies: @@ -9579,6 +9773,10 @@ packages: property-information@7.1.0: resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + protobufjs@7.5.4: + resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==} + engines: {node: '>=12.0.0'} + proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -9884,6 +10082,10 @@ packages: resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} engines: {node: '>=0.10.0'} + require-in-the-middle@8.0.1: + resolution: {integrity: sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ==} + engines: {node: '>=9.3.0 || >=8.10.0 <9.0.0'} + require-like@0.1.2: resolution: {integrity: sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==} @@ -10472,6 +10674,9 @@ packages: tcp-port-used@1.0.2: resolution: {integrity: sha512-l7ar8lLUD3XS1V2lfoJlCBaeoaWo/2xfYt81hM7VlvR4RrMVFqfmzfhLVk40hAb368uitje5gPtBRL1m/DGvLA==} + tdigest@0.1.2: + resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} + terser-webpack-plugin@5.4.0: resolution: {integrity: sha512-Bn5vxm48flOIfkdl5CaD2+1CiUVbonWQ3KQPyP7/EuIl9Gbzq/gQFOzaMFUEgVjB1396tcK0SG8XcNJ/2kDH8g==} engines: {node: '>= 10.13.0'} @@ -10619,6 +10824,9 @@ packages: zod: optional: true + ts-error@1.0.6: + resolution: {integrity: sha512-tLJxacIQUM82IR7JO1UUkKlYuUTmoY9HBJAmNWFzheSlDS5SPMcNIepejHJa4BpPQLAcbRhRf3GDJzyj6rbKvA==} + ts-patch@3.3.0: resolution: {integrity: sha512-zAOzDnd5qsfEnjd9IGy1IRuvA7ygyyxxdxesbhMdutt8AHFjD8Vw8hU2rMF89HX1BKRWFYqKHrO8Q6lw0NeUZg==} hasBin: true @@ -12635,6 +12843,8 @@ snapshots: '@better-fetch/fetch@1.1.21': {} + '@bufbuild/protobuf@2.11.0': {} + '@capsizecss/unpack@4.0.0': dependencies: fontkitten: 1.0.3 @@ -12754,11 +12964,11 @@ snapshots: '@dprint/win32-x64@0.51.1': optional: true - '@effect-atom/atom@0.5.0(@effect/experimental@0.59.0(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1))(@effect/platform@0.94.5(effect@4.0.0-beta.33))(@effect/rpc@0.73.2(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)': + '@effect-atom/atom@0.5.0(@effect/experimental@0.59.0(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1))(@effect/platform@0.95.0(effect@4.0.0-beta.33))(@effect/rpc@0.73.2(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)': dependencies: - '@effect/experimental': 0.59.0(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1) - '@effect/platform': 0.94.5(effect@4.0.0-beta.33) - '@effect/rpc': 0.73.2(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33) + '@effect/experimental': 0.59.0(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1) + '@effect/platform': 0.95.0(effect@4.0.0-beta.33) + '@effect/rpc': 0.73.2(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33) effect: 4.0.0-beta.33 '@effect/atom-solid@4.0.0-beta.33(effect@4.0.0-beta.33)(solid-js@1.9.12)': @@ -12771,14 +12981,6 @@ snapshots: micromatch: 4.0.8 pkg-entry-points: 1.1.1 - '@effect/experimental@0.59.0(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1)': - dependencies: - '@effect/platform': 0.94.5(effect@4.0.0-beta.33) - effect: 4.0.0-beta.33 - uuid: 11.1.0 - optionalDependencies: - ioredis: 5.10.1 - '@effect/experimental@0.59.0(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)(ioredis@5.10.1)': dependencies: '@effect/platform': 0.95.0(effect@4.0.0-beta.33) @@ -12809,13 +13011,6 @@ snapshots: - bufferutil - utf-8-validate - '@effect/platform@0.94.5(effect@4.0.0-beta.33)': - dependencies: - effect: 4.0.0-beta.33 - find-my-way-ts: 0.1.6 - msgpackr: 1.11.9 - multipasta: 0.2.7 - '@effect/platform@0.95.0(effect@4.0.0-beta.33)': dependencies: effect: 4.0.0-beta.33 @@ -12823,9 +13018,9 @@ snapshots: msgpackr: 1.11.9 multipasta: 0.2.7 - '@effect/rpc@0.73.2(@effect/platform@0.94.5(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)': + '@effect/rpc@0.73.2(@effect/platform@0.95.0(effect@4.0.0-beta.33))(effect@4.0.0-beta.33)': dependencies: - '@effect/platform': 0.94.5(effect@4.0.0-beta.33) + '@effect/platform': 0.95.0(effect@4.0.0-beta.33) effect: 4.0.0-beta.33 msgpackr: 1.11.9 @@ -12863,7 +13058,7 @@ snapshots: '@effect/vitest@4.0.0-beta.33(effect@4.0.0-beta.33)(vitest@4.1.2)': dependencies: effect: 4.0.0-beta.33 - vitest: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) '@electric-sql/pglite-socket@0.0.20(@electric-sql/pglite@0.3.15)': dependencies: @@ -13240,6 +13435,44 @@ snapshots: '@floating-ui/utils@0.2.11': {} + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.0 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.8.0': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + + '@hatchet-dev/typescript-sdk@1.19.0': + dependencies: + '@bufbuild/protobuf': 2.11.0 + '@types/qs': 6.15.0 + abort-controller-x: 0.4.3 + axios: 1.13.6 + long: 5.3.2 + nice-grpc: 2.1.14 + nice-grpc-common: 2.0.2 + protobufjs: 7.5.4 + qs: 6.15.0 + semver: 7.7.4 + yaml: 2.8.3 + zod: 3.25.76 + zod-to-json-schema: 3.25.1(zod@3.25.76) + optionalDependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.6.1(@opentelemetry/api@1.9.1) + '@opentelemetry/exporter-trace-otlp-grpc': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/instrumentation': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.6.1(@opentelemetry/api@1.9.1) + prom-client: 15.1.3 + transitivePeerDependencies: + - debug + - supports-color + '@hono/node-server@1.19.9(hono@4.11.4)': dependencies: hono: 4.11.4 @@ -13386,7 +13619,7 @@ snapshots: '@jest/console@30.3.0': dependencies: '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 jest-message-util: 30.3.0 jest-util: 30.3.0 @@ -13398,7 +13631,7 @@ snapshots: dependencies: '@jest/fake-timers': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 jest-mock: 30.3.0 '@jest/expect-utils@30.3.0': @@ -13416,7 +13649,7 @@ snapshots: dependencies: '@jest/types': 30.3.0 '@sinonjs/fake-timers': 15.1.1 - '@types/node': 20.19.25 + '@types/node': 25.5.0 jest-message-util: 30.3.0 jest-mock: 30.3.0 jest-util: 30.3.0 @@ -13434,7 +13667,7 @@ snapshots: '@jest/pattern@30.0.1': dependencies: - '@types/node': 20.19.25 + '@types/node': 25.5.0 jest-regex-util: 30.0.1 '@jest/reporters@30.3.0': @@ -13445,7 +13678,7 @@ snapshots: '@jest/transform': 30.3.0 '@jest/types': 30.3.0 '@jridgewell/trace-mapping': 0.3.31 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 collect-v8-coverage: 1.0.3 exit-x: 0.2.2 @@ -13521,7 +13754,7 @@ snapshots: '@jest/schemas': 30.0.5 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 20.19.25 + '@types/node': 25.5.0 '@types/yargs': 17.0.35 chalk: 4.1.2 @@ -13549,6 +13782,8 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-sdsl/ordered-map@4.4.2': {} + '@jspm/core@2.1.0': {} '@kobalte/core@0.13.11(solid-js@1.9.12)': @@ -14301,7 +14536,7 @@ snapshots: tsconfig-paths: 4.2.0 tslib: 2.8.1 vite: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) - vitest: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) transitivePeerDependencies: - '@babel/traverse' - '@swc-node/register' @@ -14325,7 +14560,7 @@ snapshots: tsconfig-paths: 4.2.0 tslib: 2.8.1 vite: 8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) transitivePeerDependencies: - '@babel/traverse' - '@swc-node/register' @@ -14345,7 +14580,7 @@ snapshots: tslib: 2.8.1 optionalDependencies: vite: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) - vitest: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) transitivePeerDependencies: - '@babel/traverse' - '@swc-node/register' @@ -14365,7 +14600,7 @@ snapshots: tslib: 2.8.1 optionalDependencies: vite: 8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) transitivePeerDependencies: - '@babel/traverse' - '@swc-node/register' @@ -14442,6 +14677,124 @@ snapshots: '@oozcitak/util@10.0.0': {} + '@opentelemetry/api-logs@0.208.0': + dependencies: + '@opentelemetry/api': 1.9.1 + optional: true + + '@opentelemetry/api@1.9.1': + optional: true + + '@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/core@2.6.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/exporter-trace-otlp-grpc@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-grpc-exporter-base': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + optional: true + + '@opentelemetry/instrumentation@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.208.0 + import-in-the-middle: 2.0.6 + require-in-the-middle: 8.0.1 + transitivePeerDependencies: + - supports-color + optional: true + + '@opentelemetry/otlp-exporter-base@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.1) + optional: true + + '@opentelemetry/otlp-grpc-exporter-base@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@grpc/grpc-js': 1.14.3 + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-exporter-base': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.1) + optional: true + + '@opentelemetry/otlp-transformer@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.208.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.1) + protobufjs: 7.5.4 + optional: true + + '@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/resources@2.6.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.6.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/sdk-logs@0.208.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/api-logs': 0.208.0 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + optional: true + + '@opentelemetry/sdk-metrics@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + optional: true + + '@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/sdk-trace-base@2.6.1(@opentelemetry/api@1.9.1)': + dependencies: + '@opentelemetry/api': 1.9.1 + '@opentelemetry/core': 2.6.1(@opentelemetry/api@1.9.1) + '@opentelemetry/resources': 2.6.1(@opentelemetry/api@1.9.1) + '@opentelemetry/semantic-conventions': 1.40.0 + optional: true + + '@opentelemetry/semantic-conventions@1.40.0': + optional: true + '@oslojs/encoding@1.1.0': {} '@oven/bun-darwin-aarch64@1.3.0': @@ -14777,6 +15130,29 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + '@radix-ui/primitive@1.1.3': {} '@radix-ui/react-compose-refs@1.1.2(@types/react@19.1.13)(react@19.2.0)': @@ -16388,7 +16764,7 @@ snapshots: '@types/http-proxy@1.17.17': dependencies: - '@types/node': 20.19.25 + '@types/node': 25.5.0 '@types/istanbul-lib-coverage@2.0.6': {} @@ -16434,6 +16810,8 @@ snapshots: '@types/parse-json@4.0.2': {} + '@types/qs@6.15.0': {} + '@types/react-dom@19.1.9(@types/react@19.1.13)': dependencies: '@types/react': 19.1.13 @@ -16446,11 +16824,11 @@ snapshots: '@types/responselike@1.0.0': dependencies: - '@types/node': 20.19.25 + '@types/node': 25.5.0 '@types/sax@1.2.7': dependencies: - '@types/node': 24.12.0 + '@types/node': 25.5.0 '@types/semver@7.5.8': {} @@ -16816,7 +17194,7 @@ snapshots: obug: 2.1.1 std-env: 4.0.0 tinyrainbow: 3.1.0 - vitest: 4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) '@vitest/expect@4.1.2': dependencies: @@ -16887,7 +17265,7 @@ snapshots: sirv: 3.0.2 tinyglobby: 0.2.15 tinyrainbow: 3.1.0 - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) '@vitest/utils@4.1.2': dependencies: @@ -17004,6 +17382,8 @@ snapshots: jsonparse: 1.3.1 through: 2.3.8 + abort-controller-x@0.4.3: {} + abort-controller@3.0.0: dependencies: event-target-shim: 5.0.1 @@ -17013,6 +17393,11 @@ snapshots: mime-types: 2.1.35 negotiator: 0.6.3 + acorn-import-attributes@1.9.5(acorn@8.16.0): + dependencies: + acorn: 8.16.0 + optional: true + acorn-import-phases@1.0.4(acorn@8.16.0): dependencies: acorn: 8.16.0 @@ -17472,7 +17857,7 @@ snapshots: react: 18.2.0 react-dom: 18.2.0(react@18.2.0) solid-js: 1.9.12 - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) better-auth@1.4.10(@prisma/client@7.3.0(prisma@7.3.0(@types/react@19.1.13)(better-sqlite3@12.4.6)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(typescript@5.9.3))(typescript@5.9.3))(better-sqlite3@12.4.6)(mysql2@3.15.3)(prisma@7.3.0(@types/react@19.1.13)(better-sqlite3@12.4.6)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(typescript@5.9.3))(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(solid-js@1.9.12)(vitest@4.1.2): dependencies: @@ -17496,7 +17881,7 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) solid-js: 1.9.12 - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) better-auth@1.4.10(@prisma/client@7.3.0(prisma@7.3.0(@types/react@19.1.13)(better-sqlite3@12.6.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(typescript@5.9.3))(typescript@5.9.3))(better-sqlite3@12.6.2)(mysql2@3.15.3)(prisma@7.3.0(@types/react@19.1.13)(better-sqlite3@12.6.2)(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(typescript@5.9.3))(react-dom@19.2.0(react@19.2.0))(react@19.2.0)(solid-js@1.9.12)(vitest@4.1.2): dependencies: @@ -17520,7 +17905,7 @@ snapshots: react: 19.2.0 react-dom: 19.2.0(react@19.2.0) solid-js: 1.9.12 - vitest: 4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) + vitest: 4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) better-call@1.1.7(zod@4.3.6): dependencies: @@ -17556,6 +17941,9 @@ snapshots: dependencies: file-uri-to-path: 1.0.0 + bintrees@1.0.2: + optional: true + bl@4.1.0: dependencies: buffer: 5.7.1 @@ -19571,6 +19959,14 @@ snapshots: parent-module: 1.0.1 resolve-from: 4.0.0 + import-in-the-middle@2.0.6: + dependencies: + acorn: 8.16.0 + acorn-import-attributes: 1.9.5(acorn@8.16.0) + cjs-module-lexer: 2.2.0 + module-details-from-path: 1.0.4 + optional: true + import-meta-resolve@4.2.0: {} imurmurhash@0.1.4: {} @@ -19816,7 +20212,7 @@ snapshots: '@jest/expect': 30.3.0 '@jest/test-result': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 co: 4.6.0 dedent: 1.7.2(babel-plugin-macros@3.1.0) @@ -19891,7 +20287,7 @@ snapshots: '@jest/environment': 30.3.0 '@jest/fake-timers': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 jest-mock: 30.3.0 jest-util: 30.3.0 jest-validate: 30.3.0 @@ -19899,7 +20295,7 @@ snapshots: jest-haste-map@30.3.0: dependencies: '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -19938,7 +20334,7 @@ snapshots: jest-mock@30.3.0: dependencies: '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 jest-util: 30.3.0 jest-pnp-resolver@1.2.3(jest-resolve@30.3.0): @@ -19965,7 +20361,7 @@ snapshots: '@jest/test-result': 30.3.0 '@jest/transform': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 emittery: 0.13.1 exit-x: 0.2.2 @@ -19994,7 +20390,7 @@ snapshots: '@jest/test-result': 30.3.0 '@jest/transform': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 cjs-module-lexer: 2.2.0 collect-v8-coverage: 1.0.3 @@ -20041,7 +20437,7 @@ snapshots: jest-util@30.3.0: dependencies: '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 chalk: 4.1.2 ci-info: 4.4.0 graceful-fs: 4.2.11 @@ -20060,7 +20456,7 @@ snapshots: dependencies: '@jest/test-result': 30.3.0 '@jest/types': 30.3.0 - '@types/node': 20.19.25 + '@types/node': 25.5.0 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 0.13.1 @@ -20069,13 +20465,13 @@ snapshots: jest-worker@27.5.1: dependencies: - '@types/node': 20.19.25 + '@types/node': 25.5.0 merge-stream: 2.0.0 supports-color: 8.1.1 jest-worker@30.3.0: dependencies: - '@types/node': 20.19.25 + '@types/node': 25.5.0 '@ungap/structured-clone': 1.3.0 jest-util: 30.3.0 merge-stream: 2.0.0 @@ -21391,6 +21787,9 @@ snapshots: modern-ahocorasick@1.1.0: {} + module-details-from-path@1.0.4: + optional: true + morgan@1.10.1: dependencies: basic-auth: 2.0.1 @@ -21469,6 +21868,16 @@ snapshots: neotraverse@0.6.18: {} + nice-grpc-common@2.0.2: + dependencies: + ts-error: 1.0.6 + + nice-grpc@2.1.14: + dependencies: + '@grpc/grpc-js': 1.14.3 + abort-controller-x: 0.4.3 + nice-grpc-common: 2.0.2 + nlcst-to-string@4.0.0: dependencies: '@types/nlcst': 2.0.3 @@ -22205,6 +22614,12 @@ snapshots: process@0.11.10: {} + prom-client@15.1.3: + dependencies: + '@opentelemetry/api': 1.9.1 + tdigest: 0.1.2 + optional: true + promise-inflight@1.0.1: {} promise-retry@2.0.1: @@ -22227,6 +22642,21 @@ snapshots: property-information@7.1.0: {} + protobufjs@7.5.4: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 25.5.0 + long: 5.3.2 + proxy-addr@2.0.7: dependencies: forwarded: 0.2.0 @@ -22633,6 +23063,14 @@ snapshots: require-from-string@2.0.2: {} + require-in-the-middle@8.0.1: + dependencies: + debug: 4.4.3 + module-details-from-path: 1.0.4 + transitivePeerDependencies: + - supports-color + optional: true + require-like@0.1.2: {} requires-port@1.0.0: {} @@ -23376,6 +23814,11 @@ snapshots: transitivePeerDependencies: - supports-color + tdigest@0.1.2: + dependencies: + bintrees: 1.0.2 + optional: true + terser-webpack-plugin@5.4.0(@swc/core@1.15.8(@swc/helpers@0.5.19))(esbuild@0.27.4)(webpack@5.105.4(@swc/core@1.15.8(@swc/helpers@0.5.19))(esbuild@0.27.4)): dependencies: '@jridgewell/trace-mapping': 0.3.31 @@ -23501,6 +23944,8 @@ snapshots: valibot: 1.3.1(typescript@5.9.3) zod: 4.3.6 + ts-error@1.0.6: {} + ts-patch@3.3.0: dependencies: chalk: 4.1.2 @@ -24049,6 +24494,16 @@ snapshots: - supports-color - typescript + vite-tsconfig-paths@6.1.1(typescript@5.9.3)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): + dependencies: + debug: 4.4.3 + globrex: 0.1.2 + tsconfck: 3.1.6(typescript@5.9.3) + vite: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) + transitivePeerDependencies: + - supports-color + - typescript + vite@5.4.21(@types/node@25.5.0)(lightningcss@1.32.0)(terser@5.46.1): dependencies: esbuild: 0.21.5 @@ -24183,7 +24638,7 @@ snapshots: optionalDependencies: vite: 8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) - vitest@4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)): + vitest@4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.2 '@vitest/mocker': 4.1.2(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3)) @@ -24206,13 +24661,14 @@ snapshots: vite: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.20.6)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: + '@opentelemetry/api': 1.9.1 '@types/node': 20.19.25 '@vitest/ui': 4.1.2(vitest@4.1.2) jsdom: 27.2.0 transitivePeerDependencies: - msw - vitest@4.1.2(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): + vitest@4.1.2(@opentelemetry/api@1.9.1)(@types/node@20.19.25)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.2 '@vitest/mocker': 4.1.2(vite@8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) @@ -24235,13 +24691,14 @@ snapshots: vite: 8.0.3(@types/node@20.19.25)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: + '@opentelemetry/api': 1.9.1 '@types/node': 20.19.25 '@vitest/ui': 4.1.2(vitest@4.1.2) jsdom: 27.2.0 transitivePeerDependencies: - msw - vitest@4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): + vitest@4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.2 '@vitest/mocker': 4.1.2(vite@8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) @@ -24264,6 +24721,7 @@ snapshots: vite: 8.0.3(@types/node@25.5.0)(esbuild@0.17.6)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: + '@opentelemetry/api': 1.9.1 '@types/node': 25.5.0 '@vitest/ui': 4.1.2(vitest@4.1.2) jsdom: 27.2.0 @@ -24271,7 +24729,7 @@ snapshots: - msw optional: true - vitest@4.1.2(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): + vitest@4.1.2(@opentelemetry/api@1.9.1)(@types/node@25.5.0)(@vitest/ui@4.1.2)(jsdom@27.2.0)(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): dependencies: '@vitest/expect': 4.1.2 '@vitest/mocker': 4.1.2(vite@8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) @@ -24294,6 +24752,7 @@ snapshots: vite: 8.0.3(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) why-is-node-running: 2.3.0 optionalDependencies: + '@opentelemetry/api': 1.9.1 '@types/node': 25.5.0 '@vitest/ui': 4.1.2(vitest@4.1.2) jsdom: 27.2.0 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index b4d6058..a657ba6 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,153 +1,157 @@ packages: - - "apps/*" - - "packages/*" - - "packages/*/*" - - "!packages/*/dist" - - "!packages/*/*/dist" + - apps/* + - packages/* + - packages/*/* + - '!packages/*/dist' + - '!packages/*/*/dist' + catalog: - "@astrojs/starlight": "0.36.3" - "@astrojs/starlight-tailwind": "4.0.2" - "@better-auth/cli": "1.4.10" - "@corvu/drawer": "0.2.4" - "@dprint/formatter": "^0.5.1" - "@dprint/typescript": "^0.95.15" - "@effect-atom/atom": "0.5.0" - "@effect/atom-solid": "4.0.0-beta.33" - "@effect/build-utils": "0.8.9" - "@effect/experimental": "0.59.0" - "@effect/language-service": "0.56.0" - "@effect/platform": "4.0.0-beta.33" - "@effect/platform-node": "4.0.0-beta.33" - "@effect/tsgo": "0.0.17" - "@effect/vitest": "4.0.0-beta.33" - "@kobalte/core": "0.13.11" - "@nx/js": "22.6.2" - "@nx/node": "22.6.2" - "@nx/react": "22.6.2" - "@nx/vite": "22.6.2" - "@nx/web": "22.6.2" - "@prisma/adapter-better-sqlite3": "7.3.0" - "@prisma/adapter-pg": "7.3.0" - "@prisma/client": "7.3.0" - "@prisma/client-runtime-utils": "7.3.0" - "@prisma/generator": "7.3.0" - "@prisma/generator-helper": "7.3.0" - "@prisma/internals": "7.3.0" - "@radix-ui/react-form": "0.1.8" - "@radix-ui/react-label": "2.1.8" - "@radix-ui/react-slot": "1.2.4" - "@react-router/dev": "7.12.0" - "@react-router/node": "7.12.0" - "@react-router/serve": "7.12.0" - "@remix-run/node": "2.17.4" - "@remix-run/node-fetch-server": "0.12.0" - "@remix-run/react": "2.17.4" - "@remix-run/serve": "2.17.2" - "@solidjs/start": "1.2.0" - "@solidjs/testing-library": "0.8.10" - "@swc-node/register": "1.11.1" - "@swc/core": "1.15.8" - "@swc/helpers": "0.5.19" - "@tailwindcss/postcss": "4.1.17" - "@tailwindcss/typography": "0.5.19" - "@tailwindcss/vite": "4.1.18" - "@tanstack/devtools-vite": "^0.3.11" - "@tanstack/query-core": "5.90.20" - "@tanstack/react-form": "1.25.0" - "@tanstack/react-query": "5.90.10" - "@tanstack/react-query-devtools": "5.91.0" - "@tanstack/react-router": "1.139.3" - "@tanstack/react-router-devtools": "1.139.3" - "@tanstack/router-plugin": "^1.133.21" - "@tanstack/solid-form": "1.25.0" - "@tanstack/solid-query": "5.90.23" - "@tanstack/solid-query-devtools": "5.91.0" - "@tanstack/solid-router": "^1.133.20" - "@tanstack/solid-router-devtools": "^1.133.20" - "@tanstack/solid-router-ssr-query": "^1.132.25" - "@tanstack/solid-start": "^1.132.25" - "@testing-library/dom": "10.4.1" - "@testing-library/jest-dom": "6.9.1" - "@testing-library/react": "16.3.0" - "@types/better-sqlite3": "7.6.13" - "@types/compression": "1.8.1" - "@types/express": "5.0.5" - "@types/node": "20.19.25" - "@types/pg": "^8.15.6" - "@types/react": "19.2.7" - "@types/react-dom": "19.2.3" - "@types/validator": "13.15.10" - "@typescript/native-preview": "7.0.0-dev.20260322.1" - "@vitejs/plugin-react": "5.1.1" - "@vitejs/plugin-rsc": "0.5.1" - "@vitest/coverage-v8": "^4.1.0" - "@vitest/ui": "^4.1.0" - astro: "5.17.1" - autoprefixer: "10.4.22" - better-auth: "1.4.10" - better-sqlite3: "12.6.2" - class-variance-authority: "0.7.1" - clsx: "2.1.1" - compression: "1.8.1" - cross-env: "10.1.0" - dotenv: "^17.2.3" - dprint: "0.51.1" - effect: "4.0.0-beta.33" - effect-prisma-generator: "^0.4.0" - eta: "^4.5.0" - express: "5.1.0" - globals: "16.5.0" - globby: "16.0.0" - husky: "9.1.7" - isbot: "5.1.32" - jiti: "2.6.1" - jsdom: "27.2.0" - kysely: "0.28.11" - lint-staged: "16.2.7" - lucide-react: "^0.554.0" - lucide-solid: "^0.554.0" - nx-oxlint: "0.1.1" - oxlint: "0.15.15" - pg: "^8.16.3" - postcss: "8.5.6" - prettier: "2.8.8" - prisma: "7.3.0" - react: "19.2.0" - react-dom: "19.2.0" - react-router: "7.12.0" - sharp: "0.34.5" - solid-js: "^1.9.9" - starlight-sidebar-topics: "0.6.2" - starlight-sidebar-topics-dropdown: "0.5.2" - starlight-theme-nova: "0.10.0" - tailwind-merge: "3.4.0" - tailwindcss: "4.1.18" - tailwindcss-animate: "1.0.7" - ts-patch: "3.3.0" - tslib: "2.8.1" - tsx: "4.20.6" - tw-animate-css: "1.4.0" - typescript: "5.9.3" - ultracite: "6.3.6" - validator: "13.15.23" - verdaccio: "6.2.2" - vite: "8.0.3" - vite-plugin-devtools-json: "0.4.1" - vite-plugin-dts: "4.5.4" - vite-plugin-lucide-preprocess: "1.4.8" - vite-plugin-solid: "2.11.11" - vite-tsconfig-paths: "6.1.1" - vitest: "4.1.2" - web-vitals: "5.1.0" - zod: "4.3.6" -catalogMode: "prefer" + '@astrojs/starlight': 0.36.3 + '@astrojs/starlight-tailwind': 4.0.2 + '@better-auth/cli': 1.4.10 + '@corvu/drawer': 0.2.4 + '@dprint/formatter': ^0.5.1 + '@dprint/typescript': ^0.95.15 + '@effect-atom/atom': 0.5.0 + '@effect/atom-solid': 4.0.0-beta.33 + '@effect/build-utils': 0.8.9 + '@effect/experimental': 0.59.0 + '@effect/language-service': 0.56.0 + '@effect/platform': 4.0.0-beta.33 + '@effect/platform-node': 4.0.0-beta.33 + '@effect/tsgo': 0.0.17 + '@effect/vitest': 4.0.0-beta.33 + '@kobalte/core': 0.13.11 + '@nx/js': 22.6.2 + '@nx/node': 22.6.2 + '@nx/react': 22.6.2 + '@nx/vite': 22.6.2 + '@nx/web': 22.6.2 + '@prisma/adapter-better-sqlite3': 7.3.0 + '@prisma/adapter-pg': 7.3.0 + '@prisma/client': 7.3.0 + '@prisma/client-runtime-utils': 7.3.0 + '@prisma/generator': 7.3.0 + '@prisma/generator-helper': 7.3.0 + '@prisma/internals': 7.3.0 + '@radix-ui/react-form': 0.1.8 + '@radix-ui/react-label': 2.1.8 + '@radix-ui/react-slot': 1.2.4 + '@react-router/dev': 7.12.0 + '@react-router/node': 7.12.0 + '@react-router/serve': 7.12.0 + '@remix-run/node': 2.17.4 + '@remix-run/node-fetch-server': 0.12.0 + '@remix-run/react': 2.17.4 + '@remix-run/serve': 2.17.2 + '@solidjs/start': 1.2.0 + '@solidjs/testing-library': 0.8.10 + '@swc-node/register': 1.11.1 + '@swc/core': 1.15.8 + '@swc/helpers': 0.5.19 + '@tailwindcss/postcss': 4.1.17 + '@tailwindcss/typography': 0.5.19 + '@tailwindcss/vite': 4.1.18 + '@tanstack/devtools-vite': ^0.3.11 + '@tanstack/query-core': 5.90.20 + '@tanstack/react-form': 1.25.0 + '@tanstack/react-query': 5.90.10 + '@tanstack/react-query-devtools': 5.91.0 + '@tanstack/react-router': 1.139.3 + '@tanstack/react-router-devtools': 1.139.3 + '@tanstack/router-plugin': ^1.133.21 + '@tanstack/solid-form': 1.25.0 + '@tanstack/solid-query': 5.90.23 + '@tanstack/solid-query-devtools': 5.91.0 + '@tanstack/solid-router': ^1.133.20 + '@tanstack/solid-router-devtools': ^1.133.20 + '@tanstack/solid-router-ssr-query': ^1.132.25 + '@tanstack/solid-start': ^1.132.25 + '@testing-library/dom': 10.4.1 + '@testing-library/jest-dom': 6.9.1 + '@testing-library/react': 16.3.0 + '@types/better-sqlite3': 7.6.13 + '@types/compression': 1.8.1 + '@types/express': 5.0.5 + '@types/node': 20.19.25 + '@types/pg': ^8.15.6 + '@types/react': 19.2.7 + '@types/react-dom': 19.2.3 + '@types/validator': 13.15.10 + '@typescript/native-preview': 7.0.0-dev.20260322.1 + '@vitejs/plugin-react': 5.1.1 + '@vitejs/plugin-rsc': 0.5.1 + '@vitest/coverage-v8': 4.1.2 + '@vitest/ui': ^4.1.0 + astro: 5.17.1 + autoprefixer: 10.4.22 + better-auth: 1.4.10 + better-sqlite3: 12.6.2 + class-variance-authority: 0.7.1 + clsx: 2.1.1 + compression: 1.8.1 + cross-env: 10.1.0 + dotenv: ^17.2.3 + dprint: 0.51.1 + effect: 4.0.0-beta.33 + effect-prisma-generator: ^0.4.0 + eta: ^4.5.0 + express: 5.1.0 + globals: 16.5.0 + globby: 16.0.0 + husky: 9.1.7 + isbot: 5.1.32 + jiti: 2.6.1 + jsdom: 27.2.0 + kysely: 0.28.11 + lint-staged: 16.2.7 + lucide-react: ^0.554.0 + lucide-solid: ^0.554.0 + nx-oxlint: 0.1.1 + oxlint: 0.15.15 + pg: ^8.16.3 + postcss: 8.5.6 + prettier: 2.8.8 + prisma: 7.3.0 + react: 19.2.0 + react-dom: 19.2.0 + react-router: 7.12.0 + sharp: 0.34.5 + solid-js: ^1.9.9 + starlight-sidebar-topics: 0.6.2 + starlight-sidebar-topics-dropdown: 0.5.2 + starlight-theme-nova: 0.10.0 + tailwind-merge: 3.4.0 + tailwindcss: 4.1.18 + tailwindcss-animate: 1.0.7 + ts-patch: 3.3.0 + tslib: 2.8.1 + tsx: 4.20.6 + tw-animate-css: 1.4.0 + typescript: 5.9.3 + ultracite: 6.3.6 + validator: 13.15.23 + verdaccio: 6.2.2 + vite: 8.0.3 + vite-plugin-devtools-json: 0.4.1 + vite-plugin-dts: 4.5.4 + vite-plugin-lucide-preprocess: 1.4.8 + vite-plugin-solid: 2.11.11 + vite-tsconfig-paths: 6.1.1 + vitest: 4.1.2 + web-vitals: 5.1.0 + zod: 4.3.6 + +catalogMode: prefer + cleanupUnusedCatalogs: true + onlyBuiltDependencies: - - "@parcel/watcher" - - "@swc/core" - - "@tailwindcss/oxide" - - "better-sqlite3" - - "core-js" - - "esbuild" - - "msgpackr-extract" - - "nx" + - '@parcel/watcher' + - '@swc/core' + - '@tailwindcss/oxide' + - better-sqlite3 + - core-js + - esbuild + - msgpackr-extract + - nx From a02eee5768a3cda202b2310a747ab11141e9c6ea Mon Sep 17 00:00:00 2001 From: Andres David Jimenez Sulbaran Date: Sun, 29 Mar 2026 10:27:47 -0600 Subject: [PATCH 03/24] feat(nx): add skills for managing Nx plugins, running tasks, and workspace exploration - Created `nx-plugins` skill for discovering and installing Nx plugins. - Introduced `nx-run-tasks` skill to assist with executing tasks in an Nx workspace. - Developed `nx-workspace` skill for exploring workspace structure, project configurations, and available targets. - Added reference documentation for affected projects in the Nx workspace. - Updated agent guidelines to emphasize the use of new skills and best practices for Nx commands. - Enhanced documentation for the Hatchet package with improved examples and architecture details. - Implemented unit tests for the effectifier module in the Hatchet package. --- .../skills/link-workspace-packages/SKILL.md | 127 ++++++ .agents/skills/monitor-ci/SKILL.md | 301 ++++++++++++ .../skills/monitor-ci/references/fix-flows.md | 108 +++++ .../monitor-ci/scripts/ci-poll-decide.mjs | 428 ++++++++++++++++++ .../monitor-ci/scripts/ci-state-update.mjs | 160 +++++++ .agents/skills/nx-generate/SKILL.md | 166 +++++++ .agents/skills/nx-import/SKILL.md | 238 ++++++++++ .agents/skills/nx-import/references/VITE.md | 397 ++++++++++++++++ .agents/skills/nx-plugins/SKILL.md | 9 + .agents/skills/nx-run-tasks/SKILL.md | 58 +++ .agents/skills/nx-workspace/SKILL.md | 286 ++++++++++++ .../nx-workspace/references/AFFECTED.md | 27 ++ .codex/agents/ci-monitor-subagent.toml | 46 ++ .codex/config.toml | 11 + .github/agents/ci-monitor-subagent.agent.md | 49 ++ .github/prompts/monitor-ci.prompt.md | 301 ++++++++++++ .../skills/link-workspace-packages/SKILL.md | 127 ++++++ .github/skills/monitor-ci/SKILL.md | 301 ++++++++++++ .../skills/monitor-ci/references/fix-flows.md | 108 +++++ .../monitor-ci/scripts/ci-poll-decide.mjs | 428 ++++++++++++++++++ .../monitor-ci/scripts/ci-state-update.mjs | 160 +++++++ .github/skills/nx-generate/SKILL.md | 166 +++++++ .github/skills/nx-import/SKILL.md | 238 ++++++++++ .github/skills/nx-import/references/VITE.md | 397 ++++++++++++++++ .github/skills/nx-plugins/SKILL.md | 9 + .github/skills/nx-run-tasks/SKILL.md | 58 +++ .github/skills/nx-workspace/SKILL.md | 286 ++++++++++++ .../nx-workspace/references/AFFECTED.md | 27 ++ .gitignore | 3 +- .opencode/agents/ci-monitor-subagent.md | 50 ++ .opencode/commands/monitor-ci.md | 301 ++++++++++++ .../skills/link-workspace-packages/SKILL.md | 127 ++++++ .opencode/skills/monitor-ci/SKILL.md | 301 ++++++++++++ .../skills/monitor-ci/references/fix-flows.md | 108 +++++ .../monitor-ci/scripts/ci-poll-decide.mjs | 428 ++++++++++++++++++ .../monitor-ci/scripts/ci-state-update.mjs | 160 +++++++ .opencode/skills/nx-generate/SKILL.md | 166 +++++++ .opencode/skills/nx-import/SKILL.md | 238 ++++++++++ .opencode/skills/nx-import/references/VITE.md | 397 ++++++++++++++++ .opencode/skills/nx-plugins/SKILL.md | 9 + .opencode/skills/nx-run-tasks/SKILL.md | 58 +++ .opencode/skills/nx-workspace/SKILL.md | 286 ++++++++++++ .../nx-workspace/references/AFFECTED.md | 27 ++ AGENTS.md | 21 +- docs/tasks/hatchet-tasks.md | 4 +- opencode.json | 13 + packages/hatchet/README.md | 13 + .../hatchet/tests/unit/effectifier.test.ts | 413 +++++++++++++++++ 48 files changed, 8127 insertions(+), 13 deletions(-) create mode 100644 .agents/skills/link-workspace-packages/SKILL.md create mode 100644 .agents/skills/monitor-ci/SKILL.md create mode 100644 .agents/skills/monitor-ci/references/fix-flows.md create mode 100644 .agents/skills/monitor-ci/scripts/ci-poll-decide.mjs create mode 100644 .agents/skills/monitor-ci/scripts/ci-state-update.mjs create mode 100644 .agents/skills/nx-generate/SKILL.md create mode 100644 .agents/skills/nx-import/SKILL.md create mode 100644 .agents/skills/nx-import/references/VITE.md create mode 100644 .agents/skills/nx-plugins/SKILL.md create mode 100644 .agents/skills/nx-run-tasks/SKILL.md create mode 100644 .agents/skills/nx-workspace/SKILL.md create mode 100644 .agents/skills/nx-workspace/references/AFFECTED.md create mode 100644 .codex/agents/ci-monitor-subagent.toml create mode 100644 .codex/config.toml create mode 100644 .github/agents/ci-monitor-subagent.agent.md create mode 100644 .github/prompts/monitor-ci.prompt.md create mode 100644 .github/skills/link-workspace-packages/SKILL.md create mode 100644 .github/skills/monitor-ci/SKILL.md create mode 100644 .github/skills/monitor-ci/references/fix-flows.md create mode 100644 .github/skills/monitor-ci/scripts/ci-poll-decide.mjs create mode 100644 .github/skills/monitor-ci/scripts/ci-state-update.mjs create mode 100644 .github/skills/nx-generate/SKILL.md create mode 100644 .github/skills/nx-import/SKILL.md create mode 100644 .github/skills/nx-import/references/VITE.md create mode 100644 .github/skills/nx-plugins/SKILL.md create mode 100644 .github/skills/nx-run-tasks/SKILL.md create mode 100644 .github/skills/nx-workspace/SKILL.md create mode 100644 .github/skills/nx-workspace/references/AFFECTED.md create mode 100644 .opencode/agents/ci-monitor-subagent.md create mode 100644 .opencode/commands/monitor-ci.md create mode 100644 .opencode/skills/link-workspace-packages/SKILL.md create mode 100644 .opencode/skills/monitor-ci/SKILL.md create mode 100644 .opencode/skills/monitor-ci/references/fix-flows.md create mode 100644 .opencode/skills/monitor-ci/scripts/ci-poll-decide.mjs create mode 100644 .opencode/skills/monitor-ci/scripts/ci-state-update.mjs create mode 100644 .opencode/skills/nx-generate/SKILL.md create mode 100644 .opencode/skills/nx-import/SKILL.md create mode 100644 .opencode/skills/nx-import/references/VITE.md create mode 100644 .opencode/skills/nx-plugins/SKILL.md create mode 100644 .opencode/skills/nx-run-tasks/SKILL.md create mode 100644 .opencode/skills/nx-workspace/SKILL.md create mode 100644 .opencode/skills/nx-workspace/references/AFFECTED.md create mode 100644 opencode.json create mode 100644 packages/hatchet/tests/unit/effectifier.test.ts diff --git a/.agents/skills/link-workspace-packages/SKILL.md b/.agents/skills/link-workspace-packages/SKILL.md new file mode 100644 index 0000000..de13134 --- /dev/null +++ b/.agents/skills/link-workspace-packages/SKILL.md @@ -0,0 +1,127 @@ +--- +name: link-workspace-packages +description: 'Link workspace packages in monorepos (npm, yarn, pnpm, bun). USE WHEN: (1) you just created or generated new packages and need to wire up their dependencies, (2) user imports from a sibling package and needs to add it as a dependency, (3) you get resolution errors for workspace packages (@org/*) like "cannot find module", "failed to resolve import", "TS2307", or "cannot resolve". DO NOT patch around with tsconfig paths or manual package.json edits - use the package manager''s workspace commands to fix actual linking.' +--- + +# Link Workspace Packages + +Add dependencies between packages in a monorepo. All package managers support workspaces but with different syntax. + +## Detect Package Manager + +Check whether there's a `packageManager` field in the root-level `package.json`. + +Alternatively check lockfile in repo root: + +- `pnpm-lock.yaml` → pnpm +- `yarn.lock` → yarn +- `bun.lock` / `bun.lockb` → bun +- `package-lock.json` → npm + +## Workflow + +1. Identify consumer package (the one importing) +2. Identify provider package(s) (being imported) +3. Add dependency using package manager's workspace syntax +4. Verify symlinks created in consumer's `node_modules/` + +--- + +## pnpm + +Uses `workspace:` protocol - symlinks only created when explicitly declared. + +```bash +# From consumer directory +pnpm add @org/ui --workspace + +# Or with --filter from anywhere +pnpm add @org/ui --filter @org/app --workspace +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## yarn (v2+/berry) + +Also uses `workspace:` protocol. + +```bash +yarn workspace @org/app add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:^" } } +``` + +--- + +## npm + +No `workspace:` protocol. npm auto-symlinks workspace packages. + +```bash +npm install @org/ui --workspace @org/app +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "*" } } +``` + +npm resolves to local workspace automatically during install. + +--- + +## bun + +Supports `workspace:` protocol (pnpm-compatible). + +```bash +cd packages/app && bun add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## Examples + +**Example 1: pnpm - link ui lib to app** + +```bash +pnpm add @org/ui --filter @org/app --workspace +``` + +**Example 2: npm - link multiple packages** + +```bash +npm install @org/data-access @org/ui --workspace @org/dashboard +``` + +**Example 3: Debug "Cannot find module"** + +1. Check if dependency is declared in consumer's `package.json` +2. If not, add it using appropriate command above +3. Run install (`pnpm install`, `npm install`, etc.) + +## Notes + +- Symlinks appear in `/node_modules/@org/` +- **Hoisting differs by manager:** + - npm/bun: hoist shared deps to root `node_modules` + - pnpm: no hoisting (strict isolation, prevents phantom deps) + - yarn berry: uses Plug'n'Play by default (no `node_modules`) +- Root `package.json` should have `"private": true` to prevent accidental publish diff --git a/.agents/skills/monitor-ci/SKILL.md b/.agents/skills/monitor-ci/SKILL.md new file mode 100644 index 0000000..48b71bf --- /dev/null +++ b/.agents/skills/monitor-ci/SKILL.md @@ -0,0 +1,301 @@ +--- +name: monitor-ci +description: Monitor Nx Cloud CI pipeline and handle self-healing fixes. USE WHEN user says "monitor ci", "watch ci", "ci monitor", "watch ci for this branch", "track ci", "check ci status", wants to track CI status, or needs help with self-healing CI fixes. Prefer this skill over native CI provider tools (gh, glab, etc.) for CI monitoring — it integrates with Nx Cloud self-healing which those tools cannot access. +--- + +# Monitor CI Command + +You are the orchestrator for monitoring Nx Cloud CI pipeline executions and handling self-healing fixes. You spawn subagents to interact with Nx Cloud, run deterministic decision scripts, and take action based on the results. + +## Context + +- **Current Branch:** !`git branch --show-current` +- **Current Commit:** !`git rev-parse --short HEAD` +- **Remote Status:** !`git status -sb | head -1` + +## User Instructions + +$ARGUMENTS + +**Important:** If user provides specific instructions, respect them over default behaviors described below. + +## Configuration Defaults + +| Setting | Default | Description | +| ------------------------- | ------------- | ------------------------------------------------------------------------- | +| `--max-cycles` | 10 | Maximum **agent-initiated** CI Attempt cycles before timeout | +| `--timeout` | 120 | Maximum duration in minutes | +| `--verbosity` | medium | Output level: minimal, medium, verbose | +| `--branch` | (auto-detect) | Branch to monitor | +| `--fresh` | false | Ignore previous context, start fresh | +| `--auto-fix-workflow` | false | Attempt common fixes for pre-CI-Attempt failures (e.g., lockfile updates) | +| `--new-cipe-timeout` | 10 | Minutes to wait for new CI Attempt after action | +| `--local-verify-attempts` | 3 | Max local verification + enhance cycles before pushing to CI | + +Parse any overrides from `$ARGUMENTS` and merge with defaults. + +## Nx Cloud Connection Check + +Before starting the monitoring loop, verify the workspace is connected to Nx Cloud. Without this connection, no CI data is available and the entire skill is inoperable. + +### Step 0: Verify Nx Cloud Connection + +1. **Check `nx.json`** at workspace root for `nxCloudId` or `nxCloudAccessToken` +2. **If `nx.json` missing OR neither property exists** → exit with: + + ``` + Nx Cloud not connected. Unlock 70% faster CI and auto-fix broken PRs with https://nx.dev/nx-cloud + ``` + +3. **If connected** → continue to main loop + +## Architecture Overview + +1. **This skill (orchestrator)**: spawns subagents, runs scripts, prints status, does local coding work +2. **ci-monitor-subagent (haiku)**: calls one MCP tool (ci_information or update_self_healing_fix), returns structured result, exits +3. **ci-poll-decide.mjs (deterministic script)**: takes ci_information result + state, returns action + status message +4. **ci-state-update.mjs (deterministic script)**: manages budget gates, post-action state transitions, and cycle classification + +## Status Reporting + +The decision script handles message formatting based on verbosity. When printing messages to the user: + +- Prepend `[monitor-ci]` to every message from the script's `message` field +- For your own action messages (e.g. "Applying fix via MCP..."), also prepend `[monitor-ci]` + +## Anti-Patterns + +These behaviors cause real problems — racing with self-healing, losing CI progress, or wasting context: + +| Anti-Pattern | Why It's Bad | +| ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------ | +| Using CI provider CLIs with `--watch` flags (e.g., `gh pr checks --watch`, `glab ci status -w`) | Bypasses Nx Cloud self-healing entirely | +| Writing custom CI polling scripts | Unreliable, pollutes context, no self-healing | +| Cancelling CI workflows/pipelines | Destructive, loses CI progress | +| Running CI checks on main agent | Wastes main agent context tokens | +| Independently analyzing/fixing CI failures while polling | Races with self-healing, causes duplicate fixes and confused state | + +**If this skill fails to activate**, the fallback is: + +1. Use CI provider CLI for a one-time, read-only status check (single call, no watch/polling flags) +2. Immediately delegate to this skill with gathered context +3. Do not continue polling on main agent — it wastes context tokens and bypasses self-healing + +## Session Context Behavior + +If the user previously ran `/monitor-ci` in this session, you may have prior state (poll counts, last CI Attempt URL, etc.). Resume from that state unless `--fresh` is set, in which case discard it and start from Step 1. + +## MCP Tool Reference + +Three field sets control polling efficiency — use the lightest set that gives you what you need: + +```yaml +WAIT_FIELDS: 'cipeUrl,commitSha,cipeStatus' +LIGHT_FIELDS: 'cipeStatus,cipeUrl,branch,commitSha,selfHealingStatus,verificationStatus,userAction,failedTaskIds,verifiedTaskIds,selfHealingEnabled,failureClassification,couldAutoApplyTasks,autoApplySkipped,autoApplySkipReason,shortLink,confidence,confidenceReasoning,hints,selfHealingSkippedReason,selfHealingSkipMessage' +HEAVY_FIELDS: 'taskOutputSummary,suggestedFix,suggestedFixReasoning,suggestedFixDescription' +``` + +The `ci_information` tool accepts `branch` (optional, defaults to current git branch), `select` (comma-separated field names), and `pageToken` (0-based pagination for long strings). + +The `update_self_healing_fix` tool accepts a `shortLink` and an action: `APPLY`, `REJECT`, or `RERUN_ENVIRONMENT_STATE`. + +## Default Behaviors by Status + +The decision script returns one of the following statuses. This table defines the **default behavior** for each. User instructions can override any of these. + +**Simple exits** — just report and exit: + +| Status | Default Behavior | +| ----------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `ci_success` | Exit with success | +| `cipe_canceled` | Exit, CI was canceled | +| `cipe_timed_out` | Exit, CI timed out | +| `polling_timeout` | Exit, polling timeout reached | +| `circuit_breaker` | Exit, no progress after 5 consecutive polls | +| `environment_rerun_cap` | Exit, environment reruns exhausted | +| `fix_auto_applying` | Self-healing is handling it — just record `last_cipe_url`, enter wait mode. No MCP call or local git ops needed. | +| `error` | Wait 60s and loop | + +**Statuses requiring action** — when handling these in Step 3, read `references/fix-flows.md` for the detailed flow: + +| Status | Summary | +| ------------------------ | --------------------------------------------------------------------------------------------- | +| `fix_auto_apply_skipped` | Fix verified but auto-apply skipped (e.g., loop prevention). Inform user, offer manual apply. | +| `fix_apply_ready` | Fix verified (all tasks or e2e-only). Apply via MCP. | +| `fix_needs_local_verify` | Fix has unverified non-e2e tasks. Run locally, then apply or enhance. | +| `fix_needs_review` | Fix verification failed/not attempted. Analyze and decide. | +| `fix_failed` | Self-healing failed. Fetch heavy data, attempt local fix (gate check first). | +| `no_fix` | No fix available. Fetch heavy data, attempt local fix (gate check first) or exit. | +| `environment_issue` | Request environment rerun via MCP (gate check first). | +| `self_healing_throttled` | Reject old fixes, attempt local fix. | +| `no_new_cipe` | CI Attempt never spawned. Auto-fix workflow or exit with guidance. | +| `cipe_no_tasks` | CI failed with no tasks. Retry once with empty commit. | + +**Key rules (always apply):** + +- **Git safety**: Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets +- **Environment failures** (OOM, command not found, permission denied): bail immediately. These aren't code bugs, so spending local-fix budget on them is wasteful +- **Gate check**: Run `ci-state-update.mjs gate` before local fix attempts — if budget exhausted, print message and exit + +## Main Loop + +### Step 1: Initialize Tracking + +``` +cycle_count = 0 # Only incremented for agent-initiated cycles (counted against --max-cycles) +start_time = now() +no_progress_count = 0 +local_verify_count = 0 +env_rerun_count = 0 +last_cipe_url = null +expected_commit_sha = null +agent_triggered = false # Set true after monitor takes an action that triggers new CI Attempt +poll_count = 0 +wait_mode = false +prev_status = null +prev_cipe_status = null +prev_sh_status = null +prev_verification_status = null +prev_failure_classification = null +``` + +### Step 2: Polling Loop + +Repeat until done: + +#### 2a. Spawn subagent (FETCH_STATUS) + +Determine select fields based on mode: + +- **Wait mode**: use WAIT_FIELDS (`cipeUrl,commitSha,cipeStatus`) +- **Normal mode (first poll or after newCipeDetected)**: use LIGHT_FIELDS + +Call the `ci_information` tool with the determined `select` fields for the current branch. Wait for the result before proceeding. + +#### 2b. Run decision script + +```bash +node /scripts/ci-poll-decide.mjs '' \ + [--wait-mode] \ + [--prev-cipe-url ] \ + [--expected-sha ] \ + [--prev-status ] \ + [--timeout ] \ + [--new-cipe-timeout ] \ + [--env-rerun-count ] \ + [--no-progress-count ] \ + [--prev-cipe-status ] \ + [--prev-sh-status ] \ + [--prev-verification-status ] \ + [--prev-failure-classification ] +``` + +The script outputs a single JSON line: `{ action, code, message, delay?, noProgressCount, envRerunCount, fields?, newCipeDetected?, verifiableTaskIds? }` + +#### 2c. Process script output + +Parse the JSON output and update tracking state: + +- `no_progress_count = output.noProgressCount` +- `env_rerun_count = output.envRerunCount` +- `prev_cipe_status = subagent_result.cipeStatus` +- `prev_sh_status = subagent_result.selfHealingStatus` +- `prev_verification_status = subagent_result.verificationStatus` +- `prev_failure_classification = subagent_result.failureClassification` +- `prev_status = output.action + ":" + (output.code || subagent_result.cipeStatus)` +- `poll_count++` + +Based on `action`: + +- **`action == "poll"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a + - If `output.newCipeDetected`: clear wait mode, reset `wait_mode = false` +- **`action == "wait"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a +- **`action == "done"`**: Proceed to Step 3 with `output.code` + +### Step 3: Handle Actionable Status + +When decision script returns `action == "done"`: + +1. Run cycle-check (Step 4) **before** handling the code +2. Check the returned `code` +3. Look up default behavior in the table above +4. Check if user instructions override the default +5. Execute the appropriate action +6. **If action expects new CI Attempt**, update tracking (see Step 3a) +7. If action results in looping, go to Step 2 + +#### Tool calls for actions + +Several statuses require fetching additional data or calling tools: + +- **fix_apply_ready**: Call `update_self_healing_fix` with action `APPLY` +- **fix_needs_local_verify**: Call `ci_information` with HEAVY_FIELDS for fix details before local verification +- **fix_needs_review**: Call `ci_information` with HEAVY_FIELDS → get `suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries` +- **fix_failed / no_fix**: Call `ci_information` with HEAVY_FIELDS → get `taskFailureSummaries` for local fix context +- **environment_issue**: Call `update_self_healing_fix` with action `RERUN_ENVIRONMENT_STATE` +- **self_healing_throttled**: Call `ci_information` with HEAVY_FIELDS → get `selfHealingSkipMessage`; then call `update_self_healing_fix` for each old fix + +### Step 3a: Track State for New-CI-Attempt Detection + +After actions that should trigger a new CI Attempt, run: + +```bash +node /scripts/ci-state-update.mjs post-action \ + --action \ + --cipe-url \ + --commit-sha +``` + +Action types: `fix-auto-applying`, `apply-mcp`, `apply-local-push`, `reject-fix-push`, `local-fix-push`, `env-rerun`, `auto-fix-push`, `empty-commit-push` + +The script returns `{ waitMode, pollCount, lastCipeUrl, expectedCommitSha, agentTriggered }`. Update all tracking state from the output, then go to Step 2. + +### Step 4: Cycle Classification and Progress Tracking + +When the decision script returns `action == "done"`, run cycle-check **before** handling the code: + +```bash +node /scripts/ci-state-update.mjs cycle-check \ + --code \ + [--agent-triggered] \ + --cycle-count --max-cycles \ + --env-rerun-count +``` + +The script returns `{ cycleCount, agentTriggered, envRerunCount, approachingLimit, message }`. Update tracking state from the output. + +- If `approachingLimit` → ask user whether to continue (with 5 or 10 more cycles) or stop monitoring +- If previous cycle was NOT agent-triggered (human pushed), log that human-initiated push was detected + +#### Progress Tracking + +- `no_progress_count`, circuit breaker (5 polls), and backoff reset are handled by ci-poll-decide.mjs (progress = any change in cipeStatus, selfHealingStatus, verificationStatus, or failureClassification) +- `env_rerun_count` reset on non-environment status is handled by ci-state-update.mjs cycle-check +- On new CI Attempt detected (poll script returns `newCipeDetected`) → reset `local_verify_count = 0`, `env_rerun_count = 0` + +## Error Handling + +| Error | Action | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------- | +| Git rebase conflict | Report to user, exit | +| `nx-cloud apply-locally` fails | Reject fix via MCP (`action: "REJECT"`), then attempt manual patch (Reject + Fix From Scratch Flow) or exit | +| MCP tool error | Retry once, if fails report to user | +| Subagent spawn failure | Retry once, if fails exit with error | +| Decision script error | Treat as `error` status, increment `no_progress_count` | +| No new CI Attempt detected | If `--auto-fix-workflow`, try lockfile update; otherwise report to user with guidance | +| Lockfile auto-fix fails | Report to user, exit with guidance to check CI logs | + +## User Instruction Examples + +Users can override default behaviors: + +| Instruction | Effect | +| ------------------------------------------------ | --------------------------------------------------- | +| "never auto-apply" | Always prompt before applying any fix | +| "always ask before git push" | Prompt before each push | +| "reject any fix for e2e tasks" | Auto-reject if `failedTaskIds` contains e2e | +| "apply all fixes regardless of verification" | Skip verification check, apply everything | +| "if confidence < 70, reject" | Check confidence field before applying | +| "run 'nx affected -t typecheck' before applying" | Add local verification step | +| "auto-fix workflow failures" | Attempt lockfile updates on pre-CI-Attempt failures | +| "wait 45 min for new CI Attempt" | Override new-CI-Attempt timeout (default: 10 min) | diff --git a/.agents/skills/monitor-ci/references/fix-flows.md b/.agents/skills/monitor-ci/references/fix-flows.md new file mode 100644 index 0000000..b33aa02 --- /dev/null +++ b/.agents/skills/monitor-ci/references/fix-flows.md @@ -0,0 +1,108 @@ +# Detailed Status Handling & Fix Flows + +## Status Handling by Code + +### fix_auto_apply_skipped + +The script returns `autoApplySkipReason` in its output. + +1. Report the skip reason to the user (e.g., "Auto-apply was skipped because the previous CI pipeline execution was triggered by Nx Cloud") +2. Offer to apply the fix manually — spawn UPDATE_FIX subagent with `APPLY` if user agrees +3. Record `last_cipe_url`, enter wait mode + +### fix_apply_ready + +- Spawn UPDATE_FIX subagent with `APPLY` +- Record `last_cipe_url`, enter wait mode + +### fix_needs_local_verify + +The script returns `verifiableTaskIds` in its output. + +1. **Detect package manager:** `pnpm-lock.yaml` → `pnpm nx`, `yarn.lock` → `yarn nx`, otherwise `npx nx` +2. **Run verifiable tasks in parallel** — spawn `general` subagents for each task +3. **If all pass** → spawn UPDATE_FIX subagent with `APPLY`, enter wait mode +4. **If any fail** → Apply Locally + Enhance Flow (see below) + +### fix_needs_review + +Spawn FETCH_HEAVY subagent, then analyze fix content (`suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries`): + +- If fix looks correct → apply via MCP +- If fix needs enhancement → Apply Locally + Enhance Flow +- If fix is wrong → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. Otherwise → Reject + Fix From Scratch Flow + +### fix_failed / no_fix + +Spawn FETCH_HEAVY subagent for `taskFailureSummaries`. Run `ci-state-update.mjs gate --gate-type local-fix` — if not allowed, print message and exit. Otherwise attempt local fix (counter already incremented by gate). If successful → commit, push, enter wait mode. If not → exit with failure. + +### environment_issue + +1. Run `ci-state-update.mjs gate --gate-type env-rerun`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `RERUN_ENVIRONMENT_STATE` +3. Enter wait mode with `last_cipe_url` set + +### self_healing_throttled + +Spawn FETCH_HEAVY subagent for `selfHealingSkipMessage`. + +1. **Parse throttle message** for CI Attempt URLs (regex: `/cipes/{id}`) +2. **Reject previous fixes** — for each URL: spawn FETCH_THROTTLE_INFO to get `shortLink`, then UPDATE_FIX with `REJECT` +3. **Attempt local fix**: Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed → skip to step 4. Otherwise use `failedTaskIds` and `taskFailureSummaries` for context. +4. **Fallback if local fix not possible or budget exhausted**: push empty commit (`git commit --allow-empty -m "ci: rerun after rejecting throttled fixes"`), enter wait mode + +### no_new_cipe + +1. Report to user: no CI attempt found, suggest checking CI provider +2. If `--auto-fix-workflow`: detect package manager, run install, commit lockfile if changed, enter wait mode +3. Otherwise: exit with guidance + +### cipe_no_tasks + +1. Report to user: CI failed with no tasks recorded +2. Retry: `git commit --allow-empty -m "chore: retry ci [monitor-ci]"` + push, enter wait mode +3. If retry also returns `cipe_no_tasks`: exit with failure + +## Fix Action Flows + +### Apply via MCP + +Spawn UPDATE_FIX subagent with `APPLY`. New CI Attempt spawns automatically. No local git ops. + +### Apply Locally + Enhance Flow + +1. `nx-cloud apply-locally ` (sets state to `APPLIED_LOCALLY`) +2. Enhance code to fix failing tasks +3. Run failing tasks to verify +4. If still failing → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, commit current state and push (let CI be final judge). Otherwise loop back to enhance. +5. If passing → commit and push, enter wait mode + +### Reject + Fix From Scratch Flow + +1. Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `REJECT` +3. Fix from scratch locally +4. Commit and push, enter wait mode + +## Environment vs Code Failure Recognition + +When any local fix path runs a task and it fails, assess whether the failure is a **code issue** or an **environment/tooling issue** before running the gate script. + +**Indicators of environment/tooling failures** (non-exhaustive): command not found / binary missing, OOM / heap allocation failures, permission denied, network timeouts / DNS failures, missing system libraries, Docker/container issues, disk space exhaustion. + +When detected → bail immediately without running gate (no budget consumed). Report that the failure is an environment/tooling issue, not a code bug. + +**Code failures** (compilation errors, test assertion failures, lint violations, type errors) are genuine candidates for local fix attempts and proceed normally through the gate. + +## Git Safety + +- Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets + +## Commit Message Format + +```bash +git commit -m "fix(): + +Failed tasks: , +Local verification: passed|enhanced|failed-pushing-to-ci" +``` diff --git a/.agents/skills/monitor-ci/scripts/ci-poll-decide.mjs b/.agents/skills/monitor-ci/scripts/ci-poll-decide.mjs new file mode 100644 index 0000000..3fe17e5 --- /dev/null +++ b/.agents/skills/monitor-ci/scripts/ci-poll-decide.mjs @@ -0,0 +1,428 @@ +#!/usr/bin/env node + +/** + * CI Poll Decision Script + * + * Deterministic decision engine for CI monitoring. + * Takes ci_information JSON + state args, outputs a single JSON action line. + * + * Architecture: + * classify() — pure decision tree, returns { action, code, extra? } + * buildOutput() — maps classification to full output with messages, delays, counters + * + * Usage: + * node ci-poll-decide.mjs '' \ + * [--wait-mode] [--prev-cipe-url ] [--expected-sha ] \ + * [--prev-status ] [--timeout ] [--new-cipe-timeout ] \ + * [--env-rerun-count ] [--no-progress-count ] \ + * [--prev-cipe-status ] [--prev-sh-status ] \ + * [--prev-verification-status ] [--prev-failure-classification ] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const ciInfoJson = args[0]; +const pollCount = parseInt(args[1], 10) || 0; +const verbosity = args[2] || 'medium'; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +const waitMode = getFlag('--wait-mode'); +const prevCipeUrl = getArg('--prev-cipe-url'); +const expectedSha = getArg('--expected-sha'); +const prevStatus = getArg('--prev-status'); +const timeoutSeconds = parseInt(getArg('--timeout') || '0', 10); +const newCipeTimeoutSeconds = parseInt(getArg('--new-cipe-timeout') || '0', 10); +const envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); +const inputNoProgressCount = parseInt(getArg('--no-progress-count') || '0', 10); +const prevCipeStatus = getArg('--prev-cipe-status'); +const prevShStatus = getArg('--prev-sh-status'); +const prevVerificationStatus = getArg('--prev-verification-status'); +const prevFailureClassification = getArg('--prev-failure-classification'); + +// --- Parse CI info --- + +let ci; +try { + ci = JSON.parse(ciInfoJson); +} catch { + console.log( + JSON.stringify({ + action: 'done', + code: 'error', + message: 'Failed to parse ci_information JSON', + noProgressCount: inputNoProgressCount + 1, + envRerunCount, + }) + ); + process.exit(0); +} + +const { + cipeStatus, + selfHealingStatus, + verificationStatus, + selfHealingEnabled, + selfHealingSkippedReason, + failureClassification: rawFailureClassification, + failedTaskIds = [], + verifiedTaskIds = [], + couldAutoApplyTasks, + autoApplySkipped, + autoApplySkipReason, + userAction, + cipeUrl, + commitSha, +} = ci; + +const failureClassification = rawFailureClassification?.toLowerCase() ?? null; + +// --- Helpers --- + +function categorizeTasks() { + const verifiedSet = new Set(verifiedTaskIds); + const unverified = failedTaskIds.filter((t) => !verifiedSet.has(t)); + if (unverified.length === 0) return { category: 'all_verified' }; + + const e2e = unverified.filter((t) => { + const parts = t.split(':'); + return parts.length >= 2 && parts[1].includes('e2e'); + }); + if (e2e.length === unverified.length) return { category: 'e2e_only' }; + + const verifiable = unverified.filter((t) => { + const parts = t.split(':'); + return !(parts.length >= 2 && parts[1].includes('e2e')); + }); + return { category: 'needs_local_verify', verifiableTaskIds: verifiable }; +} + +function backoff(count) { + const delays = [60, 90, 120]; + return delays[Math.min(count, delays.length - 1)]; +} + +function hasStateChanged() { + if (prevCipeStatus && cipeStatus !== prevCipeStatus) return true; + if (prevShStatus && selfHealingStatus !== prevShStatus) return true; + if (prevVerificationStatus && verificationStatus !== prevVerificationStatus) + return true; + if ( + prevFailureClassification && + failureClassification !== prevFailureClassification + ) + return true; + return false; +} + +function isTimedOut() { + if (timeoutSeconds <= 0) return false; + const avgDelay = pollCount === 0 ? 0 : backoff(Math.floor(pollCount / 2)); + return pollCount * avgDelay >= timeoutSeconds; +} + +function isWaitTimedOut() { + if (newCipeTimeoutSeconds <= 0) return false; + return pollCount * 30 >= newCipeTimeoutSeconds; +} + +function isNewCipe() { + return ( + (prevCipeUrl && cipeUrl && cipeUrl !== prevCipeUrl) || + (expectedSha && commitSha && commitSha === expectedSha) + ); +} + +// ============================================================ +// classify() — pure decision tree +// +// Returns: { action: 'poll'|'wait'|'done', code: string, extra? } +// +// Decision priority (top wins): +// WAIT MODE: +// 1. new CI Attempt detected → poll (new_cipe_detected) +// 2. wait timed out → done (no_new_cipe) +// 3. still waiting → wait (waiting_for_cipe) +// NORMAL MODE: +// 4. polling timeout → done (polling_timeout) +// 5. circuit breaker (5 polls) → done (circuit_breaker) +// 6. CI succeeded → done (ci_success) +// 7. CI canceled → done (cipe_canceled) +// 8. CI timed out → done (cipe_timed_out) +// 9. CI failed, no tasks recorded → done (cipe_no_tasks) +// 10. environment failure → done (environment_rerun_cap | environment_issue) +// 11. self-healing throttled → done (self_healing_throttled) +// 12. CI in progress / not started → poll (ci_running) +// 13. self-healing in progress → poll (sh_running) +// 14. flaky task auto-rerun → poll (flaky_rerun) +// 15. fix auto-applied → poll (fix_auto_applied) +// 16. auto-apply: skipped → done (fix_auto_apply_skipped) +// 17. auto-apply: verification pending→ poll (verification_pending) +// 18. auto-apply: verified → done (fix_auto_applying) +// 19. fix: verification failed/none → done (fix_needs_review) +// 20. fix: all/e2e verified → done (fix_apply_ready) +// 21. fix: needs local verify → done (fix_needs_local_verify) +// 22. self-healing failed → done (fix_failed) +// 23. no fix available → done (no_fix) +// 24. fallback → poll (fallback) +// ============================================================ + +function classify() { + // --- Wait mode --- + if (waitMode) { + if (isNewCipe()) return { action: 'poll', code: 'new_cipe_detected' }; + if (isWaitTimedOut()) return { action: 'done', code: 'no_new_cipe' }; + return { action: 'wait', code: 'waiting_for_cipe' }; + } + + // --- Guards --- + if (isTimedOut()) return { action: 'done', code: 'polling_timeout' }; + if (noProgressCount >= 5) return { action: 'done', code: 'circuit_breaker' }; + + // --- Terminal CI states --- + if (cipeStatus === 'SUCCEEDED') return { action: 'done', code: 'ci_success' }; + if (cipeStatus === 'CANCELED') + return { action: 'done', code: 'cipe_canceled' }; + if (cipeStatus === 'TIMED_OUT') + return { action: 'done', code: 'cipe_timed_out' }; + + // --- CI failed, no tasks --- + if ( + cipeStatus === 'FAILED' && + failedTaskIds.length === 0 && + selfHealingStatus == null + ) + return { action: 'done', code: 'cipe_no_tasks' }; + + // --- Environment failure --- + if (failureClassification === 'environment_state') { + if (envRerunCount >= 2) + return { action: 'done', code: 'environment_rerun_cap' }; + return { action: 'done', code: 'environment_issue' }; + } + + // --- Throttled --- + if (selfHealingSkippedReason === 'THROTTLED') + return { action: 'done', code: 'self_healing_throttled' }; + + // --- Still running: CI --- + if (cipeStatus === 'IN_PROGRESS' || cipeStatus === 'NOT_STARTED') + return { action: 'poll', code: 'ci_running' }; + + // --- Still running: self-healing --- + if ( + (selfHealingStatus === 'IN_PROGRESS' || + selfHealingStatus === 'NOT_STARTED') && + !selfHealingSkippedReason + ) + return { action: 'poll', code: 'sh_running' }; + + // --- Still running: flaky rerun --- + if (failureClassification === 'flaky_task') + return { action: 'poll', code: 'flaky_rerun' }; + + // --- Fix auto-applied, waiting for new CI Attempt --- + if (userAction === 'APPLIED_AUTOMATICALLY') + return { action: 'poll', code: 'fix_auto_applied' }; + + // --- Auto-apply path (couldAutoApplyTasks) --- + if (couldAutoApplyTasks === true) { + if (autoApplySkipped === true) + return { + action: 'done', + code: 'fix_auto_apply_skipped', + extra: { autoApplySkipReason }, + }; + if ( + verificationStatus === 'NOT_STARTED' || + verificationStatus === 'IN_PROGRESS' + ) + return { action: 'poll', code: 'verification_pending' }; + if (verificationStatus === 'COMPLETED') + return { action: 'done', code: 'fix_auto_applying' }; + // verification FAILED or NOT_EXECUTABLE → falls through to fix_needs_review + } + + // --- Fix available --- + if (selfHealingStatus === 'COMPLETED') { + if ( + verificationStatus === 'FAILED' || + verificationStatus === 'NOT_EXECUTABLE' || + (couldAutoApplyTasks !== true && !verificationStatus) + ) + return { action: 'done', code: 'fix_needs_review' }; + + const tasks = categorizeTasks(); + if (tasks.category === 'all_verified' || tasks.category === 'e2e_only') + return { action: 'done', code: 'fix_apply_ready' }; + return { + action: 'done', + code: 'fix_needs_local_verify', + extra: { verifiableTaskIds: tasks.verifiableTaskIds }, + }; + } + + // --- Fix failed --- + if (selfHealingStatus === 'FAILED') + return { action: 'done', code: 'fix_failed' }; + + // --- No fix available --- + if ( + cipeStatus === 'FAILED' && + (selfHealingEnabled === false || selfHealingStatus === 'NOT_EXECUTABLE') + ) + return { action: 'done', code: 'no_fix' }; + + // --- Fallback --- + return { action: 'poll', code: 'fallback' }; +} + +// ============================================================ +// buildOutput() — maps classification to full JSON output +// ============================================================ + +// Message templates keyed by status or key +const messages = { + // wait mode + new_cipe_detected: () => + `New CI Attempt detected! CI: ${cipeStatus || 'N/A'}`, + no_new_cipe: () => + 'New CI Attempt timeout exceeded. No new CI Attempt detected.', + waiting_for_cipe: () => 'Waiting for new CI Attempt...', + + // guards + polling_timeout: () => 'Polling timeout exceeded.', + circuit_breaker: () => 'No progress after 5 consecutive polls. Stopping.', + + // terminal + ci_success: () => 'CI passed successfully!', + cipe_canceled: () => 'CI Attempt was canceled.', + cipe_timed_out: () => 'CI Attempt timed out.', + cipe_no_tasks: () => 'CI failed but no Nx tasks were recorded.', + + // environment + environment_rerun_cap: () => 'Environment rerun cap (2) exceeded. Bailing.', + environment_issue: () => 'CI: FAILED | Classification: ENVIRONMENT_STATE', + + // throttled + self_healing_throttled: () => + 'Self-healing throttled \u2014 too many unapplied fixes.', + + // polling + ci_running: () => `CI: ${cipeStatus}`, + sh_running: () => `CI: ${cipeStatus} | Self-healing: ${selfHealingStatus}`, + flaky_rerun: () => + 'CI: FAILED | Classification: FLAKY_TASK (auto-rerun in progress)', + fix_auto_applied: () => + 'CI: FAILED | Fix auto-applied, new CI Attempt spawning', + verification_pending: () => + `CI: FAILED | Self-healing: COMPLETED | Verification: ${verificationStatus}`, + + // actionable + fix_auto_applying: () => 'Fix verified! Auto-applying...', + fix_auto_apply_skipped: (extra) => + `Fix verified but auto-apply was skipped. ${ + extra?.autoApplySkipReason + ? `Reason: ${extra.autoApplySkipReason}` + : 'Offer to apply manually.' + }`, + fix_needs_review: () => + `Fix available but needs review. Verification: ${ + verificationStatus || 'N/A' + }`, + fix_apply_ready: () => 'Fix available and verified. Ready to apply.', + fix_needs_local_verify: (extra) => + `Fix available. ${extra.verifiableTaskIds.length} task(s) need local verification.`, + fix_failed: () => 'Self-healing failed to generate a fix.', + no_fix: () => 'CI failed, no fix available.', + + // fallback + fallback: () => + `CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, +}; + +// Codes where noProgressCount resets to 0 (genuine progress occurred) +const resetProgressCodes = new Set([ + 'ci_success', + 'fix_auto_applying', + 'fix_auto_apply_skipped', + 'fix_needs_review', + 'fix_apply_ready', + 'fix_needs_local_verify', +]); + +function formatMessage(msg) { + if (verbosity === 'minimal') { + const currentStatus = `${cipeStatus}|${selfHealingStatus}|${verificationStatus}`; + if (currentStatus === (prevStatus || '')) return null; + return msg; + } + if (verbosity === 'verbose') { + return [ + `Poll #${pollCount + 1} | CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, + msg, + ].join('\n'); + } + return `Poll #${pollCount + 1} | ${msg}`; +} + +function buildOutput(decision) { + const { action, code, extra } = decision; + + // noProgressCount is already computed before classify() was called. + // Here we only handle the reset for "genuine progress" done-codes. + + const msgFn = messages[code]; + const rawMsg = msgFn ? msgFn(extra) : `Unknown: ${code}`; + const message = formatMessage(rawMsg); + + const result = { + action, + code, + message, + noProgressCount: resetProgressCodes.has(code) ? 0 : noProgressCount, + envRerunCount, + }; + + // Add delay + if (action === 'wait') { + result.delay = 30; + } else if (action === 'poll') { + result.delay = code === 'new_cipe_detected' ? 60 : backoff(noProgressCount); + result.fields = 'light'; + } + + // Add extras + if (code === 'new_cipe_detected') result.newCipeDetected = true; + if (extra?.verifiableTaskIds) + result.verifiableTaskIds = extra.verifiableTaskIds; + if (extra?.autoApplySkipReason) + result.autoApplySkipReason = extra.autoApplySkipReason; + + console.log(JSON.stringify(result)); +} + +// --- Run --- + +// Compute noProgressCount from input. Single assignment, no mutation. +// Wait mode: reset on new cipe, otherwise unchanged (wait doesn't count as no-progress). +// Normal mode: reset on any state change, otherwise increment. +const noProgressCount = (() => { + if (waitMode) return isNewCipe() ? 0 : inputNoProgressCount; + if (isNewCipe() || hasStateChanged()) return 0; + return inputNoProgressCount + 1; +})(); + +buildOutput(classify()); diff --git a/.agents/skills/monitor-ci/scripts/ci-state-update.mjs b/.agents/skills/monitor-ci/scripts/ci-state-update.mjs new file mode 100644 index 0000000..90fa714 --- /dev/null +++ b/.agents/skills/monitor-ci/scripts/ci-state-update.mjs @@ -0,0 +1,160 @@ +#!/usr/bin/env node + +/** + * CI State Update Script + * + * Deterministic state management for CI monitor actions. + * Three commands: gate, post-action, cycle-check. + * + * Usage: + * node ci-state-update.mjs gate --gate-type [counter args] + * node ci-state-update.mjs post-action --action [--cipe-url ] [--commit-sha ] + * node ci-state-update.mjs cycle-check --code [--agent-triggered] [counter args] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const command = args[0]; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +function output(result) { + console.log(JSON.stringify(result)); +} + +// --- gate --- +// Check if an action is allowed and return incremented counter. +// Called before any local fix attempt or environment rerun. + +function gate() { + const gateType = getArg('--gate-type'); + + if (gateType === 'local-fix') { + const count = parseInt(getArg('--local-verify-count') || '0', 10); + const max = parseInt(getArg('--local-verify-attempts') || '3', 10); + if (count >= max) { + return output({ + allowed: false, + localVerifyCount: count, + message: `Local fix budget exhausted (${count}/${max} attempts)`, + }); + } + return output({ + allowed: true, + localVerifyCount: count + 1, + message: null, + }); + } + + if (gateType === 'env-rerun') { + const count = parseInt(getArg('--env-rerun-count') || '0', 10); + if (count >= 2) { + return output({ + allowed: false, + envRerunCount: count, + message: `Environment issue persists after ${count} reruns. Manual investigation needed.`, + }); + } + return output({ + allowed: true, + envRerunCount: count + 1, + message: null, + }); + } + + output({ allowed: false, message: `Unknown gate type: ${gateType}` }); +} + +// --- post-action --- +// Compute next state after an action is taken. +// Returns wait mode params and whether the action was agent-triggered. + +function postAction() { + const action = getArg('--action'); + const cipeUrl = getArg('--cipe-url'); + const commitSha = getArg('--commit-sha'); + + // MCP-triggered or auto-applied: track by cipeUrl + const cipeUrlActions = ['fix-auto-applying', 'apply-mcp', 'env-rerun']; + // Local push: track by commitSha + const commitShaActions = [ + 'apply-local-push', + 'reject-fix-push', + 'local-fix-push', + 'auto-fix-push', + 'empty-commit-push', + ]; + + const trackByCipeUrl = cipeUrlActions.includes(action); + const trackByCommitSha = commitShaActions.includes(action); + + if (!trackByCipeUrl && !trackByCommitSha) { + return output({ error: `Unknown action: ${action}` }); + } + + // fix-auto-applying: self-healing did it, NOT the monitor + const agentTriggered = action !== 'fix-auto-applying'; + + output({ + waitMode: true, + pollCount: 0, + lastCipeUrl: trackByCipeUrl ? cipeUrl : null, + expectedCommitSha: trackByCommitSha ? commitSha : null, + agentTriggered, + }); +} + +// --- cycle-check --- +// Cycle classification + counter resets when a new "done" code is received. +// Called at the start of handling each actionable code. + +function cycleCheck() { + const status = getArg('--code'); + const wasAgentTriggered = getFlag('--agent-triggered'); + let cycleCount = parseInt(getArg('--cycle-count') || '0', 10); + const maxCycles = parseInt(getArg('--max-cycles') || '10', 10); + let envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); + + // Cycle classification: if previous cycle was agent-triggered, count it + if (wasAgentTriggered) cycleCount++; + + // Reset env_rerun_count on non-environment status + if (status !== 'environment_issue') envRerunCount = 0; + + // Approaching limit gate + const approachingLimit = cycleCount >= maxCycles - 2; + + output({ + cycleCount, + agentTriggered: false, + envRerunCount, + approachingLimit, + message: approachingLimit + ? `Approaching cycle limit (${cycleCount}/${maxCycles})` + : null, + }); +} + +// --- Dispatch --- + +switch (command) { + case 'gate': + gate(); + break; + case 'post-action': + postAction(); + break; + case 'cycle-check': + cycleCheck(); + break; + default: + output({ error: `Unknown command: ${command}` }); +} diff --git a/.agents/skills/nx-generate/SKILL.md b/.agents/skills/nx-generate/SKILL.md new file mode 100644 index 0000000..af7ba80 --- /dev/null +++ b/.agents/skills/nx-generate/SKILL.md @@ -0,0 +1,166 @@ +--- +name: nx-generate +description: Generate code using nx generators. INVOKE IMMEDIATELY when user mentions scaffolding, setup, structure, creating apps/libs, or setting up project structure. Trigger words - scaffold, setup, create a ... app, create a ... lib, project structure, generate, add a new project. ALWAYS use this BEFORE calling nx_docs or exploring - this skill handles discovery internally. +--- + +# Run Nx Generator + +Nx generators are powerful tools that scaffold projects, make automated code migrations or automate repetitive tasks in a monorepo. They ensure consistency across the codebase and reduce boilerplate work. + +This skill applies when the user wants to: + +- Create new projects like libraries or applications +- Scaffold features or boilerplate code +- Run workspace-specific or custom generators +- Do anything else that an nx generator exists for + +## Key Principles + +1. **Always use `--no-interactive`** - Prevents prompts that would hang execution +2. **Read the generator source code** - The schema alone is not enough; understand what the generator actually does +3. **Match existing repo patterns** - Study similar artifacts in the repo and follow their conventions +4. **Verify with lint/test/build/typecheck etc.** - Generated code must pass verification. The listed targets are just an example, use what's appropriate for this workspace. + +## Steps + +### 1. Discover Available Generators + +Use the Nx CLI to discover available generators: + +- List all generators for a plugin: `npx nx list @nx/react` +- View available plugins: `npx nx list` + +This includes plugin generators (e.g., `@nx/react:library`) and local workspace generators. + +### 2. Match Generator to User Request + +Identify which generator(s) could fulfill the user's needs. Consider what artifact type they want, which framework is relevant, and any specific generator names mentioned. + +**IMPORTANT**: When both a local workspace generator and an external plugin generator could satisfy the request, **always prefer the local workspace generator**. Local generators are customized for the specific repo's patterns. + +If no suitable generator exists, you can stop using this skill. However, the burden of proof is high—carefully consider all available generators before deciding none apply. + +### 3. Get Generator Options + +Use the `--help` flag to understand available options: + +```bash +npx nx g @nx/react:library --help +``` + +Pay attention to required options, defaults that might need overriding, and options relevant to the user's request. + +### Library Buildability + +**Default to non-buildable libraries** unless there's a specific reason for buildable. + +| Type | When to use | Generator flags | +| --------------------------- | ----------------------------------------------------------------- | ----------------------------------- | +| **Non-buildable** (default) | Internal monorepo libs consumed by apps | No `--bundler` flag | +| **Buildable** | Publishing to npm, cross-repo sharing, stable libs for cache hits | `--bundler=vite` or `--bundler=swc` | + +Non-buildable libs: + +- Export `.ts`/`.tsx` source directly +- Consumer's bundler compiles them +- Faster dev experience, less config + +Buildable libs: + +- Have their own build target +- Useful for stable libs that rarely change (cache hits) +- Required for npm publishing + +**If unclear, ask the user:** "Should this library be buildable (own build step, better caching) or non-buildable (source consumed directly, simpler setup)?" + +### 4. Read Generator Source Code + +**This step is critical.** The schema alone does not tell you everything. Reading the source code helps you: + +- Know exactly what files will be created/modified and where +- Understand side effects (updating configs, installing deps, etc.) +- Identify behaviors and options not obvious from the schema +- Understand how options interact with each other + +To find generator source code: + +- For plugin generators: Use `node -e "console.log(require.resolve('@nx//generators.json'));"` to find the generators.json, then locate the source from there +- If that fails, read directly from `node_modules//generators.json` +- For local generators: Typically in `tools/generators/` or a local plugin directory. Search the repo for the generator name. + +After reading the source, reconsider: Is this the right generator? If not, go back to step 2. + +> **⚠️ `--directory` flag behavior can be misleading.** +> It should specify the full path of the generated library or component, not the parent path that it will be generated in. +> +> ```bash +> # ✅ Correct - directory is the full path for the library +> nx g @nx/react:library --directory=libs/my-lib +> # generates libs/my-lib/package.json and more +> +> # ❌ Wrong - this will create files at libs and libs/src/... +> nx g @nx/react:library --name=my-lib --directory=libs +> # generates libs/package.json and more +> ``` + +### 5. Examine Existing Patterns + +Before generating, examine the target area of the codebase: + +- Look at similar existing artifacts (other libraries, applications, etc.) +- Identify naming conventions, file structures, and configuration patterns +- Note which test runners, build tools, and linters are used +- Configure the generator to match these patterns + +### 6. Dry-Run to Verify File Placement + +**Always run with `--dry-run` first** to verify files will be created in the correct location: + +```bash +npx nx g @nx/react:library --name=my-lib --dry-run --no-interactive +``` + +Review the output carefully. If files would be created in the wrong location, adjust your options based on what you learned from the generator source code. + +Note: Some generators don't support dry-run (e.g., if they install npm packages). If dry-run fails for this reason, proceed to running the generator for real. + +### 7. Run the Generator + +Execute the generator: + +```bash +nx generate --no-interactive +``` + +> **Tip:** New packages often need workspace dependencies wired up (e.g., importing shared types, being consumed by apps). The `link-workspace-packages` skill can help add these correctly. + +### 8. Modify Generated Code (If Needed) + +Generators provide a starting point. Modify the output as needed to: + +- Add or modify functionality as requested +- Adjust imports, exports, or configurations +- Integrate with existing code patterns + +**Important:** If you replace or delete generated test files (e.g., `*.spec.ts`), either write meaningful replacement tests or remove the `test` target from the project configuration. Empty test suites will cause `nx test` to fail. + +### 9. Format and Verify + +Format all generated/modified files: + +```bash +nx format --fix +``` + +This example is for built-in nx formatting with prettier. There might be other formatting tools for this workspace, use these when appropriate. + +Then verify the generated code works. Keep in mind that the changes you make with a generator or subsequent modifications might impact various projects so it's usually not enough to only run targets for the artifact you just created. + +```bash +# these targets are just an example! +nx run-many -t build,lint,test,typecheck +``` + +These targets are common examples used across many workspaces. You should do research into other targets available for this workspace and its projects. CI configuration is usually a good guide for what the critical targets are that have to pass. + +If verification fails with manageable issues (a few lint errors, minor type issues), fix them. If issues are extensive, attempt obvious fixes first, then escalate to the user with details about what was generated, what's failing, and what you've attempted. diff --git a/.agents/skills/nx-import/SKILL.md b/.agents/skills/nx-import/SKILL.md new file mode 100644 index 0000000..b1cd381 --- /dev/null +++ b/.agents/skills/nx-import/SKILL.md @@ -0,0 +1,238 @@ +--- +name: nx-import +description: Import, merge, or combine repositories into an Nx workspace using nx import. USE WHEN the user asks to adopt Nx across repos, move projects into a monorepo, or bring code/history from another repository. +--- + +## Quick Start + +- `nx import` brings code from a source repository or folder into the current workspace, preserving commit history. +- After nx `22.6.0`, `nx import` responds with .ndjson outputs and follow-up questions. For earlier versions, always run with `--no-interactive` and specify all flags directly. +- Run `nx import --help` for available options. +- Make sure the destination directory is empty before importing. + EXAMPLE: target has `libs/utils` and `libs/models`; source has `libs/ui` and `libs/data-access` — you cannot import `libs/` into `libs/` directly. Import each source library individually. + +Primary docs: + +- https://nx.dev/docs/guides/adopting-nx/import-project +- https://nx.dev/docs/guides/adopting-nx/preserving-git-histories + +Read the nx docs if you have the tools for it. + +## Import Strategy + +**Subdirectory-at-a-time** (`nx import apps --source=apps`): + +- **Recommended for monorepo sources** — files land at top level, no redundant config +- Caveats: multiple import commands (separate merge commits each); dest must not have conflicting directories; root configs (deps, plugins, targetDefaults) not imported +- **Directory conflicts**: Import into alternate-named dir (e.g. `imported-apps/`), then rename + +**Whole repo** (`nx import imported --source=.`): + +- **Only for non-monorepo sources** (single-project repos) +- For monorepos, creates messy nested config (`imported/nx.json`, `imported/tsconfig.base.json`, etc.) +- If you must: keep imported `tsconfig.base.json` (projects extend it), prefix workspace globs and executor paths + +### Directory Conventions + +- **Always prefer the destination's existing conventions.** Source uses `libs/`but dest uses `packages/`? Import into `packages/` (`nx import packages/foo --source=libs/foo`). +- If dest has no convention (empty workspace), ask the user. + +### Application vs Library Detection + +Before importing, identify whether the source is an **application** or a **library**: + +- **Applications**: Deployable end products. Common indicators: + - _Frontend_: `next.config.*`, `vite.config.*` with a build entry point, framework-specific app scaffolding (CRA, Angular CLI app, etc.) + - _Backend (Node.js)_: Express/Fastify/NestJS server entrypoint, no `"exports"` field in `package.json` + - _JVM_: Maven `pom.xml` with `jar` or `war` and a `main` class; Gradle `application` plugin or `mainClass` setting + - _.NET_: `.csproj`/`.fsproj` with `Exe` or `WinExe` + - _General_: Dockerfile, a runnable entrypoint, no public API surface intended for import by other projects +- **Libraries**: Reusable packages consumed by other projects. Common indicators: `"main"`/`"exports"` in `package.json`, Maven/Gradle packaging as a library jar, .NET `Library`, named exports intended for import by other packages. + +**Destination directory rules**: + +- Applications → `apps/`. Check workspace globs (e.g. `pnpm-workspace.yaml`, `workspaces` in root `package.json`) for an existing `apps/*` entry. + - If `apps/*` is **not** present, add it before importing: update the workspace glob config and commit (or stage) the change. + - Example: `nx import apps/my-app --source=packages/my-app` +- Libraries → follow the dest's existing convention (`packages/`, `libs/`, etc.). + +## Common Issues + +### pnpm Workspace Globs (Critical) + +`nx import` adds the imported directory itself (e.g. `apps`) to `pnpm-workspace.yaml`, **NOT** glob patterns for packages within it. Cross-package imports will fail with `Cannot find module`. + +**Fix**: Replace with proper globs from the source config (e.g. `apps/*`, `libs/shared/*`), then `pnpm install`. + +### Root Dependencies and Config Not Imported (Critical) + +`nx import` does **NOT** merge from the source's root: + +- `dependencies`/`devDependencies` from `package.json` +- `targetDefaults` from `nx.json` (e.g. `"@nx/esbuild:esbuild": { "dependsOn": ["^build"] }` — critical for build ordering) +- `namedInputs` from `nx.json` (e.g. `production` exclusion patterns for test files) +- Plugin configurations from `nx.json` + +**Fix**: Diff source and dest `package.json` + `nx.json`. Add missing deps, merge relevant `targetDefaults` and `namedInputs`. + +### TypeScript Project References + +After import, run `nx sync --yes`. If it reports nothing but typecheck still fails, `nx reset` first, then `nx sync --yes` again. + +### Explicit Executor Path Fixups + +Inferred targets (via Nx plugins) resolve config relative to project root — no changes needed. Explicit executor targets (e.g. `@nx/esbuild:esbuild`) have workspace-root-relative paths (`main`, `outputPath`, `tsConfig`, `assets`, `sourceRoot`) that must be prefixed with the import destination directory. + +### Plugin Detection + +- **Whole-repo import**: `nx import` detects and offers to install plugins. Accept them. +- **Subdirectory import**: Plugins NOT auto-detected. Manually add with `npx nx add @nx/PLUGIN`. Check `include`/`exclude` patterns — defaults won't match alternate directories (e.g. `apps-beta/`). +- Run `npx nx reset` after any plugin config changes. + +### Redundant Root Files (Whole-Repo Only) + +Whole-repo import brings ALL source root files into the dest subdirectory. Clean up: + +- `pnpm-lock.yaml` — stale; dest has its own lockfile +- `pnpm-workspace.yaml` — source workspace config; conflicts with dest +- `node_modules/` — stale symlinks pointing to source filesystem +- `.gitignore` — redundant with dest root `.gitignore` +- `nx.json` — source Nx config; dest has its own +- `README.md` — optional; keep or remove + +**Don't blindly delete** `tsconfig.base.json` — imported projects may extend it via relative paths. + +### Root ESLint Config Missing (Subdirectory Import) + +Subdirectory import doesn't bring the source's root `eslint.config.mjs`, but project configs reference `../../eslint.config.mjs`. + +**Fix order**: + +1. Install ESLint deps first: `pnpm add -wD eslint@^9 @nx/eslint-plugin typescript-eslint` (plus framework-specific plugins) +2. Create root `eslint.config.mjs` (copy from source or create with `@nx/eslint-plugin` base rules) +3. Then `npx nx add @nx/eslint` to register the plugin in `nx.json` + +Install `typescript-eslint` explicitly — pnpm's strict hoisting won't auto-resolve this transitive dep of `@nx/eslint-plugin`. + +### ESLint Version Pinning (Critical) + +**Pin ESLint to v9** (`eslint@^9.0.0`). ESLint 10 breaks `@nx/eslint` and many plugins with cryptic errors like `Cannot read properties of undefined (reading 'version')`. + +`@nx/eslint` may peer-depend on ESLint 8, causing the wrong version to resolve. If lint fails with `Cannot read properties of undefined (reading 'allow')`, add `pnpm.overrides`: + +```json +{ "pnpm": { "overrides": { "eslint": "^9.0.0" } } } +``` + +### Dependency Version Conflicts + +After import, compare key deps (`typescript`, `eslint`, framework-specific). If dest uses newer versions, upgrade imported packages to match (usually safe). If source is newer, may need to upgrade dest first. Use `pnpm.overrides` to enforce single-version policy if desired. + +### Module Boundaries + +Imported projects may lack `tags`. Add tags or update `@nx/enforce-module-boundaries` rules. + +### Project Name Collisions (Multi-Import) + +Same `name` in `package.json` across source and dest causes `MultipleProjectsWithSameNameError`. **Fix**: Rename conflicting names (e.g. `@org/api` → `@org/teama-api`), update all dep references and import statements, `pnpm install`. The root `package.json` of each imported repo also becomes a project — rename those too. + +### Workspace Dep Import Ordering + +`pnpm install` fails during `nx import` if a `"workspace:*"` dependency hasn't been imported yet. File operations still succeed. **Fix**: Import all projects first, then `pnpm install --no-frozen-lockfile`. + +### `.gitkeep` Blocking Subdirectory Import + +The TS preset creates `packages/.gitkeep`. Remove it and commit before importing. + +### Frontend tsconfig Base Settings (Critical) + +The TS preset defaults (`module: "nodenext"`, `moduleResolution: "nodenext"`, `lib: ["es2022"]`) are incompatible with frontend frameworks (React, Next.js, Vue, Vite). After importing frontend projects, verify the dest root `tsconfig.base.json`: + +- **`moduleResolution`**: Must be `"bundler"` (not `"nodenext"`) +- **`module`**: Must be `"esnext"` (not `"nodenext"`) +- **`lib`**: Must include `"dom"` and `"dom.iterable"` (frontend projects need these) +- **`jsx`**: `"react-jsx"` for React-only workspaces, per-project for mixed frameworks + +For **subdirectory imports**, the dest root tsconfig is authoritative — update it. For **whole-repo imports**, imported projects may extend their own nested `tsconfig.base.json`, making this less critical. + +If the dest also has backend projects needing `nodenext`, use per-project overrides instead of changing the root. + +**Gotcha**: TypeScript does NOT merge `lib` arrays — a project-level override **replaces** the base array entirely. Always include all needed entries (e.g. `es2022`, `dom`, `dom.iterable`) in any project-level `lib`. + +### `@nx/react` Typings for Libraries + +React libraries generated with `@nx/react:library` reference `@nx/react/typings/cssmodule.d.ts` and `@nx/react/typings/image.d.ts` in their tsconfig `types`. These fail with `Cannot find type definition file` unless `@nx/react` is installed in the dest workspace. + +**Fix**: `pnpm add -wD @nx/react` + +### Jest Preset Missing (Subdirectory Import) + +Nx presets create `jest.preset.js` at the workspace root, and project jest configs reference it (e.g. `../../jest.preset.js`). Subdirectory import does NOT bring this file. + +**Fix**: + +1. Run `npx nx add @nx/jest` — registers `@nx/jest/plugin` in `nx.json` and updates `namedInputs` +2. Create `jest.preset.js` at workspace root (see `references/JEST.md` for content) — `nx add` only creates this when a generator runs, not on bare `nx add` +3. Install test runner deps: `pnpm add -wD jest jest-environment-jsdom ts-jest @types/jest` +4. Install framework-specific test deps as needed (see `references/JEST.md`) + +For deeper Jest issues (tsconfig.spec.json, Babel transforms, CI atomization, Jest vs Vitest coexistence), see `references/JEST.md`. + +### Target Name Prefixing (Whole-Repo Import) + +When importing a project with existing npm scripts (`build`, `dev`, `start`, `lint`), Nx plugins auto-prefix inferred target names to avoid conflicts: e.g. `next:build`, `vite:build`, `eslint:lint`. + +**Fix**: Remove the Nx-rewritten npm scripts from the imported `package.json`, then either: + +- Accept the prefixed names (e.g. `nx run app:next:build`) +- Rename plugin target names in `nx.json` to use unprefixed names + +## Non-Nx Source Issues + +When the source is a plain pnpm/npm workspace without `nx.json`. + +### npm Script Rewriting (Critical) + +Nx rewrites `package.json` scripts during init, creating broken commands (e.g. `vitest run` → `nx test run`). **Fix**: Remove all rewritten scripts — Nx plugins infer targets from config files. + +### `noEmit` → `composite` + `emitDeclarationOnly` (Critical) + +Plain TS projects use `"noEmit": true`, incompatible with Nx project references. + +**Symptoms**: "typecheck target is disabled because one or more project references set 'noEmit: true'" or TS6310. + +**Fix** in **all** imported tsconfigs: + +1. Remove `"noEmit": true`. If inherited via extends chain, set `"noEmit": false` explicitly. +2. Add `"composite": true`, `"emitDeclarationOnly": true`, `"declarationMap": true` +3. Add `"outDir": "dist"` and `"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"` +4. Add `"extends": "../../tsconfig.base.json"` if missing. Remove settings now inherited from base. + +### Stale node_modules and Lockfiles + +`nx import` may bring `node_modules/` (pnpm symlinks pointing to the source filesystem) and `pnpm-lock.yaml` from the source. Both are stale. + +**Fix**: `rm -rf imported/node_modules imported/pnpm-lock.yaml imported/pnpm-workspace.yaml imported/.gitignore`, then `pnpm install`. + +### ESLint Config Handling + +- **Legacy `.eslintrc.json` (ESLint 8)**: Delete all `.eslintrc.*`, remove v8 deps, create flat `eslint.config.mjs`. +- **Flat config (`eslint.config.js`)**: Self-contained configs can often be left as-is. +- **No ESLint**: Create both root and project-level configs from scratch. + +### TypeScript `paths` Aliases + +Nx uses `package.json` `"exports"` + pnpm workspace linking instead of tsconfig `"paths"`. If packages have proper `"exports"`, paths are redundant. Otherwise, update paths for the new directory structure. + +## Technology-specific Guidance + +Identify technologies in the source repo, then read and apply the matching reference file(s). + +Available references: + +- `references/ESLINT.md` — ESLint projects: duplicate `lint`/`eslint:lint` targets, legacy `.eslintrc.*` linting generated files, flat config `.cjs` self-linting, `typescript-eslint` v7/v9 peer dep conflict, mixed ESLint v8+v9 in one workspace. +- `references/GRADLE.md` +- `references/JEST.md` — Jest testing: `@nx/jest/plugin` setup, jest.preset.js, testing deps by framework, tsconfig.spec.json, Jest vs Vitest coexistence, Babel transforms, CI atomization. +- `references/NEXT.md` — Next.js projects: `@nx/next/plugin` targets, `withNx`, Next.js TS config (`noEmit`, `jsx: "preserve"`), auto-installing deps via wrong PM, non-Nx `create-next-app` imports, mixed Next.js+Vite coexistence. +- `references/TURBOREPO.md` +- `references/VITE.md` — Vite projects (React, Vue, or both): `@nx/vite/plugin` typecheck target, `resolve.alias`/`__dirname` fixes, framework deps, Vue-specific setup, mixed React+Vue coexistence. diff --git a/.agents/skills/nx-import/references/VITE.md b/.agents/skills/nx-import/references/VITE.md new file mode 100644 index 0000000..b777e5a --- /dev/null +++ b/.agents/skills/nx-import/references/VITE.md @@ -0,0 +1,397 @@ +## Vite + +Vite-specific guidance for `nx import`. For generic import issues (pnpm globs, root deps, project references, name collisions, ESLint, frontend tsconfig base settings, `@nx/react` typings, Jest preset, non-Nx source handling), see `SKILL.md`. + +--- + +### `@nx/vite/plugin` Typecheck Target + +`@nx/vite/plugin` defaults `typecheckTargetName` to `"vite:typecheck"`. If the workspace expects `"typecheck"`, set it explicitly in `nx.json`. If `@nx/js/typescript` is also registered, rename one target to avoid conflicts (e.g. `"tsc-typecheck"` for the JS plugin). + +Keep both plugins only if the workspace has non-Vite pure TS libraries — `@nx/js/typescript` handles those while `@nx/vite/plugin` handles Vite projects. + +### @nx/vite Plugin Install Failure + +Plugin init loads `vite.config.ts` before deps are available. **Fix**: `pnpm add -wD vite @vitejs/plugin-react` (or `@vitejs/plugin-vue`) first, then `pnpm exec nx add @nx/vite`. + +### Vite `resolve.alias` and `__dirname` (Non-Nx Sources) + +**`__dirname` undefined** (CJS-only): Replace with `fileURLToPath(new URL('./src', import.meta.url))` from `'node:url'`. + +**`@/` path alias**: Vite's `resolve.alias` works at runtime but TS needs matching `"paths"`. Set `"baseUrl": "."` in project tsconfig. + +**PostCSS/Tailwind**: Verify `content` globs resolve correctly after import. + +### Missing TypeScript `types` (Non-Nx Sources) + +Non-Nx tsconfigs may not declare all needed types. Ensure Vite projects include `"types": ["node", "vite/client"]` in their tsconfig. + +### `noEmit` Fix: Vite-Specific Notes + +See SKILL.md for the generic noEmit→composite fix. Vite-specific additions: + +- Non-Nx Vite projects often have **both** `tsconfig.app.json` and `tsconfig.node.json` with `noEmit` — fix both +- Solution-style tsconfigs (`"files": [], "references": [...]`) may lack `extends`. Add `extends` pointing to the dest root `tsconfig.base.json` so base settings (`moduleResolution`, `lib`) apply. +- This is safe — Vite/Vitest ignore TypeScript emit settings. + +### Dependency Version Conflicts + +**Shared Vite deps (both frameworks):** `vite`, `vitest`, `jsdom`, `@types/node`, `typescript` (dev) + +**Vite 6→7**: Typecheck fails (`Plugin` type mismatch); build/serve still works. Fix: align versions. +**Vitest 3→4**: Usually works; type conflicts may surface in shared test utils. + +--- + +## React Router 7 (Vite-Based) + +React Router 7 (`@react-router/dev`) uses Vite under the hood with a `vite.config.ts` and a `react-router.config.ts`. The `@nx/vite/plugin` detects `vite.config.ts` and creates inferred targets. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `serve` targets. The `build` target invokes the script defined in `package.json` (usually `react-router build`), not `vite build` directly. + +**No separate typecheck target from `@nx/vite/plugin`** — React Router 7 typegen is run as part of `typecheck` (e.g. `react-router typegen && tsc`). The `typecheck` target is inferred from the tsconfig. Keep the `typecheck` script in `package.json` if present; it is not rewritten. + +### tsconfig Notes + +React Router 7 uses a single `tsconfig.json` (no `tsconfig.app.json`/`tsconfig.node.json` split). It includes: + +- `"rootDirs": [".", "./.react-router/types"]` — for generated type files; keep as-is +- `"paths": { "~/*": ["./app/*"] }` — self-referential alias; keep as-is +- `"noEmit": true` — replace with composite settings per SKILL.md + +### Build Output + +React Router 7 outputs to `build/` (not `dist/`). Add `build` to the dest root `.gitignore`. + +### Generated Types Directory + +React Router 7 generates `.react-router/` at the project root for route type generation. Add `.react-router` to the dest root `.gitignore`. + +--- + +## TanStack Start (Vite-Based) + +TanStack Start uses Vinxi under the hood, which wraps Vite. Projects have a standard `vite.config.ts` that `@nx/vite/plugin` detects normally. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `preview`, `serve-static`, `typecheck` targets. The `build` target runs `vite build` which invokes the TanStack Start Vinxi pipeline (produces both client and SSR bundles). + +### tsconfig Notes + +TanStack Start uses a single `tsconfig.json` with `"allowImportingTsExtensions": true` and `"noEmit": true`. Apply the standard noEmit → composite fix. `allowImportingTsExtensions` is compatible with `emitDeclarationOnly: true` — no change needed. + +### `paths` Aliases + +TanStack Start commonly uses `"#/*": ["./src/*"]` and `"@/*": ["./src/*"]`. These are self-referential — keep as-is for a single-project app. + +### Uncommitted Source Repo + +`create-tan-stack` initializes a git repo but does NOT make an initial commit. Before importing, commit first: + +```bash +git -C /path/to/source add . && git -C /path/to/source commit -m "Initial commit" +``` + +### Generated and Build Directories + +TanStack Start / Vinxi / Nitro generate several directories that must be added to the dest root `.gitignore`: + +- `.vinxi` — Vinxi build cache +- `.tanstack` — TanStack generated files +- `.nitro` — Nitro build artifacts +- `.output` — server-side build output (SSR/edge) + +These are not covered by `dist` or `build`. + +--- + +## React-Specific + +### React Dependencies + +**Production:** `react`, `react-dom` +**Dev:** `@types/react`, `@types/react-dom`, `@vitejs/plugin-react`, `@testing-library/react`, `@testing-library/jest-dom`, `jsdom` +**ESLint (Nx sources):** `eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks` +**ESLint (`create-vite`):** `eslint-plugin-react-refresh`, `eslint-plugin-react-hooks` — self-contained flat configs can be left as-is +**Nx plugins:** `@nx/react` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` + +### React TypeScript Configuration + +Add `"jsx": "react-jsx"` — in `tsconfig.base.json` for single-framework workspaces, per-project for mixed (see Mixed section). + +### React ESLint Config + +```js +import nx from "@nx/eslint-plugin" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...nx.configs["flat/react"], + { files: ["**/*.ts", "**/*.tsx"], rules: {} }, +] +``` + +### React Version Conflicts + +React 18 (source) + React 19 (dest): pnpm may hoist mismatched `react-dom`, causing `TypeError: Cannot read properties of undefined (reading 'S')`. **Fix**: Align versions with `pnpm.overrides`. + +### `@testing-library/jest-dom` with Vitest + +If source used Jest: change import to `@testing-library/jest-dom/vitest` in test-setup.ts, add to tsconfig `types`. + +--- + +## Vue-Specific + +### Vue Dependencies + +**Production:** `vue` (plus `vue-router`, `pinia` if used) +**Dev:** `@vitejs/plugin-vue`, `vue-tsc`, `@vue/test-utils`, `jsdom` +**ESLint:** `eslint-plugin-vue`, `vue-eslint-parser`, `@vue/eslint-config-typescript`, `@vue/eslint-config-prettier` +**Nx plugins:** `@nx/vue` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` (install AFTER deps — see below) + +### Vue TypeScript Configuration + +Add to `tsconfig.base.json` (single-framework) or per-project (mixed): + +```json +{ "jsx": "preserve", "jsxImportSource": "vue", "resolveJsonModule": true } +``` + +### `vue-shims.d.ts` + +Vue SFC files need a type declaration. Usually exists in each project's `src/` and imports cleanly. If missing: + +```ts +declare module "*.vue" { + import { defineComponent } from "vue" + const component: ReturnType + export default component +} +``` + +### `vue-tsc` Auto-Detection + +Both `@nx/js/typescript` and `@nx/vite/plugin` auto-detect `vue-tsc` when installed — no manual config needed. Remove source scripts like `"typecheck": "vue-tsc --noEmit"`. + +### ESLint Plugin Installation Order (Critical) + +`@nx/eslint` init **crashes** if Vue ESLint deps aren't installed first (it loads all config files). + +**Correct order:** + +1. `pnpm add -wD eslint@^9 eslint-plugin-vue vue-eslint-parser @vue/eslint-config-typescript @typescript-eslint/parser @nx/eslint-plugin typescript-eslint` +2. Create root `eslint.config.mjs` +3. Then `npx nx add @nx/eslint` + +### Vue ESLint Config Pattern + +```js +import vue from "eslint-plugin-vue" +import vueParser from "vue-eslint-parser" +import tsParser from "@typescript-eslint/parser" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...vue.configs["flat/recommended"], + { + files: ["**/*.vue"], + languageOptions: { parser: vueParser, parserOptions: { parser: tsParser } }, + }, + { + files: ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx", "**/*.vue"], + rules: { "vue/multi-word-component-names": "off" }, + }, +] +``` + +**Important**: `vue-eslint-parser` override must come **AFTER** base config — `flat/typescript` sets the TS parser globally without a `files` filter, breaking `.vue` parsing. + +`vue-eslint-parser` must be an explicit pnpm dependency (strict resolution prevents transitive import). + +**Known issue**: Some generated Vue ESLint configs omit `vue-eslint-parser`. Use the pattern above instead. + +--- + +## Mixed React + Vue + +When both frameworks coexist, several settings become per-project. + +### tsconfig `jsx` — Per-Project Only + +- React: `"jsx": "react-jsx"` in project tsconfig +- Vue: `"jsx": "preserve"`, `"jsxImportSource": "vue"` in project tsconfig +- Root: **NO** `jsx` setting + +### Typecheck — Auto-Detects Framework + +`@nx/vite/plugin` uses `vue-tsc` for Vue projects and `tsc` for React automatically. + +```json +{ + "plugins": [ + { "plugin": "@nx/eslint/plugin", "options": { "targetName": "lint" } }, + { + "plugin": "@nx/vite/plugin", + "options": { + "buildTargetName": "build", + "typecheckTargetName": "typecheck", + "testTargetName": "test" + } + } + ] +} +``` + +Remove `@nx/js/typescript` if all projects use Vite. Keep it (renamed to `"tsc-typecheck"`) only for non-Vite pure TS libs. + +### ESLint — Three-Tier Config + +1. **Root**: Base rules only, no framework-specific rules +2. **React projects**: Extend root + `nx.configs['flat/react']` +3. **Vue projects**: Extend root + `vue.configs['flat/recommended']` + `vue-eslint-parser` + +**Required packages**: Shared (`eslint@^9`, `@nx/eslint-plugin`, `typescript-eslint`, `@typescript-eslint/parser`), React (`eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks`), Vue (`eslint-plugin-vue`, `vue-eslint-parser`) + +`@nx/react`/`@nx/vue` are for generators only — no target conflicts. + +--- + +## Redundant npm Scripts After Import + +`nx import` copies `package.json` verbatim, so npm scripts come along. For Vite-based projects `@nx/vite/plugin` already infers the same targets from `vite.config.ts` — the npm scripts just shadow the plugin with weaker `nx:run-script` wrappers (no first-class caching inputs/outputs). Remove them after import. + +### Standalone Vite App (`create-vite`) + +Remove the following scripts — every one is redundant: + +| Script | Plugin replacement | +| ----------------------------- | ---------------------------------------------------------------------------- | +| `dev: vite` | `@nx/vite/plugin` → `dev` | +| `build: tsc -b && vite build` | `@nx/vite/plugin` → `build`; `typecheck` via `@nx/js/typescript` handles tsc | +| `preview: vite preview` | `@nx/vite/plugin` → `preview` | +| `lint: eslint .` | `@nx/eslint/plugin` → `eslint:lint` | + +### TanStack Start + +Remove `build`, `dev`, `preview`, and `test` scripts, but move any hardcoded `--port` flag to `vite.config.ts` first: + +```ts +// vite.config.ts +export default defineConfig({ + server: { port: 3000 }, // replaces `vite dev --port 3000` + ... +}) +``` + +### React Router 7 — Keep ALL scripts + +Do **not** remove React Router 7 scripts. They use the framework CLI (`react-router build`, `react-router dev`, `react-router-serve`) which is not interchangeable with plain `vite`: + +- `typecheck` runs `react-router typegen && tsc` — typegen must precede `tsc` or it fails on missing route types +- `start` serves the SSR bundle — no plugin equivalent + +--- + +## Fix Orders + +### Nx Source + +1. Generic fixes from SKILL.md (pnpm globs, root deps, executor paths, frontend tsconfig base settings, `@nx/react` typings) +2. Configure `@nx/vite/plugin` typecheck target +3. **React**: `jsx: "react-jsx"` (root or per-project) +4. **Vue**: `jsx: "preserve"` + `jsxImportSource: "vue"`; verify `vue-shims.d.ts`; install ESLint deps before `@nx/eslint` +5. **Mixed**: `jsx` per-project; remove/rename `@nx/js/typescript` +6. `nx sync --yes && nx reset && nx run-many -t typecheck,build,test,lint` + +### Non-Nx Source (additional steps) + +0. Import into `apps/` (see SKILL.md: "Application vs Library Detection") +1. Generic fixes from SKILL.md (stale files cleanup, pnpm globs, rewritten scripts, target name prefixing, noEmit→composite, ESLint handling) +2. Fix `noEmit` in **all** tsconfigs (app, node, etc. — non-Nx projects often have multiple) +3. Add `extends` to solution-style tsconfigs so root settings apply +4. Fix `resolve.alias` / `__dirname` / `baseUrl` +5. Ensure `types` include `vite/client` and `node` +6. Install `@nx/vite` manually if it failed during import +7. Remove redundant npm scripts so `@nx/vite/plugin` infers them natively (see "Redundant npm Scripts" section) +8. **Vue**: Add `outDir` + `**/*.vue.d.ts` to ESLint ignores +9. Full verification + +### Multiple-Source Imports + +See SKILL.md for generic multi-import (name collisions, dep refs). Vite-specific: fix tsconfig `references` paths for alternate directories (`../../libs/` → `../../libs-beta/`). + +### Non-Nx Source: React Router 7 + +1. Ensure source has at least one commit (see SKILL.md: "Source Repo Has No Commits") +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/react` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Add `build` and `.react-router` to dest root `.gitignore` +6. **Keep all npm scripts** — React Router 7 uses framework CLI (`react-router build/dev`), not plain vite (see "Redundant npm Scripts" above) +7. `npm install && nx reset && nx sync --yes` + +### Non-Nx Source: TanStack Start + +1. Ensure source has at least one commit — `create-tan-stack` does NOT auto-commit (see SKILL.md) +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/vitest` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Keep `allowImportingTsExtensions` — compatible with `emitDeclarationOnly: true` +6. Add `.vinxi`, `.tanstack`, `.nitro`, `.output` to dest root `.gitignore` +7. Move hardcoded `--port` from `dev` script into `vite.config.ts` (`server: { port: N }`) +8. Remove redundant npm scripts — `@nx/vite/plugin` infers `build`, `dev`, `preview`, `test` (see "Redundant npm Scripts" above) +9. `npm install && nx reset && nx sync --yes` + +### Quick Reference: React vs Vue + +| Aspect | React | Vue | +| ------------- | ------------------------ | ----------------------------------------- | +| Vite plugin | `@vitejs/plugin-react` | `@vitejs/plugin-vue` | +| Type checker | `tsc` | `vue-tsc` (auto-detected) | +| SFC support | N/A | `vue-shims.d.ts` needed | +| tsconfig jsx | `"react-jsx"` | `"preserve"` + `"jsxImportSource": "vue"` | +| ESLint parser | Standard TS | `vue-eslint-parser` + TS sub-parser | +| ESLint setup | Straightforward | Must install deps before `@nx/eslint` | +| Test utils | `@testing-library/react` | `@vue/test-utils` | + +### Quick Reference: Vite-Based React Frameworks + +| Aspect | Vite (standalone) | React Router 7 | TanStack Start | +| ------------------ | ----------------- | ----------------------- | ------------------------ | +| Build config | `vite.config.ts` | `vite.config.ts` | `vite.config.ts` | +| Build output | `dist/` | `build/` | `dist/` | +| SSR bundle | No | Yes (`build/server/`) | Yes (`dist/server/`) | +| tsconfig layout | app + node split | Single tsconfig | Single tsconfig | +| Auto-committed | Depends on tool | Usually yes | **No — commit first** | +| `nx import` plugin | `@nx/vite` | `@nx/vite`, `@nx/react` | `@nx/vite`, `@nx/vitest` | + +--- + +## Iteration Log + +### Scenario 6: Multiple non-Nx React apps (CRA, Next.js, React Router 7, TanStack Start, Vite) → TS preset (PASS) + +- Sources: 5 standalone non-Nx repos with different build tools +- Dest: CNW ts preset (Nx 22.5.1), npm workspaces, `packages/*` +- Import: whole-repo for each, sequential into `packages/` +- Pre-import fixes: + 1. Removed `packages/.gitkeep` and committed + 2. `git init && git add . && git commit` in Vite app (no git at all) + 3. `git add . && git commit` in TanStack app (git init'd but no commits) +- Import: `npm exec nx -- import packages/ --source=. --ref=main --no-interactive` + - Next.js import auto-installed `@nx/eslint`, `@nx/next` + - React Router 7 import auto-installed `@nx/vite`, `@nx/react`, `@nx/docker` (Dockerfile present) + - TanStack import auto-installed `@nx/vitest` +- Post-import fixes: + 1. Removed stale `node_modules/`, `package-lock.json`, `.gitignore` from each package + 2. Removed Nx-rewritten scripts from `board-games-nextjs/package.json` (had `"build": "nx next:build"`, etc.) + 3. Updated root `tsconfig.base.json`: `nodenext` → `bundler`, added `dom`/`dom.iterable` to lib, added `jsx: react-jsx` + 4. Added `build` to dest root `.gitignore` (CRA and React Router 7 output there) + 5. Fixed `noEmit` → `composite + emitDeclarationOnly` in: `board-games-vite/tsconfig.app.json`, `board-games-vite/tsconfig.node.json`, `board-games-react-router/tsconfig.json`, `board-games-tanstack/tsconfig.json` + 6. Fixed `tsBuildInfoFile` paths from `./node_modules/.tmp/...` to `./dist/...` + 7. Installed root `@types/react`, `@types/react-dom`, `@types/node` +- All targets green: `build` for all 5 projects; `typecheck` for Vite/React Router/TanStack; `next:build` for Next.js diff --git a/.agents/skills/nx-plugins/SKILL.md b/.agents/skills/nx-plugins/SKILL.md new file mode 100644 index 0000000..89223c7 --- /dev/null +++ b/.agents/skills/nx-plugins/SKILL.md @@ -0,0 +1,9 @@ +--- +name: nx-plugins +description: Find and add Nx plugins. USE WHEN user wants to discover available plugins, install a new plugin, or add support for a specific framework or technology to the workspace. +--- + +## Finding and Installing new plugins + +- List plugins: `pnpm nx list` +- Install plugins `pnpm nx add `. Example: `pnpm nx add @nx/react`. diff --git a/.agents/skills/nx-run-tasks/SKILL.md b/.agents/skills/nx-run-tasks/SKILL.md new file mode 100644 index 0000000..7f1263a --- /dev/null +++ b/.agents/skills/nx-run-tasks/SKILL.md @@ -0,0 +1,58 @@ +--- +name: nx-run-tasks +description: Helps with running tasks in an Nx workspace. USE WHEN the user wants to execute build, test, lint, serve, or run any other tasks defined in the workspace. +--- + +You can run tasks with Nx in the following way. + +Keep in mind that you might have to prefix things with npx/pnpx/yarn if the user doesn't have nx installed globally. Look at the package.json or lockfile to determine which package manager is in use. + +For more details on any command, run it with `--help` (e.g. `nx run-many --help`, `nx affected --help`). + +## Understand which tasks can be run + +You can check those via `nx show project --json`, for example `nx show project myapp --json`. It contains a `targets` section which has information about targets that can be run. You can also just look at the `package.json` scripts or `project.json` targets, but you might miss out on inferred tasks by Nx plugins. + +## Run a single task + +``` +nx run : +``` + +where `project` is the project name defined in `package.json` or `project.json` (if present). + +## Run multiple tasks + +``` +nx run-many -t build test lint typecheck +``` + +You can pass a `-p` flag to filter to specific projects, otherwise it runs on all projects. You can also use `--exclude` to exclude projects, and `--parallel` to control the number of parallel processes (default is 3). + +Examples: + +- `nx run-many -t test -p proj1 proj2` — test specific projects +- `nx run-many -t test --projects=*-app --exclude=excluded-app` — test projects matching a pattern +- `nx run-many -t test --projects=tag:api-*` — test projects by tag + +## Run tasks for affected projects + +Use `nx affected` to only run tasks on projects that have been changed and projects that depend on changed projects. This is especially useful in CI and for large workspaces. + +``` +nx affected -t build test lint +``` + +By default it compares against the base branch. You can customize this: + +- `nx affected -t test --base=main --head=HEAD` — compare against a specific base and head +- `nx affected -t test --files=libs/mylib/src/index.ts` — specify changed files directly + +## Useful flags + +These flags work with `run`, `run-many`, and `affected`: + +- `--skipNxCache` — rerun tasks even when results are cached +- `--verbose` — print additional information such as stack traces +- `--nxBail` — stop execution after the first failed task +- `--configuration=` — use a specific configuration (e.g. `production`) diff --git a/.agents/skills/nx-workspace/SKILL.md b/.agents/skills/nx-workspace/SKILL.md new file mode 100644 index 0000000..4b5110a --- /dev/null +++ b/.agents/skills/nx-workspace/SKILL.md @@ -0,0 +1,286 @@ +--- +name: nx-workspace +description: "Explore and understand Nx workspaces. USE WHEN answering questions about the workspace, projects, or tasks. ALSO USE WHEN an nx command fails or you need to check available targets/configuration before running a task. EXAMPLES: 'What projects are in this workspace?', 'How is project X configured?', 'What depends on library Y?', 'What targets can I run?', 'Cannot find configuration for task', 'debug nx task failure'." +--- + +# Nx Workspace Exploration + +This skill provides read-only exploration of Nx workspaces. Use it to understand workspace structure, project configuration, available targets, and dependencies. + +Keep in mind that you might have to prefix commands with `npx`/`pnpx`/`yarn` if nx isn't installed globally. Check the lockfile to determine the package manager in use. + +## Listing Projects + +Use `nx show projects` to list projects in the workspace. + +The project filtering syntax (`-p`/`--projects`) works across many Nx commands including `nx run-many`, `nx release`, `nx show projects`, and more. Filters support explicit names, glob patterns, tag references (e.g. `tag:name`), directories, and negation (e.g. `!project-name`). + +```bash +# List all projects +nx show projects + +# Filter by pattern (glob) +nx show projects --projects "apps/*" +nx show projects --projects "shared-*" + +# Filter by tag +nx show projects --projects "tag:publishable" +nx show projects -p 'tag:publishable,!tag:internal' + +# Filter by target (projects that have a specific target) +nx show projects --withTarget build + +# Combine filters +nx show projects --type lib --withTarget test +nx show projects --affected --exclude="*-e2e" +nx show projects -p "tag:scope:client,packages/*" + +# Negate patterns +nx show projects -p '!tag:private' +nx show projects -p '!*-e2e' + +# Output as JSON +nx show projects --json +``` + +## Project Configuration + +Use `nx show project --json` to get the full resolved configuration for a project. + +**Important**: Do NOT read `project.json` directly - it only contains partial configuration. The `nx show project --json` command returns the full resolved config including inferred targets from plugins. + +You can read the full project schema at `node_modules/nx/schemas/project-schema.json` to understand nx project configuration options. + +```bash +# Get full project configuration +nx show project my-app --json + +# Extract specific parts from the JSON +nx show project my-app --json | jq '.targets' +nx show project my-app --json | jq '.targets.build' +nx show project my-app --json | jq '.targets | keys' + +# Check project metadata +nx show project my-app --json | jq '{name, root, sourceRoot, projectType, tags}' +``` + +## Target Information + +Targets define what tasks can be run on a project. + +```bash +# List all targets for a project +nx show project my-app --json | jq '.targets | keys' + +# Get full target configuration +nx show project my-app --json | jq '.targets.build' + +# Check target executor/command +nx show project my-app --json | jq '.targets.build.executor' +nx show project my-app --json | jq '.targets.build.command' + +# View target options +nx show project my-app --json | jq '.targets.build.options' + +# Check target inputs/outputs (for caching) +nx show project my-app --json | jq '.targets.build.inputs' +nx show project my-app --json | jq '.targets.build.outputs' + +# Find projects with a specific target +nx show projects --withTarget serve +nx show projects --withTarget e2e +``` + +## Workspace Configuration + +Read `nx.json` directly for workspace-level configuration. +You can read the full project schema at `node_modules/nx/schemas/nx-schema.json` to understand nx project configuration options. + +```bash +# Read the full nx.json +cat nx.json + +# Or use jq for specific sections +cat nx.json | jq '.targetDefaults' +cat nx.json | jq '.namedInputs' +cat nx.json | jq '.plugins' +cat nx.json | jq '.generators' +``` + +Key nx.json sections: + +- `targetDefaults` - Default configuration applied to all targets of a given name +- `namedInputs` - Reusable input definitions for caching +- `plugins` - Nx plugins and their configuration +- ...and much more, read the schema or nx.json for details + +## Affected Projects + +If the user is asking about affected projects, read the [affected projects reference](references/AFFECTED.md) for detailed commands and examples. + +## Common Exploration Patterns + +### "What's in this workspace?" + +```bash +nx show projects +nx show projects --type app +nx show projects --type lib +``` + +### "How do I build/test/lint project X?" + +```bash +nx show project X --json | jq '.targets | keys' +nx show project X --json | jq '.targets.build' +``` + +### "What depends on library Y?" + +```bash +# Use the project graph to find dependents +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "Y") | .key' +``` + +## Programmatic Answers + +When processing nx CLI results, use command-line tools to compute the answer programmatically rather than counting or parsing output manually. Always use `--json` flags to get structured output that can be processed with `jq`, `grep`, or other tools you have installed locally. + +### Listing Projects + +```bash +nx show projects --json +``` + +Example output: + +```json +["my-app", "my-app-e2e", "shared-ui", "shared-utils", "api"] +``` + +Common operations: + +```bash +# Count projects +nx show projects --json | jq 'length' + +# Filter by pattern +nx show projects --json | jq '.[] | select(startswith("shared-"))' + +# Get affected projects as array +nx show projects --affected --json | jq '.' +``` + +### Project Details + +```bash +nx show project my-app --json +``` + +Example output: + +```json +{ + "root": "apps/my-app", + "name": "my-app", + "sourceRoot": "apps/my-app/src", + "projectType": "application", + "tags": ["type:app", "scope:client"], + "targets": { + "build": { + "executor": "@nx/vite:build", + "options": { "outputPath": "dist/apps/my-app" } + }, + "serve": { + "executor": "@nx/vite:dev-server", + "options": { "buildTarget": "my-app:build" } + }, + "test": { + "executor": "@nx/vite:test", + "options": {} + } + }, + "implicitDependencies": [] +} +``` + +Common operations: + +```bash +# Get target names +nx show project my-app --json | jq '.targets | keys' + +# Get specific target config +nx show project my-app --json | jq '.targets.build' + +# Get tags +nx show project my-app --json | jq '.tags' + +# Get project root +nx show project my-app --json | jq -r '.root' +``` + +### Project Graph + +```bash +nx graph --print +``` + +Example output: + +```json +{ + "graph": { + "nodes": { + "my-app": { + "name": "my-app", + "type": "app", + "data": { "root": "apps/my-app", "tags": ["type:app"] } + }, + "shared-ui": { + "name": "shared-ui", + "type": "lib", + "data": { "root": "libs/shared-ui", "tags": ["type:ui"] } + } + }, + "dependencies": { + "my-app": [ + { "source": "my-app", "target": "shared-ui", "type": "static" } + ], + "shared-ui": [] + } + } +} +``` + +Common operations: + +```bash +# Get all project names from graph +nx graph --print | jq '.graph.nodes | keys' + +# Find dependencies of a project +nx graph --print | jq '.graph.dependencies["my-app"]' + +# Find projects that depend on a library +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "shared-ui") | .key' +``` + +## Troubleshooting + +### "Cannot find configuration for task X:target" + +```bash +# Check what targets exist on the project +nx show project X --json | jq '.targets | keys' + +# Check if any projects have that target +nx show projects --withTarget target +``` + +### "The workspace is out of sync" + +```bash +nx sync +nx reset # if sync doesn't fix stale cache +``` diff --git a/.agents/skills/nx-workspace/references/AFFECTED.md b/.agents/skills/nx-workspace/references/AFFECTED.md new file mode 100644 index 0000000..e30f18f --- /dev/null +++ b/.agents/skills/nx-workspace/references/AFFECTED.md @@ -0,0 +1,27 @@ +## Affected Projects + +Find projects affected by changes in the current branch. + +```bash +# Affected since base branch (auto-detected) +nx show projects --affected + +# Affected with explicit base +nx show projects --affected --base=main +nx show projects --affected --base=origin/main + +# Affected between two commits +nx show projects --affected --base=abc123 --head=def456 + +# Affected apps only +nx show projects --affected --type app + +# Affected excluding e2e projects +nx show projects --affected --exclude="*-e2e" + +# Affected by uncommitted changes +nx show projects --affected --uncommitted + +# Affected by untracked files +nx show projects --affected --untracked +``` diff --git a/.codex/agents/ci-monitor-subagent.toml b/.codex/agents/ci-monitor-subagent.toml new file mode 100644 index 0000000..591c7cb --- /dev/null +++ b/.codex/agents/ci-monitor-subagent.toml @@ -0,0 +1,46 @@ +developer_instructions = """ +# CI Monitor Subagent + +You are a CI helper. You call ONE MCP tool per invocation and return the result. Do not loop, poll, or sleep. + +## Commands + +The main agent tells you which command to run: + +### FETCH_STATUS + +Call `ci_information` with the provided branch and select fields. Return a JSON object with ONLY these fields: +`{ cipeStatus, selfHealingStatus, verificationStatus, selfHealingEnabled, selfHealingSkippedReason, failureClassification, failedTaskIds, verifiedTaskIds, couldAutoApplyTasks, autoApplySkipped, autoApplySkipReason, userAction, cipeUrl, commitSha, shortLink }` + +### FETCH_HEAVY + +Call `ci_information` with heavy select fields. Summarize the heavy content and return: + +```json +{ + "shortLink": "...", + "failedTaskIds": ["..."], + "verifiedTaskIds": ["..."], + "suggestedFixDescription": "...", + "suggestedFixSummary": "...", + "selfHealingSkipMessage": "...", + "taskFailureSummaries": [{ "taskId": "...", "summary": "..." }] +} +``` + +Do NOT return raw suggestedFix diffs or raw taskOutputSummary — summarize them. +The main agent uses these summaries to understand what failed and attempt local fixes. + +### UPDATE_FIX + +Call `update_self_healing_fix` with the provided shortLink and action (APPLY/REJECT/RERUN_ENVIRONMENT_STATE). Return the result message (success/failure string). + +### FETCH_THROTTLE_INFO + +Call `ci_information` with the provided URL. Return ONLY: `{ shortLink, cipeUrl }` + +## Important + +- Execute ONE command and return immediately +- Do NOT poll, loop, sleep, or make decisions +- Extract and return ONLY the fields specified for each command — do NOT dump the full MCP response""" diff --git a/.codex/config.toml b/.codex/config.toml new file mode 100644 index 0000000..ef97e26 --- /dev/null +++ b/.codex/config.toml @@ -0,0 +1,11 @@ + +mcp_servers.nx-mcp.command = 'npx' +mcp_servers.nx-mcp.args = [ + 'nx', + 'mcp', +] +features.multi_agent = true + +[agents.ci-monitor-subagent] +description = 'CI helper for /monitor-ci. Fetches CI status, retrieves fix details, or updates self-healing fixes. Executes one MCP tool call and returns the result.' +config_file = 'agents/ci-monitor-subagent.toml' diff --git a/.github/agents/ci-monitor-subagent.agent.md b/.github/agents/ci-monitor-subagent.agent.md new file mode 100644 index 0000000..662fd26 --- /dev/null +++ b/.github/agents/ci-monitor-subagent.agent.md @@ -0,0 +1,49 @@ +--- +description: CI helper for /monitor-ci. Fetches CI status, retrieves fix details, or updates self-healing fixes. Executes one MCP tool call and returns the result. +--- + +# CI Monitor Subagent + +You are a CI helper. You call ONE MCP tool per invocation and return the result. Do not loop, poll, or sleep. + +## Commands + +The main agent tells you which command to run: + +### FETCH_STATUS + +Call `ci_information` with the provided branch and select fields. Return a JSON object with ONLY these fields: +`{ cipeStatus, selfHealingStatus, verificationStatus, selfHealingEnabled, selfHealingSkippedReason, failureClassification, failedTaskIds, verifiedTaskIds, couldAutoApplyTasks, autoApplySkipped, autoApplySkipReason, userAction, cipeUrl, commitSha, shortLink }` + +### FETCH_HEAVY + +Call `ci_information` with heavy select fields. Summarize the heavy content and return: + +```json +{ + "shortLink": "...", + "failedTaskIds": ["..."], + "verifiedTaskIds": ["..."], + "suggestedFixDescription": "...", + "suggestedFixSummary": "...", + "selfHealingSkipMessage": "...", + "taskFailureSummaries": [{ "taskId": "...", "summary": "..." }] +} +``` + +Do NOT return raw suggestedFix diffs or raw taskOutputSummary — summarize them. +The main agent uses these summaries to understand what failed and attempt local fixes. + +### UPDATE_FIX + +Call `update_self_healing_fix` with the provided shortLink and action (APPLY/REJECT/RERUN_ENVIRONMENT_STATE). Return the result message (success/failure string). + +### FETCH_THROTTLE_INFO + +Call `ci_information` with the provided URL. Return ONLY: `{ shortLink, cipeUrl }` + +## Important + +- Execute ONE command and return immediately +- Do NOT poll, loop, sleep, or make decisions +- Extract and return ONLY the fields specified for each command — do NOT dump the full MCP response diff --git a/.github/prompts/monitor-ci.prompt.md b/.github/prompts/monitor-ci.prompt.md new file mode 100644 index 0000000..005369a --- /dev/null +++ b/.github/prompts/monitor-ci.prompt.md @@ -0,0 +1,301 @@ +--- +description: Monitor Nx Cloud CI pipeline and handle self-healing fixes. USE WHEN user says "monitor ci", "watch ci", "ci monitor", "watch ci for this branch", "track ci", "check ci status", wants to track CI status, or needs help with self-healing CI fixes. Prefer this skill over native CI provider tools (gh, glab, etc.) for CI monitoring — it integrates with Nx Cloud self-healing which those tools cannot access. +argument-hint: '[instructions] [--max-cycles N] [--timeout MINUTES] [--verbosity minimal|medium|verbose] [--branch BRANCH] [--fresh] [--auto-fix-workflow] [--new-cipe-timeout MINUTES] [--local-verify-attempts N]' +--- + +# Monitor CI Command + +You are the orchestrator for monitoring Nx Cloud CI pipeline executions and handling self-healing fixes. You spawn subagents to interact with Nx Cloud, run deterministic decision scripts, and take action based on the results. + +## Context + +- **Current Branch:** !`git branch --show-current` +- **Current Commit:** !`git rev-parse --short HEAD` +- **Remote Status:** !`git status -sb | head -1` + +## User Instructions + +${input:args} + +**Important:** If user provides specific instructions, respect them over default behaviors described below. + +## Configuration Defaults + +| Setting | Default | Description | +| ------------------------- | ------------- | ------------------------------------------------------------------------- | +| `--max-cycles` | 10 | Maximum **agent-initiated** CI Attempt cycles before timeout | +| `--timeout` | 120 | Maximum duration in minutes | +| `--verbosity` | medium | Output level: minimal, medium, verbose | +| `--branch` | (auto-detect) | Branch to monitor | +| `--fresh` | false | Ignore previous context, start fresh | +| `--auto-fix-workflow` | false | Attempt common fixes for pre-CI-Attempt failures (e.g., lockfile updates) | +| `--new-cipe-timeout` | 10 | Minutes to wait for new CI Attempt after action | +| `--local-verify-attempts` | 3 | Max local verification + enhance cycles before pushing to CI | + +Parse any overrides from `${input:args}` and merge with defaults. + +## Nx Cloud Connection Check + +Before starting the monitoring loop, verify the workspace is connected to Nx Cloud. Without this connection, no CI data is available and the entire skill is inoperable. + +### Step 0: Verify Nx Cloud Connection + +1. **Check `nx.json`** at workspace root for `nxCloudId` or `nxCloudAccessToken` +2. **If `nx.json` missing OR neither property exists** → exit with: + + ``` + Nx Cloud not connected. Unlock 70% faster CI and auto-fix broken PRs with https://nx.dev/nx-cloud + ``` + +3. **If connected** → continue to main loop + +## Architecture Overview + +1. **This skill (orchestrator)**: spawns subagents, runs scripts, prints status, does local coding work +2. **ci-monitor-subagent (haiku)**: calls one MCP tool (ci_information or update_self_healing_fix), returns structured result, exits +3. **ci-poll-decide.mjs (deterministic script)**: takes ci_information result + state, returns action + status message +4. **ci-state-update.mjs (deterministic script)**: manages budget gates, post-action state transitions, and cycle classification + +## Status Reporting + +The decision script handles message formatting based on verbosity. When printing messages to the user: + +- Prepend `[monitor-ci]` to every message from the script's `message` field +- For your own action messages (e.g. "Applying fix via MCP..."), also prepend `[monitor-ci]` + +## Anti-Patterns + +These behaviors cause real problems — racing with self-healing, losing CI progress, or wasting context: + +| Anti-Pattern | Why It's Bad | +| ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------ | +| Using CI provider CLIs with `--watch` flags (e.g., `gh pr checks --watch`, `glab ci status -w`) | Bypasses Nx Cloud self-healing entirely | +| Writing custom CI polling scripts | Unreliable, pollutes context, no self-healing | +| Cancelling CI workflows/pipelines | Destructive, loses CI progress | +| Running CI checks on main agent | Wastes main agent context tokens | +| Independently analyzing/fixing CI failures while polling | Races with self-healing, causes duplicate fixes and confused state | + +**If this skill fails to activate**, the fallback is: + +1. Use CI provider CLI for a one-time, read-only status check (single call, no watch/polling flags) +2. Immediately delegate to this skill with gathered context +3. Do not continue polling on main agent — it wastes context tokens and bypasses self-healing + +## Session Context Behavior + +If the user previously ran `/monitor-ci` in this session, you may have prior state (poll counts, last CI Attempt URL, etc.). Resume from that state unless `--fresh` is set, in which case discard it and start from Step 1. + +## MCP Tool Reference + +Three field sets control polling efficiency — use the lightest set that gives you what you need: + +```yaml +WAIT_FIELDS: 'cipeUrl,commitSha,cipeStatus' +LIGHT_FIELDS: 'cipeStatus,cipeUrl,branch,commitSha,selfHealingStatus,verificationStatus,userAction,failedTaskIds,verifiedTaskIds,selfHealingEnabled,failureClassification,couldAutoApplyTasks,autoApplySkipped,autoApplySkipReason,shortLink,confidence,confidenceReasoning,hints,selfHealingSkippedReason,selfHealingSkipMessage' +HEAVY_FIELDS: 'taskOutputSummary,suggestedFix,suggestedFixReasoning,suggestedFixDescription' +``` + +The `ci_information` tool accepts `branch` (optional, defaults to current git branch), `select` (comma-separated field names), and `pageToken` (0-based pagination for long strings). + +The `update_self_healing_fix` tool accepts a `shortLink` and an action: `APPLY`, `REJECT`, or `RERUN_ENVIRONMENT_STATE`. + +## Default Behaviors by Status + +The decision script returns one of the following statuses. This table defines the **default behavior** for each. User instructions can override any of these. + +**Simple exits** — just report and exit: + +| Status | Default Behavior | +| ----------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `ci_success` | Exit with success | +| `cipe_canceled` | Exit, CI was canceled | +| `cipe_timed_out` | Exit, CI timed out | +| `polling_timeout` | Exit, polling timeout reached | +| `circuit_breaker` | Exit, no progress after 5 consecutive polls | +| `environment_rerun_cap` | Exit, environment reruns exhausted | +| `fix_auto_applying` | Self-healing is handling it — just record `last_cipe_url`, enter wait mode. No MCP call or local git ops needed. | +| `error` | Wait 60s and loop | + +**Statuses requiring action** — when handling these in Step 3, read `references/fix-flows.md` for the detailed flow: + +| Status | Summary | +| ------------------------ | --------------------------------------------------------------------------------------------- | +| `fix_auto_apply_skipped` | Fix verified but auto-apply skipped (e.g., loop prevention). Inform user, offer manual apply. | +| `fix_apply_ready` | Fix verified (all tasks or e2e-only). Apply via MCP. | +| `fix_needs_local_verify` | Fix has unverified non-e2e tasks. Run locally, then apply or enhance. | +| `fix_needs_review` | Fix verification failed/not attempted. Analyze and decide. | +| `fix_failed` | Self-healing failed. Fetch heavy data, attempt local fix (gate check first). | +| `no_fix` | No fix available. Fetch heavy data, attempt local fix (gate check first) or exit. | +| `environment_issue` | Request environment rerun via MCP (gate check first). | +| `self_healing_throttled` | Reject old fixes, attempt local fix. | +| `no_new_cipe` | CI Attempt never spawned. Auto-fix workflow or exit with guidance. | +| `cipe_no_tasks` | CI failed with no tasks. Retry once with empty commit. | + +**Key rules (always apply):** + +- **Git safety**: Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets +- **Environment failures** (OOM, command not found, permission denied): bail immediately. These aren't code bugs, so spending local-fix budget on them is wasteful +- **Gate check**: Run `ci-state-update.mjs gate` before local fix attempts — if budget exhausted, print message and exit + +## Main Loop + +### Step 1: Initialize Tracking + +``` +cycle_count = 0 # Only incremented for agent-initiated cycles (counted against --max-cycles) +start_time = now() +no_progress_count = 0 +local_verify_count = 0 +env_rerun_count = 0 +last_cipe_url = null +expected_commit_sha = null +agent_triggered = false # Set true after monitor takes an action that triggers new CI Attempt +poll_count = 0 +wait_mode = false +prev_status = null +prev_cipe_status = null +prev_sh_status = null +prev_verification_status = null +prev_failure_classification = null +``` + +### Step 2: Polling Loop + +Repeat until done: + +#### 2a. Spawn subagent (FETCH_STATUS) + +Determine select fields based on mode: + +- **Wait mode**: use WAIT_FIELDS (`cipeUrl,commitSha,cipeStatus`) +- **Normal mode (first poll or after newCipeDetected)**: use LIGHT_FIELDS + +Call the `ci_information` tool with the determined `select` fields for the current branch. Wait for the result before proceeding. + +#### 2b. Run decision script + +```bash +node /scripts/ci-poll-decide.mjs '' \ + [--wait-mode] \ + [--prev-cipe-url ] \ + [--expected-sha ] \ + [--prev-status ] \ + [--timeout ] \ + [--new-cipe-timeout ] \ + [--env-rerun-count ] \ + [--no-progress-count ] \ + [--prev-cipe-status ] \ + [--prev-sh-status ] \ + [--prev-verification-status ] \ + [--prev-failure-classification ] +``` + +The script outputs a single JSON line: `{ action, code, message, delay?, noProgressCount, envRerunCount, fields?, newCipeDetected?, verifiableTaskIds? }` + +#### 2c. Process script output + +Parse the JSON output and update tracking state: + +- `no_progress_count = output.noProgressCount` +- `env_rerun_count = output.envRerunCount` +- `prev_cipe_status = subagent_result.cipeStatus` +- `prev_sh_status = subagent_result.selfHealingStatus` +- `prev_verification_status = subagent_result.verificationStatus` +- `prev_failure_classification = subagent_result.failureClassification` +- `prev_status = output.action + ":" + (output.code || subagent_result.cipeStatus)` +- `poll_count++` + +Based on `action`: + +- **`action == "poll"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a + - If `output.newCipeDetected`: clear wait mode, reset `wait_mode = false` +- **`action == "wait"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a +- **`action == "done"`**: Proceed to Step 3 with `output.code` + +### Step 3: Handle Actionable Status + +When decision script returns `action == "done"`: + +1. Run cycle-check (Step 4) **before** handling the code +2. Check the returned `code` +3. Look up default behavior in the table above +4. Check if user instructions override the default +5. Execute the appropriate action +6. **If action expects new CI Attempt**, update tracking (see Step 3a) +7. If action results in looping, go to Step 2 + +#### Tool calls for actions + +Several statuses require fetching additional data or calling tools: + +- **fix_apply_ready**: Call `update_self_healing_fix` with action `APPLY` +- **fix_needs_local_verify**: Call `ci_information` with HEAVY_FIELDS for fix details before local verification +- **fix_needs_review**: Call `ci_information` with HEAVY_FIELDS → get `suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries` +- **fix_failed / no_fix**: Call `ci_information` with HEAVY_FIELDS → get `taskFailureSummaries` for local fix context +- **environment_issue**: Call `update_self_healing_fix` with action `RERUN_ENVIRONMENT_STATE` +- **self_healing_throttled**: Call `ci_information` with HEAVY_FIELDS → get `selfHealingSkipMessage`; then call `update_self_healing_fix` for each old fix + +### Step 3a: Track State for New-CI-Attempt Detection + +After actions that should trigger a new CI Attempt, run: + +```bash +node /scripts/ci-state-update.mjs post-action \ + --action \ + --cipe-url \ + --commit-sha +``` + +Action types: `fix-auto-applying`, `apply-mcp`, `apply-local-push`, `reject-fix-push`, `local-fix-push`, `env-rerun`, `auto-fix-push`, `empty-commit-push` + +The script returns `{ waitMode, pollCount, lastCipeUrl, expectedCommitSha, agentTriggered }`. Update all tracking state from the output, then go to Step 2. + +### Step 4: Cycle Classification and Progress Tracking + +When the decision script returns `action == "done"`, run cycle-check **before** handling the code: + +```bash +node /scripts/ci-state-update.mjs cycle-check \ + --code \ + [--agent-triggered] \ + --cycle-count --max-cycles \ + --env-rerun-count +``` + +The script returns `{ cycleCount, agentTriggered, envRerunCount, approachingLimit, message }`. Update tracking state from the output. + +- If `approachingLimit` → ask user whether to continue (with 5 or 10 more cycles) or stop monitoring +- If previous cycle was NOT agent-triggered (human pushed), log that human-initiated push was detected + +#### Progress Tracking + +- `no_progress_count`, circuit breaker (5 polls), and backoff reset are handled by ci-poll-decide.mjs (progress = any change in cipeStatus, selfHealingStatus, verificationStatus, or failureClassification) +- `env_rerun_count` reset on non-environment status is handled by ci-state-update.mjs cycle-check +- On new CI Attempt detected (poll script returns `newCipeDetected`) → reset `local_verify_count = 0`, `env_rerun_count = 0` + +## Error Handling + +| Error | Action | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------- | +| Git rebase conflict | Report to user, exit | +| `nx-cloud apply-locally` fails | Reject fix via MCP (`action: "REJECT"`), then attempt manual patch (Reject + Fix From Scratch Flow) or exit | +| MCP tool error | Retry once, if fails report to user | +| Subagent spawn failure | Retry once, if fails exit with error | +| Decision script error | Treat as `error` status, increment `no_progress_count` | +| No new CI Attempt detected | If `--auto-fix-workflow`, try lockfile update; otherwise report to user with guidance | +| Lockfile auto-fix fails | Report to user, exit with guidance to check CI logs | + +## User Instruction Examples + +Users can override default behaviors: + +| Instruction | Effect | +| ------------------------------------------------ | --------------------------------------------------- | +| "never auto-apply" | Always prompt before applying any fix | +| "always ask before git push" | Prompt before each push | +| "reject any fix for e2e tasks" | Auto-reject if `failedTaskIds` contains e2e | +| "apply all fixes regardless of verification" | Skip verification check, apply everything | +| "if confidence < 70, reject" | Check confidence field before applying | +| "run 'nx affected -t typecheck' before applying" | Add local verification step | +| "auto-fix workflow failures" | Attempt lockfile updates on pre-CI-Attempt failures | +| "wait 45 min for new CI Attempt" | Override new-CI-Attempt timeout (default: 10 min) | diff --git a/.github/skills/link-workspace-packages/SKILL.md b/.github/skills/link-workspace-packages/SKILL.md new file mode 100644 index 0000000..de13134 --- /dev/null +++ b/.github/skills/link-workspace-packages/SKILL.md @@ -0,0 +1,127 @@ +--- +name: link-workspace-packages +description: 'Link workspace packages in monorepos (npm, yarn, pnpm, bun). USE WHEN: (1) you just created or generated new packages and need to wire up their dependencies, (2) user imports from a sibling package and needs to add it as a dependency, (3) you get resolution errors for workspace packages (@org/*) like "cannot find module", "failed to resolve import", "TS2307", or "cannot resolve". DO NOT patch around with tsconfig paths or manual package.json edits - use the package manager''s workspace commands to fix actual linking.' +--- + +# Link Workspace Packages + +Add dependencies between packages in a monorepo. All package managers support workspaces but with different syntax. + +## Detect Package Manager + +Check whether there's a `packageManager` field in the root-level `package.json`. + +Alternatively check lockfile in repo root: + +- `pnpm-lock.yaml` → pnpm +- `yarn.lock` → yarn +- `bun.lock` / `bun.lockb` → bun +- `package-lock.json` → npm + +## Workflow + +1. Identify consumer package (the one importing) +2. Identify provider package(s) (being imported) +3. Add dependency using package manager's workspace syntax +4. Verify symlinks created in consumer's `node_modules/` + +--- + +## pnpm + +Uses `workspace:` protocol - symlinks only created when explicitly declared. + +```bash +# From consumer directory +pnpm add @org/ui --workspace + +# Or with --filter from anywhere +pnpm add @org/ui --filter @org/app --workspace +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## yarn (v2+/berry) + +Also uses `workspace:` protocol. + +```bash +yarn workspace @org/app add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:^" } } +``` + +--- + +## npm + +No `workspace:` protocol. npm auto-symlinks workspace packages. + +```bash +npm install @org/ui --workspace @org/app +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "*" } } +``` + +npm resolves to local workspace automatically during install. + +--- + +## bun + +Supports `workspace:` protocol (pnpm-compatible). + +```bash +cd packages/app && bun add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## Examples + +**Example 1: pnpm - link ui lib to app** + +```bash +pnpm add @org/ui --filter @org/app --workspace +``` + +**Example 2: npm - link multiple packages** + +```bash +npm install @org/data-access @org/ui --workspace @org/dashboard +``` + +**Example 3: Debug "Cannot find module"** + +1. Check if dependency is declared in consumer's `package.json` +2. If not, add it using appropriate command above +3. Run install (`pnpm install`, `npm install`, etc.) + +## Notes + +- Symlinks appear in `/node_modules/@org/` +- **Hoisting differs by manager:** + - npm/bun: hoist shared deps to root `node_modules` + - pnpm: no hoisting (strict isolation, prevents phantom deps) + - yarn berry: uses Plug'n'Play by default (no `node_modules`) +- Root `package.json` should have `"private": true` to prevent accidental publish diff --git a/.github/skills/monitor-ci/SKILL.md b/.github/skills/monitor-ci/SKILL.md new file mode 100644 index 0000000..48b71bf --- /dev/null +++ b/.github/skills/monitor-ci/SKILL.md @@ -0,0 +1,301 @@ +--- +name: monitor-ci +description: Monitor Nx Cloud CI pipeline and handle self-healing fixes. USE WHEN user says "monitor ci", "watch ci", "ci monitor", "watch ci for this branch", "track ci", "check ci status", wants to track CI status, or needs help with self-healing CI fixes. Prefer this skill over native CI provider tools (gh, glab, etc.) for CI monitoring — it integrates with Nx Cloud self-healing which those tools cannot access. +--- + +# Monitor CI Command + +You are the orchestrator for monitoring Nx Cloud CI pipeline executions and handling self-healing fixes. You spawn subagents to interact with Nx Cloud, run deterministic decision scripts, and take action based on the results. + +## Context + +- **Current Branch:** !`git branch --show-current` +- **Current Commit:** !`git rev-parse --short HEAD` +- **Remote Status:** !`git status -sb | head -1` + +## User Instructions + +$ARGUMENTS + +**Important:** If user provides specific instructions, respect them over default behaviors described below. + +## Configuration Defaults + +| Setting | Default | Description | +| ------------------------- | ------------- | ------------------------------------------------------------------------- | +| `--max-cycles` | 10 | Maximum **agent-initiated** CI Attempt cycles before timeout | +| `--timeout` | 120 | Maximum duration in minutes | +| `--verbosity` | medium | Output level: minimal, medium, verbose | +| `--branch` | (auto-detect) | Branch to monitor | +| `--fresh` | false | Ignore previous context, start fresh | +| `--auto-fix-workflow` | false | Attempt common fixes for pre-CI-Attempt failures (e.g., lockfile updates) | +| `--new-cipe-timeout` | 10 | Minutes to wait for new CI Attempt after action | +| `--local-verify-attempts` | 3 | Max local verification + enhance cycles before pushing to CI | + +Parse any overrides from `$ARGUMENTS` and merge with defaults. + +## Nx Cloud Connection Check + +Before starting the monitoring loop, verify the workspace is connected to Nx Cloud. Without this connection, no CI data is available and the entire skill is inoperable. + +### Step 0: Verify Nx Cloud Connection + +1. **Check `nx.json`** at workspace root for `nxCloudId` or `nxCloudAccessToken` +2. **If `nx.json` missing OR neither property exists** → exit with: + + ``` + Nx Cloud not connected. Unlock 70% faster CI and auto-fix broken PRs with https://nx.dev/nx-cloud + ``` + +3. **If connected** → continue to main loop + +## Architecture Overview + +1. **This skill (orchestrator)**: spawns subagents, runs scripts, prints status, does local coding work +2. **ci-monitor-subagent (haiku)**: calls one MCP tool (ci_information or update_self_healing_fix), returns structured result, exits +3. **ci-poll-decide.mjs (deterministic script)**: takes ci_information result + state, returns action + status message +4. **ci-state-update.mjs (deterministic script)**: manages budget gates, post-action state transitions, and cycle classification + +## Status Reporting + +The decision script handles message formatting based on verbosity. When printing messages to the user: + +- Prepend `[monitor-ci]` to every message from the script's `message` field +- For your own action messages (e.g. "Applying fix via MCP..."), also prepend `[monitor-ci]` + +## Anti-Patterns + +These behaviors cause real problems — racing with self-healing, losing CI progress, or wasting context: + +| Anti-Pattern | Why It's Bad | +| ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------ | +| Using CI provider CLIs with `--watch` flags (e.g., `gh pr checks --watch`, `glab ci status -w`) | Bypasses Nx Cloud self-healing entirely | +| Writing custom CI polling scripts | Unreliable, pollutes context, no self-healing | +| Cancelling CI workflows/pipelines | Destructive, loses CI progress | +| Running CI checks on main agent | Wastes main agent context tokens | +| Independently analyzing/fixing CI failures while polling | Races with self-healing, causes duplicate fixes and confused state | + +**If this skill fails to activate**, the fallback is: + +1. Use CI provider CLI for a one-time, read-only status check (single call, no watch/polling flags) +2. Immediately delegate to this skill with gathered context +3. Do not continue polling on main agent — it wastes context tokens and bypasses self-healing + +## Session Context Behavior + +If the user previously ran `/monitor-ci` in this session, you may have prior state (poll counts, last CI Attempt URL, etc.). Resume from that state unless `--fresh` is set, in which case discard it and start from Step 1. + +## MCP Tool Reference + +Three field sets control polling efficiency — use the lightest set that gives you what you need: + +```yaml +WAIT_FIELDS: 'cipeUrl,commitSha,cipeStatus' +LIGHT_FIELDS: 'cipeStatus,cipeUrl,branch,commitSha,selfHealingStatus,verificationStatus,userAction,failedTaskIds,verifiedTaskIds,selfHealingEnabled,failureClassification,couldAutoApplyTasks,autoApplySkipped,autoApplySkipReason,shortLink,confidence,confidenceReasoning,hints,selfHealingSkippedReason,selfHealingSkipMessage' +HEAVY_FIELDS: 'taskOutputSummary,suggestedFix,suggestedFixReasoning,suggestedFixDescription' +``` + +The `ci_information` tool accepts `branch` (optional, defaults to current git branch), `select` (comma-separated field names), and `pageToken` (0-based pagination for long strings). + +The `update_self_healing_fix` tool accepts a `shortLink` and an action: `APPLY`, `REJECT`, or `RERUN_ENVIRONMENT_STATE`. + +## Default Behaviors by Status + +The decision script returns one of the following statuses. This table defines the **default behavior** for each. User instructions can override any of these. + +**Simple exits** — just report and exit: + +| Status | Default Behavior | +| ----------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `ci_success` | Exit with success | +| `cipe_canceled` | Exit, CI was canceled | +| `cipe_timed_out` | Exit, CI timed out | +| `polling_timeout` | Exit, polling timeout reached | +| `circuit_breaker` | Exit, no progress after 5 consecutive polls | +| `environment_rerun_cap` | Exit, environment reruns exhausted | +| `fix_auto_applying` | Self-healing is handling it — just record `last_cipe_url`, enter wait mode. No MCP call or local git ops needed. | +| `error` | Wait 60s and loop | + +**Statuses requiring action** — when handling these in Step 3, read `references/fix-flows.md` for the detailed flow: + +| Status | Summary | +| ------------------------ | --------------------------------------------------------------------------------------------- | +| `fix_auto_apply_skipped` | Fix verified but auto-apply skipped (e.g., loop prevention). Inform user, offer manual apply. | +| `fix_apply_ready` | Fix verified (all tasks or e2e-only). Apply via MCP. | +| `fix_needs_local_verify` | Fix has unverified non-e2e tasks. Run locally, then apply or enhance. | +| `fix_needs_review` | Fix verification failed/not attempted. Analyze and decide. | +| `fix_failed` | Self-healing failed. Fetch heavy data, attempt local fix (gate check first). | +| `no_fix` | No fix available. Fetch heavy data, attempt local fix (gate check first) or exit. | +| `environment_issue` | Request environment rerun via MCP (gate check first). | +| `self_healing_throttled` | Reject old fixes, attempt local fix. | +| `no_new_cipe` | CI Attempt never spawned. Auto-fix workflow or exit with guidance. | +| `cipe_no_tasks` | CI failed with no tasks. Retry once with empty commit. | + +**Key rules (always apply):** + +- **Git safety**: Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets +- **Environment failures** (OOM, command not found, permission denied): bail immediately. These aren't code bugs, so spending local-fix budget on them is wasteful +- **Gate check**: Run `ci-state-update.mjs gate` before local fix attempts — if budget exhausted, print message and exit + +## Main Loop + +### Step 1: Initialize Tracking + +``` +cycle_count = 0 # Only incremented for agent-initiated cycles (counted against --max-cycles) +start_time = now() +no_progress_count = 0 +local_verify_count = 0 +env_rerun_count = 0 +last_cipe_url = null +expected_commit_sha = null +agent_triggered = false # Set true after monitor takes an action that triggers new CI Attempt +poll_count = 0 +wait_mode = false +prev_status = null +prev_cipe_status = null +prev_sh_status = null +prev_verification_status = null +prev_failure_classification = null +``` + +### Step 2: Polling Loop + +Repeat until done: + +#### 2a. Spawn subagent (FETCH_STATUS) + +Determine select fields based on mode: + +- **Wait mode**: use WAIT_FIELDS (`cipeUrl,commitSha,cipeStatus`) +- **Normal mode (first poll or after newCipeDetected)**: use LIGHT_FIELDS + +Call the `ci_information` tool with the determined `select` fields for the current branch. Wait for the result before proceeding. + +#### 2b. Run decision script + +```bash +node /scripts/ci-poll-decide.mjs '' \ + [--wait-mode] \ + [--prev-cipe-url ] \ + [--expected-sha ] \ + [--prev-status ] \ + [--timeout ] \ + [--new-cipe-timeout ] \ + [--env-rerun-count ] \ + [--no-progress-count ] \ + [--prev-cipe-status ] \ + [--prev-sh-status ] \ + [--prev-verification-status ] \ + [--prev-failure-classification ] +``` + +The script outputs a single JSON line: `{ action, code, message, delay?, noProgressCount, envRerunCount, fields?, newCipeDetected?, verifiableTaskIds? }` + +#### 2c. Process script output + +Parse the JSON output and update tracking state: + +- `no_progress_count = output.noProgressCount` +- `env_rerun_count = output.envRerunCount` +- `prev_cipe_status = subagent_result.cipeStatus` +- `prev_sh_status = subagent_result.selfHealingStatus` +- `prev_verification_status = subagent_result.verificationStatus` +- `prev_failure_classification = subagent_result.failureClassification` +- `prev_status = output.action + ":" + (output.code || subagent_result.cipeStatus)` +- `poll_count++` + +Based on `action`: + +- **`action == "poll"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a + - If `output.newCipeDetected`: clear wait mode, reset `wait_mode = false` +- **`action == "wait"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a +- **`action == "done"`**: Proceed to Step 3 with `output.code` + +### Step 3: Handle Actionable Status + +When decision script returns `action == "done"`: + +1. Run cycle-check (Step 4) **before** handling the code +2. Check the returned `code` +3. Look up default behavior in the table above +4. Check if user instructions override the default +5. Execute the appropriate action +6. **If action expects new CI Attempt**, update tracking (see Step 3a) +7. If action results in looping, go to Step 2 + +#### Tool calls for actions + +Several statuses require fetching additional data or calling tools: + +- **fix_apply_ready**: Call `update_self_healing_fix` with action `APPLY` +- **fix_needs_local_verify**: Call `ci_information` with HEAVY_FIELDS for fix details before local verification +- **fix_needs_review**: Call `ci_information` with HEAVY_FIELDS → get `suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries` +- **fix_failed / no_fix**: Call `ci_information` with HEAVY_FIELDS → get `taskFailureSummaries` for local fix context +- **environment_issue**: Call `update_self_healing_fix` with action `RERUN_ENVIRONMENT_STATE` +- **self_healing_throttled**: Call `ci_information` with HEAVY_FIELDS → get `selfHealingSkipMessage`; then call `update_self_healing_fix` for each old fix + +### Step 3a: Track State for New-CI-Attempt Detection + +After actions that should trigger a new CI Attempt, run: + +```bash +node /scripts/ci-state-update.mjs post-action \ + --action \ + --cipe-url \ + --commit-sha +``` + +Action types: `fix-auto-applying`, `apply-mcp`, `apply-local-push`, `reject-fix-push`, `local-fix-push`, `env-rerun`, `auto-fix-push`, `empty-commit-push` + +The script returns `{ waitMode, pollCount, lastCipeUrl, expectedCommitSha, agentTriggered }`. Update all tracking state from the output, then go to Step 2. + +### Step 4: Cycle Classification and Progress Tracking + +When the decision script returns `action == "done"`, run cycle-check **before** handling the code: + +```bash +node /scripts/ci-state-update.mjs cycle-check \ + --code \ + [--agent-triggered] \ + --cycle-count --max-cycles \ + --env-rerun-count +``` + +The script returns `{ cycleCount, agentTriggered, envRerunCount, approachingLimit, message }`. Update tracking state from the output. + +- If `approachingLimit` → ask user whether to continue (with 5 or 10 more cycles) or stop monitoring +- If previous cycle was NOT agent-triggered (human pushed), log that human-initiated push was detected + +#### Progress Tracking + +- `no_progress_count`, circuit breaker (5 polls), and backoff reset are handled by ci-poll-decide.mjs (progress = any change in cipeStatus, selfHealingStatus, verificationStatus, or failureClassification) +- `env_rerun_count` reset on non-environment status is handled by ci-state-update.mjs cycle-check +- On new CI Attempt detected (poll script returns `newCipeDetected`) → reset `local_verify_count = 0`, `env_rerun_count = 0` + +## Error Handling + +| Error | Action | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------- | +| Git rebase conflict | Report to user, exit | +| `nx-cloud apply-locally` fails | Reject fix via MCP (`action: "REJECT"`), then attempt manual patch (Reject + Fix From Scratch Flow) or exit | +| MCP tool error | Retry once, if fails report to user | +| Subagent spawn failure | Retry once, if fails exit with error | +| Decision script error | Treat as `error` status, increment `no_progress_count` | +| No new CI Attempt detected | If `--auto-fix-workflow`, try lockfile update; otherwise report to user with guidance | +| Lockfile auto-fix fails | Report to user, exit with guidance to check CI logs | + +## User Instruction Examples + +Users can override default behaviors: + +| Instruction | Effect | +| ------------------------------------------------ | --------------------------------------------------- | +| "never auto-apply" | Always prompt before applying any fix | +| "always ask before git push" | Prompt before each push | +| "reject any fix for e2e tasks" | Auto-reject if `failedTaskIds` contains e2e | +| "apply all fixes regardless of verification" | Skip verification check, apply everything | +| "if confidence < 70, reject" | Check confidence field before applying | +| "run 'nx affected -t typecheck' before applying" | Add local verification step | +| "auto-fix workflow failures" | Attempt lockfile updates on pre-CI-Attempt failures | +| "wait 45 min for new CI Attempt" | Override new-CI-Attempt timeout (default: 10 min) | diff --git a/.github/skills/monitor-ci/references/fix-flows.md b/.github/skills/monitor-ci/references/fix-flows.md new file mode 100644 index 0000000..b33aa02 --- /dev/null +++ b/.github/skills/monitor-ci/references/fix-flows.md @@ -0,0 +1,108 @@ +# Detailed Status Handling & Fix Flows + +## Status Handling by Code + +### fix_auto_apply_skipped + +The script returns `autoApplySkipReason` in its output. + +1. Report the skip reason to the user (e.g., "Auto-apply was skipped because the previous CI pipeline execution was triggered by Nx Cloud") +2. Offer to apply the fix manually — spawn UPDATE_FIX subagent with `APPLY` if user agrees +3. Record `last_cipe_url`, enter wait mode + +### fix_apply_ready + +- Spawn UPDATE_FIX subagent with `APPLY` +- Record `last_cipe_url`, enter wait mode + +### fix_needs_local_verify + +The script returns `verifiableTaskIds` in its output. + +1. **Detect package manager:** `pnpm-lock.yaml` → `pnpm nx`, `yarn.lock` → `yarn nx`, otherwise `npx nx` +2. **Run verifiable tasks in parallel** — spawn `general` subagents for each task +3. **If all pass** → spawn UPDATE_FIX subagent with `APPLY`, enter wait mode +4. **If any fail** → Apply Locally + Enhance Flow (see below) + +### fix_needs_review + +Spawn FETCH_HEAVY subagent, then analyze fix content (`suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries`): + +- If fix looks correct → apply via MCP +- If fix needs enhancement → Apply Locally + Enhance Flow +- If fix is wrong → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. Otherwise → Reject + Fix From Scratch Flow + +### fix_failed / no_fix + +Spawn FETCH_HEAVY subagent for `taskFailureSummaries`. Run `ci-state-update.mjs gate --gate-type local-fix` — if not allowed, print message and exit. Otherwise attempt local fix (counter already incremented by gate). If successful → commit, push, enter wait mode. If not → exit with failure. + +### environment_issue + +1. Run `ci-state-update.mjs gate --gate-type env-rerun`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `RERUN_ENVIRONMENT_STATE` +3. Enter wait mode with `last_cipe_url` set + +### self_healing_throttled + +Spawn FETCH_HEAVY subagent for `selfHealingSkipMessage`. + +1. **Parse throttle message** for CI Attempt URLs (regex: `/cipes/{id}`) +2. **Reject previous fixes** — for each URL: spawn FETCH_THROTTLE_INFO to get `shortLink`, then UPDATE_FIX with `REJECT` +3. **Attempt local fix**: Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed → skip to step 4. Otherwise use `failedTaskIds` and `taskFailureSummaries` for context. +4. **Fallback if local fix not possible or budget exhausted**: push empty commit (`git commit --allow-empty -m "ci: rerun after rejecting throttled fixes"`), enter wait mode + +### no_new_cipe + +1. Report to user: no CI attempt found, suggest checking CI provider +2. If `--auto-fix-workflow`: detect package manager, run install, commit lockfile if changed, enter wait mode +3. Otherwise: exit with guidance + +### cipe_no_tasks + +1. Report to user: CI failed with no tasks recorded +2. Retry: `git commit --allow-empty -m "chore: retry ci [monitor-ci]"` + push, enter wait mode +3. If retry also returns `cipe_no_tasks`: exit with failure + +## Fix Action Flows + +### Apply via MCP + +Spawn UPDATE_FIX subagent with `APPLY`. New CI Attempt spawns automatically. No local git ops. + +### Apply Locally + Enhance Flow + +1. `nx-cloud apply-locally ` (sets state to `APPLIED_LOCALLY`) +2. Enhance code to fix failing tasks +3. Run failing tasks to verify +4. If still failing → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, commit current state and push (let CI be final judge). Otherwise loop back to enhance. +5. If passing → commit and push, enter wait mode + +### Reject + Fix From Scratch Flow + +1. Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `REJECT` +3. Fix from scratch locally +4. Commit and push, enter wait mode + +## Environment vs Code Failure Recognition + +When any local fix path runs a task and it fails, assess whether the failure is a **code issue** or an **environment/tooling issue** before running the gate script. + +**Indicators of environment/tooling failures** (non-exhaustive): command not found / binary missing, OOM / heap allocation failures, permission denied, network timeouts / DNS failures, missing system libraries, Docker/container issues, disk space exhaustion. + +When detected → bail immediately without running gate (no budget consumed). Report that the failure is an environment/tooling issue, not a code bug. + +**Code failures** (compilation errors, test assertion failures, lint violations, type errors) are genuine candidates for local fix attempts and proceed normally through the gate. + +## Git Safety + +- Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets + +## Commit Message Format + +```bash +git commit -m "fix(): + +Failed tasks: , +Local verification: passed|enhanced|failed-pushing-to-ci" +``` diff --git a/.github/skills/monitor-ci/scripts/ci-poll-decide.mjs b/.github/skills/monitor-ci/scripts/ci-poll-decide.mjs new file mode 100644 index 0000000..3fe17e5 --- /dev/null +++ b/.github/skills/monitor-ci/scripts/ci-poll-decide.mjs @@ -0,0 +1,428 @@ +#!/usr/bin/env node + +/** + * CI Poll Decision Script + * + * Deterministic decision engine for CI monitoring. + * Takes ci_information JSON + state args, outputs a single JSON action line. + * + * Architecture: + * classify() — pure decision tree, returns { action, code, extra? } + * buildOutput() — maps classification to full output with messages, delays, counters + * + * Usage: + * node ci-poll-decide.mjs '' \ + * [--wait-mode] [--prev-cipe-url ] [--expected-sha ] \ + * [--prev-status ] [--timeout ] [--new-cipe-timeout ] \ + * [--env-rerun-count ] [--no-progress-count ] \ + * [--prev-cipe-status ] [--prev-sh-status ] \ + * [--prev-verification-status ] [--prev-failure-classification ] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const ciInfoJson = args[0]; +const pollCount = parseInt(args[1], 10) || 0; +const verbosity = args[2] || 'medium'; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +const waitMode = getFlag('--wait-mode'); +const prevCipeUrl = getArg('--prev-cipe-url'); +const expectedSha = getArg('--expected-sha'); +const prevStatus = getArg('--prev-status'); +const timeoutSeconds = parseInt(getArg('--timeout') || '0', 10); +const newCipeTimeoutSeconds = parseInt(getArg('--new-cipe-timeout') || '0', 10); +const envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); +const inputNoProgressCount = parseInt(getArg('--no-progress-count') || '0', 10); +const prevCipeStatus = getArg('--prev-cipe-status'); +const prevShStatus = getArg('--prev-sh-status'); +const prevVerificationStatus = getArg('--prev-verification-status'); +const prevFailureClassification = getArg('--prev-failure-classification'); + +// --- Parse CI info --- + +let ci; +try { + ci = JSON.parse(ciInfoJson); +} catch { + console.log( + JSON.stringify({ + action: 'done', + code: 'error', + message: 'Failed to parse ci_information JSON', + noProgressCount: inputNoProgressCount + 1, + envRerunCount, + }) + ); + process.exit(0); +} + +const { + cipeStatus, + selfHealingStatus, + verificationStatus, + selfHealingEnabled, + selfHealingSkippedReason, + failureClassification: rawFailureClassification, + failedTaskIds = [], + verifiedTaskIds = [], + couldAutoApplyTasks, + autoApplySkipped, + autoApplySkipReason, + userAction, + cipeUrl, + commitSha, +} = ci; + +const failureClassification = rawFailureClassification?.toLowerCase() ?? null; + +// --- Helpers --- + +function categorizeTasks() { + const verifiedSet = new Set(verifiedTaskIds); + const unverified = failedTaskIds.filter((t) => !verifiedSet.has(t)); + if (unverified.length === 0) return { category: 'all_verified' }; + + const e2e = unverified.filter((t) => { + const parts = t.split(':'); + return parts.length >= 2 && parts[1].includes('e2e'); + }); + if (e2e.length === unverified.length) return { category: 'e2e_only' }; + + const verifiable = unverified.filter((t) => { + const parts = t.split(':'); + return !(parts.length >= 2 && parts[1].includes('e2e')); + }); + return { category: 'needs_local_verify', verifiableTaskIds: verifiable }; +} + +function backoff(count) { + const delays = [60, 90, 120]; + return delays[Math.min(count, delays.length - 1)]; +} + +function hasStateChanged() { + if (prevCipeStatus && cipeStatus !== prevCipeStatus) return true; + if (prevShStatus && selfHealingStatus !== prevShStatus) return true; + if (prevVerificationStatus && verificationStatus !== prevVerificationStatus) + return true; + if ( + prevFailureClassification && + failureClassification !== prevFailureClassification + ) + return true; + return false; +} + +function isTimedOut() { + if (timeoutSeconds <= 0) return false; + const avgDelay = pollCount === 0 ? 0 : backoff(Math.floor(pollCount / 2)); + return pollCount * avgDelay >= timeoutSeconds; +} + +function isWaitTimedOut() { + if (newCipeTimeoutSeconds <= 0) return false; + return pollCount * 30 >= newCipeTimeoutSeconds; +} + +function isNewCipe() { + return ( + (prevCipeUrl && cipeUrl && cipeUrl !== prevCipeUrl) || + (expectedSha && commitSha && commitSha === expectedSha) + ); +} + +// ============================================================ +// classify() — pure decision tree +// +// Returns: { action: 'poll'|'wait'|'done', code: string, extra? } +// +// Decision priority (top wins): +// WAIT MODE: +// 1. new CI Attempt detected → poll (new_cipe_detected) +// 2. wait timed out → done (no_new_cipe) +// 3. still waiting → wait (waiting_for_cipe) +// NORMAL MODE: +// 4. polling timeout → done (polling_timeout) +// 5. circuit breaker (5 polls) → done (circuit_breaker) +// 6. CI succeeded → done (ci_success) +// 7. CI canceled → done (cipe_canceled) +// 8. CI timed out → done (cipe_timed_out) +// 9. CI failed, no tasks recorded → done (cipe_no_tasks) +// 10. environment failure → done (environment_rerun_cap | environment_issue) +// 11. self-healing throttled → done (self_healing_throttled) +// 12. CI in progress / not started → poll (ci_running) +// 13. self-healing in progress → poll (sh_running) +// 14. flaky task auto-rerun → poll (flaky_rerun) +// 15. fix auto-applied → poll (fix_auto_applied) +// 16. auto-apply: skipped → done (fix_auto_apply_skipped) +// 17. auto-apply: verification pending→ poll (verification_pending) +// 18. auto-apply: verified → done (fix_auto_applying) +// 19. fix: verification failed/none → done (fix_needs_review) +// 20. fix: all/e2e verified → done (fix_apply_ready) +// 21. fix: needs local verify → done (fix_needs_local_verify) +// 22. self-healing failed → done (fix_failed) +// 23. no fix available → done (no_fix) +// 24. fallback → poll (fallback) +// ============================================================ + +function classify() { + // --- Wait mode --- + if (waitMode) { + if (isNewCipe()) return { action: 'poll', code: 'new_cipe_detected' }; + if (isWaitTimedOut()) return { action: 'done', code: 'no_new_cipe' }; + return { action: 'wait', code: 'waiting_for_cipe' }; + } + + // --- Guards --- + if (isTimedOut()) return { action: 'done', code: 'polling_timeout' }; + if (noProgressCount >= 5) return { action: 'done', code: 'circuit_breaker' }; + + // --- Terminal CI states --- + if (cipeStatus === 'SUCCEEDED') return { action: 'done', code: 'ci_success' }; + if (cipeStatus === 'CANCELED') + return { action: 'done', code: 'cipe_canceled' }; + if (cipeStatus === 'TIMED_OUT') + return { action: 'done', code: 'cipe_timed_out' }; + + // --- CI failed, no tasks --- + if ( + cipeStatus === 'FAILED' && + failedTaskIds.length === 0 && + selfHealingStatus == null + ) + return { action: 'done', code: 'cipe_no_tasks' }; + + // --- Environment failure --- + if (failureClassification === 'environment_state') { + if (envRerunCount >= 2) + return { action: 'done', code: 'environment_rerun_cap' }; + return { action: 'done', code: 'environment_issue' }; + } + + // --- Throttled --- + if (selfHealingSkippedReason === 'THROTTLED') + return { action: 'done', code: 'self_healing_throttled' }; + + // --- Still running: CI --- + if (cipeStatus === 'IN_PROGRESS' || cipeStatus === 'NOT_STARTED') + return { action: 'poll', code: 'ci_running' }; + + // --- Still running: self-healing --- + if ( + (selfHealingStatus === 'IN_PROGRESS' || + selfHealingStatus === 'NOT_STARTED') && + !selfHealingSkippedReason + ) + return { action: 'poll', code: 'sh_running' }; + + // --- Still running: flaky rerun --- + if (failureClassification === 'flaky_task') + return { action: 'poll', code: 'flaky_rerun' }; + + // --- Fix auto-applied, waiting for new CI Attempt --- + if (userAction === 'APPLIED_AUTOMATICALLY') + return { action: 'poll', code: 'fix_auto_applied' }; + + // --- Auto-apply path (couldAutoApplyTasks) --- + if (couldAutoApplyTasks === true) { + if (autoApplySkipped === true) + return { + action: 'done', + code: 'fix_auto_apply_skipped', + extra: { autoApplySkipReason }, + }; + if ( + verificationStatus === 'NOT_STARTED' || + verificationStatus === 'IN_PROGRESS' + ) + return { action: 'poll', code: 'verification_pending' }; + if (verificationStatus === 'COMPLETED') + return { action: 'done', code: 'fix_auto_applying' }; + // verification FAILED or NOT_EXECUTABLE → falls through to fix_needs_review + } + + // --- Fix available --- + if (selfHealingStatus === 'COMPLETED') { + if ( + verificationStatus === 'FAILED' || + verificationStatus === 'NOT_EXECUTABLE' || + (couldAutoApplyTasks !== true && !verificationStatus) + ) + return { action: 'done', code: 'fix_needs_review' }; + + const tasks = categorizeTasks(); + if (tasks.category === 'all_verified' || tasks.category === 'e2e_only') + return { action: 'done', code: 'fix_apply_ready' }; + return { + action: 'done', + code: 'fix_needs_local_verify', + extra: { verifiableTaskIds: tasks.verifiableTaskIds }, + }; + } + + // --- Fix failed --- + if (selfHealingStatus === 'FAILED') + return { action: 'done', code: 'fix_failed' }; + + // --- No fix available --- + if ( + cipeStatus === 'FAILED' && + (selfHealingEnabled === false || selfHealingStatus === 'NOT_EXECUTABLE') + ) + return { action: 'done', code: 'no_fix' }; + + // --- Fallback --- + return { action: 'poll', code: 'fallback' }; +} + +// ============================================================ +// buildOutput() — maps classification to full JSON output +// ============================================================ + +// Message templates keyed by status or key +const messages = { + // wait mode + new_cipe_detected: () => + `New CI Attempt detected! CI: ${cipeStatus || 'N/A'}`, + no_new_cipe: () => + 'New CI Attempt timeout exceeded. No new CI Attempt detected.', + waiting_for_cipe: () => 'Waiting for new CI Attempt...', + + // guards + polling_timeout: () => 'Polling timeout exceeded.', + circuit_breaker: () => 'No progress after 5 consecutive polls. Stopping.', + + // terminal + ci_success: () => 'CI passed successfully!', + cipe_canceled: () => 'CI Attempt was canceled.', + cipe_timed_out: () => 'CI Attempt timed out.', + cipe_no_tasks: () => 'CI failed but no Nx tasks were recorded.', + + // environment + environment_rerun_cap: () => 'Environment rerun cap (2) exceeded. Bailing.', + environment_issue: () => 'CI: FAILED | Classification: ENVIRONMENT_STATE', + + // throttled + self_healing_throttled: () => + 'Self-healing throttled \u2014 too many unapplied fixes.', + + // polling + ci_running: () => `CI: ${cipeStatus}`, + sh_running: () => `CI: ${cipeStatus} | Self-healing: ${selfHealingStatus}`, + flaky_rerun: () => + 'CI: FAILED | Classification: FLAKY_TASK (auto-rerun in progress)', + fix_auto_applied: () => + 'CI: FAILED | Fix auto-applied, new CI Attempt spawning', + verification_pending: () => + `CI: FAILED | Self-healing: COMPLETED | Verification: ${verificationStatus}`, + + // actionable + fix_auto_applying: () => 'Fix verified! Auto-applying...', + fix_auto_apply_skipped: (extra) => + `Fix verified but auto-apply was skipped. ${ + extra?.autoApplySkipReason + ? `Reason: ${extra.autoApplySkipReason}` + : 'Offer to apply manually.' + }`, + fix_needs_review: () => + `Fix available but needs review. Verification: ${ + verificationStatus || 'N/A' + }`, + fix_apply_ready: () => 'Fix available and verified. Ready to apply.', + fix_needs_local_verify: (extra) => + `Fix available. ${extra.verifiableTaskIds.length} task(s) need local verification.`, + fix_failed: () => 'Self-healing failed to generate a fix.', + no_fix: () => 'CI failed, no fix available.', + + // fallback + fallback: () => + `CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, +}; + +// Codes where noProgressCount resets to 0 (genuine progress occurred) +const resetProgressCodes = new Set([ + 'ci_success', + 'fix_auto_applying', + 'fix_auto_apply_skipped', + 'fix_needs_review', + 'fix_apply_ready', + 'fix_needs_local_verify', +]); + +function formatMessage(msg) { + if (verbosity === 'minimal') { + const currentStatus = `${cipeStatus}|${selfHealingStatus}|${verificationStatus}`; + if (currentStatus === (prevStatus || '')) return null; + return msg; + } + if (verbosity === 'verbose') { + return [ + `Poll #${pollCount + 1} | CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, + msg, + ].join('\n'); + } + return `Poll #${pollCount + 1} | ${msg}`; +} + +function buildOutput(decision) { + const { action, code, extra } = decision; + + // noProgressCount is already computed before classify() was called. + // Here we only handle the reset for "genuine progress" done-codes. + + const msgFn = messages[code]; + const rawMsg = msgFn ? msgFn(extra) : `Unknown: ${code}`; + const message = formatMessage(rawMsg); + + const result = { + action, + code, + message, + noProgressCount: resetProgressCodes.has(code) ? 0 : noProgressCount, + envRerunCount, + }; + + // Add delay + if (action === 'wait') { + result.delay = 30; + } else if (action === 'poll') { + result.delay = code === 'new_cipe_detected' ? 60 : backoff(noProgressCount); + result.fields = 'light'; + } + + // Add extras + if (code === 'new_cipe_detected') result.newCipeDetected = true; + if (extra?.verifiableTaskIds) + result.verifiableTaskIds = extra.verifiableTaskIds; + if (extra?.autoApplySkipReason) + result.autoApplySkipReason = extra.autoApplySkipReason; + + console.log(JSON.stringify(result)); +} + +// --- Run --- + +// Compute noProgressCount from input. Single assignment, no mutation. +// Wait mode: reset on new cipe, otherwise unchanged (wait doesn't count as no-progress). +// Normal mode: reset on any state change, otherwise increment. +const noProgressCount = (() => { + if (waitMode) return isNewCipe() ? 0 : inputNoProgressCount; + if (isNewCipe() || hasStateChanged()) return 0; + return inputNoProgressCount + 1; +})(); + +buildOutput(classify()); diff --git a/.github/skills/monitor-ci/scripts/ci-state-update.mjs b/.github/skills/monitor-ci/scripts/ci-state-update.mjs new file mode 100644 index 0000000..90fa714 --- /dev/null +++ b/.github/skills/monitor-ci/scripts/ci-state-update.mjs @@ -0,0 +1,160 @@ +#!/usr/bin/env node + +/** + * CI State Update Script + * + * Deterministic state management for CI monitor actions. + * Three commands: gate, post-action, cycle-check. + * + * Usage: + * node ci-state-update.mjs gate --gate-type [counter args] + * node ci-state-update.mjs post-action --action [--cipe-url ] [--commit-sha ] + * node ci-state-update.mjs cycle-check --code [--agent-triggered] [counter args] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const command = args[0]; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +function output(result) { + console.log(JSON.stringify(result)); +} + +// --- gate --- +// Check if an action is allowed and return incremented counter. +// Called before any local fix attempt or environment rerun. + +function gate() { + const gateType = getArg('--gate-type'); + + if (gateType === 'local-fix') { + const count = parseInt(getArg('--local-verify-count') || '0', 10); + const max = parseInt(getArg('--local-verify-attempts') || '3', 10); + if (count >= max) { + return output({ + allowed: false, + localVerifyCount: count, + message: `Local fix budget exhausted (${count}/${max} attempts)`, + }); + } + return output({ + allowed: true, + localVerifyCount: count + 1, + message: null, + }); + } + + if (gateType === 'env-rerun') { + const count = parseInt(getArg('--env-rerun-count') || '0', 10); + if (count >= 2) { + return output({ + allowed: false, + envRerunCount: count, + message: `Environment issue persists after ${count} reruns. Manual investigation needed.`, + }); + } + return output({ + allowed: true, + envRerunCount: count + 1, + message: null, + }); + } + + output({ allowed: false, message: `Unknown gate type: ${gateType}` }); +} + +// --- post-action --- +// Compute next state after an action is taken. +// Returns wait mode params and whether the action was agent-triggered. + +function postAction() { + const action = getArg('--action'); + const cipeUrl = getArg('--cipe-url'); + const commitSha = getArg('--commit-sha'); + + // MCP-triggered or auto-applied: track by cipeUrl + const cipeUrlActions = ['fix-auto-applying', 'apply-mcp', 'env-rerun']; + // Local push: track by commitSha + const commitShaActions = [ + 'apply-local-push', + 'reject-fix-push', + 'local-fix-push', + 'auto-fix-push', + 'empty-commit-push', + ]; + + const trackByCipeUrl = cipeUrlActions.includes(action); + const trackByCommitSha = commitShaActions.includes(action); + + if (!trackByCipeUrl && !trackByCommitSha) { + return output({ error: `Unknown action: ${action}` }); + } + + // fix-auto-applying: self-healing did it, NOT the monitor + const agentTriggered = action !== 'fix-auto-applying'; + + output({ + waitMode: true, + pollCount: 0, + lastCipeUrl: trackByCipeUrl ? cipeUrl : null, + expectedCommitSha: trackByCommitSha ? commitSha : null, + agentTriggered, + }); +} + +// --- cycle-check --- +// Cycle classification + counter resets when a new "done" code is received. +// Called at the start of handling each actionable code. + +function cycleCheck() { + const status = getArg('--code'); + const wasAgentTriggered = getFlag('--agent-triggered'); + let cycleCount = parseInt(getArg('--cycle-count') || '0', 10); + const maxCycles = parseInt(getArg('--max-cycles') || '10', 10); + let envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); + + // Cycle classification: if previous cycle was agent-triggered, count it + if (wasAgentTriggered) cycleCount++; + + // Reset env_rerun_count on non-environment status + if (status !== 'environment_issue') envRerunCount = 0; + + // Approaching limit gate + const approachingLimit = cycleCount >= maxCycles - 2; + + output({ + cycleCount, + agentTriggered: false, + envRerunCount, + approachingLimit, + message: approachingLimit + ? `Approaching cycle limit (${cycleCount}/${maxCycles})` + : null, + }); +} + +// --- Dispatch --- + +switch (command) { + case 'gate': + gate(); + break; + case 'post-action': + postAction(); + break; + case 'cycle-check': + cycleCheck(); + break; + default: + output({ error: `Unknown command: ${command}` }); +} diff --git a/.github/skills/nx-generate/SKILL.md b/.github/skills/nx-generate/SKILL.md new file mode 100644 index 0000000..af7ba80 --- /dev/null +++ b/.github/skills/nx-generate/SKILL.md @@ -0,0 +1,166 @@ +--- +name: nx-generate +description: Generate code using nx generators. INVOKE IMMEDIATELY when user mentions scaffolding, setup, structure, creating apps/libs, or setting up project structure. Trigger words - scaffold, setup, create a ... app, create a ... lib, project structure, generate, add a new project. ALWAYS use this BEFORE calling nx_docs or exploring - this skill handles discovery internally. +--- + +# Run Nx Generator + +Nx generators are powerful tools that scaffold projects, make automated code migrations or automate repetitive tasks in a monorepo. They ensure consistency across the codebase and reduce boilerplate work. + +This skill applies when the user wants to: + +- Create new projects like libraries or applications +- Scaffold features or boilerplate code +- Run workspace-specific or custom generators +- Do anything else that an nx generator exists for + +## Key Principles + +1. **Always use `--no-interactive`** - Prevents prompts that would hang execution +2. **Read the generator source code** - The schema alone is not enough; understand what the generator actually does +3. **Match existing repo patterns** - Study similar artifacts in the repo and follow their conventions +4. **Verify with lint/test/build/typecheck etc.** - Generated code must pass verification. The listed targets are just an example, use what's appropriate for this workspace. + +## Steps + +### 1. Discover Available Generators + +Use the Nx CLI to discover available generators: + +- List all generators for a plugin: `npx nx list @nx/react` +- View available plugins: `npx nx list` + +This includes plugin generators (e.g., `@nx/react:library`) and local workspace generators. + +### 2. Match Generator to User Request + +Identify which generator(s) could fulfill the user's needs. Consider what artifact type they want, which framework is relevant, and any specific generator names mentioned. + +**IMPORTANT**: When both a local workspace generator and an external plugin generator could satisfy the request, **always prefer the local workspace generator**. Local generators are customized for the specific repo's patterns. + +If no suitable generator exists, you can stop using this skill. However, the burden of proof is high—carefully consider all available generators before deciding none apply. + +### 3. Get Generator Options + +Use the `--help` flag to understand available options: + +```bash +npx nx g @nx/react:library --help +``` + +Pay attention to required options, defaults that might need overriding, and options relevant to the user's request. + +### Library Buildability + +**Default to non-buildable libraries** unless there's a specific reason for buildable. + +| Type | When to use | Generator flags | +| --------------------------- | ----------------------------------------------------------------- | ----------------------------------- | +| **Non-buildable** (default) | Internal monorepo libs consumed by apps | No `--bundler` flag | +| **Buildable** | Publishing to npm, cross-repo sharing, stable libs for cache hits | `--bundler=vite` or `--bundler=swc` | + +Non-buildable libs: + +- Export `.ts`/`.tsx` source directly +- Consumer's bundler compiles them +- Faster dev experience, less config + +Buildable libs: + +- Have their own build target +- Useful for stable libs that rarely change (cache hits) +- Required for npm publishing + +**If unclear, ask the user:** "Should this library be buildable (own build step, better caching) or non-buildable (source consumed directly, simpler setup)?" + +### 4. Read Generator Source Code + +**This step is critical.** The schema alone does not tell you everything. Reading the source code helps you: + +- Know exactly what files will be created/modified and where +- Understand side effects (updating configs, installing deps, etc.) +- Identify behaviors and options not obvious from the schema +- Understand how options interact with each other + +To find generator source code: + +- For plugin generators: Use `node -e "console.log(require.resolve('@nx//generators.json'));"` to find the generators.json, then locate the source from there +- If that fails, read directly from `node_modules//generators.json` +- For local generators: Typically in `tools/generators/` or a local plugin directory. Search the repo for the generator name. + +After reading the source, reconsider: Is this the right generator? If not, go back to step 2. + +> **⚠️ `--directory` flag behavior can be misleading.** +> It should specify the full path of the generated library or component, not the parent path that it will be generated in. +> +> ```bash +> # ✅ Correct - directory is the full path for the library +> nx g @nx/react:library --directory=libs/my-lib +> # generates libs/my-lib/package.json and more +> +> # ❌ Wrong - this will create files at libs and libs/src/... +> nx g @nx/react:library --name=my-lib --directory=libs +> # generates libs/package.json and more +> ``` + +### 5. Examine Existing Patterns + +Before generating, examine the target area of the codebase: + +- Look at similar existing artifacts (other libraries, applications, etc.) +- Identify naming conventions, file structures, and configuration patterns +- Note which test runners, build tools, and linters are used +- Configure the generator to match these patterns + +### 6. Dry-Run to Verify File Placement + +**Always run with `--dry-run` first** to verify files will be created in the correct location: + +```bash +npx nx g @nx/react:library --name=my-lib --dry-run --no-interactive +``` + +Review the output carefully. If files would be created in the wrong location, adjust your options based on what you learned from the generator source code. + +Note: Some generators don't support dry-run (e.g., if they install npm packages). If dry-run fails for this reason, proceed to running the generator for real. + +### 7. Run the Generator + +Execute the generator: + +```bash +nx generate --no-interactive +``` + +> **Tip:** New packages often need workspace dependencies wired up (e.g., importing shared types, being consumed by apps). The `link-workspace-packages` skill can help add these correctly. + +### 8. Modify Generated Code (If Needed) + +Generators provide a starting point. Modify the output as needed to: + +- Add or modify functionality as requested +- Adjust imports, exports, or configurations +- Integrate with existing code patterns + +**Important:** If you replace or delete generated test files (e.g., `*.spec.ts`), either write meaningful replacement tests or remove the `test` target from the project configuration. Empty test suites will cause `nx test` to fail. + +### 9. Format and Verify + +Format all generated/modified files: + +```bash +nx format --fix +``` + +This example is for built-in nx formatting with prettier. There might be other formatting tools for this workspace, use these when appropriate. + +Then verify the generated code works. Keep in mind that the changes you make with a generator or subsequent modifications might impact various projects so it's usually not enough to only run targets for the artifact you just created. + +```bash +# these targets are just an example! +nx run-many -t build,lint,test,typecheck +``` + +These targets are common examples used across many workspaces. You should do research into other targets available for this workspace and its projects. CI configuration is usually a good guide for what the critical targets are that have to pass. + +If verification fails with manageable issues (a few lint errors, minor type issues), fix them. If issues are extensive, attempt obvious fixes first, then escalate to the user with details about what was generated, what's failing, and what you've attempted. diff --git a/.github/skills/nx-import/SKILL.md b/.github/skills/nx-import/SKILL.md new file mode 100644 index 0000000..b1cd381 --- /dev/null +++ b/.github/skills/nx-import/SKILL.md @@ -0,0 +1,238 @@ +--- +name: nx-import +description: Import, merge, or combine repositories into an Nx workspace using nx import. USE WHEN the user asks to adopt Nx across repos, move projects into a monorepo, or bring code/history from another repository. +--- + +## Quick Start + +- `nx import` brings code from a source repository or folder into the current workspace, preserving commit history. +- After nx `22.6.0`, `nx import` responds with .ndjson outputs and follow-up questions. For earlier versions, always run with `--no-interactive` and specify all flags directly. +- Run `nx import --help` for available options. +- Make sure the destination directory is empty before importing. + EXAMPLE: target has `libs/utils` and `libs/models`; source has `libs/ui` and `libs/data-access` — you cannot import `libs/` into `libs/` directly. Import each source library individually. + +Primary docs: + +- https://nx.dev/docs/guides/adopting-nx/import-project +- https://nx.dev/docs/guides/adopting-nx/preserving-git-histories + +Read the nx docs if you have the tools for it. + +## Import Strategy + +**Subdirectory-at-a-time** (`nx import apps --source=apps`): + +- **Recommended for monorepo sources** — files land at top level, no redundant config +- Caveats: multiple import commands (separate merge commits each); dest must not have conflicting directories; root configs (deps, plugins, targetDefaults) not imported +- **Directory conflicts**: Import into alternate-named dir (e.g. `imported-apps/`), then rename + +**Whole repo** (`nx import imported --source=.`): + +- **Only for non-monorepo sources** (single-project repos) +- For monorepos, creates messy nested config (`imported/nx.json`, `imported/tsconfig.base.json`, etc.) +- If you must: keep imported `tsconfig.base.json` (projects extend it), prefix workspace globs and executor paths + +### Directory Conventions + +- **Always prefer the destination's existing conventions.** Source uses `libs/`but dest uses `packages/`? Import into `packages/` (`nx import packages/foo --source=libs/foo`). +- If dest has no convention (empty workspace), ask the user. + +### Application vs Library Detection + +Before importing, identify whether the source is an **application** or a **library**: + +- **Applications**: Deployable end products. Common indicators: + - _Frontend_: `next.config.*`, `vite.config.*` with a build entry point, framework-specific app scaffolding (CRA, Angular CLI app, etc.) + - _Backend (Node.js)_: Express/Fastify/NestJS server entrypoint, no `"exports"` field in `package.json` + - _JVM_: Maven `pom.xml` with `jar` or `war` and a `main` class; Gradle `application` plugin or `mainClass` setting + - _.NET_: `.csproj`/`.fsproj` with `Exe` or `WinExe` + - _General_: Dockerfile, a runnable entrypoint, no public API surface intended for import by other projects +- **Libraries**: Reusable packages consumed by other projects. Common indicators: `"main"`/`"exports"` in `package.json`, Maven/Gradle packaging as a library jar, .NET `Library`, named exports intended for import by other packages. + +**Destination directory rules**: + +- Applications → `apps/`. Check workspace globs (e.g. `pnpm-workspace.yaml`, `workspaces` in root `package.json`) for an existing `apps/*` entry. + - If `apps/*` is **not** present, add it before importing: update the workspace glob config and commit (or stage) the change. + - Example: `nx import apps/my-app --source=packages/my-app` +- Libraries → follow the dest's existing convention (`packages/`, `libs/`, etc.). + +## Common Issues + +### pnpm Workspace Globs (Critical) + +`nx import` adds the imported directory itself (e.g. `apps`) to `pnpm-workspace.yaml`, **NOT** glob patterns for packages within it. Cross-package imports will fail with `Cannot find module`. + +**Fix**: Replace with proper globs from the source config (e.g. `apps/*`, `libs/shared/*`), then `pnpm install`. + +### Root Dependencies and Config Not Imported (Critical) + +`nx import` does **NOT** merge from the source's root: + +- `dependencies`/`devDependencies` from `package.json` +- `targetDefaults` from `nx.json` (e.g. `"@nx/esbuild:esbuild": { "dependsOn": ["^build"] }` — critical for build ordering) +- `namedInputs` from `nx.json` (e.g. `production` exclusion patterns for test files) +- Plugin configurations from `nx.json` + +**Fix**: Diff source and dest `package.json` + `nx.json`. Add missing deps, merge relevant `targetDefaults` and `namedInputs`. + +### TypeScript Project References + +After import, run `nx sync --yes`. If it reports nothing but typecheck still fails, `nx reset` first, then `nx sync --yes` again. + +### Explicit Executor Path Fixups + +Inferred targets (via Nx plugins) resolve config relative to project root — no changes needed. Explicit executor targets (e.g. `@nx/esbuild:esbuild`) have workspace-root-relative paths (`main`, `outputPath`, `tsConfig`, `assets`, `sourceRoot`) that must be prefixed with the import destination directory. + +### Plugin Detection + +- **Whole-repo import**: `nx import` detects and offers to install plugins. Accept them. +- **Subdirectory import**: Plugins NOT auto-detected. Manually add with `npx nx add @nx/PLUGIN`. Check `include`/`exclude` patterns — defaults won't match alternate directories (e.g. `apps-beta/`). +- Run `npx nx reset` after any plugin config changes. + +### Redundant Root Files (Whole-Repo Only) + +Whole-repo import brings ALL source root files into the dest subdirectory. Clean up: + +- `pnpm-lock.yaml` — stale; dest has its own lockfile +- `pnpm-workspace.yaml` — source workspace config; conflicts with dest +- `node_modules/` — stale symlinks pointing to source filesystem +- `.gitignore` — redundant with dest root `.gitignore` +- `nx.json` — source Nx config; dest has its own +- `README.md` — optional; keep or remove + +**Don't blindly delete** `tsconfig.base.json` — imported projects may extend it via relative paths. + +### Root ESLint Config Missing (Subdirectory Import) + +Subdirectory import doesn't bring the source's root `eslint.config.mjs`, but project configs reference `../../eslint.config.mjs`. + +**Fix order**: + +1. Install ESLint deps first: `pnpm add -wD eslint@^9 @nx/eslint-plugin typescript-eslint` (plus framework-specific plugins) +2. Create root `eslint.config.mjs` (copy from source or create with `@nx/eslint-plugin` base rules) +3. Then `npx nx add @nx/eslint` to register the plugin in `nx.json` + +Install `typescript-eslint` explicitly — pnpm's strict hoisting won't auto-resolve this transitive dep of `@nx/eslint-plugin`. + +### ESLint Version Pinning (Critical) + +**Pin ESLint to v9** (`eslint@^9.0.0`). ESLint 10 breaks `@nx/eslint` and many plugins with cryptic errors like `Cannot read properties of undefined (reading 'version')`. + +`@nx/eslint` may peer-depend on ESLint 8, causing the wrong version to resolve. If lint fails with `Cannot read properties of undefined (reading 'allow')`, add `pnpm.overrides`: + +```json +{ "pnpm": { "overrides": { "eslint": "^9.0.0" } } } +``` + +### Dependency Version Conflicts + +After import, compare key deps (`typescript`, `eslint`, framework-specific). If dest uses newer versions, upgrade imported packages to match (usually safe). If source is newer, may need to upgrade dest first. Use `pnpm.overrides` to enforce single-version policy if desired. + +### Module Boundaries + +Imported projects may lack `tags`. Add tags or update `@nx/enforce-module-boundaries` rules. + +### Project Name Collisions (Multi-Import) + +Same `name` in `package.json` across source and dest causes `MultipleProjectsWithSameNameError`. **Fix**: Rename conflicting names (e.g. `@org/api` → `@org/teama-api`), update all dep references and import statements, `pnpm install`. The root `package.json` of each imported repo also becomes a project — rename those too. + +### Workspace Dep Import Ordering + +`pnpm install` fails during `nx import` if a `"workspace:*"` dependency hasn't been imported yet. File operations still succeed. **Fix**: Import all projects first, then `pnpm install --no-frozen-lockfile`. + +### `.gitkeep` Blocking Subdirectory Import + +The TS preset creates `packages/.gitkeep`. Remove it and commit before importing. + +### Frontend tsconfig Base Settings (Critical) + +The TS preset defaults (`module: "nodenext"`, `moduleResolution: "nodenext"`, `lib: ["es2022"]`) are incompatible with frontend frameworks (React, Next.js, Vue, Vite). After importing frontend projects, verify the dest root `tsconfig.base.json`: + +- **`moduleResolution`**: Must be `"bundler"` (not `"nodenext"`) +- **`module`**: Must be `"esnext"` (not `"nodenext"`) +- **`lib`**: Must include `"dom"` and `"dom.iterable"` (frontend projects need these) +- **`jsx`**: `"react-jsx"` for React-only workspaces, per-project for mixed frameworks + +For **subdirectory imports**, the dest root tsconfig is authoritative — update it. For **whole-repo imports**, imported projects may extend their own nested `tsconfig.base.json`, making this less critical. + +If the dest also has backend projects needing `nodenext`, use per-project overrides instead of changing the root. + +**Gotcha**: TypeScript does NOT merge `lib` arrays — a project-level override **replaces** the base array entirely. Always include all needed entries (e.g. `es2022`, `dom`, `dom.iterable`) in any project-level `lib`. + +### `@nx/react` Typings for Libraries + +React libraries generated with `@nx/react:library` reference `@nx/react/typings/cssmodule.d.ts` and `@nx/react/typings/image.d.ts` in their tsconfig `types`. These fail with `Cannot find type definition file` unless `@nx/react` is installed in the dest workspace. + +**Fix**: `pnpm add -wD @nx/react` + +### Jest Preset Missing (Subdirectory Import) + +Nx presets create `jest.preset.js` at the workspace root, and project jest configs reference it (e.g. `../../jest.preset.js`). Subdirectory import does NOT bring this file. + +**Fix**: + +1. Run `npx nx add @nx/jest` — registers `@nx/jest/plugin` in `nx.json` and updates `namedInputs` +2. Create `jest.preset.js` at workspace root (see `references/JEST.md` for content) — `nx add` only creates this when a generator runs, not on bare `nx add` +3. Install test runner deps: `pnpm add -wD jest jest-environment-jsdom ts-jest @types/jest` +4. Install framework-specific test deps as needed (see `references/JEST.md`) + +For deeper Jest issues (tsconfig.spec.json, Babel transforms, CI atomization, Jest vs Vitest coexistence), see `references/JEST.md`. + +### Target Name Prefixing (Whole-Repo Import) + +When importing a project with existing npm scripts (`build`, `dev`, `start`, `lint`), Nx plugins auto-prefix inferred target names to avoid conflicts: e.g. `next:build`, `vite:build`, `eslint:lint`. + +**Fix**: Remove the Nx-rewritten npm scripts from the imported `package.json`, then either: + +- Accept the prefixed names (e.g. `nx run app:next:build`) +- Rename plugin target names in `nx.json` to use unprefixed names + +## Non-Nx Source Issues + +When the source is a plain pnpm/npm workspace without `nx.json`. + +### npm Script Rewriting (Critical) + +Nx rewrites `package.json` scripts during init, creating broken commands (e.g. `vitest run` → `nx test run`). **Fix**: Remove all rewritten scripts — Nx plugins infer targets from config files. + +### `noEmit` → `composite` + `emitDeclarationOnly` (Critical) + +Plain TS projects use `"noEmit": true`, incompatible with Nx project references. + +**Symptoms**: "typecheck target is disabled because one or more project references set 'noEmit: true'" or TS6310. + +**Fix** in **all** imported tsconfigs: + +1. Remove `"noEmit": true`. If inherited via extends chain, set `"noEmit": false` explicitly. +2. Add `"composite": true`, `"emitDeclarationOnly": true`, `"declarationMap": true` +3. Add `"outDir": "dist"` and `"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"` +4. Add `"extends": "../../tsconfig.base.json"` if missing. Remove settings now inherited from base. + +### Stale node_modules and Lockfiles + +`nx import` may bring `node_modules/` (pnpm symlinks pointing to the source filesystem) and `pnpm-lock.yaml` from the source. Both are stale. + +**Fix**: `rm -rf imported/node_modules imported/pnpm-lock.yaml imported/pnpm-workspace.yaml imported/.gitignore`, then `pnpm install`. + +### ESLint Config Handling + +- **Legacy `.eslintrc.json` (ESLint 8)**: Delete all `.eslintrc.*`, remove v8 deps, create flat `eslint.config.mjs`. +- **Flat config (`eslint.config.js`)**: Self-contained configs can often be left as-is. +- **No ESLint**: Create both root and project-level configs from scratch. + +### TypeScript `paths` Aliases + +Nx uses `package.json` `"exports"` + pnpm workspace linking instead of tsconfig `"paths"`. If packages have proper `"exports"`, paths are redundant. Otherwise, update paths for the new directory structure. + +## Technology-specific Guidance + +Identify technologies in the source repo, then read and apply the matching reference file(s). + +Available references: + +- `references/ESLINT.md` — ESLint projects: duplicate `lint`/`eslint:lint` targets, legacy `.eslintrc.*` linting generated files, flat config `.cjs` self-linting, `typescript-eslint` v7/v9 peer dep conflict, mixed ESLint v8+v9 in one workspace. +- `references/GRADLE.md` +- `references/JEST.md` — Jest testing: `@nx/jest/plugin` setup, jest.preset.js, testing deps by framework, tsconfig.spec.json, Jest vs Vitest coexistence, Babel transforms, CI atomization. +- `references/NEXT.md` — Next.js projects: `@nx/next/plugin` targets, `withNx`, Next.js TS config (`noEmit`, `jsx: "preserve"`), auto-installing deps via wrong PM, non-Nx `create-next-app` imports, mixed Next.js+Vite coexistence. +- `references/TURBOREPO.md` +- `references/VITE.md` — Vite projects (React, Vue, or both): `@nx/vite/plugin` typecheck target, `resolve.alias`/`__dirname` fixes, framework deps, Vue-specific setup, mixed React+Vue coexistence. diff --git a/.github/skills/nx-import/references/VITE.md b/.github/skills/nx-import/references/VITE.md new file mode 100644 index 0000000..b777e5a --- /dev/null +++ b/.github/skills/nx-import/references/VITE.md @@ -0,0 +1,397 @@ +## Vite + +Vite-specific guidance for `nx import`. For generic import issues (pnpm globs, root deps, project references, name collisions, ESLint, frontend tsconfig base settings, `@nx/react` typings, Jest preset, non-Nx source handling), see `SKILL.md`. + +--- + +### `@nx/vite/plugin` Typecheck Target + +`@nx/vite/plugin` defaults `typecheckTargetName` to `"vite:typecheck"`. If the workspace expects `"typecheck"`, set it explicitly in `nx.json`. If `@nx/js/typescript` is also registered, rename one target to avoid conflicts (e.g. `"tsc-typecheck"` for the JS plugin). + +Keep both plugins only if the workspace has non-Vite pure TS libraries — `@nx/js/typescript` handles those while `@nx/vite/plugin` handles Vite projects. + +### @nx/vite Plugin Install Failure + +Plugin init loads `vite.config.ts` before deps are available. **Fix**: `pnpm add -wD vite @vitejs/plugin-react` (or `@vitejs/plugin-vue`) first, then `pnpm exec nx add @nx/vite`. + +### Vite `resolve.alias` and `__dirname` (Non-Nx Sources) + +**`__dirname` undefined** (CJS-only): Replace with `fileURLToPath(new URL('./src', import.meta.url))` from `'node:url'`. + +**`@/` path alias**: Vite's `resolve.alias` works at runtime but TS needs matching `"paths"`. Set `"baseUrl": "."` in project tsconfig. + +**PostCSS/Tailwind**: Verify `content` globs resolve correctly after import. + +### Missing TypeScript `types` (Non-Nx Sources) + +Non-Nx tsconfigs may not declare all needed types. Ensure Vite projects include `"types": ["node", "vite/client"]` in their tsconfig. + +### `noEmit` Fix: Vite-Specific Notes + +See SKILL.md for the generic noEmit→composite fix. Vite-specific additions: + +- Non-Nx Vite projects often have **both** `tsconfig.app.json` and `tsconfig.node.json` with `noEmit` — fix both +- Solution-style tsconfigs (`"files": [], "references": [...]`) may lack `extends`. Add `extends` pointing to the dest root `tsconfig.base.json` so base settings (`moduleResolution`, `lib`) apply. +- This is safe — Vite/Vitest ignore TypeScript emit settings. + +### Dependency Version Conflicts + +**Shared Vite deps (both frameworks):** `vite`, `vitest`, `jsdom`, `@types/node`, `typescript` (dev) + +**Vite 6→7**: Typecheck fails (`Plugin` type mismatch); build/serve still works. Fix: align versions. +**Vitest 3→4**: Usually works; type conflicts may surface in shared test utils. + +--- + +## React Router 7 (Vite-Based) + +React Router 7 (`@react-router/dev`) uses Vite under the hood with a `vite.config.ts` and a `react-router.config.ts`. The `@nx/vite/plugin` detects `vite.config.ts` and creates inferred targets. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `serve` targets. The `build` target invokes the script defined in `package.json` (usually `react-router build`), not `vite build` directly. + +**No separate typecheck target from `@nx/vite/plugin`** — React Router 7 typegen is run as part of `typecheck` (e.g. `react-router typegen && tsc`). The `typecheck` target is inferred from the tsconfig. Keep the `typecheck` script in `package.json` if present; it is not rewritten. + +### tsconfig Notes + +React Router 7 uses a single `tsconfig.json` (no `tsconfig.app.json`/`tsconfig.node.json` split). It includes: + +- `"rootDirs": [".", "./.react-router/types"]` — for generated type files; keep as-is +- `"paths": { "~/*": ["./app/*"] }` — self-referential alias; keep as-is +- `"noEmit": true` — replace with composite settings per SKILL.md + +### Build Output + +React Router 7 outputs to `build/` (not `dist/`). Add `build` to the dest root `.gitignore`. + +### Generated Types Directory + +React Router 7 generates `.react-router/` at the project root for route type generation. Add `.react-router` to the dest root `.gitignore`. + +--- + +## TanStack Start (Vite-Based) + +TanStack Start uses Vinxi under the hood, which wraps Vite. Projects have a standard `vite.config.ts` that `@nx/vite/plugin` detects normally. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `preview`, `serve-static`, `typecheck` targets. The `build` target runs `vite build` which invokes the TanStack Start Vinxi pipeline (produces both client and SSR bundles). + +### tsconfig Notes + +TanStack Start uses a single `tsconfig.json` with `"allowImportingTsExtensions": true` and `"noEmit": true`. Apply the standard noEmit → composite fix. `allowImportingTsExtensions` is compatible with `emitDeclarationOnly: true` — no change needed. + +### `paths` Aliases + +TanStack Start commonly uses `"#/*": ["./src/*"]` and `"@/*": ["./src/*"]`. These are self-referential — keep as-is for a single-project app. + +### Uncommitted Source Repo + +`create-tan-stack` initializes a git repo but does NOT make an initial commit. Before importing, commit first: + +```bash +git -C /path/to/source add . && git -C /path/to/source commit -m "Initial commit" +``` + +### Generated and Build Directories + +TanStack Start / Vinxi / Nitro generate several directories that must be added to the dest root `.gitignore`: + +- `.vinxi` — Vinxi build cache +- `.tanstack` — TanStack generated files +- `.nitro` — Nitro build artifacts +- `.output` — server-side build output (SSR/edge) + +These are not covered by `dist` or `build`. + +--- + +## React-Specific + +### React Dependencies + +**Production:** `react`, `react-dom` +**Dev:** `@types/react`, `@types/react-dom`, `@vitejs/plugin-react`, `@testing-library/react`, `@testing-library/jest-dom`, `jsdom` +**ESLint (Nx sources):** `eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks` +**ESLint (`create-vite`):** `eslint-plugin-react-refresh`, `eslint-plugin-react-hooks` — self-contained flat configs can be left as-is +**Nx plugins:** `@nx/react` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` + +### React TypeScript Configuration + +Add `"jsx": "react-jsx"` — in `tsconfig.base.json` for single-framework workspaces, per-project for mixed (see Mixed section). + +### React ESLint Config + +```js +import nx from "@nx/eslint-plugin" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...nx.configs["flat/react"], + { files: ["**/*.ts", "**/*.tsx"], rules: {} }, +] +``` + +### React Version Conflicts + +React 18 (source) + React 19 (dest): pnpm may hoist mismatched `react-dom`, causing `TypeError: Cannot read properties of undefined (reading 'S')`. **Fix**: Align versions with `pnpm.overrides`. + +### `@testing-library/jest-dom` with Vitest + +If source used Jest: change import to `@testing-library/jest-dom/vitest` in test-setup.ts, add to tsconfig `types`. + +--- + +## Vue-Specific + +### Vue Dependencies + +**Production:** `vue` (plus `vue-router`, `pinia` if used) +**Dev:** `@vitejs/plugin-vue`, `vue-tsc`, `@vue/test-utils`, `jsdom` +**ESLint:** `eslint-plugin-vue`, `vue-eslint-parser`, `@vue/eslint-config-typescript`, `@vue/eslint-config-prettier` +**Nx plugins:** `@nx/vue` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` (install AFTER deps — see below) + +### Vue TypeScript Configuration + +Add to `tsconfig.base.json` (single-framework) or per-project (mixed): + +```json +{ "jsx": "preserve", "jsxImportSource": "vue", "resolveJsonModule": true } +``` + +### `vue-shims.d.ts` + +Vue SFC files need a type declaration. Usually exists in each project's `src/` and imports cleanly. If missing: + +```ts +declare module "*.vue" { + import { defineComponent } from "vue" + const component: ReturnType + export default component +} +``` + +### `vue-tsc` Auto-Detection + +Both `@nx/js/typescript` and `@nx/vite/plugin` auto-detect `vue-tsc` when installed — no manual config needed. Remove source scripts like `"typecheck": "vue-tsc --noEmit"`. + +### ESLint Plugin Installation Order (Critical) + +`@nx/eslint` init **crashes** if Vue ESLint deps aren't installed first (it loads all config files). + +**Correct order:** + +1. `pnpm add -wD eslint@^9 eslint-plugin-vue vue-eslint-parser @vue/eslint-config-typescript @typescript-eslint/parser @nx/eslint-plugin typescript-eslint` +2. Create root `eslint.config.mjs` +3. Then `npx nx add @nx/eslint` + +### Vue ESLint Config Pattern + +```js +import vue from "eslint-plugin-vue" +import vueParser from "vue-eslint-parser" +import tsParser from "@typescript-eslint/parser" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...vue.configs["flat/recommended"], + { + files: ["**/*.vue"], + languageOptions: { parser: vueParser, parserOptions: { parser: tsParser } }, + }, + { + files: ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx", "**/*.vue"], + rules: { "vue/multi-word-component-names": "off" }, + }, +] +``` + +**Important**: `vue-eslint-parser` override must come **AFTER** base config — `flat/typescript` sets the TS parser globally without a `files` filter, breaking `.vue` parsing. + +`vue-eslint-parser` must be an explicit pnpm dependency (strict resolution prevents transitive import). + +**Known issue**: Some generated Vue ESLint configs omit `vue-eslint-parser`. Use the pattern above instead. + +--- + +## Mixed React + Vue + +When both frameworks coexist, several settings become per-project. + +### tsconfig `jsx` — Per-Project Only + +- React: `"jsx": "react-jsx"` in project tsconfig +- Vue: `"jsx": "preserve"`, `"jsxImportSource": "vue"` in project tsconfig +- Root: **NO** `jsx` setting + +### Typecheck — Auto-Detects Framework + +`@nx/vite/plugin` uses `vue-tsc` for Vue projects and `tsc` for React automatically. + +```json +{ + "plugins": [ + { "plugin": "@nx/eslint/plugin", "options": { "targetName": "lint" } }, + { + "plugin": "@nx/vite/plugin", + "options": { + "buildTargetName": "build", + "typecheckTargetName": "typecheck", + "testTargetName": "test" + } + } + ] +} +``` + +Remove `@nx/js/typescript` if all projects use Vite. Keep it (renamed to `"tsc-typecheck"`) only for non-Vite pure TS libs. + +### ESLint — Three-Tier Config + +1. **Root**: Base rules only, no framework-specific rules +2. **React projects**: Extend root + `nx.configs['flat/react']` +3. **Vue projects**: Extend root + `vue.configs['flat/recommended']` + `vue-eslint-parser` + +**Required packages**: Shared (`eslint@^9`, `@nx/eslint-plugin`, `typescript-eslint`, `@typescript-eslint/parser`), React (`eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks`), Vue (`eslint-plugin-vue`, `vue-eslint-parser`) + +`@nx/react`/`@nx/vue` are for generators only — no target conflicts. + +--- + +## Redundant npm Scripts After Import + +`nx import` copies `package.json` verbatim, so npm scripts come along. For Vite-based projects `@nx/vite/plugin` already infers the same targets from `vite.config.ts` — the npm scripts just shadow the plugin with weaker `nx:run-script` wrappers (no first-class caching inputs/outputs). Remove them after import. + +### Standalone Vite App (`create-vite`) + +Remove the following scripts — every one is redundant: + +| Script | Plugin replacement | +| ----------------------------- | ---------------------------------------------------------------------------- | +| `dev: vite` | `@nx/vite/plugin` → `dev` | +| `build: tsc -b && vite build` | `@nx/vite/plugin` → `build`; `typecheck` via `@nx/js/typescript` handles tsc | +| `preview: vite preview` | `@nx/vite/plugin` → `preview` | +| `lint: eslint .` | `@nx/eslint/plugin` → `eslint:lint` | + +### TanStack Start + +Remove `build`, `dev`, `preview`, and `test` scripts, but move any hardcoded `--port` flag to `vite.config.ts` first: + +```ts +// vite.config.ts +export default defineConfig({ + server: { port: 3000 }, // replaces `vite dev --port 3000` + ... +}) +``` + +### React Router 7 — Keep ALL scripts + +Do **not** remove React Router 7 scripts. They use the framework CLI (`react-router build`, `react-router dev`, `react-router-serve`) which is not interchangeable with plain `vite`: + +- `typecheck` runs `react-router typegen && tsc` — typegen must precede `tsc` or it fails on missing route types +- `start` serves the SSR bundle — no plugin equivalent + +--- + +## Fix Orders + +### Nx Source + +1. Generic fixes from SKILL.md (pnpm globs, root deps, executor paths, frontend tsconfig base settings, `@nx/react` typings) +2. Configure `@nx/vite/plugin` typecheck target +3. **React**: `jsx: "react-jsx"` (root or per-project) +4. **Vue**: `jsx: "preserve"` + `jsxImportSource: "vue"`; verify `vue-shims.d.ts`; install ESLint deps before `@nx/eslint` +5. **Mixed**: `jsx` per-project; remove/rename `@nx/js/typescript` +6. `nx sync --yes && nx reset && nx run-many -t typecheck,build,test,lint` + +### Non-Nx Source (additional steps) + +0. Import into `apps/` (see SKILL.md: "Application vs Library Detection") +1. Generic fixes from SKILL.md (stale files cleanup, pnpm globs, rewritten scripts, target name prefixing, noEmit→composite, ESLint handling) +2. Fix `noEmit` in **all** tsconfigs (app, node, etc. — non-Nx projects often have multiple) +3. Add `extends` to solution-style tsconfigs so root settings apply +4. Fix `resolve.alias` / `__dirname` / `baseUrl` +5. Ensure `types` include `vite/client` and `node` +6. Install `@nx/vite` manually if it failed during import +7. Remove redundant npm scripts so `@nx/vite/plugin` infers them natively (see "Redundant npm Scripts" section) +8. **Vue**: Add `outDir` + `**/*.vue.d.ts` to ESLint ignores +9. Full verification + +### Multiple-Source Imports + +See SKILL.md for generic multi-import (name collisions, dep refs). Vite-specific: fix tsconfig `references` paths for alternate directories (`../../libs/` → `../../libs-beta/`). + +### Non-Nx Source: React Router 7 + +1. Ensure source has at least one commit (see SKILL.md: "Source Repo Has No Commits") +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/react` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Add `build` and `.react-router` to dest root `.gitignore` +6. **Keep all npm scripts** — React Router 7 uses framework CLI (`react-router build/dev`), not plain vite (see "Redundant npm Scripts" above) +7. `npm install && nx reset && nx sync --yes` + +### Non-Nx Source: TanStack Start + +1. Ensure source has at least one commit — `create-tan-stack` does NOT auto-commit (see SKILL.md) +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/vitest` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Keep `allowImportingTsExtensions` — compatible with `emitDeclarationOnly: true` +6. Add `.vinxi`, `.tanstack`, `.nitro`, `.output` to dest root `.gitignore` +7. Move hardcoded `--port` from `dev` script into `vite.config.ts` (`server: { port: N }`) +8. Remove redundant npm scripts — `@nx/vite/plugin` infers `build`, `dev`, `preview`, `test` (see "Redundant npm Scripts" above) +9. `npm install && nx reset && nx sync --yes` + +### Quick Reference: React vs Vue + +| Aspect | React | Vue | +| ------------- | ------------------------ | ----------------------------------------- | +| Vite plugin | `@vitejs/plugin-react` | `@vitejs/plugin-vue` | +| Type checker | `tsc` | `vue-tsc` (auto-detected) | +| SFC support | N/A | `vue-shims.d.ts` needed | +| tsconfig jsx | `"react-jsx"` | `"preserve"` + `"jsxImportSource": "vue"` | +| ESLint parser | Standard TS | `vue-eslint-parser` + TS sub-parser | +| ESLint setup | Straightforward | Must install deps before `@nx/eslint` | +| Test utils | `@testing-library/react` | `@vue/test-utils` | + +### Quick Reference: Vite-Based React Frameworks + +| Aspect | Vite (standalone) | React Router 7 | TanStack Start | +| ------------------ | ----------------- | ----------------------- | ------------------------ | +| Build config | `vite.config.ts` | `vite.config.ts` | `vite.config.ts` | +| Build output | `dist/` | `build/` | `dist/` | +| SSR bundle | No | Yes (`build/server/`) | Yes (`dist/server/`) | +| tsconfig layout | app + node split | Single tsconfig | Single tsconfig | +| Auto-committed | Depends on tool | Usually yes | **No — commit first** | +| `nx import` plugin | `@nx/vite` | `@nx/vite`, `@nx/react` | `@nx/vite`, `@nx/vitest` | + +--- + +## Iteration Log + +### Scenario 6: Multiple non-Nx React apps (CRA, Next.js, React Router 7, TanStack Start, Vite) → TS preset (PASS) + +- Sources: 5 standalone non-Nx repos with different build tools +- Dest: CNW ts preset (Nx 22.5.1), npm workspaces, `packages/*` +- Import: whole-repo for each, sequential into `packages/` +- Pre-import fixes: + 1. Removed `packages/.gitkeep` and committed + 2. `git init && git add . && git commit` in Vite app (no git at all) + 3. `git add . && git commit` in TanStack app (git init'd but no commits) +- Import: `npm exec nx -- import packages/ --source=. --ref=main --no-interactive` + - Next.js import auto-installed `@nx/eslint`, `@nx/next` + - React Router 7 import auto-installed `@nx/vite`, `@nx/react`, `@nx/docker` (Dockerfile present) + - TanStack import auto-installed `@nx/vitest` +- Post-import fixes: + 1. Removed stale `node_modules/`, `package-lock.json`, `.gitignore` from each package + 2. Removed Nx-rewritten scripts from `board-games-nextjs/package.json` (had `"build": "nx next:build"`, etc.) + 3. Updated root `tsconfig.base.json`: `nodenext` → `bundler`, added `dom`/`dom.iterable` to lib, added `jsx: react-jsx` + 4. Added `build` to dest root `.gitignore` (CRA and React Router 7 output there) + 5. Fixed `noEmit` → `composite + emitDeclarationOnly` in: `board-games-vite/tsconfig.app.json`, `board-games-vite/tsconfig.node.json`, `board-games-react-router/tsconfig.json`, `board-games-tanstack/tsconfig.json` + 6. Fixed `tsBuildInfoFile` paths from `./node_modules/.tmp/...` to `./dist/...` + 7. Installed root `@types/react`, `@types/react-dom`, `@types/node` +- All targets green: `build` for all 5 projects; `typecheck` for Vite/React Router/TanStack; `next:build` for Next.js diff --git a/.github/skills/nx-plugins/SKILL.md b/.github/skills/nx-plugins/SKILL.md new file mode 100644 index 0000000..89223c7 --- /dev/null +++ b/.github/skills/nx-plugins/SKILL.md @@ -0,0 +1,9 @@ +--- +name: nx-plugins +description: Find and add Nx plugins. USE WHEN user wants to discover available plugins, install a new plugin, or add support for a specific framework or technology to the workspace. +--- + +## Finding and Installing new plugins + +- List plugins: `pnpm nx list` +- Install plugins `pnpm nx add `. Example: `pnpm nx add @nx/react`. diff --git a/.github/skills/nx-run-tasks/SKILL.md b/.github/skills/nx-run-tasks/SKILL.md new file mode 100644 index 0000000..7f1263a --- /dev/null +++ b/.github/skills/nx-run-tasks/SKILL.md @@ -0,0 +1,58 @@ +--- +name: nx-run-tasks +description: Helps with running tasks in an Nx workspace. USE WHEN the user wants to execute build, test, lint, serve, or run any other tasks defined in the workspace. +--- + +You can run tasks with Nx in the following way. + +Keep in mind that you might have to prefix things with npx/pnpx/yarn if the user doesn't have nx installed globally. Look at the package.json or lockfile to determine which package manager is in use. + +For more details on any command, run it with `--help` (e.g. `nx run-many --help`, `nx affected --help`). + +## Understand which tasks can be run + +You can check those via `nx show project --json`, for example `nx show project myapp --json`. It contains a `targets` section which has information about targets that can be run. You can also just look at the `package.json` scripts or `project.json` targets, but you might miss out on inferred tasks by Nx plugins. + +## Run a single task + +``` +nx run : +``` + +where `project` is the project name defined in `package.json` or `project.json` (if present). + +## Run multiple tasks + +``` +nx run-many -t build test lint typecheck +``` + +You can pass a `-p` flag to filter to specific projects, otherwise it runs on all projects. You can also use `--exclude` to exclude projects, and `--parallel` to control the number of parallel processes (default is 3). + +Examples: + +- `nx run-many -t test -p proj1 proj2` — test specific projects +- `nx run-many -t test --projects=*-app --exclude=excluded-app` — test projects matching a pattern +- `nx run-many -t test --projects=tag:api-*` — test projects by tag + +## Run tasks for affected projects + +Use `nx affected` to only run tasks on projects that have been changed and projects that depend on changed projects. This is especially useful in CI and for large workspaces. + +``` +nx affected -t build test lint +``` + +By default it compares against the base branch. You can customize this: + +- `nx affected -t test --base=main --head=HEAD` — compare against a specific base and head +- `nx affected -t test --files=libs/mylib/src/index.ts` — specify changed files directly + +## Useful flags + +These flags work with `run`, `run-many`, and `affected`: + +- `--skipNxCache` — rerun tasks even when results are cached +- `--verbose` — print additional information such as stack traces +- `--nxBail` — stop execution after the first failed task +- `--configuration=` — use a specific configuration (e.g. `production`) diff --git a/.github/skills/nx-workspace/SKILL.md b/.github/skills/nx-workspace/SKILL.md new file mode 100644 index 0000000..4b5110a --- /dev/null +++ b/.github/skills/nx-workspace/SKILL.md @@ -0,0 +1,286 @@ +--- +name: nx-workspace +description: "Explore and understand Nx workspaces. USE WHEN answering questions about the workspace, projects, or tasks. ALSO USE WHEN an nx command fails or you need to check available targets/configuration before running a task. EXAMPLES: 'What projects are in this workspace?', 'How is project X configured?', 'What depends on library Y?', 'What targets can I run?', 'Cannot find configuration for task', 'debug nx task failure'." +--- + +# Nx Workspace Exploration + +This skill provides read-only exploration of Nx workspaces. Use it to understand workspace structure, project configuration, available targets, and dependencies. + +Keep in mind that you might have to prefix commands with `npx`/`pnpx`/`yarn` if nx isn't installed globally. Check the lockfile to determine the package manager in use. + +## Listing Projects + +Use `nx show projects` to list projects in the workspace. + +The project filtering syntax (`-p`/`--projects`) works across many Nx commands including `nx run-many`, `nx release`, `nx show projects`, and more. Filters support explicit names, glob patterns, tag references (e.g. `tag:name`), directories, and negation (e.g. `!project-name`). + +```bash +# List all projects +nx show projects + +# Filter by pattern (glob) +nx show projects --projects "apps/*" +nx show projects --projects "shared-*" + +# Filter by tag +nx show projects --projects "tag:publishable" +nx show projects -p 'tag:publishable,!tag:internal' + +# Filter by target (projects that have a specific target) +nx show projects --withTarget build + +# Combine filters +nx show projects --type lib --withTarget test +nx show projects --affected --exclude="*-e2e" +nx show projects -p "tag:scope:client,packages/*" + +# Negate patterns +nx show projects -p '!tag:private' +nx show projects -p '!*-e2e' + +# Output as JSON +nx show projects --json +``` + +## Project Configuration + +Use `nx show project --json` to get the full resolved configuration for a project. + +**Important**: Do NOT read `project.json` directly - it only contains partial configuration. The `nx show project --json` command returns the full resolved config including inferred targets from plugins. + +You can read the full project schema at `node_modules/nx/schemas/project-schema.json` to understand nx project configuration options. + +```bash +# Get full project configuration +nx show project my-app --json + +# Extract specific parts from the JSON +nx show project my-app --json | jq '.targets' +nx show project my-app --json | jq '.targets.build' +nx show project my-app --json | jq '.targets | keys' + +# Check project metadata +nx show project my-app --json | jq '{name, root, sourceRoot, projectType, tags}' +``` + +## Target Information + +Targets define what tasks can be run on a project. + +```bash +# List all targets for a project +nx show project my-app --json | jq '.targets | keys' + +# Get full target configuration +nx show project my-app --json | jq '.targets.build' + +# Check target executor/command +nx show project my-app --json | jq '.targets.build.executor' +nx show project my-app --json | jq '.targets.build.command' + +# View target options +nx show project my-app --json | jq '.targets.build.options' + +# Check target inputs/outputs (for caching) +nx show project my-app --json | jq '.targets.build.inputs' +nx show project my-app --json | jq '.targets.build.outputs' + +# Find projects with a specific target +nx show projects --withTarget serve +nx show projects --withTarget e2e +``` + +## Workspace Configuration + +Read `nx.json` directly for workspace-level configuration. +You can read the full project schema at `node_modules/nx/schemas/nx-schema.json` to understand nx project configuration options. + +```bash +# Read the full nx.json +cat nx.json + +# Or use jq for specific sections +cat nx.json | jq '.targetDefaults' +cat nx.json | jq '.namedInputs' +cat nx.json | jq '.plugins' +cat nx.json | jq '.generators' +``` + +Key nx.json sections: + +- `targetDefaults` - Default configuration applied to all targets of a given name +- `namedInputs` - Reusable input definitions for caching +- `plugins` - Nx plugins and their configuration +- ...and much more, read the schema or nx.json for details + +## Affected Projects + +If the user is asking about affected projects, read the [affected projects reference](references/AFFECTED.md) for detailed commands and examples. + +## Common Exploration Patterns + +### "What's in this workspace?" + +```bash +nx show projects +nx show projects --type app +nx show projects --type lib +``` + +### "How do I build/test/lint project X?" + +```bash +nx show project X --json | jq '.targets | keys' +nx show project X --json | jq '.targets.build' +``` + +### "What depends on library Y?" + +```bash +# Use the project graph to find dependents +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "Y") | .key' +``` + +## Programmatic Answers + +When processing nx CLI results, use command-line tools to compute the answer programmatically rather than counting or parsing output manually. Always use `--json` flags to get structured output that can be processed with `jq`, `grep`, or other tools you have installed locally. + +### Listing Projects + +```bash +nx show projects --json +``` + +Example output: + +```json +["my-app", "my-app-e2e", "shared-ui", "shared-utils", "api"] +``` + +Common operations: + +```bash +# Count projects +nx show projects --json | jq 'length' + +# Filter by pattern +nx show projects --json | jq '.[] | select(startswith("shared-"))' + +# Get affected projects as array +nx show projects --affected --json | jq '.' +``` + +### Project Details + +```bash +nx show project my-app --json +``` + +Example output: + +```json +{ + "root": "apps/my-app", + "name": "my-app", + "sourceRoot": "apps/my-app/src", + "projectType": "application", + "tags": ["type:app", "scope:client"], + "targets": { + "build": { + "executor": "@nx/vite:build", + "options": { "outputPath": "dist/apps/my-app" } + }, + "serve": { + "executor": "@nx/vite:dev-server", + "options": { "buildTarget": "my-app:build" } + }, + "test": { + "executor": "@nx/vite:test", + "options": {} + } + }, + "implicitDependencies": [] +} +``` + +Common operations: + +```bash +# Get target names +nx show project my-app --json | jq '.targets | keys' + +# Get specific target config +nx show project my-app --json | jq '.targets.build' + +# Get tags +nx show project my-app --json | jq '.tags' + +# Get project root +nx show project my-app --json | jq -r '.root' +``` + +### Project Graph + +```bash +nx graph --print +``` + +Example output: + +```json +{ + "graph": { + "nodes": { + "my-app": { + "name": "my-app", + "type": "app", + "data": { "root": "apps/my-app", "tags": ["type:app"] } + }, + "shared-ui": { + "name": "shared-ui", + "type": "lib", + "data": { "root": "libs/shared-ui", "tags": ["type:ui"] } + } + }, + "dependencies": { + "my-app": [ + { "source": "my-app", "target": "shared-ui", "type": "static" } + ], + "shared-ui": [] + } + } +} +``` + +Common operations: + +```bash +# Get all project names from graph +nx graph --print | jq '.graph.nodes | keys' + +# Find dependencies of a project +nx graph --print | jq '.graph.dependencies["my-app"]' + +# Find projects that depend on a library +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "shared-ui") | .key' +``` + +## Troubleshooting + +### "Cannot find configuration for task X:target" + +```bash +# Check what targets exist on the project +nx show project X --json | jq '.targets | keys' + +# Check if any projects have that target +nx show projects --withTarget target +``` + +### "The workspace is out of sync" + +```bash +nx sync +nx reset # if sync doesn't fix stale cache +``` diff --git a/.github/skills/nx-workspace/references/AFFECTED.md b/.github/skills/nx-workspace/references/AFFECTED.md new file mode 100644 index 0000000..e30f18f --- /dev/null +++ b/.github/skills/nx-workspace/references/AFFECTED.md @@ -0,0 +1,27 @@ +## Affected Projects + +Find projects affected by changes in the current branch. + +```bash +# Affected since base branch (auto-detected) +nx show projects --affected + +# Affected with explicit base +nx show projects --affected --base=main +nx show projects --affected --base=origin/main + +# Affected between two commits +nx show projects --affected --base=abc123 --head=def456 + +# Affected apps only +nx show projects --affected --type app + +# Affected excluding e2e projects +nx show projects --affected --exclude="*-e2e" + +# Affected by uncommitted changes +nx show projects --affected --uncommitted + +# Affected by untracked files +nx show projects --affected --untracked +``` diff --git a/.gitignore b/.gitignore index 5e4f9e8..7fdd1f5 100644 --- a/.gitignore +++ b/.gitignore @@ -72,4 +72,5 @@ generated/ .wrangler .claude/worktrees -.claude/settings.local.json \ No newline at end of file +.claude/settings.local.json +.nx/polygraph \ No newline at end of file diff --git a/.opencode/agents/ci-monitor-subagent.md b/.opencode/agents/ci-monitor-subagent.md new file mode 100644 index 0000000..98003c6 --- /dev/null +++ b/.opencode/agents/ci-monitor-subagent.md @@ -0,0 +1,50 @@ +--- +description: CI helper for /monitor-ci. Fetches CI status, retrieves fix details, or updates self-healing fixes. Executes one MCP tool call and returns the result. +mode: subagent +--- + +# CI Monitor Subagent + +You are a CI helper. You call ONE MCP tool per invocation and return the result. Do not loop, poll, or sleep. + +## Commands + +The main agent tells you which command to run: + +### FETCH_STATUS + +Call `ci_information` with the provided branch and select fields. Return a JSON object with ONLY these fields: +`{ cipeStatus, selfHealingStatus, verificationStatus, selfHealingEnabled, selfHealingSkippedReason, failureClassification, failedTaskIds, verifiedTaskIds, couldAutoApplyTasks, autoApplySkipped, autoApplySkipReason, userAction, cipeUrl, commitSha, shortLink }` + +### FETCH_HEAVY + +Call `ci_information` with heavy select fields. Summarize the heavy content and return: + +```json +{ + "shortLink": "...", + "failedTaskIds": ["..."], + "verifiedTaskIds": ["..."], + "suggestedFixDescription": "...", + "suggestedFixSummary": "...", + "selfHealingSkipMessage": "...", + "taskFailureSummaries": [{ "taskId": "...", "summary": "..." }] +} +``` + +Do NOT return raw suggestedFix diffs or raw taskOutputSummary — summarize them. +The main agent uses these summaries to understand what failed and attempt local fixes. + +### UPDATE_FIX + +Call `update_self_healing_fix` with the provided shortLink and action (APPLY/REJECT/RERUN_ENVIRONMENT_STATE). Return the result message (success/failure string). + +### FETCH_THROTTLE_INFO + +Call `ci_information` with the provided URL. Return ONLY: `{ shortLink, cipeUrl }` + +## Important + +- Execute ONE command and return immediately +- Do NOT poll, loop, sleep, or make decisions +- Extract and return ONLY the fields specified for each command — do NOT dump the full MCP response diff --git a/.opencode/commands/monitor-ci.md b/.opencode/commands/monitor-ci.md new file mode 100644 index 0000000..3f7d140 --- /dev/null +++ b/.opencode/commands/monitor-ci.md @@ -0,0 +1,301 @@ +--- +description: Monitor Nx Cloud CI pipeline and handle self-healing fixes. USE WHEN user says "monitor ci", "watch ci", "ci monitor", "watch ci for this branch", "track ci", "check ci status", wants to track CI status, or needs help with self-healing CI fixes. Prefer this skill over native CI provider tools (gh, glab, etc.) for CI monitoring — it integrates with Nx Cloud self-healing which those tools cannot access. +argument-hint: '[instructions] [--max-cycles N] [--timeout MINUTES] [--verbosity minimal|medium|verbose] [--branch BRANCH] [--fresh] [--auto-fix-workflow] [--new-cipe-timeout MINUTES] [--local-verify-attempts N]' +--- + +# Monitor CI Command + +You are the orchestrator for monitoring Nx Cloud CI pipeline executions and handling self-healing fixes. You spawn subagents to interact with Nx Cloud, run deterministic decision scripts, and take action based on the results. + +## Context + +- **Current Branch:** !`git branch --show-current` +- **Current Commit:** !`git rev-parse --short HEAD` +- **Remote Status:** !`git status -sb | head -1` + +## User Instructions + +$ARGUMENTS + +**Important:** If user provides specific instructions, respect them over default behaviors described below. + +## Configuration Defaults + +| Setting | Default | Description | +| ------------------------- | ------------- | ------------------------------------------------------------------------- | +| `--max-cycles` | 10 | Maximum **agent-initiated** CI Attempt cycles before timeout | +| `--timeout` | 120 | Maximum duration in minutes | +| `--verbosity` | medium | Output level: minimal, medium, verbose | +| `--branch` | (auto-detect) | Branch to monitor | +| `--fresh` | false | Ignore previous context, start fresh | +| `--auto-fix-workflow` | false | Attempt common fixes for pre-CI-Attempt failures (e.g., lockfile updates) | +| `--new-cipe-timeout` | 10 | Minutes to wait for new CI Attempt after action | +| `--local-verify-attempts` | 3 | Max local verification + enhance cycles before pushing to CI | + +Parse any overrides from `$ARGUMENTS` and merge with defaults. + +## Nx Cloud Connection Check + +Before starting the monitoring loop, verify the workspace is connected to Nx Cloud. Without this connection, no CI data is available and the entire skill is inoperable. + +### Step 0: Verify Nx Cloud Connection + +1. **Check `nx.json`** at workspace root for `nxCloudId` or `nxCloudAccessToken` +2. **If `nx.json` missing OR neither property exists** → exit with: + + ``` + Nx Cloud not connected. Unlock 70% faster CI and auto-fix broken PRs with https://nx.dev/nx-cloud + ``` + +3. **If connected** → continue to main loop + +## Architecture Overview + +1. **This skill (orchestrator)**: spawns subagents, runs scripts, prints status, does local coding work +2. **ci-monitor-subagent (haiku)**: calls one MCP tool (ci_information or update_self_healing_fix), returns structured result, exits +3. **ci-poll-decide.mjs (deterministic script)**: takes ci_information result + state, returns action + status message +4. **ci-state-update.mjs (deterministic script)**: manages budget gates, post-action state transitions, and cycle classification + +## Status Reporting + +The decision script handles message formatting based on verbosity. When printing messages to the user: + +- Prepend `[monitor-ci]` to every message from the script's `message` field +- For your own action messages (e.g. "Applying fix via MCP..."), also prepend `[monitor-ci]` + +## Anti-Patterns + +These behaviors cause real problems — racing with self-healing, losing CI progress, or wasting context: + +| Anti-Pattern | Why It's Bad | +| ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------ | +| Using CI provider CLIs with `--watch` flags (e.g., `gh pr checks --watch`, `glab ci status -w`) | Bypasses Nx Cloud self-healing entirely | +| Writing custom CI polling scripts | Unreliable, pollutes context, no self-healing | +| Cancelling CI workflows/pipelines | Destructive, loses CI progress | +| Running CI checks on main agent | Wastes main agent context tokens | +| Independently analyzing/fixing CI failures while polling | Races with self-healing, causes duplicate fixes and confused state | + +**If this skill fails to activate**, the fallback is: + +1. Use CI provider CLI for a one-time, read-only status check (single call, no watch/polling flags) +2. Immediately delegate to this skill with gathered context +3. Do not continue polling on main agent — it wastes context tokens and bypasses self-healing + +## Session Context Behavior + +If the user previously ran `/monitor-ci` in this session, you may have prior state (poll counts, last CI Attempt URL, etc.). Resume from that state unless `--fresh` is set, in which case discard it and start from Step 1. + +## MCP Tool Reference + +Three field sets control polling efficiency — use the lightest set that gives you what you need: + +```yaml +WAIT_FIELDS: 'cipeUrl,commitSha,cipeStatus' +LIGHT_FIELDS: 'cipeStatus,cipeUrl,branch,commitSha,selfHealingStatus,verificationStatus,userAction,failedTaskIds,verifiedTaskIds,selfHealingEnabled,failureClassification,couldAutoApplyTasks,autoApplySkipped,autoApplySkipReason,shortLink,confidence,confidenceReasoning,hints,selfHealingSkippedReason,selfHealingSkipMessage' +HEAVY_FIELDS: 'taskOutputSummary,suggestedFix,suggestedFixReasoning,suggestedFixDescription' +``` + +The `ci_information` tool accepts `branch` (optional, defaults to current git branch), `select` (comma-separated field names), and `pageToken` (0-based pagination for long strings). + +The `update_self_healing_fix` tool accepts a `shortLink` and an action: `APPLY`, `REJECT`, or `RERUN_ENVIRONMENT_STATE`. + +## Default Behaviors by Status + +The decision script returns one of the following statuses. This table defines the **default behavior** for each. User instructions can override any of these. + +**Simple exits** — just report and exit: + +| Status | Default Behavior | +| ----------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `ci_success` | Exit with success | +| `cipe_canceled` | Exit, CI was canceled | +| `cipe_timed_out` | Exit, CI timed out | +| `polling_timeout` | Exit, polling timeout reached | +| `circuit_breaker` | Exit, no progress after 5 consecutive polls | +| `environment_rerun_cap` | Exit, environment reruns exhausted | +| `fix_auto_applying` | Self-healing is handling it — just record `last_cipe_url`, enter wait mode. No MCP call or local git ops needed. | +| `error` | Wait 60s and loop | + +**Statuses requiring action** — when handling these in Step 3, read `references/fix-flows.md` for the detailed flow: + +| Status | Summary | +| ------------------------ | --------------------------------------------------------------------------------------------- | +| `fix_auto_apply_skipped` | Fix verified but auto-apply skipped (e.g., loop prevention). Inform user, offer manual apply. | +| `fix_apply_ready` | Fix verified (all tasks or e2e-only). Apply via MCP. | +| `fix_needs_local_verify` | Fix has unverified non-e2e tasks. Run locally, then apply or enhance. | +| `fix_needs_review` | Fix verification failed/not attempted. Analyze and decide. | +| `fix_failed` | Self-healing failed. Fetch heavy data, attempt local fix (gate check first). | +| `no_fix` | No fix available. Fetch heavy data, attempt local fix (gate check first) or exit. | +| `environment_issue` | Request environment rerun via MCP (gate check first). | +| `self_healing_throttled` | Reject old fixes, attempt local fix. | +| `no_new_cipe` | CI Attempt never spawned. Auto-fix workflow or exit with guidance. | +| `cipe_no_tasks` | CI failed with no tasks. Retry once with empty commit. | + +**Key rules (always apply):** + +- **Git safety**: Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets +- **Environment failures** (OOM, command not found, permission denied): bail immediately. These aren't code bugs, so spending local-fix budget on them is wasteful +- **Gate check**: Run `ci-state-update.mjs gate` before local fix attempts — if budget exhausted, print message and exit + +## Main Loop + +### Step 1: Initialize Tracking + +``` +cycle_count = 0 # Only incremented for agent-initiated cycles (counted against --max-cycles) +start_time = now() +no_progress_count = 0 +local_verify_count = 0 +env_rerun_count = 0 +last_cipe_url = null +expected_commit_sha = null +agent_triggered = false # Set true after monitor takes an action that triggers new CI Attempt +poll_count = 0 +wait_mode = false +prev_status = null +prev_cipe_status = null +prev_sh_status = null +prev_verification_status = null +prev_failure_classification = null +``` + +### Step 2: Polling Loop + +Repeat until done: + +#### 2a. Spawn subagent (FETCH_STATUS) + +Determine select fields based on mode: + +- **Wait mode**: use WAIT_FIELDS (`cipeUrl,commitSha,cipeStatus`) +- **Normal mode (first poll or after newCipeDetected)**: use LIGHT_FIELDS + +Call the `ci_information` tool with the determined `select` fields for the current branch. Wait for the result before proceeding. + +#### 2b. Run decision script + +```bash +node /scripts/ci-poll-decide.mjs '' \ + [--wait-mode] \ + [--prev-cipe-url ] \ + [--expected-sha ] \ + [--prev-status ] \ + [--timeout ] \ + [--new-cipe-timeout ] \ + [--env-rerun-count ] \ + [--no-progress-count ] \ + [--prev-cipe-status ] \ + [--prev-sh-status ] \ + [--prev-verification-status ] \ + [--prev-failure-classification ] +``` + +The script outputs a single JSON line: `{ action, code, message, delay?, noProgressCount, envRerunCount, fields?, newCipeDetected?, verifiableTaskIds? }` + +#### 2c. Process script output + +Parse the JSON output and update tracking state: + +- `no_progress_count = output.noProgressCount` +- `env_rerun_count = output.envRerunCount` +- `prev_cipe_status = subagent_result.cipeStatus` +- `prev_sh_status = subagent_result.selfHealingStatus` +- `prev_verification_status = subagent_result.verificationStatus` +- `prev_failure_classification = subagent_result.failureClassification` +- `prev_status = output.action + ":" + (output.code || subagent_result.cipeStatus)` +- `poll_count++` + +Based on `action`: + +- **`action == "poll"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a + - If `output.newCipeDetected`: clear wait mode, reset `wait_mode = false` +- **`action == "wait"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a +- **`action == "done"`**: Proceed to Step 3 with `output.code` + +### Step 3: Handle Actionable Status + +When decision script returns `action == "done"`: + +1. Run cycle-check (Step 4) **before** handling the code +2. Check the returned `code` +3. Look up default behavior in the table above +4. Check if user instructions override the default +5. Execute the appropriate action +6. **If action expects new CI Attempt**, update tracking (see Step 3a) +7. If action results in looping, go to Step 2 + +#### Tool calls for actions + +Several statuses require fetching additional data or calling tools: + +- **fix_apply_ready**: Call `update_self_healing_fix` with action `APPLY` +- **fix_needs_local_verify**: Call `ci_information` with HEAVY_FIELDS for fix details before local verification +- **fix_needs_review**: Call `ci_information` with HEAVY_FIELDS → get `suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries` +- **fix_failed / no_fix**: Call `ci_information` with HEAVY_FIELDS → get `taskFailureSummaries` for local fix context +- **environment_issue**: Call `update_self_healing_fix` with action `RERUN_ENVIRONMENT_STATE` +- **self_healing_throttled**: Call `ci_information` with HEAVY_FIELDS → get `selfHealingSkipMessage`; then call `update_self_healing_fix` for each old fix + +### Step 3a: Track State for New-CI-Attempt Detection + +After actions that should trigger a new CI Attempt, run: + +```bash +node /scripts/ci-state-update.mjs post-action \ + --action \ + --cipe-url \ + --commit-sha +``` + +Action types: `fix-auto-applying`, `apply-mcp`, `apply-local-push`, `reject-fix-push`, `local-fix-push`, `env-rerun`, `auto-fix-push`, `empty-commit-push` + +The script returns `{ waitMode, pollCount, lastCipeUrl, expectedCommitSha, agentTriggered }`. Update all tracking state from the output, then go to Step 2. + +### Step 4: Cycle Classification and Progress Tracking + +When the decision script returns `action == "done"`, run cycle-check **before** handling the code: + +```bash +node /scripts/ci-state-update.mjs cycle-check \ + --code \ + [--agent-triggered] \ + --cycle-count --max-cycles \ + --env-rerun-count +``` + +The script returns `{ cycleCount, agentTriggered, envRerunCount, approachingLimit, message }`. Update tracking state from the output. + +- If `approachingLimit` → ask user whether to continue (with 5 or 10 more cycles) or stop monitoring +- If previous cycle was NOT agent-triggered (human pushed), log that human-initiated push was detected + +#### Progress Tracking + +- `no_progress_count`, circuit breaker (5 polls), and backoff reset are handled by ci-poll-decide.mjs (progress = any change in cipeStatus, selfHealingStatus, verificationStatus, or failureClassification) +- `env_rerun_count` reset on non-environment status is handled by ci-state-update.mjs cycle-check +- On new CI Attempt detected (poll script returns `newCipeDetected`) → reset `local_verify_count = 0`, `env_rerun_count = 0` + +## Error Handling + +| Error | Action | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------- | +| Git rebase conflict | Report to user, exit | +| `nx-cloud apply-locally` fails | Reject fix via MCP (`action: "REJECT"`), then attempt manual patch (Reject + Fix From Scratch Flow) or exit | +| MCP tool error | Retry once, if fails report to user | +| Subagent spawn failure | Retry once, if fails exit with error | +| Decision script error | Treat as `error` status, increment `no_progress_count` | +| No new CI Attempt detected | If `--auto-fix-workflow`, try lockfile update; otherwise report to user with guidance | +| Lockfile auto-fix fails | Report to user, exit with guidance to check CI logs | + +## User Instruction Examples + +Users can override default behaviors: + +| Instruction | Effect | +| ------------------------------------------------ | --------------------------------------------------- | +| "never auto-apply" | Always prompt before applying any fix | +| "always ask before git push" | Prompt before each push | +| "reject any fix for e2e tasks" | Auto-reject if `failedTaskIds` contains e2e | +| "apply all fixes regardless of verification" | Skip verification check, apply everything | +| "if confidence < 70, reject" | Check confidence field before applying | +| "run 'nx affected -t typecheck' before applying" | Add local verification step | +| "auto-fix workflow failures" | Attempt lockfile updates on pre-CI-Attempt failures | +| "wait 45 min for new CI Attempt" | Override new-CI-Attempt timeout (default: 10 min) | diff --git a/.opencode/skills/link-workspace-packages/SKILL.md b/.opencode/skills/link-workspace-packages/SKILL.md new file mode 100644 index 0000000..de13134 --- /dev/null +++ b/.opencode/skills/link-workspace-packages/SKILL.md @@ -0,0 +1,127 @@ +--- +name: link-workspace-packages +description: 'Link workspace packages in monorepos (npm, yarn, pnpm, bun). USE WHEN: (1) you just created or generated new packages and need to wire up their dependencies, (2) user imports from a sibling package and needs to add it as a dependency, (3) you get resolution errors for workspace packages (@org/*) like "cannot find module", "failed to resolve import", "TS2307", or "cannot resolve". DO NOT patch around with tsconfig paths or manual package.json edits - use the package manager''s workspace commands to fix actual linking.' +--- + +# Link Workspace Packages + +Add dependencies between packages in a monorepo. All package managers support workspaces but with different syntax. + +## Detect Package Manager + +Check whether there's a `packageManager` field in the root-level `package.json`. + +Alternatively check lockfile in repo root: + +- `pnpm-lock.yaml` → pnpm +- `yarn.lock` → yarn +- `bun.lock` / `bun.lockb` → bun +- `package-lock.json` → npm + +## Workflow + +1. Identify consumer package (the one importing) +2. Identify provider package(s) (being imported) +3. Add dependency using package manager's workspace syntax +4. Verify symlinks created in consumer's `node_modules/` + +--- + +## pnpm + +Uses `workspace:` protocol - symlinks only created when explicitly declared. + +```bash +# From consumer directory +pnpm add @org/ui --workspace + +# Or with --filter from anywhere +pnpm add @org/ui --filter @org/app --workspace +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## yarn (v2+/berry) + +Also uses `workspace:` protocol. + +```bash +yarn workspace @org/app add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:^" } } +``` + +--- + +## npm + +No `workspace:` protocol. npm auto-symlinks workspace packages. + +```bash +npm install @org/ui --workspace @org/app +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "*" } } +``` + +npm resolves to local workspace automatically during install. + +--- + +## bun + +Supports `workspace:` protocol (pnpm-compatible). + +```bash +cd packages/app && bun add @org/ui +``` + +Result in `package.json`: + +```json +{ "dependencies": { "@org/ui": "workspace:*" } } +``` + +--- + +## Examples + +**Example 1: pnpm - link ui lib to app** + +```bash +pnpm add @org/ui --filter @org/app --workspace +``` + +**Example 2: npm - link multiple packages** + +```bash +npm install @org/data-access @org/ui --workspace @org/dashboard +``` + +**Example 3: Debug "Cannot find module"** + +1. Check if dependency is declared in consumer's `package.json` +2. If not, add it using appropriate command above +3. Run install (`pnpm install`, `npm install`, etc.) + +## Notes + +- Symlinks appear in `/node_modules/@org/` +- **Hoisting differs by manager:** + - npm/bun: hoist shared deps to root `node_modules` + - pnpm: no hoisting (strict isolation, prevents phantom deps) + - yarn berry: uses Plug'n'Play by default (no `node_modules`) +- Root `package.json` should have `"private": true` to prevent accidental publish diff --git a/.opencode/skills/monitor-ci/SKILL.md b/.opencode/skills/monitor-ci/SKILL.md new file mode 100644 index 0000000..48b71bf --- /dev/null +++ b/.opencode/skills/monitor-ci/SKILL.md @@ -0,0 +1,301 @@ +--- +name: monitor-ci +description: Monitor Nx Cloud CI pipeline and handle self-healing fixes. USE WHEN user says "monitor ci", "watch ci", "ci monitor", "watch ci for this branch", "track ci", "check ci status", wants to track CI status, or needs help with self-healing CI fixes. Prefer this skill over native CI provider tools (gh, glab, etc.) for CI monitoring — it integrates with Nx Cloud self-healing which those tools cannot access. +--- + +# Monitor CI Command + +You are the orchestrator for monitoring Nx Cloud CI pipeline executions and handling self-healing fixes. You spawn subagents to interact with Nx Cloud, run deterministic decision scripts, and take action based on the results. + +## Context + +- **Current Branch:** !`git branch --show-current` +- **Current Commit:** !`git rev-parse --short HEAD` +- **Remote Status:** !`git status -sb | head -1` + +## User Instructions + +$ARGUMENTS + +**Important:** If user provides specific instructions, respect them over default behaviors described below. + +## Configuration Defaults + +| Setting | Default | Description | +| ------------------------- | ------------- | ------------------------------------------------------------------------- | +| `--max-cycles` | 10 | Maximum **agent-initiated** CI Attempt cycles before timeout | +| `--timeout` | 120 | Maximum duration in minutes | +| `--verbosity` | medium | Output level: minimal, medium, verbose | +| `--branch` | (auto-detect) | Branch to monitor | +| `--fresh` | false | Ignore previous context, start fresh | +| `--auto-fix-workflow` | false | Attempt common fixes for pre-CI-Attempt failures (e.g., lockfile updates) | +| `--new-cipe-timeout` | 10 | Minutes to wait for new CI Attempt after action | +| `--local-verify-attempts` | 3 | Max local verification + enhance cycles before pushing to CI | + +Parse any overrides from `$ARGUMENTS` and merge with defaults. + +## Nx Cloud Connection Check + +Before starting the monitoring loop, verify the workspace is connected to Nx Cloud. Without this connection, no CI data is available and the entire skill is inoperable. + +### Step 0: Verify Nx Cloud Connection + +1. **Check `nx.json`** at workspace root for `nxCloudId` or `nxCloudAccessToken` +2. **If `nx.json` missing OR neither property exists** → exit with: + + ``` + Nx Cloud not connected. Unlock 70% faster CI and auto-fix broken PRs with https://nx.dev/nx-cloud + ``` + +3. **If connected** → continue to main loop + +## Architecture Overview + +1. **This skill (orchestrator)**: spawns subagents, runs scripts, prints status, does local coding work +2. **ci-monitor-subagent (haiku)**: calls one MCP tool (ci_information or update_self_healing_fix), returns structured result, exits +3. **ci-poll-decide.mjs (deterministic script)**: takes ci_information result + state, returns action + status message +4. **ci-state-update.mjs (deterministic script)**: manages budget gates, post-action state transitions, and cycle classification + +## Status Reporting + +The decision script handles message formatting based on verbosity. When printing messages to the user: + +- Prepend `[monitor-ci]` to every message from the script's `message` field +- For your own action messages (e.g. "Applying fix via MCP..."), also prepend `[monitor-ci]` + +## Anti-Patterns + +These behaviors cause real problems — racing with self-healing, losing CI progress, or wasting context: + +| Anti-Pattern | Why It's Bad | +| ----------------------------------------------------------------------------------------------- | ------------------------------------------------------------------ | +| Using CI provider CLIs with `--watch` flags (e.g., `gh pr checks --watch`, `glab ci status -w`) | Bypasses Nx Cloud self-healing entirely | +| Writing custom CI polling scripts | Unreliable, pollutes context, no self-healing | +| Cancelling CI workflows/pipelines | Destructive, loses CI progress | +| Running CI checks on main agent | Wastes main agent context tokens | +| Independently analyzing/fixing CI failures while polling | Races with self-healing, causes duplicate fixes and confused state | + +**If this skill fails to activate**, the fallback is: + +1. Use CI provider CLI for a one-time, read-only status check (single call, no watch/polling flags) +2. Immediately delegate to this skill with gathered context +3. Do not continue polling on main agent — it wastes context tokens and bypasses self-healing + +## Session Context Behavior + +If the user previously ran `/monitor-ci` in this session, you may have prior state (poll counts, last CI Attempt URL, etc.). Resume from that state unless `--fresh` is set, in which case discard it and start from Step 1. + +## MCP Tool Reference + +Three field sets control polling efficiency — use the lightest set that gives you what you need: + +```yaml +WAIT_FIELDS: 'cipeUrl,commitSha,cipeStatus' +LIGHT_FIELDS: 'cipeStatus,cipeUrl,branch,commitSha,selfHealingStatus,verificationStatus,userAction,failedTaskIds,verifiedTaskIds,selfHealingEnabled,failureClassification,couldAutoApplyTasks,autoApplySkipped,autoApplySkipReason,shortLink,confidence,confidenceReasoning,hints,selfHealingSkippedReason,selfHealingSkipMessage' +HEAVY_FIELDS: 'taskOutputSummary,suggestedFix,suggestedFixReasoning,suggestedFixDescription' +``` + +The `ci_information` tool accepts `branch` (optional, defaults to current git branch), `select` (comma-separated field names), and `pageToken` (0-based pagination for long strings). + +The `update_self_healing_fix` tool accepts a `shortLink` and an action: `APPLY`, `REJECT`, or `RERUN_ENVIRONMENT_STATE`. + +## Default Behaviors by Status + +The decision script returns one of the following statuses. This table defines the **default behavior** for each. User instructions can override any of these. + +**Simple exits** — just report and exit: + +| Status | Default Behavior | +| ----------------------- | ---------------------------------------------------------------------------------------------------------------- | +| `ci_success` | Exit with success | +| `cipe_canceled` | Exit, CI was canceled | +| `cipe_timed_out` | Exit, CI timed out | +| `polling_timeout` | Exit, polling timeout reached | +| `circuit_breaker` | Exit, no progress after 5 consecutive polls | +| `environment_rerun_cap` | Exit, environment reruns exhausted | +| `fix_auto_applying` | Self-healing is handling it — just record `last_cipe_url`, enter wait mode. No MCP call or local git ops needed. | +| `error` | Wait 60s and loop | + +**Statuses requiring action** — when handling these in Step 3, read `references/fix-flows.md` for the detailed flow: + +| Status | Summary | +| ------------------------ | --------------------------------------------------------------------------------------------- | +| `fix_auto_apply_skipped` | Fix verified but auto-apply skipped (e.g., loop prevention). Inform user, offer manual apply. | +| `fix_apply_ready` | Fix verified (all tasks or e2e-only). Apply via MCP. | +| `fix_needs_local_verify` | Fix has unverified non-e2e tasks. Run locally, then apply or enhance. | +| `fix_needs_review` | Fix verification failed/not attempted. Analyze and decide. | +| `fix_failed` | Self-healing failed. Fetch heavy data, attempt local fix (gate check first). | +| `no_fix` | No fix available. Fetch heavy data, attempt local fix (gate check first) or exit. | +| `environment_issue` | Request environment rerun via MCP (gate check first). | +| `self_healing_throttled` | Reject old fixes, attempt local fix. | +| `no_new_cipe` | CI Attempt never spawned. Auto-fix workflow or exit with guidance. | +| `cipe_no_tasks` | CI failed with no tasks. Retry once with empty commit. | + +**Key rules (always apply):** + +- **Git safety**: Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets +- **Environment failures** (OOM, command not found, permission denied): bail immediately. These aren't code bugs, so spending local-fix budget on them is wasteful +- **Gate check**: Run `ci-state-update.mjs gate` before local fix attempts — if budget exhausted, print message and exit + +## Main Loop + +### Step 1: Initialize Tracking + +``` +cycle_count = 0 # Only incremented for agent-initiated cycles (counted against --max-cycles) +start_time = now() +no_progress_count = 0 +local_verify_count = 0 +env_rerun_count = 0 +last_cipe_url = null +expected_commit_sha = null +agent_triggered = false # Set true after monitor takes an action that triggers new CI Attempt +poll_count = 0 +wait_mode = false +prev_status = null +prev_cipe_status = null +prev_sh_status = null +prev_verification_status = null +prev_failure_classification = null +``` + +### Step 2: Polling Loop + +Repeat until done: + +#### 2a. Spawn subagent (FETCH_STATUS) + +Determine select fields based on mode: + +- **Wait mode**: use WAIT_FIELDS (`cipeUrl,commitSha,cipeStatus`) +- **Normal mode (first poll or after newCipeDetected)**: use LIGHT_FIELDS + +Call the `ci_information` tool with the determined `select` fields for the current branch. Wait for the result before proceeding. + +#### 2b. Run decision script + +```bash +node /scripts/ci-poll-decide.mjs '' \ + [--wait-mode] \ + [--prev-cipe-url ] \ + [--expected-sha ] \ + [--prev-status ] \ + [--timeout ] \ + [--new-cipe-timeout ] \ + [--env-rerun-count ] \ + [--no-progress-count ] \ + [--prev-cipe-status ] \ + [--prev-sh-status ] \ + [--prev-verification-status ] \ + [--prev-failure-classification ] +``` + +The script outputs a single JSON line: `{ action, code, message, delay?, noProgressCount, envRerunCount, fields?, newCipeDetected?, verifiableTaskIds? }` + +#### 2c. Process script output + +Parse the JSON output and update tracking state: + +- `no_progress_count = output.noProgressCount` +- `env_rerun_count = output.envRerunCount` +- `prev_cipe_status = subagent_result.cipeStatus` +- `prev_sh_status = subagent_result.selfHealingStatus` +- `prev_verification_status = subagent_result.verificationStatus` +- `prev_failure_classification = subagent_result.failureClassification` +- `prev_status = output.action + ":" + (output.code || subagent_result.cipeStatus)` +- `poll_count++` + +Based on `action`: + +- **`action == "poll"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a + - If `output.newCipeDetected`: clear wait mode, reset `wait_mode = false` +- **`action == "wait"`**: Print `output.message`, sleep `output.delay` seconds, go to 2a +- **`action == "done"`**: Proceed to Step 3 with `output.code` + +### Step 3: Handle Actionable Status + +When decision script returns `action == "done"`: + +1. Run cycle-check (Step 4) **before** handling the code +2. Check the returned `code` +3. Look up default behavior in the table above +4. Check if user instructions override the default +5. Execute the appropriate action +6. **If action expects new CI Attempt**, update tracking (see Step 3a) +7. If action results in looping, go to Step 2 + +#### Tool calls for actions + +Several statuses require fetching additional data or calling tools: + +- **fix_apply_ready**: Call `update_self_healing_fix` with action `APPLY` +- **fix_needs_local_verify**: Call `ci_information` with HEAVY_FIELDS for fix details before local verification +- **fix_needs_review**: Call `ci_information` with HEAVY_FIELDS → get `suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries` +- **fix_failed / no_fix**: Call `ci_information` with HEAVY_FIELDS → get `taskFailureSummaries` for local fix context +- **environment_issue**: Call `update_self_healing_fix` with action `RERUN_ENVIRONMENT_STATE` +- **self_healing_throttled**: Call `ci_information` with HEAVY_FIELDS → get `selfHealingSkipMessage`; then call `update_self_healing_fix` for each old fix + +### Step 3a: Track State for New-CI-Attempt Detection + +After actions that should trigger a new CI Attempt, run: + +```bash +node /scripts/ci-state-update.mjs post-action \ + --action \ + --cipe-url \ + --commit-sha +``` + +Action types: `fix-auto-applying`, `apply-mcp`, `apply-local-push`, `reject-fix-push`, `local-fix-push`, `env-rerun`, `auto-fix-push`, `empty-commit-push` + +The script returns `{ waitMode, pollCount, lastCipeUrl, expectedCommitSha, agentTriggered }`. Update all tracking state from the output, then go to Step 2. + +### Step 4: Cycle Classification and Progress Tracking + +When the decision script returns `action == "done"`, run cycle-check **before** handling the code: + +```bash +node /scripts/ci-state-update.mjs cycle-check \ + --code \ + [--agent-triggered] \ + --cycle-count --max-cycles \ + --env-rerun-count +``` + +The script returns `{ cycleCount, agentTriggered, envRerunCount, approachingLimit, message }`. Update tracking state from the output. + +- If `approachingLimit` → ask user whether to continue (with 5 or 10 more cycles) or stop monitoring +- If previous cycle was NOT agent-triggered (human pushed), log that human-initiated push was detected + +#### Progress Tracking + +- `no_progress_count`, circuit breaker (5 polls), and backoff reset are handled by ci-poll-decide.mjs (progress = any change in cipeStatus, selfHealingStatus, verificationStatus, or failureClassification) +- `env_rerun_count` reset on non-environment status is handled by ci-state-update.mjs cycle-check +- On new CI Attempt detected (poll script returns `newCipeDetected`) → reset `local_verify_count = 0`, `env_rerun_count = 0` + +## Error Handling + +| Error | Action | +| ------------------------------ | ----------------------------------------------------------------------------------------------------------- | +| Git rebase conflict | Report to user, exit | +| `nx-cloud apply-locally` fails | Reject fix via MCP (`action: "REJECT"`), then attempt manual patch (Reject + Fix From Scratch Flow) or exit | +| MCP tool error | Retry once, if fails report to user | +| Subagent spawn failure | Retry once, if fails exit with error | +| Decision script error | Treat as `error` status, increment `no_progress_count` | +| No new CI Attempt detected | If `--auto-fix-workflow`, try lockfile update; otherwise report to user with guidance | +| Lockfile auto-fix fails | Report to user, exit with guidance to check CI logs | + +## User Instruction Examples + +Users can override default behaviors: + +| Instruction | Effect | +| ------------------------------------------------ | --------------------------------------------------- | +| "never auto-apply" | Always prompt before applying any fix | +| "always ask before git push" | Prompt before each push | +| "reject any fix for e2e tasks" | Auto-reject if `failedTaskIds` contains e2e | +| "apply all fixes regardless of verification" | Skip verification check, apply everything | +| "if confidence < 70, reject" | Check confidence field before applying | +| "run 'nx affected -t typecheck' before applying" | Add local verification step | +| "auto-fix workflow failures" | Attempt lockfile updates on pre-CI-Attempt failures | +| "wait 45 min for new CI Attempt" | Override new-CI-Attempt timeout (default: 10 min) | diff --git a/.opencode/skills/monitor-ci/references/fix-flows.md b/.opencode/skills/monitor-ci/references/fix-flows.md new file mode 100644 index 0000000..b33aa02 --- /dev/null +++ b/.opencode/skills/monitor-ci/references/fix-flows.md @@ -0,0 +1,108 @@ +# Detailed Status Handling & Fix Flows + +## Status Handling by Code + +### fix_auto_apply_skipped + +The script returns `autoApplySkipReason` in its output. + +1. Report the skip reason to the user (e.g., "Auto-apply was skipped because the previous CI pipeline execution was triggered by Nx Cloud") +2. Offer to apply the fix manually — spawn UPDATE_FIX subagent with `APPLY` if user agrees +3. Record `last_cipe_url`, enter wait mode + +### fix_apply_ready + +- Spawn UPDATE_FIX subagent with `APPLY` +- Record `last_cipe_url`, enter wait mode + +### fix_needs_local_verify + +The script returns `verifiableTaskIds` in its output. + +1. **Detect package manager:** `pnpm-lock.yaml` → `pnpm nx`, `yarn.lock` → `yarn nx`, otherwise `npx nx` +2. **Run verifiable tasks in parallel** — spawn `general` subagents for each task +3. **If all pass** → spawn UPDATE_FIX subagent with `APPLY`, enter wait mode +4. **If any fail** → Apply Locally + Enhance Flow (see below) + +### fix_needs_review + +Spawn FETCH_HEAVY subagent, then analyze fix content (`suggestedFixDescription`, `suggestedFixSummary`, `taskFailureSummaries`): + +- If fix looks correct → apply via MCP +- If fix needs enhancement → Apply Locally + Enhance Flow +- If fix is wrong → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. Otherwise → Reject + Fix From Scratch Flow + +### fix_failed / no_fix + +Spawn FETCH_HEAVY subagent for `taskFailureSummaries`. Run `ci-state-update.mjs gate --gate-type local-fix` — if not allowed, print message and exit. Otherwise attempt local fix (counter already incremented by gate). If successful → commit, push, enter wait mode. If not → exit with failure. + +### environment_issue + +1. Run `ci-state-update.mjs gate --gate-type env-rerun`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `RERUN_ENVIRONMENT_STATE` +3. Enter wait mode with `last_cipe_url` set + +### self_healing_throttled + +Spawn FETCH_HEAVY subagent for `selfHealingSkipMessage`. + +1. **Parse throttle message** for CI Attempt URLs (regex: `/cipes/{id}`) +2. **Reject previous fixes** — for each URL: spawn FETCH_THROTTLE_INFO to get `shortLink`, then UPDATE_FIX with `REJECT` +3. **Attempt local fix**: Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed → skip to step 4. Otherwise use `failedTaskIds` and `taskFailureSummaries` for context. +4. **Fallback if local fix not possible or budget exhausted**: push empty commit (`git commit --allow-empty -m "ci: rerun after rejecting throttled fixes"`), enter wait mode + +### no_new_cipe + +1. Report to user: no CI attempt found, suggest checking CI provider +2. If `--auto-fix-workflow`: detect package manager, run install, commit lockfile if changed, enter wait mode +3. Otherwise: exit with guidance + +### cipe_no_tasks + +1. Report to user: CI failed with no tasks recorded +2. Retry: `git commit --allow-empty -m "chore: retry ci [monitor-ci]"` + push, enter wait mode +3. If retry also returns `cipe_no_tasks`: exit with failure + +## Fix Action Flows + +### Apply via MCP + +Spawn UPDATE_FIX subagent with `APPLY`. New CI Attempt spawns automatically. No local git ops. + +### Apply Locally + Enhance Flow + +1. `nx-cloud apply-locally ` (sets state to `APPLIED_LOCALLY`) +2. Enhance code to fix failing tasks +3. Run failing tasks to verify +4. If still failing → run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, commit current state and push (let CI be final judge). Otherwise loop back to enhance. +5. If passing → commit and push, enter wait mode + +### Reject + Fix From Scratch Flow + +1. Run `ci-state-update.mjs gate --gate-type local-fix`. If not allowed, print message and exit. +2. Spawn UPDATE_FIX subagent with `REJECT` +3. Fix from scratch locally +4. Commit and push, enter wait mode + +## Environment vs Code Failure Recognition + +When any local fix path runs a task and it fails, assess whether the failure is a **code issue** or an **environment/tooling issue** before running the gate script. + +**Indicators of environment/tooling failures** (non-exhaustive): command not found / binary missing, OOM / heap allocation failures, permission denied, network timeouts / DNS failures, missing system libraries, Docker/container issues, disk space exhaustion. + +When detected → bail immediately without running gate (no budget consumed). Report that the failure is an environment/tooling issue, not a code bug. + +**Code failures** (compilation errors, test assertion failures, lint violations, type errors) are genuine candidates for local fix attempts and proceed normally through the gate. + +## Git Safety + +- Stage specific files by name — `git add -A` or `git add .` risks committing the user's unrelated work-in-progress or secrets + +## Commit Message Format + +```bash +git commit -m "fix(): + +Failed tasks: , +Local verification: passed|enhanced|failed-pushing-to-ci" +``` diff --git a/.opencode/skills/monitor-ci/scripts/ci-poll-decide.mjs b/.opencode/skills/monitor-ci/scripts/ci-poll-decide.mjs new file mode 100644 index 0000000..3fe17e5 --- /dev/null +++ b/.opencode/skills/monitor-ci/scripts/ci-poll-decide.mjs @@ -0,0 +1,428 @@ +#!/usr/bin/env node + +/** + * CI Poll Decision Script + * + * Deterministic decision engine for CI monitoring. + * Takes ci_information JSON + state args, outputs a single JSON action line. + * + * Architecture: + * classify() — pure decision tree, returns { action, code, extra? } + * buildOutput() — maps classification to full output with messages, delays, counters + * + * Usage: + * node ci-poll-decide.mjs '' \ + * [--wait-mode] [--prev-cipe-url ] [--expected-sha ] \ + * [--prev-status ] [--timeout ] [--new-cipe-timeout ] \ + * [--env-rerun-count ] [--no-progress-count ] \ + * [--prev-cipe-status ] [--prev-sh-status ] \ + * [--prev-verification-status ] [--prev-failure-classification ] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const ciInfoJson = args[0]; +const pollCount = parseInt(args[1], 10) || 0; +const verbosity = args[2] || 'medium'; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +const waitMode = getFlag('--wait-mode'); +const prevCipeUrl = getArg('--prev-cipe-url'); +const expectedSha = getArg('--expected-sha'); +const prevStatus = getArg('--prev-status'); +const timeoutSeconds = parseInt(getArg('--timeout') || '0', 10); +const newCipeTimeoutSeconds = parseInt(getArg('--new-cipe-timeout') || '0', 10); +const envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); +const inputNoProgressCount = parseInt(getArg('--no-progress-count') || '0', 10); +const prevCipeStatus = getArg('--prev-cipe-status'); +const prevShStatus = getArg('--prev-sh-status'); +const prevVerificationStatus = getArg('--prev-verification-status'); +const prevFailureClassification = getArg('--prev-failure-classification'); + +// --- Parse CI info --- + +let ci; +try { + ci = JSON.parse(ciInfoJson); +} catch { + console.log( + JSON.stringify({ + action: 'done', + code: 'error', + message: 'Failed to parse ci_information JSON', + noProgressCount: inputNoProgressCount + 1, + envRerunCount, + }) + ); + process.exit(0); +} + +const { + cipeStatus, + selfHealingStatus, + verificationStatus, + selfHealingEnabled, + selfHealingSkippedReason, + failureClassification: rawFailureClassification, + failedTaskIds = [], + verifiedTaskIds = [], + couldAutoApplyTasks, + autoApplySkipped, + autoApplySkipReason, + userAction, + cipeUrl, + commitSha, +} = ci; + +const failureClassification = rawFailureClassification?.toLowerCase() ?? null; + +// --- Helpers --- + +function categorizeTasks() { + const verifiedSet = new Set(verifiedTaskIds); + const unverified = failedTaskIds.filter((t) => !verifiedSet.has(t)); + if (unverified.length === 0) return { category: 'all_verified' }; + + const e2e = unverified.filter((t) => { + const parts = t.split(':'); + return parts.length >= 2 && parts[1].includes('e2e'); + }); + if (e2e.length === unverified.length) return { category: 'e2e_only' }; + + const verifiable = unverified.filter((t) => { + const parts = t.split(':'); + return !(parts.length >= 2 && parts[1].includes('e2e')); + }); + return { category: 'needs_local_verify', verifiableTaskIds: verifiable }; +} + +function backoff(count) { + const delays = [60, 90, 120]; + return delays[Math.min(count, delays.length - 1)]; +} + +function hasStateChanged() { + if (prevCipeStatus && cipeStatus !== prevCipeStatus) return true; + if (prevShStatus && selfHealingStatus !== prevShStatus) return true; + if (prevVerificationStatus && verificationStatus !== prevVerificationStatus) + return true; + if ( + prevFailureClassification && + failureClassification !== prevFailureClassification + ) + return true; + return false; +} + +function isTimedOut() { + if (timeoutSeconds <= 0) return false; + const avgDelay = pollCount === 0 ? 0 : backoff(Math.floor(pollCount / 2)); + return pollCount * avgDelay >= timeoutSeconds; +} + +function isWaitTimedOut() { + if (newCipeTimeoutSeconds <= 0) return false; + return pollCount * 30 >= newCipeTimeoutSeconds; +} + +function isNewCipe() { + return ( + (prevCipeUrl && cipeUrl && cipeUrl !== prevCipeUrl) || + (expectedSha && commitSha && commitSha === expectedSha) + ); +} + +// ============================================================ +// classify() — pure decision tree +// +// Returns: { action: 'poll'|'wait'|'done', code: string, extra? } +// +// Decision priority (top wins): +// WAIT MODE: +// 1. new CI Attempt detected → poll (new_cipe_detected) +// 2. wait timed out → done (no_new_cipe) +// 3. still waiting → wait (waiting_for_cipe) +// NORMAL MODE: +// 4. polling timeout → done (polling_timeout) +// 5. circuit breaker (5 polls) → done (circuit_breaker) +// 6. CI succeeded → done (ci_success) +// 7. CI canceled → done (cipe_canceled) +// 8. CI timed out → done (cipe_timed_out) +// 9. CI failed, no tasks recorded → done (cipe_no_tasks) +// 10. environment failure → done (environment_rerun_cap | environment_issue) +// 11. self-healing throttled → done (self_healing_throttled) +// 12. CI in progress / not started → poll (ci_running) +// 13. self-healing in progress → poll (sh_running) +// 14. flaky task auto-rerun → poll (flaky_rerun) +// 15. fix auto-applied → poll (fix_auto_applied) +// 16. auto-apply: skipped → done (fix_auto_apply_skipped) +// 17. auto-apply: verification pending→ poll (verification_pending) +// 18. auto-apply: verified → done (fix_auto_applying) +// 19. fix: verification failed/none → done (fix_needs_review) +// 20. fix: all/e2e verified → done (fix_apply_ready) +// 21. fix: needs local verify → done (fix_needs_local_verify) +// 22. self-healing failed → done (fix_failed) +// 23. no fix available → done (no_fix) +// 24. fallback → poll (fallback) +// ============================================================ + +function classify() { + // --- Wait mode --- + if (waitMode) { + if (isNewCipe()) return { action: 'poll', code: 'new_cipe_detected' }; + if (isWaitTimedOut()) return { action: 'done', code: 'no_new_cipe' }; + return { action: 'wait', code: 'waiting_for_cipe' }; + } + + // --- Guards --- + if (isTimedOut()) return { action: 'done', code: 'polling_timeout' }; + if (noProgressCount >= 5) return { action: 'done', code: 'circuit_breaker' }; + + // --- Terminal CI states --- + if (cipeStatus === 'SUCCEEDED') return { action: 'done', code: 'ci_success' }; + if (cipeStatus === 'CANCELED') + return { action: 'done', code: 'cipe_canceled' }; + if (cipeStatus === 'TIMED_OUT') + return { action: 'done', code: 'cipe_timed_out' }; + + // --- CI failed, no tasks --- + if ( + cipeStatus === 'FAILED' && + failedTaskIds.length === 0 && + selfHealingStatus == null + ) + return { action: 'done', code: 'cipe_no_tasks' }; + + // --- Environment failure --- + if (failureClassification === 'environment_state') { + if (envRerunCount >= 2) + return { action: 'done', code: 'environment_rerun_cap' }; + return { action: 'done', code: 'environment_issue' }; + } + + // --- Throttled --- + if (selfHealingSkippedReason === 'THROTTLED') + return { action: 'done', code: 'self_healing_throttled' }; + + // --- Still running: CI --- + if (cipeStatus === 'IN_PROGRESS' || cipeStatus === 'NOT_STARTED') + return { action: 'poll', code: 'ci_running' }; + + // --- Still running: self-healing --- + if ( + (selfHealingStatus === 'IN_PROGRESS' || + selfHealingStatus === 'NOT_STARTED') && + !selfHealingSkippedReason + ) + return { action: 'poll', code: 'sh_running' }; + + // --- Still running: flaky rerun --- + if (failureClassification === 'flaky_task') + return { action: 'poll', code: 'flaky_rerun' }; + + // --- Fix auto-applied, waiting for new CI Attempt --- + if (userAction === 'APPLIED_AUTOMATICALLY') + return { action: 'poll', code: 'fix_auto_applied' }; + + // --- Auto-apply path (couldAutoApplyTasks) --- + if (couldAutoApplyTasks === true) { + if (autoApplySkipped === true) + return { + action: 'done', + code: 'fix_auto_apply_skipped', + extra: { autoApplySkipReason }, + }; + if ( + verificationStatus === 'NOT_STARTED' || + verificationStatus === 'IN_PROGRESS' + ) + return { action: 'poll', code: 'verification_pending' }; + if (verificationStatus === 'COMPLETED') + return { action: 'done', code: 'fix_auto_applying' }; + // verification FAILED or NOT_EXECUTABLE → falls through to fix_needs_review + } + + // --- Fix available --- + if (selfHealingStatus === 'COMPLETED') { + if ( + verificationStatus === 'FAILED' || + verificationStatus === 'NOT_EXECUTABLE' || + (couldAutoApplyTasks !== true && !verificationStatus) + ) + return { action: 'done', code: 'fix_needs_review' }; + + const tasks = categorizeTasks(); + if (tasks.category === 'all_verified' || tasks.category === 'e2e_only') + return { action: 'done', code: 'fix_apply_ready' }; + return { + action: 'done', + code: 'fix_needs_local_verify', + extra: { verifiableTaskIds: tasks.verifiableTaskIds }, + }; + } + + // --- Fix failed --- + if (selfHealingStatus === 'FAILED') + return { action: 'done', code: 'fix_failed' }; + + // --- No fix available --- + if ( + cipeStatus === 'FAILED' && + (selfHealingEnabled === false || selfHealingStatus === 'NOT_EXECUTABLE') + ) + return { action: 'done', code: 'no_fix' }; + + // --- Fallback --- + return { action: 'poll', code: 'fallback' }; +} + +// ============================================================ +// buildOutput() — maps classification to full JSON output +// ============================================================ + +// Message templates keyed by status or key +const messages = { + // wait mode + new_cipe_detected: () => + `New CI Attempt detected! CI: ${cipeStatus || 'N/A'}`, + no_new_cipe: () => + 'New CI Attempt timeout exceeded. No new CI Attempt detected.', + waiting_for_cipe: () => 'Waiting for new CI Attempt...', + + // guards + polling_timeout: () => 'Polling timeout exceeded.', + circuit_breaker: () => 'No progress after 5 consecutive polls. Stopping.', + + // terminal + ci_success: () => 'CI passed successfully!', + cipe_canceled: () => 'CI Attempt was canceled.', + cipe_timed_out: () => 'CI Attempt timed out.', + cipe_no_tasks: () => 'CI failed but no Nx tasks were recorded.', + + // environment + environment_rerun_cap: () => 'Environment rerun cap (2) exceeded. Bailing.', + environment_issue: () => 'CI: FAILED | Classification: ENVIRONMENT_STATE', + + // throttled + self_healing_throttled: () => + 'Self-healing throttled \u2014 too many unapplied fixes.', + + // polling + ci_running: () => `CI: ${cipeStatus}`, + sh_running: () => `CI: ${cipeStatus} | Self-healing: ${selfHealingStatus}`, + flaky_rerun: () => + 'CI: FAILED | Classification: FLAKY_TASK (auto-rerun in progress)', + fix_auto_applied: () => + 'CI: FAILED | Fix auto-applied, new CI Attempt spawning', + verification_pending: () => + `CI: FAILED | Self-healing: COMPLETED | Verification: ${verificationStatus}`, + + // actionable + fix_auto_applying: () => 'Fix verified! Auto-applying...', + fix_auto_apply_skipped: (extra) => + `Fix verified but auto-apply was skipped. ${ + extra?.autoApplySkipReason + ? `Reason: ${extra.autoApplySkipReason}` + : 'Offer to apply manually.' + }`, + fix_needs_review: () => + `Fix available but needs review. Verification: ${ + verificationStatus || 'N/A' + }`, + fix_apply_ready: () => 'Fix available and verified. Ready to apply.', + fix_needs_local_verify: (extra) => + `Fix available. ${extra.verifiableTaskIds.length} task(s) need local verification.`, + fix_failed: () => 'Self-healing failed to generate a fix.', + no_fix: () => 'CI failed, no fix available.', + + // fallback + fallback: () => + `CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, +}; + +// Codes where noProgressCount resets to 0 (genuine progress occurred) +const resetProgressCodes = new Set([ + 'ci_success', + 'fix_auto_applying', + 'fix_auto_apply_skipped', + 'fix_needs_review', + 'fix_apply_ready', + 'fix_needs_local_verify', +]); + +function formatMessage(msg) { + if (verbosity === 'minimal') { + const currentStatus = `${cipeStatus}|${selfHealingStatus}|${verificationStatus}`; + if (currentStatus === (prevStatus || '')) return null; + return msg; + } + if (verbosity === 'verbose') { + return [ + `Poll #${pollCount + 1} | CI: ${cipeStatus || 'N/A'} | Self-healing: ${ + selfHealingStatus || 'N/A' + } | Verification: ${verificationStatus || 'N/A'}`, + msg, + ].join('\n'); + } + return `Poll #${pollCount + 1} | ${msg}`; +} + +function buildOutput(decision) { + const { action, code, extra } = decision; + + // noProgressCount is already computed before classify() was called. + // Here we only handle the reset for "genuine progress" done-codes. + + const msgFn = messages[code]; + const rawMsg = msgFn ? msgFn(extra) : `Unknown: ${code}`; + const message = formatMessage(rawMsg); + + const result = { + action, + code, + message, + noProgressCount: resetProgressCodes.has(code) ? 0 : noProgressCount, + envRerunCount, + }; + + // Add delay + if (action === 'wait') { + result.delay = 30; + } else if (action === 'poll') { + result.delay = code === 'new_cipe_detected' ? 60 : backoff(noProgressCount); + result.fields = 'light'; + } + + // Add extras + if (code === 'new_cipe_detected') result.newCipeDetected = true; + if (extra?.verifiableTaskIds) + result.verifiableTaskIds = extra.verifiableTaskIds; + if (extra?.autoApplySkipReason) + result.autoApplySkipReason = extra.autoApplySkipReason; + + console.log(JSON.stringify(result)); +} + +// --- Run --- + +// Compute noProgressCount from input. Single assignment, no mutation. +// Wait mode: reset on new cipe, otherwise unchanged (wait doesn't count as no-progress). +// Normal mode: reset on any state change, otherwise increment. +const noProgressCount = (() => { + if (waitMode) return isNewCipe() ? 0 : inputNoProgressCount; + if (isNewCipe() || hasStateChanged()) return 0; + return inputNoProgressCount + 1; +})(); + +buildOutput(classify()); diff --git a/.opencode/skills/monitor-ci/scripts/ci-state-update.mjs b/.opencode/skills/monitor-ci/scripts/ci-state-update.mjs new file mode 100644 index 0000000..90fa714 --- /dev/null +++ b/.opencode/skills/monitor-ci/scripts/ci-state-update.mjs @@ -0,0 +1,160 @@ +#!/usr/bin/env node + +/** + * CI State Update Script + * + * Deterministic state management for CI monitor actions. + * Three commands: gate, post-action, cycle-check. + * + * Usage: + * node ci-state-update.mjs gate --gate-type [counter args] + * node ci-state-update.mjs post-action --action [--cipe-url ] [--commit-sha ] + * node ci-state-update.mjs cycle-check --code [--agent-triggered] [counter args] + */ + +// --- Arg parsing --- + +const args = process.argv.slice(2); +const command = args[0]; + +function getFlag(name) { + return args.includes(name); +} + +function getArg(name) { + const idx = args.indexOf(name); + return idx !== -1 && idx + 1 < args.length ? args[idx + 1] : null; +} + +function output(result) { + console.log(JSON.stringify(result)); +} + +// --- gate --- +// Check if an action is allowed and return incremented counter. +// Called before any local fix attempt or environment rerun. + +function gate() { + const gateType = getArg('--gate-type'); + + if (gateType === 'local-fix') { + const count = parseInt(getArg('--local-verify-count') || '0', 10); + const max = parseInt(getArg('--local-verify-attempts') || '3', 10); + if (count >= max) { + return output({ + allowed: false, + localVerifyCount: count, + message: `Local fix budget exhausted (${count}/${max} attempts)`, + }); + } + return output({ + allowed: true, + localVerifyCount: count + 1, + message: null, + }); + } + + if (gateType === 'env-rerun') { + const count = parseInt(getArg('--env-rerun-count') || '0', 10); + if (count >= 2) { + return output({ + allowed: false, + envRerunCount: count, + message: `Environment issue persists after ${count} reruns. Manual investigation needed.`, + }); + } + return output({ + allowed: true, + envRerunCount: count + 1, + message: null, + }); + } + + output({ allowed: false, message: `Unknown gate type: ${gateType}` }); +} + +// --- post-action --- +// Compute next state after an action is taken. +// Returns wait mode params and whether the action was agent-triggered. + +function postAction() { + const action = getArg('--action'); + const cipeUrl = getArg('--cipe-url'); + const commitSha = getArg('--commit-sha'); + + // MCP-triggered or auto-applied: track by cipeUrl + const cipeUrlActions = ['fix-auto-applying', 'apply-mcp', 'env-rerun']; + // Local push: track by commitSha + const commitShaActions = [ + 'apply-local-push', + 'reject-fix-push', + 'local-fix-push', + 'auto-fix-push', + 'empty-commit-push', + ]; + + const trackByCipeUrl = cipeUrlActions.includes(action); + const trackByCommitSha = commitShaActions.includes(action); + + if (!trackByCipeUrl && !trackByCommitSha) { + return output({ error: `Unknown action: ${action}` }); + } + + // fix-auto-applying: self-healing did it, NOT the monitor + const agentTriggered = action !== 'fix-auto-applying'; + + output({ + waitMode: true, + pollCount: 0, + lastCipeUrl: trackByCipeUrl ? cipeUrl : null, + expectedCommitSha: trackByCommitSha ? commitSha : null, + agentTriggered, + }); +} + +// --- cycle-check --- +// Cycle classification + counter resets when a new "done" code is received. +// Called at the start of handling each actionable code. + +function cycleCheck() { + const status = getArg('--code'); + const wasAgentTriggered = getFlag('--agent-triggered'); + let cycleCount = parseInt(getArg('--cycle-count') || '0', 10); + const maxCycles = parseInt(getArg('--max-cycles') || '10', 10); + let envRerunCount = parseInt(getArg('--env-rerun-count') || '0', 10); + + // Cycle classification: if previous cycle was agent-triggered, count it + if (wasAgentTriggered) cycleCount++; + + // Reset env_rerun_count on non-environment status + if (status !== 'environment_issue') envRerunCount = 0; + + // Approaching limit gate + const approachingLimit = cycleCount >= maxCycles - 2; + + output({ + cycleCount, + agentTriggered: false, + envRerunCount, + approachingLimit, + message: approachingLimit + ? `Approaching cycle limit (${cycleCount}/${maxCycles})` + : null, + }); +} + +// --- Dispatch --- + +switch (command) { + case 'gate': + gate(); + break; + case 'post-action': + postAction(); + break; + case 'cycle-check': + cycleCheck(); + break; + default: + output({ error: `Unknown command: ${command}` }); +} diff --git a/.opencode/skills/nx-generate/SKILL.md b/.opencode/skills/nx-generate/SKILL.md new file mode 100644 index 0000000..af7ba80 --- /dev/null +++ b/.opencode/skills/nx-generate/SKILL.md @@ -0,0 +1,166 @@ +--- +name: nx-generate +description: Generate code using nx generators. INVOKE IMMEDIATELY when user mentions scaffolding, setup, structure, creating apps/libs, or setting up project structure. Trigger words - scaffold, setup, create a ... app, create a ... lib, project structure, generate, add a new project. ALWAYS use this BEFORE calling nx_docs or exploring - this skill handles discovery internally. +--- + +# Run Nx Generator + +Nx generators are powerful tools that scaffold projects, make automated code migrations or automate repetitive tasks in a monorepo. They ensure consistency across the codebase and reduce boilerplate work. + +This skill applies when the user wants to: + +- Create new projects like libraries or applications +- Scaffold features or boilerplate code +- Run workspace-specific or custom generators +- Do anything else that an nx generator exists for + +## Key Principles + +1. **Always use `--no-interactive`** - Prevents prompts that would hang execution +2. **Read the generator source code** - The schema alone is not enough; understand what the generator actually does +3. **Match existing repo patterns** - Study similar artifacts in the repo and follow their conventions +4. **Verify with lint/test/build/typecheck etc.** - Generated code must pass verification. The listed targets are just an example, use what's appropriate for this workspace. + +## Steps + +### 1. Discover Available Generators + +Use the Nx CLI to discover available generators: + +- List all generators for a plugin: `npx nx list @nx/react` +- View available plugins: `npx nx list` + +This includes plugin generators (e.g., `@nx/react:library`) and local workspace generators. + +### 2. Match Generator to User Request + +Identify which generator(s) could fulfill the user's needs. Consider what artifact type they want, which framework is relevant, and any specific generator names mentioned. + +**IMPORTANT**: When both a local workspace generator and an external plugin generator could satisfy the request, **always prefer the local workspace generator**. Local generators are customized for the specific repo's patterns. + +If no suitable generator exists, you can stop using this skill. However, the burden of proof is high—carefully consider all available generators before deciding none apply. + +### 3. Get Generator Options + +Use the `--help` flag to understand available options: + +```bash +npx nx g @nx/react:library --help +``` + +Pay attention to required options, defaults that might need overriding, and options relevant to the user's request. + +### Library Buildability + +**Default to non-buildable libraries** unless there's a specific reason for buildable. + +| Type | When to use | Generator flags | +| --------------------------- | ----------------------------------------------------------------- | ----------------------------------- | +| **Non-buildable** (default) | Internal monorepo libs consumed by apps | No `--bundler` flag | +| **Buildable** | Publishing to npm, cross-repo sharing, stable libs for cache hits | `--bundler=vite` or `--bundler=swc` | + +Non-buildable libs: + +- Export `.ts`/`.tsx` source directly +- Consumer's bundler compiles them +- Faster dev experience, less config + +Buildable libs: + +- Have their own build target +- Useful for stable libs that rarely change (cache hits) +- Required for npm publishing + +**If unclear, ask the user:** "Should this library be buildable (own build step, better caching) or non-buildable (source consumed directly, simpler setup)?" + +### 4. Read Generator Source Code + +**This step is critical.** The schema alone does not tell you everything. Reading the source code helps you: + +- Know exactly what files will be created/modified and where +- Understand side effects (updating configs, installing deps, etc.) +- Identify behaviors and options not obvious from the schema +- Understand how options interact with each other + +To find generator source code: + +- For plugin generators: Use `node -e "console.log(require.resolve('@nx//generators.json'));"` to find the generators.json, then locate the source from there +- If that fails, read directly from `node_modules//generators.json` +- For local generators: Typically in `tools/generators/` or a local plugin directory. Search the repo for the generator name. + +After reading the source, reconsider: Is this the right generator? If not, go back to step 2. + +> **⚠️ `--directory` flag behavior can be misleading.** +> It should specify the full path of the generated library or component, not the parent path that it will be generated in. +> +> ```bash +> # ✅ Correct - directory is the full path for the library +> nx g @nx/react:library --directory=libs/my-lib +> # generates libs/my-lib/package.json and more +> +> # ❌ Wrong - this will create files at libs and libs/src/... +> nx g @nx/react:library --name=my-lib --directory=libs +> # generates libs/package.json and more +> ``` + +### 5. Examine Existing Patterns + +Before generating, examine the target area of the codebase: + +- Look at similar existing artifacts (other libraries, applications, etc.) +- Identify naming conventions, file structures, and configuration patterns +- Note which test runners, build tools, and linters are used +- Configure the generator to match these patterns + +### 6. Dry-Run to Verify File Placement + +**Always run with `--dry-run` first** to verify files will be created in the correct location: + +```bash +npx nx g @nx/react:library --name=my-lib --dry-run --no-interactive +``` + +Review the output carefully. If files would be created in the wrong location, adjust your options based on what you learned from the generator source code. + +Note: Some generators don't support dry-run (e.g., if they install npm packages). If dry-run fails for this reason, proceed to running the generator for real. + +### 7. Run the Generator + +Execute the generator: + +```bash +nx generate --no-interactive +``` + +> **Tip:** New packages often need workspace dependencies wired up (e.g., importing shared types, being consumed by apps). The `link-workspace-packages` skill can help add these correctly. + +### 8. Modify Generated Code (If Needed) + +Generators provide a starting point. Modify the output as needed to: + +- Add or modify functionality as requested +- Adjust imports, exports, or configurations +- Integrate with existing code patterns + +**Important:** If you replace or delete generated test files (e.g., `*.spec.ts`), either write meaningful replacement tests or remove the `test` target from the project configuration. Empty test suites will cause `nx test` to fail. + +### 9. Format and Verify + +Format all generated/modified files: + +```bash +nx format --fix +``` + +This example is for built-in nx formatting with prettier. There might be other formatting tools for this workspace, use these when appropriate. + +Then verify the generated code works. Keep in mind that the changes you make with a generator or subsequent modifications might impact various projects so it's usually not enough to only run targets for the artifact you just created. + +```bash +# these targets are just an example! +nx run-many -t build,lint,test,typecheck +``` + +These targets are common examples used across many workspaces. You should do research into other targets available for this workspace and its projects. CI configuration is usually a good guide for what the critical targets are that have to pass. + +If verification fails with manageable issues (a few lint errors, minor type issues), fix them. If issues are extensive, attempt obvious fixes first, then escalate to the user with details about what was generated, what's failing, and what you've attempted. diff --git a/.opencode/skills/nx-import/SKILL.md b/.opencode/skills/nx-import/SKILL.md new file mode 100644 index 0000000..b1cd381 --- /dev/null +++ b/.opencode/skills/nx-import/SKILL.md @@ -0,0 +1,238 @@ +--- +name: nx-import +description: Import, merge, or combine repositories into an Nx workspace using nx import. USE WHEN the user asks to adopt Nx across repos, move projects into a monorepo, or bring code/history from another repository. +--- + +## Quick Start + +- `nx import` brings code from a source repository or folder into the current workspace, preserving commit history. +- After nx `22.6.0`, `nx import` responds with .ndjson outputs and follow-up questions. For earlier versions, always run with `--no-interactive` and specify all flags directly. +- Run `nx import --help` for available options. +- Make sure the destination directory is empty before importing. + EXAMPLE: target has `libs/utils` and `libs/models`; source has `libs/ui` and `libs/data-access` — you cannot import `libs/` into `libs/` directly. Import each source library individually. + +Primary docs: + +- https://nx.dev/docs/guides/adopting-nx/import-project +- https://nx.dev/docs/guides/adopting-nx/preserving-git-histories + +Read the nx docs if you have the tools for it. + +## Import Strategy + +**Subdirectory-at-a-time** (`nx import apps --source=apps`): + +- **Recommended for monorepo sources** — files land at top level, no redundant config +- Caveats: multiple import commands (separate merge commits each); dest must not have conflicting directories; root configs (deps, plugins, targetDefaults) not imported +- **Directory conflicts**: Import into alternate-named dir (e.g. `imported-apps/`), then rename + +**Whole repo** (`nx import imported --source=.`): + +- **Only for non-monorepo sources** (single-project repos) +- For monorepos, creates messy nested config (`imported/nx.json`, `imported/tsconfig.base.json`, etc.) +- If you must: keep imported `tsconfig.base.json` (projects extend it), prefix workspace globs and executor paths + +### Directory Conventions + +- **Always prefer the destination's existing conventions.** Source uses `libs/`but dest uses `packages/`? Import into `packages/` (`nx import packages/foo --source=libs/foo`). +- If dest has no convention (empty workspace), ask the user. + +### Application vs Library Detection + +Before importing, identify whether the source is an **application** or a **library**: + +- **Applications**: Deployable end products. Common indicators: + - _Frontend_: `next.config.*`, `vite.config.*` with a build entry point, framework-specific app scaffolding (CRA, Angular CLI app, etc.) + - _Backend (Node.js)_: Express/Fastify/NestJS server entrypoint, no `"exports"` field in `package.json` + - _JVM_: Maven `pom.xml` with `jar` or `war` and a `main` class; Gradle `application` plugin or `mainClass` setting + - _.NET_: `.csproj`/`.fsproj` with `Exe` or `WinExe` + - _General_: Dockerfile, a runnable entrypoint, no public API surface intended for import by other projects +- **Libraries**: Reusable packages consumed by other projects. Common indicators: `"main"`/`"exports"` in `package.json`, Maven/Gradle packaging as a library jar, .NET `Library`, named exports intended for import by other packages. + +**Destination directory rules**: + +- Applications → `apps/`. Check workspace globs (e.g. `pnpm-workspace.yaml`, `workspaces` in root `package.json`) for an existing `apps/*` entry. + - If `apps/*` is **not** present, add it before importing: update the workspace glob config and commit (or stage) the change. + - Example: `nx import apps/my-app --source=packages/my-app` +- Libraries → follow the dest's existing convention (`packages/`, `libs/`, etc.). + +## Common Issues + +### pnpm Workspace Globs (Critical) + +`nx import` adds the imported directory itself (e.g. `apps`) to `pnpm-workspace.yaml`, **NOT** glob patterns for packages within it. Cross-package imports will fail with `Cannot find module`. + +**Fix**: Replace with proper globs from the source config (e.g. `apps/*`, `libs/shared/*`), then `pnpm install`. + +### Root Dependencies and Config Not Imported (Critical) + +`nx import` does **NOT** merge from the source's root: + +- `dependencies`/`devDependencies` from `package.json` +- `targetDefaults` from `nx.json` (e.g. `"@nx/esbuild:esbuild": { "dependsOn": ["^build"] }` — critical for build ordering) +- `namedInputs` from `nx.json` (e.g. `production` exclusion patterns for test files) +- Plugin configurations from `nx.json` + +**Fix**: Diff source and dest `package.json` + `nx.json`. Add missing deps, merge relevant `targetDefaults` and `namedInputs`. + +### TypeScript Project References + +After import, run `nx sync --yes`. If it reports nothing but typecheck still fails, `nx reset` first, then `nx sync --yes` again. + +### Explicit Executor Path Fixups + +Inferred targets (via Nx plugins) resolve config relative to project root — no changes needed. Explicit executor targets (e.g. `@nx/esbuild:esbuild`) have workspace-root-relative paths (`main`, `outputPath`, `tsConfig`, `assets`, `sourceRoot`) that must be prefixed with the import destination directory. + +### Plugin Detection + +- **Whole-repo import**: `nx import` detects and offers to install plugins. Accept them. +- **Subdirectory import**: Plugins NOT auto-detected. Manually add with `npx nx add @nx/PLUGIN`. Check `include`/`exclude` patterns — defaults won't match alternate directories (e.g. `apps-beta/`). +- Run `npx nx reset` after any plugin config changes. + +### Redundant Root Files (Whole-Repo Only) + +Whole-repo import brings ALL source root files into the dest subdirectory. Clean up: + +- `pnpm-lock.yaml` — stale; dest has its own lockfile +- `pnpm-workspace.yaml` — source workspace config; conflicts with dest +- `node_modules/` — stale symlinks pointing to source filesystem +- `.gitignore` — redundant with dest root `.gitignore` +- `nx.json` — source Nx config; dest has its own +- `README.md` — optional; keep or remove + +**Don't blindly delete** `tsconfig.base.json` — imported projects may extend it via relative paths. + +### Root ESLint Config Missing (Subdirectory Import) + +Subdirectory import doesn't bring the source's root `eslint.config.mjs`, but project configs reference `../../eslint.config.mjs`. + +**Fix order**: + +1. Install ESLint deps first: `pnpm add -wD eslint@^9 @nx/eslint-plugin typescript-eslint` (plus framework-specific plugins) +2. Create root `eslint.config.mjs` (copy from source or create with `@nx/eslint-plugin` base rules) +3. Then `npx nx add @nx/eslint` to register the plugin in `nx.json` + +Install `typescript-eslint` explicitly — pnpm's strict hoisting won't auto-resolve this transitive dep of `@nx/eslint-plugin`. + +### ESLint Version Pinning (Critical) + +**Pin ESLint to v9** (`eslint@^9.0.0`). ESLint 10 breaks `@nx/eslint` and many plugins with cryptic errors like `Cannot read properties of undefined (reading 'version')`. + +`@nx/eslint` may peer-depend on ESLint 8, causing the wrong version to resolve. If lint fails with `Cannot read properties of undefined (reading 'allow')`, add `pnpm.overrides`: + +```json +{ "pnpm": { "overrides": { "eslint": "^9.0.0" } } } +``` + +### Dependency Version Conflicts + +After import, compare key deps (`typescript`, `eslint`, framework-specific). If dest uses newer versions, upgrade imported packages to match (usually safe). If source is newer, may need to upgrade dest first. Use `pnpm.overrides` to enforce single-version policy if desired. + +### Module Boundaries + +Imported projects may lack `tags`. Add tags or update `@nx/enforce-module-boundaries` rules. + +### Project Name Collisions (Multi-Import) + +Same `name` in `package.json` across source and dest causes `MultipleProjectsWithSameNameError`. **Fix**: Rename conflicting names (e.g. `@org/api` → `@org/teama-api`), update all dep references and import statements, `pnpm install`. The root `package.json` of each imported repo also becomes a project — rename those too. + +### Workspace Dep Import Ordering + +`pnpm install` fails during `nx import` if a `"workspace:*"` dependency hasn't been imported yet. File operations still succeed. **Fix**: Import all projects first, then `pnpm install --no-frozen-lockfile`. + +### `.gitkeep` Blocking Subdirectory Import + +The TS preset creates `packages/.gitkeep`. Remove it and commit before importing. + +### Frontend tsconfig Base Settings (Critical) + +The TS preset defaults (`module: "nodenext"`, `moduleResolution: "nodenext"`, `lib: ["es2022"]`) are incompatible with frontend frameworks (React, Next.js, Vue, Vite). After importing frontend projects, verify the dest root `tsconfig.base.json`: + +- **`moduleResolution`**: Must be `"bundler"` (not `"nodenext"`) +- **`module`**: Must be `"esnext"` (not `"nodenext"`) +- **`lib`**: Must include `"dom"` and `"dom.iterable"` (frontend projects need these) +- **`jsx`**: `"react-jsx"` for React-only workspaces, per-project for mixed frameworks + +For **subdirectory imports**, the dest root tsconfig is authoritative — update it. For **whole-repo imports**, imported projects may extend their own nested `tsconfig.base.json`, making this less critical. + +If the dest also has backend projects needing `nodenext`, use per-project overrides instead of changing the root. + +**Gotcha**: TypeScript does NOT merge `lib` arrays — a project-level override **replaces** the base array entirely. Always include all needed entries (e.g. `es2022`, `dom`, `dom.iterable`) in any project-level `lib`. + +### `@nx/react` Typings for Libraries + +React libraries generated with `@nx/react:library` reference `@nx/react/typings/cssmodule.d.ts` and `@nx/react/typings/image.d.ts` in their tsconfig `types`. These fail with `Cannot find type definition file` unless `@nx/react` is installed in the dest workspace. + +**Fix**: `pnpm add -wD @nx/react` + +### Jest Preset Missing (Subdirectory Import) + +Nx presets create `jest.preset.js` at the workspace root, and project jest configs reference it (e.g. `../../jest.preset.js`). Subdirectory import does NOT bring this file. + +**Fix**: + +1. Run `npx nx add @nx/jest` — registers `@nx/jest/plugin` in `nx.json` and updates `namedInputs` +2. Create `jest.preset.js` at workspace root (see `references/JEST.md` for content) — `nx add` only creates this when a generator runs, not on bare `nx add` +3. Install test runner deps: `pnpm add -wD jest jest-environment-jsdom ts-jest @types/jest` +4. Install framework-specific test deps as needed (see `references/JEST.md`) + +For deeper Jest issues (tsconfig.spec.json, Babel transforms, CI atomization, Jest vs Vitest coexistence), see `references/JEST.md`. + +### Target Name Prefixing (Whole-Repo Import) + +When importing a project with existing npm scripts (`build`, `dev`, `start`, `lint`), Nx plugins auto-prefix inferred target names to avoid conflicts: e.g. `next:build`, `vite:build`, `eslint:lint`. + +**Fix**: Remove the Nx-rewritten npm scripts from the imported `package.json`, then either: + +- Accept the prefixed names (e.g. `nx run app:next:build`) +- Rename plugin target names in `nx.json` to use unprefixed names + +## Non-Nx Source Issues + +When the source is a plain pnpm/npm workspace without `nx.json`. + +### npm Script Rewriting (Critical) + +Nx rewrites `package.json` scripts during init, creating broken commands (e.g. `vitest run` → `nx test run`). **Fix**: Remove all rewritten scripts — Nx plugins infer targets from config files. + +### `noEmit` → `composite` + `emitDeclarationOnly` (Critical) + +Plain TS projects use `"noEmit": true`, incompatible with Nx project references. + +**Symptoms**: "typecheck target is disabled because one or more project references set 'noEmit: true'" or TS6310. + +**Fix** in **all** imported tsconfigs: + +1. Remove `"noEmit": true`. If inherited via extends chain, set `"noEmit": false` explicitly. +2. Add `"composite": true`, `"emitDeclarationOnly": true`, `"declarationMap": true` +3. Add `"outDir": "dist"` and `"tsBuildInfoFile": "dist/tsconfig.tsbuildinfo"` +4. Add `"extends": "../../tsconfig.base.json"` if missing. Remove settings now inherited from base. + +### Stale node_modules and Lockfiles + +`nx import` may bring `node_modules/` (pnpm symlinks pointing to the source filesystem) and `pnpm-lock.yaml` from the source. Both are stale. + +**Fix**: `rm -rf imported/node_modules imported/pnpm-lock.yaml imported/pnpm-workspace.yaml imported/.gitignore`, then `pnpm install`. + +### ESLint Config Handling + +- **Legacy `.eslintrc.json` (ESLint 8)**: Delete all `.eslintrc.*`, remove v8 deps, create flat `eslint.config.mjs`. +- **Flat config (`eslint.config.js`)**: Self-contained configs can often be left as-is. +- **No ESLint**: Create both root and project-level configs from scratch. + +### TypeScript `paths` Aliases + +Nx uses `package.json` `"exports"` + pnpm workspace linking instead of tsconfig `"paths"`. If packages have proper `"exports"`, paths are redundant. Otherwise, update paths for the new directory structure. + +## Technology-specific Guidance + +Identify technologies in the source repo, then read and apply the matching reference file(s). + +Available references: + +- `references/ESLINT.md` — ESLint projects: duplicate `lint`/`eslint:lint` targets, legacy `.eslintrc.*` linting generated files, flat config `.cjs` self-linting, `typescript-eslint` v7/v9 peer dep conflict, mixed ESLint v8+v9 in one workspace. +- `references/GRADLE.md` +- `references/JEST.md` — Jest testing: `@nx/jest/plugin` setup, jest.preset.js, testing deps by framework, tsconfig.spec.json, Jest vs Vitest coexistence, Babel transforms, CI atomization. +- `references/NEXT.md` — Next.js projects: `@nx/next/plugin` targets, `withNx`, Next.js TS config (`noEmit`, `jsx: "preserve"`), auto-installing deps via wrong PM, non-Nx `create-next-app` imports, mixed Next.js+Vite coexistence. +- `references/TURBOREPO.md` +- `references/VITE.md` — Vite projects (React, Vue, or both): `@nx/vite/plugin` typecheck target, `resolve.alias`/`__dirname` fixes, framework deps, Vue-specific setup, mixed React+Vue coexistence. diff --git a/.opencode/skills/nx-import/references/VITE.md b/.opencode/skills/nx-import/references/VITE.md new file mode 100644 index 0000000..b777e5a --- /dev/null +++ b/.opencode/skills/nx-import/references/VITE.md @@ -0,0 +1,397 @@ +## Vite + +Vite-specific guidance for `nx import`. For generic import issues (pnpm globs, root deps, project references, name collisions, ESLint, frontend tsconfig base settings, `@nx/react` typings, Jest preset, non-Nx source handling), see `SKILL.md`. + +--- + +### `@nx/vite/plugin` Typecheck Target + +`@nx/vite/plugin` defaults `typecheckTargetName` to `"vite:typecheck"`. If the workspace expects `"typecheck"`, set it explicitly in `nx.json`. If `@nx/js/typescript` is also registered, rename one target to avoid conflicts (e.g. `"tsc-typecheck"` for the JS plugin). + +Keep both plugins only if the workspace has non-Vite pure TS libraries — `@nx/js/typescript` handles those while `@nx/vite/plugin` handles Vite projects. + +### @nx/vite Plugin Install Failure + +Plugin init loads `vite.config.ts` before deps are available. **Fix**: `pnpm add -wD vite @vitejs/plugin-react` (or `@vitejs/plugin-vue`) first, then `pnpm exec nx add @nx/vite`. + +### Vite `resolve.alias` and `__dirname` (Non-Nx Sources) + +**`__dirname` undefined** (CJS-only): Replace with `fileURLToPath(new URL('./src', import.meta.url))` from `'node:url'`. + +**`@/` path alias**: Vite's `resolve.alias` works at runtime but TS needs matching `"paths"`. Set `"baseUrl": "."` in project tsconfig. + +**PostCSS/Tailwind**: Verify `content` globs resolve correctly after import. + +### Missing TypeScript `types` (Non-Nx Sources) + +Non-Nx tsconfigs may not declare all needed types. Ensure Vite projects include `"types": ["node", "vite/client"]` in their tsconfig. + +### `noEmit` Fix: Vite-Specific Notes + +See SKILL.md for the generic noEmit→composite fix. Vite-specific additions: + +- Non-Nx Vite projects often have **both** `tsconfig.app.json` and `tsconfig.node.json` with `noEmit` — fix both +- Solution-style tsconfigs (`"files": [], "references": [...]`) may lack `extends`. Add `extends` pointing to the dest root `tsconfig.base.json` so base settings (`moduleResolution`, `lib`) apply. +- This is safe — Vite/Vitest ignore TypeScript emit settings. + +### Dependency Version Conflicts + +**Shared Vite deps (both frameworks):** `vite`, `vitest`, `jsdom`, `@types/node`, `typescript` (dev) + +**Vite 6→7**: Typecheck fails (`Plugin` type mismatch); build/serve still works. Fix: align versions. +**Vitest 3→4**: Usually works; type conflicts may surface in shared test utils. + +--- + +## React Router 7 (Vite-Based) + +React Router 7 (`@react-router/dev`) uses Vite under the hood with a `vite.config.ts` and a `react-router.config.ts`. The `@nx/vite/plugin` detects `vite.config.ts` and creates inferred targets. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `serve` targets. The `build` target invokes the script defined in `package.json` (usually `react-router build`), not `vite build` directly. + +**No separate typecheck target from `@nx/vite/plugin`** — React Router 7 typegen is run as part of `typecheck` (e.g. `react-router typegen && tsc`). The `typecheck` target is inferred from the tsconfig. Keep the `typecheck` script in `package.json` if present; it is not rewritten. + +### tsconfig Notes + +React Router 7 uses a single `tsconfig.json` (no `tsconfig.app.json`/`tsconfig.node.json` split). It includes: + +- `"rootDirs": [".", "./.react-router/types"]` — for generated type files; keep as-is +- `"paths": { "~/*": ["./app/*"] }` — self-referential alias; keep as-is +- `"noEmit": true` — replace with composite settings per SKILL.md + +### Build Output + +React Router 7 outputs to `build/` (not `dist/`). Add `build` to the dest root `.gitignore`. + +### Generated Types Directory + +React Router 7 generates `.react-router/` at the project root for route type generation. Add `.react-router` to the dest root `.gitignore`. + +--- + +## TanStack Start (Vite-Based) + +TanStack Start uses Vinxi under the hood, which wraps Vite. Projects have a standard `vite.config.ts` that `@nx/vite/plugin` detects normally. + +### Targets + +`@nx/vite/plugin` creates `build`, `dev`, `preview`, `serve-static`, `typecheck` targets. The `build` target runs `vite build` which invokes the TanStack Start Vinxi pipeline (produces both client and SSR bundles). + +### tsconfig Notes + +TanStack Start uses a single `tsconfig.json` with `"allowImportingTsExtensions": true` and `"noEmit": true`. Apply the standard noEmit → composite fix. `allowImportingTsExtensions` is compatible with `emitDeclarationOnly: true` — no change needed. + +### `paths` Aliases + +TanStack Start commonly uses `"#/*": ["./src/*"]` and `"@/*": ["./src/*"]`. These are self-referential — keep as-is for a single-project app. + +### Uncommitted Source Repo + +`create-tan-stack` initializes a git repo but does NOT make an initial commit. Before importing, commit first: + +```bash +git -C /path/to/source add . && git -C /path/to/source commit -m "Initial commit" +``` + +### Generated and Build Directories + +TanStack Start / Vinxi / Nitro generate several directories that must be added to the dest root `.gitignore`: + +- `.vinxi` — Vinxi build cache +- `.tanstack` — TanStack generated files +- `.nitro` — Nitro build artifacts +- `.output` — server-side build output (SSR/edge) + +These are not covered by `dist` or `build`. + +--- + +## React-Specific + +### React Dependencies + +**Production:** `react`, `react-dom` +**Dev:** `@types/react`, `@types/react-dom`, `@vitejs/plugin-react`, `@testing-library/react`, `@testing-library/jest-dom`, `jsdom` +**ESLint (Nx sources):** `eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks` +**ESLint (`create-vite`):** `eslint-plugin-react-refresh`, `eslint-plugin-react-hooks` — self-contained flat configs can be left as-is +**Nx plugins:** `@nx/react` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` + +### React TypeScript Configuration + +Add `"jsx": "react-jsx"` — in `tsconfig.base.json` for single-framework workspaces, per-project for mixed (see Mixed section). + +### React ESLint Config + +```js +import nx from "@nx/eslint-plugin" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...nx.configs["flat/react"], + { files: ["**/*.ts", "**/*.tsx"], rules: {} }, +] +``` + +### React Version Conflicts + +React 18 (source) + React 19 (dest): pnpm may hoist mismatched `react-dom`, causing `TypeError: Cannot read properties of undefined (reading 'S')`. **Fix**: Align versions with `pnpm.overrides`. + +### `@testing-library/jest-dom` with Vitest + +If source used Jest: change import to `@testing-library/jest-dom/vitest` in test-setup.ts, add to tsconfig `types`. + +--- + +## Vue-Specific + +### Vue Dependencies + +**Production:** `vue` (plus `vue-router`, `pinia` if used) +**Dev:** `@vitejs/plugin-vue`, `vue-tsc`, `@vue/test-utils`, `jsdom` +**ESLint:** `eslint-plugin-vue`, `vue-eslint-parser`, `@vue/eslint-config-typescript`, `@vue/eslint-config-prettier` +**Nx plugins:** `@nx/vue` (generators), `@nx/vite`, `@nx/vitest`, `@nx/eslint` (install AFTER deps — see below) + +### Vue TypeScript Configuration + +Add to `tsconfig.base.json` (single-framework) or per-project (mixed): + +```json +{ "jsx": "preserve", "jsxImportSource": "vue", "resolveJsonModule": true } +``` + +### `vue-shims.d.ts` + +Vue SFC files need a type declaration. Usually exists in each project's `src/` and imports cleanly. If missing: + +```ts +declare module "*.vue" { + import { defineComponent } from "vue" + const component: ReturnType + export default component +} +``` + +### `vue-tsc` Auto-Detection + +Both `@nx/js/typescript` and `@nx/vite/plugin` auto-detect `vue-tsc` when installed — no manual config needed. Remove source scripts like `"typecheck": "vue-tsc --noEmit"`. + +### ESLint Plugin Installation Order (Critical) + +`@nx/eslint` init **crashes** if Vue ESLint deps aren't installed first (it loads all config files). + +**Correct order:** + +1. `pnpm add -wD eslint@^9 eslint-plugin-vue vue-eslint-parser @vue/eslint-config-typescript @typescript-eslint/parser @nx/eslint-plugin typescript-eslint` +2. Create root `eslint.config.mjs` +3. Then `npx nx add @nx/eslint` + +### Vue ESLint Config Pattern + +```js +import vue from "eslint-plugin-vue" +import vueParser from "vue-eslint-parser" +import tsParser from "@typescript-eslint/parser" +import baseConfig from "../../eslint.config.mjs" +export default [ + ...baseConfig, + ...vue.configs["flat/recommended"], + { + files: ["**/*.vue"], + languageOptions: { parser: vueParser, parserOptions: { parser: tsParser } }, + }, + { + files: ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx", "**/*.vue"], + rules: { "vue/multi-word-component-names": "off" }, + }, +] +``` + +**Important**: `vue-eslint-parser` override must come **AFTER** base config — `flat/typescript` sets the TS parser globally without a `files` filter, breaking `.vue` parsing. + +`vue-eslint-parser` must be an explicit pnpm dependency (strict resolution prevents transitive import). + +**Known issue**: Some generated Vue ESLint configs omit `vue-eslint-parser`. Use the pattern above instead. + +--- + +## Mixed React + Vue + +When both frameworks coexist, several settings become per-project. + +### tsconfig `jsx` — Per-Project Only + +- React: `"jsx": "react-jsx"` in project tsconfig +- Vue: `"jsx": "preserve"`, `"jsxImportSource": "vue"` in project tsconfig +- Root: **NO** `jsx` setting + +### Typecheck — Auto-Detects Framework + +`@nx/vite/plugin` uses `vue-tsc` for Vue projects and `tsc` for React automatically. + +```json +{ + "plugins": [ + { "plugin": "@nx/eslint/plugin", "options": { "targetName": "lint" } }, + { + "plugin": "@nx/vite/plugin", + "options": { + "buildTargetName": "build", + "typecheckTargetName": "typecheck", + "testTargetName": "test" + } + } + ] +} +``` + +Remove `@nx/js/typescript` if all projects use Vite. Keep it (renamed to `"tsc-typecheck"`) only for non-Vite pure TS libs. + +### ESLint — Three-Tier Config + +1. **Root**: Base rules only, no framework-specific rules +2. **React projects**: Extend root + `nx.configs['flat/react']` +3. **Vue projects**: Extend root + `vue.configs['flat/recommended']` + `vue-eslint-parser` + +**Required packages**: Shared (`eslint@^9`, `@nx/eslint-plugin`, `typescript-eslint`, `@typescript-eslint/parser`), React (`eslint-plugin-import`, `eslint-plugin-jsx-a11y`, `eslint-plugin-react`, `eslint-plugin-react-hooks`), Vue (`eslint-plugin-vue`, `vue-eslint-parser`) + +`@nx/react`/`@nx/vue` are for generators only — no target conflicts. + +--- + +## Redundant npm Scripts After Import + +`nx import` copies `package.json` verbatim, so npm scripts come along. For Vite-based projects `@nx/vite/plugin` already infers the same targets from `vite.config.ts` — the npm scripts just shadow the plugin with weaker `nx:run-script` wrappers (no first-class caching inputs/outputs). Remove them after import. + +### Standalone Vite App (`create-vite`) + +Remove the following scripts — every one is redundant: + +| Script | Plugin replacement | +| ----------------------------- | ---------------------------------------------------------------------------- | +| `dev: vite` | `@nx/vite/plugin` → `dev` | +| `build: tsc -b && vite build` | `@nx/vite/plugin` → `build`; `typecheck` via `@nx/js/typescript` handles tsc | +| `preview: vite preview` | `@nx/vite/plugin` → `preview` | +| `lint: eslint .` | `@nx/eslint/plugin` → `eslint:lint` | + +### TanStack Start + +Remove `build`, `dev`, `preview`, and `test` scripts, but move any hardcoded `--port` flag to `vite.config.ts` first: + +```ts +// vite.config.ts +export default defineConfig({ + server: { port: 3000 }, // replaces `vite dev --port 3000` + ... +}) +``` + +### React Router 7 — Keep ALL scripts + +Do **not** remove React Router 7 scripts. They use the framework CLI (`react-router build`, `react-router dev`, `react-router-serve`) which is not interchangeable with plain `vite`: + +- `typecheck` runs `react-router typegen && tsc` — typegen must precede `tsc` or it fails on missing route types +- `start` serves the SSR bundle — no plugin equivalent + +--- + +## Fix Orders + +### Nx Source + +1. Generic fixes from SKILL.md (pnpm globs, root deps, executor paths, frontend tsconfig base settings, `@nx/react` typings) +2. Configure `@nx/vite/plugin` typecheck target +3. **React**: `jsx: "react-jsx"` (root or per-project) +4. **Vue**: `jsx: "preserve"` + `jsxImportSource: "vue"`; verify `vue-shims.d.ts`; install ESLint deps before `@nx/eslint` +5. **Mixed**: `jsx` per-project; remove/rename `@nx/js/typescript` +6. `nx sync --yes && nx reset && nx run-many -t typecheck,build,test,lint` + +### Non-Nx Source (additional steps) + +0. Import into `apps/` (see SKILL.md: "Application vs Library Detection") +1. Generic fixes from SKILL.md (stale files cleanup, pnpm globs, rewritten scripts, target name prefixing, noEmit→composite, ESLint handling) +2. Fix `noEmit` in **all** tsconfigs (app, node, etc. — non-Nx projects often have multiple) +3. Add `extends` to solution-style tsconfigs so root settings apply +4. Fix `resolve.alias` / `__dirname` / `baseUrl` +5. Ensure `types` include `vite/client` and `node` +6. Install `@nx/vite` manually if it failed during import +7. Remove redundant npm scripts so `@nx/vite/plugin` infers them natively (see "Redundant npm Scripts" section) +8. **Vue**: Add `outDir` + `**/*.vue.d.ts` to ESLint ignores +9. Full verification + +### Multiple-Source Imports + +See SKILL.md for generic multi-import (name collisions, dep refs). Vite-specific: fix tsconfig `references` paths for alternate directories (`../../libs/` → `../../libs-beta/`). + +### Non-Nx Source: React Router 7 + +1. Ensure source has at least one commit (see SKILL.md: "Source Repo Has No Commits") +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/react` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Add `build` and `.react-router` to dest root `.gitignore` +6. **Keep all npm scripts** — React Router 7 uses framework CLI (`react-router build/dev`), not plain vite (see "Redundant npm Scripts" above) +7. `npm install && nx reset && nx sync --yes` + +### Non-Nx Source: TanStack Start + +1. Ensure source has at least one commit — `create-tan-stack` does NOT auto-commit (see SKILL.md) +2. `nx import` whole-repo into `apps/` (see SKILL.md: "Application vs Library Detection") → auto-installs `@nx/vite`, `@nx/vitest` +3. Stale file cleanup: `node_modules/`, `package-lock.json`, `.gitignore` +4. Fix `tsconfig.json`: `noEmit` → `composite + emitDeclarationOnly + outDir + tsBuildInfoFile` +5. Keep `allowImportingTsExtensions` — compatible with `emitDeclarationOnly: true` +6. Add `.vinxi`, `.tanstack`, `.nitro`, `.output` to dest root `.gitignore` +7. Move hardcoded `--port` from `dev` script into `vite.config.ts` (`server: { port: N }`) +8. Remove redundant npm scripts — `@nx/vite/plugin` infers `build`, `dev`, `preview`, `test` (see "Redundant npm Scripts" above) +9. `npm install && nx reset && nx sync --yes` + +### Quick Reference: React vs Vue + +| Aspect | React | Vue | +| ------------- | ------------------------ | ----------------------------------------- | +| Vite plugin | `@vitejs/plugin-react` | `@vitejs/plugin-vue` | +| Type checker | `tsc` | `vue-tsc` (auto-detected) | +| SFC support | N/A | `vue-shims.d.ts` needed | +| tsconfig jsx | `"react-jsx"` | `"preserve"` + `"jsxImportSource": "vue"` | +| ESLint parser | Standard TS | `vue-eslint-parser` + TS sub-parser | +| ESLint setup | Straightforward | Must install deps before `@nx/eslint` | +| Test utils | `@testing-library/react` | `@vue/test-utils` | + +### Quick Reference: Vite-Based React Frameworks + +| Aspect | Vite (standalone) | React Router 7 | TanStack Start | +| ------------------ | ----------------- | ----------------------- | ------------------------ | +| Build config | `vite.config.ts` | `vite.config.ts` | `vite.config.ts` | +| Build output | `dist/` | `build/` | `dist/` | +| SSR bundle | No | Yes (`build/server/`) | Yes (`dist/server/`) | +| tsconfig layout | app + node split | Single tsconfig | Single tsconfig | +| Auto-committed | Depends on tool | Usually yes | **No — commit first** | +| `nx import` plugin | `@nx/vite` | `@nx/vite`, `@nx/react` | `@nx/vite`, `@nx/vitest` | + +--- + +## Iteration Log + +### Scenario 6: Multiple non-Nx React apps (CRA, Next.js, React Router 7, TanStack Start, Vite) → TS preset (PASS) + +- Sources: 5 standalone non-Nx repos with different build tools +- Dest: CNW ts preset (Nx 22.5.1), npm workspaces, `packages/*` +- Import: whole-repo for each, sequential into `packages/` +- Pre-import fixes: + 1. Removed `packages/.gitkeep` and committed + 2. `git init && git add . && git commit` in Vite app (no git at all) + 3. `git add . && git commit` in TanStack app (git init'd but no commits) +- Import: `npm exec nx -- import packages/ --source=. --ref=main --no-interactive` + - Next.js import auto-installed `@nx/eslint`, `@nx/next` + - React Router 7 import auto-installed `@nx/vite`, `@nx/react`, `@nx/docker` (Dockerfile present) + - TanStack import auto-installed `@nx/vitest` +- Post-import fixes: + 1. Removed stale `node_modules/`, `package-lock.json`, `.gitignore` from each package + 2. Removed Nx-rewritten scripts from `board-games-nextjs/package.json` (had `"build": "nx next:build"`, etc.) + 3. Updated root `tsconfig.base.json`: `nodenext` → `bundler`, added `dom`/`dom.iterable` to lib, added `jsx: react-jsx` + 4. Added `build` to dest root `.gitignore` (CRA and React Router 7 output there) + 5. Fixed `noEmit` → `composite + emitDeclarationOnly` in: `board-games-vite/tsconfig.app.json`, `board-games-vite/tsconfig.node.json`, `board-games-react-router/tsconfig.json`, `board-games-tanstack/tsconfig.json` + 6. Fixed `tsBuildInfoFile` paths from `./node_modules/.tmp/...` to `./dist/...` + 7. Installed root `@types/react`, `@types/react-dom`, `@types/node` +- All targets green: `build` for all 5 projects; `typecheck` for Vite/React Router/TanStack; `next:build` for Next.js diff --git a/.opencode/skills/nx-plugins/SKILL.md b/.opencode/skills/nx-plugins/SKILL.md new file mode 100644 index 0000000..89223c7 --- /dev/null +++ b/.opencode/skills/nx-plugins/SKILL.md @@ -0,0 +1,9 @@ +--- +name: nx-plugins +description: Find and add Nx plugins. USE WHEN user wants to discover available plugins, install a new plugin, or add support for a specific framework or technology to the workspace. +--- + +## Finding and Installing new plugins + +- List plugins: `pnpm nx list` +- Install plugins `pnpm nx add `. Example: `pnpm nx add @nx/react`. diff --git a/.opencode/skills/nx-run-tasks/SKILL.md b/.opencode/skills/nx-run-tasks/SKILL.md new file mode 100644 index 0000000..7f1263a --- /dev/null +++ b/.opencode/skills/nx-run-tasks/SKILL.md @@ -0,0 +1,58 @@ +--- +name: nx-run-tasks +description: Helps with running tasks in an Nx workspace. USE WHEN the user wants to execute build, test, lint, serve, or run any other tasks defined in the workspace. +--- + +You can run tasks with Nx in the following way. + +Keep in mind that you might have to prefix things with npx/pnpx/yarn if the user doesn't have nx installed globally. Look at the package.json or lockfile to determine which package manager is in use. + +For more details on any command, run it with `--help` (e.g. `nx run-many --help`, `nx affected --help`). + +## Understand which tasks can be run + +You can check those via `nx show project --json`, for example `nx show project myapp --json`. It contains a `targets` section which has information about targets that can be run. You can also just look at the `package.json` scripts or `project.json` targets, but you might miss out on inferred tasks by Nx plugins. + +## Run a single task + +``` +nx run : +``` + +where `project` is the project name defined in `package.json` or `project.json` (if present). + +## Run multiple tasks + +``` +nx run-many -t build test lint typecheck +``` + +You can pass a `-p` flag to filter to specific projects, otherwise it runs on all projects. You can also use `--exclude` to exclude projects, and `--parallel` to control the number of parallel processes (default is 3). + +Examples: + +- `nx run-many -t test -p proj1 proj2` — test specific projects +- `nx run-many -t test --projects=*-app --exclude=excluded-app` — test projects matching a pattern +- `nx run-many -t test --projects=tag:api-*` — test projects by tag + +## Run tasks for affected projects + +Use `nx affected` to only run tasks on projects that have been changed and projects that depend on changed projects. This is especially useful in CI and for large workspaces. + +``` +nx affected -t build test lint +``` + +By default it compares against the base branch. You can customize this: + +- `nx affected -t test --base=main --head=HEAD` — compare against a specific base and head +- `nx affected -t test --files=libs/mylib/src/index.ts` — specify changed files directly + +## Useful flags + +These flags work with `run`, `run-many`, and `affected`: + +- `--skipNxCache` — rerun tasks even when results are cached +- `--verbose` — print additional information such as stack traces +- `--nxBail` — stop execution after the first failed task +- `--configuration=` — use a specific configuration (e.g. `production`) diff --git a/.opencode/skills/nx-workspace/SKILL.md b/.opencode/skills/nx-workspace/SKILL.md new file mode 100644 index 0000000..4b5110a --- /dev/null +++ b/.opencode/skills/nx-workspace/SKILL.md @@ -0,0 +1,286 @@ +--- +name: nx-workspace +description: "Explore and understand Nx workspaces. USE WHEN answering questions about the workspace, projects, or tasks. ALSO USE WHEN an nx command fails or you need to check available targets/configuration before running a task. EXAMPLES: 'What projects are in this workspace?', 'How is project X configured?', 'What depends on library Y?', 'What targets can I run?', 'Cannot find configuration for task', 'debug nx task failure'." +--- + +# Nx Workspace Exploration + +This skill provides read-only exploration of Nx workspaces. Use it to understand workspace structure, project configuration, available targets, and dependencies. + +Keep in mind that you might have to prefix commands with `npx`/`pnpx`/`yarn` if nx isn't installed globally. Check the lockfile to determine the package manager in use. + +## Listing Projects + +Use `nx show projects` to list projects in the workspace. + +The project filtering syntax (`-p`/`--projects`) works across many Nx commands including `nx run-many`, `nx release`, `nx show projects`, and more. Filters support explicit names, glob patterns, tag references (e.g. `tag:name`), directories, and negation (e.g. `!project-name`). + +```bash +# List all projects +nx show projects + +# Filter by pattern (glob) +nx show projects --projects "apps/*" +nx show projects --projects "shared-*" + +# Filter by tag +nx show projects --projects "tag:publishable" +nx show projects -p 'tag:publishable,!tag:internal' + +# Filter by target (projects that have a specific target) +nx show projects --withTarget build + +# Combine filters +nx show projects --type lib --withTarget test +nx show projects --affected --exclude="*-e2e" +nx show projects -p "tag:scope:client,packages/*" + +# Negate patterns +nx show projects -p '!tag:private' +nx show projects -p '!*-e2e' + +# Output as JSON +nx show projects --json +``` + +## Project Configuration + +Use `nx show project --json` to get the full resolved configuration for a project. + +**Important**: Do NOT read `project.json` directly - it only contains partial configuration. The `nx show project --json` command returns the full resolved config including inferred targets from plugins. + +You can read the full project schema at `node_modules/nx/schemas/project-schema.json` to understand nx project configuration options. + +```bash +# Get full project configuration +nx show project my-app --json + +# Extract specific parts from the JSON +nx show project my-app --json | jq '.targets' +nx show project my-app --json | jq '.targets.build' +nx show project my-app --json | jq '.targets | keys' + +# Check project metadata +nx show project my-app --json | jq '{name, root, sourceRoot, projectType, tags}' +``` + +## Target Information + +Targets define what tasks can be run on a project. + +```bash +# List all targets for a project +nx show project my-app --json | jq '.targets | keys' + +# Get full target configuration +nx show project my-app --json | jq '.targets.build' + +# Check target executor/command +nx show project my-app --json | jq '.targets.build.executor' +nx show project my-app --json | jq '.targets.build.command' + +# View target options +nx show project my-app --json | jq '.targets.build.options' + +# Check target inputs/outputs (for caching) +nx show project my-app --json | jq '.targets.build.inputs' +nx show project my-app --json | jq '.targets.build.outputs' + +# Find projects with a specific target +nx show projects --withTarget serve +nx show projects --withTarget e2e +``` + +## Workspace Configuration + +Read `nx.json` directly for workspace-level configuration. +You can read the full project schema at `node_modules/nx/schemas/nx-schema.json` to understand nx project configuration options. + +```bash +# Read the full nx.json +cat nx.json + +# Or use jq for specific sections +cat nx.json | jq '.targetDefaults' +cat nx.json | jq '.namedInputs' +cat nx.json | jq '.plugins' +cat nx.json | jq '.generators' +``` + +Key nx.json sections: + +- `targetDefaults` - Default configuration applied to all targets of a given name +- `namedInputs` - Reusable input definitions for caching +- `plugins` - Nx plugins and their configuration +- ...and much more, read the schema or nx.json for details + +## Affected Projects + +If the user is asking about affected projects, read the [affected projects reference](references/AFFECTED.md) for detailed commands and examples. + +## Common Exploration Patterns + +### "What's in this workspace?" + +```bash +nx show projects +nx show projects --type app +nx show projects --type lib +``` + +### "How do I build/test/lint project X?" + +```bash +nx show project X --json | jq '.targets | keys' +nx show project X --json | jq '.targets.build' +``` + +### "What depends on library Y?" + +```bash +# Use the project graph to find dependents +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "Y") | .key' +``` + +## Programmatic Answers + +When processing nx CLI results, use command-line tools to compute the answer programmatically rather than counting or parsing output manually. Always use `--json` flags to get structured output that can be processed with `jq`, `grep`, or other tools you have installed locally. + +### Listing Projects + +```bash +nx show projects --json +``` + +Example output: + +```json +["my-app", "my-app-e2e", "shared-ui", "shared-utils", "api"] +``` + +Common operations: + +```bash +# Count projects +nx show projects --json | jq 'length' + +# Filter by pattern +nx show projects --json | jq '.[] | select(startswith("shared-"))' + +# Get affected projects as array +nx show projects --affected --json | jq '.' +``` + +### Project Details + +```bash +nx show project my-app --json +``` + +Example output: + +```json +{ + "root": "apps/my-app", + "name": "my-app", + "sourceRoot": "apps/my-app/src", + "projectType": "application", + "tags": ["type:app", "scope:client"], + "targets": { + "build": { + "executor": "@nx/vite:build", + "options": { "outputPath": "dist/apps/my-app" } + }, + "serve": { + "executor": "@nx/vite:dev-server", + "options": { "buildTarget": "my-app:build" } + }, + "test": { + "executor": "@nx/vite:test", + "options": {} + } + }, + "implicitDependencies": [] +} +``` + +Common operations: + +```bash +# Get target names +nx show project my-app --json | jq '.targets | keys' + +# Get specific target config +nx show project my-app --json | jq '.targets.build' + +# Get tags +nx show project my-app --json | jq '.tags' + +# Get project root +nx show project my-app --json | jq -r '.root' +``` + +### Project Graph + +```bash +nx graph --print +``` + +Example output: + +```json +{ + "graph": { + "nodes": { + "my-app": { + "name": "my-app", + "type": "app", + "data": { "root": "apps/my-app", "tags": ["type:app"] } + }, + "shared-ui": { + "name": "shared-ui", + "type": "lib", + "data": { "root": "libs/shared-ui", "tags": ["type:ui"] } + } + }, + "dependencies": { + "my-app": [ + { "source": "my-app", "target": "shared-ui", "type": "static" } + ], + "shared-ui": [] + } + } +} +``` + +Common operations: + +```bash +# Get all project names from graph +nx graph --print | jq '.graph.nodes | keys' + +# Find dependencies of a project +nx graph --print | jq '.graph.dependencies["my-app"]' + +# Find projects that depend on a library +nx graph --print | jq '.graph.dependencies | to_entries[] | select(.value[].target == "shared-ui") | .key' +``` + +## Troubleshooting + +### "Cannot find configuration for task X:target" + +```bash +# Check what targets exist on the project +nx show project X --json | jq '.targets | keys' + +# Check if any projects have that target +nx show projects --withTarget target +``` + +### "The workspace is out of sync" + +```bash +nx sync +nx reset # if sync doesn't fix stale cache +``` diff --git a/.opencode/skills/nx-workspace/references/AFFECTED.md b/.opencode/skills/nx-workspace/references/AFFECTED.md new file mode 100644 index 0000000..e30f18f --- /dev/null +++ b/.opencode/skills/nx-workspace/references/AFFECTED.md @@ -0,0 +1,27 @@ +## Affected Projects + +Find projects affected by changes in the current branch. + +```bash +# Affected since base branch (auto-detected) +nx show projects --affected + +# Affected with explicit base +nx show projects --affected --base=main +nx show projects --affected --base=origin/main + +# Affected between two commits +nx show projects --affected --base=abc123 --head=def456 + +# Affected apps only +nx show projects --affected --type app + +# Affected excluding e2e projects +nx show projects --affected --exclude="*-e2e" + +# Affected by uncommitted changes +nx show projects --affected --uncommitted + +# Affected by untracked files +nx show projects --affected --untracked +``` diff --git a/AGENTS.md b/AGENTS.md index 269c2c8..5fcd2ca 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -3,21 +3,22 @@ # General Guidelines for working with Nx +- For navigating/exploring the workspace, invoke the `nx-workspace` skill first - it has patterns for querying projects, targets, and dependencies - When running tasks (for example build, lint, test, e2e, etc.), always prefer running the task through `nx` (i.e. `nx run`, `nx run-many`, `nx affected`) instead of using the underlying tooling directly +- Prefix nx commands with the workspace's package manager (e.g., `pnpm nx build`, `npm exec nx test`) - avoids using globally installed CLI - You have access to the Nx MCP server and its tools, use them to help the user -- When answering questions about the repository, use the `nx_workspace` tool first to gain an understanding of the workspace architecture where applicable. -- When working in individual projects, use the `nx_project_details` mcp tool to analyze and understand the specific project structure and dependencies -- For questions around nx configuration, best practices or if you're unsure, use the `nx_docs` tool to get relevant, up-to-date docs. Always use this instead of assuming things about nx configuration -- If the user needs help with an Nx configuration or project graph error, use the `nx_workspace` tool to get any errors +- For Nx plugin best practices, check `node_modules/@nx//PLUGIN.md`. Not all plugins have this file - proceed without it if unavailable. +- NEVER guess CLI flags - always check nx_docs or `--help` first when unsure -# CI Error Guidelines +## Scaffolding & Generators -If the user wants help with fixing an error in their CI pipeline, use the following flow: +- For scaffolding tasks (creating apps, libs, project structure, setup), ALWAYS invoke the `nx-generate` skill FIRST before exploring or calling MCP tools -- Retrieve the list of current CI Pipeline Executions (CIPEs) using the `nx_cloud_cipe_details` tool -- If there are any errors, use the `nx_cloud_fix_cipe_failure` tool to retrieve the logs for a specific task -- Use the task logs to see what's wrong and help the user fix their problem. Use the appropriate tools if necessary -- Make sure that the problem is fixed by running the task that you passed into the `nx_cloud_fix_cipe_failure` tool +## When to use nx_docs + +- USE for: advanced config options, unfamiliar flags, migration guides, plugin configuration, edge cases +- DON'T USE for: basic generator syntax (`nx g @nx/react:app`), standard commands, things you already know +- The `nx-generate` skill handles generator discovery internally - don't call nx_docs just to look up generator syntax diff --git a/docs/tasks/hatchet-tasks.md b/docs/tasks/hatchet-tasks.md index 45dfbcb..03a1ff3 100644 --- a/docs/tasks/hatchet-tasks.md +++ b/docs/tasks/hatchet-tasks.md @@ -138,7 +138,7 @@ Implement the bridge that converts Effect → Promise for Hatchet task execution - **Files**: `packages/hatchet/src/effectifier/index.ts` - **Depends on**: TASK-018, TASK-019 -- [ ] [TASK-021] Create `packages/hatchet/tests/unit/effectifier.test.ts` — Effectifier tests +- [x] [TASK-021] Create `packages/hatchet/tests/unit/effectifier.test.ts` — Effectifier tests - **Files**: `packages/hatchet/tests/unit/effectifier.test.ts` - **Depends on**: TASK-020 - **Spec ref**: REQ-EFFECT-01, REQ-EFFECT-02, REQ-EFFECT-03, REQ-EFFECT-04, REQ-EFFECT-05, REQ-EFFECT-06, REQ-FACTORY-01, REQ-FACTORY-02, REQ-FACTORY-03, REQ-FACTORY-04 @@ -296,7 +296,7 @@ Create integration tests against real Hatchet engine. Finalize package for release. -- [ ] [TASK-043] Create `packages/hatchet/README.md` — Package documentation +- [x] [TASK-043] Create `packages/hatchet/README.md` — Package documentation - **Files**: `packages/hatchet/README.md` - **Depends on**: TASK-036, TASK-038 diff --git a/opencode.json b/opencode.json new file mode 100644 index 0000000..c57851b --- /dev/null +++ b/opencode.json @@ -0,0 +1,13 @@ +{ + "mcp": { + "nx-mcp": { + "type": "local", + "command": [ + "npx", + "nx", + "mcp" + ], + "enabled": true + } + } +} diff --git a/packages/hatchet/README.md b/packages/hatchet/README.md index ca2d0cc..912d5d5 100644 --- a/packages/hatchet/README.md +++ b/packages/hatchet/README.md @@ -223,6 +223,19 @@ yield * Effect.fail(new Error("Failed")) // Avoid this - Hatchet v1.19.0+ - TypeScript 5.0+ +## Architecture + +For detailed architecture decisions and implementation details, see: + +- [Specs](./docs/specs/hatchet-spec.md) - Detailed requirement specifications +- [Design](./docs/design/hatchet-design.md) - Architecture and design decisions + +The `effectifier` module (internal) handles the conversion from Effect to Hatchet's Promise-based task functions, providing: + +- `effectifyTask` - Core function that wraps an Effect with a ManagedRuntime +- Context injection - Automatic injection of HatchetStepContext into Effects +- Error propagation - Proper error mapping for Hatchet's retry mechanism + ## License MIT diff --git a/packages/hatchet/tests/unit/effectifier.test.ts b/packages/hatchet/tests/unit/effectifier.test.ts new file mode 100644 index 0000000..0da7ae1 --- /dev/null +++ b/packages/hatchet/tests/unit/effectifier.test.ts @@ -0,0 +1,413 @@ +/** + * @effectify/hatchet - Effectifier Tests + * + * Unit tests for the effectifier module that converts Effect → Promise for Hatchet + */ + +import { describe, expect, it, vi } from "vitest" +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import * as ManagedRuntime from "effect/ManagedRuntime" +import * as ServiceMap from "effect/ServiceMap" +import type { Context as HatchetContext } from "@hatchet-dev/typescript-sdk" +import { HatchetStepContext } from "@effectify/hatchet" +import { createMockContext } from "@effectify/hatchet" +import { + createEffectifierFromLayer, + createEffectifierFromServiceMap, + effectifyTask, +} from "../../src/effectifier/execute.js" + +// Sample service for testing using ServiceMap pattern +class TestService extends ServiceMap.Service()( + "TestService", +) {} + +// Layer that provides TestService +const TestServiceLayer = Layer.effect( + TestService, + Effect.succeed("test-service-value"), +) + +// Effect that uses TestService +const effectWithService = Effect.gen(function*() { + const service = yield* TestService + return `service: ${service}` +}) + +// Effect that uses both TestService and HatchetStepContext +const effectWithServiceAndContext = Effect.gen(function*() { + const service = yield* TestService + const ctx = yield* HatchetStepContext + return `service: ${service}, task: ${ctx.taskName()}` +}) + +describe("effectifyTask", () => { + it("should execute Effect successfully and return value", async () => { + // Create a simple effect that succeeds + const simpleEffect = Effect.succeed("hello-world") + + // Create runtime with empty layer + const runtime = ManagedRuntime.make(Layer.empty) + + // Create the effectified task function + const taskFn = effectifyTask(simpleEffect, runtime) + + // Execute with mock context + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("hello-world") + + await runtime.dispose() + }) + + it("should pass HatchetContext as HatchetStepContext service", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.taskName() + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ taskName: "my-custom-task" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("my-custom-task") + + await runtime.dispose() + }) + + it("should inject service dependencies from runtime", async () => { + const runtime = ManagedRuntime.make(TestServiceLayer) + const taskFn = effectifyTask(effectWithService, runtime) + + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("service: test-service-value") + + await runtime.dispose() + }) + + it("should work with both service and context", async () => { + const runtime = ManagedRuntime.make(TestServiceLayer) + const taskFn = effectifyTask(effectWithServiceAndContext, runtime) + + const mockCtx = createMockContext({ taskName: "context-task" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("service: test-service-value, task: context-task") + + await runtime.dispose() + }) + + it("should throw error when Effect fails", async () => { + const failingEffect = Effect.fail(new Error("task failed")) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(failingEffect, runtime) + + const mockCtx = createMockContext() + + await expect(taskFn({}, mockCtx)).rejects.toThrow("task failed") + + await runtime.dispose() + }) + + it("should throw non-Error failures as Error with string", async () => { + const failingEffect = Effect.fail("string error") + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(failingEffect, runtime) + + const mockCtx = createMockContext() + + // Should throw an Error with the string value + await expect(taskFn({}, mockCtx)).rejects.toThrow("string error") + + await runtime.dispose() + }) + + it("should receive input from Hatchet", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.input as { value: string } + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ input: { value: "test-input" } }) + const result = await taskFn({ value: "test-input" }, mockCtx) + + expect(result).toEqual({ value: "test-input" }) + + await runtime.dispose() + }) + + it("should provide workflow name to effect", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.workflowName() + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ workflowName: "my-workflow" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("my-workflow") + + await runtime.dispose() + }) + + it("should provide workflow run ID to effect", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.workflowRunId() + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ workflowRunId: "run-123" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("run-123") + + await runtime.dispose() + }) + + it("should handle retry count from context", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.retryCount() + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ retryCount: 3 }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe(3) + + await runtime.dispose() + }) + + it("should propagate typed errors from Effect", async () => { + class CustomError extends Error { + readonly _tag = "CustomError" + constructor(message: string) { + super(message) + this.name = "CustomError" + } + } + + const failingEffect = Effect.fail(new CustomError("custom error")) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(failingEffect, runtime) + + const mockCtx = createMockContext() + + // Should throw the original error + await expect(taskFn({}, mockCtx)).rejects.toThrow("custom error") + + await runtime.dispose() + }) +}) + +describe("createEffectifierFromLayer", () => { + it("should create effectifier from Layer", async () => { + const effectify = createEffectifierFromLayer(TestServiceLayer) + + const taskFn = effectify(effectWithService) + + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("service: test-service-value") + }) + + it("should return a function that can be called multiple times", async () => { + const effectify = createEffectifierFromLayer(TestServiceLayer) + + const taskFn1 = effectify(Effect.succeed("result-1")) + const taskFn2 = effectify(Effect.succeed("result-2")) + + const mockCtx = createMockContext() + + const result1 = await taskFn1({}, mockCtx) + const result2 = await taskFn2({}, mockCtx) + + expect(result1).toBe("result-1") + expect(result2).toBe("result-2") + }) + + it("should preserve context in created function", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.taskName() + }) + + const effectify = createEffectifierFromLayer(Layer.empty) + const taskFn = effectify(effect) + + const mockCtx = createMockContext({ taskName: "layer-task" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("layer-task") + }) +}) + +describe("createEffectifierFromServiceMap", () => { + it("should create effectifier from ServiceMap (Layer)", async () => { + const services = Layer.succeed(TestService, "from-service-map") + + const effect = Effect.gen(function*() { + const service = yield* TestService + return service + }) + + const effectify = createEffectifierFromServiceMap(services) + const taskFn = effectify(effect) + + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("from-service-map") + }) + + it("should behave the same as createEffectifierFromLayer", async () => { + const effect = Effect.succeed("test") + + const effectify1 = createEffectifierFromLayer(Layer.empty) + const effectify2 = createEffectifierFromServiceMap(Layer.empty) + + const mockCtx = createMockContext() + + const result1 = await effectify1(effect)({}, mockCtx) + const result2 = await effectify2(effect)({}, mockCtx) + + expect(result1).toBe(result2) + }) +}) + +describe("Effect with complex error handling", () => { + it("should handle Effect that catches and transforms errors", async () => { + const effect = Effect.gen(function*() { + const result = yield* Effect.tryPromise({ + try: async () => { + throw new Error("async error") + }, + catch: (error) => new Error(`caught: ${error}`), + }) + return result + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext() + + await expect(taskFn({}, mockCtx)).rejects.toThrow( + "caught: Error: async error", + ) + + await runtime.dispose() + }) + + it("should handle Effect with flatMap/pipe", async () => { + const effect = Effect.succeed(5).pipe( + Effect.flatMap((n) => Effect.succeed(n * 2)), + Effect.map((n) => `result: ${n}`), + ) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("result: 10") + + await runtime.dispose() + }) + + it("should handle async Effect with delay", async () => { + const effect = Effect.gen(function*() { + yield* Effect.sleep(10) + return "delayed-result" + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext() + const result = await taskFn({}, mockCtx) + + expect(result).toBe("delayed-result") + + await runtime.dispose() + }) +}) + +describe("Context access patterns", () => { + it("should allow accessing all context methods", async () => { + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return { + taskName: ctx.taskName(), + workflowName: ctx.workflowName(), + workflowRunId: ctx.workflowRunId(), + retryCount: ctx.retryCount(), + input: ctx.input, + } + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ + taskName: "test-task", + workflowName: "test-workflow", + workflowRunId: "run-abc", + retryCount: 2, + input: { key: "value" }, + }) + + const result = await taskFn({}, mockCtx) + + expect(result).toEqual({ + taskName: "test-task", + workflowName: "test-workflow", + workflowRunId: "run-abc", + retryCount: 2, + input: { key: "value" }, + }) + + await runtime.dispose() + }) + + it("should work with complex Effect patterns for context access", async () => { + // Using Effect.gen with yield* to access context - standard pattern + const effect = Effect.gen(function*() { + const ctx = yield* HatchetStepContext + return ctx.taskName() + }) + + const runtime = ManagedRuntime.make(Layer.empty) + const taskFn = effectifyTask(effect, runtime) + + const mockCtx = createMockContext({ taskName: "request-task" }) + const result = await taskFn({}, mockCtx) + + expect(result).toBe("request-task") + + await runtime.dispose() + }) +}) From 2bf4befcaa1a150883a15ad2cf298e44642f1854 Mon Sep 17 00:00:00 2001 From: Andres David Jimenez Sulbaran Date: Thu, 9 Apr 2026 23:35:16 -0600 Subject: [PATCH 04/24] feat(hatchet): add events client demo slice --- .../app/lib/runtime.server.ts | 14 +- apps/react-router-example/app/routes.tsx | 1 + .../app/routes/hatchet-demo.server.test.ts | 162 +++++++++++ .../app/routes/hatchet-demo.server.ts | 16 ++ .../app/routes/hatchet-demo.tsx | 266 ++++++++++++++++++ apps/react-router-example/project.json | 7 + apps/react-router-example/tsconfig.json | 3 + apps/react-router-example/vitest.config.ts | 12 + packages/hatchet/src/clients/events.ts | 168 +++++++++++ packages/hatchet/src/clients/index.ts | 10 + packages/hatchet/src/core/error.ts | 57 ++++ packages/hatchet/src/core/index.ts | 3 + packages/hatchet/src/index.ts | 3 + packages/hatchet/src/testing/index.ts | 8 + packages/hatchet/src/testing/mock-client.ts | 105 +++++++ .../hatchet/tests/unit/clients/events.test.ts | 207 ++++++++++++++ packages/hatchet/tests/unit/errors.test.ts | 30 ++ 17 files changed, 1071 insertions(+), 1 deletion(-) create mode 100644 apps/react-router-example/app/routes/hatchet-demo.server.test.ts create mode 100644 apps/react-router-example/app/routes/hatchet-demo.server.ts create mode 100644 apps/react-router-example/app/routes/hatchet-demo.tsx create mode 100644 apps/react-router-example/vitest.config.ts create mode 100644 packages/hatchet/src/clients/events.ts create mode 100644 packages/hatchet/src/clients/index.ts create mode 100644 packages/hatchet/src/testing/mock-client.ts create mode 100644 packages/hatchet/tests/unit/clients/events.test.ts diff --git a/apps/react-router-example/app/lib/runtime.server.ts b/apps/react-router-example/app/lib/runtime.server.ts index 63f0455..151281d 100644 --- a/apps/react-router-example/app/lib/runtime.server.ts +++ b/apps/react-router-example/app/lib/runtime.server.ts @@ -2,6 +2,7 @@ import "dotenv/config" import { Runtime } from "@effectify/react-router" import { AuthService } from "@effectify/node-better-auth" import * as Layer from "effect/Layer" +import { HatchetClientLive, HatchetConfig } from "@effectify/hatchet" import { authOptions } from "./better-auth-options.server.js" import { Prisma } from "./../../prisma/generated/effect/index.js" import { PrismaBetterSqlite3 } from "@prisma/adapter-better-sqlite3" @@ -12,7 +13,13 @@ const adapter = new PrismaBetterSqlite3({ url: process.env.DATABASE_URL ?? "file:./dev.db", }) -const AppLayer = Layer.mergeAll( +const HatchetConfigLayer = Layer.succeed(HatchetConfig, { + host: process.env.HATCHET_HOST ?? "localhost:7077", + token: process.env.HATCHET_TOKEN ?? "", + namespace: process.env.HATCHET_NAMESPACE, +}) + +const BaseAppLayer = Layer.mergeAll( Authlayer, Prisma.layer({ // Prisma Client options @@ -21,4 +28,9 @@ const AppLayer = Layer.mergeAll( }), ) +const AppLayer = Layer.mergeAll( + Layer.merge(BaseAppLayer, HatchetConfigLayer), + HatchetClientLive, +) as unknown as Layer.Layer + export const { withLoaderEffect, withActionEffect } = Runtime.make(AppLayer) diff --git a/apps/react-router-example/app/routes.tsx b/apps/react-router-example/app/routes.tsx index 96c1bea..0564ade 100644 --- a/apps/react-router-example/app/routes.tsx +++ b/apps/react-router-example/app/routes.tsx @@ -7,4 +7,5 @@ export default [ route("login", "./routes/login.tsx"), route("signup", "./routes/signup.tsx"), route("todo-app", "./routes/todo-app.tsx"), + route("hatchet-demo", "./routes/hatchet-demo.tsx"), ] satisfies RouteConfig diff --git a/apps/react-router-example/app/routes/hatchet-demo.server.test.ts b/apps/react-router-example/app/routes/hatchet-demo.server.test.ts new file mode 100644 index 0000000..55100cf --- /dev/null +++ b/apps/react-router-example/app/routes/hatchet-demo.server.test.ts @@ -0,0 +1,162 @@ +import * as Effect from "effect/Effect" +import * as Layer from "effect/Layer" +import { beforeEach, describe, expect, it, vi } from "vitest" + +const pushEventMock = vi.fn() +const getEventMock = vi.fn() +const listRunsMock = vi.fn() +const cancelRunMock = vi.fn() +const runWorkflowMock = vi.fn() + +vi.mock("@effectify/hatchet", () => ({ + pushEvent: (...args: Array) => pushEventMock(...args), + getEvent: (...args: Array) => getEventMock(...args), + listRuns: (...args: Array) => listRunsMock(...args), + cancelRun: (...args: Array) => cancelRunMock(...args), + runWorkflow: (...args: Array) => runWorkflowMock(...args), +})) + +vi.mock("../lib/runtime.server.js", async () => { + const { Runtime } = await import("@effectify/react-router") + return Runtime.make(Layer.empty) +}) + +import { action, loader } from "./hatchet-demo.js" +import { buildEventRedirect, parseEventPayload, readSelectedEventId } from "./hatchet-demo.server.js" + +const createRouteArgs = (request: Request) => ({ + request, + params: {}, + context: {}, + unstable_pattern: "routes/hatchet-demo", +}) + +describe("hatchet demo event helpers", () => { + beforeEach(() => { + pushEventMock.mockReset() + getEventMock.mockReset() + listRunsMock.mockReset() + cancelRunMock.mockReset() + runWorkflowMock.mockReset() + }) + + it("parseEventPayload returns JSON objects for push-event actions", () => { + expect(parseEventPayload('{"userId":"user-123","source":"demo"}')).toEqual({ + userId: "user-123", + source: "demo", + }) + }) + + it("parseEventPayload rejects non-object JSON payloads", () => { + expect(() => parseEventPayload('["not","an","object"]')).toThrowError( + "Event payload must be a JSON object", + ) + }) + + it("readSelectedEventId returns the requested event id from the loader URL", () => { + expect( + readSelectedEventId("https://example.com/hatchet-demo?eventId=event-123"), + ).toBe("event-123") + }) + + it("readSelectedEventId ignores empty event ids and buildEventRedirect encodes valid ids", () => { + expect( + readSelectedEventId("https://example.com/hatchet-demo?eventId="), + ).toBeUndefined() + expect(buildEventRedirect("event id/123")).toBe( + "/hatchet-demo?eventId=event%20id%2F123", + ) + }) + + it("action pushes an event and redirects the loader to the selected event", async () => { + listRunsMock.mockReturnValue(Effect.succeed([])) + pushEventMock.mockReturnValue( + Effect.succeed({ + eventId: "event id/123", + key: "user.created", + payload: { userId: "user-123", source: "demo" }, + scope: "demo", + }), + ) + getEventMock.mockReturnValue( + Effect.succeed({ + eventId: "event id/123", + key: "user.created", + payload: { userId: "user-123", source: "demo" }, + scope: "demo", + }), + ) + + const formData = new FormData() + formData.set("intent", "push") + formData.set("eventKey", "user.created") + formData.set("eventPayload", '{"userId":"user-123","source":"demo"}') + + const actionResponse = await action( + createRouteArgs( + new Request("https://example.com/hatchet-demo", { + method: "POST", + body: formData, + }), + ), + ) + + expect(pushEventMock).toHaveBeenCalledWith( + "user.created", + { userId: "user-123", source: "demo" }, + { + additionalMetadata: { + source: "react-router-example", + }, + scope: "demo", + }, + ) + expect(actionResponse).toBeInstanceOf(Response) + const redirectResponse = actionResponse as Response + expect(redirectResponse.status).toBe(302) + expect(redirectResponse.headers.get("Location")).toBe( + "/hatchet-demo?eventId=event%20id%2F123", + ) + + const loaderResponse = await loader( + createRouteArgs( + new Request( + `https://example.com${redirectResponse.headers.get("Location")}`, + ), + ), + ) + + expect(getEventMock).toHaveBeenCalledWith("event id/123") + expect(loaderResponse).toEqual({ + ok: true, + data: { + event: { + eventId: "event id/123", + key: "user.created", + payload: { userId: "user-123", source: "demo" }, + scope: "demo", + }, + runs: [], + }, + }) + }) + + it("loader skips event lookup when the URL does not select an event", async () => { + listRunsMock.mockReturnValue( + Effect.succeed([{ id: "run-1", workflowName: "wf", status: "COMPLETED" }]), + ) + + const loaderResponse = await loader( + createRouteArgs(new Request("https://example.com/hatchet-demo")), + ) + + expect(getEventMock).not.toHaveBeenCalled() + expect(loaderResponse).toEqual({ + ok: true, + data: { + event: undefined, + runs: [{ id: "run-1", workflowName: "wf", status: "COMPLETED" }], + }, + }) + }) +}) diff --git a/apps/react-router-example/app/routes/hatchet-demo.server.ts b/apps/react-router-example/app/routes/hatchet-demo.server.ts new file mode 100644 index 0000000..007454f --- /dev/null +++ b/apps/react-router-example/app/routes/hatchet-demo.server.ts @@ -0,0 +1,16 @@ +export const parseEventPayload = (input: string): Record => { + const parsed = JSON.parse(input) as unknown + + if (parsed === null || typeof parsed !== "object" || Array.isArray(parsed)) { + throw new Error("Event payload must be a JSON object") + } + + return parsed as Record +} + +export const readSelectedEventId = (requestUrl: string): string | undefined => { + const eventId = new URL(requestUrl).searchParams.get("eventId")?.trim() + return eventId ? eventId : undefined +} + +export const buildEventRedirect = (eventId: string): string => `/hatchet-demo?eventId=${encodeURIComponent(eventId)}` diff --git a/apps/react-router-example/app/routes/hatchet-demo.tsx b/apps/react-router-example/app/routes/hatchet-demo.tsx new file mode 100644 index 0000000..3cd496a --- /dev/null +++ b/apps/react-router-example/app/routes/hatchet-demo.tsx @@ -0,0 +1,266 @@ +/** + * @effectify/react-router-example - Hatchet Demo Route + * + * Demonstrates using Hatchet workflow client from a React Router page + */ + +import type { Route } from "./+types/hatchet-demo.js" +import * as Effect from "effect/Effect" +import { ActionArgsContext, httpFailure, httpRedirect, httpSuccess, LoaderArgsContext } from "@effectify/react-router" +import { withActionEffect, withLoaderEffect } from "../lib/runtime.server.js" +import { cancelRun, getEvent, listRuns, pushEvent, runWorkflow } from "@effectify/hatchet" +import { Form, useActionData } from "react-router" +import { buildEventRedirect, parseEventPayload, readSelectedEventId } from "./hatchet-demo.server.js" + +export const loader = Effect.gen(function*() { + const { request } = yield* LoaderArgsContext + const eventId = readSelectedEventId(request.url) + const runs = yield* listRuns() + + const event = eventId ? yield* getEvent(eventId) : undefined + + return yield* httpSuccess({ event, runs }) +}).pipe(withLoaderEffect) + +export const action = Effect.gen(function*() { + const { request } = yield* ActionArgsContext + const formData = yield* Effect.tryPromise({ + try: () => request.formData(), + catch: (cause) => + new Error( + cause instanceof Error ? cause.message : "Failed to read form data", + ), + }) + const intent = String(formData.get("intent") ?? "") + + if (intent === "run") { + const workflowName = String(formData.get("workflowName") ?? "") + const inputStr = String(formData.get("input") ?? "{}") + + if (!workflowName) { + return yield* httpFailure("Workflow name is required") + } + + let input: Record + try { + input = JSON.parse(inputStr) + } catch { + return yield* httpFailure("Invalid JSON input") + } + + yield* runWorkflow(workflowName, input) + return yield* httpRedirect("/hatchet-demo") + } + + if (intent === "push") { + const eventKey = String(formData.get("eventKey") ?? "").trim() + const eventPayloadInput = String(formData.get("eventPayload") ?? "{}") + + if (!eventKey) { + return yield* httpFailure("Event key is required") + } + + let eventPayload: Record + try { + eventPayload = parseEventPayload(eventPayloadInput) + } catch (error) { + return yield* httpFailure( + error instanceof Error ? error.message : "Invalid event payload", + ) + } + + const event = yield* pushEvent(eventKey, eventPayload, { + additionalMetadata: { + source: "react-router-example", + }, + scope: "demo", + }) + + return yield* httpRedirect(buildEventRedirect(event.eventId)) + } + + if (intent === "cancel") { + const runId = String(formData.get("runId") ?? "") + + if (!runId) { + return yield* httpFailure("Run ID is required") + } + + yield* cancelRun(runId) + return yield* httpRedirect("/hatchet-demo") + } + + return yield* httpFailure("Unknown intent") +}).pipe(withActionEffect) + +export default function HatchetDemo({ loaderData }: Route.ComponentProps) { + const actionData = useActionData() + + if (loaderData.ok) { + const event = loaderData.data?.event + const runs = loaderData.data?.runs ?? [] + + return ( +
+
+

Hatchet Workflow Demo

+ + {/* Run Workflow Form */} +
+

Run Workflow

+
+
+ + + +