From a4759a4cb4fb15e922cf3fd5a1e2d0f6d64fe950 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Fri, 22 Nov 2024 15:21:32 +0400 Subject: [PATCH 001/636] Initial commit: README, .gitignore Signed-off-by: Kristina Fefelova --- .gitignore | 25 +++++++++++++++++++++++++ README.md | 1 + 2 files changed, 26 insertions(+) create mode 100644 .gitignore create mode 100644 README.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000000..cef2ceb3c7a --- /dev/null +++ b/.gitignore @@ -0,0 +1,25 @@ +# build output +dist/ +lib/ + +# VS Code settings +.vscode/settings.json + +# Jetbrains setting folder +.idea/ + +# dependencies +node_modules/ + +# logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* + +# environment variables +.env +.env.production + +# macOS-specific files +.DS_Store diff --git a/README.md b/README.md new file mode 100644 index 00000000000..4c327ea3b30 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# Platform communication package \ No newline at end of file From 148314513d494351c883a9cfd20f83ce73d51745 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Fri, 22 Nov 2024 17:57:32 +0400 Subject: [PATCH 002/636] Init with bun Signed-off-by: Kristina Fefelova --- .prettierrc | 11 +++++++++++ bun.lockb | Bin 0 -> 50280 bytes eslint.config.js | 14 ++++++++++++++ package.json | 18 ++++++++++++++++++ scripts/build.sh | 27 +++++++++++++++++++++++++++ tsconfig.json | 30 ++++++++++++++++++++++++++++++ 6 files changed, 100 insertions(+) create mode 100644 .prettierrc create mode 100755 bun.lockb create mode 100644 eslint.config.js create mode 100644 package.json create mode 100644 scripts/build.sh create mode 100644 tsconfig.json diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000000..d0f0f537418 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,11 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "trailingComma": "none", + "tabWidth": 2, + "semi": false, + "singleQuote": true, + "printWidth": 120, + "useTabs": false, + "bracketSpacing": true, + "proseWrap": "preserve" +} \ No newline at end of file diff --git a/bun.lockb b/bun.lockb new file mode 100755 index 0000000000000000000000000000000000000000..df37a141c128c80527ac3b79742a15c90124279e GIT binary patch literal 50280 zcmeIb2V4|O(>}UnLvt00Sma zF@XUS0t)6Vh!L~is@WZw<59r#-TS}4?_T?T>-2P2*VEP2)p2G~T~$Ak&C(BM#_BU- zr>X=e#)<*aqvJxN!kEzvdRQDQCWu3yY9Pi>qtOgpQ!Cn6f00X`@vT2V&P7>115M#ch5I0>6G0NpbECKNq z;P-*J7-CI`nQVG^bX+iPkO;S20p$0_{P5T~7K27x$xDYZV?*hQ@icE}8uFcaaXgwV zhyzMQGh>(>nln_6{49_|{ck}U>ko?#;&2$Dv?xaMRBX>&$i#9`14K`b`I z4f4@0nf*cK zm81PvK#cqt1}mIl)ajQ7P!8=77R2TxfKDt%BAXeOOq&PFpuaeSvZ%*Oh_T;Qc;(rQ zn5hgFjV1^B0xSI%=nJg$3cx`9&p;aM1MlN-m<*N+mj zKOJH)ZMr)c1LcPC$^#Z**H@5_{bm4z4*6*iqaU4w81>`u@=JSfE8OQ8KKdkkV|l%` z%e6yZ>C<;h?f9rN)p-v~#`*ZPL+{OmMmQB2i)y+$xlEitd?h_swMx3Wta|1$+wfw| z;}TDHI+TUJZdAXhnd#VKBP+G{NXp{aRm%ML1IE348Sh{DQfS@nR;}uVsS&yjzgezO(h}79w+!=Vra1?6Ke* zyGBQUyVdc>bAr>1qRZaa)JBA#)t-``bT3Tu%Emj_cO6hJ{Qgw!-3N~B>KN@o!G*cO z?(*lB_G+H9r)ZkaUB%Snr($myzke!Trp1V|%*{@?)^v88oPA{fquEAJb|o+My`S)5 zRj$pc>*am}8b2%!J-qSZ>$R&jDqNRue;0I8^|Zq(`S*(oO1;0O)p{?QO*6TtP;+L> zzEgGk>lZtE%!oePw70C-M)uCC5tX88V$Xb)tT!(Be3f1Kz0T93*M*oxA2z$c8M9-k z@6GYj&+BU+WEi;gpVVMq6%ulB$XnL(rijDhez$7e^iOV*uc?v=PSCKl=`A_@ieKSn zr*|73y?4)*uxWB0m1%ul(fRT@hsNx|->oKQ_ZlLeerA&H{48y&;CW^eLX*!uGbv8o zbGyYVfdApYLdKoq33F9b%=fpSzS{e7@bu;M6C|JQ66GB@ zde)r&LjFo_O0%mOGoB@}{O@gdFS(M`S}$+9s4++K)8=A+e#w`0J|}OVxqYBWGQ@N1 z#+Hx4%~Se`niQ$}ZTdXthsKLOD)PN&X&UKOlw$S0?J*|#`*-D*VHN4<-7@k_oY?SJmVrRNGB0eWGL)}`L`;g5DIHJ__1wQuKd 
zFoLr{2K&c5u;s_+B|+dJpc^qxzwyv4-yYvvBV`%^99+#{1doQ}7p`-Henwswg$wa) zhI=I7tN;$8>F7It{WwwP4Ix9!NWa;ofy_J@a5!6D?BjZU!u~dGem-34VRK6h3u1E; zAf9^P`I>>-LR^0gx&FptB(j-d$uvm~doSx1Lnjd&qNV?~HJTdHoK?89%UludeAmsT z+X)#%+*hpx4(<;|Dmwo{^eZ&G55;- zc7FRW8qn2=o4j;ReeK&@vB&UC%UjLZ$+00(u#3*}ik(NF@%099@XQ2$UM8;WrPOA zu7<;oW(GrDV>UDwZjw9l5jaNRZ!p%}VG>2ZPT4am)6JrQ+a6*x{z?Np@*B{8o`1LYmNZNRL^J_ zObGv$(SdejxXZh9Q(4DN|MTSxcLmV1pT=RZVuPZ`b6CvSa4QwV&TF~ho!4?S4m*SZ zIsQdvD=T-2OfWN6AM+${Z?<@;lyI2CW;0k*8LY|IR5Con#(o?Gvo7kJIo$LD-!zHu z9y}eEpCom`J3zx%mWlHyoV?0B5%;6G78;S9DuCpn(Mwh4)giAjuQ!nuwa2PzPG z+>XAu; z%VI#ca7_iua^P}|mxyG?#nRa!5sa9i5LO(UO$V<@ie<1vg5nu;CQB6Sp^vgv6j|e0fP{#6qp~kRX7^^9UV!46>Lpi7fa(oCYJf zwR3`kp)j{HSkLH5Oin~xA_q=)aj}q&jthmNdc=w!!;EFd1aU$lVuLt|te|Lc*ce2O zi$|vp0%f}B(au89tDOltPQ51BvEcU6iJ=U7G#ds{C_RZ46p!vm2mRr7EH)61Z;V** z8U{1Ai;8|O3f29bMaKns7*^T^4jvXk79&s+Ph4G>?^G6~IunGfK{v9UZ3o%T1Rd|# zx~sRtS|Hl-Oh}q`Q!DSf59D}Hpevlvx{>;+^FjKj9H<-jd|g|s!8Fwa_frn21OFqe2#4~Z*bqi2o$z9q3Sc^h=lCw*I|~7FX96sR zw(7c#|Evx*`$zqt80^5i!Jv*mK%ge+Cb%=`M!rXb9DsXNj?lQfR`a&mfXRCTJ@NV< zYy}yNX&ju5y0Ca>Aua?@0>&(MWY-p_%x);$IoI{bv$+=rC`h~upqt_;v6~txQ!5Bk zQgq_ILO0U97Zo7MdxARAv!IpSJ7Ac#IT6IQB4Zk??p-aJlv034NfGw%Zah+|18GW% zXh(O_m)$@oCUihUWeeNICnYiLZ8k;h}9uMbU}B{i^bvAohJxSV2th^5O4881S6;guJ6%# zh|wIJ7#7CpYDuP)0t8A5D(-%E`)SXCYX5T=Li8mu?z=D;@5LWT^PY&cxqB(%MIs?Z zJi*L_;|-dQ8>}GM(X+d9o6bT61Svs``0kvBd+kfe!Ai}bbKh-Z)x@g_*f>q=vc1vF zSPq@Kc_X0yl?$DTyA*)!0$;0_Hz6 zprgAV=j6*fkR~5{?4j_~ucIw?7Kj2LrHYIW@Y)s&Wi5Mc>%`0(^rjv6yu^4d~KOJ$qXyosOpw8bj zq4~I%?|NeTm6Cw^t8Cc3bbsFDzJdh=?gR8__cG+`RFEPcL3ccLb*=lq?<;=N9A_)| zsI5mR+$#_O;64zCitY~Mzq(NY1mb3;N7niY;HL{4fchz?M@JnlDDO%LFo@|33<2MW zp(#4pA@F5U7hd;|asY$HasLXJUfo%gxRdI*h9b;CF-s3Rmb+r>cV2UWcxM8%K=dw2muG-8K;zq0x5G5aBdEm{6-1{_o7>mJci<90{;K6$>)Pmu3;CKf8$AKP)L=_L51J%$NqVB&VZ3;T2yA{t3b z5UbX3Y-KzX4$JtWstb^gGJw(X+@sAfVHR9+xDU`c-t{)z9Y)hxa8~bX%ybq)3w21q zF_3q?4Y!`$({*=9|11b-|I8pRV2KSR+^EMU#sqhH7uZn-a5|oeONj2)MDE8xAjW+l z{Jy(=E9IUX1Su&{ns?OhqU(Nk8h`+ILid5n;iCg|0csg|pBfv=?CK^IJv9MgFa{7q z?2WFs0=(~kK#2DQVt8!pTIv^Sakb~^NUZL}NM$9$JO3bf%Nosy?!p$-VyFv6iZBMz zd&Dg|oftNpAa+z2a6gp<#Gi5q&~8rzKaFx;6Jfj)a|qEsoC|s1a&-A60JjVva38=W z;RBZLtlT++@;;K#|HpERX!s@_-u)5FZ_g(C-z=6LTOq8JoA}Ro79TV-TvKy6Oa@CYq)SVa#Q=zWBqr|eto@H&RmXQKfbwew3~}DRv!fQg{ARG| z5sc_~xVY!qnJ3+aukya*!p`J=(FI#?bHK*YA0HLYBDVEi$P?dMfiw|=W5WKgJHP6H z1u^oJ-mTPcnh6lU;Se(kF>r|$G?W<@)Jtq!e0I*AQq9acWVB-{_p z;B$O;)>yVj(CEv&PtO`UKS3L2Vx=;?;~O;U_ii1(Kds`zc<|z1enaO2gAG2efIl1X zYcQTB@DY2DLpra%?2dX9f3GZ{lKNiem2c>fqy9k#-a0X!_pM*2wv=Dsa#e(H8j0_M Ld#v#Pp@RPx*LY#t literal 0 HcmV?d00001 diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 00000000000..0a7a95e2752 --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,14 @@ +import pluginJs from "@eslint/js"; +import tseslint from "typescript-eslint"; +import prettierRecommended from "eslint-plugin-prettier/recommended"; +import configPrettier from "eslint-config-prettier"; + +/** @type {import('eslint').Linter.Config[]} */ +export default [ + {files: ["**/*.ts"]}, + pluginJs.configs.recommended, + prettierRecommended, + configPrettier, + ...tseslint.configs.recommended, +]; + diff --git a/package.json b/package.json new file mode 100644 index 00000000000..e29626ced9c --- /dev/null +++ b/package.json @@ -0,0 +1,18 @@ +{ + "name": "communication", + "type": "module", + "workspaces": ["packages/*"], + "scripts": { + "build": "sh scripts/build.sh", + "lint": "eslint packages/**/src/*.ts", + "format": "prettier --write packages/**/src/*.ts && bun run lint" + }, + "devDependencies": { + "@eslint/js": "^9.15.0", + "eslint": "^9.15.0", + 
"eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.1", + "prettier": "^3.3.3", + "typescript-eslint": "^8.15.0" + } +} diff --git a/scripts/build.sh b/scripts/build.sh new file mode 100644 index 00000000000..5f852afece4 --- /dev/null +++ b/scripts/build.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +PACKAGES_DIR="./packages" + +for package in "$PACKAGES_DIR"/*; do + if [ -d "$package" ]; then + PACKAGE_NAME=$(basename "$package") + ENTRY_POINT="$package/src/index.ts" + OUT_DIR="$package/dist" + + echo "Building package: $PACKAGE_NAME" + + if [ -f "$ENTRY_POINT" ]; then + bun build "$ENTRY_POINT" --outdir "$OUT_DIR" + + if [ $? -eq 0 ]; then + echo "Package $PACKAGE_NAME built successfully" + else + echo "Error building package $PACKAGE_NAME" + fi + else + echo "Entry point $ENTRY_POINT not found for package $PACKAGE_NAME" + fi + fi +done + +echo "All packages processed." \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000000..42f77bc858b --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + // Enable latest features + "lib": ["ESNext", "DOM"], + "target": "ESNext", + "module": "ESNext", + "moduleDetection": "force", + "allowJs": true, + + // Bundler mode + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, + "noFallthroughCasesInSwitch": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false, + "types": [ + "bun-types" // add Bun global + ], +// "exclude": ["node_modules", "dist"] + } +} From 916a5aef6cd29e32945df614dc750b64b7998e07 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 13:41:31 +0400 Subject: [PATCH 003/636] Init messages types --- bun.lockb | Bin 50280 -> 57024 bytes packages/types/package.json | 12 ++++++++++ packages/types/src/index.ts | 2 ++ packages/types/src/message.ts | 37 +++++++++++++++++++++++++++++ packages/types/src/notification.ts | 18 ++++++++++++++ packages/types/tsconfig.json | 8 +++++++ scripts/build.sh | 2 +- 7 files changed, 78 insertions(+), 1 deletion(-) create mode 100644 packages/types/package.json create mode 100644 packages/types/src/index.ts create mode 100644 packages/types/src/message.ts create mode 100644 packages/types/src/notification.ts create mode 100644 packages/types/tsconfig.json diff --git a/bun.lockb b/bun.lockb index df37a141c128c80527ac3b79742a15c90124279e..d839a1928abd342cf7502111f2fc46d2d5c9de94 100755 GIT binary patch delta 11032 zcmeHNX;f6lwmzq!fx`@f(13_2iefiIE2Gk&i7^oejG~eRZJ;G6Q)pChYK#&`1TCC! 
zj6<9U9FpLOLmXqGMl|L+L~}KXG2WPKj6>A-?K5ECT=UlZ@!nhOt@Voav1{+zwQJX| zed?Ukr+%1b)3DE`TG#&Ugcg;9=AHbpf63Em+eeoMdiweFYNx5mSnF*G8t3R(9dVTt z%e*;*T3QY=iIM(0a$Na#R6RgV=Dgg35{`S-hT}-S4%8hq&osJ#w3o^9nKGRRDxf{v zWGd3;nz?B3Zs4`De4$$6{{SU>cR^iKP-z7PXV5=X92WwbYu4uE6=re;_LAZ~$el=@ zQ&3oJ;<#HfKRmY}TRWgn81%?ZlX+B0Zl0Mlnco{_DlX&Bp^^HT%tgf}jJgwg&A|7Dt29kxR$(#4VIAb; zeTj=G?@(R?ffoeXrp!?}P{=paa2z)savE@zt2E$4#;e-J#luGk%HK7bwQw;f96V{M zumrGJo(f8C7)OX+bKijX1AmkmRX&z6u;~eI1Sn~z$z#WY27(Whvj#~)Nefe zBI^Qp3f2a1X_6z%+A?E)9%n4hDKZwDP0JvsNuF;m(PN;nXDQG0k)lBhW<>O!@{+=j zpcI66d^rw%%g=z2rS^PNagHhKMN*ZcA4Oug(OmK# zqEl=dWzHR5#$AFpwEVU3mK?bS>edC7BDsUvlt0>3%yBvJlx7qLil%Z`%#3Ex96Sw( zY$_?qH5D&mk+wdTi>)QMCV(Q8<;yX7()s{HBgWIolaAA=9<)GAl8kEw+v#sRL z5n0|H`ebL+sN8H+xuQaINlr0xm@*M%NINMJUIm5SjPiI?$WT#^wxq1cWL^;}UHzpc zrUJ6Sz*FW_fKuGX$Q1h69b!4qqQb&F&?2K5aW)s_<(43xSPsr-$2?nqTg%ap`z(L^ z`R>*JcT-+J6B2kTIO^WUuakpQZg#)1f8bP4&kY~{(RXWN*wd`qO+O?RW`A4N`5(=H zHUB!R?b2=Q*WT_hqPW|sa?YGFuG805S08-UBjjeEyX(&_yUx9SCO~+5+=Yoprk(#H z{r(w^-D`&Y;a?2f?B8)(g+tQpn(xAAMtB+Y9?yr}tZT)V09Xz{7Ux+C2gKPid* z;^4p^W{(^U!UU{PXG~3ClLBLT8#Y!*YWTKJ3IE+DN)tUj>S7C@*l7=XM=Fm4(TLn z^Ug7y8M#EsYs=t@@E$m_Z^I6{ z#9#tt?grroWYpV+G*m%O?6G^IFdUgeiYmrKaS1E)Fz}aIy$2$JYl33v$PRkM@M$d0 z)1Y1rSv=e4lAz*U*ly26p%W4*2}%wu*)v9%04^S!Ep<_U0Y((xga1;s~mh2KEECPpYgeC>@R1-%* zcq((YY?4!q&=XhvE6}tj=hU;n#gaS15!7g8(M`b(+b@lU2*BQ3;3$kdV<43jMLMB#34(IdvttWU< z2afU&0Rr~~9K58YcaHJUwwK31e=1~I^vR(REw4bYDKIKBHuAYcYJ7=C^6k`#fPoQXE0uege#m99yo6es8mL28N$;hU+L7F%!6aFaD9Mv$ilq{HdiJ0~l-dzQu^;kW zEJR8E8Yhj9id3vADfE=(L@BbpWS%Isr^q}}iV!w7QQlXU6D2)ljaZ11oi_oJ4`_pb zRB)w;g(x))lvt6fm^REOlB7fA)^xd*C@oxu%v)2QRfhSnC1DNCZ}-UU#HPXH>`lm_}#luDHNdVuuz0L1T=`Ta6~ z0F(+*YPU4t9}RRApkht!!JihTFQ?@AIe-j(CF}iHDCwV<`dPR}xg$|ZTPpv1`w!y( zk_||7g5>1s|LFch>)%5hPNFoR^?pN5);zWUckVxNG{C=YKqN$=eTNEB8u{uGs=t^2lx}f9u%*wH zBLkYH_wjr`?cl|t;+0eGjUHV2@y*ypgI)VK-U!?iSm#YY!yDJux9ERl#~AbDVQvXSzT25waICa4WpiCZQManEI^H*TVeOx%zg1CTY z*cuz}US6NN^+bz<&AzWLce>YDn)a*}_eP>&R>+zwRt`_Iw%et0{)3T^uI@UK)P3q0jF@x`H?y&gqfy8YYj4jnJ5PF5_b`#kZxRG*dU(*u6XtY7Z6 z)@j$S6f48itqoU19az}=@yug~U43nuciDa9s}CM-TesEo=#{KpcQ5H4hd!=Ly*PF7 znSRS#4HM2zxs-lg-TiAz;8x4MK9yfS^W=7)urfTu+VJ`aZhl?I1*c-;Te)X#Epq+c zxz%}>pX|-Irk$B~XScDL?&SE4CEt}K)T&m`jh}z~?c2wK_^J+jm(E`Pc%m@6ldqNC znbvlHTR156%bzTVCziydhu0hb=C3(A^?qf$sqK!G&N{dEmFsVoUUUlj<+(oHVm$ER zZdh**L&esbI~$+6cb~Di__MUXo2(4avNqiDBd-fVJ|Uxm__C*|zBl%K*L-S-PvEI* zYwMTPPv18x#4)Vzq1oS@Z|E24cKU#2VaVd0d+cWQ_ZySquzhOj7dsm~tPIoRm{M%F ze|u}zu6~mbw2RnWlQX+?3co*Ay}yOcrQ4SMDQC_Py>b8X&5-bp?N)id;X1Rvanb(M z5vw+}FNxkV{l3m<_}fYPg;vS6*@{uROWGdO{Z0@Ex|8|qHfOH9yx4ce z;SBd%$Nxz3?zJw(XTqJuL2J@xtnleGD17vT`%g5kJEN~99bSH80J~^qSb1(F`?hKG zNA_P6XR|ehcQ-y~-Lk#h46viSuTpf95ix{>eOxgU6eDmTo$CB{HDJQ}d?} zT%Vo%U|+8bKfT-KNAF9YS{YUzKt;oY!_xD0o3DK7Ik);?|Fqxc>NnlZ*nRZi(hhUo zh98eT*mBafpZ4{0`skaf{kILxU-rkafA%cR@w}kdweO+dy0kH1If`>ky8yY6($(@%b%ku&w# zctb+L<;u&CShO(V`g+>hln~RCu~KZk8~1H}cxJfQvX%awbH+Jb?D%TZ*}}*ZwCRS$Ys5|2!8w%u>|ie_UxA8tT*>%drGDs)hey1^eu5WuTfyQc z{VF|u$I{GeyvnKhj)~b1RI0J9*wu_Q(qLzD9Mso?@sm3Gj3bE)ZJf>V0)3+m`a)VI zN|oQMSzG*!u;Y4$nP>1|M?~&tK2%{VvY*KUjg1rUP$NygyoT}bCtIF9mkJ3f^Dr6l zt=gKWXLc_VpeS!I^oq1ul)QM8p(kWztE9CfTl5%CyBz6L*h#icb^)(E?8%f6_QU%D zT6&zPrx$wCz5pBsjsTwlE`Te5h|%{2{8kb&d89>)nhw|sYy;@I+!3I6GJ3-}3LFED z11Er!z$u^*FabHhyTAy*2n+-0_4IAv9e^Gv(|~?JD$pP33#0(O>9wyXDz5|G0ea6j z0Q6i=kF5~^e#D_4O*lQ!8KBprUBJh{24E9FA)Ew^0!9O4fxW;!;5={|_yYJ6I0Kvo z@_>ALIUI=!y+98J1_1+rH-T4xSAirT9!LNvb*2IIu5}J50EPfPfMkHyi`HizFdyg! 
z&ArMJ>zU??yhptY?ABB&QiO)YyorLCI8lXtYJc%rgU za3=y20CKAWm;#Us^in(nAWP&f-O(vQ$zl~S8>j^6J~RuU{^Yh|b292=_c*zRa&DVM z9lN|075&br*uVGEqWdPIBvT<>LK%rPcIr$zCuf^F0wPsx$<+T`vxdq{)S&N zY#*qNy{Y2mX3oH~PSZQI9?DA-Qa&A;_YT~+Mdfx+#b1a+|Hu|k*90gZ zAT@S_+W3c#u7OsBE{?PW6+1KCH+nq$L5#|qtB17c(_-=o6_3f0JDl=S)SNPM;IQuR zSBPDsXbxd27C6J#U-_yUAK7bw-=u4spn$N@^wU`C3=MvoABq2CSjCLq__6e>8G3)^ zTkz};w-0 zhyV9j$Si%d@{v5vujkp8_jbU2#RugxdcmW(hrLJi_ZP>C)?wAPHmqT;uPWMxb*%AK zy=ub_&5Gh@vN@F+=343JuY9;~aQ%M$Ydf+ju#pHXF85`ud!^oA`S4#_|67bY{&jK> zVb?_?gUJTJjBUh!slJe1L;WuMa=yl2`Otr&a=fdLs+w^$akhqE z!^X|-9iV)*4_}+uaH}G70>*$U1uD#u{e5-{f00F2Y4~%Dul8a?t6HjhJG1gCUon6h zmR;rUL&dyaw`72-JGv8~|HOa{>RyKgSq_UQC^nbO2Xg|vG=VkG z-(pQ2ce3>?%wdp5+ziTD(OmBYWDhQ3Kk0*?~b!Ru7Z(BT_>8f@-h7j@1Du4 z?~nLFY|>oFd|t$Yq+xZ)_{koAaEta3F@fjK_x1m4 zxBta3T~iuqEIykpMj|R_1cn^At6p;T=TI1?)C6)x2H-wKirjwH0~nK5?m8@*&_ z;HLHb*@cCE0e>ATX=%l<3(9(l-Thy>zl!-nt1B;6Q=bX!Z;SMN4Rfp2;PZC-+TOTA z$JFZiMZ29Yv z|9tZcZo2s?MulY7r7EtC;Gt+tHTS37u&vA$F44p z@+%0CPUB38?ff}k>iVo&R(tC*>#!v5Ra2=kzbMaSPRc6G&mUEgn`JD?Ei4FE#6`xe zk;WX8Ib1ok^Cvw?ykqc^gtqS4x#p5^lQ};;Gq)fdS;*=(1h`W+hHHt<)>0a>VQX7y z6k}*ms#?n4&dj)>LU8OuKWs8#DGeHz8OKk?`rfmvW7S~cm3zF8}Z0>m$9 zPwN+2;(V;NM1eMnv^=mZbT*5cr5e+%3h_prPHIU~sv!-;5POM7w?sLe;S!w^vqLW( z=0b~9!KCE4O4-kGWBL~NGWRofUwB!PY`e@hA8H+}yp1UD9?F}>Ls@Ow74fopa44NM z;70;_!EbPIkg2WXr&lCvXReU75HNpb(D$mO8F5uK_|ajViwEx7xFw43Ky-p(!G8de C*dZ_g delta 7102 zcmeHMd3aRS6@T|719_8`046U3Ap=3y3`vG$5+n22R74UM2@n)yVulcsuw(*=f*Da+ zw8(P85+H%F3qi$*OTi7S6|D##N(oDWS}iJdsfwVr^mpC@`h8LRhwVS@{l4Fvcb0qa zx#ymH-pkD^FNU077_!vWEj4$}OBZXti<^Dd-$M>`h`!uqW5uWeN1vNL{;#ifIeul% z_TvX6y*;>Wbcd>M9xxc=*DguHLOiwt^#{sIrc_GO*RmwBd`yTW#ekOiXHH@3Jn$^f zG3ia93iu+we_BdOK>85@Ke1#=QOb;IQhu1BA58s3T-9LA#n$s-#qOR2&(lpT^Ho;* zi=Wz`JlW_g%!TL{n&x;P5A)K&hcIW%KCDDMX^8q zT2iILjw3PA7pR;Lrz-q20woh?ON-zQZ~tI;%Z{uBjlK~NNoI$DzkH^@LXzU(F?7L` z@EEG#OBjS#@H%*&5ZP2&S>mr4LTRCKxeGcO?xuhul)<4`JX>X=7h)VlPd$#GL(UtO ziUY#>xuBdT`$5^AN|rZvwmCxSurf#lNVRK93WA*hr9w`-N4_G{8oO6aq9(g0KNUg= z;a>S{2#pQ*igr{Ju8A?!gx}?)Mrg_#p^|hnx(f1zdqfn~L}+3(HAQG)&)Ou3*Gj24!UFt9BxEA2(mc10lw z20;*}cm4nzyBSLGL~$Z^2x%~c&P92!keXJSvK%rVAcQUC!*)uD^(xmPsB9>c6NW>m zCRP)_p{7_w1R4&eozBI2L<=ftt=UE*^f%Dys2q8B1f6K@RsI4&Ul_rM)*dAVN8|=@ zq1;6&1~*LSupIfhNIKERt9%4uPY5v|Oxhwd*`=0r&hAkP!SNo*`hxa=>klqO=e_}# zrE|`%Sz)LdNMSa_TJO8YA&9$7E|lscJjx|-17X=x_wPm|IuBxmyAqrceD?Nba6BCx z!K48jkB!U8Flry3qYT4SZ+%oJQs*HmP&Ij9EUi(! zii{JKqYs1i+4_L%r+2v@Px;31*ovdAX-$GxBv4a=ri?@t=?9fWKGh>uP=P~JnjpKH z*Gg1dBjyMUVqOj|5BA`qJjy|Eyq#EKj7PZwj@N6f$%FI7dxf09dY77<5sSm%c=cFs zq{rsK$-IHa#pEdYc=A9V#hZY{P!l!zwKy7^=v6x6s17y;M~KR(DN$4QKsEw0Bq2i4 zkqSC!$~4r0QIH`;>{+2G7K5n0J;!zwPviBc8&Jtc>Q9g1>1KXX-o+F9Y8=Pzq&hV* zkqVrevZ0-oB)m@_g5!)c@_aC<9W~n$R3Uh;<|qg8#5;qGM{HUq8heUd05=>QXNFDR z?>cZi4s^(&tfPWtO@5~Xtx5KxAT%Xw%BZBPE@NA^ zfHUif!=roxPQwsLSf@wvIE~?3lGEw2m4X|kFaK3MA*HU@gb{?%9Sx_9Ak>rE1*rhp zNW&RKcsSK`(Zn)pg604!Y9=&D94zlLIF35{VnJh)t&y0MtMZm)@Al z06dWdfaKBJ&r!BRn(FSwGq2nX+$XCeM`hp$um|AwL&_69t4j^a{Br=? 
zzW^}*Ta({s@-KpNW6FAN&7J%z7ZmKkf8pq}4_u#hhs+aS)ocl1kAMB6&(m`WFq%OT ze}3Qz`N(r)%31rXAAKGi?MElSD+K>P`Ut}R-qA0@s{i9hzj0hvcnTd=y5jpFRIx5c zk{aJG{YJKJk3hZ0$HQ~FvKPLx?fBTpZd^R|jF9)XZJaXwqAYiJ(xK8(Y(cM7hS`>N z#9x}|b0140`2*oP^jV&azL4&0tXkSHq%kVJTD@^R|5RUHQ%T36_ON7m#a&wxI{kzaMk2@5x*F@+Li-%1Abs4FbTK| z7!QmA#sYT$e5&z{r+*0y1sw+D0{rE08^C4e7N8%%wXP4q#nA(xC|60n@Q@0m0d9a_ z3H+LP2H^L}4uC_#;Rpbgz&!xJ3ibm0wk`$uEzaKy`~{H@aQ)%Wk^aB{zzbvn*}y{p zhjbs0%CGm)z(7C)`0ct3AfOuP3-F6_9k2pe39JWdfmOf;U^T#R+iAc!fcIuO@EDNv zFn-YD7aG)l;|zDYH>r10pyXaZ)mFD|%vyCxwhuDZ)oZD4&Gn9W7`H*nogF!O>Bo81 zfut5lTK#gQC&jIGPqtEWec^=*zMGFoVVq8S8-ia>)_;shz4SmU|B`DO>vrSR_|y?k z_jqCPHBBM8esO3QGiN-s(D%FcNCm_&!x#><~cbbf7jB^vq0 z=7r=~r#gb*RB-1Oo~+!R_hy(Z#`kumrMuFk8XCGz&9JVW)E1+Y9bIQuL!au(#@Lc| zv*o}4qgfRN=e9xPcBQkXC)KV~Gp(Djz>w1X2?ORmtoKZJAx*5iv#|@D_h#?8_&Z&b zmg&lr&I`J*F5Y2XwtaP?Q``E--`8{lxP=Qk>QJkSE|guHi$5V2)Vjq$+FPqSth>Us zHOVjK(zdVABg=56h(4<=c35|j>Kn_H(+@wj6MDBRjh&fK)9c(~C2hfPJvHICfg;zd znbzIul*`$dawm^)phs_4rYl{NtZUZ}@}I{K|Mb&o=2*~tDAU=k@$%O)&0L=@a_I5( zZgCTxUayKg`UcNqsK*A?VO{Gs9;{m4zqp|@`lrEHtS*o4+Ms6qwBEYtEqM7;+qysf zt*hQM&4uGwN4qexbpd>E@tmkQ`MrB&k?Kn2fu%$`x54cg9Hv*4;Hcqm<@sWs`91n) z!T?*APrd4M@h9ipdR0ED(CYdj4(l4Xyjx=7j_SgpvT&P&&7&*zZijVUySlpXYwtck zCLV)jxR3$3&_&2qnz%7mc?`8H0oj#I4{TJ$JX&l0(xHuc;&y7kNfq14v&k)=iKenm z@$$#fG=EdN!@8p_oSxjLZ+LVT#ztoGN(3F<DcgnlMDH>=_i%GvD3Rnk;EN7ACr zd$0l87PsT4w{9#g-r^Rs=+!MMO7O`os>8Z~{Ujnu4WHLJ9X@aX5Ufaw-uiQ6716k@ zJw!`dwACrUu2KV1;vJO^ZI74D1ToHKyXMaxPjONu*S2_vbyq8&nm=dtBKH(omLLmyWi&E&Yb^?Htr zB&vm#b(#Ef&$TCV^5X}?B|bwq&nxN3<8INHY&+Z;)>U=>gbP=`UFy52rxDKmwM*+h zU%s;Y&6tcy(0EV}pcz2rJ5*eUE!g1}i)inT+zjg;`q855-TO`)JR=KsC<8GcoMhY) zFKF4VE_c(@TmmtM3va2F)Muw^tV_P6QpL`6D+P?bH$r{278_x*uJ8l<@^?NVM}L9g zdb@ga#_@KW+jF%~3N2}HI|g<#{{L|Ko(I2pzjK}1MtJ*V>1motryFv`0_w5LEr!xv z_-#X_PpR_f$@Ijoc!zc0ecy%Rw-?5K(oq)dx*JFw8gL@P30ozMp2C>J)qRy?MY84?BR-9rWI*+wDP9wD161 XwuRm@K5S}^8^&Ls?t Date: Mon, 2 Dec 2024 15:58:34 +0400 Subject: [PATCH 004/636] Update types Signed-off-by: Kristina Fefelova --- bun.lockb | Bin 57024 -> 50280 bytes packages/types/src/message.ts | 12 +++--------- packages/types/src/notification.ts | 8 ++------ 3 files changed, 5 insertions(+), 15 deletions(-) diff --git a/bun.lockb b/bun.lockb index d839a1928abd342cf7502111f2fc46d2d5c9de94..13f7117668b129d6004a6e1424385c701ee94437 100755 GIT binary patch delta 7036 zcmeHMX>?RY7Jl`ThP=*d0F#%7kcJ>@nj}rq34wlWDk2G>1PBV+F-;6fSkhrrAT1(` zj5gZT|97l9SJg7rh1{g;N(Qz4-MUOJyeGB3_QRjyYS*Ke z=WaQAK+?%Lx^SPt7{7K&3KpVz4X8g*Rx+hhlD?58iRI%$Bq;{8%s+DqYv+My zd5%f12UWlq`Tf&UN&?bH=obzCBq+;=+6=xCl>60#Mh!>h9VkSAHiC8mEeWI)mrX5{ z5?dRJ4UmVkym-pg3cn<+GWkg*Q;Je%Oq24%jP_wPP{dUY!(6Pr2wH6Jaqv9dq%vP+ zrN2lj_0O5f<}Zej^=3hj&GZLmRF;$lB!6J~41dKOX#pC!pFc3I!jD;tpw|Zc_mJ~) ze0NQ)U_Xq5oUKoe)a5;bLm+4iL6N_3Mllr113aB1Wkb#r-Wp|0xP^q=HTxXqh9G#i zFOUKgi%)@Pt!H67MhpsU1)JfUtXnNT2tEP)O!CQbRb4S=EVu+v)^26aeO5?PBKYH= zERTdiSGOO8ocr~HT|D{=;MrN@+Zl_T97vhtD=(9L6~)th6#;)9N@m;0^P^d)qM(E7pLr6C&d!34Mb>@G58_C>v&Z^K%%G1pa+cZhsnUO9s6NrX%QM z;4wt-ZSXv9+xCXRMo?A#7*%Nw!;k?T424CYyiJ7_zPtR`f$vTEV9d_`eiW43%l#F_ z{`4P%DiwO{iAlad<#ZTT;hzyGnKVaQ3Tt@#hrwDlWEE(1KU9*;4gr7pOn-$W#ld2< z1&_mGGzIrz5MIG+;CVtsQ)Oj|zhXG0g~sJB?rfNw0t#0KM_}=+m5E;PaS%OqKYjr@ zZ&WG{2)EAz|EG#El>qC8kg%{7{`5;7hjgf-;Dc1no#D%ZlPtSFNc zio>ZURujLY=2&w?xj=U?9jO z3*fSJ&e<(13^@ZK%!*j+y+3kr;&zh@rG^NPauM7RXtvhv>xV$+K@4|Sf-{`Y*1iOe zr-NY_GaOlmy(!2U;So1dLA<7Hg6wu(!x!VRaXJ}E9l~>zk*M|2M|C1}7E*z#$$MjI zjp|iooS+{9GcP$ z*_F6fqS_fghhyOLa&URj2N&g04ua$D#0q0P%4Kl8USmxjoG;!h#01v6+~f>j90td$ z$9f|@HU~~-Uz!k;qvWIJfjo*g0fC_=YVxabG(OR*biz>`W(*D&l~Hq|rtF057RV5U za78C7=%^{vkO#&b1VeWTW-kBdFcLHRWB@*jD2>_8`@% 
ziAhx8)Rgt@tsvoj`Uo6HoDt{4NbRKAmLUtldNoHmh#Kz^i>{osvoj}n1Q=+!k5Q37)$Yho=GBx~~99cfLn7YU&`SyRR&T`?Kk zvKgG2PaGcQ6L1=aK)^aZipOaT-kWn+yg22J@E`ejOqc0XTF4-ChTMy%#yJFdI(d!A2vZREzqhf4cr08N-r%@ZpZ1>2gJ_lg(=Ii4LTiQ(t)5{ zm~#6dfEO?X;KG#U*GtCys2JsH$_m(Ry)X>{ZZcS%ay!n7UYPO(!vU7(0$i9fkHf4N zraaClfaSOFin!o_>4hmb+-k5o<$?1}IksOfOa)EIh`Wh5?sh}?6Uw7ZFy;RamFfG` zxGL^-r`d}sE+DDI(nA0%J`8YS$`d}q3<^``mzi`qC@y;GQGokB25@1@daF%ZW70KNIIMwy3sY9C z1$cluldlKm!j$Ff0oH2(xU@uBZzI44Z87O~P%c*}ub)@k1aOC)0GF#NPxQPlH7N7D z0M>s2VE*?ezt`kn1m(h%+qtxK@~d2EU<3XON1tur{Hz;fp7^R3Qvh51+aGPEP z3=02qg(u`A&xI*R?Qeecd2p1UocwMO{Qu~~3IBUXzX+@Tk01S}30dJObVTWf?}Jdq zdK^h=daLw;Y}*one2M@YGX6-qWsW%JlQH+`~zSO2@DUy<8b) zThR%BX`;`)EQ#a~glo}fWit9gx~-{d`M{8-;H<{BRJHuPeZT@JpzK~iZ_+>>i#Q^Ky2e9m{X~Y-! ztbee2kdt%gZh+^72lz>V|G4J&6n;XIV(C(KX%(mFrvT2Hd>8S>$l10WxC`(DlYqN{ zJAjG6IAA<*8^EU;-+20$zzEQhKrX;v4mSgwW^M!q0-Wpm1DqT^0FrW*)EkvlAPsN> zd?oNT@f^VS$yR_}!tMwFmB4I(uYx@Q-`1r7-{Snez+Vvg0Oue692o=*2E0HPkPR#Z z*rj`URKDKF0z&`|;M;WtKtMGx0N{&qE$}F?3Rnl!0gnOefz<%tw$p$K0PoF8;1M9{ z0sNrEFEOaS-wb!UH>pojpyVDu)m67?%6jagY#(YiSJzVgn(G|#P;QHmJ3V^x@^g9B zfuvRlTK#dP7sb`O@hAPwwJQDw_Sd@G7or6_8r&aPzx&VH@FQVjVwNi{!<8!Slj!Ab z@$&l;9jc9&KbPoiZ4V_H(ZcG5!T2m@FptaiyiZ(xe(1u~yBn z9*xviW0M`-W>%v;)s>C0CF@m^ZreT8!JfoHa%-R(+a zV;0f$dbe0boAKK~&G>Dk$aQL__3}C8Quf8%$+tMrqmL`ol`cuvqiIL^uMPqE-r9?Wj z-t8C`rssp;m{D)$`C^{?Bl>4T0b7<&y&H1z2jjd3RX(oJ>W1MC>k+oRdt%|%>cSDS zaGQfIpvw(zhxORHx_ZE?@9rKKkHIorhyXmt!sRMW+7PQeg3ObE=t`#hHmG6&)mp!F zXhWX3l{#!x#TN2xbjzorscd7sd@h<6ZA^DqFRX>rllu<{kIur_h%8=-pd%aIKewG| zpGnS5s(6rcHo5T-G!^woTDoZ`HbC3#cKrO-ji-k_ z_Ccni$VT;7##6=9bX#N3Fl$uqOTI0hOzZtNP21FE?)}#!=$_Dbx&?nCH@GmNv0(9Q z^Xj2uXyNT}#FMI{g%~%tUXO8+M0L=zo`PTMReLNaKYkcY;xmNvyo%m=(k%v%ZL2%O zdT7qS^V7@UF87_+!wBd8$0z6hU%kBJjhKwP(c(cqK+9k%->Tv3LdVmk?&ENKv9Q`GR>*MOf5y#tYZqJoIDYUH7?HJP8`0v4`+4q0>L6>^9 zt?>5G(!(^9PB!L>#nf}VTa2JP@Y|M3pHby6lIf}K@eb>?`rc2A-&zv;Nhevb>27RY zC7s!>ih1@QYnmw|ZpW#Z+#17A+-cPCa(|k3eB(0?r&kUlZ9jOV4 z?G2|a%*yzam>u=QNWM8yXg^Kfk=^vwj@0WAoJN^7qT=SjNZ;VY`wjl+ewuNjbL^E{ r?|>m5c>tq3=)DuS+JmNO$pO}EhjvT(u-S6dQ2yp*SG8CdNcGVic7mXag-lnL?w2Q)84kqG;iS zV;tff#UTlfIK(j~YD8nMLo`>D7~_q(#yCWM-#!EO%{6bmAMd@j-de9%AN%ZGyLRo` zwNIUMy6T6ib`5*&s`c&9jBimjaNda@`;|V4v441RfS12-uXfs+%(Xt&;IU3l)sa^? 
zvCNw@u%-0?lNjm06US9-!=)#v*^-}ESjurP+i@Jp*MWL~=9@IG{2!oX?+&P2DlS?*P`9)b=p`)aD7jkEk z=N1-~m^tpI%n!>e%+ZZ3=GJJW`dXI4`&w(^18JX!7P%XazNoul`KHoRa}GDcTt152 zzW^cW9fcmbX|{|k&C9oNX3M)H%_ZgBSyWOxv!%Gij9zy@uNnB>aFvE>$}TFQIIM%5 zyf1YXW<^4K~5d6a+5lIz<5==_yqU}LB%^Jiw-X4Mt~-;Pvixm0WLvL=oZ5|p zUu0bXPr=&YBMox6MOSVr$mdKYxy7aui+LI3G{|$!C3+MT_N*0IzEU)3!c2(X6JApI z5tM@Pwjal#ZN+KuTF`Bv)Ec=0e4bgJslc!G9>!$#;;RCQg3v>zHh)3FRu+B|MB{kr0^%%wVQrOF3S0KcISUI z|JCyA%(hFnu3vkr!|;-BCo4Eh=GabO&%Sd1s~({@`rKK6cG)%V)zg8(TVu~pI6U>- z7a8|XYaL!O77Y7h=;nZqQ!6#evueJJm=Wo1GCk#v>&slj;t4XZ*xu z_KQZtA7T$RMqxZ=s56Epu!%u&yd4`OB=MiJdcmmT?AaqBNfl|&5}cCwd{*vc4!9U_YBtd=mfyw6)CXE~x?`{;fo5#9wy_U+gK z*H{do+`}kbhm2a=k%lVRnLYAI5{4mDNKwUjC@x{;o<{x>tM^1Ca8FPSo!9}-SU#P_ zdl}WMAxmI;T@zKjE8FFjBy>U|B|}NWQoLe?@!%4`*;5nsC*bn>sS&|Ts zZ0jiX8giD>QP9)g@Lv0$c*?>Yga9!Bs5e zZens?Vh;n2DsMlQ&>~4F!MgXFWEkPu!|GcY1vQ=m{UAf;Aq*ufKFFwUfGnLo(Il#u zW0CABUbV&ok||zI$5nsv>I$xU(3Q|8P;yeb@y4)+EsgxgEI!yM{DK-OHE4ojumV$# zNts>C9tIoLS4qaY`Xvh5AZbdF;|R(?a5R}n0B|3GqjXS<6MY99*>Mz8EDX;Ano5Mi zDOPw3T#~4XG~&@mh>^d|;#(V4ErZ#Z)=B&TR^Qqvd>Gv1GUnqqaPrFGA1frb;y5Er zAWMT|g(=|3yd&d-W7WICrL(S~Mjm;D5Sc=hAgsj|IVA<*E_)b?9Kds=zvK+U`vEI& zYvix7`nE=)SsQ82ImY|MIf6-t8dkzcS_y@95J#t z5GO_x8=Af*mtJLdNzU+Z%OFb z18P>MX-9glu6X+BpohhbbAdEmY+VGYlxmm>*E(>0p`(J1qoU)=he=WAZKZ+{QYn=w z>FdBz{vkl%9)p9Il=LpKp1SsOA81d7EQ>yAics?s^qOL`T^6X=0q-~sm>AI}^2=4% zLE`LW3zGlYkV^T1=~OQuj&=1yWZJp#6r)(H0DS^Q0}CIAS>qxB2LRT^LX^gg@o+TC zc!0`(MoACTD7Hf`i{%B>+EhUex=MO4q*RaO77ZrLdPGT{B2!G2$kV$A6{1v+Ad2mf z=VBpB@>e*ie_Tk#mXbnGSx%H9+e_w&Qhln-6Qu}YVH4%A%W|TmhpZ6`QL^&}K=S@= z@Q(`a6tNJciUATUQWevM`$my;uw0uV*Ak_P%anOr%Co9)U$!J%qrgfInq*B|N|v)^ zIZ;Z75i(Dd>hoouDA_5Ld7@NbB=aw#G~g1vqKhiHXrg4OROX2~1EfW{R|#lU|K9q8 z_`hHQ5}hD9dHO%P{?PpQ5WABob!fZZP?arD_5YpgPds(-Zwn9!QE1(vLXK@is=N{RpL>yO*Nu0PwCMmg&Qo_p&46r-EOu9;HV&u=+fC)1eaVm~_d_Y1Ea z%y{~CJK;j(?9Z1rB(KYMY8IbT)Yr|r=Sg^j^U}-u9X`)$j~U0gCe`h!Iv@6C^ucSJ zw7Cn?SN}Y0$49WN^iZi71+_2kyyAm(Bel1mY`T2(yUhAEH@mKX^wO%oSNxP=_1OPW zpUa2)H_Pbb^X* z1^ugY8-^Q(RPk7)6vI<&4S$yXO~tbZjw^YcAV3Jk{22m#X>qM?Ac;^LTRi z#s6%U_Nz7RanSzB?LS%m>pCtt8JEz?BYR7+ z+wU%|&bj{NXt_D{^witCOwIHs#$_(~t~9Y$wR&#C{9|w3IvUK+?yzU+tmTg;2%|dr z+1Q<7YxlQ>1H-=j$$Dr)X>3MBz3FcO+9Ol$RkfSa?r_=6vwL2;_D0zS=ipzS88WP< z{rB&L_x3baZmGGw@rg(G>5EG~OaHss#_&vA!yP~LJ|FBGIx?6qf0E{RefM|Gr-b?j zoxHlXeo6hby(2@N!e2i)>zi{8eWTn@?YAxrUA$wr!_0pEqf<58rj&iLqrub0Fujf` z#eUnjH)rnbJ86Hr$jvpmv&ts(`{LC5TG(B@W!;y0`rMG~_a5B{jp*2JmDg)-GwK@` z?K>5@YE%2tn2)C2)0>QcJ3+tDD!DdGF-lKK`=h!a7w)cJ?=e_id@e$NqF~+Ttd*A* z`>i;X>2dSeAIUzw)}{K6zr8qkP5Sf|zI_HpjJkjCvDR%z%+=&W%dhum7iN zK7Ia(erw|Gwq$k>{n%1naXztEdBdhyvoSwu-t>&mT!+^`o@do~zOj4hrn8r$0$V(> ze0tyQ>52FE_B#L5J6(SCx%jD#VdVu>G(0Fgqd>p;@|Rw7s}J-`|81^e)1Ay+M-D9Q zFxP$9vA6>*Ctm$&Z(rvRznRi+>yUzFe+>O+&$3*v^9FtU9)?Zc)2r7$w`&#OWXUKM z`e53NBaFYco|ZNx#PnvY6nme>y;~lf9_GDlWkBcLv6>4VUrs(#6m|UO z1nUnE1~v13z1c>)kk9jC?HgCPIJCKue<&mKmu}r|WSt4RHu~gJ*Vd1B*%)SuRZ#o1 zW0%_8SiNj}j_<0ILr&c)wj_j=`Gj5c`Ds{}5sMePzWL<%nd*U@W8{V@r3o(q>We{7Z)3kd>v9=8kO9YdEcPq)%Zd*;d&FeDbizlSA1L z?*;1Ub)Mc{=uP`Pa0oaIdR9Qx6OGXR|d`aIeRd<<*=HUSjEiNHu;6fg$Z1MCIP0jGd3fG>g5 zz!@MPD4>tS5xAfa=t00hpg-^i@DlJckPIXMi2$X}RDiy<&H{zNV4w$(0?>TX{LBO9 z1Kj}npj-wl02Trt0Ly_zz{>XcM_;D&RaycJ0j2>ox79!-wL+<>WsfGeRg-wq_7;_o zRaOe_1YkTsZdC%40dj#pil+l)iQJ_pIwdGsoDIwZssMTp%><}Dxvkiogln>Uj9f!G zyH%o2UEYk2d3$7BQgAdgO!c8CN!u*Ao0P4MUT!nC@R{-YD19Wp)>++DL*O>q&OytH zPcIl#ze0VSKB_Ym-1xcE`~sD|ykQrO+Xm?3Zm4*iFXzJ)Rn>bC&|Hu|k(*`PAidu((Z34nZ)j%s!A5U6>ik+V37c&n2VAK`O z)q`8~X))=ziif-84ySB$T2e<07~1{4O0j9Q-oSC;Di$=|FF@HRO^E8%-+$uOO;Er< z^~m~kmNs3BAKORZ|7cb@y*GaT{A#)(K-t}$^})6QPsV>aK=e71I$OtD&Cv2YSmKP_ 
z0A*8Idu)zyxpM19Xc-VfbbOxeonhc_vb*?ymxazW#3Ha;>w7k0=?kheh8{>r! z;~(@M-Y-DxD@Kob)Y-9yxqhk`JJzwrPxZ1LJ2*3%pTXu-X_;Gg~cRX8VZ&)Uuq}9?l;86&Go$Nvd`6p z3Hvosjx%>6+~99w!^X zSkhR_IflQQLDU~%1#`6gH*Crr12($SrhBt5=cxI&S>v3Y2;7Xh29-2B{04S$u7N+w zT$wgN**L%bAnWpwW|~f-Kcc@`EQM*&{7u%>a0gq@!Zic6;_Y3*is$+yB71OC`ioD9 zio=5=e|MVs@oX50(s!akE8F8=`1DL!eQ)>=VvT`n!xFK9_B-2@g>2)IZ+Ae7}IdcKcrp(>ogVkDxn>pyt^9rcncKZn6E zO+R8JCTeG8Q+`HHP2HYvj$f7Qapx%e`};dC{w8sN-)mx|#3kV-Yq7w~DLpb5kes8bHiTA=N z1Mz3zl!FOlZ|9zx>h)8Iil+)gC%geM-C}KL<-o$6h>WVXJG;iK_zBU7SF}D(Il>?m zbvbe?t#&tRI_slj^qsgsewHPP}q z)J|HVl+zC4+{aJ`$!9Yvl=BeO#tL)rPC34z9GS3}Qj zI-;D{5J!YMj#CbkC?_~jgESN;B3BF`r%aIt+>WD)rh_EX;SJZd^lX-wa4{DL{dGqC z`PC=nFJFD0=Spy$yeXZDUC2& z3L>)d3L}t(tZqY~2W4Y~j@TR>r6C)-wv|>fh6?3UN7>t%nKrb1r8t-5xjHG;^`*IE zC_77vN{h0K@^P76R9I-vCO37K(vrNwT$1G%jm*L6vl26yLX2UQncdwN7-U-`S$(dl zi()AaZS*fNC~N+?A6>LQmZfYA34HF3q^UTki;@)5<)cVat#~R4Z7DNpF!F&WovEb6 zRNlm8m6w`LO`!b_55 z>m|1NVCxX&`$GAeP`(`=$ZAtBi?_}FgBh#=pZ)a7-k{OQ)ZXdS%aXOzm&sZvn7=aU ccSX{SydoO>@Q~is6U!PNiQ-ocok>{mA7VEYWdHyG diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 77f3a21054a..b530e6a6108 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,10 +1,9 @@ +//TODO: Import from @platform export type CardID = string export type SocialID = string -export type ThreadID = string - -export type ID = bigint export type RichText = string +export type ID = bigint export type MessageID = ID & { message: true } interface Object { @@ -17,18 +16,13 @@ export interface Message extends Object { content: RichText card: CardID version: number -} - -export type RichMessage = Message & { reactions: Reaction[] attachments: Attachment[] - isPinned: boolean - thread?: ThreadID } export interface Reaction extends Object { message: MessageID - reaction: number // maybe string after emojis rework + reaction: number } export interface Attachment extends Object { diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 678069a08e9..427bac36d49 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -1,18 +1,14 @@ -import type { Message } from './message' - -type CardID = string -type HulyID = string +import type { Message, SocialID, CardID } from './message' export interface Notification { message: Message - user: HulyID + socialId: SocialID read: boolean archived: boolean } export interface NotificationContext { card: CardID - user: HulyID lastViewTimestamp: number lastUpdateTimestamp: number } From 2097fc823ac61165f02c4883d5e0e62a0f5dc147 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 16:01:13 +0400 Subject: [PATCH 005/636] Update types Signed-off-by: Kristina Fefelova --- packages/types/src/message.ts | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index b530e6a6108..e9b7fe9736c 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -8,13 +8,12 @@ export type MessageID = ID & { message: true } interface Object { creator: SocialID - created: number + created: Date } export interface Message extends Object { id: MessageID content: RichText - card: CardID version: number reactions: Reaction[] attachments: Attachment[] From 5ce61bbd8dbd9e899ae9cf1f048409ddcbacedf1 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 18:53:53 +0400 Subject: [PATCH 006/636] Fix date type Signed-off-by: Kristina Fefelova --- packages/types/src/notification.ts | 4 
++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 427bac36d49..5bfd1c20629 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -9,6 +9,6 @@ export interface Notification { export interface NotificationContext { card: CardID - lastViewTimestamp: number - lastUpdateTimestamp: number + lastView: Date + lastUpdate: Date } From b86aba806d576a1bf74230ccea1a1c156329f6f1 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 16:51:51 +0400 Subject: [PATCH 007/636] Define tables Signed-off-by: Kristina Fefelova --- bun.lockb | Bin 50280 -> 56496 bytes packages/postgres/migrations/01_message.sql | 19 ++++++++++++++++++ .../postgres/migrations/02_attachment.sql | 11 ++++++++++ packages/postgres/migrations/03_reaction.sql | 11 ++++++++++ .../postgres/migrations/04_notification.sql | 10 +++++++++ .../migrations/05_notificationContext.sql | 11 ++++++++++ packages/postgres/package.json | 18 +++++++++++++++++ packages/postgres/src/index.ts | 0 packages/postgres/tsconfig.json | 8 ++++++++ 9 files changed, 88 insertions(+) create mode 100644 packages/postgres/migrations/01_message.sql create mode 100644 packages/postgres/migrations/02_attachment.sql create mode 100644 packages/postgres/migrations/03_reaction.sql create mode 100644 packages/postgres/migrations/04_notification.sql create mode 100644 packages/postgres/migrations/05_notificationContext.sql create mode 100644 packages/postgres/package.json create mode 100644 packages/postgres/src/index.ts create mode 100644 packages/postgres/tsconfig.json diff --git a/bun.lockb b/bun.lockb index 13f7117668b129d6004a6e1424385c701ee94437..2e06ff644fac096388ecd101b011db49d9bf7140 100755 GIT binary patch delta 10301 zcmeHNd014(vOi~-5e_?u!mtRU2&fFpD8r%*Xs$7eYs7^l=m;Z0*}|a0b(t}0R0L7m z9k*x_cX3Z}-{Nl6C>o7R)Myg*dNtRmXvF)gbLQZ@yxjNQ@4fH+cYD5?>Z-1;uCA`` zv()){s@mDS>0vqfSrH=(xp59sdl5_Fz0F0ifuuJ=ip;gcSTx+QTtjiOF^d~vEFDGWSA$7% z%OFQ)8cidMb8=0b(e%+sV^Jx$7LC-;Xeul+V$^cTH3l65Q)!xp%z`5Fg9UuDKGj9! 
zx3=Vh;RQyPap=fw2;`Y)IF2)bPXmr|l?FV^ctvQuCu{`6@}a>L0TZ+BL6g)KU}&_2 zV=2fC!*J1RZVPB%&`X&?;ce~(ot~gN0h6@GI<`A-GthQcetQ^X+y0oB`t^fdr2Qsn za@NO9q)84pMU)!yaydg$cA=rjWE=%PO>%ux2`>YNKC>mkTk-}im;v6ag(4C>08CD3 z@56EEYgr9i4O|9HhRNRa1|0;t9)?i+CCsfQa1d}H@IGLuuslN}>FwYz87u{rnX|}I zHONmgCLvBY?@@dw{7u&sdagjCm7O1JRE>G0b2p{s=}D z8AqCOhLv)gVGS*R5v(Oc_5!p4dRUA$iybhUEO()VX7CU*paBt0 z#l<i4K(lApjV92U$E(Aw`AR5yhp2MpH?cwEIhnjrpWu z1Zaw!4}i&Uy{s7WI340R;KG7}T;M{334S&e=H?W`pIa9weU#=SJ9;jA|Mkw#{2r#h zb2g;e$>5mB>%L0~PQBCl*50%!o}O!W{NAfNsm-g*MeBb~F39?!vi;wi{%ZQQyw#FT zYgXNBJ-n#XNegGn7}M^%%IiSTxH8FOw&PLJ}^YlZs5u^Pp1(^su${Dfro$iacHVs#FBAq`>I9Z05*9AR!mg1=6 zV_B(_o;R}^{9k8vPI`rJBbMNt#AmQlXT7kr5dugQ=ACs4cYF2%O}$yXi(XZ3&v8lY zm181*g4MX_g?geP+K8n%>x2#t92W~p#U{Aw_^GVaRWBR?Pm%ynP$CNA8YDWfcsIQ; z4m^_JSc;oY*afO9CgQO z;;9qTKqY`uQV&%nsNQUxYa)Mx)il-%9tf_s(nuaUp(7}=#hx7ybV3;@^0`8s$u&@& zK-q~@AkJ>`gB?q8(Fq@cLhwP7ytuM~A`jGBDJ7fWq!WU$$=`ycJ=vzp0u@Kr2#Zmp zkwq(opFq)Am_PJsarTlAdB!{G_yQL1trt##_r53rpR2+=X;!ar{M*7dM>D*5(S?;21RSA6r&=vspKzdMuR~~{b)_TB1-habJS?w zn5nZ))fp#K0_*6OD2zi*2fnKqm)lsKzn*`<;sf+TH=J?Z&;q^i%^X$(-Zk+0iX)Q` zy0Q3Xdes6PgZpB_bzE`UQhZ4G>K~(@HPgM@uYP5Hl`1hvqGz85EAeC^`lO3mB3z+P;TbO{kuaVdoC% zSYd#`hp>dOB;LSE!}Rr^02SNp5Xv683C?JUfEW%j&Kb-$EXrGOz{N)jGZ^VI4ODN4C?H}liL?@&!6c9K z4NNlMJ4}vZiuY-tDE8nPQ0GC>8Gv|q)_JJHtwW$c6;d2ZW80(QEr>O^WV)3hoib-Y zk^jUU;G$|RQFc}hZN#Z3f-A)a)>2hK6zk}zW&0hJ2AU@UIRVEahvncR0gV9Iw_+ho zlf{nWXnt^xrQvvpsE8=9ffuasOh|4~R_BK=HO8^b?6@XfITIpR@dXJUf3rvMD$?pS@ zi@pX>vBmbFPl?<=$7Jp~Jhh05zO@SeJD3JO4H2i0m|7Vx~XO~Nx z>i=Pvcly8Y@~umvogxIDt9Pr%&Y#F`8dBNc-gh$7@hlhR=+tMI-aeRK|3Rp5skZXe zlKsi6GaVbprxf&dHSd1aX1~*tYnp9MUN1VPAM28|d3VLdu=ipP-dwNFp5OnozYW{A z5t`)@Cd*2Jm4)q>f4X|4`oXL9*N)!KsQLVE$2BkBTKS{pe!AIx-^QNT4)<%E-qZ8r z{s%4<7Oj}{c+|j(9e3gu4s`8Xd#l;5W}BP1+9>|SR^V!`pmic1(*Bwrn^(m zPJOV`&{%U~Y{uf-#fgg)pUqB~ckKOpM}zpv*1MO?SoUJPFshx8jo#_DdVgCmAneS2 z^P%y@y7b5z!}osbBU7GKgiZ-PTvGn+?ze8fTXM-M=+SF!y4kSr>BBZXJoJ;R=R8>V z%Dr=0Rngb|e>B=Crh5yyDBFJFbuq|0WMmLu`l^r5tzEa9P6_dDcJjumn#DEK_KXa1 zY}4!Dj4K!R_l|Zuz0bTLq-xu)M&*5dN2fY$nNsr2w*7R=C&xsEtQAXvcy5ee;&+QwJ>52}Jb?_ajuyXSz&QBR+|RJ(4Ay^(xq*{y!;l8s`zuab+h|GW`>KaaPo&gdMn z!!*-!F|m8;{`ESeJ~wG@S^C#5!)sp7H9L5`yKBk%Z?8rB2fQ+U_0+Zg#3y^YU%dZe zhhLgp{>nx%-Kfb$Ij~K7o@T?fGoG_&9_ZWux7phD4>NWiIk2SlY`0;@;tm8(xN(0^ zZ>KM=OzFF6aNg42hy0^!Nw()jttPyScD+~G%vEpnnmM}jlZ!Izhu%%g9&TJ+a(eE{ zjmk~?olK$Ig8wouZ1*5_bxz3UU!KM0`K}0fra2vW`sK5->?!qQ^@;gcE3VeDSYh1F zHFUJeOE+6q3ao5WyQlj3*tG>+^FT9+(YRZk2-a_GxS`uo1;%IacS{#r;TD}tGLgxt1a(*wsdQj_sWxlPv0vv zC4`kU3A^0n{;&=ssusAs_v-k$nFBccsI^my7hJFHckzpXN6rlkcZeGyoOK;CzHvI9 z*y#C7DaaX$u3N8VJX5I7;yFvebLp32;{Z45jj3VllA$+v{`S^!S$0Y++HpMgF&X(v3Zl%bTU!{&{g*T&HZ)ObUnS*oK)~KAXy{W>M zXVqI7zbG6XtTVMR#($h-u752Rnm)xZTp~UV+R}9Q+n5+A@-03+G^`gnZ*KRMpyl(H z0)TYU9W9-eBu|S+yiHaUexJ*+my<%+&ma4T)15KhIqwG?1RMe!2Eb$Vc9-7lQW6Bd zXg;yP9Dy2cXh0009e^Gy=n-Q( zU=3g$fLvz*6az*AN&)m>u^Vt6a0+l5@D1P$;4EMSAQvzk<8vRNLU*VA0q+9(0y+cU z0_Xwp0K7io=!!iBKu;^@0C|9cfWH8e0klrEE^`2L0Vx1_09yi>2bd374p<6U0QeNJ zFa&>#03QJ;Bx!+ZO)CLm)C;9#A$u_?n4h>cVDd{v2AMk^Fb?nufXtc%prOeGx;RY( zkR~#hf|3p=(l`T90iX*_Ie_}pXsZ|E4<)La1y60&-Var8cvXI*AJm8!|b~29i95gNe z>XJV7Jlf+l(d{8{m%W~<_OC~~8tua_>$jvu#NAQw)@II|XYHo7ZlOcelL9%nWZIp! zZll8Ov4YQNr-`9~R&gT+$WvHi1s6s~rx?HnIPfE_zn9>Zs{SruyLTH)&_CwBL{{ulZTGO7!V~#**UTw zXQcAgEV@$7uVws9FE+R`P!ZzHER{Z@1Jx|6(%s3O-{CB!PE*-wtEki=+fPo(CO5gU zAYq>aX{Q-L`&l+|rq*9h%Jz&rxI%s6kQdt9NuztZvu|g{_{rJYp&zyEnC#>hXH|f? 
zgtNd|+JCizXkW_mW~uotY|1PxvM7DZyx5spD!x6dowXf~n?75skXDEPm|dQ&<(DxR zruLK5un(ROy*9Y9Lp#wP(cT1>!qn*g9&6}W&epIt4w2YG*uxf9IJ-#@qKD(;gyi8t zQO_L9cT_@2w5Aqqc?vFK$Afc2JWb8bZ!%&ieAup`768sc*JpkS~jx>%;5W zhBvovg;a{em8T|J@7-v}S+olg7SZsO58++afj6nc<6i zV2_Pnq~%Amb@=bVZY)x_m($WG8inua9yQ@f7{w5NT24+>k9m-NdaCFBUS?8BVgj_5?Rvd*p<- zoM{fGtA= zb+j7sSd~-iaxxn&G1gNfR7$$b>27gFV<|4Id3lSR5T`z7sDpKK>R!&7E3Ll9EfUlT z*xM6pciw9{xaA2iKB=Qtj59e)FXzX}Z0U%V^Z0TGo&1j`?0z}ZFXz}5W_~>NLX-S4 zfSiU$Cu)Il@@E5b!d{#a+R$SCkU&o1qXi*HEq`1hcbFYV4GkX=NZI?QtLREkr6~bY#U`{kN5gFvu%;IzLAWGW-0qyYW_r_Rq~JHM8s%xEM;A={~L28N!hrF zXi1>I^=lBRRs1jneJMa_GS+WOA`C@EhSCOVXlb$0&_HDwij55fz7#PQjWXoYjKbvJ zGLy#mBM)}Ou+3|ldx+5k2@yviaM-6`G;h%$PovoOCmMaDFbgb1^9DK5Hd{fF0sjT< zYVk%&oR6)P$cTs`DGw|QeegofQjKX=hBQH)K7Wz8R6`huF7^_?hY|VoVT@>$7#&Td zFJ#anRWK>|l~o>WB3zL|`-dxR!@(B5@}0JPUmfs0J48n;C@w75Oh^k@WmmG$^)kcrG4lB6D**dp8x;= delta 6899 zcmeHLX>?RY7Jk)9LtZDHq(dOQG>`-$VM}LeLMH_B08vRKfs6zYM=&7`1VRYufWi!> zS%gGIh0DGK0I`DvZ=^{spB zR@JRrw{E?z$DR)Ps6ON&SMT(~KR@@)`XA%wP5LsVH6iNzlx;O*uYcpoc@sZ;A?4KN zKkq)(s>rf<#khpJZxNyXB^tESJGY9pi@>uy zH%M;)Rl%2gz167|KIH@Sivs^PD9cA!4Zaza`!#|_jzr}I6e2*IL9YX?@TE?zoK>cD zYi}qvK_1TXsa3OTyo$0W$WN)LDo>qLt(1fr?FBSc#MKpGF4kTOEjIToc%E)bWod1# zw_KUwoi~@wUj`xT-3dK5)9ag4TT$s#yuR6UyfyQbMQG%HUSD;M7qgZ_uLJl?kn?g% zC(o*3Ka7W*txvW~c_0692x1{9_m<6>3Wb?Io=#D6Am<5hi8LnMO~R6r^9kmLz(1|j zmkJZ7o&(QXkHdD1=oi=uHluX9v|70rd_4HMRBDNW~EYEGqt+3#^)`9oEN#IlR@tU#dvl8 zm^i~5ykVvA-aZV(it9ny3HHv4g1&w~cn!1+lnpbz`8f8+XS z&Ab{^>Cj_OOeyu%&W2Gn-Z{RCDf5&DSi{?20BhNhHK0+0P)Q7S@Ofv>_0}j#94tng z|12y4E(X4qaUd8TSrh4e>guF}m6b9%VfXIiNR^8e(*DKLyb8 zGFsS!HaZdG5$!0ygKixIzYnBWBXh+83Utube}NwYoiHly;89a?JO+Xb(wHyoPn**5DS1b8-Eqt>FvGfOMg!{B%>tRLfrBh#=C z5lj&taTDct*42k0`=iu=&#jz7Mw2}}R~?O7e>si=ak7N+HC-H{22EEjI5fF(6s*lU z7~D|V<8IVSjM3r$H>e?A7x5H`*VQq|8$;28ebqc-HRU^X^)zHxB3O;=Xm}c4fTw4I zD}uh^lK?p0!%!I&m%*{Wj1_r0nf-WA9%4><;dRt_-B_vJV|C&j4y3~9T(tx>59E*7SIyUi&#n_D<;DQ;$=~4d;PR9s{&?JxQNis%nPmUyybp|-Y zH_xMnID(fP-AzA?Lyhe+{Ln;d3ZekAF@`M+;S*}6ZT)7BEP-$F%326*S(x%DBLS8d0t(sF zfh4DyvcKP$`(3a&-jL)F3dwv-2|3Cfvr zI4uA;lokWH>6H5b)>{s6VaoG8zzoU*ynYru7^Dw@T7lI7cU%i_Vaj^zf^>b5ZUE)d zma^VPfctL-m~RBQFlG5xfaP^f_~CLb%8J_oHfVQ{?gQo0mhy^Q0M%( zpv*rBK)+6T8Wk4&)etC$g8Z|fT$pk@m;e9i^%%#SC-@D=R~o^``uEfO`|17t^eR1n z+v#n&c^Ch+bQn$7xqle!6TFu5;$*EWlZui^+3Ni;)sVxf_&q_(s?pl$i;f@42Nx znIDw(IBzZkSbrhFvX7)4(Eefx-Alt%&Vx?@;dGul2nSuHS#_LS9|btw@h!qv8mGyb zz+}J+OaZ0=lYof;-?bBf+W@|&@p;7`)5<7dG*AfeSHR7{jlfO7P+$-+7~rgiWXK;w zbxIm4=>UI;xB*V)oZ$Zg@I~+lzz*QW`+!>DE`W3RGXUSSGXTDS`O@bPgc5*1Ecjz% z7;rt12XNZW0hR#lz(YJLU(n-#;eZbCwYUNxU==U~;45wuuo_qcYyma{Yk{r6I)Jaq zYT$N&cVZ>LcW>f7_(ADhZ&2HyIquB7#QurCiuqpJOdVSKt@+Mk8xd??wUHLB@9FFe z+%+~H1wNt zT7SGGHT!*w$aZC9x!~c?6?$|-A2kY*;ebLJoku4>{kTDMnh!7UBxy0W!I2h=n3(R$ z$U(1tl(tdJGGBVUcYl0m&8_Emq22Aui1=|NG zN(x=xScot0!cA^5oEC1<@cOV3zvXm(mqxE`DiU{6*Ug$(N}kPbv4&>gw~4lFc00|- z!m;rqP9$7tfwktGC+O47+5q#_F~RcBiN&9NR*iPIE5ntkTzQx57=7TG`43jDlRf*h z2QFG@P-CXZrCE(`r}>skZPH(ESwK|w$TkkFEnaWS96+Aq1kIvS0%N^ zz?i$!U2;esOUp8!K5sqznRU}UpZ1btWw>x~Y)msP-=c|b zv}=nSf4$D4UP2eQ6m~J+H`8|I9r@s%vIXb?S96cEDoxxvQY@prTQ#wWN}FQo^wtEY z`Cz$j)sPoXJ~h5GI%K)9=XlbTUhdZv)8l~%<5e;Ei0Lmqn)KpL>$Ho|?GIfH&f^)Q zYP71U$Z5V}4(WJq-N3_PYzCr;HFwjcCU+O}9kVFy=r-+GYpfhen)yi-jog+g9;do( z8eXc_Z*yalU)`oT=OUK*oSd%j`BUC)^(wls7ZFDHlGyHcnva`bpXt%DY0rDQY?sE_ zY1HyyPZG3@e}A4v zV@F@no<{9R5--v09UA@;&v+!BBOP~-|9@jk9_ayooLD~6=j+eb9w<5%W_X2XH=hev zJlWb>(LJO7>S6y*qsX0|MI5EqUmbq) zRC3PMG51r_-o92j7|q_yX886 zzrOc)%c4cz;QlhRjG(mLTGr1;y7_{7U-_y7ht32zJ#Z)?K+|c#ZVktN?QXXiLa*R= zDP6``=40;QuZ|oz7M(Q>Jv=xAh!pdAH*|dFm{;39!bu7llUr6(;U2eWpoM!1MIs&F 
zqh%E&8gHk|+V^fO-2OPHLX6?Ut+EC!yp6dDBPPyYS#Yy#$z+S#E}up}5|3?XugKjM z(yLdpvQ$MvKffV1QRaeQ*^Vn6Q>kRH+c~_4@nrw~U5hWgcU_|v3x8(IKpjBa_ZH%q zU)bvwqv*P3w}_>veVWsJBR*rFZ{Zfv7QV!{J6YXbDD3&+h18XZ_D(*S0GDe zyRuvv>B<6nzF8AT>1?x>Y`!e7NSm;{=fS}Unm)J~xV+0(CraWb8n}NL`S+*Cck5!f za7lL7-^5*!KaJ*_6@`w{vHdwMeIHBjNp!}c9yP?Lj?%s}Jz}n0gsp~nbSr&wrpo3I evh}U3)e&u`;_JbdQ>~cUNuQog!CSPWiv9y0IQ$_1 diff --git a/packages/postgres/migrations/01_message.sql b/packages/postgres/migrations/01_message.sql new file mode 100644 index 00000000000..c6b56b210ff --- /dev/null +++ b/packages/postgres/migrations/01_message.sql @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT EXISTS message +( + id INT8 NOT NULL DEFAULT unique_rowid(), + content TEXT, + version INTEGER NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMP NOT NULL, + + PRIMARY KEY (id, version) +); + +CREATE TABLE IF NOT EXISTS message_place +( + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + message_id INT8 NOT NULL, + + PRIMARY KEY (workspace_id, card_id, message_id) +); \ No newline at end of file diff --git a/packages/postgres/migrations/02_attachment.sql b/packages/postgres/migrations/02_attachment.sql new file mode 100644 index 00000000000..d7753e0a37d --- /dev/null +++ b/packages/postgres/migrations/02_attachment.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS attachment +( + message_id INT8 NOT NULL, + card_id UUID NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMP DEFAULT now(), + + PRIMARY KEY (message_id, card_id) +); + +CREATE INDEX IF NOT EXISTS attachment_message_idx ON attachment (message_id); \ No newline at end of file diff --git a/packages/postgres/migrations/03_reaction.sql b/packages/postgres/migrations/03_reaction.sql new file mode 100644 index 00000000000..3daa69d7c87 --- /dev/null +++ b/packages/postgres/migrations/03_reaction.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS reaction +( + message_id INT8 NOT NULL, + reaction INTEGER NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMP NOT NULL, + + PRIMARY KEY (message_id, creator, reaction) +); + +CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id); diff --git a/packages/postgres/migrations/04_notification.sql b/packages/postgres/migrations/04_notification.sql new file mode 100644 index 00000000000..8daba799215 --- /dev/null +++ b/packages/postgres/migrations/04_notification.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS notification +( + social_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + + read BOOLEAN NOT NULL DEFAULT false, + archived BOOLEAN NOT NULL DEFAULT false, + + PRIMARY KEY (social_id, message_id) +); \ No newline at end of file diff --git a/packages/postgres/migrations/05_notificationContext.sql b/packages/postgres/migrations/05_notificationContext.sql new file mode 100644 index 00000000000..3f4b05678bb --- /dev/null +++ b/packages/postgres/migrations/05_notificationContext.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS notification_context +( + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ + + last_view_timestamp TIMESTAMP, + last_update_timestamp TIMESTAMP, + + PRIMARY KEY (workspace_id, card_id, huly_id) +); \ No newline at end of file diff --git a/packages/postgres/package.json b/packages/postgres/package.json new file mode 100644 index 00000000000..bd008cd7ea3 --- /dev/null +++ b/packages/postgres/package.json @@ -0,0 +1,18 @@ +{ + "name": "@communication/postgres", + "version": "0.1.0", + "module": "src/index.ts", + "type": 
"module", + "devDependencies": { + "@types/bun": "^1.1.13" + }, + "dependencies": { + "@communication/types": "0.1.0", + "pg": "8.12.0", + "postgres": "^3.4.4", + "uuid": "^11.0.3" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/postgres/src/index.ts b/packages/postgres/src/index.ts new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/postgres/tsconfig.json b/packages/postgres/tsconfig.json new file mode 100644 index 00000000000..49e05cea1ee --- /dev/null +++ b/packages/postgres/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From 9ecdc931e461e2dadf203ca36ab80b29668dec23 Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 16:55:09 +0400 Subject: [PATCH 008/636] Fix deps Signed-off-by: Kristina Fefelova --- bun.lockb | Bin 56496 -> 56152 bytes packages/postgres/migrations/01_message.sql | 2 +- .../postgres/migrations/02_attachment.sql | 2 +- .../postgres/migrations/04_notification.sql | 2 +- .../migrations/05_notificationContext.sql | 2 +- packages/postgres/package.json | 5 ++--- 6 files changed, 6 insertions(+), 7 deletions(-) diff --git a/bun.lockb b/bun.lockb index 2e06ff644fac096388ecd101b011db49d9bf7140..cb32c781b3eb1499f9d4a358807220d00e2e0ac6 100755 GIT binary patch delta 1228 zcmc&!Z%9*76u)=f$gwW32p5OY=w+{T?@BYp?_n!0aefNz_ zN|(l^J$mii&X%}^*h`a2TlHqXu)5}{8XLNjnS5Z>k@wlB`N7SENyNZwO_sd&j=D55 zi=)O_XRoz69rkE!Lrn-zNb@rCLF6Fv9%P4IS6A<_IgJ`Z5>UJ9Y}KVWMT?r{tB~W7 zd0|VIFB1y5<`h~5Tv`r&0)=TgT+DGo&S0E^CEi5yVHSON(U%?Z&8L~TkEk+HNx-5o zarwC3xoAErFRBbwGO+L_$&?1mM!m&7>6ZPMi=kP%aJ06L|->pKyj=`jIaZ1N3aa9=$jBL6L5nS zzQBDt4UN9T(C-_JVTUvad3_4F*q2L#Fwj>bH}s(s?Rr0SfLhSpiIWz!qHaO3HCZh; zg4Jn5+(NMT=tgE8vd+5@9f(c@o6A}l>Bk4!5kK42K8VdXwl!petQnhMhgf^w?4SEA!VF>t@)CjYFyS&^VX@m^=;Zz_OnghA=CDD}?65a&7YWTI3D!g1= zmC9YAXl{E5hQp~zguEIrdQ`mg;_1qT%AU0%pED>X7{ag?e#l)Fe?JZw3Lk~BWrch~ O^vr~@nhMsJr~Uv82{lCk delta 1504 zcmc&!Z%kWN6u-AETVL_9PKH|*=2$i;U`yM%7BnfanUL9T7&SJV+suUigKQ-hwCLPQ zbQ=f-+Qa@ZL-?`}L@6*InFD8n3!7|%t;qgSHp4zFnHm*|i80aQxx7~s6F>OPoBVqE z`G=dW|W<@DFO#Ta8QAv__$EVQf8K8`jOZLM2Z*HG*9 z2egE&L>{Pf9$*P-khAzbXw%SE2)`)G70Rc6vTuuRTzgF@;)2=S!5l)2=u8EgW}#6S z&M%=4;rIL!t{l^EK@Wv8^fW_5K?yg5nsrHQEU<8^F>w(x8I+}4xMpN8it)&%k(rPw zpp3W3Qu1Kr8LP59kB}Eogs(ydnCFySizLXPafOAufy}&=f$|nEC0}H6V0%4}>{&5; zwuSnHV*M+M!^^PCOXAD1{;)6bo`{Zk9esm4>V`r2kRwU3K0nX`w^G+W2J8kqB3?uWnc*t_ua0*e7-vM}U%9dH*}$ZMk@%y@fLH~*fO{?cmp2ZGfJN%&5tQi6C^L4c{SH7l_wE+)1~r}CH>LE69= zN#EWssdBV^lBYw`6ljl0T3FK9Tr?qAd_>X)rA_~UaKu5!;9;aEgU#G!#Mg*%#1+I< z=z>o(*mSUon?zhk+(5)&XXjQr1>K#MD(wjra0R+Rb$q>GIIfhxd=&W+#D@sBE34)s zL^Fa_!>VMRvdURUtk)2t1<{IN&$A+XyYL5>VE~Sv$`J-nJxbRk-*57xUr+ZMje3KA z6P)a;Feh)2_RT*(vEW~5m7!N}*5iGUykja)_dP20Rn4KtgdY3?jk%B+T_dyRf;np1 zT!EiCyIaPZZeHK@YfK}fhfJ8>KuGe|*%r&>cE5M~5PI0g=&6D3sD=jM^Qc=D91v?B z9l#@91#R)=u(f~VGEoFif2nF-ih#KAS${Z3UFG%H6*~R<3*Yn}D8%+)- Date: Mon, 2 Dec 2024 18:57:44 +0400 Subject: [PATCH 009/636] Use timestamptz Signed-off-by: Kristina Fefelova --- packages/postgres/migrations/01_message.sql | 2 +- packages/postgres/migrations/02_attachment.sql | 2 +- packages/postgres/migrations/03_reaction.sql | 2 +- .../postgres/migrations/05_notificationContext.sql | 10 +++++----- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/postgres/migrations/01_message.sql b/packages/postgres/migrations/01_message.sql index 6fc4306cd01..8aa19866f98 100644 --- a/packages/postgres/migrations/01_message.sql +++ b/packages/postgres/migrations/01_message.sql 
@@ -4,7 +4,7 @@ CREATE TABLE IF NOT EXISTS message content TEXT, version INTEGER NOT NULL, creator VARCHAR(255) NOT NULL, - created TIMESTAMP NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (id, version) ); diff --git a/packages/postgres/migrations/02_attachment.sql b/packages/postgres/migrations/02_attachment.sql index 3ce48473231..b74d0ec0eda 100644 --- a/packages/postgres/migrations/02_attachment.sql +++ b/packages/postgres/migrations/02_attachment.sql @@ -3,7 +3,7 @@ CREATE TABLE IF NOT EXISTS attachment message_id INT8 NOT NULL, card_id UUID NOT NULL, creator VARCHAR(255) NOT NULL, - created TIMESTAMP DEFAULT now(), + created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (message_id, card_id) ); diff --git a/packages/postgres/migrations/03_reaction.sql b/packages/postgres/migrations/03_reaction.sql index 3daa69d7c87..5dc21091f71 100644 --- a/packages/postgres/migrations/03_reaction.sql +++ b/packages/postgres/migrations/03_reaction.sql @@ -3,7 +3,7 @@ CREATE TABLE IF NOT EXISTS reaction message_id INT8 NOT NULL, reaction INTEGER NOT NULL, creator VARCHAR(255) NOT NULL, - created TIMESTAMP NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (message_id, creator, reaction) ); diff --git a/packages/postgres/migrations/05_notificationContext.sql b/packages/postgres/migrations/05_notificationContext.sql index a432c17fddb..ca8137a4c3e 100644 --- a/packages/postgres/migrations/05_notificationContext.sql +++ b/packages/postgres/migrations/05_notificationContext.sql @@ -1,11 +1,11 @@ CREATE TABLE IF NOT EXISTS notification_context ( - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ - last_view_timestamp TIMESTAMP, - last_update_timestamp TIMESTAMP, + last_view TIMESTAMPTZ, + last_update TIMESTAMPTZ, PRIMARY KEY (workspace_id, card_id, huly_id) ); From 281aba5cf728fb35f1e1287d4dc56148cc20d6cb Mon Sep 17 00:00:00 2001 From: Kristina Fefelova Date: Mon, 2 Dec 2024 20:26:19 +0400 Subject: [PATCH 010/636] Replace personal read with timestamp Signed-off-by: Kristina Fefelova --- packages/postgres/migrations/04_notification.sql | 3 --- .../postgres/migrations/05_notificationContext.sql | 11 ++++++----- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/packages/postgres/migrations/04_notification.sql b/packages/postgres/migrations/04_notification.sql index 990e1e51ffd..96b53f1a6d1 100644 --- a/packages/postgres/migrations/04_notification.sql +++ b/packages/postgres/migrations/04_notification.sql @@ -3,8 +3,5 @@ CREATE TABLE IF NOT EXISTS notification social_id VARCHAR(255) NOT NULL, message_id INT8 NOT NULL, - read BOOLEAN NOT NULL DEFAULT false, - archived BOOLEAN NOT NULL DEFAULT false, - PRIMARY KEY (social_id, message_id) ); diff --git a/packages/postgres/migrations/05_notificationContext.sql b/packages/postgres/migrations/05_notificationContext.sql index ca8137a4c3e..f8936f24cc0 100644 --- a/packages/postgres/migrations/05_notificationContext.sql +++ b/packages/postgres/migrations/05_notificationContext.sql @@ -1,11 +1,12 @@ CREATE TABLE IF NOT EXISTS notification_context ( - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ - 
last_view TIMESTAMPTZ, - last_update TIMESTAMPTZ, + archived_from TIMESTAMPTZ, + last_view TIMESTAMPTZ, + last_update TIMESTAMPTZ, PRIMARY KEY (workspace_id, card_id, huly_id) ); From e2de49ec978bf5a5bb92dbbc1a0299b804dadf4a Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 23 Dec 2024 16:38:49 +0400 Subject: [PATCH 011/636] Update types & db schema (#3) * Update types & db schema --- bun.lockb | Bin 56152 -> 56600 bytes package.json | 2 + packages/cockroach/migrations/01_message.sql | 24 +++++++++ packages/cockroach/migrations/02_patch.sql | 13 +++++ .../migrations/03_attachment.sql} | 5 +- packages/cockroach/migrations/04_reaction.sql | 12 +++++ .../migrations/05_notificationContext.sql | 15 ++++++ .../cockroach/migrations/06_notification.sql | 9 ++++ packages/{postgres => cockroach}/package.json | 4 +- .../{postgres => cockroach}/tsconfig.json | 0 packages/types/package.json | 3 +- packages/types/src/index.ts | 1 + packages/types/src/message.ts | 12 +++-- packages/types/src/notification.ts | 20 ++++++-- packages/types/src/query.ts | 47 ++++++++++++++++++ 15 files changed, 154 insertions(+), 13 deletions(-) create mode 100644 packages/cockroach/migrations/01_message.sql create mode 100644 packages/cockroach/migrations/02_patch.sql rename packages/{postgres/migrations/02_attachment.sql => cockroach/migrations/03_attachment.sql} (62%) create mode 100644 packages/cockroach/migrations/04_reaction.sql create mode 100644 packages/cockroach/migrations/05_notificationContext.sql create mode 100644 packages/cockroach/migrations/06_notification.sql rename packages/{postgres => cockroach}/package.json (80%) rename packages/{postgres => cockroach}/tsconfig.json (100%) create mode 100644 packages/types/src/query.ts diff --git a/bun.lockb b/bun.lockb index cb32c781b3eb1499f9d4a358807220d00e2e0ac6..22de1295d3aa3489aa57d4f6cf58200747fc3dcd 100755 GIT binary patch delta 8498 zcmeHMd03Uz)_?brgS^axDTm_#j-Z0T0S^b1SJYc3VqzkiIUL}CpbT=*D|L-SW@-+p z3o1uMElor%w4^dKy_HQ`n5Aacy|?=`*yQzE)V03fdI#d`vG4i*`M&45&vUo_&f073 zwWqb#9(eZ$&vbe0xi0JCVqTs){d&{A?W=q)9e#Cp`Hi+~M}qzqz2~7bC+?{?d0jah zy5d7gO?B&Y`_!D=pfTF7uOxXIkPQKKx#t&`RY_7}7fE7yJ!mlKeAl8f)?TH{7whzX zP#NU~E>}ftv0Lf~J_vk-E?;KW_|HMPy^ElM6OajnfBlN<+r(&o)O=o_2 zCEKAMavr@iK$Q>n)I!h=f&y3G!XhY?y16?^S_C-j3m!;CuRK!^DBJs8P%bZZRTjAte+{Zq z)Z?%zbh@h+z&e$#h3?|QYUve>hTXp$qve6Lfd-90W~N@j?J8a5s+6QD7%9&v927;K zpU?=;pbL2J5YbdsRqU!ovK$%~Fg~4I*3@CR$ z7nJ35P|njb^hSecyTeRsuNCF)s-jB7Fh?TN&_P-tgo8pa(G!3SH(XH^TUA}*a=W9o z)xWsPRaSrsh2S}Irh&5E#rs_|P- zt@u4n?S6`EHcQg2s3$1H&n_~l+FucM)QaDKP&d!!J!-rq)134#S3#4oz>$2(Zi9;D&=UQ*Dr4Y^CZTMZVaf0hw%p zrBgaIvdIyw$QvNzHYCajw##S0jRWUHA|}NL4&lh~A`C>H1&*8VLdOH`7<_ezBAFuPm=E>%@UMe{CjlW0j$nz%---4uBs{5ce*=n&0L2FGLS zO2>WeasxOvyorq>Uj>KA!g!fW!gk5#=t3ERcKJbYSU=EYV;~2*9rZZVNpaoRa+EU!OBfjTfyAS_khdb8s_Jb%b}wIBSbg4Cn8q0jb+PjgnJMeBfzn( zwAoaF)9SG!-_kkEJ2=G$tDmQhi3Zrs)4-+D$lx@219EoA1J&p}MeV&5lL)8O-s$pe zY%AlnwqctmskOHvUxjQkWQaZ3U^Y4WDCTBtyi@6hoMv8)J!qVo3&wUii}ur#C;(#~xRP}}%6+D9bO@e6jU80;@X$ys7!ZLx=12P@)ZY9Fk~ zqj3gkv4??UkyQ^-D^2zYE}QVcyeG~H)( zjOO5u**~ZYv<>BTaBM0y+GT05#$kIwU&Fy^{)f9n2|41@YJ4$Mh8KXnNP-Wf1i%}> z>QEC?6W{>YGO)Fp{wKxz01?@(P{58RZG%)}@&m88tEGh8Ak9 zN^wj}9)M%-LBI!C3$WfgfD=>ha6L0fOqt)HQv$`+MtTh3dYb@Fo49{g+zc??qSLLQ zoH|o3-v)34+X3bq0ZvR=z7t^iE`Za`DC<1|@PL|l{+u}oaOzBX#?Js;;RwK~Gv$t2 zRjEdqe-2>%7Xapu0-TsK|DsNhfwErBaXoValn3$}z=>&B;50KxH>0e77T|{8133L} zDC?gCKx?G;xt5;(|Mwl(f27`dXUZeI*?z;SEa!3sU=wBloS3qFv`)w9bSx+*rd<99 zfa|II4*UPtjRy=;=l#aux#IuDzLUV~uw&!l-jM#Y@4yKE*>^ZdJMT9f8q9Mn{=4rx zCh7f_jrOo18D6D-wQjlVkcdQYz@Tk`p`p#!)7ZQ#}=yPrKV?s~s<(NjOZ?U8%- 
zXMdmf_tlH0ZN2(q_1#6Wm;ZSrV~W@m5g5AsNCSP97fy%sGer%Ff?V2@A3&~x%&c0r zmY%$J)K6QH-JDj~@Xq;j=5tdY8SLl1dUBPmb??fEd5NDi{rSwH!WsDwO}l(`LH;W< zYknB}(!9-O+fFvue`cTEcyZ9alay5uKEZFl>)7tv1C^%-IHzCmi+}y8jsGal-%``3 zK0E1hk4I1L$veNkxqH7O51kx8;mN-oKD766aL0L9MD6c1DN++2z-4xmfm)>BMzE`0!QO>w3mI$yk0%xsxi^1aFY zsXuNLxEMc+i?NqHKS=wcvZ$q3*<~SqXt`1Ty~$J*MYBq#vI0$V`*}!DrPGAy{1x(~4 z`gUZ-1AKzY~Qeqa&sN8n!I9KiRA zGr&8*yFeRI3X}nS&z#LywCTu91NdO#!-|j4TY(H94ZvTz(sE!Wz!#Eo;4Xkq*3kfa zp$>QicoY}|Yy%pA$AFE%4q!X53D^m22KYMoVibPdz#M@6TMw)UQn)pwx{dH{-;f88 zT?%lYwZM`VNB#Sz8V|Df0esLiWgB?(JUf6@4a(IY1eO6DLEH=X$kSQ@@DzVRS_k?t zuohSYtO8a8T$iW%8qWu5+X0D|KNeHd6Zvp-YkVn;^!9}$6cRR0H&EkSVx)HxB!Q4j z1%;NUyLo!AiM^Jd@)5HfaS6$B@v0rT#P}YGl7u+?OOi@ZM@EV9g;Fmu*;44nV^%90 zu{(+z@~&KQ-ujNDHHj9bm1Ns!v$8irQ2w%e`;qsR32i7((YpMJYByR;VS=`73>QP` z@WvtiCJ0U0_`>aeA*X4-Dd; z*$L){Pr3NybA|%c+_Tql@+pKuH2diutah*liZ;@0Twl=QfMD zNZGqBR^xf<)S5p9_AtG-*d$;EZbX_v2e#R)%dubjAt! z?$~az8ZS`uM%JxZ(Dotsl&o3)7%kmiB-YWFsQoFK8*NtO*=XXhmv>lRKhX`k@HG2x z5Z#ILZ-TYY_<0NZj~wL}l!9`$8m5&_jVS*eFQUAKzG<|GS4rMs6Ju!P4vQF1xjPQS z8v#3QLi5<8Gz0lV^zcqg@^P#vOv7{KhrCO(!+es|k*JYmycK=vA8GNse?THi*c=$+ zc)HZlRwE7BHAIZ1++9&UjW?5%)8oJQUH#0%sF8pOMh*W6+VVuW9xCc;6ui9H@IT=< znVx#wBBs!=$A|Pa7N+sURCr!_Iwv;e(_m2rTk|$VWt)3=VV^lm7oY50*CMV^%-$hl z0);%0V>Mow5|mv7{O+q^+I$R&sEcpeQn3Kb!_P73u!swQ{wWs~5}lM&m?={xAQksl1uX$Cay z$p~X&nw3?H$c9Z-RqIW+Jk?F?0o{rnbxLUFUYn?;^?NONxoO&)B~DU1B$tLE~ zfhJ3`@!Yk3*j;M|9vT1=Q!`V&xBb+CH)4tC}l^1-hI*_M=XFBTH9By(9qT$Vn$>XE7Bk<&kvv1#Z?Tm%3 zp)DNaRqpmPJ>|RaJ$D~Uc=N{sDWt8_i+n#jfJq@mAH;358?C+2mB#GPiI{<7 zm{;lpDamh&#eqLhJE*B$zd%p#cSPlv=g+GwcjnI>m0w<3y0EM`-&s{$UN)>lPQC|* zhK*8R4Tt3|EW>~836ajUb#Iw)U{`-CdcjIl&PJG$5~rd-r|A9s;bFAT)cjLU&>6qY#w V)?&SQ#fN&WI5Xcf_>=gf{{^(Kv-_CkWdsrAp&&y$prAMiisHDVvw||X>>%p^`#bl6*;&5X{bSdEcI*3o zed^RXRdwo|Q+03OT6H+!lY;@v9r0%uKOgi_)3I;bJY_%hYxwHhrIQZSk4ujZ-(TMQ zk1IRhlFwX`)IzoS(bd1J)fny9B1yhQsD^=hypv1HDkZ6HfFyCd4>S~XvS&saYtPWz z3w1gLREE6B<0(%p@k$}!n}av&?d4{TzX;0XodyjVh{|1)Bn5+B1nmG?;!P}`JaxR( zx2dM+M|(4FFD{!};gO_cIzOSLtSE7Mxilw8lUGo0(Ym?{XtB8gn2V>IFxg#M z=_!&Xd8%fx`FqjGdUeoaGd=B)oSTA@KNB`liSp~dN9Te2iFIbwH^B0p`h)+2k7mo zFsQNomeBf*fL%QLm*Cl1&$ZMRS?W!!a!;8oxhsmx-4$NX47Bqi>sx8G4iw{6`?|N* zyukr;!+RfNAXa=El%0?oDM{$-+Ya6ax(JjFGrZXvd<^*eFoflou(mkR7|>|Yu6X62Pm0%d<61!egZPerjO z<>#atg?{Xb32txYG#FLkneHu_P$lh$H5~p5Sj&bS18v>|mHv7MuV=~(PlY5YuoyC5 z04PMhuQ3R(;CrlqCqyjlbwjKs!Z z^GAWQdiOrerBbotOyX3+6Ew7$OQcavup+9-kKfNpwkjfuvhiC$HCDyEz6ruZxluhx4pGDq$_`P? zOK@gnku^9|yhr{JMZV8G#%n@DtuDC-PKq>eX1dVaB`*ZWvOwm`7qkJa{z0Y zezcG!JWI}k>sir6VNk53ns7zDPX2JL9}@PPg)W4 zh)XZ%1kun4mz)Q#J2yGrZ33fLXpD|zsJ#r*`U!A92?Y>E?8XhB5>?) 
zlj@PH;QD~W0^px$9EgK7l5Ca??YOH9yw5K&lOP$uZbJPeQY)YF`mJ z9!2%CycV49bJ+3?IGzudVN55EHg=|h)Owh}avC^xIa~pkzX)!S#=$YSz;Q4G)%57pN?WeBq;cT1ejJ=b zI;XkfM{vA&EZ6EXKY`ufopM7no_uc3L=* z2H>i|aZ;GoP@e|J;~)u*2;^$4OCAS~hi^(@u`V-#gKyxHe?|@2gjz;>!x7b} zv0<7c{3LfmD&TA$p&0}ZS5ZwTMZ8IVNUlSY1_=@eON@=f$ZB6+lnwECfdJK)?= z%cCgEdjdQlvPvyXxxJT8d+W3hC>N$Ie*)lsPXSz*a(iD%o1c?bEsZHF^wZm!1^|eQ zT9~ptM`KmW6XfdcIG5DIl=TK{tV($t92IK&aD9GhxURsIJ0MHd!juO_Myu`lYO5qM zWxY{49j&)B3-oq_wlyk`q9)qiwRJUjbL-uh%0Q{kGiCWCooCACPSJU$ zEHBgfM^W|%zvFRfOnDqH-fmR^D*UD-%>_6$p9eT?mjJBz0>FhSPq>U36sF8C*J&-N z8F&fceyad3Oj++0ovzmD8Y4s2pn(fhR$L440PA(W4wMU1Zr=#7-m3tYM^V<>46s2v zbh-zW%R|cR=M}#WaEF5cm&TMQdP8m1DD#H^);|m|e?;ex>ijWKE=*Z|9AHCE0bH0i z1*(6~9Ll38cQ_C502cr*{~gNu9|BB20*vyA)2RBFJdRX5_?TkDv{MNF|K*M&%@2)F zAvOQFfi3!9KaRBd|HEmd?*CuvI4aN$Xhbb)6F2<-afH){j{}tdv&WJ9QO6K_gxmi| zj-!UVu80I`;~7Y!3a!;$ijw#2nGdxo8!Fs#*_&x*Qi-TmS0-0S0Z9$9jr zbH;7Q*ss37H~8hZw@o~H<=&v**gV5Kw!2%p{$Op}C;1bPknT6X52}Zbjge8UEuU|$ zeK%_4)HPim`_s)agOAg_!btKI=7@Q;zc8N;6tN+&Xcw5xHZ4I_OZkb8VbD+ z1azprJjnbN{;P$cWs+!!t5_(+Fxu@c#LIkG<$RepdMmu}9&T7yc~dlOnDJd}{B-2n zHIpTDh3Y+N;RpKJVV=I89Yz-i!jz!|^~lmSJ+1fUow0iFiN0sNNtYhVn(M{^!97#IQ! z1qJ~*z(9bX8T~o=`k=xu&{+UK;?jX6pev9JqyXIjezD+}jROF`mFxo8Yd)Y7m=06{ zM}W704*}kMXMuCT`@jdlBw#YY?}%eL`T3o7BrqHp20Q^g2`E4Y&>dI+a5(raQbEPkc|oL?MU0+sJm+0O%P-g2M@;8}PnzX3QydA1h- z&e0{nB49DVeR)ay*j|)&do*h4@oU=lN^GhfY->zyeBX;{!l@I6mR_JOK#k9VF-=m? z5`vZybng`#4Oksz=k@Y_svCdfhP&?tDM%D#IFcO*dL>n_cG}qr9_tfgeGh z?nq9B!q;?ewatDXavS6mE-SC)C8qz$By_RVn#7s}*(J38i>UXm7EMxk;O#VcjV;yq z_WXIQE!@&G#AFf$T^-38P%yqu+nVIZ**nizsY+NNeM#%ka|EWu5`C@AquUN@JKt{- zu$V29j1T7Cfs^u{e&V?WYR?ph6I+$8ud&&U5ANNQb4EnX`=TClEY*=LNe+r$YqR$; zX`fTG3QTWZyczU`E>FYmq2X(Dg_}06b=r-qgBLcwmUnN?2YJxyhEXuTk8ZBDi6tbj zD;9M$ZJkZ*p~dT*;usyrub=*9oio+AKN%U-|3dVw23T*{VBD~j-Oczhw{)0Y9XHK^ zn3-wmW}Dr(s%aUz|H!OmWvkSlm>FNNY1w+GD5P<9Hu`LRl-;=GIT7;Bj(!bAw^Yp( zM=Gv^D6GzD_XTL5&u6~*uP$@<>|Kx}G7hX$9m8&18l}IKxH9%+IBcBT)uGO$%0)Tt zuCw7=?7HnXagn}6y)XS#=fr2AqzyJPhq5>1+Kn5Yq-|LzzFshX4(5QT+1dcwzadxb zr3cX6Nw$qn(SdR|vfqj~CddbawX2hXv<7WE=>z?@*lt|O^lJIVs_w^wcto}uBc7+B zuR86v-JDb(nQ=V%-1b@VABWn94A_;LKz zaS=h?R7+G#yHoQmHjzs4Ta127=$S1EB8fI_iEU%tKAale^^Rr9!DZ-|toi+pjjnBp z{0AwZ?nc2|t7SCLWL#|2zH#zoN&Do5Kkv&Ja&NT>g`VA-U^jN>-|ym3F^=Be8Y_;I zK!|_tVH1}qd0PTLX^q{MXE$zhl9kOJX3r0cg1JtIQ%l%$+iWJy=*ENCjT@zJ_Egp8 z|8d$K)idg;piY%)+#J<)@I9Wez%xEnWcBRkV1P`( zi;Gt7f9r~$_b$t^dq&X(EaDhl-|iF>DSC&~Zd^O%J$>WBzb$wFRn>L!SRH7}4x8P$ zehM6wGU8m5o$o@9^M&O{Y2yy3*iRqt$bH0&JRvq2?mK$!wAqivXjh1t(ftc%tr(Dl zQIfF~Sk%a^w0UQg;V3PJs)rw*o0=-0o?6WG2t1VM(T-hCF`2GHUO;zt@y2?p-YK4; zs(NRezjtR>UPu}rh>;y=?(SA?osw}^_3Vw}a|^?-w>ODoY#L`bJU1z6x6N+cVbv!6 zYH7yzD4I@QN>m#?db8CJ z-XE5zZc1Zk8`ofktLy*VYiiLoHB4y^n0B)xt=$u8`dddjxF;pm+F5(5omzPD{a@Wa zZ(|b?32am2UM%~38+pvLf0zpiANY{OQ`@~ZTwS^L7F#CjPt@&usQYJjI=MHmQ{Oh) zjotIFHT9$+Yj4NXp;WBHFmg>s2^)%9QD4C57(FlBs1$ zMl&th_jtqa_Z^P2^iuEflEzOjqoF6;w*gH`WG<1tp6ql~7SCdiwdn2rZRpw`?4ldJ z_3`%&_Dk0TEHm}_7ha~&Pdh{z&wS$ve_j`Ny`rvU%BS-!PhZh2F28~v9sg60L%R43 P_vm8|!$XX}dB6Pcrv6&S diff --git a/package.json b/package.json index e29626ced9c..034075fe14d 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,8 @@ }, "devDependencies": { "@eslint/js": "^9.15.0", + "@types/bun": "^1.1.14", + "bun-types": "^1.1.38", "eslint": "^9.15.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.1", diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql new file mode 100644 index 00000000000..1236df5ae25 --- /dev/null +++ b/packages/cockroach/migrations/01_message.sql @@ -0,0 +1,24 @@ +CREATE TABLE IF NOT EXISTS message 
+(
+    id UUID NOT NULL DEFAULT gen_random_uuid(),
+    content TEXT NOT NULL,
+    creator VARCHAR(255) NOT NULL,
+    created TIMESTAMPTZ NOT NULL,
+
+    PRIMARY KEY (id)
+);
+
+CREATE TABLE IF NOT EXISTS message_place
+(
+    workspace_id UUID NOT NULL,
+    card_id UUID NOT NULL,
+    message_id UUID NOT NULL,
+
+    PRIMARY KEY (workspace_id, card_id, message_id),
+    FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE
+);
+
+
+
+CREATE INDEX idx_message_place_workspace_card ON message_place (workspace_id, card_id);
+CREATE INDEX idx_message_place_message_id ON message_place (message_id);
diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql
new file mode 100644
index 00000000000..0f3baa57315
--- /dev/null
+++ b/packages/cockroach/migrations/02_patch.sql
@@ -0,0 +1,13 @@
+CREATE TABLE IF NOT EXISTS patch
+(
+    id INT8 NOT NULL DEFAULT unique_rowid(),
+    message_id UUID NOT NULL,
+    content TEXT NOT NULL,
+    creator VARCHAR(255) NOT NULL,
+    created TIMESTAMPTZ NOT NULL,
+
+    PRIMARY KEY (id),
+    FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE
+);
+
+CREATE INDEX idx_patch_message_id ON patch (message_id);
\ No newline at end of file
diff --git a/packages/postgres/migrations/02_attachment.sql b/packages/cockroach/migrations/03_attachment.sql
similarity index 62%
rename from packages/postgres/migrations/02_attachment.sql
rename to packages/cockroach/migrations/03_attachment.sql
index b74d0ec0eda..6f732045484 100644
--- a/packages/postgres/migrations/02_attachment.sql
+++ b/packages/cockroach/migrations/03_attachment.sql
@@ -1,11 +1,12 @@
 CREATE TABLE IF NOT EXISTS attachment
 (
-    message_id INT8 NOT NULL,
+    message_id UUID NOT NULL,
    card_id UUID NOT NULL,
    creator VARCHAR(255) NOT NULL,
    created TIMESTAMPTZ NOT NULL DEFAULT now(),
 
-    PRIMARY KEY (message_id, card_id)
+    PRIMARY KEY (message_id, card_id),
+    FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE
 );
 
 CREATE INDEX IF NOT EXISTS attachment_message_idx ON attachment (message_id);
diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql
new file mode 100644
index 00000000000..3d3af2c0872
--- /dev/null
+++ b/packages/cockroach/migrations/04_reaction.sql
@@ -0,0 +1,12 @@
+CREATE TABLE IF NOT EXISTS reaction
+(
+    message_id UUID NOT NULL,
+    reaction VARCHAR(100) NOT NULL,
+    creator VARCHAR(255) NOT NULL,
+    created TIMESTAMPTZ NOT NULL DEFAULT now(),
+
+    PRIMARY KEY (message_id, creator, reaction),
+    FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE
+);
+
+CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id);
diff --git a/packages/cockroach/migrations/05_notificationContext.sql b/packages/cockroach/migrations/05_notificationContext.sql
new file mode 100644
index 00000000000..b461c45fc58
--- /dev/null
+++ b/packages/cockroach/migrations/05_notificationContext.sql
@@ -0,0 +1,15 @@
+CREATE TABLE IF NOT EXISTS notification_context
+(
+    id UUID NOT NULL DEFAULT gen_random_uuid(),
+    workspace_id UUID NOT NULL,
+    card_id UUID NOT NULL,
+
+    person_workspace UUID NOT NULL,
+
+    archived_from TIMESTAMPTZ,
+    last_view TIMESTAMPTZ,
+    last_update TIMESTAMPTZ,
+
+    PRIMARY KEY (id),
+    UNIQUE (workspace_id, card_id, person_workspace)
+);
diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql
new file mode 100644
index 00000000000..df4bcce202f
--- /dev/null
+++ b/packages/cockroach/migrations/06_notification.sql
@@ -0,0 +1,9 @@
+CREATE TABLE IF NOT EXISTS notification
+(
+    message_id UUID NOT NULL,
+    context UUID NOT NULL,
+
+    PRIMARY KEY (message_id, context),
+    FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE,
+    FOREIGN KEY (context) REFERENCES notification_context (id) ON DELETE CASCADE
+);
diff --git a/packages/postgres/package.json b/packages/cockroach/package.json
similarity index 80%
rename from packages/postgres/package.json
rename to packages/cockroach/package.json
index 3a7faecac7d..ac6244f46b7 100644
--- a/packages/postgres/package.json
+++ b/packages/cockroach/package.json
@@ -1,10 +1,10 @@
 {
-  "name": "@communication/postgres",
+  "name": "@communication/cockroach",
   "version": "0.1.0",
   "module": "src/index.ts",
   "type": "module",
   "devDependencies": {
-    "@types/bun": "^1.1.13"
+    "@types/bun": "^1.1.14"
   },
   "dependencies": {
     "@communication/types": "workspace:*",
diff --git a/packages/postgres/tsconfig.json b/packages/cockroach/tsconfig.json
similarity index 100%
rename from packages/postgres/tsconfig.json
rename to packages/cockroach/tsconfig.json
diff --git a/packages/types/package.json b/packages/types/package.json
index b13a82c8f16..9c80d980935 100644
--- a/packages/types/package.json
+++ b/packages/types/package.json
@@ -1,10 +1,11 @@
 {
   "name": "@communication/types",
   "version": "0.1.0",
+  "main": "src/index.ts",
   "module": "src/index.ts",
   "type": "module",
   "devDependencies": {
-    "@types/bun": "^1.1.13"
+    "@types/bun": "^1.1.14"
   },
   "peerDependencies": {
     "typescript": "^5.6.3"
diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts
index 7a82e8d8a07..0d1134f9e28 100644
--- a/packages/types/src/index.ts
+++ b/packages/types/src/index.ts
@@ -1,2 +1,3 @@
 export * from './message.ts'
 export * from './notification.ts'
+export * from './query.ts'
diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts
index e9b7fe9736c..5641439e2e2 100644
--- a/packages/types/src/message.ts
+++ b/packages/types/src/message.ts
@@ -1,9 +1,8 @@
-//TODO: Import from @platform
 export type CardID = string
 export type SocialID = string
 export type RichText = string
 
-export type ID = bigint
+export type ID = string
 export type MessageID = ID & { message: true }
 
 interface Object {
@@ -14,14 +13,19 @@ interface Object {
 export interface Message extends Object {
   id: MessageID
   content: RichText
-  version: number
+  edited: Date
   reactions: Reaction[]
   attachments: Attachment[]
 }
 
+export interface Patch extends Object {
+  message: MessageID
+  content: RichText
+}
+
 export interface Reaction extends Object {
   message: MessageID
-  reaction: number
+  reaction: string
 }
 
 export interface Attachment extends Object {
diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts
index 5bfd1c20629..79b9b83a5ef 100644
--- a/packages/types/src/notification.ts
+++ b/packages/types/src/notification.ts
@@ -1,14 +1,26 @@
-import type { Message, SocialID, CardID } from './message'
+import type { Message, CardID, ID } from './message'
+
+export type ContextID = ID & { context: true }
 
 export interface Notification {
   message: Message
-  socialId: SocialID
+  context: ContextID
   read: boolean
   archived: boolean
 }
 
 export interface NotificationContext {
+  id: ContextID
   card: CardID
-  lastView: Date
-  lastUpdate: Date
+  workspace: string
+  personWorkspace: string
+  archivedFrom?: Date
+  lastView?: Date
+  lastUpdate?: Date
+}
+
+export interface NotificationContextUpdate {
+  archivedFrom?: Date
+  lastView?: Date
+  lastUpdate?: Date
 }
diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts
new file
mode 100644 index 00000000000..302893542a1 --- /dev/null +++ b/packages/types/src/query.ts @@ -0,0 +1,47 @@ +import type { CardID, MessageID } from './message' +import type { ContextID } from './notification' + +export enum SortOrder { + Asc = 1, + Desc = -1 +} + +export enum Direction { + Backward = 1, + Forward = -1 +} + +export interface Window { + getResult(): T[] + + loadNextPage(): Promise + loadPrevPage(): Promise + + hasNextPage(): boolean + hasPrevPage(): boolean +} + +interface FindParams { + from?: Date + excluded?: boolean + direction?: Direction + sort?: SortOrder + limit?: number +} + +export interface FindMessagesParams extends FindParams { + id?: MessageID + card?: CardID +} + +export interface FindNotificationsParams extends FindParams { + context?: ContextID + message?: MessageID + read?: boolean + archived?: boolean +} + +export interface FindNotificationContextParams extends FindParams { + id?: ContextID + card?: CardID +} From e884fbec2f549a76dfa363683cf6ffbc7300291d Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 23 Dec 2024 17:26:39 +0400 Subject: [PATCH 012/636] Init cockroach adapter (#4) * Init cockroach adapter --- bun.lockb | Bin 56600 -> 57472 bytes packages/cockroach/package.json | 5 +- packages/cockroach/src/adapter.ts | 122 +++++++++ packages/cockroach/src/connection.ts | 104 ++++++++ packages/cockroach/src/db/base.ts | 41 +++ packages/cockroach/src/db/message.ts | 228 +++++++++++++++++ packages/cockroach/src/db/notification.ts | 239 ++++++++++++++++++ packages/cockroach/src/db/types.ts | 59 +++++ packages/cockroach/src/index.ts | 1 + packages/postgres/migrations/01_message.sql | 19 -- packages/postgres/migrations/03_reaction.sql | 11 - .../postgres/migrations/04_notification.sql | 7 - .../migrations/05_notificationContext.sql | 12 - packages/postgres/src/index.ts | 0 packages/sdk-types/package.json | 16 ++ packages/sdk-types/src/db.ts | 54 ++++ packages/sdk-types/src/index.ts | 1 + packages/sdk-types/tsconfig.json | 8 + 18 files changed, 877 insertions(+), 50 deletions(-) create mode 100644 packages/cockroach/src/adapter.ts create mode 100644 packages/cockroach/src/connection.ts create mode 100644 packages/cockroach/src/db/base.ts create mode 100644 packages/cockroach/src/db/message.ts create mode 100644 packages/cockroach/src/db/notification.ts create mode 100644 packages/cockroach/src/db/types.ts create mode 100644 packages/cockroach/src/index.ts delete mode 100644 packages/postgres/migrations/01_message.sql delete mode 100644 packages/postgres/migrations/03_reaction.sql delete mode 100644 packages/postgres/migrations/04_notification.sql delete mode 100644 packages/postgres/migrations/05_notificationContext.sql delete mode 100644 packages/postgres/src/index.ts create mode 100644 packages/sdk-types/package.json create mode 100644 packages/sdk-types/src/db.ts create mode 100644 packages/sdk-types/src/index.ts create mode 100644 packages/sdk-types/tsconfig.json diff --git a/bun.lockb b/bun.lockb index 22de1295d3aa3489aa57d4f6cf58200747fc3dcd..5db3fe48a99d18dac938dc6ad72f92e427c2bd1e 100755 GIT binary patch delta 8169 zcmeHMd3;pW^?!Gg33)>%E08=UkOUL9m~}`d5Xb{WAV5$o5ODz~WD*9G5Hb@aMa`r@ zB?7XXC`(94SVRQD5J4&^oBi0JptxZb2yH*sS{Bi&)K;+H^WHq9{%YIb=hHvE^ZCB> z&b{ZJd+xdCF7xi%mjZl80#>FDct5>y_S8#3<@W7g%h`)6e>qJ4b;1N^Qr$Iob&nMT zleS;!l(h8Js=~P1R~mFie_ABT*MZjvP?x97UE!6a`vN41^ACZBgO<7GRj~A4fBq&v z-3cnAyx8TcOmTap0`OtrNBQ%2n{_@aNY`fu4V?gi90-JfMu8@Qx;-hSWpj$8#a(p4 zVB~k@{L+d!RW3=oX43f*cSUhZb)|GLSm)blgov(fgAFWw2~uqCNZ8Kimy|iZURSa7 
zpsQvcn|~RZEO!oaY^KXo?RA%VB$sDywX3Q|`V@s+&*iDCa-r4NAr}R{7^ZSJoipcD zu^$d0pRHeN)$)^lJCMf_!f9rBJ=WurM;2`VO81Ba!2q<@BV$ z#?sN?SxmtgK&5XMDBIzjt(h%-4!$S&v*a{I*FFKIk>Kh;S-RZc_#L3V!H@LkdtuSd z<>|=hdOjG&+T+2qw~lnzdpX;aQsXQylblthmCh=UYcKM-mlt~I^c*Pk)%t3pb$9TH zIpMx;P{e`~D0{&hBT1<1`$R~R0@?w}h8gah3O*kEFn@Uuns=758>~lFt>iPpYAq`AxNPgC3tR$azNpGM(}f`@MyuS@XF=KD--2>^xvQ$wmHB;C ztwBBZM2XYmoeS?&xvD+xk{am>tl{x*gSBkPHPEo3czMiU!Q(2Q=ck4WP_7VrFnyoCoFR zSA%l?<0$8D8FFRd+3%x4xmCDJqt(^!V!TR~b3ESCDg-r$rX-D&*7nmQV-YARW97r<4^eMCE$=%t7{16tC7B{D4pBn&7FE1S9TwFT9Y~h0d14IZcU8qg zs>kyXb>R68$swwoW|ky$$tP%1SBI#k`VdvTLLDKhsaFtLta)M@OcqTBUJetKuC2JCu%h%aiZK zNZyJ}v^Kb@g0a5~vyQ_Dp8Ju2^N9U@a(_P`fiol)M0@?fva6_mdJXb!5HwW@Vc@Pj7 za&J{6Q+{t%u8)zVak_3e=pE`nUM$w#yO4(vgd^%HKTb8DM&4AiT5`?%uq=($-iBZW zovyvL;%&0__7&bn^P8NDm4xloXW%N56I5}6@)K0K3syNjKzN7>z;U$cA>KxEAJzOd z=TU)@E2sC?2L-_nzf^$Z;Y1jK+YgR|!mJIo8S5?UK@cK*RB$|)@I+UK>;cCiV&-5H zXd_WI1;o?1#5|Er^@*w|qmD#XemVYz$rzH5gd0nU%^}|fPK64@YJx*<2FL2VP(*^m z{5Ck8wNMC=rDu4^YL^l#k~{s>d^hI`DLoJWv&DC?C)1 zRCmTfzJVbkk|HYXba>!c`44?1DFa%%vWHFG`jI6$&orSQjZ4mxcVTTGskIJU#1-mD zMilqgeJSWvvO|6poF1DT#aF;_G@+~Dh72&KFs3(LET^dQF68l;p*H&Sg`d-xitK@U z&$Z~4%lsVT0_|)9r;jJxFW#s6)LbXlJl=VvKmcop1efxXAz?LVo3zB#1i)*O05FTS z^zSIkA<85Z6{N+{eQCi|mKHpXt04fibTeh?;Q+S+UulUc=ilb1=&{D%4$6rsmtz*7 zl@n=kBk&^S^8<+~m%@`;Vj2Jx=&VM$9P5Ran3||pdUOQmV&-azX#f?ZM{8;kLz>P) zy-;Gx7IU|_);$0xrp({#r}u$!Vj4)#rAOCtsgtvi{u|1z%+#d+nX;Z@e?C(gDEISB zxxB*9Gi5!MI6-Pzc+mv-z{TlbQFfVEEB$B6ist$AnQ{o#0US1u0l~lufaO*JoS3pf ztEFUS`9;qlKWzZTkwaPsaK%PIqO%#gznYmtx|y=NjQ~rx0?cm$I5Fk?Edb|l1vvdH z%5vKPZhfbpwt;d2)&0d8+`ktAuJ9{>)6J9{dP&RGDf0&amVX&w{uO`|Q|1r(>0wZo zJL;#eg0lZ#lWdZNGn8}^;B+(XLcQ#9wQTo?0IT~5;Pl^6mOle9JqsA=|G6q;Y6^1w zwf`OEhJIQWbcg@6ENB7q)3Tt4!~cF+xcz^!EVO^)cqfHoT@xs##5&=j30F^#IeO3a z`}@hK-&^rU)8XOI%(O%e9x><6(AtC7k`H%nI-B}J_wUyKMt#_t*LHCE`~C0DJo>wx zO6i)ZTP~HnunP--kgkVa2Yrl=xC-evC00r*8IL3K+>%0?UuvcIz%8YO(n9J#%Sww& z$BX533fu?avSy9PzskBP5Y5@(0;F|(Mw6ikU+@}ZrZ6oJF z(VkefM4+kXJTq~^jPNeBHX(LV?E5AWwQ&wSU^V{izug!A-BD|XXm6QUt>6(EAU~+L7j185?{c(Su zvAOcjwG@DSl*BtNE8Oq5D`YJ-)3_A40lN)8y8&+iCx8RML4a++pG5vj$se(#D2iy9 zqom`F_r*N`ZyNYh%pWSGlQikc=vv3}9Vsa6P~=$vRg79C0gv<-ijF*Js<$vtdYg z?V;v%qq5isF_;C$hp6}<9)d{Z@Gy4+JwJK&%0EwR@Wco4K!+m7N7vRVHg41c?s(9E zm&d2qeHkpKXQbjcNNS*r^>!O~8lmpnSak8CbMz?*<%3ad!$rwMmXIt}G~Gqupj)WX%87R}S-# z{2eHV&r{JUO^406*vNS*r6J*1B)E${Fj7Z*K(h9Syb7)fm{1>!P&2iZ?a zX|;=DDrjZTm9{3!UxnzO{FhQA@;;?E{m;@^<1S%%_s+FL4h6GD?g}*GcHyo~nKt7# zVPe|RP0I0O-5@YXZ*mfC++?>Iw+uyd6Ne4&8kXZ%0Q+arrA>AWTEb>UoTXemKc^|1 z?Kb0v;mZ$i>E62i531G)J2&a2&71d%y)8~lZPLp=Z{X~ zZenS1L)*a*J9z%G0E}5fr_hUQbm1Ah*hX>N?Kb1WWXb~xVHZ2-ECib9>JFY< zx9_xzeN=#IxNJ$cWUZz=3mD8l+++!*D*6)WGj;QdEY!cCy$ZR)jK5uN1NU219B z1BWA>-$O&r*yz~qBpefQO8C?6^nQzD^~td+z`Md;A@q_v$*6^8g%FC-O+%uhC_neBf8~N?ap7-{RD0X|iX)aHBT9LaVjXgna zFW4gaB`rncQ~bVgz82ped-~4-mb?88R-U8qj}rSBmtMx%!#HW|_lu3~)cR8X(L$=k pfxczlt(N(ny2&d$p`kCup9${y!`dJCMXqq|app(_A%E~)^ADFPZczXL delta 7790 zcmeHMdtB93w*Tz|2l*if5MTT_DuVbx1vwn>ARs?q6@gMTB081|a=?QikAttYz>Ku< zk+7hgd>}qje5F!mQ)WJK$;$qIsn04#Gif`8WfAZp+d&~dP{?&=#-;db$vrA_m zZnF5^yb`+hV@WHG^?4CB=NfcI{|%6&+GafV0(E;!J&UR(DZxjQSl$F00$S=`vWT_U z8S-TYeGF7aeWBZ35$o|vL%;`v4>#m>R-NwvWqa2^gQnsk2nvCq9TrI%3hMF37MGS6 zNV9wBiq|3U$@1bwS}kPw9vhL3A=v|Le_f$dhDj#ySUm@>XqEy%Ej)g<-ui`i~LC)Ui+cf#8+EozrfuPV`u(%isWnS)1l9oWu9c~KJJNzt&ES6#8hQmon zY8T{tW8q`*0Pw8#Cs3HIO~+iYANeJk+tQoh!@;*vzNLRnA&iEC%L8TY$wucxK?i{k zGvt}@sC&I1hyZsz$0c(`u1jcjS?Qy24pdo~MGU{P}K7K?-{1k=BB8yx#}q`Z9M_ zu{+`WtSUo0&YPlqZ*?W2Q{`Ul^%O0a-heke|7+kaJJJpsoQ8*oj0RqJ*%Eh^Bu$5> zJfbjARMq|+MtB51;JHJjQ+2h+UG*e6eEZj&8=|{a0*X-9uE*e6i<1U1t}Ov&z7i{g 
z)8aTNcfSCX&zUn5l;f7!CR^|E_5Z_odao_pbA}ekuZtrT6QIf^n8GAz z$m9rci8N{rP{khV2v99w`q0Rp>0%&xda7a;wc@vlI`DguVggm!YL%qB(NIujPnXD| z)<9J>Qb(X_`HPiC+S0`s^4L`2rB<6NxA|dCG-Zd)C5BQ=kSg5d2~w>sJtQfeVgpkx zH+oQKP`cb38$&u&{ixFBl5@a~1!twMV3&0p*HKyjG~rL3!RfMsU?!WjB8NO7s@wn> zx0k3g#3f$>mjTY7+sjc1I?ui@!3}v1IJWIWT|qAR-r7r*FG9wxeON=-C?-^ue}YJ} zo`oFHdy-m1RdJp=P-DfUa_|DEE7T<>lc%?8t%odyz6?sW{4tO^d#B5hn1~5b@~6t) zF1Z9;3OHYGBJT!wzs?1_EMM8EvroD_3^O(wLi7if2f(pMJ*X?dB{zfPs9UHjHrXEx zQilhC!6jlth)DG|(j{Vc|@HW)IE3<>1%~{DbCE zaNN1>?SF#f{@@sFW?)a_Ux1T}ho@DvTfR>sMuXZRY5iBhfavW&`kb_8yzSvZGG7*WMF8N+?>6#{zNuZBt)$(Bkl|-kDKx&Ov#U$#8R%IG|+ht72 zZ^0RxhutNQh?FE1CXlOUo9{u6NSAdVI6XMu;tAP=Fl9&Sfk^Ym1N)`L!E!x{ zMh;Jx`(aa_t~-PfZ=}}Ys(6n&hO084Q9SiX91L+RxEWew9#j)J-4sv#hv3+T%t<0* zDJDi0E6Ic3Gt`P-AL_ubi(*EqqKrHvRVzG}f@oE_lLn8tSJQ?Z)fs(j6#wEol{kE{qx8i@TYq$#g1cV0?e}hx1YE{4P+| zx(DFGlsm&duN95bwD<^z5BRBulqqXXGkB(K9T(_o|3ul)d_&F@ zXP{JS@JzYB%;1@_opOUO=l)rt!Vuhv`k;>Ab+~k=Y-q72)hP~6sTSZ=dIInVHUg}- z3E;w%`>SULrJl#nf(C;UDBd-s%>Xxi7U05^^|lyvt3kJca_LT4Z#%&4cL2<{09=@| zd>6p-n%(%}awp1)&jIYv0fQa^<ruTe*)mropMJnYf_yu{|f;6HPWkiV8Ka% z3sdG_Gw3N$HhkKkXF=Jaw*W3odjJ=iLAeuU{VM?5{Q%&iQ+mzmN3S~lP1LZ;ReO0*<+*Jk}UBQwUp#hMv0BA z3$sK$%HEBSVzxC_B`rF(~w4?Newnpz-A&P0>;`w-yT(x+$jnBM(INB~o!-~;5L(s~4 zN8A0&ejC=dvc8v3fIlBxD2+sKHKyTI`}RA%uy6yw;o*~%i^=o0&M#qH_@p#-;Ez_G zGGxA>hjr2I6O^~~Ck+{&rreg#p`QW1Y`{EGUpG7pe43ta%8cRT;|<_VfX|>;0QLc) z;Y$_1SLACasSnwnF0bKJ?kK>!KcDY`03SH#fwzHofOmll03T!(KoPJ2@Bj;e0w5pY zx1ooCIRL*&fZv$r0{luhli!YV@xbqgcL8?;6M<9! z|2;`-0Gu$WPGAS{EU*jM0`Q9sAN^in zo`N5q&?cZBNM>s&jX%d+Mzy7F4)e)=0(>3N0I&}0vr}sUc97F>6DTLsMqmT54p}j5pxpb z61bm}v}&7TxhiPuwy*)_g{R_|7i}N0q!DWIapPF+mY_@9!jfhfhl%> zciA1uyoB{mU6?y>@?&*c(}XxDZtiH{cExU9;-)yVriZV(aS-(wXPg7`O*z{Y++i%l zZz0ufpC*=2`*x?@ynEi*axC{Bk6g^ve2(Y7nkcea5iOM3Tx>V5yp^}=14B0v{P5J({kNH!*(PjnYZ#Y!ZR*MbhW{K(+Tt5e$hAMzMfX{fL-fsY#bI( zIGuV<5kqLyK1C$Z=^ak7jOtqy3Tg?ro1Y0@5Bj&(d)f*)tPX@V33)@4Tby?DGsCi% z|LU@)+{?}(=y78uNwb0N{6LGM+Y@zkqeZb?v(lygVRrMo!ubs=gZf)OSY{CzI0s9b zO|d(jI3x0RDx!f_?3`vdKN5`DoBsMA>Ixn~Cs5@$h0*VJP7~+Iu}cxBNbK%Ib9Y6E z6siHw(Nm~!S4htg@eVetaAZk7T`)9t{o(=U{r-f$H#Vo7@@LyT0NBQj|Kqz8?B)%B z_Lws}mAB9KfkL9*`Ea_t+iCw(h<@W*P#HabLeJo2!vIE-P7!;Y|I_rLzKzQEDB?|8 zv&V@m#GLJY=;9u$$fOVUyok81d(J8JnR$jj#PbVe+pE}5V=ZAUwKu;m_+)Nx|3u9j z&E5NG;$8)R?U2a%8=IX5f*ZvpF%KLs0(xI`Xu zAoW5HXqjNqmY?9o(>H=UFfYjKUpjl%6XjU@{X$HlF|CT2PKWl5vYX5Chs!oZRMTJf zMT#r5YQG}BZ=qOjhSR=7BgIsDsx{Yceim@3yN9k^-76ffIpdsql75FybO*Z|(Pr5& zn?qanD+A0=46{!C(Yp2emm|KP5Iyyh%ufL83LB5U`qB9th5}ZC`8DCx=q(?m=7wcz zkal@v z?;=WXi{9l*tkTgl44O`g3)lMt+-iyVQuIaQ5L~xgVTkVf^JsK zk3`KwYe$c&a~H&OyvFhdH;)`g6#M^D-}{G0RFq#LEj^M-?;gpVe#<4nz=rVM@^u^KQ zfX|=D&`?Gkp$CsmpdH8J!)FiBm*(TiN$-kfLEp|iqO08ch&qqWZ(G{7a2VB{u(zon zec=<3d`+*+xJHTBqX#i#exor@+D(Sazh6Tac51r5I$-d}daL-4xfS~RVVh-!!rG6u NHZR^7BlvET{tK0; { + return await this.message.createMessage(content, creator, created) + } + + async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { + return await this.message.placeMessage(message, card, workspace) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + return await this.message.createPatch(message, content, creator, created) + } + + async removeMessage(message: MessageID): Promise { + return await this.message.removeMessage(message) + } + + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + return await this.message.createReaction(message, reaction, creator, created) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { 
+ return await this.message.removeReaction(message, reaction, creator) + } + + async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { + return await this.message.createAttachment(message, card, creator, created) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + return await this.message.removeAttachment(message, card) + } + + async findMessages(workspace: string, params: FindMessagesParams): Promise { + return await this.message.find(workspace, params) + } + + async createNotification(message: MessageID, context: ContextID): Promise { + return await this.notification.createNotification(message, context) + } + + async removeNotification(message: MessageID, context: ContextID): Promise { + return await this.notification.removeNotification(message, context) + } + + async createContext( + workspace: string, + card: CardID, + personWorkspace: string, + lastView?: Date, + lastUpdate?: Date + ): Promise { + return await this.notification.createContext(workspace, card, personWorkspace, lastView, lastUpdate) + } + + async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { + return await this.notification.updateContext(context, update) + } + + async removeContext(context: ContextID): Promise { + return await this.notification.removeContext(context) + } + + async findContexts( + params: FindNotificationContextParams, + personWorkspaces: string[], + workspace?: string + ): Promise { + return await this.notification.findContexts(params, personWorkspaces, workspace) + } + + async findNotifications( + params: FindNotificationsParams, + personWorkspace: string, + workspace?: string + ): Promise { + return await this.notification.findNotifications(params, personWorkspace, workspace) + } + + close(): void { + this.db.close() + } +} + +export async function createDbAdapter(connectionString: string): Promise { + const db = connect(connectionString) + const sqlClient = await db.getClient() + + return new CockroachAdapter(db, sqlClient) +} diff --git a/packages/cockroach/src/connection.ts b/packages/cockroach/src/connection.ts new file mode 100644 index 00000000000..a9aa16e74b0 --- /dev/null +++ b/packages/cockroach/src/connection.ts @@ -0,0 +1,104 @@ +//Full copy from @hcengineering/postgres +import postgres from 'postgres' +import { v4 as uuid } from 'uuid' + +const connections = new Map() +const clientRefs = new Map() + +export interface PostgresClientReference { + getClient: () => Promise + close: () => void +} + +class PostgresClientReferenceImpl { + count: number + client: postgres.Sql | Promise + + constructor( + client: postgres.Sql | Promise, + readonly onclose: () => void + ) { + this.count = 0 + this.client = client + } + + async getClient(): Promise { + if (this.client instanceof Promise) { + this.client = await this.client + } + return this.client + } + + close(force: boolean = false): void { + this.count-- + if (this.count === 0 || force) { + if (force) { + this.count = 0 + } + void (async () => { + this.onclose() + const cl = await this.client + await cl.end() + console.log('Closed postgres connection') + })() + } + } + + addRef(): void { + this.count++ + console.log('Add postgres connection', this.count) + } +} + +export class ClientRef implements PostgresClientReference { + id = uuid() + constructor(readonly client: PostgresClientReferenceImpl) { + clientRefs.set(this.id, this) + } + + closed = false + async getClient(): Promise { + if (!this.closed) { + return await this.client.getClient() + 
} else { + throw new Error('DB client-query is already closed') + } + } + + close(): void { + // Do not allow double close of connection client-query + if (!this.closed) { + clientRefs.delete(this.id) + this.closed = true + this.client.close() + } + } +} + +export function connect(connectionString: string, database?: string): PostgresClientReference { + const extraOptions = JSON.parse(process.env.POSTGRES_OPTIONS ?? '{}') + const key = `${connectionString}${extraOptions}` + let existing = connections.get(key) + + if (existing === undefined) { + const sql = postgres(connectionString, { + connection: { + application_name: 'communication' + }, + database, + max: 10, + transform: { + undefined: null + }, + ...extraOptions + }) + + existing = new PostgresClientReferenceImpl(sql, () => { + connections.delete(key) + }) + connections.set(key, existing) + } + // Add reference and return once closable + existing.addRef() + return new ClientRef(existing) +} diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts new file mode 100644 index 00000000000..671ffc80dc7 --- /dev/null +++ b/packages/cockroach/src/db/base.ts @@ -0,0 +1,41 @@ +import type postgres from 'postgres' + +export class BaseDb { + constructor( + readonly client: postgres.Sql + ) {} + + async insert(table: string, data: Record): Promise { + const keys = Object.keys(data) + const values = Object.values(data) + const sql = ` + INSERT INTO ${table} (${keys.map((k) => `"${k}"`).join(', ')}) + VALUES (${keys.map((_, idx) => `$${idx + 1}`).join(', ')}); + ` + await this.client.unsafe(sql, values) + } + + async insertWithReturn(table: string, data: Record, returnField : string): Promise { + const keys = Object.keys(data) + const values = Object.values(data) + const sql = ` + INSERT INTO ${table} (${keys.map((k) => `"${k}"`).join(', ')}) + VALUES (${keys.map((_, idx) => `$${idx + 1}`).join(', ')}) + RETURNING ${returnField};` + const result =await this.client.unsafe(sql, values) + + return result[0][returnField] + } + + async remove(table: string, where: Record): Promise { + const keys = Object.keys(where) + const values = Object.values(where) + + const sql = ` + DELETE + FROM ${table} + WHERE ${keys.map((k, idx) => `"${k}" = $${idx + 1}`).join(' AND ')};` + + await this.client.unsafe(sql, values) + } +} diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts new file mode 100644 index 00000000000..2722f68bef6 --- /dev/null +++ b/packages/cockroach/src/db/message.ts @@ -0,0 +1,228 @@ +import { + type Message, + type MessageID, + type CardID, + type FindMessagesParams, + SortOrder, + type SocialID, + type RichText, + Direction, type Reaction, type Attachment +} from '@communication/types' + +import {BaseDb} from './base.ts' +import { + TableName, + type MessageDb, + type MessagePlaceDb, + type AttachmentDb, + type ReactionDb, + type PatchDb +} from './types.ts' + +export class MessagesDb extends BaseDb { + //Message + async createMessage(content: RichText, creator: SocialID, created: Date): Promise { + const dbData: MessageDb = { + content: content, + creator: creator, + created: created, + } + + const id = await this.insertWithReturn(TableName.Message, dbData, 'id') + + return id as MessageID + } + + async removeMessage(message: MessageID): Promise { + await this.remove(TableName.Message, {id: message}) + } + + async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { + const dbData: MessagePlaceDb = { + workspace_id: workspace, + card_id: card, + message_id: 
message + } + await this.insert(TableName.MessagePlace, dbData) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + const dbData: PatchDb = { + message_id: message, + content: content, + creator: creator, + created: created + } + + await this.insert(TableName.Patch, dbData) + } + + //Attachment + async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { + const dbData: AttachmentDb = { + message_id: message, + card_id: card, + creator: creator, + created: created + } + await this.insert(TableName.Attachment, dbData) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + await this.remove(TableName.Attachment, { + message_id: message, + card_id: card + }) + } + + //Reaction + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + const dbData: ReactionDb = { + message_id: message, + reaction: reaction, + creator: creator, + created: created + } + await this.insert(TableName.Reaction, dbData) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + await this.remove(TableName.Reaction, { + message_id: message, + reaction: reaction, + creator: creator + }) + } + + //Find messages + async find(workspace: string, params: FindMessagesParams): Promise { + //TODO: experiment with select to improve performance + const select = `SELECT m.id, + m.content, + m.creator, + m.created, + ${this.subSelectPatches()}, + ${this.subSelectAttachments()}, + ${this.subSelectReactions()} + FROM ${TableName.Message} m + INNER JOIN ${TableName.MessagePlace} mp ON m.id = mp.message_id` + + const {where, values} = this.buildMessageWhere(workspace, params) + const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' + const limit = params.limit ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.client.unsafe(sql, values) + + return result.map(it => this.toMessage(it)) as Message[] + } + + buildMessageWhere(workspace: string, params: FindMessagesParams): { where: string, values: any[] } { + const where: string[] = ['mp.workspace_id = $1'] + const values: any[] = [workspace] + let index = 2 + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'id': { + where.push(`m.id = $${index++}`) + values.push(value) + break + } + case 'card': { + where.push(`mp.card_id = $${index++}`) + values.push(value) + break + } + case 'from': { + const exclude = params.excluded ?? false + const direction = params.direction ?? Direction.Forward + const getOperator = () => { + if (exclude) { + return direction === Direction.Forward ? '>' : '<' + } else { + return direction === Direction.Forward ? 
'>=' : '<=' + } + } + + where.push(`m.created ${getOperator()} $${index++}`) + values.push(value) + break + } + } + } + + return {where: `WHERE ${where.join(' AND ')}`, values} + } + + subSelectPatches(): string { + return `array( + SELECT jsonb_build_object( + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + FROM ${TableName.Patch} p + WHERE p.message_id = m.id + ) AS patches` + } + + subSelectAttachments(): string { + return `array( + SELECT jsonb_build_object( + 'card_id', a.card_id, + 'message_id', a.message_id, + 'creator', a.creator, + 'created', a.created + ) + FROM ${TableName.Attachment} a + WHERE a.message_id = m.id + ) AS attachments` + } + + subSelectReactions(): string { + return `array( + SELECT jsonb_build_object( + 'message_id', r.message_id, + 'reaction', r.reaction, + 'creator', r.creator, + 'created', r.created + ) + FROM ${TableName.Reaction} r + WHERE r.message_id = m.id + ) AS reactions` + } + + toMessage(row: any): Message { + const lastPatch = row.patches?.[0] + + return { + id: row.id, + content: lastPatch?.content ?? row.content, + creator: row.creator, + created: new Date(row.created), + edited: new Date(lastPatch?.created ?? row.created), + reactions: (row.reactions ?? []).map(this.toReaction), + attachments: (row.attachments ?? []).map(this.toAttachment) + } + } + + toReaction(row: any): Reaction { + return { + message: row.message_id, + reaction: row.reaction, + creator: row.creator, + created: new Date(row.created) + } + } + + toAttachment(row: any): Attachment { + return { + message: row.message_id, + card: row.card_id, + creator: row.creator, + created: new Date(row.created) + } + } +} + diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts new file mode 100644 index 00000000000..dfa9528d12c --- /dev/null +++ b/packages/cockroach/src/db/notification.ts @@ -0,0 +1,239 @@ +import { + type MessageID, + type ContextID, + type CardID, + type NotificationContext, + type FindNotificationContextParams, SortOrder, + type FindNotificationsParams, type Notification, + type NotificationContextUpdate +} from '@communication/types' + +import {BaseDb} from './base.ts' +import {TableName, type ContextDb, type NotificationDb} from './types.ts' + +export class NotificationsDb extends BaseDb { + async createNotification(message: MessageID, context: ContextID): Promise { + const dbData: NotificationDb = { + message_id: message, + context + } + await this.insert(TableName.Notification, dbData) + } + + async removeNotification(message: MessageID, context: ContextID): Promise { + await this.remove(TableName.Notification, { + message_id: message, + context + }) + } + + async createContext(workspace: string, card: CardID, personWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { + const dbData: ContextDb = { + workspace_id: workspace, + card_id: card, + person_workspace: personWorkspace, + last_view: lastView, + last_update: lastUpdate + } + return await this.insertWithReturn(TableName.NotificationContext, dbData, 'id') as ContextID + } + + async removeContext(context: ContextID): Promise { + await this.remove(TableName.NotificationContext, { + id: context + }) + } + + async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { + const dbData: Partial = {} + + if (update.archivedFrom != null) { + dbData.archived_from = update.archivedFrom + } + if (update.lastView != null) { + dbData.last_view = update.lastView + } + if (update.lastUpdate != null) { + dbData.last_update = 
update.lastUpdate + } + + if (Object.keys(dbData).length === 0) { + return + } + + const keys = Object.keys(dbData) + const values = Object.values(dbData) + + const sql = `UPDATE ${TableName.NotificationContext} + SET ${keys.map((k, idx) => `"${k}" = $${idx + 1}`).join(', ')} + WHERE id =$${keys.length + 1}` + + await this.client.unsafe(sql, [values, context]) + } + + async findContexts( params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { + const select = ` + SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update + FROM ${TableName.NotificationContext} nc`; + const {where, values} = this.buildContextWhere(params, personWorkspaces, workspace) + // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` + const limit = params.limit ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, limit].join(' ') + + const result = await this.client.unsafe(sql, values); + + return result.map(this.toNotificationContext); + } + + + async findNotifications(params: FindNotificationsParams, personWorkspace: string, workspace?: string): Promise { + //TODO: experiment with select to improve performance, should join with attachments and reactions? + const select = ` + SELECT n.message_id, + n.context, + m.content AS message_content, + m.creator AS message_creator, + m.created AS message_created, + nc.card_id, + nc.archived_from, + nc.last_view, + nc.last_update, + (SELECT json_agg( + jsonb_build_object( + 'id', p.id, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + ) + FROM ${TableName.Patch} p + WHERE p.message_id = m.id) AS patches + FROM ${TableName.Notification} n + JOIN ${TableName.NotificationContext} nc ON n.context = nc.id + JOIN ${TableName.Message} m ON n.message_id = m.id + `; + const {where, values} = this.buildNotificationWhere(params, personWorkspace, workspace) + const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' + const limit = params.limit ? 
` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.client.unsafe(sql, values); + + return result.map(this.toNotification); + } + + buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): { + where: string, + values: any[] + } { + const where: string[] = [] + const values: any[] = [] + let index = 1 + + if(workspace != null) { + where.push(`nc.workspace_id = $${index++}`) + values.push(workspace) + } + + if(personWorkspaces.length > 0) { + where.push(`nc.person_workspace IN (${personWorkspaces.map((it) => `$${index++}`).join(', ')})`) + values.push(...personWorkspaces) + } + + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'card': { + where.push(`nc.card_id = $${index++}`) + values.push(value) + break + } + } + } + + return {where: `WHERE ${where.join(' AND ')}`, values} + } + + buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string): { + where: string, + values: any[] + } { + const where: string[] = ['nc.person_workspace = $1'] + const values: any[] = [personWorkspace] + let index = 2 + + if(workspace != null) { + where.push(`nc.workspace_id = $${index++}`) + values.push(workspace) + } + + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'context': { + where.push(`n.context = $${index++}`) + values.push(value) + break + } + case 'card': { + where.push(`nc.card_id = $${index++}`) + values.push(value) + break + } + case 'read': { + if (value === true) { + where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) + } else if (value === false) { + where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) + } + break + } + case 'archived': { + if (value === true) { + where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) + } else if (value === false) { + where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) + } + break + } + } + } + + return {where: `WHERE ${where.join(' AND ')}`, values} + } + + toNotificationContext(row: any): NotificationContext { + return { + id: row.id, + card: row.card_id, + workspace: row.workspace_id, + personWorkspace: row.person_workspace, + archivedFrom: row.archived_from ? new Date(row.archived_from) : undefined, + lastView: row.last_view ? new Date(row.last_view) : undefined, + lastUpdate: row.last_update ? new Date(row.last_update) : undefined + } + } + + toNotification(row: any): Notification { + const lastPatch = row.patches?.[0] + const lastView = row.last_view ? new Date(row.last_view) : undefined + const archivedFrom = row.archived_from ? new Date(row.archived_from) : undefined + const created = new Date(row.message_created) + + return { + message: { + id: row.id, + content: lastPatch?.content ?? row.message_content, + creator: row.message_creator, + created, + edited: new Date(lastPatch?.created ?? row.message_created), + reactions: row.reactions ?? [], + attachments: row.attachments ?? 
[] + }, + context: row.context, + read: lastView != null && lastView >= created, + archived: archivedFrom != null && archivedFrom >= created + } + } +} + diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts new file mode 100644 index 00000000000..9dab08561a0 --- /dev/null +++ b/packages/cockroach/src/db/types.ts @@ -0,0 +1,59 @@ +import type {CardID, ContextID, MessageID, RichText, SocialID } from "@communication/types" + +export enum TableName { + Message = 'message', + Patch = 'patch', + MessagePlace = 'message_place', + Attachment = 'attachment', + Reaction = 'reaction', + Notification = 'notification', + NotificationContext = 'notification_context' +} + +export interface MessageDb { + content: RichText, + creator: SocialID, + created: Date, +} + +export interface PatchDb { + message_id: MessageID, + content: RichText, + creator: SocialID, + created: Date, +} + +export interface MessagePlaceDb { + workspace_id: string, + card_id: CardID, + message_id: MessageID +} + +export interface ReactionDb { + message_id: MessageID, + reaction: string, + creator: SocialID + created: Date +} + +export interface AttachmentDb { + message_id: MessageID, + card_id: CardID, + creator: SocialID + created: Date +} + +export interface NotificationDb { + message_id: MessageID, + context: ContextID +} + +export interface ContextDb { + workspace_id: string + card_id: CardID + person_workspace: string + + archived_from?: Date + last_view?: Date + last_update?: Date +} \ No newline at end of file diff --git a/packages/cockroach/src/index.ts b/packages/cockroach/src/index.ts new file mode 100644 index 00000000000..03eeab5ffaf --- /dev/null +++ b/packages/cockroach/src/index.ts @@ -0,0 +1 @@ +export * from './adapter.ts' diff --git a/packages/postgres/migrations/01_message.sql b/packages/postgres/migrations/01_message.sql deleted file mode 100644 index 8aa19866f98..00000000000 --- a/packages/postgres/migrations/01_message.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE IF NOT EXISTS message -( - id INT8 NOT NULL DEFAULT unique_rowid(), - content TEXT, - version INTEGER NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - PRIMARY KEY (id, version) -); - -CREATE TABLE IF NOT EXISTS message_place -( - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - message_id INT8 NOT NULL, - - PRIMARY KEY (workspace_id, card_id, message_id) -); diff --git a/packages/postgres/migrations/03_reaction.sql b/packages/postgres/migrations/03_reaction.sql deleted file mode 100644 index 5dc21091f71..00000000000 --- a/packages/postgres/migrations/03_reaction.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE IF NOT EXISTS reaction -( - message_id INT8 NOT NULL, - reaction INTEGER NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - PRIMARY KEY (message_id, creator, reaction) -); - -CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id); diff --git a/packages/postgres/migrations/04_notification.sql b/packages/postgres/migrations/04_notification.sql deleted file mode 100644 index 96b53f1a6d1..00000000000 --- a/packages/postgres/migrations/04_notification.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE IF NOT EXISTS notification -( - social_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - - PRIMARY KEY (social_id, message_id) -); diff --git a/packages/postgres/migrations/05_notificationContext.sql b/packages/postgres/migrations/05_notificationContext.sql deleted file mode 100644 index 
f8936f24cc0..00000000000 --- a/packages/postgres/migrations/05_notificationContext.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE IF NOT EXISTS notification_context -( - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - huly_id VARCHAR(255) NOT NULL, /* Or maybe account id or something else */ - - archived_from TIMESTAMPTZ, - last_view TIMESTAMPTZ, - last_update TIMESTAMPTZ, - - PRIMARY KEY (workspace_id, card_id, huly_id) -); diff --git a/packages/postgres/src/index.ts b/packages/postgres/src/index.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json new file mode 100644 index 00000000000..eccea9085e0 --- /dev/null +++ b/packages/sdk-types/package.json @@ -0,0 +1,16 @@ +{ + "name": "@communication/sdk-types", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "dependencies": { + "@communication/types": "workspace:*" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts new file mode 100644 index 00000000000..0e5c9c38d33 --- /dev/null +++ b/packages/sdk-types/src/db.ts @@ -0,0 +1,54 @@ +import type { + CardID, + ContextID, + FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, + Message, + MessageID, + NotificationContext, + NotificationContextUpdate, + RichText, + SocialID, + Notification +} from '@communication/types' + +export interface DbAdapter { + createMessage(content: RichText, creator: SocialID, created: Date): Promise + removeMessage(id: MessageID): Promise + + placeMessage(message: MessageID, card: CardID, workspace: string): Promise + createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise + + createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise + removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise + + createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise + removeAttachment(message: MessageID, card: CardID): Promise + + findMessages(workspace: string, query: FindMessagesParams): Promise + + createNotification(message: MessageID, context: ContextID): Promise + removeNotification(message: MessageID, context: ContextID): Promise + createContext( + personWorkspace: string, + workspace: string, + card: CardID, + lastView?: Date, + lastUpdate?: Date + ): Promise + updateContext(context: ContextID, update: NotificationContextUpdate): Promise + removeContext(context: ContextID): Promise + findContexts( + params: FindNotificationContextParams, + personWorkspaces: string[], + workspace?: string + ): Promise + findNotifications( + params: FindNotificationsParams, + personWorkspace: string, + workspace?: string + ): Promise + + close(): void +} diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts new file mode 100644 index 00000000000..1beb455f5e3 --- /dev/null +++ b/packages/sdk-types/src/index.ts @@ -0,0 +1 @@ +export * from './db' diff --git a/packages/sdk-types/tsconfig.json b/packages/sdk-types/tsconfig.json new file mode 100644 index 00000000000..49e05cea1ee --- /dev/null +++ b/packages/sdk-types/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From fe660866e45f5e3e6216e0b8f37929ffb34fcc08 Mon Sep 17 
00:00:00 2001 From: Kristina Date: Mon, 23 Dec 2024 18:37:37 +0400 Subject: [PATCH 013/636] Init sqlite-wasm adapter (#5) * Init sqlite-wasm adapter --- bun.lockb | Bin 57472 -> 59816 bytes packages/sqlite-wasm/package.json | 20 ++ packages/sqlite-wasm/src/adapter.ts | 121 ++++++++++ packages/sqlite-wasm/src/connection.ts | 56 +++++ packages/sqlite-wasm/src/db/base.ts | 62 ++++++ packages/sqlite-wasm/src/db/message.ts | 213 ++++++++++++++++++ packages/sqlite-wasm/src/db/notification.ts | 233 ++++++++++++++++++++ packages/sqlite-wasm/src/db/types.ts | 62 ++++++ packages/sqlite-wasm/src/index.ts | 1 + packages/sqlite-wasm/src/migrations.ts | 138 ++++++++++++ packages/sqlite-wasm/tsconfig.json | 8 + 11 files changed, 914 insertions(+) create mode 100644 packages/sqlite-wasm/package.json create mode 100644 packages/sqlite-wasm/src/adapter.ts create mode 100644 packages/sqlite-wasm/src/connection.ts create mode 100644 packages/sqlite-wasm/src/db/base.ts create mode 100644 packages/sqlite-wasm/src/db/message.ts create mode 100644 packages/sqlite-wasm/src/db/notification.ts create mode 100644 packages/sqlite-wasm/src/db/types.ts create mode 100644 packages/sqlite-wasm/src/index.ts create mode 100644 packages/sqlite-wasm/src/migrations.ts create mode 100644 packages/sqlite-wasm/tsconfig.json diff --git a/bun.lockb b/bun.lockb index 5db3fe48a99d18dac938dc6ad72f92e427c2bd1e..76fa802047aa892f2968c867da494978ab0cb830 100755 GIT binary patch delta 9451 zcmeHMdstLe*FR@ql*3KA7z~3ThL-^c7zP{_Wz@_x(X>=7%>)@>~+_jm*v$JA9vz*#n@Z|oYz@V6#pMT8R^Wr-f65szW zxIvK1vWnF3vdv2sM!9+lLit_X`hhxJxz79&L3r6s5LkW-v?XY+V|+eqUsL6$RC*p% zLVbqAQD}C$gh}9AfG<|%7xfC?tC`ZD71TEc3Yk#w2JHpf4%F!~XXO^83w6yE#m zu{VFPO$p~dDF!ge-4GdHcI#E~K7I3|u~ z^FyJ;dfw1uGaatd5@)VUaJY&}9mNxcAehAM9InD*2S&Z86NCWpm5}o^)5aDQvmb6j z&ereok>ye4=O74#Aj6SfngxYC7Y`>0Cm`nmZ}=($_8?K$p{xuxLQtNa<}$;?tP=37 z^%z2c9?KVjvKeXPWUGa6e?bTV?*+=ElomV9<1j*DY5v5q1sRTg7zI`3uYq#=D%F}P zpkd$(Re22>qrU7Hh0&$-s`|t0qlzESHnJ1>@U>@?+?lYH3wzG3bV{56AK-#yeMTs znNZ@$=aI*N=S+DLl>OdErO?NMQ}>X*n|r@pThH{%2l~!6-cIR%s$JO8&o7U9H^$*p z7Q5VP=r%uLq~FrUpy2wCvv&3lY3YAq-v{~Y3Zmxk>Ycm9^p7K1)d54U_n;GXgPeyb zTRN!o5Gl$tSvmy2Ct^H_ES_Dc*3&NjL=Ct;N+vJ6SV~S@w^J>yx2VC(uIr$uDDPzH zS-l{@1?3_YdD+C(RO@XQZ&CxU1IXlK7iW?a*P~SHW0${18Bh+DhK zwmQh;okE+74W^a{qr|sT0X)d@v&}=T- zcN!e`BD-0Vu+Z}8inT+*@p!NdeO7~GXCkcLHt}+{)`0W%3&9O0b4Yg& z+$6}|SiEdHLja94B#T3+)?k+wW3fn*d%^tlAA##dX0IeE01wSDrFSGvHr0mM#n-4I z#4a_UrWa}uV1rE;)r!U$lcjvDo?05)q}AZq$idHMtLohq(?=3OX#nmzI8F+^9BM0G0^ARh5b2W#j@!W#UN&hJxMW!q$s}ToNV{kuC$1S( zi|bNqh_vg!ZUc+SZ0_z6@yo(tv`OXQ>}ZTEjj&0_z;VaT$r54H3vD&G496Yv2o73@ zJH-VzamOYpF6d0A4t6OYFNdLuDOi-QQ*8&ku6-mmc1V^+L@H57(qMM;zzvrh^L%cB zQ@Y})Tkyu0oMYiTr9w~`yGN$G3z6FVv? 
z$_=p+30P}dbf7k^qzot(VE@e;Iu*4GPml`~~|AC*3;Qap8XVaoN70XoWy3+E2lcXDCMwF4AZrW`-4PjX?(^-n0Q zOm(zBF369iPpML-ZgeXys6Pu*0j?dv3<^`W_!)rZBLOZQNG*B7WfQ|_k-`)*l;8(E=P=H#t|eAkp`mzApZ zO(}Quyeenv1-uAw+ROqxfcXIHRRUa?vOy#uEx8cDf(0sF2&xB`0^D#JAkdU}#a}C# zLupF6bv3}+H30Lg0WM5gz7}BlI)KZADC=zic=RnQ-2uu4RPh%Jcz(M9Zm<{N(vEMI?1*#ln!-2Zj8{<<>%CIYRZ3FQV~0ZebGawh*53ry<=tu#E)Un`8_ z@V{0VIpZpE<+S)~g^}Iy|7eAYQw;uf$$0?(n-!*RPnR=h`XV!hKFjnebCw=zJh}7m zUh~lBhA#a2(fO%v+ZNBeJ@i%g?^^!tlY2=i{U;r|l^%TGXn3}Ie~;YYpYFZ-()obw zur>aHhbK-oMO;DGqVRKzpFwA;BQtS7b#MQS1l#u8j>W^S&In%i`QQ&KZ-47`W9E>L z?~lB=cT30SsSDbSe$Qwy(PT@TU9VR?S!t|DO1jxQE+p`gA)^oM{FbI<1(&@)(4)`U z<43n#JLr_EtXE&0Q`!2BjcYE{nYIn8ihpbR*yWiEdK~`R;!(e_U+Zf*JA`Ss=Gn*9 zmIYp$@M-)!$IdsKSodE|tt&dcqU>(Yh-czUdi39R>~^dBiD&wsTYQCnY`@Hv`A*SS z6U#mxOLKlUJac+zigAB(K+Qkx*QSnn;?4eZ{N}|)oYH?o=Ir2+bG|pIq}( z=%i;(8I~Talt%cqX#DbC<0r>^t!*{KGP`ZN;j>$e`tXIt9^XEZb7}LWy9HA}%X_o+ zk0CpLxZcpjy4f@>GnH;+d()iE6mc$DvQlY@(}xyj4ZuGB3b=mbeCW~a0b&JJWT(>i z;JyP#)XSMl8*+STt#g35knT8B#YNPAT&lR3R^hsY?&10>4arHxXHG4yRV3z4*YCvG zKKKgQRac!Gr^7zqUDEy-?Lkut4*0y&Liso91ZYcDF<4|p3N7}~55{LGe^2b9>fG1s z7Pw}K;sI)1I#%EQVMTuutNI*;)1g`3p&fq|OedBT-YF1+)#xJO5h4w zv~W-&Z|Qgk@d@xL@ELFf_z>VN&3=Gw@&o*V03eY378R7?lZbasLLI=bfBg5tf3?Z` zkMp1xfX{)8z$Jk9HOB$oAQc1Iz&IcW$OCxiI~L%#+;hMffcM_~Il()0d`0qK82pXK z`}i{cr$^rKr-1eadI7xwJHT5BD}XN{{v(6GmiUhec^9`EbPvE=z}EnFU?MOD z;62V|;0kaRxCVRy@TRN)DCFH5@4eFi{t|cwNCk!ij{*IFM}Qsxz5)e)q&Ysk>*6i> zD4;*k2jG0*crO7sc8>zpz$$>l#t~Zs)BwwXwZL){E-Qe8Kq-(8aIiT^2e+Q&mjt<$u^H2Wmw~?lGXOSkCNK-&089f|N4wYB%!6z$z-`zxHn$R}0OkX) z0G#MN6}E(@#nWSp_yYj#TgdZi$&*2;-a~IxwWrHf35lE(A$S0_|7F`W8x2VyBs>z+ z%R|S_ZydD1)utKG)EAPi6uZn|>g5|A4q@-+G9nEiw2d2#09wGOO_@>K{V= z%JiEz)B2tkP~R2xafmN%NBv#YBf-lLxvF=6YEL+5f8XqFmj>^Ob_RO4d0vg#l@x!jPrRh`m9`lnkDYCZjkPK3p5k)R!-xKeV4jd|q7 z8FI_`7-V}PwO(N`YNs&WECYsyOsn6FdQ24q3sp2^g#my2<>0!HW~>+}o}e=;tVZoL zY2K?D^xqgGJH@WU7WFP6SS;wBE~hk(OWCyb=Imw9YZ{SQY5NI)gZgqBj`C zm+9?lt9XhkRvE~5RfthL`8(+QRqbPS8SGjMT#IM>8a=YgYSc~+C+zvortj9Dt%KuZ zItzl_hj@)PtuiS6i5ICJ_f|T&(O}e03Xja4>>H#zJ3%L6;_NJ;j?6Vy`~#O(V-Oo@ zQq4f4cI+3sKKbDF8R=6n22|Nkg>EB{Y1C^ce(6P#J$ibz=%TjAbQe+hTI(Oh z81-KMl)pAu7w%89*2d$-JY+>6om{IIYv}CSo$&7LbyiV{-{0xnI;+k*hcs=!6Z$R^r>$QgWsCIp{$1(#F3eO@Hu4~l>35Mkri-H8nJ5R^m z^_=s@E9k=#lfaYCAU~vvk}&dnR+xlgfZgu=MmU3oy){Y2gXDryh@BEQ^ zwI0uzcG!5JQ`PyTVZo2f4wY9BJi=j{;>EKxWRumX9cB(2bMyWW^V9w*cOXZkl2&dq zglcD+?jz!d9&fhsUDRWN;)q3s)5n|QbzyDDV{?k}e;UF$izip8r-IFfMD0v7DZJn4 z7b+hgpqc|i|3d)ilg%NDOGP>QN+^10Vg2LQ4_v3^jN-&lLn>+g9HI+btl~LpvDJVh zIMddFy3k0Pur*%%f;MfniU%p}HG_DBCcS1bYUiXhb($W`DyxuzS z_pi&gzq~HB<5uk`v#ls}P+p|kNv(DW+E!lM;$v*IeOqX@dRnWUgd#$SGfe;GU{^ac zRchj3t9sPy(oRuzB1au_5dPeJL*gHs|K}~O0S{@?ZKhi;qlm|n{V_i z$hZt+$x)}6}Tbd9~V=S^+FQ^)1CQfoZ*;2t6mR_{{z#|OOqz%rn8}5XWG=+&wuUa+2t$6 zJtMxNgiDb$zTQ7X+ZSrPz-m=x%ndl%NDD93(R_UJO|IzVIsb;zdfSaZZ2i5e65JrS R-dS&xc#A1-Eb(PW{|0bR0>1zN delta 8082 zcmeHMX;@T8*1mP2wJ)0psI;vhDvBCfXqHA4T8%LxqQTEKF(^udC|hVSGDc~~IEuy? 
zJj5_DA?_P40bFoNFpJMjYfOS!Cd()~`DPL`(Qz3wF<&(Eo_lW_CgWu0nLpp36wiCl zJ*Q4pojT`K)un6adtR>Byq>WP_%LR}yeXGE6vplPT1kAez| zFml(`Hc2h(tEYxnzFMO(+S5mpTyDH}26Z?K@{7tOX@-|1vAhK|5VXLtpop~(>hjGx z-2cFU{p%)B32d45gv*#3- zvL9L?XX{t{tMY-adI&;T(J{L`4+@1&9!`=rLe2vo4bTScL_+RgnF$*qaLvtjM#IFs z(coFjgb<)d*IZCGBYU1|wR8=9ckt&xd6e?f{OEZYp`^U1Voq_6<7tc%1o=`>ZeOTd zGXXRV{77A1hQ_F`ysHp{3U26vsXSmW@a(_Wx@hyB=Zvn%E-a9;OY=&yOP!8`kn_C% z+*PCJL3zN6Zkm5MgxTM|?36u>k3zZ}^b$u=*$s|=G-|)hgc^o_id@t}Rs)FR| zYL=wF;Adjm;h=Xw(T6J$lsjJ3Lo@itc~*;d9H<>Vwff7T%+D^(p5uVWaxkb*B?P)6xa7Hx1S~lc1C}+|NUEk>_ zT;M2`q$MzvCo~ciRjx=(j3*Ec-k%3Vf|Zr!J4%}<*1KEfl|GuiHK6dN>kuZ-S}$T~ z_}Eng%KQeb70#3kpgjCKP?oPkJqOUER{)+JKN^&Wg}YQ*UY?(WSE;1fS(aCdH0NZM zr2D1X{#qoT1m%Vmpgd4XUUXSSiNo0zrEL%k%N#}A!&&eg`8Pq?kB6d~k`1H1Lu&_I zcjs*k&iqpfz1=iE|8?_~acS>HhMlJ0Dkqdr&@dg!Mr!p*GQuMXU=*tMv58z#eC^^L zO2e}oRpI#{x$#^^il1GyP@11zzU57;{8E*{21&xaT!LDCZK9kMf4g{<((vp_Rd`M( zH=Zw&5@1)(VZT8%pj+*46BDQ^z%JI3JHT!@-BFTKX^r1dp-@^UyOM$pB^8<-sMX)5 zECQDZ&On_5ZQ?hi1lkn?9Ia_UrrZxM>8{2iazo>|t|3t?G`rs@EjXd)%pF5mE_vBB^H!q4vW zq0C^LC?t2V-Ea)DWXk9?RD@Dmh+RoXgoZ;0{tmV&HQ1$Km;Mn6bdqe4gd>;wUOXae_u^+ssHNd943=XRZn(W0t z+~v4k2-XwVc~fW9%>*|JTt~LeumxNaTcdo6Hy&BFQt5{skH>-o(54g|I}y?Jvx$A= z4z(-3K{%~c4fc5+xQD<=R2`mV1QQPiKKHfBe-5Hkrc}{~lppairFrfQkvPW9K)g*p?2JAvdQ0t&?$4OG8}u_-4s^J!LhgD33&T` zaN{)&eu=>%=0JL@nUM>QeWuN-4xHAG0y$ji4dx85LM(3w;O&3*%VsiGt_Vh z8`2o{Eg5@fmio2{ZZ++K!Ys

I>?kGo>GgW{>ooCAR zMLN%v`zg`+5+0uw9@7Q)qF$&g)9c$)?r4E7XUb`{6yQ`^0T_X`0P9r)T$u8B>zF}V z$Ma_a>9htE9~;uI0dBYf;KG#kUeGCjr7>RzaA{9j{u03LHv!CV2DmU~`Bs4CmD}*( zaxcn?I{-Flk4_svxwNM|!9|5?ur##S6RjN_uj{(rHlwQFL3tk1dFlD|)rzb$U zAfjW=o3J2{6Ezd{}alF{a$S-N$2!-O#bU_g%{*d zy`y_k9&i}I9!S;oev1CQt^B;L{JgFFzi%s8%KyoB!V8-9{$X2bx|RH1H1(J}jzaVO z>CD`8v4WP)pGv<4_s99^_?(U{m`V%h`O~un>0&iq1Q#{mk46=a6Kklta4LN?-=A(4 zrsLnv5k*sJWr06!D@qsDbPZfmp+8M7P8UR*il@>S;0z_{;#tZpS!P%PJN&8DdAR9n zNsLS^DYS!)$$CUtixvoMpcXF_~mx{br~PUtjA}%0RQVrT}XMMxRQ_0 zMt~0v{0q%LY^W#LJD~3Zt-x8}J%CTh$ACN_AD9Oe0Q?m+8<+uP0W$&qM4JXo1pWt@ z1Y`o^feFA^Af1oh2kp5vpyQs7B|c{XhY@GF2#A>b*1dt;NU08UiywHn|wTnnrLo(8x*n}30A zLz&n}UvBE>g*WQ7Ibj0FH3Vzq!lZf6d^x_x*{cIbHUJVAs1ImY!}OlxdoFVuMb;2Y zY=R|59hPfObr|qUdd$+lq9)dYzj&#J_HK^D?@|{xoA6W4|KZ(DgSVJOB(2?NGJAgc z%>CHjI5|4$kFrS8I>i5?i?<9?79iU@qhB{QY%$573wmoysF}Y+*ue7m8Cy-}k!Xo!;CgsP z#4m;%{Q9KUbD||tnm|jpn(%|u255PHa8AvVkH7yHV;i($Eiq7%o~C!UW{8c{b6cFK zr_608{DwCl&(qZKvWe=pjhD~MbY)wpd|4*rcB_0_reWKy!brv2YB`++l{34(KQio1Yz7IF_w6C7x!gK@Z4rhZ@FT4 z;r!(&wXax<4f_=~C(ethneS&jdfk0dw-_yh>Cv4gv78>?8HZmg>hXS@PVUSwd#)@7 z??^rIr{%L3qXk08V~wQfT^aHfA9`X}sC>zn*6*^KJr@wQHN)S0zj<0Hv=b}{C3ZU1 zq2KKa>U+~qyCu*sC0yCBN1S}H)^roP@z6!j=xp3>wa;<8bIqPRiQ!$^YLi=xYynb( z^>EJYjuWFv)SE;mnd{?(i6-EEF6Gz9ap3CeLwk5GBR*c)?@sC?D;0GD@fHN>T4y?2 zZ#8>vA^!SFpDvqreP&k`R6EKje9u8~kUrQG_Y<*vjr#13OYmGV)JII){O$7Z`=S?4 zF<9xjg!t;4+2>{k8{<)fSaD4St=?-AWwdSYPxXJAZtNW-iYRBO z%OQ1DCTJHHlI!BiPs|; zukJumM=ARwm(TM#$>D&GBqLv1~ ze9xtTuo9Az|^crDLuu!_pI2u)NkUa&w?Zk%X16__v=ZEU

!n6hVnZ+O;$ua}0d*N$8hJt=K-whKJ@wlt2jhA55$ReRDaOa!*ium zJ^0}@{SKxe>PxJczSWX+A(FBVhMNCbVIWQ)E;Uqr(4=*94=qnOi>M7+t0?xcNwj-6 z|M+z4efM;0r$x1gy9eJxlTIGqlIXc4TAT2fuGgK{pHf|($f5IGBh7h!&lkgsb1tC9 zYDwg@tnE*A&7tzP{?y!Tl@CSHSIwb4Jhw<`@Agn;Ec$Q>e4sD#vr*LRNT}QvMaf5U zeKs~~6Ti?%yN`^bt4CtGFYlox#FI%0Z;6EgcLJL6l~8(Jm?c{D%L#aG+t M=ob?y { + return await this.message.createMessage(content, creator, created) + } + + async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { + return await this.message.placeMessage(message, card, workspace) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + return await this.message.createPatch(message, content, creator, created) + } + + async removeMessage(message: MessageID): Promise { + return await this.message.removeMessage(message) + } + + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + return await this.message.createReaction(message, reaction, creator, created) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + return await this.message.removeReaction(message, reaction, creator) + } + + async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { + return await this.message.createAttachment(message, card, creator, created) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + return await this.message.removeAttachment(message, card) + } + + async findMessages(workspace: string, params: FindMessagesParams): Promise { + return await this.message.find(workspace, params) + } + + async createNotification(message: MessageID, context: ContextID): Promise { + return await this.notification.createNotification(message, context) + } + async removeNotification(message: MessageID, context: ContextID): Promise { + return await this.notification.removeNotification(message, context) + } + + async createContext( + workspace: string, + card: CardID, + personWorkspace: string, + lastView?: Date, + lastUpdate?: Date + ): Promise { + return await this.notification.createContext(workspace, card, personWorkspace, lastView, lastUpdate) + } + + async removeContext(context: ContextID): Promise { + return await this.notification.removeContext(context) + } + + async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { + return await this.notification.updateContext(context, update) + } + + async findContexts( + params: FindNotificationContextParams, + personWorkspaces: string[], + workspace?: string + ): Promise { + return await this.notification.findContexts(params, personWorkspaces, workspace) + } + + async findNotifications( + params: FindNotificationsParams, + personWorkspace: string, + workspace?: string + ): Promise { + return await this.notification.findNotifications(params, personWorkspace, workspace) + } + + close(): void { + void this.worker('close') + } +} + +export async function createDbAdapter(connectionString: string): Promise { + const { worker, dbId } = await initializeSQLite(connectionString) + + await applyMigrations(worker, dbId) + return new SqliteAdapter(worker, dbId) +} diff --git a/packages/sqlite-wasm/src/connection.ts b/packages/sqlite-wasm/src/connection.ts new file mode 100644 index 00000000000..7f82b3b91d1 --- /dev/null +++ b/packages/sqlite-wasm/src/connection.ts @@ -0,0 +1,56 @@ +// @ts-expect-error error +import { sqlite3Worker1Promiser } from '@sqlite.org/sqlite-wasm' + +export type 
Sqlite3Worker1Promiser = { + ( + command: 'config-get', + params: object + ): Promise<{ + result: { + version: { + libVersion: string + } + } + }> + + ( + command: 'open', + params: { filename: string } + ): Promise<{ + dbId: string + }> + + ( + command: 'exec', + params: { + dbId: string + sql: string + callback?: (row: SqlResult | null | undefined) => void + } + ): Promise + + (command: 'close'): Promise +} + +type SqlResult = { + columnNames: string[] + row: any[] | null | undefined + rowNumber: number | null | undefined +} + +export async function initializeSQLite( + connectionString: string +): Promise<{ worker: Sqlite3Worker1Promiser; dbId: string }> { + const promiser: Sqlite3Worker1Promiser = await new Promise((resolve) => { + const _promiser = sqlite3Worker1Promiser({ + onready: () => resolve(_promiser) + }) + }) + + const configResponse = await promiser('config-get', {}) + console.log('SQLite3 config', configResponse.result) + + const { dbId } = await promiser('open', { filename: connectionString }) + + return { worker: promiser, dbId } +} diff --git a/packages/sqlite-wasm/src/db/base.ts b/packages/sqlite-wasm/src/db/base.ts new file mode 100644 index 00000000000..63d1ff3c35b --- /dev/null +++ b/packages/sqlite-wasm/src/db/base.ts @@ -0,0 +1,62 @@ +import type {Sqlite3Worker1Promiser} from "../connection"; + +export class BaseDb { + constructor(protected readonly worker: Sqlite3Worker1Promiser, protected readonly dbId: string) { + } + + async insert( table: string, data: Record): Promise { + const keys = Object.keys(data) + const values = Object.values(data) + const sql = ` + INSERT INTO ${table} (${keys.map((k) => `${k}`).join(', ')}) + VALUES (${values.map((value) => `'${value}'`).join(', ')}); + ` + await this.worker('exec', { + dbId: this.dbId, + sql + }); + } + + async remove(table: string, where: Record): Promise { + const keys = Object.keys(where) + const values = Object.values(where) + + const sql = ` + DELETE + FROM ${table} + WHERE ${keys.map((k, idx) => `${k} = '${values[idx]}'`).join(' AND ')};` + + await this.worker('exec', { + dbId: this.dbId, + sql + }); + } + + + async select(sql: string): Promise[]> { + return new Promise(async (resolve) => { + const rows: Record[] = []; + + await this.worker('exec', { + dbId: this.dbId, + sql, + callback: (res) => { + if (res == null) { + resolve(rows); + return; + } + if (res.row == null || res.rowNumber == null) { + resolve(rows); + } else { + const rowObject: Record = {}; + res.columnNames.forEach((columnName, index) => { + rowObject[columnName] = res.row?.[index] ?? 
undefined + }); + + rows.push(rowObject); + } + }, + }); + }); + } +} \ No newline at end of file diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts new file mode 100644 index 00000000000..b6d751893e2 --- /dev/null +++ b/packages/sqlite-wasm/src/db/message.ts @@ -0,0 +1,213 @@ +import { + type Message, + type MessageID, + type CardID, + type FindMessagesParams, + SortOrder, + type SocialID, + type RichText, + Direction, type Reaction, type Attachment +} from '@communication/types' + +import {BaseDb} from './base.ts' +import { + TableName, + type MessageDb, + type MessagePlaceDb, + type AttachmentDb, + type ReactionDb, + type PatchDb +} from './types.ts' + +export class MessagesDb extends BaseDb { + //Message + async createMessage(content: RichText, creator: SocialID, created: Date): Promise { + const dbData: MessageDb = { + id: self.crypto.randomUUID(), + content: content, + creator: creator, + created: created, + } + await this.insert(TableName.Message, dbData) + return dbData.id as MessageID + } + + async removeMessage(message: MessageID): Promise { + await this.remove(TableName.Message, {id: message}) + } + + async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { + const dbData: MessagePlaceDb = { + workspace_id: workspace, + card_id: card, + message_id: message + } + await this.insert(TableName.MessagePlace, dbData) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + const dbData: PatchDb = { + id: self.crypto.randomUUID(), + message_id: message, + content: content, + creator: creator, + created: created + } + + await this.insert(TableName.Patch, dbData) + } + + //Attachment + async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { + const dbData: AttachmentDb = { + message_id: message, + card_id: card, + creator: creator, + created: created + } + await this.insert(TableName.Attachment, dbData) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + await this.remove(TableName.Attachment, { + message_id: message, + card_id: card + }) + } + + //Reaction + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + const dbData: ReactionDb = { + message_id: message, + reaction: reaction, + creator: creator, + created: created + } + await this.insert(TableName.Reaction, dbData) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + await this.remove(TableName.Reaction, { + message_id: message, + reaction: reaction, + creator: creator + }) + } + + //Find messages + async find(workspace: string, params: FindMessagesParams): Promise { + const select = `SELECT m.id, + m.content, + m.creator, + m.created, + json_group_array( + json_object( + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + ) AS patches, + json_group_array( + json_object( + 'card_id', a.card_id, + 'message_id', a.message_id, + 'creator', a.creator, + 'created', a.created + ) + ) AS attachments, + json_group_array( + json_object( + 'message_id', r.message_id, + 'reaction', r.reaction, + 'creator', r.creator, + 'created', r.created + ) + ) AS reactions + FROM ${TableName.Message} m + INNER JOIN ${TableName.MessagePlace} mp ON m.id = mp.message_id + LEFT JOIN ${TableName.Patch} p ON p.message_id = m.id + LEFT JOIN ${TableName.Attachment} a ON a.message_id = m.id + LEFT JOIN ${TableName.Reaction} r ON r.message_id = m.id` + 
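+ // The LEFT JOINs above fan out one row per patch/attachment/reaction; json_group_array
+ // folds them back into JSON arrays so the GROUP BY below returns a single row per message.
+ // When a message has several patches and several reactions the arrays can repeat entries,
+ // since the joins form a cross product and no DISTINCT is applied.
+ // buildMessageWhere interpolates filter values straight into the SQL text, so this path
+ // must only ever see trusted input until the queries are parameterized.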
+ const where= this.buildMessageWhere(workspace, params) + const groupBy = `GROUP BY m.id` + const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' + const limit = params.limit ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, groupBy, orderBy, limit].join(' ') + + const result = await this.select(sql) + + return result.map(it => this.toMessage(it)) + } + + buildMessageWhere(workspace: string, params: FindMessagesParams): string { + const where: string[] = [`mp.workspace_id = '${workspace}'`] + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'id': { + where.push(`m.id = '${value}'`) + break + } + case 'card': { + where.push(`mp.card_id = '${value}'`) + break + } + case 'from': { + if(value == null) continue + const exclude = params.excluded ?? false + const direction = params.direction ?? Direction.Forward + const getOperator = () => { + if (exclude) { + return direction === Direction.Forward ? '>' : '<' + } else { + return direction === Direction.Forward ? '>=' : '<=' + } + } + + where.push(`m.created ${getOperator()} ${value}`) + break + } + } + } + + return `WHERE ${where.join(' AND ')}` + } + + private toMessage(row: any): Message { + const patches = JSON.parse(row.patches).filter((it: any) => it.created != null) + const attachments = JSON.parse(row.attachments).filter((it: any) => it.created != null) + const reactions = JSON.parse(row.reactions).filter((it: any) => it.created != null) + + const lastPatch = patches?.[0] + + return { + id: row.id, + content: lastPatch?.content ?? row.content, + creator: row.creator, + created: new Date(row.created), + edited: (lastPatch?.created ?? row.created), + reactions: (reactions ?? []).map((it: any) => this.toReaction(it)), + attachments: (attachments ?? 
[]).map((it: any) => this.toAttachment(it)) + } + } + + private toReaction(row: any): Reaction { + return { + message: row.message_id, + reaction: row.reaction, + creator: row.creator, + created: new Date(row.created) + } + } + + private toAttachment(row: any): Attachment { + return { + message: row.message_id, + card: row.card, + creator: row.creator, + created: new Date(row.created) + } + } +} + diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts new file mode 100644 index 00000000000..a917b62e513 --- /dev/null +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -0,0 +1,233 @@ +import { + type MessageID, + type ContextID, + type CardID, + type NotificationContext, + type FindNotificationContextParams, SortOrder, + type FindNotificationsParams, type Notification, + type NotificationContextUpdate +} from '@communication/types' + +import {BaseDb} from './base.ts' +import {TableName, type ContextDb, type NotificationDb } from './types.ts' + +export class NotificationsDb extends BaseDb { + async createNotification(message: MessageID, context: ContextID): Promise { + const dbData: NotificationDb = { + message_id: message, + context_id: context + } + await this.insert(TableName.Notification, dbData) + } + + async removeNotification(message: MessageID, context: ContextID): Promise { + await this.remove(TableName.Notification, { + message_id: message, + context + }) + } + + async createContext(workspace: string, card: CardID, personWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { + const dbData: ContextDb = { + id: self.crypto.randomUUID(), + workspace_id: workspace, + card_id: card, + person_workspace: personWorkspace, + last_view: lastView, + last_update: lastUpdate + } + await this.insert(TableName.NotificationContext, dbData) + return dbData.id as ContextID + } + + async removeContext(context: ContextID): Promise { + await this.remove(TableName.NotificationContext, { + id: context + }) + } + + async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { + const dbData: Partial = {} + + if (update.archivedFrom != null) { + dbData.archived_from = update.archivedFrom + } + if (update.lastView != null) { + dbData.last_view = update.lastView + } + if (update.lastUpdate != null) { + dbData.last_update = update.lastUpdate + } + + if (Object.keys(dbData).length === 0) { + return + } + + const keys = Object.keys(dbData) + const values = Object.values(dbData) + + const sql = ` + UPDATE ${TableName.NotificationContext} + SET ${keys.map((k, idx) => `"${k}" = '${values[idx]}'`).join(', ')} + WHERE id = '${context}' + `; + + await this.worker('exec', { + dbId: this.dbId, + sql + }); + } + + async findContexts(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { + const select = ` + SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update, nc.workspace_id, nc.person_workspace + FROM ${TableName.NotificationContext} nc`; + const where = this.buildContextWhere(params, personWorkspaces, workspace); + // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` + const limit = params.limit ? 
` LIMIT ${params.limit}` : '' + const sql = [select, where, limit].join(' ') + + const result = await this.select(sql) + + return result.map(it => this.toNotificationContext(it)); + } + + + async findNotifications(params: FindNotificationsParams, personWorkspace: string, workspace?: string): Promise { + //TODO: should join with attachments and reactions? + const select = ` + SELECT + n.message_id, + n.context_id, + m.content AS message_content, + m.creator AS message_creator, + m.created AS message_created, + nc.card_id, + nc.archived_from, + nc.last_view, + nc.last_update, + json_group_array( + json_object( + 'id', p.id, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + ) AS patches + FROM + ${TableName.Notification} n + JOIN + ${TableName.NotificationContext} nc ON n.context_id = nc.id + JOIN + ${TableName.Message} m ON n.message_id = m.id + LEFT JOIN + ${TableName.Patch} p ON p.message_id = m.id + `; + const where = this.buildNotificationWhere(params, personWorkspace, workspace) + const groupBy = `GROUP BY n.message_id, n.context_id, m.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update`; + const orderBy = `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` + const limit = params.limit ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, groupBy,orderBy, limit].join(' ') + + const result = await this.select(sql) + + return result.map(it => this.toNotification(it)); + } + + buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string, ): string { + const where: string[] = [] + + if(workspace != null) { + where.push(`nc.workspace_id = '${workspace}'`) + } + if(personWorkspaces.length > 0) { + where.push(`nc.person_workspace IN (${personWorkspaces.map(it => `'${it}'`).join(', ')})`) + } + + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'card': { + where.push(`nc.card_id = '${value}'`) + break + } + } + } + + return `WHERE ${where.join(' AND ')}` + } + + buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string ): string { + const where: string[] = [`nc.person_workspace = '${personWorkspace}'`] + if(workspace != null) { + where.push(`nc.workspace_id = '${workspace}'`) + } + + for (const key of Object.keys(params)) { + const value = (params as any)[key] + switch (key) { + case 'context': { + where.push(`n.context = '${value}'`) + break + } + case 'card': { + where.push(`nc.card_id = '${value}'`) + break + } + case 'read': { + if (value === true) { + where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) + } else if (value === false) { + where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) + } + break + } + case 'archived': { + if (value === true) { + where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) + } else if (value === false) { + where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) + } + break + } + } + } + + return `WHERE ${where.join(' AND ')}` + } + + toNotificationContext(row: any): NotificationContext { + return { + id: row.id, + card: row.card_id, + archivedFrom: row.archived_from ? new Date(row.archived_from) : undefined, + lastView: row.last_view ? new Date(row.last_view) : undefined, + lastUpdate: row.last_update ? 
new Date(row.last_update) : undefined, + workspace: row.workspace, + personWorkspace: row.person_workspace + } + } + + toNotification(row: any): Notification { + const patches = JSON.parse(row.patches).filter((p: any) => p.created != null) + const lastPatch = patches[patches.length - 1] + const lastView = row.last_view ? new Date(row.last_view) : undefined + const archivedFrom = row.archived_from ? new Date(row.archived_from) : undefined + const created = new Date(row.message_created) + return { + message: { + id: row.message_id, + content: lastPatch?.content ?? row.message_content, + creator: row.message_creator, + created, + edited: new Date(lastPatch?.created ?? row.message_created), + reactions: [], + attachments: [] + }, + context: row.context_id, + read: lastView != null && lastView >= created, + archived: archivedFrom != null && archivedFrom >= created + } + } +} + diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts new file mode 100644 index 00000000000..ac6f7940fcf --- /dev/null +++ b/packages/sqlite-wasm/src/db/types.ts @@ -0,0 +1,62 @@ +import type {CardID, ContextID, MessageID, RichText, SocialID } from "@communication/types" + +export enum TableName { + Message = 'message', + Patch = 'patch', + MessagePlace = 'message_place', + Attachment = 'attachment', + Reaction = 'reaction', + Notification = 'notification', + NotificationContext = 'notification_context' +} + +export interface MessageDb { + id: string + content: RichText, + creator: SocialID, + created: Date, +} + +export interface PatchDb { + id: string, + message_id: MessageID, + content: RichText, + creator: SocialID, + created: Date, +} + +export interface MessagePlaceDb { + workspace_id: string, + card_id: CardID, + message_id: MessageID +} + +export interface ReactionDb { + message_id: MessageID, + reaction: string, + creator: SocialID + created: Date +} + +export interface AttachmentDb { + message_id: MessageID, + card_id: CardID, + creator: SocialID + created: Date +} + +export interface NotificationDb { + message_id: MessageID, + context_id: ContextID +} + +export interface ContextDb { + id: string + workspace_id: string + card_id: CardID + person_workspace: string + + archived_from?: Date + last_view?: Date + last_update?: Date +} \ No newline at end of file diff --git a/packages/sqlite-wasm/src/index.ts b/packages/sqlite-wasm/src/index.ts new file mode 100644 index 00000000000..03eeab5ffaf --- /dev/null +++ b/packages/sqlite-wasm/src/index.ts @@ -0,0 +1 @@ +export * from './adapter.ts' diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts new file mode 100644 index 00000000000..3a0259c6c28 --- /dev/null +++ b/packages/sqlite-wasm/src/migrations.ts @@ -0,0 +1,138 @@ +import type { Sqlite3Worker1Promiser } from './connection' + +export async function applyMigrations(worker: Sqlite3Worker1Promiser, dbId: string): Promise { + await migrationV1(worker, dbId) +} + +async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promise { + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS message + ( + id TEXT NOT NULL, + content TEXT NOT NULL, + creator TEXT NOT NULL, + created DATETIME NOT NULL, + PRIMARY KEY (id) + ) + ` + }) + + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS message_place + ( + workspace_id TEXT NOT NULL, + card_id TEXT NOT NULL, + message_id TEXT NOT NULL, + + PRIMARY KEY (workspace_id, card_id, message_id), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE 
CASCADE + ) + ` + }) + + await worker('exec', { + dbId, + sql: `CREATE INDEX IF NOT EXISTS idx_message_place_workspace_card ON message_place (workspace_id, card_id)` + }) + + await worker('exec', { + dbId, + sql: `CREATE INDEX IF NOT EXISTS idx_message_place_message_id ON message_place (message_id)` + }) + + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS patch + ( + id TEXT NOT NULL, + message_id TEXT NOT NULL, + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created DATETIME NOT NULL, + + PRIMARY KEY (id), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + ) + ` + }) + await worker('exec', { + dbId, + sql: `CREATE INDEX IF NOT EXISTS idx_patch_message_id ON patch (message_id)` + }) + + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS attachment + ( + message_id TEXT NOT NULL, + card_id TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created DATETIME NOT NULL, + + PRIMARY KEY (message_id, card_id), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + ) + ` + }) + + await worker('exec', { + dbId, + sql: `CREATE INDEX IF NOT EXISTS attachment_message_idx ON attachment (message_id)` + }) + + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS reaction + ( + message_id TEXT NOT NULL, + reaction TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created DATETIME NOT NULL, + + PRIMARY KEY (message_id, creator, reaction), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + ) + ` + }) + + await worker('exec', { + dbId, + sql: `CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id)` + }) + + await worker('exec', { + dbId, + sql: ` + CREATE TABLE IF NOT EXISTS notification_context + ( + id TEXT NOT NULL, + workspace_id TEXT NOT NULL, + card_id TEXT NOT NULL, + person_workspace TEXT NOT NULL, + archived_from DATETIME, + last_view DATETIME, + last_update DATETIME, + + PRIMARY KEY (id), + UNIQUE (workspace_id, card_id, person_workspace) + ); + + CREATE TABLE IF NOT EXISTS notification + ( + message_id TEXT NOT NULL, + context_id TEXT NOT NULL, + + PRIMARY KEY (message_id, context_id), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE, + FOREIGN KEY (context_id) REFERENCES notification_context (id) ON DELETE CASCADE + ); + ` + }) +} diff --git a/packages/sqlite-wasm/tsconfig.json b/packages/sqlite-wasm/tsconfig.json new file mode 100644 index 00000000000..49e05cea1ee --- /dev/null +++ b/packages/sqlite-wasm/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From 2799e8bb54da8e906ebaf855dcdc4514722a21d8 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 14:09:50 +0400 Subject: [PATCH 014/636] Init ws server (#6) * Init ws server --- .npmrc | 2 + .vscode/launch.json | 24 +++ bun.lockb | Bin 59816 -> 101408 bytes eslint.config.js | 2 +- packages/sdk-types/src/event.ts | 227 ++++++++++++++++++++ packages/sdk-types/src/index.ts | 2 + packages/sdk-types/src/ws.ts | 17 ++ packages/server/package.json | 27 +++ packages/server/src/config.ts | 29 +++ packages/server/src/eventProcessor.ts | 275 +++++++++++++++++++++++++ packages/server/src/index.ts | 6 + packages/server/src/main.ts | 141 +++++++++++++ packages/server/src/manager.ts | 179 ++++++++++++++++ packages/server/src/server/error.ts | 8 + packages/server/src/server/server.ts | 26 +++ packages/server/src/session.ts | 76 +++++++ packages/server/src/triggers.ts | 112 
++++++++++ packages/server/src/types.ts | 7 + packages/server/src/utils/account.ts | 23 +++ packages/server/src/utils/logger.ts | 24 +++ packages/server/src/utils/serialize.ts | 41 ++++ packages/server/tsconfig.json | 8 + 22 files changed, 1255 insertions(+), 1 deletion(-) create mode 100644 .npmrc create mode 100644 .vscode/launch.json create mode 100644 packages/sdk-types/src/event.ts create mode 100644 packages/sdk-types/src/ws.ts create mode 100644 packages/server/package.json create mode 100644 packages/server/src/config.ts create mode 100644 packages/server/src/eventProcessor.ts create mode 100644 packages/server/src/index.ts create mode 100644 packages/server/src/main.ts create mode 100644 packages/server/src/manager.ts create mode 100644 packages/server/src/server/error.ts create mode 100644 packages/server/src/server/server.ts create mode 100644 packages/server/src/session.ts create mode 100644 packages/server/src/triggers.ts create mode 100644 packages/server/src/types.ts create mode 100644 packages/server/src/utils/account.ts create mode 100644 packages/server/src/utils/logger.ts create mode 100644 packages/server/src/utils/serialize.ts create mode 100644 packages/server/tsconfig.json diff --git a/.npmrc b/.npmrc new file mode 100644 index 00000000000..07f82b72930 --- /dev/null +++ b/.npmrc @@ -0,0 +1,2 @@ +@hcengineering:registry=https://npm.pkg.github.com/ +//npm.pkg.github.com/:_authToken=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 \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000000..65434e460a5 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,24 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Debug Server", + "env": { + "DB_URL": "postgresql://root@127.0.0.1:26257/my_database?sslmode=disable", + "ACCOUNTS_URL": "http://localhost:3000", + "SECRET": "secret" + }, + "runtimeExecutable": "bun", + "runtimeArgs": ["run"], + "args": ["src/index.ts"], + "cwd": "${workspaceFolder}/packages/server", + "protocol": "inspector", + "runtimeVersion": "20", + "showAsyncStacks": true, + "outputCapture": "std", + "sourceMaps": true + } + ] +} diff --git a/bun.lockb b/bun.lockb index 76fa802047aa892f2968c867da494978ab0cb830..bcfd9769cc9d102bab1d9e18fd386925410165fd 100755 GIT binary patch delta 34910 zcmeHwcR*9i@^FFy3B4&G(!qwIcajn~oh`Gov$N%F!ff89PTSVWtTgXaUgw*;^cCUp zBKo|1(A$r9iz2j7+}^|=K5fID*xe3Sa#k*3v+$8-XO7nsM@vx@8O4%gv66KlLKU!3 z6c!p0%VHIDVv+Hx92QFj;=_dT5tzF((3tg8~F3^EOVH7V^#M%n9GSF)z@oX82 z76Qg{;{Yp(A+Q)SC;%1$HU%6i;su9A`m?TerZP^1czKKuj);sAvRFP6Iw&+EkQW!l zQi57By^{n-@xmel1hF7RSZH`?EUOdLhUt7LhYiRA8U+Xn6U2&Rg@LS@!o+xNkt^iF zjFylQB^Qd~Vnf43ETJemP8gHO>IYI{L7^xrMhJrFLOyk%4?sM&To4c$gQ`)5cx=#Z zMJoP>6pN(+bfC~bE*R3oMOYh)^;U|dL{_K(wPA$wreF zWh9fDuamROMo%=F-hgwfc1dRlEl}5M9lP5 zh{tjlK``VV12k$&y(`to86sYyAUupEhzX7o#E68QAs!n!K$F6jfPtSlxgG+j4eUb! zXfFsDVa8E_Q42L%ELIo5PC#n|)&Y!y(b~)d`!fJ~1z=1M(P6Pn02f0W^#P9r8dxN! 
z1C4wex>EvEb|z*e(}wmOGNK?~bg2w{s0C^N7(sv#y5IoN*wXKSQQsqgJEn&VV}ga& z?RNGI%A+QN1ftkz&`yjnP81rH$Z`WIvHLq2QiAvZRz`!qAt@jdhQ|wISgZ>mDYhsJ zFeD|M*1(k&3rLrbylK}qyTlJOQ~AQ?z99tDhDv4+UzSct8AQfxgS7R3q&j2$#f zVy59D=<$G=F$ujXJ;<1`SbZVAsSkzs0yYOaSwfEn%mdm4umxZaU<1J0AQMU_On?T9 zM6B142>KHzPlEs^+yRU;&pE)T@Z*57V@f1=Ctx($^?hyM8IpQzBzT?!^b1-{BLuKO zj9?beKQb`UKQR_9logv8CB(V3){$!2QNVJL4+Y=_`m>@XT~y*kmA7}MI<^O3)L>`8 zsKFh8QFFKYQ4D(57@;lkLE$UqTG5D*}YiWRZUT&a8kk&)t=p`fC80Zg#) z1V+XRBjQ;sm`cgEIS-^HzuSw&(gDWCZd5@9z$m~Kz$jSYAQsCAa5G>WV?zOV16(wi z#exYU*%oLVD_?-0I^Y&}s=-&>i6)MoThAd>HR^zIWXJ+WeMANGun$BLo>co1Ksr>A z0BF?PXuzmocTd7uy2toNkP=DGFv{dt0>;|X03$o2;S|0)lu}PCU`)RU@z{#HKqF4_ zruftVKP*=|U3xl4+<(-U8Sw*svO06TlFFC&uQxuC6w_(k03+-AQP)%4Lnmq%Y3!>m z`I$O?RBS^{(uGTDrKfiLq`VwpXtgqplR5nTLwG}L+@sAS^==Oucvjsk~5&h?{pSW>uU+KqHpYGc?G&|=OH2;ukHazPsr%>$Z zF5K?lNpq6Qio``rr?iB4uRrSWP;P0#?WHxD@(P`K_jOLbUoey1s=DF1rq?o_g zvj5r&&miN4*IJZQq$+~71M1m_`oyRlJNWj? z^%`^DUv(1aM~#2fsJgPiep<`rpU)nZakW5i{7|`ln?I&z*;U>na#Dx! zQ4bWlHHuiHwT&&Ki*O&YVUh&b(InS-3-_i}QldHDGRFrBLF4S)voj#@9EH+i4&|uo`Qnm6E z=Z%7Va{6oB>V8pU_e9?e)yMhKhq)dj2F4w(U=yk#d?Hy+mfe@gkmGZoz*=kvBacgD z$~myh39dX}%8)}0m3Lu>5*hM*_IjcYKCcj51-_K3G%-}cg*}YOP~dY@q**LjN|V_{ zqr3zAG{IHmOR2~ZLls@vkwk_fpPf(C!RKp&tHkG8bcX!|nK@I@ftwAKJy0B?QO%L4 zQ{qeO%EIbIEK}&moUH z1&aoih222Zsqnc6LM-U z++i>yVew9wg`GiUsPVbyAr3Q25t&L3Qql^9oVp9!o$yizeZYEwnv^4a)g9Q!h&pw? zv?9!%&V;veKlTj5OM}nd3^c3=AVZ_N1NRB00>!}+(grZ0jwBRS`mv)3uP%JFHPMM^RC3@-!SZNLazRDfwo#Zf zNJ4Q4RY)ra3KkKl0c4Z@0F)z2!{xzxhc%N@aua~UxJBAdumaFAlD5ofeq zxKm(epG;-}wM!oYY9z5tz8{whD}XJ@C=(2308yvSXU7rTZhUS9q`*Q8Y@l({AAuT3 zmT-V2Y7E65dSEHR)#0-*5neict|DyQV1>i3)plTyAnG7)6~ti&kfzK%4HT*x)BvjP z(Urv-K~bQTRG_dI!TvxEnAM>E59chS_+}5Wau4){K(J{Sl>M`#9amG4~YyzzLdT$amLVvI{`KY zI4Y^eYy}D}jI!%adctj{^!B zbtX+L6DTYPx&UK51r!*c~8N_h3+bLW~+Jr8yRZ7cz5EM$a95$r(Klx77z&m994GU1|) zuy+#~z4`3VL>+v(5Zpd|wwUmO&r%`-KA#bF@L5EtK66-QCM%uFg3T4tD6+wMmOHU0 zi)96jGhh;g z5vHnQhHMr2y;&^Sv?jA5Rvv7ejOA8BEKYJ%k6N3tSP^a1w%;fvm|J4n(vpE{&-n_d zsJ67JJlLSql%k_KixmPC%>FFsE=j%l5Q{cI&WnoAfWk4sfjnK>@<>@yV~|Ct!h|*w zD2df5IB=H(g<}OQ3i{K!Z`@)f>yl%3Mx+q zfmzbD#EQBwonfpL#L5tvdQ@9|AeLGffWPz{pezYR^?uxZ43QjtdqG=(qS}(#6WRh* z;rOLw@&?M4%n7q6cNtJnJxGG`o@h^lp~!}Alt{1WpvVUbhbih+0~Re(9@TV+9RRTq z$>IQL0iT|v>EDJFS4e?cp|mcDL+T9jw6#%s3`AKIii-VI7eNRLklbY;5867#=Mto# z6f9gQRaL?4u|CuaN@7gsQyc*)GzVLVCFN60gIF-(WaOzx$8wKCtSi=r<6C+tC}$+u zFU1g|Y!CWb@e5FBZCH;Y-C{)-SghP6vCuZj7SeSxQt!7SEzpwJp%vxQ96xe5?DS`rBpu{_m;vk*HDVj(9>3P2m1 zu*#6j0c#qHQ;%9Wgm5Co9KQ?C{R~OaNy+5et7Z@85dPPUt~^hv91mhK&qeB{2XThy z!W}S#HZo9-lz0d+)ZB%89wLz!Sdcl)O<>wEn6N$2uVSDmyFjWAD4cCTx3G?X^CT8q z08cM^@yMVA&LMKr)9wH`uXx6m}?XEF?q z7)T1LRLB7eq$Sl$j5P1NN|mbsQ7A7eNwrP6JQxJ@I7bU?vk%okNWp4RpIs=Y5) z0pg)~Bx_~b&bV(O7DqQU$J9a90lFVlP3BSmjq4IfK^jD`V5%0hbrrI61*Vax||u%w6vD}CaYl`QenN_GMk z8;Bq9Ffrx^qb6$^jEV3d#&~EoiJ`qD2DOt9VoV2JK`q|a`Xoc>1oA-Lm7B5L>Vq}yciAO9CFdsk$AOipmEE53p5diogMuDD^ScjcJLW4{ubkqOq1oT^#D|lCH%ex2(2o#l z3OC(6edTq2c81)#yWXc2OIEmCX?S>K+slteH`vaGL|A4<7+PcaONqR*MIW(89#W+N29Dgs&gunc{xFP3-ydrBYKu(d|8@x zqvHD8cdu_A{5CSr!()x}?EB;M+y&K!6OyGBUDX}7B422~EY7<4x=cTu^|x-{k`_(R z&)Q(Re9(zy3#XQUUo71%Bk{$p2KO8C?#;7#YNws%`rY@@%XfPoJ+T(FF3v*Khja49sj>%dvLWt|!et_<}o=dI8@dOGe{?WShk-9f== zo5m?jG_Fq)-fyvsx_^H`@S7vUlPWLoH5|KW<;Y!cR}jl0w8W(^UG{d#-DzX#IkrjN zU9(tw`l~LET^)Ax9-&t&oBf#Iv&X{;J`);B>NIn;x9m1LGTMo~cBg^IgfpUA(VLV$ z{YdF3i(N~L2AC76P-ZDS^tibE%EaQ+eMS^t-rQ~bo5^qa^A=gA*h^{D+#Y4%T3tRs zTXu5IrFxqidrDW$HJVoZXsyQumFxH38l9*o21II!Z=QIbT;sZ-SDbvmq^;)93v+ig z8cnEt@WS@E#TUWsu4}^kO<(gs@UV5jBj=?joU*lN#;kW3 zn>=2*yGHtuvo(H|2MY-$-#tG2yoeT%u3+=UoDpZDZ|v$_T$qv*JD+W=@mE#n^$f;fvv0la+2zgh*Wug8^_0_nAGa>IK6Uqx=$Z4k 
z+P92;xjNc)@#W;l`*bf2D*Ie=QZOll7eCgSXBhLMS3~nY246VwVT^Y5RaMjW=jWaq zx_IiOw2hyi^NpRJH1{d+vJO(XBkd6@_+CBHXF{ApQPX2b%luVKhpRsH{}Jzb@=;_! zTEmI?+*}6ZElhz2D|#$*AHL2uZQM?dkL&9m|ex0}2} zBR5nS#62t?mzAdbLqkc{c-Y5_2~VO9P7J~$LR!UJnT-3Jy1Je1I(z0}-&lKQB;NWZ83XqV4TnNbC)r@Q%uetxoG(a`jSjeC%< zSYlb*n2g8n2{2u?A&w>P?@@iSbxOZw$4{?ym)@MEVrFx`=EAFW6({fh8W6mnpQG4v zFL!y(KK~1&n}=%jC8TN(cSmt`tI5*dKZ~a z<}UQkU#hivOmbh5OGHIP4~G>Kdq{up9aQf8T~2QvzQfaecQE;mKilJdx$meIYs9`g zyuZGWP<3AT?xI1^x-p(Nbh~!m_`Ld<`uu!_`Ew4gkYc}7)1BokUpI9wU#|DpFGrT| zk0>*xUe>8eVJDMu*IQ;fp9(tLFIcuHGq2gSzPN19=j~Hd$8Krud}DKHmgcyW%A8~K zdh6P5{N$|H{p@^kebP_GutO8?j?H;=+2hhWFNVZ;lu19FoQ-PzvrXo|sdc@v=wV#! z&!_q3hecza29DC_Y29o}oIP#VOXJ?p9uHkAzW(~eh_QZULynq6ynLKK`24>9E+ab+ zX7J6WnF1DbhIDfMu=SfuN!jxQ^2S!fS7*;#d*s2%O)qz=NvD*`hdanC=PJ!uVeh?j z%iIqaDs{KQ*X+xmsHx2w_44kM;XNM3F&O7D7;}hkHKFtP{$meL>c3R({RO9ZJ%c&V z9rJ_pG$R_Uo}J6mJ}Yn1t5!VWdf|diTB~es)eKFQPB=5ptZRAPqVfK>pkVVqqV&p&-gFfzJ9DFci)Vf(w`aiRZA;+{b1TeJ}oU^&hWS6@=wm_ zvUB{#>1)<~d&>8>&RICh=GcYa)w{~YC##$eKGR?Oprwn<`nxZNJNztoAui6FpTX7} zV>4aKe{gK4;!6yP_c9qbo%ArZUu&>1^O};8sd0#dXRy`snw{sLzH#Z}X%y8emo{P6 zx0rhdn+CSZY|;xIr#$=3fweKamn9e9y0v?tY_}f3S4BNN+wLe`%T>-6*TP@ezS_2S|qcVebSE!j40$u=+VA&NP2vtz#X zp6_|>TBztmW#1jt{TlD0ds_b4rKnS9c#zOj!O zjEk6zPrC=dJ>&UJKI8JMRj;@=LuLE(wGwO_Z@%1-ZZGKC$GG~w+VWkK9xN9xTNTt} zy_WKex=pV=KdK%iy6Bjd%B;jwOIpLlOuiwhUo`ce%#;>SyzAXX=D|I}3Z&OndxTb6T*8E)%9V2h-J>gR*X}*L-TFl8 z#ffK4PL+Bn%pO0ZpzghaaO|h6pFL6%@2-32JvA)YdGNpj2H&!FfyFFNvwG5j;%^a~ zpC6swSl^F*zS&{*rqOm;H~m@`j2pSC(Z9Rmfg-`U@s+`kEq#8t{xH0_##GkQZG?Td zkMSZ)=a^Fr#{1hDvpBo@9|%t$Rr2)B#8E{9&hNZkH3zyetDxY#hpx$>ov!wepF8vY z#iJiQ2>L!l+QiR-t+{`${mVPG?`C)mW8LZA(wo8f0F&`%$LC4M-PQUGjf*P0pL}6_ z&nHm_hHh*S#!l%iwRrTgKBu(D-Wrng%a5h|D5Bxpu+fFpeKNdxU2+FCk1Dsn80E;| zd$4`~11jE8GrBTE{mNZJV`i1}9$lxp=tk)st7H#*H+6ff0u=`Blk5#|7bkF@Y3YnRyADC>+yHJ%kD53 z+`O{;(YZyLcATJ$bauUPPT9CAPd7X1eyHNk89O;|@+*}kSM!hb%(auwo#S_Fh^(81 z@0-5M_JmZa%)~`Kl?#VS1m1m?K!iy z+TJofFJ$@@XQ|e0;`qV?X42wh*Uq&r=g+S3Nhy2(uI$C7Vcy>Ek)PD`%6dL2e>-XJ zp5vv9%beFFub#_bOdlkYjK5#|=F|7m=^Y2_)JBBe>+<@}?84J#Hy`G_VI7LUYw7vp zgsYt1w5^KU=BAFW`hGT4af(@hcdx@@8}VVyp#$S;bkrD(@f$??;fxqOwD6{ek80;P zr>B*htY{fu+Ouz?ZSBMBn>Xk9&%CvF*VDV)F^wiAFVDO7eys7KoUj=A)xc$a;lOU; z&Qn-1Zy9`#(o6w!;tTi8_8TxmJn&i6GjWQ$uTK|GXIJhu&Ma5?oA-6PEVJk}HYGqI z@YaiCQKlJL8@w)SX1=j3-4;n4of%V{6tsH)gYhvY<3M|turG%iY)^ez?6y~Wz@~i~ z$;s;m3{crWz*{JYd~p8g@EhGz#u68mj^5WTFc!}@cI*GPC@Su}&Kir&h7GxEmoOL~ zXEHAO;CN{2ZU0Fv<2OZBsa9S1wmogMzk1nQcA1k`*dST`Am1hDQV*;2lTmSfu%hSj z^e~_Os&+!>)cqrQxuX-bjxzY3XyXgxkMm93$N2K{!un~!3woF|39W|DSaz_YZ~p5b z>CNgYBi7&Dl9JW3v}pFrnPs|NK2>};m%m@*`mNbpYF`hVy8h!!{ap;kC)*eUU!gc@ zPQLGn=NeOY%=7E^a*gK1r=tarsxO>ea_B%@r<95qn;`?msUd@vt}Nt#pVVhw_U?C$ zdd-&|6uyWp-h`J2Gv8>cn2g82Kc?4u!8_yh;*-tqPxs-7G6+Fn_t!0JHa?!!=jrT- zZi#XUX?X|dn+Zx~opZfIhAw#g^vtbYhOSGeY%H8^a-AWtxSGk>uh&Vt&kDtr2lqRK zTnfFp{1ShTT+?%#(;Kqx&rVeL*YB-!b5rmSpVmH(6)7Il&-gOVBgP&-X`Oofr&&s` zlyS^W<|!uMm6i()D+3ixt8c%^5UzeF$`5IIliy3^`BOuj=@uI?=-$|+qea28X_}`W z{@P=IP;lg@?ZY?k(*+JEdC!Ig4$0iXkQl!wryowf4yW|B>(|##dfLMAtK2hAPV4XJ zyP7ZXTM|=Yu0$$~!mj^jm-OhhECX%r81=vm%F%2|HRP z)(sx}kdB z^J%v{p7>kVct)&uw)%cj)vjurQsvm9`uH7kA8!kuHJ@qBx^qg}FjQ{C&+O4(5Un@6E^uONotn(Rx#vZX~`7_O?PjkaO z7y_SVGPc@cG-!KN_j@Ck#l1b&Fqdciu?YQedd(MavK}~4Q?qK2{MfF8Kbmx27g3of?J1+| zwnwP7JWzI_tT^Yxh~ZtoKF@Uf(xoD%-af+V;l4`_Q{7)_1RUgjW=MR2<_ef2)0OjO zdQ&}n`VGsm)2|#?f6czkc{)OU;O^-2Ph!n4+GWH|y56MoDA!7`59m5LJb|$&iCK+I#sY_VFW5WnN4E}r{-9;~+P7Ej<3sn~7}WX2j?iAU{?qon zBsOVIjW-A%qI9s%Xjqb1%AtAqi<3n@MQ`Ir^0@E1GX%cK`ZddwK3%(&yWRnoX-O zCLIgoH$1rD%HDdPlS4)&FS_$!7KCdcNbgMs?f*PgB}iv_MNl$q40Tbw^LIt 
zA9_|alk4*OQHBMB?{y~M&+dKm0>t%2=W~8JxTURoxp`jP{InO{N2gEg{FFPoW;K^j ztTUQ1M6Tqen^Tc*#kPwZn|fEhJ6tkmqv>@+*K;~D48}K@jGLWmw``i@xjETPoa#N0 zAJ||q!A>W8<(BzcZa1~k>lzzB+C&UzsmUCy-jN{o*Lo85Gw#ck;+AI%OyBmC$ys`g z`M%%8WXx&YIR4=6#>dK`<$+V&gD&#T55FH==a{(t{lp))@~Ymz|(_&s}+fjCJpVO-o6rFV{V=lK`w z8^zSTD(KlPxbWe4OUQ=cv(htL~iIRgtf~V2NJ9 zsd1kJBX0dndpYT-$%W=AUrg1;)|>Xeb!&`@?zr;k5rP%>CXZ_43+=zdWSmsxIWYTk z>x3Q^Z$pZH>|@{C>X%)a?K7zVkIVtqH?e$@2=?m#9b!ehaOpV75DXT#xFV&9Hl6}(xYYgh1G!xeZ?cH@8x~4(w_cg zr1P~}hmXgrM)vZ!En|Q8o|H?H>?9TA(c^ZHJtdWOu1#VV>mHMFYV*S~|4Bh3JTKo} zFh_4i(ZSi#P_Pjbp;4>`_yBHB~jLq}6CXywM-me=f$ z`)kF)4954Fj9;BdZQ)*Y4LZalzJ3t7kFO;N{vfP2Z#ISCh!k3&XEi-q_M(nfs-f-sAi~otQaCz4IR1 zVzsB2#q00i$@Y7A((9Kkb38v{GFH*rF@0jZNl(wY0-Z;ZQ)+vwYdNJpEm@_fG18IO z6wpn1!q~{9>GPLuGSNq!me46 zx1)H9`H?}TcfI5NYMavXcn7j31vp;X9ql;(o%*PiY2820{a}(m&uUCox5X-Xqay_u zwpU0c-DJM+KVdTVP}F<9yLrR#PnFX=T}Evg_sDunhHc>i?WITe)>eN0`TXL1H=U&) z-8z3>H&R>`HeIRwj%>mPxofvw&Zwk#R<9h%JhghtWV|tLW_+}6;oaz6cl(#Oscfch0c+D=EUNtP^{8Et-bk&C|o~8B^ffrNOHo%=&m(X2*EVp&RFQp01Je zCO^|D&%M{02kTO!yI7iO_R$g?s;j#_^~&3%3p;CaP4?8>eV=zDc!I%a+dw7eqVyd8 zKzSVd1(7my9Q!4413p^_tFUqGS40MUz9#O&=NrN)d>p%#$b`?g#54GOM+}S@$9_+& zhtCg0D|~(=yduZ3KM~pR`I-0vpI->yD4({I*ssh6D1DrnVXU`6P=AFOcFufh#*LRs zU9@e!$M4h}WRM#bK4?ZhyVcrn3ZWDHqE*?f&*B}kiM2m_x^GmqJv`x5Rw#4#{f)`@ z=%fuLYhRBaHu%9OHKkFRcF(_vulGCGtth_c=jM#Lhez&RJjrWDkm(|=3%!T+9dYb+ zC-pV)FPC1a9C6=%`1F_VwJ{8he`hjYu*YoOnnykMUg*~S!Aw0LqYIaZA6NKz-+oGL z51V+Tr(oj(*hVNUks;bS~kyCvmu+1}qvQiV4!ifx9vd+fT%{NC{glksVR!ZYg; zbq6=KoQs!E(k>yIW9wCX?+j~xALab)O5GU4sRpBZ|2RI!vi^;uMe3c9#vy0yz9lz4 zlrIdE_enP5Fa-X|Eq8?AGp^e;Pu`k z48~yHZ4YNsO^>hoJr_QT3dw0wR{T8B%d#lsu=_b{|NJvum(ScN@KaMsi?kbC*yVD~ zgT<|uYRhI?7p<|~QnySt#LaPu8FOZ4xATRQSY_8V%leqO64%)Z=V}vL(sSz9e0g%m z$;_TMR0x}S-)?OES8-z&iyHPrK-k!OP6YA%;L<| zy-{(yAUeM+Ly>D~I8yzH^G@0x`$Eh& zJ~Q#u2f3Ko$?xZ}G#zH}<#;n0zcHMDv?$=3W3Q%c zmu%iM$G+?Ax2-5N?ivtOBWBsgoo?PK*j97CqV?K)kA@eb%cuFNfzE0#*@gQbj|-LB z-F+nfxmO?fb1x2YHcD3d2C!CyKbk#Q`Xj1lYazfQfOLQifW-j#+xOJ~YXH^)gaXU}z&#pnEJFZZLirYeR{+>o zZvox`ya)II@Dbn>z-Rn1$QKBpVPIcj$74rh$C48(FvGsXzQVpi?V#>)--fD1Rif%} zw!n>H0|55rF#r)jC;*SeRl!m` z3|I!R9AE{&N`UPETLHEKYy`l)8GZ{92EYeU2j~ib?_fayxZ(!@1Oo5?@b%Xkzy?4c zzz9GW0C%{!8^w*|Q-Edw@iX{%4iE(p4G;<74=@>E62N$XaR8nGUI2puTmbk0ivgAb z;2&1tZ>Ayu@F*<`z!hL1fE&OdfWZJR0Qh$_FQ7~ez!dENp%8EfSOc&YU_HPFfLV}H z1TYi;&tg0vE*oGwz(#;g0Mj7e2Vf)szTk}p@B_bfmifVdt$&H&T`oCd&1%zy)7t$2hD6X{_i9z-0gw z@;U(a?`?pm0QUgy0^A2k0C)oM2;hN)#_&V<(_k_Y3C#3Zf-(FI;2r=@0rv>sY<2NV z2)zJE1NaQ^3E&ODYk*e(EdXeq9|3;=cn|OnpcUXP0G9s(@CyK^Yg`JyN$7cie*k<3 z_z8eC9f#99gtf@|olfvnMh-BqJF|%w*`}Q!NGNZ&6Nf{f3k)aPDVu@JRESZAm^FaA z0SbnRY<2jaEdC&ft%T8@JpXj{HhV4W2gU^XX zA#|fDw0vwg&qn>$(mDx8D_BQ}BfGS9XrHzEQV*v3zIqi!w#drd8fGD)b(c1~ zfRN9%f!At&u1$B^@9lA=r@M!g2BP*Lr@0+$3pm8`Tx%WLac;t)4-V4KUipv#l#Ajc z18tqo9IEdf*YL6otJFZ4d6wq3sD3itj`n9d;n*wbO?O&)krh~(JHVPELs`!_-^<=p z-qzhGQ!KHhkeHIE&Avv=%CnK`#3lCTX-O$@iJH9OI@2#GKvQ?0$sR9t~pmZvF1GbK7-UuGN>z$7?};J9Fq>SjRgNk9PMl9->H@JMHaO zaMJM9kagN0Afqj01jfjWVyR68?iry&ySsJma(A8cF974-=sfwj?&^YwK93QG@A5dXJwjlhV0l18mKq?*^ zKjf3?q#QMb{@(pkox2iO_gZ)FxQo@h5+?hsbtDdLlk*M7Z~vP1%LMqK)q#`)i0FOV z?0RAeeAW;<_u2ddGf>j|eSO%+3H<^?@QpOEKpUJtO@dF#kjQ~*N4vtkpMEp8c*Gf5 zs0tc{be*|3?W)e&7?_Q#K1yb;%lKT`#E7z^tZzSM!&f&Y{$vAF!|GNe4iF1 zW{!Ih{fTS~Ihl~Ty3_7@x0)vRHA@rvtFT=LP_qqNW>vA4UB@;}yVYG1m{nZaREN_5 znhjXuGs3%A8wPlAu??JNZG=xgalTmFj`r|7;N9`!Gb(l=P{ILh9ZZGx`O6t^?bjf) z2PYnwY%yg7p<80Zb|ZXCyxBL1ttHxaoeU|Tr;9rG-r}88g3}w$z)rB=gA|2tET*`%Jibf1wqrF5i-qorE^sdL&2Jt0iXY;=AYlkZ8% zUlQiGJ~Z8;tpTcZBu(@+Au7Y6e4@-oO4gM4SZ2+BMC9++) z198Nz$UZ9%x`i3BO^9U&GMF56IxfwqXBzD=m3Hgc3vC2eJqH%;nw54S2`Sj)Fq~;G 
z$vO4w61UE{7z!zl=62>*Ht0B(^oZQ%gOV`Zt}Ql_0mCQVw00~L?XH&gj%kFmti8F7 zg9Gi@miD6wDJUT<7qshJaJ5N#+JqDzXy?qdFIX&O zZiOkd`)1l(ETn)Uz&=<#ds7Qeh^XM#SLOUr4dg}|zZIn5cI1MchTnu+?>nPKp_Z&! zwDV`{dNJi}mV=8tIr?b-&a^vODYhM~e=rDGCSW`kz>an}%VA>?8%UvjMAI&5+f!&S z(zMgs_7sVqX>l^`*S0;0c12BlzHLvDF{7M<(Y|rpQ)qY8wD(&nYC3@_fcC0PJ8_j_ z+k-Wrsi4PbF^JEiJ=99EB`a5(FKF8JEQ~BGv^;ool(?y;z2Zu-LD#r@YIAx_J6R>m zklPH}<23Ey7VELcDTfs#mN=!Rec&QHP&g!kGiB13GVQw+Q((S>6x!`F?d6rM$j00e zJRwLNGt+)x+eM&#Pt&e$$rMn8*s;yuHSJzk!VWrCByq7#d-SFBgCfwrv1wPlP)4FU z+GjNFqE?au`y1L%Htm*IiVeyUlNLm~?514=lOm8QwAXLi$#8oY(f+|{cf^=NZX8&& z2XfluF=fIiHtn*T_7E(^7UM1elW4Erv=d>82rxOMfYB>}-P7)dQEnTu4DG6%_KZxH zvBo6YsX6UK8Oy-%hkm8qo=dzeizkvvBs!S5Y9>{OHpim9 ztJBV$F@@ZXvS|P75`WKqs1zKo5?AfX=owm!uAwQT!+Ee{)pgwC9d{n>N&RmuiylRa z66exnTb+pMd%B82f#j@0IduQOjZXUPnRXqGjkYHh^$%n4_aP|WOsNCMAniJy_Eg=j z_y5z-z%BuH9V?1y0QSM>dG^2>;g^k9MQ4DweqIrbIxa z=-B!?)&lLNpLPmPSs#i(-+E53F05@GFBiaZx#6Klt;V& zr@f6!CI}dW|GdILlmBLcq3;BsU64z`VoI(REOEz{`d1Mo?=b&hs2xrGjx73m0@58i zctgjIfTaglMDi*Fm~Uv$={O=_ivq^LwA6oVs2xnLut}nCN}xTnOR-NmK>aw|cC4U| zwea7FO<%%5`+SFA(ZNy%U18gHPXp~19<@Mj99Z;?4YY%Jv~D}{wTQmEfp!%S`w+>5 zOkci0`+6r+NOttS4wM^tvAZIf1bY-47JU-~?f6~74vemcGj%Bh?So#D0wavRHiGt2 zFG+z3h`!%}b|>GS!VpeOdZH(jNa^U?CTQpOWD0d60wUn5M7zHyQ%JEp7VbZbApWa@ z{u>eKJ1uAz`jWPSe&|~-XeaxU6woPoF9z6OGVO=Iy%0k|f9Qw4T!Z%I-_8#2DY)l?ex)x0q3<0a z%aCtF9gEOmgW%2`kXsW(_^awVEX3-#MID#At>cP*A7IS!Mc-&b zU!VX(9O@@G3~qe#E8JxIjs+a!aNY^C<6ou>YR0g!ZMz7CzIOp<40un3GW3lo?JoyY zkrh1711T$^uV4^E@8PNo&lgBi2W8N=DL{(ky_3E$g}$GmJ%zqfg}%XoOtHiBN&0dX z`Ys1ZaiZQR=&Ol5w%*>eU94I08Io`g1f!#GTA?p?KrN6vTo!#N3w;FyrogKzh)&<$ zLSGhv3k%7t?G7ZeqTlLi=O(6ZlxSy1UjjqlIe{rQ*r0#K?oV#Nwe>dd)@V|=eNB$nh3?$RbFR*7-w6pk34=pgQW0=}&Y>VNeJ>7udxsR8K7OKa#-Z=; zXiuT<(4lYoXiuSU;i2yek*Ef2w1awJB4z#cgp3v;Z5TDr(pQbNS43a)LtjSHo_w``?9VAgH9gA>pICUgN-*eI~LdOlFZyBQRLXq-pQ+LOOI!+ny%qFG$ z4BwRH9~wlwIbWe%U3(laD zO+IRL39x_k>Ko8#-I+HJZRox!Lqo=#`r@XRUARGa)%G=JK;we>bkv#CGj4Kc^}?T* z!P6UmBLd&l@egNo6UM+TY_0*3;o)%+p#g%}(8vf25gA!}^zh(bwyxw2PZoGECkaZ< z+gw8egb~4^5kj~vEFxG^h8G(-Qy4+09@FVPOJu>bm?h#-$7<%W!L$A2f`WuGcxzx> zY!J_p7cU444HU!@hPB9nZ z8f{Q4ne8A=65>(cEg&ypL;%bZaJ6AnOk_eLPY@UwgZC4{906B=!YNpQ04^vDi46}6 ziH(io31ebl5{-<2>lY(JMIpjKkswG2(+fQ*ig@JRngNj!aE)Fpd9z+ns2H>tB8UhK zgVKVCV4;LJIYaaCxCrKKNJ+%IP-CH7lpr<)CShKTFc{<&31dOD*hrkiBZH_DaGnUR ziiN8lV}xXNJUEjJhzX5~1=;AT14Bhok)lwX@lZ#+zljHZ{7ozm4%>;$lZLvCra?{y zg&jfMKW^CLFO@^Szr^u~qxVfr|CF1o8xQl_o5HgqHXk)M{Jp(c^KS}(Tmw%V6H!&t zhHWZ>Ouy?4+5iiWHIaQ(U;j7NLeAf4;O2YM*ofH`WR0+(N_qxYFgT<}Rx0>C2bm1l z^5QofMA0oneP)+JB9nx&q@r{f1_N0b6c}i9gK-!7a1s*fASD@CUYpfHB7@@Ld#)u~ zaBO5~AQ)R{P$-dkLszGr6vjXiCP|EZVyaG`PeK|UBpR=o5`!OT5vxubDB=+_q#*+7 z*wx0}7=r*37!=vHUD`$vCeovcFg#(ZE;%YDxsmscQk|zx-Z~1gBu0Cq^h=zsRgxyZ z1ww6e!*TNwE~11`#^WWV@m^5aqNC~+@pe*(Lj;_Q?ig#*r-L?phi%RK0ShPctRSJgp!s-$}vN2@{(n0X29Zj zCHy$QDa-(I_Mq!QZ1ZWZuB+({Z1{jbBb_V?o zY;fep5Xq;E49SpWwt|vO63hvW74)u6pMgRSItZ!QN14|Q)bQR`6caHlBUq^Ms7aqt zjFg} z{~5UAghq&>0RNUbEL8M+M7SX8w+yp{f|(G9V^`#CVF9x`lpyqvDGvX;Y=6okDGf5= zi#Ny=AP5Tsn~94E3>Kn9v9NU)g^~-Q@ox;F&Oez!6q=k#{TX4%!(lGw~EQWC&z(~Lz<2^_~1TnBE8UN8DNc_K)VTwjF}~f352O33O8Q(hNn#hVV4PI+QOh5C){#q zwv~X?wlL2U)vqb}rXNxyq4v%+l>9yb(j*~NH~e}5goqFZN5+N{Z;t5eP(d(9Tshkb zn3U{s2~nqZF_0$N&cLq^P%2eC$%I%$?R9pge(nPCRDehQ)&-|7BYYJn*~Gy5hChx$ zLFmyS!rdJ?N&OZB`G2Ef6N?iS6r`t?h^TP$sF}g$!J)AsasK9TAkg+2ZSfDU`WBJ{ zREwa%AUmf(dw;7SC;x!JAnRfFfdU7i&{Ak=9~9^>6xdqX+uQnE+4%p#3|b@nBPt5E zvd}-_GiJrY4lg_^3{MEeZ4@qC?F6|6ASaUlq9t@qZLM&!46qjp?1VPYH1SBAevgRp;%;gf1;NordrD|P-0u?bqB_Bg2d zJ3+br%79EF*U3MG!@Q!I!Z2Yh%(A#W!fKp;ujaROa&i7kDVWj8!)8Ko*}%80<$tyj zWcxD;hXx#m|E@7ov0;%gvbQiX?6miP;$-4wD2? 
zaKrQ8jU6=iHf!?_v~#racMw|H1o_+B`}+%>?Ck>rEgb?K?Hz-_>g;R+9D;=Q0ZxAm z5CLpB6XDHIM2aPhjuXHTsq9YNJ0&e9DV!hL(?syng)cS#oy2XGwTW#5!@|K4MthL0 zm6el|y@R!rjT4R+J1a*88(XUYmEp14B>tBx3@qCiA7T_hdn7RwhFmz1y1BX z6<2q77eE%updW}P_opz4IDplfI#P$Pl9x_M{jE7LaO~h~DV%DkYT91}NuvR#dd!zY zDBqp~R~SotqsO{!m@gQRjnP3kJe{bONKy;DbVR^*7mfvuv?O6DA_>9fAAbGrh{9V# z-0$m|6)7wpP~$VpCuW#vq+n18b!N?4nZYJzHk;VczdBvl`)ytDHg#Y6FHPvbE2002 z?P5H4??xm&eMr2KREB_h)OcEZ0CCJjwxFZ%J==Khtm5{HZ*;f&N)J*@; K3SQs4_WuD$ybT`! delta 8309 zcmeHNdw3K@w(pu`AWh!yOeP_j1VTti$b)3^Fw=m@!yp0zAwVJ%GK4^0WD*n+GAexH z3i2qR2(UtetFXM(ECyU$Fo=7Rhp?`@gk21XBCxQ?-t`9cg7RXMPi4#c9@{?hSecsiY4@{_UwnSjtcrnW(oelzJN})yA>pa5cfOso z|JnEcl=a)MBCbek*(9x++`DP#2949My(G!ijoUy_hqJ1(ra_W^<{?Q`e;qUcw8}BB zhNLgL>rc7qSx_16gaIR)jMFw*9J)n2EQ8hG;{lm+In)$b=1>@_WNq}o!~>jmpjTDE6`r;q|qelq(Smi z2fXa34OqaLpOadq7EgqkHg$puNG@x$9eD(GS~8QBVDL!!Xi520VG|MyNK)+0K;t_UbChUSCmX zuXj2wqMjyc3fHI)DD*YCUc(J}gVxXv?+t|_5=;dpFSJHT68gGA!J9z4nP!;o&Bfqj zQ9lQi+J~7XsSoIK%rORZ0C=cym4hd}+oLptwK!{%Tza6r4?!~I<7iF5jOi0!R&Sr- zz!IzmPcuCOO8!oQc51J7)K@q%zK^pb=ue)QX?Hfvg?H*5jn2xM^QGRfl9s; zU==B#_qsbc9o6$3^^&v`meP#sK+)uS3^St{Oa)H^BAOZ+DjoF~pgat`bAHW?ISAYa z?7%YAMPd46Xh*yx!MU!_z*E0tZut&S;*VnYq)?>zC1@5MLIpMKgMg~QEw}-cmZF}w zcn<94*H2SBT;d<}>^(H0zZMulpd{22l$6$0q%_R0b2zJ$wEXa7gQKP#il&37KpGE9 zZXA}xle`kzws}qWeU@a7=JaNU!i4YIL*+!UTZw^)r*NpsZu#FXRQ;5Qz z=f$`l+#Y6?GvPwFM3~h$7hDc63Ckzha7DiAZk4z_+$tx+%_E`BOYQj-xCg*_ zk`?k>;IhGa@C%_rOFT$b?KpjSd#4o{-Jw5q_K&#vhj(l!Vr}Hq9;!toNDz^+=j>d&r>H(M08hj&^O`3hQwpam-&Yh_8iI8KpkkiqVTvAh)=xg3sx%MIAW zV>AwqDFsK1i3uV;)_|kwYEwE6PU}aD(-(W2IL#Ffa5Q-}f{m|(%i|^C`SMlVSyAst zi-52&^;K9gFYc?z?Gcjnu%Bg?d{caWPB6~nhtd`hBC&O$=dB81jx0XT{_EyTN` zd1QZ^u}^Q+_Sk&66nC^L2zGd86F6E>1Om8A;3zDN>SAXi7n35iA%LpEQ9t-1$SSvh zv#F8@Cqq{Zk4(0)bY7gSuyWpv>jvJLtjK?fy=ycUCC_p zA8qv!JDB53;3jFEF{|s~G*z_v>B!ZjM5Z`lOSmabVZZ0axJL43T&MC*Tvu>Yy26g| z;&jEhOhUrt;|^rg=h9unJa&uaac@G?I7d7nVZ`>R<7RF0M zn$p4mt7a@Gj+aDc@ypGv9t$^7FT|QET|~7gD&@PThL55?QvlL66`(?t_-Srh z0!oD_xz$b#$`2{yzSF{a)HE+0O{BS8mAjXcW{10;s0`G&d7{)_>*k4)p1E$GD7DwS z`TJ3tZ=>!$+^B^pDSARg?l{0^Z0o%l8YQE$|W^C~Av_oFo5&jA|X5V~l|1)XMQjych3jg51y|08>@A{?S6tWfn|q7+t+l><;#-AG z@RqlV__lSv+}K{on)$f)B0gF8^1a|Ta<=bLJ|irV2LyV``p;{7+a5X4Z7>$#%S^^A zT)W!%p1=0NezMK?P#0rk+pfP0nS&O5Y4e-!7xBSIyo?F)c+)|T1=Pd$7$$(P30J?q z=!x2`RLfoOdl~!o!}mJ%^36@uzT}62`m~KZ@H}h#@=6hFJ9O$mIRElWI=ES$-e1yV z4&~{Ml6cy{3YzYGrxmKCML>-i4p3PS=(=81 zYb2@JUDpG2836eZ^|?!e+cx5Y67sSnrh2iyVt^+bkVOxeKLCFO&HzV&4}b%}I{=v# z2m}H6+N0kzq%g6|lhroSBeV^m#{>LNKVQ&uN;(Vt3HUQ`4)_G1=iUi`o}cx=EMPV; z2dD-piOm4$ZRv4fIxrb11|9?OZcpEr^fjiwwT-3cVIi2&z;Iv)FceUL2LO6|#oI1@ zX3`rmy|}B7^nIZFfnC5JfZVwlcoLWoEClEocpkU_dF%w(oD>IL?L9{t}bOnc_iE!IC@OIv*so9eL>vLXu;e^O%x^|)kvrSEh|HXCEt^ZtYD>WSW z{SB`!w?H=bv9JWON7zYG8wAJczooy_^5(?b3qPC)xj|3_8?GC~Z-Q8a;cJ696U1gR zAE5-pcC#oBW|=HmEGLpJ-UTu1M-7uAN1l$k*@l_u=FAlSA?Q^vrbfal{R|^C;Ei8D zwW4OdDu?lKDk4fkpzoAe1rnv7eH`-pV*7};a&mDxT#T)I(LqJu++!-ZKgrR%5_#zC`SS`%q z%tWrs4rfX6@xEH*>jxP24$F}d>rG!l%b?Uu(h@Fyjt=M!QeoE5F9wHpt&Avv-iT%{rGR+_6nv zDyx$iL_rfH;!RMipKbj8Y14L7qSwn@x2 zGmBaOLI1ttwC-NZ-ddsRRoAdw?4mvUkK;+()GSndOLF>2!K(Jdhb!aKmwvyumy3Z> zFk`0}6~&Ux`WF9jE(l=9#fm6qVFSb)B(C)`>jx1vz5nyz$7lNFey?_zcsG_MvsXlN zG@HoY7tchaSH0LsM6>-zs`S&EFLuvgRrKlHZq-q0QqhLY(vM{}_jL_OUg9VVV73A+ z?daz-#Rvat-1N!yM0BNS!Ln+hZPw3EUMyep#(}d(DHo7Hh9n&kn<@5_#s1z{gpb8_ zB4%NULFH7%^Bxz9`dAjJd&IqFn(t(NEi`#UHxmICDE?XN>>7d6O#;zJTWqHEh0<7br3`{X2HOEiv zfdD%yZstO+0|^@+QmzHpDo!j7`b(UF(NieXipTChQ?y;A$3xLRak>x4J)Bq{&mvQP z{0xk{`wZ02lWJbAOuxQL?TeJ1ksTEl_!v5y` z+cI_CvT3>X^Q##fc3vJ_TYg@RqU=<>QYG{kW2{J)nf=8qE6XqpOA?#FN9iY6qfSN1 z(-!<;5!~ud3)7RtDe6(3B(7T7Oz$u01q>zaZAol=fklded91&fl*cmqwMA)XZ;NxX 
zj9^1s%8CqWG@?k^CYXC)0b6`nH3Q4E{fW&ox#6 diff --git a/eslint.config.js b/eslint.config.js index 0a7a95e2752..ab025368465 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -5,10 +5,10 @@ import configPrettier from "eslint-config-prettier"; /** @type {import('eslint').Linter.Config[]} */ export default [ - {files: ["**/*.ts"]}, pluginJs.configs.recommended, prettierRecommended, configPrettier, ...tseslint.configs.recommended, + {files: ["**/*.ts"], rules: {"@typescript-eslint/no-explicit-any": "off", '@typescript-eslint/no-empty-object-type': 'off'}}, ]; diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts new file mode 100644 index 00000000000..00f49b67142 --- /dev/null +++ b/packages/sdk-types/src/event.ts @@ -0,0 +1,227 @@ +import type { + Attachment, + CardID, + ContextID, + Message, + MessageID, + NotificationContext, + NotificationContextUpdate, + Patch, + Reaction, + RichText, + SocialID, + Notification +} from '@communication/types' + +export enum EventType { + CreateMessage = 'createMessage', + RemoveMessage = 'removeMessage', + CreatePatch = 'createPatch', + CreateReaction = 'createReaction', + RemoveReaction = 'removeReaction', + CreateAttachment = 'createAttachment', + RemoveAttachment = 'removeAttachment', + CreateNotification = 'createNotification', + RemoveNotification = 'removeNotification', + CreateNotificationContext = 'createNotificationContext', + RemoveNotificationContext = 'removeNotificationContext', + UpdateNotificationContext = 'updateNotificationContext', + + MessageCreated = 'messageCreated', + MessageRemoved = 'messageRemoved', + PatchCreated = 'patchCreated', + ReactionCreated = 'reactionCreated', + ReactionRemoved = 'reactionRemoved', + AttachmentCreated = 'attachmentCreated', + AttachmentRemoved = 'attachmentRemoved', + NotificationCreated = 'notificationCreated', + NotificationRemoved = 'notificationRemoved', + NotificationContextCreated = 'notificationContextCreated', + NotificationContextRemoved = 'notificationContextRemoved', + NotificationContextUpdated = 'notificationContextUpdated' +} + +export type Event = + | CreateMessageEvent + | RemoveMessageEvent + | CreatePatchEvent + | CreateReactionEvent + | RemoveReactionEvent + | CreateAttachmentEvent + | RemoveAttachmentEvent + | CreateNotificationEvent + | RemoveNotificationEvent + | CreateNotificationContextEvent + | RemoveNotificationContextEvent + | UpdateNotificationContextEvent + +export interface CreateMessageEvent { + type: EventType.CreateMessage + card: CardID + content: RichText + creator: SocialID +} + +export interface RemoveMessageEvent { + type: EventType.RemoveMessage + message: MessageID +} + +export interface CreatePatchEvent { + type: EventType.CreatePatch + message: MessageID + content: RichText + creator: SocialID +} + +export interface CreateReactionEvent { + type: EventType.CreateReaction + message: MessageID + reaction: string + creator: SocialID +} + +export interface RemoveReactionEvent { + type: EventType.RemoveReaction + message: MessageID + reaction: string + creator: SocialID +} + +export interface CreateAttachmentEvent { + type: EventType.CreateAttachment + message: MessageID + card: CardID + creator: SocialID +} + +export interface RemoveAttachmentEvent { + type: EventType.RemoveAttachment + message: MessageID + card: CardID +} + +export interface CreateNotificationEvent { + type: EventType.CreateNotification + message: MessageID + context: ContextID +} + +export interface RemoveNotificationEvent { + type: EventType.RemoveNotification + message: 
MessageID + context: ContextID +} + +export interface CreateNotificationContextEvent { + type: EventType.CreateNotificationContext + card: CardID + lastView?: Date + lastUpdate?: Date +} + +export interface RemoveNotificationContextEvent { + type: EventType.RemoveNotificationContext + context: ContextID +} + +export interface UpdateNotificationContextEvent { + type: EventType.UpdateNotificationContext + context: ContextID + update: NotificationContextUpdate +} + +export type EventResult = CreateMessageResult | CreateNotificationContextResult | {} + +export interface CreateMessageResult { + id: MessageID +} + +export interface CreateNotificationContextResult { + id: ContextID +} + +//TODO: THINK ABOUT BETTER NAMES +export type BroadcastEvent = + | MessageCreatedEvent + | MessageRemovedEvent + | PatchCreatedEvent + | ReactionCreatedEvent + | ReactionRemovedEvent + | AttachmentCreatedEvent + | AttachmentRemovedEvent + | NotificationCreatedEvent + | NotificationRemovedEvent + | NotificationContextCreatedEvent + | NotificationContextRemovedEvent + | NotificationContextUpdatedEvent + +export interface MessageCreatedEvent { + type: EventType.MessageCreated + card: CardID + message: Message +} + +export interface MessageRemovedEvent { + type: EventType.MessageRemoved + message: MessageID +} + +export interface PatchCreatedEvent { + type: EventType.PatchCreated + patch: Patch +} + +export interface ReactionCreatedEvent { + type: EventType.ReactionCreated + reaction: Reaction +} + +export interface ReactionRemovedEvent { + type: EventType.ReactionRemoved + message: MessageID + reaction: string + creator: SocialID +} + +export interface AttachmentCreatedEvent { + type: EventType.AttachmentCreated + attachment: Attachment +} + +export interface AttachmentRemovedEvent { + type: EventType.AttachmentRemoved + message: MessageID + card: CardID +} + +export interface NotificationCreatedEvent { + type: EventType.NotificationCreated + personWorkspace: string + notification: Notification +} + +export interface NotificationRemovedEvent { + type: EventType.NotificationRemoved + personWorkspace: string + message: MessageID + context: ContextID +} + +export interface NotificationContextCreatedEvent { + type: EventType.NotificationContextCreated + context: NotificationContext +} + +export interface NotificationContextRemovedEvent { + type: EventType.NotificationContextRemoved + personWorkspace: string + context: ContextID +} + +export interface NotificationContextUpdatedEvent { + type: EventType.NotificationContextUpdated + personWorkspace: string + context: ContextID + update: NotificationContextUpdate +} diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 1beb455f5e3..084f5c7e901 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -1 +1,3 @@ export * from './db' +export * from './event' +export * from './ws' diff --git a/packages/sdk-types/src/ws.ts b/packages/sdk-types/src/ws.ts new file mode 100644 index 00000000000..ed91b811b7a --- /dev/null +++ b/packages/sdk-types/src/ws.ts @@ -0,0 +1,17 @@ +export type RequestId = string + +export interface Response { + id?: RequestId + result?: any + error?: string //TODO: Use platform error +} + +export interface Request { + id?: RequestId + method: string + params: any[] +} + +export interface HelloRequest extends Request { + binary?: boolean +} diff --git a/packages/server/package.json b/packages/server/package.json new file mode 100644 index 00000000000..43c5e0cd851 --- /dev/null +++ 
b/packages/server/package.json @@ -0,0 +1,27 @@ +{ + "name": "@communication/server", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/express": "^5.0.0", + "@types/cors": "^2.8.17", + "@types/ws": "^8.5.13" + }, + "dependencies": { + "@hcengineering/server-token": "^0.6.377", + "@communication/cockroach": "workspace:*", + "@communication/sdk-types": "workspace:*", + "@communication/types": "workspace:*", + "cors": "^2.8.5", + "dotenv": "^16.4.7", + "express": "^4.21.2", + "msgpackr": "^1.11.2", + "ws": "^8.18.0" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/server/src/config.ts b/packages/server/src/config.ts new file mode 100644 index 00000000000..2f835bc72a3 --- /dev/null +++ b/packages/server/src/config.ts @@ -0,0 +1,29 @@ +interface Config { + Port: number + DbUrl: string + AccountsUrl: string + Secret: string +} + +const parseNumber = (str: string | undefined): number | undefined => (str !== undefined ? Number(str) : undefined) + +const config: Config = (() => { + const params: Partial = { + Port: parseNumber(process.env.PORT) ?? 8090, + DbUrl: process.env.DB_URL, + AccountsUrl: process.env.ACCOUNTS_URL, + Secret: process.env.SECRET + } + + const missingEnv = Object.entries(params) + .filter(([, value]) => value === undefined) + .map(([key]) => key) + + if (missingEnv.length > 0) { + throw Error(`Missing env variables: ${missingEnv.join(', ')}`) + } + + return params as Config +})() + +export default config diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts new file mode 100644 index 00000000000..ecdb2d9a5d3 --- /dev/null +++ b/packages/server/src/eventProcessor.ts @@ -0,0 +1,275 @@ +import { type Message, type Patch, type Reaction, type Attachment } from '@communication/types' +import { + EventType, + type CreateAttachmentEvent, + type AttachmentCreatedEvent, + type CreateMessageEvent, + type MessageCreatedEvent, + type CreatePatchEvent, + type PatchCreatedEvent, + type CreateReactionEvent, + type ReactionCreatedEvent, + type Event, + type BroadcastEvent, + type RemoveAttachmentEvent, + type AttachmentRemovedEvent, + type RemoveMessageEvent, + type MessageRemovedEvent, + type RemoveReactionEvent, + type ReactionRemovedEvent, + type EventResult, + type DbAdapter, + type CreateNotificationEvent, + type RemoveNotificationEvent, + type CreateNotificationContextEvent, + type RemoveNotificationContextEvent, + type UpdateNotificationContextEvent, + type NotificationRemovedEvent, + type NotificationContextCreatedEvent, + type NotificationContextRemovedEvent, + type NotificationContextUpdatedEvent +} from '@communication/sdk-types' + +type Result = { + broadcastEvent?: BroadcastEvent + result: EventResult +} + +export class EventProcessor { + constructor( + private readonly db: DbAdapter, + private readonly workspace: string, + private readonly personWorkspace: string + ) {} + + async process(event: Event): Promise { + switch (event.type) { + case EventType.CreateMessage: + return await this.createMessage(event) + case EventType.RemoveMessage: + return await this.removeMessage(event) + case EventType.CreatePatch: + return await this.createPatch(event) + case EventType.CreateReaction: + return await this.createReaction(event) + case EventType.RemoveReaction: + return await this.removeReaction(event) + case EventType.CreateAttachment: + return await this.createAttachment(event) + case EventType.RemoveAttachment: 
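+ // Attachment removal is keyed by (message, card), matching the (message_id, card_id)
+ // primary key used for attachments in the sqlite-wasm migrations above.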
+ return await this.removeAttachment(event) + case EventType.CreateNotification: + return await this.createNotification(event) + case EventType.RemoveNotification: + return await this.removeNotification(event) + case EventType.CreateNotificationContext: + return await this.createNotificationContext(event) + case EventType.RemoveNotificationContext: + return await this.removeNotificationContext(event) + case EventType.UpdateNotificationContext: + return await this.updateNotificationContext(event) + } + } + + private async createMessage(event: CreateMessageEvent): Promise { + const created = new Date() + const id = await this.db.createMessage(event.content, event.creator, created) + await this.db.placeMessage(id, event.card, this.workspace) + const message: Message = { + id, + content: event.content, + creator: event.creator, + created: created, + edited: created, + reactions: [], + attachments: [] + } + const broadcastEvent: MessageCreatedEvent = { + type: EventType.MessageCreated, + card: event.card, + message + } + return { + broadcastEvent, + result: { id } + } + } + + private async createPatch(event: CreatePatchEvent): Promise { + const created = new Date() + await this.db.createPatch(event.message, event.content, event.creator, created) + + const patch: Patch = { + message: event.message, + content: event.content, + creator: event.creator, + created: created + } + const broadcastEvent: PatchCreatedEvent = { + type: EventType.PatchCreated, + patch + } + return { + broadcastEvent, + result: {} + } + } + + private async removeMessage(event: RemoveMessageEvent): Promise { + await this.db.removeMessage(event.message) + + const broadcastEvent: MessageRemovedEvent = { + type: EventType.MessageRemoved, + message: event.message + } + + return { + broadcastEvent, + result: {} + } + } + + private async createReaction(event: CreateReactionEvent): Promise { + const created = new Date() + await this.db.createReaction(event.message, event.reaction, event.creator, created) + + const reaction: Reaction = { + message: event.message, + reaction: event.reaction, + creator: event.creator, + created: created + } + const broadcastEvent: ReactionCreatedEvent = { + type: EventType.ReactionCreated, + reaction + } + return { + broadcastEvent, + result: {} + } + } + + private async removeReaction(event: RemoveReactionEvent): Promise { + await this.db.removeReaction(event.message, event.reaction, event.creator) + const broadcastEvent: ReactionRemovedEvent = { + type: EventType.ReactionRemoved, + message: event.message, + reaction: event.reaction, + creator: event.creator + } + return { + broadcastEvent, + result: {} + } + } + + private async createAttachment(event: CreateAttachmentEvent): Promise { + const created = new Date() + await this.db.createAttachment(event.message, event.card, event.creator, created) + + const attachment: Attachment = { + message: event.message, + card: event.card, + creator: event.creator, + created: created + } + const broadcastEvent: AttachmentCreatedEvent = { + type: EventType.AttachmentCreated, + attachment + } + + return { + broadcastEvent, + result: {} + } + } + + private async removeAttachment(event: RemoveAttachmentEvent): Promise { + await this.db.removeAttachment(event.message, event.card) + const broadcastEvent: AttachmentRemovedEvent = { + type: EventType.AttachmentRemoved, + message: event.message, + card: event.card + } + return { + broadcastEvent, + result: {} + } + } + + private async createNotification(event: CreateNotificationEvent): Promise { + await 
this.db.createNotification(event.message, event.context) + + return { + result: {} + } + } + + private async removeNotification(event: RemoveNotificationEvent): Promise { + await this.db.removeNotification(event.message, event.context) + + const broadcastEvent: NotificationRemovedEvent = { + type: EventType.NotificationRemoved, + personWorkspace: this.personWorkspace, + message: event.message, + context: event.context + } + return { + broadcastEvent, + result: {} + } + } + + private async createNotificationContext(event: CreateNotificationContextEvent): Promise { + const id = await this.db.createContext( + this.workspace, + event.card, + this.personWorkspace, + event.lastView, + event.lastUpdate + ) + const broadcastEvent: NotificationContextCreatedEvent = { + type: EventType.NotificationContextCreated, + context: { + id, + workspace: this.workspace, + personWorkspace: this.personWorkspace, + card: event.card, + lastView: event.lastView, + lastUpdate: event.lastUpdate + } + } + return { + broadcastEvent, + result: { id } + } + } + + private async removeNotificationContext(event: RemoveNotificationContextEvent): Promise { + await this.db.removeContext(event.context) + const broadcastEvent: NotificationContextRemovedEvent = { + type: EventType.NotificationContextRemoved, + personWorkspace: this.personWorkspace, + context: event.context + } + return { + broadcastEvent, + result: {} + } + } + + async updateNotificationContext(event: UpdateNotificationContextEvent): Promise { + await this.db.updateContext(event.context, event.update) + + const broadcastEvent: NotificationContextUpdatedEvent = { + type: EventType.NotificationContextUpdated, + personWorkspace: this.personWorkspace, + context: event.context, + update: event.update + } + return { + broadcastEvent, + result: {} + } + } +} diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts new file mode 100644 index 00000000000..f959c1eb964 --- /dev/null +++ b/packages/server/src/index.ts @@ -0,0 +1,6 @@ +import { config } from 'dotenv' + +import { main } from './main.ts' + +config() +void main() diff --git a/packages/server/src/main.ts b/packages/server/src/main.ts new file mode 100644 index 00000000000..8d22d60e433 --- /dev/null +++ b/packages/server/src/main.ts @@ -0,0 +1,141 @@ +import WebSocket, { WebSocketServer, type RawData } from 'ws' +import { createDbAdapter } from '@communication/cockroach' +import type { Response, HelloRequest } from '@communication/sdk-types' +import { decodeToken } from '@hcengineering/server-token' + +import type { ConnectionInfo } from './types.ts' +import { deserializeRequest, serializeResponse } from './utils/serialize.ts' +import config from './config' +import { listen, createServer } from './server/server' +import { ConsoleLogger } from './utils/logger' +import { Manager } from './manager.ts' +import type { Session } from './session.ts' +import { getWorkspaceInfo } from './utils/account.ts' + +const logger = new ConsoleLogger() + +const pingTimeout = 10000 +const requestTimeout = 60 * 1000 + +//TODO: use platform errors +const UNAUTHORIZED_ERROR = 'Unauthorized' +const UNKNOWN_ERROR = 'Unknown' + +export const main = async (): Promise => { + const server = listen(createServer(), config.Port) + const wss = new WebSocketServer({ noServer: true }) + const db = await createDbAdapter(config.DbUrl) + const manager = new Manager(db) + + server.on('upgrade', async (req, socket, head) => { + const url = new URL('http://localhost' + (req.url ?? '')) + const token = url.searchParams.get('token') ?? 
'' + + try { + const info = await validateToken(token) + wss.handleUpgrade(req, socket, head, (ws) => { + handleConnection(ws, manager, info) + }) + } catch (error: any) { + logger.error('Invalid token', { error }) + wss.handleUpgrade(req, socket, head, (ws) => { + const resp: Response = { + result: UNAUTHORIZED_ERROR, + error + } + sendResponse(ws, resp, false) + socket.destroy() + }) + } + }) + + const shutdown = (): void => { + db.close() + server.close(() => { + process.exit() + }) + } + + process.on('SIGINT', shutdown) + process.on('SIGTERM', shutdown) + process.on('uncaughtException', (e) => { + console.error(e) + }) + process.on('unhandledRejection', (e) => { + console.error(e) + }) +} + +function handleConnection(ws: WebSocket, manager: Manager, info: ConnectionInfo) { + const session = manager.createSession(ws, info) + const pingInterval = setInterval(() => { + const now = Date.now() + const lastRequestDiff = now - session.lastRequest + + if (lastRequestDiff > requestTimeout) { + console.warn(`Connection inactive for ${lastRequestDiff}ms, closing`, info) + manager.closeSession(ws, info.workspace) + ws.close() + clearInterval(pingInterval) + return + } + sendResponse(ws, { id: 'ping', result: 'ping' }, session.binary) + }, pingTimeout) + + ws.on('message', async (raw: RawData) => { + try { + await handleRequest(raw, session, ws) + } catch (err: any) { + logger.error('Error during message handling', { err }) + } + }) + + ws.on('close', () => { + manager.closeSession(ws, info.workspace) + clearInterval(pingInterval) + }) + + ws.on('error', (error) => { + logger.log('Error', { error, ...info }) + }) +} + +function sendResponse(ws: WebSocket, resp: Response, binary: boolean) { + ws.send(serializeResponse(resp, binary), { binary }) +} + +async function handleRequest(raw: RawData, session: Session, ws: WebSocket) { + const request = deserializeRequest(raw, session.binary) + if (request === undefined) return + + if (request.id === 'hello') { + const hello = request as HelloRequest + session.binary = hello.binary ?? 
false + sendResponse(ws, { id: 'hello', result: 'hello' }, false) + return + } + + try { + const fn = (session as any)[request.method] + const params = [...request.params] + const result = await fn.apply(session, params) + const response: Response = { id: request.id, result } + sendResponse(ws, response, session.binary) + } catch (err: any) { + const response: Response = { id: request.id, result: UNKNOWN_ERROR, error: err } + sendResponse(ws, response, session.binary) + } +} + +//TODO: decodeToken or authorize with account service or both +async function validateToken(token: string): Promise { + const { email } = decodeToken(token, true, config.Secret) + const info = await getWorkspaceInfo(token) + + if (info === undefined) { + throw new Error('No workspace info') + } + + const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f7' + return { workspace: info.workspaceId, personWorkspace, socialId: email } +} diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts new file mode 100644 index 00000000000..3bb640cc36c --- /dev/null +++ b/packages/server/src/manager.ts @@ -0,0 +1,179 @@ +import WebSocket from 'ws' +import { + type BroadcastEvent, + type DbAdapter, + EventType, + type MessageCreatedEvent, + type NotificationContextCreatedEvent, + type NotificationCreatedEvent, + type Response +} from '@communication/sdk-types' +import type { FindMessagesParams, FindNotificationContextParams, FindNotificationsParams } from '@communication/types' + +import { Session } from './session' +import type { ConnectionInfo } from './types' +import { serializeResponse } from './utils/serialize.ts' +import { Triggers } from './triggers.ts' + +type QueryId = number | string +type QueryType = 'message' | 'notification' | 'context' + +type SessionInfo = { + session: Session + ws: WebSocket + messageQueries: Map + notificationQueries: Map + contextQueries: Map +} + +export class Manager { + private sessionsByWorkspace: Map = new Map() + private triggers: Triggers + private lastSessionId: number = 0 + + constructor(private readonly db: DbAdapter) { + this.triggers = new Triggers(db) + } + + createSession(ws: WebSocket, info: ConnectionInfo): Session { + const current = this.sessionsByWorkspace.get(info.workspace) ?? [] + this.lastSessionId++ + const session = new Session(this.lastSessionId, info, this.db, this) + current.push({ session, ws, messageQueries: new Map(), notificationQueries: new Map(), contextQueries: new Map() }) + this.sessionsByWorkspace.set(info.workspace, current) + return session + } + + closeSession(ws: WebSocket, workspace: string): void { + const sessions = this.sessionsByWorkspace.get(workspace) ?? [] + if (sessions.length === 0) return + const newSessions = sessions.filter((it) => it.ws !== ws) + if (newSessions.length === 0) { + this.sessionsByWorkspace.delete(workspace) + } else { + this.sessionsByWorkspace.set(workspace, newSessions) + } + } + + getSessionInfo(sessionId: number, workspace: string): SessionInfo | undefined { + const sessions = this.sessionsByWorkspace.get(workspace) ?? 
[] + return sessions.find((it) => it.session.id === sessionId) + } + + subscribeQuery( + sessionId: number, + workspace: string, + type: QueryType, + queryId: number, + params: Record + ): void { + const info = this.getSessionInfo(sessionId, workspace) + if (info == null) return + + if (type === 'message') { + info.messageQueries.set(queryId, params) + } else if (type === 'notification') { + info.notificationQueries.set(queryId, params) + } else if (type === 'context') { + info.contextQueries.set(queryId, params) + } + } + + unsubscribeQuery(sessionId: number, workspace: string, queryId: number): void { + const info = this.getSessionInfo(sessionId, workspace) + if (info == null) return + + info.messageQueries.delete(queryId) + info.notificationQueries.delete(queryId) + info.contextQueries.delete(queryId) + } + + async next(event: BroadcastEvent, workspace: string): Promise { + await this.broadcast(event, workspace) + const derived = await this.triggers.process(event, workspace) + const derivedPromises: Promise[] = [] + for (const d of derived) { + derivedPromises.push(this.next(d, workspace)) + } + await Promise.all(derivedPromises) + } + + private async broadcast(event: BroadcastEvent, workspace: string): Promise { + const sessions = this.sessionsByWorkspace.get(workspace) ?? [] + const response: Response = { result: event } + for (const session of sessions) { + const msg = serializeResponse(response, session.session.binary) + if (this.match(event, session)) { + session.ws.send(msg) + } + } + } + + private match(event: BroadcastEvent, info: SessionInfo): boolean { + switch (event.type) { + case EventType.MessageCreated: + return this.matchMessagesQuery(event, Array.from(info.messageQueries.values())) + case EventType.PatchCreated: + case EventType.MessageRemoved: + case EventType.ReactionCreated: + case EventType.ReactionRemoved: + case EventType.AttachmentCreated: + case EventType.AttachmentRemoved: + return info.messageQueries.size > 0 + case EventType.NotificationCreated: + return ( + info.session.info.personWorkspace === event.personWorkspace && + this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) + ) + case EventType.NotificationRemoved: + return info.session.info.personWorkspace === event.personWorkspace && info.notificationQueries.size > 0 + case EventType.NotificationContextCreated: + return ( + info.session.info.personWorkspace === event.context.personWorkspace && + this.matchContextQuery(event, Array.from(info.contextQueries.values())) + ) + case EventType.NotificationContextRemoved: + return info.session.info.personWorkspace === event.personWorkspace && info.contextQueries.size > 0 + case EventType.NotificationContextUpdated: + return info.session.info.personWorkspace === event.personWorkspace && info.contextQueries.size > 0 + } + } + + private matchMessagesQuery(event: MessageCreatedEvent, queries: FindMessagesParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.id != null && query.id !== event.message.id) continue + if (query.card != null && query.card !== event.card) continue + return true + } + + return false + } + + private matchNotificationQuery(event: NotificationCreatedEvent, queries: FindNotificationsParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.context != null && query.context !== event.notification.context) continue + if (query.message != null && query.message !== event.notification.message.id) continue + if (query.read 
!= null && query.read !== event.notification.read) continue + if (query.archived != null && query.archived !== event.notification.archived) continue + return true + } + + return false + } + + private matchContextQuery(event: NotificationContextCreatedEvent, queries: FindNotificationContextParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.id != null && query.id !== event.context.id) continue + if (query.card != null && query.card !== event.context.card) continue + return true + } + + return false + } +} diff --git a/packages/server/src/server/error.ts b/packages/server/src/server/error.ts new file mode 100644 index 00000000000..55af5f0946f --- /dev/null +++ b/packages/server/src/server/error.ts @@ -0,0 +1,8 @@ +export class ApiError extends Error { + constructor ( + readonly code: string, + readonly message: string + ) { + super(message) + } +} \ No newline at end of file diff --git a/packages/server/src/server/server.ts b/packages/server/src/server/server.ts new file mode 100644 index 00000000000..0941596c973 --- /dev/null +++ b/packages/server/src/server/server.ts @@ -0,0 +1,26 @@ +import cors from 'cors' +import express, { type Express } from 'express' +import { Server } from 'http' + +import { ApiError } from './error' + +export function createServer (): Express { + const app = express() + + app.use(cors()) + app.use(express.json()) + + app.use((_req, res, _next) => { + res.status(404).send({ message: 'Not found' }) + }) + + return app +} + +export function listen (e: Express, port: number, host?: string): Server { + const cb = (): void => { + console.log(`Communication server has been started at ${host ?? '*'}:${port}`) + } + + return host !== undefined ? e.listen(port, host, cb) : e.listen(port, cb) +} diff --git a/packages/server/src/session.ts b/packages/server/src/session.ts new file mode 100644 index 00000000000..3efd0a1a861 --- /dev/null +++ b/packages/server/src/session.ts @@ -0,0 +1,76 @@ +import type { DbAdapter, Event, EventResult } from '@communication/sdk-types' +import type { + FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, + Message, + Notification, + NotificationContext +} from '@communication/types' + +import type { ConnectionInfo } from './types' +import { EventProcessor } from './eventProcessor.ts' +import type { Manager } from './manager.ts' + +export class Session { + binary: boolean = false + lastRequest: number = Date.now() + + private readonly eventProcessor: EventProcessor + + constructor( + readonly id: number, + readonly info: ConnectionInfo, + private readonly db: DbAdapter, + private readonly manager: Manager + ) { + this.eventProcessor = new EventProcessor(db, info.workspace, info.personWorkspace) + } + + ping(): string { + this.lastRequest = Date.now() + return 'pong' + } + + async findMessages(params: FindMessagesParams, queryId?: number): Promise { + const result = await this.db.findMessages(this.info.workspace, params) + if (queryId != null) { + this.manager.subscribeQuery(this.id, this.info.workspace, 'message', queryId, params) + } + return result + } + + async unsubscribeQuery(id: number): Promise { + this.manager.unsubscribeQuery(this.id, this.info.workspace, id) + } + + async findNotifications(params: FindNotificationsParams, queryId?: number): Promise { + //TODO: do we need filter by workspace by default? 
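The optional queryId parameter is what turns these find calls into live queries: the Manager stores the params per session, matches them against every broadcast event, and drops them again on unsubscribeQuery. A minimal sketch of that flow on the server side, assuming a Session obtained from Manager.createSession and an arbitrary numeric query id (both placeholders, not part of this file):

    import type { CardID } from '@communication/types'
    import type { Session } from './session.ts'

    // Registers a live message query for one card and returns a way to drop it again.
    async function watchCardMessages (session: Session, card: CardID, queryId: number): Promise<() => Promise<void>> {
      // Initial snapshot; passing queryId stores { card } in the Manager, so
      // matchMessagesQuery() will route later MessageCreatedEvents for this card
      // to this session's socket.
      await session.findMessages({ card }, queryId)
      return async () => {
        await session.unsubscribeQuery(queryId)
      }
    }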
+ const result = await this.db.findNotifications(params, this.info.personWorkspace) + if (queryId != null) { + this.manager.subscribeQuery(this.id, this.info.workspace, 'notification', queryId, params) + } + return result + } + + async findNotificationContexts( + params: FindNotificationContextParams, + queryId?: number + ): Promise { + //TODO: do we need filter by workspace by default? + const result = await this.db.findContexts(params, [this.info.personWorkspace]) + if (queryId != null) { + this.manager.subscribeQuery(this.id, this.info.workspace, 'context', queryId, params) + } + + return result + } + + async event(event: Event): Promise { + const { result, broadcastEvent } = await this.eventProcessor.process(event) + if (broadcastEvent !== undefined) { + void this.manager.next(broadcastEvent, this.info.workspace) + } + return result + } +} diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts new file mode 100644 index 00000000000..4f131853c66 --- /dev/null +++ b/packages/server/src/triggers.ts @@ -0,0 +1,112 @@ +import { + type BroadcastEvent, + type DbAdapter, + EventType, + type MessageCreatedEvent, + type NotificationContextCreatedEvent, + type NotificationCreatedEvent +} from '@communication/sdk-types' +import type { NotificationContext, ContextID } from '@communication/types' + +export class Triggers { + constructor(private readonly db: DbAdapter) {} + + async process(event: BroadcastEvent, workspace: string): Promise { + switch (event.type) { + case EventType.MessageCreated: + return this.createNotifications(event, workspace) + } + + return [] + } + + private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { + const card = event.card + const subscribedPersonWorkspaces = ['cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8'] + + const res: BroadcastEvent[] = [] + const contexts = await this.db.findContexts({ card }, [], workspace) + + res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) + + for (const personWorkspace of subscribedPersonWorkspaces) { + const existsContext = contexts.find( + (it) => it.card === card && it.personWorkspace === personWorkspace && workspace === it.workspace + ) + const contextId = await this.getOrCreateContextId( + workspace, + card, + personWorkspace, + res, + event.message.created, + existsContext + ) + + await this.db.createNotification(event.message.id, contextId) + + const resultEvent: NotificationCreatedEvent = { + type: EventType.NotificationCreated, + personWorkspace, + notification: { + context: contextId, + message: event.message, + read: false, + archived: false + } + } + res.push(resultEvent) + } + + return res + } + + private async getOrCreateContextId( + workspace: string, + card: string, + personWorkspace: string, + res: BroadcastEvent[], + lastUpdate: Date, + context?: NotificationContext + ): Promise { + if (context !== undefined) { + return context.id + } else { + const contextId = await this.db.createContext(workspace, card, personWorkspace, undefined, lastUpdate) + const newContext = { + id: contextId, + card, + workspace, + personWorkspace + } + const resultEvent: NotificationContextCreatedEvent = { + type: EventType.NotificationContextCreated, + context: newContext + } + + res.push(resultEvent) + + return contextId + } + } + + private async updateNotificationContexts( + lastUpdate: Date, + contexts: NotificationContext[] + ): Promise { + const res: BroadcastEvent[] = [] + for (const context of contexts) { + if 
(context.lastUpdate === undefined || context.lastUpdate < lastUpdate) { + await this.db.updateContext(context.id, { lastUpdate }) + res.push({ + type: EventType.NotificationContextUpdated, + personWorkspace: context.personWorkspace, + context: context.id, + update: { + lastUpdate + } + }) + } + } + return res + } +} diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts new file mode 100644 index 00000000000..8d46b658167 --- /dev/null +++ b/packages/server/src/types.ts @@ -0,0 +1,7 @@ +import type { SocialID } from '@communication/types' + +export interface ConnectionInfo { + workspace: string + personWorkspace: string + socialId: SocialID +} diff --git a/packages/server/src/utils/account.ts b/packages/server/src/utils/account.ts new file mode 100644 index 00000000000..882e437f257 --- /dev/null +++ b/packages/server/src/utils/account.ts @@ -0,0 +1,23 @@ +import config from "../config.ts"; + + +type WorkspaceInfo = { +workspaceId: string +} + +export async function getWorkspaceInfo (token: string): Promise { + const accountsUrl = config.AccountsUrl + const response = await fetch(accountsUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'Bearer ' + token + }, + body: JSON.stringify({ + method: 'getWorkspaceInfo', + params: [] + }) + }) + const result = await response.json() + return result.result as WorkspaceInfo | undefined +} \ No newline at end of file diff --git a/packages/server/src/utils/logger.ts b/packages/server/src/utils/logger.ts new file mode 100644 index 00000000000..828c27e4327 --- /dev/null +++ b/packages/server/src/utils/logger.ts @@ -0,0 +1,24 @@ +export interface Logger { + log: (message: string, data?: Record) => void + warn: (message: string, data?: Record) => void + error: (message: string, data?: Record) => void + debug: (message: string, data?: Record) => void +} + +export class ConsoleLogger implements Logger { + log (message: string, data?: Record): void { + console.log({ message, ...data }) + } + + warn (message: string, data?: Record): void { + console.warn({ message, ...data }) + } + + error (message: string, data?: Record): void { + console.error({ message, ...data }) + } + + debug (message: string, data?: Record): void { + console.debug({ message, ...data }) + } +} \ No newline at end of file diff --git a/packages/server/src/utils/serialize.ts b/packages/server/src/utils/serialize.ts new file mode 100644 index 00000000000..1aa35209da0 --- /dev/null +++ b/packages/server/src/utils/serialize.ts @@ -0,0 +1,41 @@ +import { Packr } from 'msgpackr' +import type {Response, Request} from '@communication/sdk-types' + +import type {RawData} from "ws"; + +const packr = new Packr({ structuredClone: true, bundleStrings: true, copyBuffers: false }) + +export function serializeResponse(resp: Response, binary: boolean) { + return binary ? serializeBinary(resp) : serializeJson(resp) +} + +export function deserializeRequest(raw: RawData, binary: boolean): Request | undefined { + let buff: Buffer | undefined + if (raw instanceof Buffer) { + buff = raw + } else if (Array.isArray(raw)) { + buff = Buffer.concat(raw.map(it => new Uint8Array(it))) + } + + if(buff === undefined) { + return undefined + } + + return binary ? 
deserializeBinary(buff) : deserializeJson(buff) +} + +function deserializeBinary(data: any): any { + return packr.decode(data) +} + +function deserializeJson(data: any): any { + return JSON.parse(data.toString()) +} + +function serializeBinary(data: any) { + return new Uint8Array(packr.encode(data)) +} + +function serializeJson(data: any) { + return JSON.stringify(data) +} \ No newline at end of file diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json new file mode 100644 index 00000000000..49e05cea1ee --- /dev/null +++ b/packages/server/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From 7bf170cd03c545da7d4f96be6df7b9291625c306 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 17:02:59 +0400 Subject: [PATCH 015/636] Init ws client (#7) * Init client-ws --- bun.lockb | Bin 101408 -> 102304 bytes packages/client-ws/package.json | 18 +++ packages/client-ws/src/client.ts | 213 +++++++++++++++++++++++++++ packages/client-ws/src/connection.ts | 123 ++++++++++++++++ packages/client-ws/src/index.ts | 1 + packages/client-ws/tsconfig.json | 9 ++ packages/sdk-types/src/client.ts | 45 ++++++ packages/sdk-types/src/index.ts | 1 + 8 files changed, 410 insertions(+) create mode 100644 packages/client-ws/package.json create mode 100644 packages/client-ws/src/client.ts create mode 100644 packages/client-ws/src/connection.ts create mode 100644 packages/client-ws/src/index.ts create mode 100644 packages/client-ws/tsconfig.json create mode 100644 packages/sdk-types/src/client.ts diff --git a/bun.lockb b/bun.lockb index bcfd9769cc9d102bab1d9e18fd386925410165fd..3bdd61c26d229dbfe9b10bd7edc768e618bbe2ed 100755 GIT binary patch delta 14171 zcmeHOcYIYv_P_JU3%Nl^FE2fzBoNX=UP!w^4FN()=r2GZp+|1o9n+mv)egz-XqawqoF&=e64kRi>KpB7H6MCD;7>zN-6<`vYos`2LQdXy z?4rthHAh3>3qggwYFJjf|c#Bb#kgBZdPecwN%W8;S-ESSg~n z5{5|ePoU(;#Xw2&11$${0nGy?$BbzH9my5}ejg~6uMU=^ZlLd>kKv%R!NWxJUhri1 z`w-1xEvcHWXO%As$dRtFPCcpuCBCe&wA_vnIIYWjz$(RiCMcEH*c&VD$=8$02YM8V z$)yfw1ESMtpYEudJWHyEmo)rwk(wj3Ks~Y`=%80{*lX^xH%iiv@RWM=0%$1c7W9mI zupK-#h-`8?tL%*lJjo@%HWi&EcRfK7%I0KD0MfFc7GmD)2}*oG4=pXmgHjy2fRfx5 z~WK;8XZoD&d=d{%;EABog2j4%}U=v;3yC~LCMzi+WM+` zOxI01zed-dpwn$uEnq+KBG>SyOX*rmpMkogc1=}{J+7imYS8EG`+YU5lQOih7J^cj zl0d0TuY*!gT#*SLpqRyVGioL2e5NM<36y$LSy!+2@DzB`i$OhjJ-qoC1T+y}1SR)( zfKnd31O;+P0i{@ufcwN(D_VEUby{P0PN}PKNTucF_Ijs78pZo{jIOHar!_BQq=Qjw z$n9F~G*GgB6_i?CeupGQfqLJm<*E~W5cm_|F`t@Cz*81>g+3*JK!2?}SM#)d>d)Wo z7|^dMUu!KJlyWZ_lsa8s8Rwi;Z+EO4p!H#x?eyr5CkIsC zamcaxnSUyS-`Y2_dCqRnZLJ5~yxhD>KWLnq*0t-XKOZ?fqBk};#@}3_a8EBc^Jq-4 z0=_XW%grQ7ePCDSkzQ8zA#d?gSOIUxFXt9-g&pQL{9ffP_|4?)_?^xzK8n24EJ+Ao zGvlS+R`wNd@ljX?Z%0Xst0d7%BJnL5>D=b4m@FOndfyz`))DKES|!rg%3k9ZKgHC^ zjTignnDgBvDTjypWU__4-A|GC5f4>YUJBLkxy@gZ??9MH=EBbmNq5s3DEM2=2f(HC z3I3U+8lcGO$SqQpcxixDz6;y{aBie4{}mj$=fdHI?1Gr4f$PA}_*v0`mOw?G4jI+% zKpL!-TP%wFZ(YyCBcYeTTPzBzKjUSU^DyBk z@Ful4>%bwK;Vbp+1UO86;{2>~Z%plEO%pvWxz0gzrOvtVEk0KHJ#d&KXo36-#|$Ma zE~?Ej;PkM=m#yHaJ$MG2=fP1d8ISa_vR>R4uE>j!wc}L{#NPZbaD_b7J5x?U_NS^= zBatwb+!mp*O}r&SF@KDbOn%au<$~$fPgO3(-6+j4hTv)55~;AuyggEpt=N9C9MI54 zSlLW&i&EtMkkJsRIV^t#j)ILKpfiP-@Iy5YA=w3vh8tZ&VtfdWI;wRh2uq1pkA}se zb6OBu!BMx-QD3Y13b+hDAs|yu50oS;K2^dfrx(Ifxhc$T5$=3N@OCdS#lEv9=9R@1Dk8iFy|rB1t3Fy9V6#r#ra~cjTx# z^-P^Zc~DFa8^~=jihMV=Xe~KtcwPlZiKZoY9JlmT%#C5H?-7~uF5J<8Ak7h(&%tGZ zLkfUP#p47egjpTt`QXS7%8)wGfunkeg|}6{0FKhbOo_zMMy$e`xDCI1c?*6&Ol3S7#lTS1+PRcQ#kg!hkqWvj+Ws;kD7dT&) z!(uBhh{o#$>~&HXkEggTSz({>mSjZ^z^bliAeNP3+>)ZO2e~apF_*xC_ceQq!Fg!* 
z;$+s72c_n)DsD?vrz<6|YM7A>iq`6zKbG%d>wi%Fv)mUn==rX&Z(NmBi_ zvc=$>2FJ>{ElrW%j@O1*UHeQv3H(G_j@&y@n^#!n1FiCGaC#OXC!PUExsMD%q)q5L zEx(8@majqJD6=qTK~}b$+tL;JEy!-yWPVnYUotHlx13bGr(ysVkMK?z@Z_E3w-(FdKuSvYz z0yIJ6FifSPW!9l0A<(N(4mI8&ZOG)R2 z{xWYVP)z20exe{puEdinIipSpEK;_?cyNJ?szNr{DqAs(T82>nw}aCI1@3EblrQKC zBHewUVQWhs<_0)@)?$Em>YSFx7r<#zq^Z^&28qM&i}uEW)3X|@`3mCrgrH3GVcccY z1DiawK$1$KkDbOpowpBDOumD8P+<`5rId(k>7XTmWwXTL(Qy zNUi{0T-1xG2|yZ33_vW^>$fQBA+;nE-#9#+zhCUxld523)ayn{n#c#W1-LCq{Bm&s zy)4swQ!k>#Bgn89IwHIui7_M8izt;MGU`Q?x?ZTUDy8xwohNGIH6sGNNIFWF679e@ zjtHPeBpX1b#l+wuN)Z?jkbD9_7g6F%bUG1~E}|~{@`!*YDy<+9uHQqcmC361&6Mm^ z>T;qoP_Oeusk}kwiIN=$K8dL!+^7mpl~Z>&_3lQVLN-G$zmbxmS-PAkUIL^A042!1 zfE%zHz`#rhPzQM}ICAHHoeEGhumK=z4+0X88>Izj3vsw^q*Qw=K-v!j#6JSiMU>=^ z0VLlB(Dh4{^mYK$`fi;*2}&1eka44S=2>u5VLwTE=aPUXx_KU;W?lfO^dLa|ivV3j zi9e*%!=R-1vQA$C?FhUM&_$HW-vFrIn*d$EK%3Mw{s#(af_?~4#g720_%T3+&jED( zHpW32e|%yB-!LXnofqBp zlHWtAtG#voU!u36JRRkv(pT3bN}grwJW&dv!X)l9)+-6OQZ8gv+z&sLCjE6<07`X- zQK3%V`LapTeAgs5)%$UJ$&HjIQ3>SK$|RkZ>9j(plR@eFB}(WW0E z;ToMMO1HK6p)NFnQu%aHYG8&=XMy5BX|_)1fRf%k66%z0skapVdvy6d3^RiS%XGmn zQHteil#>eA^?nZ})irv(wR%0G)R7jQZdBtB!A1zEN1JrT8!7cdJlt==nStJ?dgR-$hf#0#}hi3T8Hlc&SYMS$4MDq{Myt~Z%^$nULl^XT@HT0*n_QKzWBY# z+&K!5o&2jKV|eb%Zf0j!{7DE^=1`T+7WnBe;V+2YQ5DjMNXna!xS1W%n4VNiUKFGK zQE4^*^2j)OFWzAA;ko%(Ytbv?WPW-@OzXw7V_56KH}(hci)WL-%@2e)5lpiCU-Xj} z7tLahi78E2u4)%813v>4CVEh!%i!rbUu>40uex0 zAPo2zjeY`r3Va5f1wIG902+WspdKg(=*!D^zy^#31_FbCJU{_*fu+DQU^N}Npp~%> zs0SKELr3N_ArH3-K<|y`phurcN`L~OKR_QY)&c8*2Y{K-a{>jx5Fj72t-wRT24EvF z3GzaqNSx`&0w#~e?V}Js3_J)t25bU01BZZ1zzm=Qpb17RRyTkq7R{qKAYTrU%s>nH zwLmjXCS1L})cImEv$lC|?0(lUip_k0`hPb-^JuYCuDc-dg@i`dmsaJ|Bd4zXeCQfSgzE{2xo=ko=L^S^OpB}wu4mnwO(`#o85u(7UhDm!m?|7LBTl6cAnLslCga%J@-2HuU7mY<6f{#|3f;*R&wIn$ zeDP0jR>_Km;sdTq*nAMXSz;NH1>!}JVB-Ye=%4{_hJV$Ho*3>tDLi~p>!27HgdQ75 z1N}VrJU4qq?K)Kttzm^06MR{UDa=L0`>{Z=!uO%=!dp>7wS z`Xe@{MQ{MPe9yb1&Hvzjwdy=@qJ3b@=v*$7$03dO~Tlnw5*6NY6Nwx2o;%;i{;l#n&hI zdmHqhOpQWV~*dt7554crgMU zxg=sk*{ERSlwy+daQ8pm?G=QMq+ryOvB`@ELSawq&n?!C!;+sqF>BSB4;#K$W2de( znis*wsmM*;n|sGDw3m6ZoNR3^iWNna&stiC7-toiR;<~x|HP}b8c;H!d7m(`IgABj zKJN>|{OcMf&W9oKt3+fts-G5_Ai>7jNtdz7BagZ6pmhk#6eZ0}Q5TM6s1^@}V}kte zW~dcB4B7%q_*y7h7Ty2y?Rgq4AG2+A2)?nG)Ax>BP9AjYVnkf0n+vJghwRo z{-!Dd6Eh=OvMIWo*cHhFO%^7OM6$t{xjs?oN3zHVF?H!7u0*lGVB_%Rs)XWY zJ)X+O=*~+JwM{CWiM#$7e)+ODeZkZ9A`Afv8rkjClH+DGept!%r!I*JVyr@FEw zx7ZH;bqhB;fhqb$H|#0KNxcJyw~hPj-V-0hXlZ~+j$TG|XMu&cJ{?5;>eIpR>4)4? 
zEbh+M2N(6!_Q@%ZC$9eJv9roo+o6zt<3x5348L)(HTkvS@6OMwDt4jSrKWehXn+Fx zzm)WhgRK>Z7e2jv?NN6ZMr!nQze5~=LLcKK?84TUw-1|tF8jKIaaMNhaObfjbKiLD zdP$<_9E~Q9wnay?X^agL^Lv3EAs&xGi^l2G-bdd)_xKw=d6XDPAv8HfoPh#!h*j|* z^Mn!yvP2~Ggd4`$)zN3pr#{|4el68fr`xsDt-+JNUbSGHe<3oGitr4!Q_MwG)+Qo* zgB%unh&wJ`0}0j-;5K)V(llN#Ma(e1Llb5dt+nOgR0k>*M?wO8QUh}641$;LKtV9|GgzbScHDUbi54{ z=HH#8p7P7xe9^=%h_B*V<}G$U6QyMJl9-c#72$oc8=NU7L+npRXOl#B3JMi*8PZ_m zRB(?U8W#_AwYjRxnL1FDL{cL3UJ}PZuGM~Bup~$|BfnAmx2mxn;zBZvyeNjjR)}$q zIB?UPmSx*UCSP~KaQT$jNR6HsZAs|QPvQ#@lWUfUKz~Av^U4pj91Uuyd8U(E9VseF zMs=+@qYGklG7B=8;9sw;JE?ZqpU(Ce=JdXoEl*o*0D?4dr!nd=pOF8E(^T)GcUB%$;0-8hw9zJAwdxpft%QId=j1g%I6vBJp` zQE5e|>cj)!f{jDj1K$Xd$Ip3Zo<3KxxGxpQNqwC-i_@qf#xZQm?t8-D9I<$UiS<%P z0pnR9s?)J+8V9tWiFN+@w%rjEuWQ7MP3g$lUpJxer);;V>kFTZqu|p{eEsgy%awTi zGuj#~BB5aHzXsLog&0S}x3_vf^WyQS$=B-{4Ll~E|E2mj?|WnatMNb-=4nO+J z)c52W@9GU!+^03{o;5u^Cn8BCdf*K!I+OW@S6A6_pF!o*5_BqxMzN5hTZ9R5MsXZ%uzJ;aXC-|&Qm+}q z8VekSrFU%BU3=rYi)pDj5$U2|2J>|@K0k;lOIVD1Abn=SmALok#>WTsO9Q?H^Zx;Lm@23M delta 13593 zcmeHOcX*V=)_-S{P4*2nBq3#qX#^5N%BHZ{gyc<-n$U|BDK2CQp(G&*WkJXWBBEjeX`hM(5ZhIPg5U4FC4e95z0dRg^*s*$_RN_%bLPyM zGi5j9y0t#*R{AVWihucy0UH;8&AVTmdaTL#?|wYHxioa&>D5Z_F{{p0ZL*)JUAl-# zl+4S9MpREVX^dJT`AL$;iUNO7SLNiA(ke-M!Anxh{ky|)dWYN zya|;TmzGtyB&nayk1r`LN}N_MHAk;h@5S?3aJ3h76Q@9m%v}zix*I>)SykmKk|w#_ z)5-ii2$5V0}(cPOV0o1!=B(8-ypy`vU3HU~GsS(ykE zi&ugtsY#duX!Ps=B{Q58RjZ}DTS-zF_+;)h1y`SgP84|F1tsY{df)3o?*hMEFFy!_ zZmpk(a%%Su>>}+I;K^A5t+hc;tW0z}r%aZd6~*Pwib|I+%4v`}!5U2gg}!Rf5oE|4 zG+|D7Z#)!{;6PAv!od(p3IcV24+XV?l3|87|AAnO1pf#q)laZWQd`h17-IxzfACP@ zSpc5&9ΜtOZrIFAR?_r)MQMig18vb61s~}RDqT~iyDB8v0wp^)D?6MpL;HV5`P)x z6c(xNwC+FE$44QT@?jpXBOZB1hpm(3`Zqyylo{5U9$q`0hH9pN@z?gsS2>VckG z6wpExpk%%Wl;U6&6q7+gpybPVm`{8cMH{X|r&C;26UvGzCFi(tuJWo%DS_wvwlB%b z*SbF&CrMVcis_=YZVF1;Pk>UdMR!Y5H0T$g6srZGVW6A4N)i^DCmlRRVFUEh3}5c1 z4d(cJn$_j)t^H-jpZzOqI3FadJaNgle%q|1PedE zGmod(W$y4-z8Q((+P~3vtYX%iYTEfZ{pZc z-q2c+A43`9Rg+*xxjR^qnn2#+ly8k>q%tgAI9^7BGl# zbU^0)3mhp?T`VVJwdoE=-Ar)Q9SnoY1K`M!jOPc~nZ(@@iaZK2H&T^=3>=JB!KQ_`7k9)f=AL({wnt{m^N^!i zL7>Akd%@9sA`Za$;YL7FVOHmQC^*uCAVl~q14r%P3xwor;3!7Sd>_V%J`xl*guC&% zfY;-5H*ZK#ENvSa!5v8m1(Xfe%t5I6@%ki%74Qbs+}~E4dIV08-Le(jJ&?nGn8H*1 zEwX>Cp^RpK2slzBQ=G6(+>xS~uEz3NDY-0{*QY3~nm6FHjyr6M>C1L}mMz!3S;A(^ z_w7pKn{5{JJaB<3N7_fUmn0kXHBmQ^MckdLu${aWNv7J;j)_K6)l>8c8 zxlv~xohV6FJSU?wcXv|cXOpz4MKA@~O`l zh>Re+`9p9$`Sr-o7L1&N3G)2LF%@3ob7Hg2qmawv zyRdhxL5_NW5$NF*IPw-$AxZ<%j5>?z)R4uqy3itUcTwbv*t+#~3`g3qeb6Ydj^Te7xGrilGHoL` zZ3tLT=Fh;T@ax%~eRDJg>MH!EuWSKNPQr13Mfe!F-bT;zd2lzUHwxPBX{?(84m!0t zwA6v4FjTdc=gF{kN4H$uT^hP6@_cMH)CINybg>5%()2Hd@93T@XP_I>Ceaqe zrgBFQMSdD(+SJlEhbvJJ1-nZFYI@{r1I3Yo1-y{EdnzoE*Y{M+B^du?KBs54{5f)c zpb-aDzFqzsZjxic!BN;;aGU6*uu;4LpKo$Ufntuqjk17(-#i7m395#x$dLop4ViV~ zjzWcJ4$Nb-BgM-!NnQnGVPA_cFRx9@X3}ykBDhl2m`4CrW({1gLxvAo1&ifayNSYnWFrHtfc&@&lw=;&1^*pNqFiscTyIB|2C`D8 ztMziCG@vJS{>f@114<)Vuk%DnuojexHs};tHH!bw4#QhdN9yR`I1R}LDG&JTFueT` zB)_=GFTi+Wkcj?u7{UqUl#KwTJ5h@EzYasyJ+~c}LL(MOL9ft73|KG!~BgHxr zosNHc7}kZ4du2HPbY~oX>GSq8{+9m{tJ}TnN0Zr!15(CsY}?h^FIs4S9{R3D-Q0a= z7^|uqwf`rR`I9IdrrddGDBpb0VlIfmB@s<#QXP*(l;;1*I z{uXh~%tj2MJK#Lv0pLO41nS-gJ^)Su9|NZW^5$9K9B>#o0_+CJqpg4dAP@)wg2nSb ztjtN@&Hf9ZOBk6)=Fk=MZ@}My1;9e!A%Jdzj{z$H0npudA}|S{`|<>Uw)e}x*8ok> zcVdn&8&~}avPS`O@gATOC;=t`lYvrzt{X+bXy87;2@D5D0E2-cz&*eqpg%AG=neD% zdH`L3yMa7_u7dQV3V!0GAAYLok026$KvchK($~)&zze`;faZ8U;09&^a{;;q(@lB_ z@GwAc9+m=60c(MEz!Lyn+wTV^0}2oTv<6(jcpwKD#}JW4$Rq;EKq`<1L;%qMeSiBD zoqh&f06qu40KNpi0?L7@Kp8L=px;041?X22`sJi2&?{x-W_JnTL`< zlW?rPW zVyRYIHmxDCcI>4~&hE#tQ;c>%A_W_csBXp5tj43SS1Z@O{DIQ@T?k|**)kw8LmX|z 
zLamQr+@a7g{v&1Uki^W-OiZtr{vlWZOOOviz#jruksreXeNsueL-Y$^p&7*4iVhg_d?YzQRNp$c94iz`sY!i5~fidl-78U#)ei-K4h>nHXQ z86mz0u^La=?g{I8JmT9rm~EKzkjQV1Rx8AkHZa?GBNxu&dFst#xZg2C?H}$A1`c{=rO~QucEBu35S{vhu-pu-c+09wKo`nGB zAqj)j^|Ub_?Pab=TpqQ*6%3o3lB9O1OE4y$4~Bt5#hOSEmpBj3^ntJVC77i$FOeLA z(&nO926wRjbI_}cdv8TrbSkhISRHKiG7UAgRpokB@ z**ILx2xkdwuc!@YQC8!9*n#0GKlv?w=@B%xX*07xoJHe5h!t&#GIg~DMNo&vwk^21TDJ)2U9jYBG5ao-5MeCZ->&YK>>KPyA;KBK!dZX8C=_mO8OkOITLg?h zBt}KBAy(t{olV&gd;bIeVK6=|DJ>}#+q^g$fyUYxZdYYB9sz#-f_vG}|DO7j>MwQU z(7MbpUi+<$^>j>_?;6{Z<>sU(k%aN=Z}^_C&8yE`jzd$556rF>xEaQ?yhTOJx9mLm z2Ca7zfJMd&zdaqFIGH^pqyRNG=uE@WA}&T^%{7FJ$S7g&=Am)-)#cSqSf6EMuKbM2z7Qk+8N*Ud*JH$) z7#3o>B8jUptT$F`x3)0aBHSRRPh&+&EDNz3FAtX`k66-fTaE)eMk>abnv|I;&bP(h za5+|#X0wnsbj!e%*E9Q4@zDjXE=HRe9YSfiCH=Xoqavp>RDCRox#*MK&i452ks952 zr2h7JjBY$0ji-;L&y;moy1mQfXx3Kvw_^)`qucuX8g4fNYd5?-3#~SusqNYSk9!Yo zJ$WWhn{#Xs#*@t2BP-o&C%#jHn#`n(BwHFirBkm2#$_S;cP|TnIx;gHd^S$(XwOzz z+sA9i{)EaGe!bS>*^)r*phEcBL|GipUE`r=>XCsT)^sZw;Z3VjT?58b&TA`rX8Rv_ zX1|GLUgd5a zZ@+s}&PON^VC%LIk7tt@ixfG@VB^Kv4rpe)#_V|b#HHuo4(LX)f~bQ2TyeA`gyd(AbBI?aPeoeQi z0;#kvjTfg++~3js+5Vr-t1;S%2L80TPP+9ss-7m(i>ptZy^&yIbfVcHuvgf~u(w1Z zNE_pgYu}U8ua%bX?m&t(|6EBHo{orN-DZ8=j$Zd6;0j;SgV^W6ZvyECUt= ziR4sFgYj6l-PMW(y?loIsJoiFJsQtxzic)xrsAXfe$i{-$U#A z!ar)pJR*v$5~3yzq*gou(%8c`ag0h2h)L-zRG8A?gX1DR9ajP4P49}>L#}D924Rh; zcQ~wcO}J5orr%&y9R{Q8HB3;4)7p&t*zF$6FrM?y{^5N`^|R%}DD3E%!`CT%@uFSz z^JhOBRJ!$X+;eP6xB{bQx7bC)N)R7pU?#FelTL_Vft0Qu#Fs(DRcj2vTP9WX1Dyv1>} zYiyt&kZitKpUpx$84sH$o&5g8Mc0dQ(KFf^Pn=g>@$7%H;*~lWP7{r!t1!-5U6tuv zZ?O|cZ)5QsaRogXtU>*TW#PLS($y?+axt@tPdc+$`UOl9mK>Ji{{hY?+87?p%y^T{ zY<|OA^vYoo1@9zE5>6OT+aCji#&!Di>z~2nrz+2~Q}wIbtX>%ClcfFcI)~VctHK|B zYDEHg-03}^4?O((#8dK&xNNQ0_=RF?4$Io!I+tDUBZ?NYmVOiH=O3h{3&q^UEZU1q zDSw#7`F*HY9C~OwEKOp5SLsJ0q{v08q-M1$I$@EnW05wfT@N$6KYf`tzB+G4GkTlI F{|9?2vC#kk diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json new file mode 100644 index 00000000000..cfe8cb09187 --- /dev/null +++ b/packages/client-ws/package.json @@ -0,0 +1,18 @@ +{ + "name": "@communication/client-ws", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "dependencies": { + "@communication/types": "workspace:*", + "@communication/sdk-types": "workspace:*", + "@msgpack/msgpack": "^3.0.0-beta2" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts new file mode 100644 index 00000000000..b8329f77550 --- /dev/null +++ b/packages/client-ws/src/client.ts @@ -0,0 +1,213 @@ +import { + type CardID, + type ContextID, + type FindMessagesParams, + type FindNotificationContextParams, + type FindNotificationsParams, + type Message, + type MessageID, + type Notification, + type NotificationContext, + type NotificationContextUpdate, + type RichText, + type SocialID +} from '@communication/types' +import { + type BroadcastEvent, + type Client, + type CreateAttachmentEvent, + type CreateMessageEvent, + type CreateMessageResult, + type CreateNotificationContextEvent, + type CreateNotificationContextResult, + type CreateNotificationEvent, + type CreatePatchEvent, + type CreateReactionEvent, + type Event, + type EventResult, + EventType, + type RemoveAttachmentEvent, + type RemoveMessageEvent, + type RemoveNotificationContextEvent, + type RemoveNotificationEvent, + type RemoveReactionEvent, + type UpdateNotificationContextEvent +} from '@communication/sdk-types' + +import { WebSocketConnection } from './connection' 
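Everything below wires these imports into a Client implementation backed by a single WebSocketConnection: each mutation is sent as an { id, method: 'event', params } request and each find goes through ws.send. A minimal usage sketch, assuming a reachable communication server plus placeholder token and ids (none of these values come from this package):

    import { getWebsocketClient } from '@communication/client-ws'
    import type { CardID, RichText, SocialID } from '@communication/types'

    const url = 'ws://localhost:8090' // placeholder server address
    const token = '<token>' // placeholder platform token accepted by the server

    const client = await getWebsocketClient(url, token)

    // Broadcast events (messages, reactions, notifications, ...) arrive through this hook.
    client.onEvent = (event) => {
      console.log('broadcast', event.type)
    }

    const card = 'my-card' as CardID // placeholder ids
    const creator = 'my-social-id' as SocialID

    const messageId = await client.createMessage(card, 'Hello' as RichText, creator)
    const messages = await client.findMessages({ card }, 1) // queryId 1 registers a live query
    console.log(messageId, messages.length)

    await client.unsubscribeQuery(1)
    client.close()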
+ +class WsClient implements Client { + private readonly ws: WebSocketConnection + + onEvent: (event: BroadcastEvent) => void = () => {} + + constructor( + private readonly url: string, + private readonly token: string, + private readonly binary: boolean = false + ) { + const connectionUrl = this.url + '?token=' + this.token + this.ws = new WebSocketConnection(connectionUrl, this.binary) + this.ws.onEvent = (event) => { + void this.onEvent(event) + } + } + + async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { + const event: CreateMessageEvent = { + type: EventType.CreateMessage, + card, + content, + creator + } + const result = await this.sendEvent(event) + return (result as CreateMessageResult).id + } + + async removeMessage(message: MessageID) { + const event: RemoveMessageEvent = { + type: EventType.RemoveMessage, + message + } + await this.sendEvent(event) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID): Promise { + const event: CreatePatchEvent = { + type: EventType.CreatePatch, + message, + content, + creator + } + await this.sendEvent(event) + } + + async createReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + const event: CreateReactionEvent = { + type: EventType.CreateReaction, + message, + reaction, + creator + } + await this.sendEvent(event) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + const event: RemoveReactionEvent = { + type: EventType.RemoveReaction, + message, + reaction, + creator + } + await this.sendEvent(event) + } + + async createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise { + const event: CreateAttachmentEvent = { + type: EventType.CreateAttachment, + message, + card, + creator + } + await this.sendEvent(event) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + const event: RemoveAttachmentEvent = { + type: EventType.RemoveAttachment, + message, + card + } + await this.sendEvent(event) + } + + async findMessages(params: FindMessagesParams, queryId?: number): Promise { + const rawMessages = await this.ws.send('findMessages', [params, queryId]) + return rawMessages.map(this.toMessage) + } + + toMessage(raw: any): Message { + return { + id: raw.id, + content: raw.content, + creator: raw.creator, + created: new Date(raw.created), + edited: new Date(raw.edited), + reactions: raw.reactions, + attachments: raw.attachments + } + } + + async createNotification(message: MessageID, context: ContextID): Promise { + const event: CreateNotificationEvent = { + type: EventType.CreateNotification, + message, + context + } + await this.sendEvent(event) + } + + async removeNotification(message: MessageID, context: ContextID): Promise { + const event: RemoveNotificationEvent = { + type: EventType.RemoveNotification, + message, + context + } + await this.sendEvent(event) + } + + async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { + const event: CreateNotificationContextEvent = { + type: EventType.CreateNotificationContext, + card, + lastView, + lastUpdate + } + const result = await this.sendEvent(event) + return (result as CreateNotificationContextResult).id + } + + async removeNotificationContext(context: ContextID): Promise { + const event: RemoveNotificationContextEvent = { + type: EventType.RemoveNotificationContext, + context + } + await this.sendEvent(event) + } + + async updateNotificationContext(context: ContextID, update: 
NotificationContextUpdate): Promise { + const event: UpdateNotificationContextEvent = { + type: EventType.UpdateNotificationContext, + context, + update + } + await this.sendEvent(event) + } + + async findNotificationContexts( + params: FindNotificationContextParams, + queryId?: number + ): Promise { + return await this.ws.send('findNotificationContexts', [params, queryId]) + } + + async findNotifications(params: FindNotificationsParams, queryId?: number): Promise { + return await this.ws.send('findNotifications', [params, queryId]) + } + + async unsubscribeQuery(id: number): Promise { + await this.ws.send('unsubscribeQuery', [id]) + } + + private async sendEvent(event: Event): Promise { + return await this.ws.send('event', [event]) + } + + close() { + void this.ws.close() + } +} + +export async function getWebsocketClient(url: string, token: string): Promise { + return new WsClient(url, token) +} diff --git a/packages/client-ws/src/connection.ts b/packages/client-ws/src/connection.ts new file mode 100644 index 00000000000..679d3f8d064 --- /dev/null +++ b/packages/client-ws/src/connection.ts @@ -0,0 +1,123 @@ +import type { Response, HelloRequest, RequestId, BroadcastEvent, Request } from '@communication/sdk-types' +import { encode, decode } from '@msgpack/msgpack' + +const PING_TIMEOUT = 10000 +const RECONNECT_TIMEOUT = 1000 + +export class WebSocketConnection { + private ws!: WebSocket | Promise + private requests: { [key: RequestId]: { resolve: (response: any) => void; reject: (reason: any) => void } } = {} + private lastId: number = 0 + + private pingInterval: any + private reconnectTimeout: any + + onEvent: (event: BroadcastEvent) => void = () => {} + + constructor( + private url: string, + private readonly binary: boolean = false + ) { + this.connect() + } + + private connect(): void { + const ws = new WebSocket(this.url) + + ws.onmessage = (event: MessageEvent) => { + const response = deserializeResponse(event.data, this.binary) + if (response.id !== undefined) { + const handlers = this.requests[response.id] + if (handlers === undefined) return + delete this.requests[response.id] + if (response.error !== undefined) { + console.error('Websocket error', response.error) + handlers.reject(response.error) + } else { + handlers.resolve(response.result) + } + } else { + if (response.error !== undefined) { + console.error('Websocket error', response.error) + } else { + const event = response.result as BroadcastEvent + this.onEvent(event) + } + } + } + + ws.onclose = () => { + clearInterval(this.pingInterval) + this.handleReconnect() + } + + this.ws = new Promise((resolve, reject) => { + ws.onopen = () => { + const request: HelloRequest = { id: 'hello', method: 'hello', params: [], binary: this.binary } + ws.send(serializeRequest(request, this.binary)) + clearInterval(this.pingInterval) + this.pingInterval = setInterval(() => { + void this.sendRequest({ method: 'ping', params: [] }) + }, PING_TIMEOUT) + resolve(ws) + } + ws.onerror = (event: any) => { + console.error('Websocket error', event) + reject(new Error('Websocket error')) + } + }) + } + + private handleReconnect() { + clearTimeout(this.reconnectTimeout) + this.reconnectTimeout = setTimeout(() => { + this.connect() + }, RECONNECT_TIMEOUT) + } + + async waitWs(): Promise { + if (this.ws instanceof Promise) { + this.ws = await this.ws + } + return this.ws + } + + async send(method: string, params: any[]): Promise { + const id = ++this.lastId + return await this.sendRequest({ id: id.toString(), method, params }) + } + + private async 
sendRequest(request: Request): Promise { + const ws = await this.waitWs() + + return new Promise((resolve, reject) => { + if (request.id !== undefined) { + this.requests[request.id] = { resolve, reject } + } + ws.send(serializeRequest(request, this.binary)) + }) + } + + async close(): Promise { + clearInterval(this.pingInterval) + clearTimeout(this.reconnectTimeout) + const ws = await this.waitWs() + ws.close() + } +} + +function serializeRequest(request: Request, binary: boolean): any { + if (binary) { + return encode(request) + } else { + return JSON.stringify(request) + } +} + +function deserializeResponse(data: any, binary: boolean): Response { + if (binary) { + return decode(data) as Response + } else { + return JSON.parse(data.toString()) + } +} diff --git a/packages/client-ws/src/index.ts b/packages/client-ws/src/index.ts new file mode 100644 index 00000000000..83dae7638cc --- /dev/null +++ b/packages/client-ws/src/index.ts @@ -0,0 +1 @@ +export * from './client' diff --git a/packages/client-ws/tsconfig.json b/packages/client-ws/tsconfig.json new file mode 100644 index 00000000000..3ae07cd3fa2 --- /dev/null +++ b/packages/client-ws/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "jsx": "react-jsx", + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts new file mode 100644 index 00000000000..dce6d468e9d --- /dev/null +++ b/packages/sdk-types/src/client.ts @@ -0,0 +1,45 @@ +import type { + CardID, + ContextID, + FindNotificationContextParams, + FindNotificationsParams, + Message, + MessageID, + NotificationContext, + NotificationContextUpdate, + RichText, + SocialID, + Notification +} from '@communication/types' +import type { FindMessagesParams } from '@communication/types' + +import type { BroadcastEvent } from './event.ts' + +export interface Client { + createMessage(card: CardID, content: RichText, creator: SocialID): Promise + removeMessage(id: MessageID): Promise + createPatch(message: MessageID, content: RichText, creator: SocialID): Promise + + createReaction(message: MessageID, reaction: string, creator: SocialID): Promise + removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise + + createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise + removeAttachment(message: MessageID, card: CardID): Promise + + createNotification(message: MessageID, context: ContextID): Promise + removeNotification(message: MessageID, context: ContextID): Promise + + createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise + removeNotificationContext(context: ContextID): Promise + updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise + + onEvent(event: BroadcastEvent): void + + findMessages(params: FindMessagesParams, queryId?: number): Promise + findNotificationContexts(params: FindNotificationContextParams, queryId?: number): Promise + findNotifications(params: FindNotificationsParams, queryId?: number): Promise + + unsubscribeQuery(id: number): Promise + close(): void +} + diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 084f5c7e901..38c4587b9ec 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -1,3 +1,4 @@ export * from './db' export * from './event' export * from './ws' +export * from './client' \ No newline at end of file From 805dc642a238fca7ddcec578fd2377e67672785f Mon 
Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 17:06:20 +0400 Subject: [PATCH 016/636] Init sqlite wasm client (#8) * Init client sqlite-wasm --- bun.lockb | Bin 102304 -> 102896 bytes packages/client-sqlite/package.json | 19 ++++ packages/client-sqlite/src/client.ts | 157 +++++++++++++++++++++++++++ packages/client-sqlite/src/index.ts | 2 + packages/client-sqlite/tsconfig.json | 9 ++ 5 files changed, 187 insertions(+) create mode 100644 packages/client-sqlite/package.json create mode 100644 packages/client-sqlite/src/client.ts create mode 100644 packages/client-sqlite/src/index.ts create mode 100644 packages/client-sqlite/tsconfig.json diff --git a/bun.lockb b/bun.lockb index 3bdd61c26d229dbfe9b10bd7edc768e618bbe2ed..ecf187af96e4354f27cb4a87c734623b2bd2eedf 100755 GIT binary patch delta 14158 zcmeHNcVJY-*1xl4!`?sw3F(`dLPB)R89;0x+(sH~`Nlq4??0DKi_Z_rBTylT=e)#amgIt5flewou* zn^4gp{i~ZK^#cDrD9NXKYJ3MM*=q&$8HvOfQ0NKT0oot5q9LKYvZh2zwP=dZLf(Vq z<<&LyPD$FX^D`@|%M#|+O0!{|@~88B7SuEywuxI0Eo$y*@KoK*%HqaGXPGp|>6%B) zzaL7Z*8)9irn6ygV?||y#TQ40!^ZF&W750Cu(&;uMc=XZ>>AUr8V{BhbqXa z^$s6ZKF~cG0)Gg~oF#M1NuhzNlcXZZslr*PjVf&C%oO7|k2a#DdscBn0-9L<19;MU z1w(+E+<|^tGm2-ct(G=|4~2XgFE#}=Wy2-}?j%ssw(52NYLcXDz@G*sd33<#`G+8< za)}sg>g1c?$yqglS|?{WB)E#JDy8E3^4jA124^Yc)XB9$8f^iE-6nTTu;vXKuwr;` z4-Apwlc3~;=nzRl-R^tABk7(GN)0o-=>a|x{0TIK@?Ww_Qa?~{&WfRA7y&<6Bl#py^7mt)lwaknFL$O~45|W@ zBTvjMZfLB-sMR~?HdM@XNgL2g8vf}~T0^#j_R5AJT`$n!teWSnm!v^xG4<#)T8zfJ z-$Bo)2Ooi_3K30>jTO%NVs7&YntVSxOU)Vr3ZJ^mFab!b5T)RA_YhFxM-0%yVg)Gq zArF+~naEem>w2M}XaaaQXcFiQQ0mz)P@)iZEr0Pe94m z-0IqjT1?mdI{&h+eYZ{z+BJtIWNP~L#q$$NYRX(Cu0{$IsnJ#Iq}8jRL+eZ^s2A*3 zRa7|>%1WdW+F&>fyV@|nO_YLf~U?WfReY$ zYiiZb`|5gAP@Xz;9_49@o&lwfeGEzw@)Po@L3N)9jVb2@Z`E1^!)4_w7z74Cs!T- zPj;eVkIEHcys51fNYKzlf~USc-DB%x|1-S&+C2@M{`P?~vh@{9oW*iHf4tA<3uCMK z(dYAca)!(uzKT4=BuVKgA@h7+yUE{!@Al1Qi+Pu?!v4V>ehT}MyZjWB&CGZE<+3W? 
z1<@ni(MOR#F-sC$;WLKL={cOmB!V&|Gr`;$blVH6V+<%)3GrxeI(Alq|eC)GiOfcw!<@Kfo;lH(KM+t2cO; zRgup@HV86Q7h;zOVk%ODx^w1dm*<0{K`^P^*$WQQOdUX1dmyf?qo zKUrQ0Y08BXdvwl&cl5E#-dOZ7TVR`7HU=D(S3NA>1y1)j^4kcJ>%|MJw_*kkJ^Z5iR${6enlH z3263h;4oD*m&k{}(Kvdjk>QWUgSxAAX(TwU91Y9EI;Xkf7&z)Yy6bN@CkNB;E)L3) zXCq~Y+(!+`&D_ynVF$RYzaralGPqu|4F@&zF39#mHWo6(9vorkt^ta9DHf<3d5&+E zxeQC&a5eQMQq$Dbs4z(yuco#jHI!2FS4dGi)mg|C?ub>`Lhg!Ho`roXf^@SF*y^ z@izQ^#JiFeQ?D34*p|x*xXY%PdonE4yrVsp@32|Sha_ws8W$y7VkF51b3N46<2ZMv zD5l_8zB?sX9ua#X1hKF*b4RMe-sY}U#k>+0{4Utr4X&5UCHrC-PgB@>?n+bS;DOrW zrVZnYcuA^8MH=_ZW!!8mDD@ZeJ_dJD^ZEoys=bi68(gEov3s~HLy@~tf`Cdx?`a8E=efKH9EBGKF4WFm4CD>#ZatG-lX88&ns=XMR;aFhw zY|N0U;PeJ!Xtuem-MkaY6yA<)_A~CtQ{*I^5Xp)} zTM+j6wmb!EX%~Kva!0-*hoW_Qo#@n7aHCZ>cw@EJ29oy13EVM+W`t{qA`ipzq0ef# zaXC2Z1ZFkd-vMq2IHtC&TaM8KOfESeoaQD3gQX4}SyAWZ=@{7po*V@yVp@I&Zluc5 z`j&$ZXl*2JF}NEqhyVaIvf7{%1Th@TykYsN6@iz+z?6L_qh zCy(vP0H)gL06wkDQUu)Y7H1)N#al36cj>IcvXFf5)U82Ui-L+d=jJI>O+)rapF-QqTPTo z8mm&uAFK03P278YkS|Fwf7FL)H=Z*-NUd}VILf_A&C@7(V5%;krpt+v-gKSL0HtRJ z;K3glAJjy-PJo`@L#dW>Rr_*Ec4p~vqB2mY^F%4XUgwFDokmHTt%{IP73Qj(B>fg8 zmo3oqFQsJY7F|vhZwk^Gxwh@r{z6qLh z9wZLWrIbo<0Z98{fcUKdJw!>q4IufW06o7&NpCwqweQgBGobW5n zhVgsI^Z`BtsNgYx3Vs2Q;Zp!Te}q!`Z`4v6jX>HPLX!K?k4ncGiT|I{>fwii-k=e> z0iu+Q)On(2K6OSC&z=;l&Wizh&hMer)dXGtw`fn~JCIK*gLOTk)UsTiCrU2NV-jzj zr~x$uOOc1f{a0DO0CjJf}E@-&|}_TYs~jb19`sG#zrPrC6t>I(6!_ z9F(5lqExOz&;Jj!Nv&Xxu1J(BuGV>?l&rxIb)f;2^5=n41q*a~3n>0e3w62(l=Rfz z>6G%__#yY-L5xZAAUXCbrWSveo7%kiY0rhB; zu6QY>o@~+OL`m;qP!es`=_8=j(8o!rQ%^0aZi-ji7z_T~8UNfF)rk6YXZ&+#ykyJN z)(#(iIU!2x)}K40wz2%VGty4^U)>pv!^nSSXKW2Goijyd(z#ye(2wqT`yC}lEoZU# zn&=@L*AUWM+uMIOncHy`l<|hSOU=BlxAxboqSnP7hZ%djb!sPclxUn7dGV_g`Hq(@ z=B<74*Bz8tNF}_DKde5|A0;8(LQ;PE6^r@d82l}YO37`p+TVv>;!|I}N&Xx!A6A@J z&bM~Fx

>>)+b@)H&bQ+SlLpr->sZz;g~D9XfH)D>Oal0g}=5*IqL;0m&!;z60oC5Rff8 zb({g3i0K9)Dbg8>Dx}9lmr+9~O%FAlk}~i%3GjI8GB2cCRSE3qGJ16jM4B34(PeaQ zPDYyQ!q66~2I&}2hjOaSOIM_KkQD$uJ#-nJkM0JjGWr&b|57B?hG%>aan{VHO{H>l zzS;?F2W|(J0)GKMK;DPI0pJth5O5ec0vrWC1>OMm0WSjNU4I||2n2$}PES@-j1MaG zIww5~P?HdB^oMQrzr~ci~>dg z*8v5<5I_Mk0DJ*$qEA&4K4Q|JkJPu+=Rltab^uQUG~Rar3xP#|8=$i^tvYuDD*?Jw zv>JE_*bF=fJOI$)zZ#(Tp#UHlC9?oCNBD2A~co1L)JqRA34)8Mq!O1cm~6Kt9j{tN`dmVgtFq7N`U2MV-w0OdX13 z9zZXCr=UmQIi>?6fnflBc~}dq2kr-4(3=a41d4#+kZl1T0v-T10L74x0mh1>G7FkH z8OcW=ei+yYYy&m{n}JT?46p!j0yMWmf&Ku^sR6)#$nOS7W?&uoHGrGu5uV|GG*zNa z-Px{Nz1WkU(d3tAU>R^bumm8VE})YHo|asZ;mZazMIs#rJI3F|+E zb3h>(qa>P0!MFprN4o*=4iqvIZRyZ>N9^-sAz>$AAp{m?9#Ed1kdS%I#Pod0!z4_7 z*g(0TNxRxGU*z{?{+=mh{a!Ju4+}{*E=L`T3i0#G@iCd$wB$sa0~&i!s3**ZcuwqR zjh%OonnM}Vo8kc!wI-rojDG^lRwH($puqb4UasKQERwOq2!xhGLwiTP7p7879 zwscgG=fylGs42adZ*>_{;@cj|rwW8HV zYrYWqfhhI8SRRH}8&?JcdhdMhmb!je7HTyv9KPst z{Lx{pW#6gVDT(P=8HH~Usx@vbE@=DFZXP^hI}|VsiRhwc#%f$)%v_UjPgJKL+I4Gk zqFSG(z@~~PP#tR(4@81=h@-@%ihsd6n=X=q!Oa#Ug4qpLVmte z>8Hg`D6sp*c~Tc51Qtww-l8ak1;cseAuL|rf}N3~-67V8s7h~LAmvu$!eCy&;rj-^ zU?J1g4VZ2hV?xnSP1yBC7Z*yiFUN&&AxZ>uGE>H>?L7~KPP2>4j*#1+=8#=XenPr6o3{Iu?8)oJS5p;eo1T!7rz-#sXP zsk5Xv%gsqkBn{(!?c^7}G2egaR4j^8m|&2#@U1*Z{3eKqD2%>wgL1-*ljqN_EdEJVPbGVP5u!Q@ONeoo(qmG}_& z>4U8K{lapq6=3SB8=?=YF*p~LhKNV|qIr|WCw(abG;2e7-Y zupM@F3zs*-6dfn7iNSyy7x7-`+;-EePaixKs|`37ggh}OhJ}o}@(>Vx)kDDV=|`4m zi(%`nOX9RWZ&t&T=g;=qUg59pKnSztq9zvmsd1q-<@K>2HWyS(@u0b;2DWi!_3WDA zS-vlA?KH7;Ev|68t{p|K#vRwP&ZW;jb?;s;4@ML>-lSP87$3yet-iSdBZZ)){A3-F{QxXapZ^;&?6jL_7tB4CBJA=a*4$ z+~C!7r>c-b6VSL?yMECi^RdpKkE-#QK{JXai*xZTILrs5Mu(;z^U9mvJA5wQ#Av^= zK_e_#JfDaaabU6-IS5@h?(-HMoOiam_U{A9mgb{E(L{aFTdnte-7a)2oE&i?0hzzq z&#%O|L{@InYD!^Gh;I`y0}TV3XLjs@OYDpt6w__6bD5*kuHqW3DaOU#F7N1P0`fMU zHnFHo?NB9pCbLj+oefLKWj4c%JHAPbi)pVH|3H%*zDP;KEnxAajip%i+rDn^z<*9T z-ExGsVDbS@5>v#fG_+fYr`t5@;I46%rZg66O2`)1q+!?26l2hHBTp}4n9~fX1q@@l5naP1Wr^%` zEPIzZFvGaKyzqzj9ZlP7Cs9<==7x7Dyt2{?dwlz`an(<+#UaO*h_f$pLLK7oWa$p^ zK{{6MHKIob9AsQfp1)whyPFRD)f*nffMH3t&k_n0!i<~Gd`olmqn%$zsS4>drnYNE zV+KauxZ^zS{awj%l>%0aK%EM_{TN@u`QE5h<&Wk0XG;ouS?fG{$*ZG z*%4Hg0ykg{St6RCz*dU24zzl+*hlKd#p@AohsifDeE(K`$-w}$3pN<)dxdo{PWvh1 z1aVp70T8Qk;reg!jkooDDssAs4aB(6GehhrJzZVfEZI@98GB~BagBSPM&$Xh_G|+f-3zr#U+2T3a)3;)FKzw@%r)gf{(9wU)q1r-e+!DX- z<<4<~Y#vfHn~Jw#dRx1Ew`{t)@$b=tOJ8cEuliDZaWlU%R-!M&%7?i)bl_P)-%;>* zWjEB9CRT{!EzC!Jmd*N`@Hxto!;*bd!?lLqo|*mzTi|nUh8U5?=2J;1N}sBK-?{df77fbS3fRrb`n@YN5lxdS|2RNTkM?tNwUNAmn_dc`wW zh^KQ{UUWlUWksViue7GBYHoE!X>nskO?6UfWref4F+t1Q^=S@UT_kU#FGqNei2M~S z2Jf@RhX%2!g(Z8%(nlpc3s&wr*uti<)%Zf&`_v$>ZSx5_)1}XlUNBQKgiYZ4WP!y1@%(`J`3~z1K0pGB>(^b delta 14033 zcmeHOd0-Vq(w~0hC7D1VC%H*ThzW$;Auo`SNl=ai5(y9xK_tf`1QK$*a74regq0_9 z#6}yDn;1M>7D?7xcj+(eSbJu?>ALbU0q#W zeaxgicaP`hO`dB~`rqhyHRbihmMN3mpB_nQJiF-gCuc{Ox76Pil$f8eaQMEteyf>A zmsiK6=(gD&k})NkkEXQ`hrl1`bk)_)Y0n8qiCRZOFA)4m$f-FMRSnG)hg`_X zd#|3Rd_a2w1VIo~J1ghbK%w46)oEHJtcBb4_DKv&)$dIW@8-MB=qT*lTP52W}`D)L|8f z-WnJp#T$X-$hlBW3kFt!4+9Pdl4DjhUq!OTfPWH5&u@>=w0^+1(Z*=t4d7v-eHVDL z`*oyrSSD55g{%s~19Bt|)~Q9cK;kQ#E2^C6fs;yp1*}rM7Xayby|cN-nQ6eFE9$Y1udf% zJOrL9L^idw)H<6}dAdi)4TRJ~;Tr=qEd~6;K$0&4 zCId@>slXH$amVK^_hw*=5N`JwqkG;h;2Z!HFr*9Z6k*3 zCdIE++Se(3&>;i%J&^R9E9NIxHdMD(wzg23Xf3UcPMW>G$dNvr0{TF&zP8?(TwSR( zsnPt-V6%K%`}83);$wl-+H@eb_GKV7(i2J23y1;UFn^Awoz0c$uLHEMTk)?+1Yn|U3lJA{^tRWS_>@1C1t!;8WU{adAF<7L4PmdZQB4A#K) za6|-2OtJLg{^1VW$36J1;rVt4I(rEB?VZckay`P(4}mX$k~i-Ncj$iTEMz$xKoNT} zxUu{Qbk}n|($N0|ejuKrsxXKC4LEYei~9#S^x+su6n+E`4Os^cX$m)qI}VOw(T#Tm zJM@7Vvl*r)BFUFW`SCYCB@ z_=9kr1xLYSye!bc25@(@q2GceoN8)xggESf2RD|N1mx;or)6B{SUKDSz2e>=l(Ct|1i=|n_=4!5AVd(218wVMZ 
z4Kc8D_Z5bHJSLopyu?4(o`I=qlzI0A?xvY{QIVQ9!Mtn2-3YqV_u!6PG>0AY>DT- z>}D@721jPkb|FrqwK(_L@QB8lEK=z8^61FXOdw%6~lc7=Iev|cDan+ zSPo9j8?g@kWpD;e*my^xLl2LY=Dm1XqQhPaP6p>*+#!(=rmt`(12H5{Iwk|LlozES z3m}^y9YTn+cxQ^iTDYES==b-Ny^fp-c6h%7ZX)zB8OmuP7n>4lC9)7Mwt*uvI;Dv1 zwRm2fmd|3jJI!D-c_)4!;(EGad-n=noSx4DxjWskAJ;JR@}}o9`Tlfo`*v_aCP$Ob z(geIqz+QKA%6Oc+GYs2@3H)$Iz8;d;m4BE`uI5FV2HVKpnTCBDOaycpTn?_6Wsvpb zMOg-`|Gj zhW>hr>})yj1#;bC=mXQ_$kO<+P=|g4IF$}ajz55-A%IjtQrXac^3ojUdNDXkG4x%y zgRSE397BH%vSCt&NCb0zuwkE=fxz*V@wxU}aEGyospJsus22EvTI`uJez1gEhJusN zXd1i)T!Cqk2E-wyDM$MGF78;8Lw6vj;Vx`axf*a}#q`{MA2jsUuBE!(VV4uvuC}#DM;HW}0InQBl!)*pXf)#cr zFDfwfAHkCqjdC7KduM@x$y6^i*bZJ)Xy~V~|5GkuE^mRaW6dzUjXT+u2rG-@MZ;)V zxQ7{fFeVW-v@x|#0Y{C%&_?)IgBu2pnVua3CtE<1aIc|On2-+M1~{_PH8__+K#`Jz zb04@dCP%&69ab*%@n~=pyPjHe9SufC_`_;0!7WmDuZm^ z&E2C6Hi>tRGVC1UI=*sLuI`TwW(jn#W)(X0N5NHsL%2fBeMOPMZsB^dVf*J*ytp{u zUV?pRF%1E`z}-yKL@WY@cFDP#P3J|U4Lf44CJx%>9pRm$4Xif$7(=hY-jtj&2MZ=K zH)P*JMiw9&gMomKl^H{EcnF*v!o+eGOyr-Ak52Cfeyoxbl2)1G38}8} zAd*i2X}n}YNE?;921L@yAi6F_l3!~|C6bk?Ad+9FtL#|;Ud1JOlD`~igrfn@lP3J(FjKre&nBBbZ9fT-Lt5M7r;>dHTZXa@Kj z$VUDDK5nSs2Ou(h21M7dK`Q^LSxRCw?t4H;a$oxSC8YkpoPr1B{wU}R?4t}2(rvWj z38`!PDgM7f8ZD*)#*?o}rT2GjpfYzk?1ASwcup#Vl^!9vJ5=$6)FFoA2^s(NnvgSSs3F-D)#hb_nO-?XptZ9%^9n%$7Dy&vG14!59kjl+e&;K2g z-Ylg@NXb*r)Qp82)4e%BYDTkCms-JJUjN)a-vB@P=e0ny`HzQoGZ)|a;uPOo66GJSZU^?@{)Z;{?&>f9ly?LehB_AR z{-)DI-|hEWLWa+y9hQAVN ztB416SF#wY4cAy7F|9j`6_H*nuP<%X9tPbES_)bQdIL&-0lf)&M=bJUA-*r+_D`UF zpa8MOiv_Y!@wgXjsGwKE$3f%;)rwRx_p`Ua&~lI)bSr2Dh_;(|f&^$Ch&I@>L3JS7 zl-Gjjh3YKmGZ6K~mtvVWt7>~6w=aVzn)^X5pjn_gP(7#tGy~)WT@R`NRf6a%#uQLF zXd-A5XacAVG!9e(Dg`07+LfTIK!qUMXVNF4Y>9dDRB#RSH%EgF2RN2=XODRLOsBOU*WNraE`VJ@ zeMx5q%)hl){Iz*jc3fO}j3*7O09c`xh5|nv%3t1HT6-YIGfO?8Skser{lDm}*&J9F z<4Mt>G8CC$VBWZIXU!|TdTos7a!7hXvL6@@taB7pp4>NLWsK+cIEaEEqAn#oIbqhC z)1_-&F`maE=?TdXnn=TNjG*BSx7rVT_J4MC+TyRg*|hW&{L9k(L{%WmjIbu;O_gWP zRE#)=S!LQ_D5N1^VkapKhXsFFIN;j+hrbwOUV}nTO8OvZY!T-JSy<$gP{5?wKI1K8 zS9x;I2R5djYp*hq6~qR_eW6TPN2OfPly7^)*6VW{o0gT5L3LG$#X&5L-6-w`L|8|z z!aOJSi%6We7J6wZgGq0LI0}V9G|f`0dfO*Ql}6p%X=Cu2T+*y#V6HK<%coztWSLns zBL#o`*(;)gSy;Apj%-N!=n3JAPi%(*TAPxNv4QTUV19KxE_lmBe>cpcS@h&bo2UzB zq0C1t3uYxOQoI(7z*wi^R&Ci;{^O#f<wc^>{ER(g0&k2@__z;#k$T~APF?`gq=+AefZI*-9slqwm4*sTO*0>0> z!t9g`Onzct6e{c#Nzp9K_GUM6AcSSGKB6O(g^8Y_EIh(G%DBJhM-PqMT}`1+N9Zw| zrHd;=(HiS4p)}b1Nr+uTvoXVbwQgVw^#ff3T&204~M!_ zum~0;t`BF?wv#Q3~HFD4rhMd{aBN)92;KZNLK+>t|r1dmRJycV%?B^-c$`W z3Dv9+H%Fk&*2%=u)B{_?4j&AH0{WX2j)-GWh_sF@RyOq;TF|{$j7L8FX zI0jj#Y1W~==O?Fq?Q`1`t6?HN1$m1FOZX#uR1z^cxS2UHgJY<6>-b;Co`VN#W7C&k z80T+?i?kRt?z?dDNHiM|VU72n&kTMnTznbL`m!6uVx*1j(sH5fcJV+zmSp>ai8V27 zLWFeyGTpd0{>GdA!_C%a%6vZ`16P&Dms(R9V%70=q}b3G6QOlhck;fo_6>jiG!e!q z84wYf!-K3-k1MLz?tJEW=Lw~N(PbT=+&A#<VB`I-prT z{ml9A)>Qn{v|tAPWFN6P7EV}4H9fA$m~h1N;XgtHvlcnAK)gnl>V#Ju2FU;Rg?y#0 zN3v*$!+>k;E7ykH=uy*dxV?C^atY2}8VvD991FMNq`Fi_S+_b#7Kps;+-&BB>{Y9G zy2|;9xBJ241ko!VT@fbw$D0FpemsO3VjDqEQE>&Hg@}cK29X}t+t^Km zQZlkrva}GfDGv&?pTOq0ec_p!Bg+FmiM26WsxqZl# z7ca4~T}$lGSIgM0)zUguI_Lg|0c-XQtBYl&;>rZ({+aE_U$*cwpUx$Uv573S%{u3| z@8E+|Ufgy3uZgn5G1{!-uJ^pOu65I_S8MSkCuLAddZxKgx_I{x`-{7W|JXDSL{k@^ zB(l3B%KFPCbEfN&^WXK_RvRQ&E2Q2Ov86wHX{&e|5MdpO{qD|Dx&F`Jf6&IzNi^oP zQt=rSvaK_;)d!b7vEz=zJ|2vwAgpGZb;`DH`Wp8Owh1>~c=E;okv{G65r&1u{_fNaTt zgo&Nxn{rXLY33fZB8r+AMa{TuqokK&V=61P$?7uLLeV!3Bh0cQVeO(%Z|`reJ$KtlT4bsD*f%7K=xn%?F18RDq9PM8RxAWWOdlfmS|9sX z#WlY*{RdSF(S2T|XF*REjCIwk$l zwhzY7*>x9oG3nSWV_<8tyFyvH&>fhLtHn?UT5X+gp1)whpEtjCt1sL`e_@_=WQ!&! 
zuoAHXU^_od-0wgSTj!#uy|E{)|Aa4JCi@wt{T5M}1N~LPHwWew7a*ow;KYMD2+Z-T z#Pd09OoVlk`i1pRUH8<{o>&1ctvSLvay@nB^V_?{Jk-cujE0Oxtg>T%X zCKmLmb-??B(z6e}y74`H4zi}4b)qDf;dNa^VRd7=IGPJX)=BO^B(*H*u_NYs8ykS0 zqRTo9o-}gR+Q`w_v_+@JqY7m~E|YABC?k7MiP^MhS|`QVf7X6*WAoFyQHr`Amr}p9 zl#lq73M=6QqVLcPoD}{n#@GD17|XX$^Y}R?k|i%396u2MS5+)a!uJ%KTD<9R!nhtB zDo(Fr5n^XPix+R^vowDeB{i1j4EhsW(DSDhfrHl3#Xlr#8kVm1e*X*#uF8J@YQZK8b@K6^d7ilzC4s*k!ktMdUM|nIlWPuFXi}*$^QkckIp9m diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json new file mode 100644 index 00000000000..55565de3c5e --- /dev/null +++ b/packages/client-sqlite/package.json @@ -0,0 +1,19 @@ +{ + "name": "@communication/client-sqlite", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "dependencies": { + "@communication/types": "workspace:*", + "@communication/sdk-types": "workspace:*", + "@communication/sqlite-wasm": "workspace:*", + "fast-equals": "^5.0.1" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts new file mode 100644 index 00000000000..fb0edd5c662 --- /dev/null +++ b/packages/client-sqlite/src/client.ts @@ -0,0 +1,157 @@ +import { + type CardID, + type Message, + type FindMessagesParams, + type MessageID, + type RichText, + type SocialID, + type ContextID, + type NotificationContextUpdate, + type FindNotificationContextParams, + type NotificationContext, + type FindNotificationsParams, + type Notification +} from '@communication/types' +import { + type Client, + type MessageCreatedEvent, + type DbAdapter, + EventType, + type BroadcastEvent, +} from '@communication/sdk-types' +import { createDbAdapter as createSqliteDbAdapter } from '@communication/sqlite-wasm' + +class DbClient implements Client { + onEvent: (event: BroadcastEvent) => void = () => {} + + constructor( + private readonly db: DbAdapter, + private readonly workspace: string, + private readonly personWorkspace: string + ) {} + + async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { + const created = new Date() + const id = await this.db.createMessage(content, creator, created) + await this.db.placeMessage(id, card, this.workspace) + + const event: MessageCreatedEvent = { + type: EventType.MessageCreated, + card, + message: { + id, + content, + creator, + created, + edited: created, + reactions: [], + attachments: [] + } + } + + this.onEvent(event) + + return id + } + + async removeMessage(message: MessageID) { + await this.db.removeMessage(message) + this.onEvent({ type: EventType.MessageRemoved, message }) + } + + async createPatch(message: MessageID, content: RichText, creator: SocialID): Promise { + const created = new Date() + await this.db.createPatch(message, content, creator, created) + this.onEvent({ type: EventType.PatchCreated, patch: { message, content, creator, created } }) + } + + async createReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + const created = new Date() + await this.db.createReaction(message, reaction, creator, created) + this.onEvent({ type: EventType.ReactionCreated, reaction: { message, reaction, creator, created } }) + } + + async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + await this.db.removeReaction(message, reaction, creator) + this.onEvent({ type: EventType.ReactionRemoved, message, reaction, creator }) + } + + async 
createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise { + const created = new Date() + await this.db.createAttachment(message, card, creator, created) + this.onEvent({ type: EventType.AttachmentCreated, attachment: { message, card, creator, created } }) + } + + async removeAttachment(message: MessageID, card: CardID): Promise { + await this.db.removeAttachment(message, card) + this.onEvent({ type: EventType.AttachmentRemoved, message, card }) + } + + async findMessages(params: FindMessagesParams): Promise { + const rawMessages = await this.db.findMessages(this.workspace, params) + return rawMessages.map(this.toMessage) + } + + async findMessage(params: FindMessagesParams): Promise { + return (await this.findMessages({ ...params, limit: 1 }))[0] + } + + toMessage(raw: any): Message { + return { + id: raw.id, + content: raw.content, + creator: raw.creator, + created: new Date(raw.created), + edited: new Date(raw.edited), + reactions: raw.reactions, + attachments: raw.attachments + } + } + + async createNotification(message: MessageID, context: ContextID): Promise { + await this.db.createNotification(message, context) + } + + async removeNotification(message: MessageID, context: ContextID): Promise { + await this.db.removeNotification(message, context) + } + + async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { + return await this.db.createContext(this.workspace, card, this.personWorkspace, lastView, lastUpdate) + } + + async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { + await this.db.updateContext(context, update) + } + + async removeNotificationContext(context: ContextID): Promise { + await this.db.removeContext(context) + } + + async findNotificationContexts(params: FindNotificationContextParams): Promise { + //TODO: should we filter by workspace? + return await this.db.findContexts(params, [this.personWorkspace]) + } + + async findNotifications(params: FindNotificationsParams): Promise { + //TODO: should we filter by workspace? 
+ return await this.db.findNotifications(params, this.personWorkspace) + } + + async unsubscribeQuery() { + //ignore + } + + close() { + this.db.close() + } +} + +export async function getSqliteClient( + workspace: string, + personWorkspace: string, + dbUrl = 'file:communication.sqlite3?vfs=opfs' +): Promise { + const db = await createSqliteDbAdapter(dbUrl) + return new DbClient(db, workspace, personWorkspace) +} diff --git a/packages/client-sqlite/src/index.ts b/packages/client-sqlite/src/index.ts new file mode 100644 index 00000000000..666c7eef50d --- /dev/null +++ b/packages/client-sqlite/src/index.ts @@ -0,0 +1,2 @@ +export { type Client } from '@communication/sdk-types' +export * from './client' diff --git a/packages/client-sqlite/tsconfig.json b/packages/client-sqlite/tsconfig.json new file mode 100644 index 00000000000..3ae07cd3fa2 --- /dev/null +++ b/packages/client-sqlite/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "jsx": "react-jsx", + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From 4f0f00f465f4cdaff36242b801a8675ae6b31ae2 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 17:15:39 +0400 Subject: [PATCH 017/636] Init query (#9) --- bun.lockb | Bin 102896 -> 104400 bytes packages/client-query/package.json | 19 ++ packages/client-query/src/index.ts | 26 +++ packages/client-query/src/query.ts | 65 +++++++ packages/client-query/tsconfig.json | 9 + packages/client-sqlite/src/client.ts | 2 +- packages/query/package.json | 19 ++ packages/query/src/index.ts | 1 + packages/query/src/lq.ts | 150 ++++++++++++++++ packages/query/src/messages/query.ts | 202 ++++++++++++++++++++++ packages/query/src/notifications/query.ts | 129 ++++++++++++++ packages/query/src/query.ts | 198 +++++++++++++++++++++ packages/query/src/result.ts | 92 ++++++++++ packages/query/src/types.ts | 33 ++++ packages/query/src/window.ts | 34 ++++ packages/query/tsconfig.json | 9 + packages/sdk-types/src/client.ts | 1 - packages/sdk-types/src/index.ts | 3 +- packages/sdk-types/src/query.ts | 6 + 19 files changed, 995 insertions(+), 3 deletions(-) create mode 100644 packages/client-query/package.json create mode 100644 packages/client-query/src/index.ts create mode 100644 packages/client-query/src/query.ts create mode 100644 packages/client-query/tsconfig.json create mode 100644 packages/query/package.json create mode 100644 packages/query/src/index.ts create mode 100644 packages/query/src/lq.ts create mode 100644 packages/query/src/messages/query.ts create mode 100644 packages/query/src/notifications/query.ts create mode 100644 packages/query/src/query.ts create mode 100644 packages/query/src/result.ts create mode 100644 packages/query/src/types.ts create mode 100644 packages/query/src/window.ts create mode 100644 packages/query/tsconfig.json create mode 100644 packages/sdk-types/src/query.ts diff --git a/bun.lockb b/bun.lockb index ecf187af96e4354f27cb4a87c734623b2bd2eedf..1a93241bbc237c225b0dc6bb8706a74378d7120f 100755 GIT binary patch delta 15445 zcmeHOd3;UR_CNc`P3}n~k;r^Q2D8XaZb;mdM3KZ8^HfA!M3aczm}PT|p_!-xJ_>Q_%+i}JLu^jG}8>kJ`&yx-4zfBin6p7v{j!r>ms6tWuIjcu1OjKWHP!OPo{7NPCwd-(b*JK^2sb zbvn!Ai(OJL@c!T*G347kb-uBeZqEYh*AoiqP-qC+7_$-L8Ry$Gi5v!N?p{PB&~y-8hi(>QG@=CxGcBqSb$igs(V7AD;_?M z?*^XK!UH7f79-oJxVd8xBvyffua zRteOjNQ^6VRZhmRRXC@(ipR~6uE0wg|LrllBR4_)Ga-1=sNiyzPIXpD(j0h7J<0)v zv+fq?8TB9@JT-`Ps;n$_R_x@-9-+f7qqF4Ja!|z7{U&ArX}!QC2{Css2PJ-WOFb>l zf>Iow0VVk&lxy`3y%JE0`y-5R@JOgS+e$b4GkQW!*icNvG0jNIQWwUz=&_2HZS`Ki 
zv|Sr|=Kl-WUNCft--R7BZnStyeYbAGgMZWL81KgB|1?;1|Rn78;f^&dMmwU z8zmZ4X@s0e(nPPy5=#4KDAGlxz(i~L`T!~kOvAtPW@lbD;;Fr{1H{$-( zyc5>Ny_8vJW)=bHA9 zZd~;sczPNBbl9<1OHL}Yx6wB#q{+)gVjq?%HuhmPdwTa}hx5c}&Sv;*qi1F14<8T@&Et; diff --git a/packages/client-query/package.json b/packages/client-query/package.json new file mode 100644 index 00000000000..b5582f16cd6 --- /dev/null +++ b/packages/client-query/package.json @@ -0,0 +1,19 @@ +{ + "name": "@communication/client-query", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "dependencies": { + "@communication/types": "workspace:*", + "@communication/sdk-types": "workspace:*", + "@communication/query": "workspace:*", + "fast-equals": "^5.0.1" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts new file mode 100644 index 00000000000..4a1c9697018 --- /dev/null +++ b/packages/client-query/src/index.ts @@ -0,0 +1,26 @@ +import { LiveQueries } from '@communication/query' +import type { Client } from '@communication/sdk-types' + +import { MessagesQuery, NotificationsQuery } from './query' + +let lq: LiveQueries + +export function createMessagesQuery(): MessagesQuery { + return new MessagesQuery(lq) +} + +export function createNotificationsQuery(): NotificationsQuery { + return new NotificationsQuery(lq) +} + +export function initLiveQueries(client: Client) { + if (lq != null) { + lq.close() + } + + lq = new LiveQueries(client) + + client.onEvent = (event) => { + void lq.onEvent(event) + } +} diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts new file mode 100644 index 00000000000..bbaad06b38e --- /dev/null +++ b/packages/client-query/src/query.ts @@ -0,0 +1,65 @@ +import { type LiveQueries } from '@communication/query' +import type { MessagesQueryCallback, NotificationsQueryCallback, QueryCallback } from '@communication/sdk-types' +import { type FindMessagesParams, type FindNotificationsParams } from '@communication/types' +import { deepEqual } from 'fast-equals' + +class BaseQuery
<P extends FindMessagesParams | FindNotificationsParams
, C extends QueryCallback> { + private oldQuery: P | undefined + private oldCallback: QueryCallback | undefined + + constructor(protected readonly lq: LiveQueries) {} + + unsubscribe: () => void = () => {} + + query(params: P, callback: C): boolean { + if (!this.needUpdate(params, callback)) { + return false + } + this.doQuery(params, callback) + return true + } + + private doQuery(query: P, callback: C): void { + this.unsubscribe() + this.oldCallback = callback + this.oldQuery = query + + const { unsubscribe } = this.createQuery(query, callback) + this.unsubscribe = () => { + unsubscribe() + this.oldCallback = undefined + this.oldQuery = undefined + this.unsubscribe = () => {} + } + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + createQuery(params: P, callback: C): { unsubscribe: () => void } { + return { + unsubscribe: () => {} + } + } + + private needUpdate(params: FindMessagesParams, callback: MessagesQueryCallback): boolean { + if (!deepEqual(params, this.oldQuery)) return true + if (!deepEqual(callback.toString(), this.oldCallback?.toString())) return true + return false + } +} + +export class MessagesQuery extends BaseQuery { + override createQuery(params: FindMessagesParams, callback: MessagesQueryCallback): { unsubscribe: () => void } { + return this.lq.queryMessages(params, callback) + } +} + +export class NotificationsQuery extends BaseQuery { + override createQuery( + params: FindNotificationsParams, + callback: NotificationsQueryCallback + ): { + unsubscribe: () => void + } { + return this.lq.queryNotifications(params, callback) + } +} diff --git a/packages/client-query/tsconfig.json b/packages/client-query/tsconfig.json new file mode 100644 index 00000000000..3ae07cd3fa2 --- /dev/null +++ b/packages/client-query/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "jsx": "react-jsx", + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index fb0edd5c662..4b50d2e06ca 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -17,7 +17,7 @@ import { type MessageCreatedEvent, type DbAdapter, EventType, - type BroadcastEvent, + type BroadcastEvent } from '@communication/sdk-types' import { createDbAdapter as createSqliteDbAdapter } from '@communication/sqlite-wasm' diff --git a/packages/query/package.json b/packages/query/package.json new file mode 100644 index 00000000000..f613302134f --- /dev/null +++ b/packages/query/package.json @@ -0,0 +1,19 @@ +{ + "name": "@communication/query", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/crypto-js": "^4.2.2" + }, + "dependencies": { + "@communication/types": "workspace:*", + "@communication/sdk-types": "workspace:*", + "fast-equals": "^5.0.1" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts new file mode 100644 index 00000000000..57ad51bd4b0 --- /dev/null +++ b/packages/query/src/index.ts @@ -0,0 +1 @@ +export * from './lq.ts' diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts new file mode 100644 index 00000000000..d19baf7c8af --- /dev/null +++ b/packages/query/src/lq.ts @@ -0,0 +1,150 @@ +import { type FindMessagesParams, type FindNotificationsParams } from '@communication/types' +import { deepEqual } from 'fast-equals' 
+import type { + Client, + MessagesQueryCallback, + NotificationsQueryCallback, + BroadcastEvent +} from '@communication/sdk-types' + +import type { Query, QueryId } from './types' +import { MessagesQuery } from './messages/query' +import { NotificationQuery } from './notifications/query' + +interface CreateQueryResult { + unsubscribe: () => void +} + +const maxQueriesCache = 10 + +export class LiveQueries { + private readonly client: Client + private readonly queries = new Map() + private readonly unsubscribed = new Set() + private counter: number = 0 + + constructor(client: Client) { + this.client = client + } + + async onEvent(event: BroadcastEvent): Promise { + for (const q of this.queries.values()) { + await q.onEvent(event) + } + } + + queryMessages(params: FindMessagesParams, callback: MessagesQueryCallback): CreateQueryResult { + const query = this.createMessagesQuery(params, callback) + this.queries.set(query.id, query) + + return { + unsubscribe: () => { + this.unsubscribeQuery(query) + } + } + } + + queryNotifications(params: FindNotificationsParams, callback: NotificationsQueryCallback): CreateQueryResult { + const query = this.createNotificationQuery(params, callback) + this.queries.set(query.id, query) + + return { + unsubscribe: () => { + this.unsubscribeQuery(query) + } + } + } + + private createMessagesQuery(params: FindMessagesParams, callback: MessagesQueryCallback): MessagesQuery { + const id = ++this.counter + const exists = this.findMessagesQuery(params) + + if (exists !== undefined) { + if (this.unsubscribed.has(id)) { + this.unsubscribed.delete(id) + exists.setCallback(callback) + return exists + } else { + const result = exists.copyResult() + return new MessagesQuery(this.client, id, params, callback, result) + } + } + + return new MessagesQuery(this.client, id, params, callback) + } + + private createNotificationQuery( + params: FindNotificationsParams, + callback: NotificationsQueryCallback + ): NotificationQuery { + const id = ++this.counter + const exists = this.findNotificationQuery(params) + + if (exists !== undefined) { + if (this.unsubscribed.has(id)) { + this.unsubscribed.delete(id) + exists.setCallback(callback) + return exists + } else { + const result = exists.copyResult() + return new NotificationQuery(this.client, id, params, callback, result) + } + } + + return new NotificationQuery(this.client, id, params, callback) + } + + private findMessagesQuery(params: FindMessagesParams): MessagesQuery | undefined { + for (const query of this.queries.values()) { + if (query instanceof MessagesQuery) { + if (!this.queryCompare(params, query.params)) continue + return query + } + } + } + + private findNotificationQuery(params: FindMessagesParams): NotificationQuery | undefined { + for (const query of this.queries.values()) { + if (query instanceof NotificationQuery) { + if (!this.queryCompare(params, query.params)) continue + return query + } + } + } + + private queryCompare(q1: FindMessagesParams, q2: FindMessagesParams): boolean { + if (Object.keys(q1).length !== Object.keys(q2).length) { + return false + } + return deepEqual(q1, q2) + } + + private removeOldQueries(): void { + const unsubscribed = Array.from(this.unsubscribed) + for (let i = 0; i < this.unsubscribed.size / 2; i++) { + const id = unsubscribed.shift() + if (id === undefined) return + this.unsubscribe(id) + } + } + + private unsubscribe(id: QueryId): void { + const query = this.queries.get(id) + if (query == null) return + void query.unsubscribe() + this.queries.delete(id) + 
this.unsubscribed.delete(id) + } + + private unsubscribeQuery(query: Query): void { + this.unsubscribed.add(query.id) + query.removeCallback() + if (this.unsubscribed.size > maxQueriesCache) { + this.removeOldQueries() + } + } + + close(): void { + this.queries.clear() + } +} diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts new file mode 100644 index 00000000000..794b346c261 --- /dev/null +++ b/packages/query/src/messages/query.ts @@ -0,0 +1,202 @@ +import { + type CardID, + type FindMessagesParams, + type ID, + type Message, + type Patch, + SortOrder +} from '@communication/types' +import { + type AttachmentCreatedEvent, + type MessageCreatedEvent, + type PatchCreatedEvent, + type ReactionCreatedEvent, + EventType, + type BroadcastEvent, + type AttachmentRemovedEvent, + type MessageRemovedEvent, + type ReactionRemovedEvent +} from '@communication/sdk-types' + +import { BaseQuery } from '../query' + +export class MessagesQuery extends BaseQuery { + override async find(params: FindMessagesParams): Promise { + return this.client.findMessages(params, this.id) + } + + override getObjectId(object: Message): ID { + return object.id + } + + override getObjectDate(object: Message): Date { + return object.created + } + + override async onEvent(event: BroadcastEvent): Promise { + switch (event.type) { + case EventType.MessageCreated: + return await this.onCreateMessageEvent(event) + case EventType.MessageRemoved: + return await this.onRemoveMessageEvent(event) + case EventType.PatchCreated: + return await this.onCreatePatchEvent(event) + case EventType.ReactionCreated: + return await this.onCreateReactionEvent(event) + case EventType.ReactionRemoved: + return await this.onRemoveReactionEvent(event) + case EventType.AttachmentCreated: + return await this.onCreateAttachmentEvent(event) + case EventType.AttachmentRemoved: + return await this.onRemoveAttachmentEvent(event) + } + } + + async onCreateMessageEvent(event: MessageCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const message = { + ...event.message, + edited: new Date(event.message.edited), + created: new Date(event.message.created) + } + const exists = this.result.get(message.id) + + if (exists !== undefined) return + if (!this.match(message, event.card)) return + + if (this.result.isTail()) { + if (this.params.sort === SortOrder.Asc) { + this.result.push(message) + } else { + this.result.unshift(message) + } + await this.notify() + } + } + + private match(message: Message, card: CardID): boolean { + if (this.params.id != null && this.params.id !== message.id) { + return false + } + if (this.params.card != null && this.params.card !== card) { + return false + } + return true + } + + private async onCreatePatchEvent(event: PatchCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const patch = { + ...event.patch, + created: new Date(event.patch.created) + } + + const message = this.result.get(patch.message) + + if (message === undefined) return + + if (message.created < patch.created) { + this.result.update(this.applyPatch(message, patch)) + await this.notify() + } + } + + private async onRemoveMessageEvent(event: MessageRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const deleted = this.result.delete(event.message) + + if (deleted !== undefined) { + await this.notify() + } + } + + private async onCreateReactionEvent(event: 
ReactionCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const reaction = { + ...event.reaction, + created: new Date(event.reaction.created) + } + const message = this.result.get(reaction.message) + if (message === undefined) return + + message.reactions.push(reaction) + this.result.update(message) + await this.notify() + } + + private async onRemoveReactionEvent(event: ReactionRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const message = this.result.get(event.message) + if (message === undefined) return + + const reactions = message.reactions.filter((it) => it.reaction !== event.reaction && it.creator !== event.creator) + if (reactions.length === message.reactions.length) return + + const updated = { + ...message, + reactions + } + this.result.update(updated) + await this.notify() + } + + private async onCreateAttachmentEvent(event: AttachmentCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const attachment = { + ...event.attachment, + created: new Date(event.attachment.created) + } + const message = this.result.get(attachment.message) + if (message === undefined) return + + message.attachments.push(attachment) + this.result.update(message) + await this.notify() + } + + private async onRemoveAttachmentEvent(event: AttachmentRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const message = this.result.get(event.message) + if (message === undefined) return + + const attachments = message.attachments.filter((it) => it.card !== event.card) + if (attachments.length === message.attachments.length) return + + const updated = { + ...message, + attachments + } + this.result.update(updated) + await this.notify() + } + + private applyPatch(message: Message, patch: Patch): Message { + return { + ...message, + content: patch.content, + creator: patch.creator, + created: patch.created + } + } +} diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts new file mode 100644 index 00000000000..8e9d39cfe30 --- /dev/null +++ b/packages/query/src/notifications/query.ts @@ -0,0 +1,129 @@ +import { + type FindNotificationsParams, + SortOrder, + type Notification, + type ID, +} from '@communication/types' +import { + type NotificationCreatedEvent, + EventType, + type BroadcastEvent, + type NotificationContextRemovedEvent, + type NotificationRemovedEvent, + type NotificationContextUpdatedEvent, +} from '@communication/sdk-types' + +import {BaseQuery} from '../query.ts'; + +export class NotificationQuery extends BaseQuery { + override async find(params: FindNotificationsParams): Promise { + return this.client.findNotifications(params, this.id) + } + + override getObjectId(object: Notification): ID { + return object.message.id + } + + override getObjectDate(object: Notification): Date { + return object.message.created + } + + override async onEvent(event: BroadcastEvent): Promise { + switch (event.type) { + case EventType.NotificationCreated: + return await this.onCreateNotificationEvent(event) + case EventType.NotificationRemoved: + return await this.onRemoveNotificationEvent(event) + case EventType.NotificationContextUpdated: + return await this.onUpdateNotificationContextEvent(event) + case EventType.NotificationContextRemoved: + return await this.onRemoveNotificationContextEvent(event) + } + } + + async onCreateNotificationEvent(event: 
NotificationCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const exists = this.result.get(event.notification.message.id) + if (exists !== undefined) return + + if (this.params.message != null && this.params.message !== event.notification.message.id) return + if (this.params.context != null && this.params.context !== event.notification.context) return + + if (this.result.isTail()) { + if (this.params.sort === SortOrder.Asc) { + this.result.push(event.notification) + } else { + this.result.unshift(event.notification) + } + await this.notify() + } + } + + + private async onUpdateNotificationContextEvent(event: NotificationContextUpdatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + if (this.params.context != null && this.params.context !== event.context) return + if (event.update.lastView === undefined && event.update.archivedFrom === undefined) return + + const toUpdate = this.params.context === event.context ? + this.result.getResult() + : this.result.getResult().filter(it => it.context === event.context) + if (toUpdate.length === 0) return + + for (const notification of toUpdate) { + this.result.update({ + ...notification, + ...event.update.lastView !== undefined ? { + read: event.update.lastView < notification.message.created + } : {}, + ...event.update.archivedFrom !== undefined ? { + archived: event.update.archivedFrom < notification.message.created + } : {} + }) + } + } + + private async onRemoveNotificationEvent(event: NotificationRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + const deleted = this.result.delete(event.message) + + if (deleted !== undefined) { + await this.notify() + } + } + + private async onRemoveNotificationContextEvent(event: NotificationContextRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + + if (this.params.context != null && this.params.context !== event.context) return + + if (event.context === this.params.context) { + if (this.result.length === 0) return + this.result.deleteAll() + this.result.setHead(true) + this.result.setTail(true) + await this.notify() + } else { + const toRemove = this.result.getResult().filter(it => it.context === event.context) + if (toRemove.length === 0) return + + for (const notification of toRemove) { + this.result.delete(notification.message.id) + } + await this.notify() + } + + } + +} diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts new file mode 100644 index 00000000000..bec8fc200f7 --- /dev/null +++ b/packages/query/src/query.ts @@ -0,0 +1,198 @@ +import { Direction, type ID, SortOrder } from '@communication/types' +import { type BroadcastEvent, type QueryCallback, type Client } from '@communication/sdk-types' + +import { QueryResult } from './result' +import { defaultQueryParams, type FindParams, type Query, type QueryId } from './types' +import { WindowImpl } from './window' + +export class BaseQuery implements Query { + protected result: QueryResult | Promise> + private forward: Promise | T[] = [] + private backward: Promise | T[] = [] + + constructor( + protected readonly client: Client, + public readonly id: QueryId, + public readonly params: P, + private callback?: QueryCallback, + initialResult?: QueryResult + ) { + if (initialResult !== undefined) { + this.result = initialResult + void this.notify() + } else { + const limit = this.params.limit ?? 
defaultQueryParams.limit + const findParams = { + ...this.params, + excluded: this.params.excluded ?? defaultQueryParams.excluded, + direction: this.params.direction ?? defaultQueryParams.direction, + sort: this.params.sort ?? defaultQueryParams.sort, + limit: limit + 1 + } + + const findPromise = this.find(findParams) + this.result = findPromise.then((res) => { + const isTail = params.from ? res.length <= limit : params.sort === SortOrder.Desc + const isHead = params.from === undefined && params.sort === SortOrder.Asc + if (!isTail) { + res.pop() + } + const qResult = new QueryResult(res, this.getObjectId) + qResult.setTail(isTail) + qResult.setHead(isHead) + + return qResult + }) + this.result + .then(async () => { + await this.notify() + }) + .catch((err: any) => { + console.error('Failed to update Live query: ', err) + }) + } + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + protected async find(params: FindParams): Promise { + /*Implement in subclass*/ + return [] as T[] + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + protected getObjectId(object: T): ID { + /*Implement in subclass*/ + return '' as ID + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + protected getObjectDate(object: T): Date { + /*Implement in subclass*/ + return new Date(0) as Date + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async onEvent(event: BroadcastEvent): Promise { + /*Implement in subclass*/ + } + + setCallback(callback: QueryCallback): void { + this.callback = callback + void this.notify() + } + + removeCallback(): void { + this.callback = () => {} + } + + protected async notify(): Promise { + if (this.callback === undefined) return + if (this.result instanceof Promise) { + this.result = await this.result + } + + const result = this.result.getResult() + const isTail = this.result.isTail() + const isHead = this.result.isHead() + + const window = new WindowImpl(result, isTail, isHead, this) + this.callback(window) + } + + async loadForward() { + if (this.result instanceof Promise) { + this.result = await this.result + } + if (this.forward instanceof Promise) { + this.forward = await this.forward + } + + if (this.result.isTail()) return + + const last = this.result.getLast() + if (last === undefined) return + + const limit = this.params.limit ?? defaultQueryParams.limit + const findParams: FindParams = { + ...this.params, + from: this.getObjectDate(last), + excluded: true, + direction: Direction.Forward, + limit: limit + 1, + sort: SortOrder.Asc + } + + const forward = this.find(findParams) + + this.forward = forward.then(async (res) => { + if (this.result instanceof Promise) { + this.result = await this.result + } + const isTail = res.length <= limit + if (!isTail) { + res.pop() + } + this.result.append(res) + this.result.setTail(isTail) + await this.notify() + return res + }) + } + + async loadBackward() { + if (this.result instanceof Promise) { + this.result = await this.result + } + if (this.backward instanceof Promise) { + this.backward = await this.backward + } + + if (this.result.isHead()) return + + const first = this.params.sort === SortOrder.Asc ? this.result.getFirst() : this.result.getLast() + if (first === undefined) return + + const limit = this.params.limit ?? 
defaultQueryParams.limit + const findParams: FindParams = { + ...this.params, + from: this.getObjectDate(first), + excluded: true, + direction: Direction.Backward, + limit: limit + 1, + sort: SortOrder.Desc + } + + const backward = this.find(findParams) + this.backward = backward.then(async (res) => { + if (this.result instanceof Promise) { + this.result = await this.result + } + const isHead = res.length <= limit + if (!isHead) { + res.pop() + } + + if (this.params.sort === SortOrder.Asc) { + const reversed = res.reverse() + this.result.prepend(reversed) + } else { + this.result.append(res) + } + this.result.setHead(isHead) + await this.notify() + return res + }) + } + + copyResult(): QueryResult | undefined { + if (this.result instanceof Promise) { + return undefined + } + + return this.result.copy() + } + + async unsubscribe(): Promise { + await this.client.unsubscribeQuery(this.id) + } +} diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts new file mode 100644 index 00000000000..36395f74d51 --- /dev/null +++ b/packages/query/src/result.ts @@ -0,0 +1,92 @@ +import type { ID } from '@communication/types' + +export class QueryResult { + private objectById: Map + + private tail: boolean = false + private head: boolean = false + + get length(): number { + return this.objectById.size + } + + constructor( + messages: T[], + private readonly getId: (it: T) => ID + ) { + this.objectById = new Map(messages.map((it) => [getId(it), it])) + } + + isTail(): boolean { + return this.tail + } + + isHead(): boolean { + return this.head + } + + setHead(head: boolean) { + this.head = head + } + + setTail(tail: boolean) { + this.tail = tail + } + + getResult(): T[] { + return Array.from(this.objectById.values()) + } + + get(id: ID): Readonly | undefined { + return this.objectById.get(id) + } + + delete(id: ID): T | undefined { + const object = this.objectById.get(id) + this.objectById.delete(id) + return object + } + + deleteAll() { + this.objectById.clear() + } + + push(object: T): void { + this.objectById.set(this.getId(object), object) + } + + unshift(object: T): void { + this.objectById = new Map([[this.getId(object), object], ...this.objectById]) + } + + update(object: T): void { + this.objectById.set(this.getId(object), object) + } + + getFirst(): T | undefined { + return Array.from(this.objectById.values())[0] + } + + getLast(): T | undefined { + return Array.from(this.objectById.values())[this.objectById.size - 1] + } + + prepend(objects: T[]) { + const current = Array.from(this.objectById.entries()) + this.objectById = new Map([...objects.map<[ID, T]>((object) => [this.getId(object), object]), ...current]) + } + + append(objects: T[]) { + for (const object of objects) { + this.objectById.set(this.getId(object), object) + } + } + + copy(): QueryResult { + const copy = new QueryResult(Array.from(this.objectById.values()), this.getId) + + copy.setHead(this.head) + copy.setTail(this.tail) + return copy + } +} diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts new file mode 100644 index 00000000000..895e21d3d97 --- /dev/null +++ b/packages/query/src/types.ts @@ -0,0 +1,33 @@ +import { type BroadcastEvent } from '@communication/sdk-types' +import { Direction, SortOrder, type Window } from '@communication/types' + +import { QueryResult } from './result.ts' + +export type QueryId = number + +export const defaultQueryParams = { + limit: 50, + excluded: false, + direction: Direction.Forward, + sort: SortOrder.Desc +} + +export type FindParams = Partial & { + from?: 
Date +} + +export interface Query { + readonly id: QueryId + readonly params: P + + onEvent(event: BroadcastEvent): Promise + + loadForward(): Promise + loadBackward(): Promise + + unsubscribe(): Promise + + setCallback(callback: (window: Window) => void): void + removeCallback(): void + copyResult(): QueryResult | undefined +} diff --git a/packages/query/src/window.ts b/packages/query/src/window.ts new file mode 100644 index 00000000000..508c2b5d891 --- /dev/null +++ b/packages/query/src/window.ts @@ -0,0 +1,34 @@ +import type { Window } from '@communication/types' + +import type { Query } from './types' + +export class WindowImpl implements Window { + constructor( + private readonly result: T[], + private readonly isTail: boolean, + private readonly isHead: boolean, + private readonly query: Query + ) {} + + getResult(): T[] { + return this.result + } + + async loadNextPage(): Promise { + if (!this.hasNextPage()) return + await this.query.loadForward() + } + + async loadPrevPage(): Promise { + if (!this.hasPrevPage()) return + await this.query.loadBackward() + } + + hasNextPage(): boolean { + return !this.isTail + } + + hasPrevPage(): boolean { + return !this.isHead + } +} diff --git a/packages/query/tsconfig.json b/packages/query/tsconfig.json new file mode 100644 index 00000000000..3ae07cd3fa2 --- /dev/null +++ b/packages/query/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "jsx": "react-jsx", + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index dce6d468e9d..af3edce94bb 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -42,4 +42,3 @@ export interface Client { unsubscribeQuery(id: number): Promise close(): void } - diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 38c4587b9ec..01596627a63 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -1,4 +1,5 @@ export * from './db' export * from './event' export * from './ws' -export * from './client' \ No newline at end of file +export * from './client' +export * from './query' diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts new file mode 100644 index 00000000000..c09a164af5f --- /dev/null +++ b/packages/sdk-types/src/query.ts @@ -0,0 +1,6 @@ +import type { Message, Window, Notification } from '@communication/types' + +export type QueryCallback = (window: Window) => void + +export type MessagesQueryCallback = QueryCallback +export type NotificationsQueryCallback = QueryCallback From 506f6f26be72f3b273a33ec45abe648374b1e1f7 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 17:24:06 +0400 Subject: [PATCH 018/636] Init examples (#10) --- bun.lockb | Bin 104400 -> 104968 bytes packages/examples/package.json | 19 +++++ packages/examples/src/httpServer.ts | 70 ++++++++++++++++ packages/examples/src/index.html | 51 ++++++++++++ packages/examples/src/index.ts | 125 ++++++++++++++++++++++++++++ packages/examples/tsconfig.json | 8 ++ 6 files changed, 273 insertions(+) create mode 100644 packages/examples/package.json create mode 100644 packages/examples/src/httpServer.ts create mode 100644 packages/examples/src/index.html create mode 100644 packages/examples/src/index.ts create mode 100644 packages/examples/tsconfig.json diff --git a/bun.lockb b/bun.lockb index 1a93241bbc237c225b0dc6bb8706a74378d7120f..f7b7b222768b8fa236328c717b66a251fd76d9b5 100755 GIT 
zRU2z93gcK;t6ra2)q%AYBRaBFwp=^}+i&I7?r=#ewlTeP|GF6?s-zy9D{^Duk}cv6 z0%H=e2gRe{BFyWT4c%70mpMGNzdk_v+JgDKKNbVOLHrQQQY`1&iEf?HH~nHKg0$!5 zy;~2qN}GZLcFZos(6eG(Cl+S;DN@Yog!SNJlz5>N8;FMf330^MXk&$WuzB25h3mJ` zDo3*tM}=$ABC#`q+Hf8lCnk1ABeQ}7W%^i$Y^Xjbmm;S5cQ(-|YyJvX@1s^eGp);#U3uj(?6~-{ zD|7$4>hRlF@GBF-&Fg%-)%-!ZwY&FpW@D3Wm?(CoPWZ*c;>Sc>JWEOYJ*F-e_3>;a zdt1cY;Hi(}jMKndX+gtAfADQ{8X|1XSTSQo2^82iF_-krtFqhoO!;Kzp0zJ~=u0nl zC54GCP)IZH)`jds?8r5xH*A06y==uHIe!l;@ z!k}GA3G8mhj)+x>EG*r;5n8c-{XFfXe-vZ*v3=8q({$DK;M?b(e=D$$g(caN6KU5x zmmm_;5cEGJh$quoD6LT3w|t3BHPRzh zHgDVF$xkNbKXlzF^X_x#>bWZxZyZLOunaiEig5F`v@Pp3#}|u+j<{a5TI}nN<+@IM z2M9NBUoY$UhNCuM=xaCWh2a>dyUDN!TzwA|mO{O%FlIapH!pOb+tPDw(9Yq;F)sOk zxjMqUe?9lxce>YYt{O!#sGpvdi<-CxH(VpkE9D7{i?N4FDBf@ix22@wj<$F)6X&xp za*Z8fi2I!v+KuY(4YMhTkv52HnQ;GZajYlcm`KV3%ohW)5QArW30D@z(7dYMd|}6@ zM@-+IrO&V=ng#)4I~0FIVVPh(QPY)PBE2UzoUx*;Co2q}kC$qC2YUU9-FNLi+7kN- zPGjk|6uVv8u~M8Rb0qhxMLPalSbgIQ)Nk$+qAr`Q4>xa%KbrP+>u;RjJgkpeI;|!9 zL{ANyw|Tw%*_VDQ+LGP%nuoD@L*YzcQLVv)m+>Y{i^%?kCy$Kz_N_3v1|r~@Xh0R= z=8f~u23^>6{HfFUzNAljI4MG0)>tWfK-`f7J@X2A(fX{Q4uuZFA0yb}My18VBPSTP-YYAWIv zi{XDXkn~my_pcPiz0s#%H}QKaGH>SJ|N3h~yCr$hzw?WXrSIRZ`{tMOnA^UTUq7$h z()=SB{~+|u{8kX-xfc@Z?eFQB(S-Lx`gDZUxi@`0&GFP3MGWr468#snHG29$M%tTf zR?DBaiTXY)vcJowC^(k5I(#1#JTd*#3!A{xTVnnDe|aL}Nq5i=@EyUw!L~fn_OW%# zPJySF^UsGI{QK1R)tRHnBS?SY;>SMh>s|hR*~UTMSLhoTQqEE_bveE(eDOGo60Mf8 z&fW{@%NJ6{GBPoI*Imol3FbXT{_p(7Wu%{Ps9ur69HFZU>k8JlZA-LmemgP0l$hT| J^c%tAe*n%=heiMZ diff --git a/packages/examples/package.json b/packages/examples/package.json new file mode 100644 index 00000000000..c21d65d65a4 --- /dev/null +++ b/packages/examples/package.json @@ -0,0 +1,19 @@ +{ + "name": "@communication/examples", + "version": "0.1.0", + "main": "src/index.ts", + "module": "src/index.ts", + "type": "module", + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "dependencies": { + "@communication/client-query": "workspace:*", + "@communication/client-ws": "workspace:*", + "@communication/client-sqlite": "workspace:*", + "@communication/types": "workspace:*" + }, + "peerDependencies": { + "typescript": "^5.6.3" + } +} diff --git a/packages/examples/src/httpServer.ts b/packages/examples/src/httpServer.ts new file mode 100644 index 00000000000..4320997f2b5 --- /dev/null +++ b/packages/examples/src/httpServer.ts @@ -0,0 +1,70 @@ +Bun.serve({ + port: 3003, + + fetch(req) { + const url = new URL(req.url) + const headers = { + 'Cross-Origin-Opener-Policy': 'same-origin', + 'Cross-Origin-Embedder-Policy': 'require-corp' + } + if (url.pathname === '/') { + return new Response(Bun.file('./index.html'), { headers }) + } + + if (url.pathname.endsWith('index.js')) { + return new Response(Bun.file('../dist/index.js'), { headers }) + } + + if (url.pathname.endsWith('sqlite3-opfs-async-proxy.js')) { + return new Response( + Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-opfs-async-proxy.js'), + { + headers + } + ) + } + + if (url.pathname.endsWith('index.mjs')) { + return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/index.mjs'), { + headers + }) + } + + if (url.pathname.endsWith('sqlite3-worker1-promiser.mjs')) { + return new Response( + Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-worker1-promiser.mjs'), + { + headers + } + ) + } + + if (url.pathname.endsWith('sqlite3-worker1-bundler-friendly.mjs')) { + return new Response( + Bun.file( + '../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-worker1-bundler-friendly.mjs' + ), + { + headers + } + ) + 
} + + if (url.pathname.endsWith('sqlite3.js')) { + return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3.js'), { + headers + }) + } + + if (url.pathname.endsWith('.wasm')) { + return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3.wasm'), { + headers: { + ...headers, + ContentType: 'application/wasm' + } + }) + } + + return new Response('Not Found', { status: 404, headers }) + } +}) diff --git a/packages/examples/src/index.html b/packages/examples/src/index.html new file mode 100644 index 00000000000..1eae9ec030c --- /dev/null +++ b/packages/examples/src/index.html @@ -0,0 +1,51 @@ + + + + + +

+ + + + + + + \ No newline at end of file diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts new file mode 100644 index 00000000000..1f031549822 --- /dev/null +++ b/packages/examples/src/index.ts @@ -0,0 +1,125 @@ +import { type CardID, type Message, SortOrder, type Window } from '@communication/types' +import { getWebsocketClient } from '@communication/client-ws' +import { getSqliteClient } from '@communication/client-sqlite' +import { createMessagesQuery, initLiveQueries } from '@communication/client-query' + +const card1: CardID = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' +const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' +const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' +const creator1 = 'email:vasya@huly.com' + +async function getClient(type: 'ws' | 'sqlite') { + if (type === 'ws') { + const platformUrl = 'ws://localhost:8090' + const token = 'token' + return await getWebsocketClient(platformUrl, token) + } + + return await getSqliteClient(workspace, personWorkspace) +} + +export async function example() { + const client = await getClient('sqlite') + initLiveQueries(client) + + const query1 = createMessagesQuery() + + let window: Window | undefined = undefined + + query1.query({ card: card1, sort: SortOrder.Desc }, (res) => { + window = res + const r = window.getResult() + r.reverse() + showMessages(r) + }) + + document.getElementById('forward-button')?.addEventListener('click', async () => { + if (window == null) return + await window.loadNextPage() + }) + + document.getElementById('backward-button')?.addEventListener('click', async () => { + if (window == null) return + await window.loadPrevPage() + }) + + async function editMessage(message: Message) { + await client.createPatch(message.id, message.content + '_1_', creator1) + } + + async function deleteMessage(message: Message) { + await client.removeMessage(message.id) + } + + async function addReaction(message: Message) { + await client.createReaction(message.id, '👍', creator1) + } + + async function removeReaction(message: Message) { + await client.removeReaction(message.id, '👍', creator1) + } + + function scrollToBottom() { + const el = document.getElementById('chat') + if (el == null) return + el.scrollTo(0, el.scrollHeight) + } + + async function showMessages(messages: ReadonlyArray) { + const el = document.getElementById('messages') + if (el == null) return + el.innerHTML = '' + for (const message of messages) { + const div = el.appendChild(document.createElement('div')) + div.className = 'message' + + const messageContent = document.createElement('span') + messageContent.textContent = message.content + ' ' + message.reactions.map((it) => it.reaction).join(' ') + // + ' ' + messages.created.getTime() + div.appendChild(messageContent) + + const buttonsDiv = document.createElement('div') + buttonsDiv.className = 'buttons' + + const editButton = document.createElement('button') + editButton.textContent = 'Edit' + editButton.className = 'edit-button' + editButton.addEventListener('click', () => editMessage(message)) + buttonsDiv.appendChild(editButton) + + const deleteButton = document.createElement('button') + deleteButton.textContent = 'Remove' + deleteButton.className = 'delete-button' + deleteButton.addEventListener('click', () => deleteMessage(message)) + buttonsDiv.appendChild(deleteButton) + + const addReactionButton = document.createElement('button') + addReactionButton.textContent = '+R' + addReactionButton.className = 'add-reaction-button' + 
addReactionButton.addEventListener('click', () => addReaction(message)) + buttonsDiv.appendChild(addReactionButton) + + const removeReactionButton = document.createElement('button') + removeReactionButton.textContent = '-R' + removeReactionButton.className = 'remove-reaction-button' + removeReactionButton.addEventListener('click', () => removeReaction(message)) + buttonsDiv.appendChild(removeReactionButton) + + div.appendChild(buttonsDiv) + } + scrollToBottom() + } + + document.getElementById('form')?.addEventListener('submit', async (event) => { + event.preventDefault() + // @ts-expect-error error + const el = event.target?.getElementsByTagName('input')[0] as HTMLInputElement + if (el.value == '' || el.value == null) return + + await client.createMessage(card1, el.value, creator1) + + el.value = '' + }) +} + +void example() diff --git a/packages/examples/tsconfig.json b/packages/examples/tsconfig.json new file mode 100644 index 00000000000..49e05cea1ee --- /dev/null +++ b/packages/examples/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From db8de799d2c6d7be259614b166f9267b43ca61a0 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 25 Dec 2024 18:20:06 +0400 Subject: [PATCH 019/636] Remove message place table (#11) Signed-off-by: Kristina Fefelova --- package.json | 2 +- packages/client-sqlite/src/client.ts | 62 ++++++--- packages/client-ws/src/client.ts | 52 +++++-- packages/cockroach/migrations/01_message.sql | 25 +--- packages/cockroach/src/adapter.ts | 17 ++- packages/cockroach/src/db/message.ts | 73 +++++----- packages/cockroach/src/db/notification.ts | 75 +++++----- packages/cockroach/src/db/types.ts | 12 +- packages/examples/src/index.ts | 18 +-- packages/query/src/messages/query.ts | 7 +- packages/sdk-types/src/client.ts | 17 +-- packages/sdk-types/src/db.ts | 13 +- packages/sdk-types/src/event.ts | 18 ++- packages/server/src/eventProcessor.ts | 13 +- packages/server/src/main.ts | 3 +- packages/server/src/manager.ts | 44 +++++- packages/server/src/triggers.ts | 8 +- packages/sqlite-wasm/src/adapter.ts | 17 ++- packages/sqlite-wasm/src/db/message.ts | 72 ++++------ packages/sqlite-wasm/src/db/notification.ts | 136 +++++++++---------- packages/sqlite-wasm/src/db/types.ts | 9 +- packages/sqlite-wasm/src/migrations.ts | 77 ++++------- packages/types/src/message.ts | 6 +- packages/types/src/query.ts | 4 +- 24 files changed, 398 insertions(+), 382 deletions(-) diff --git a/package.json b/package.json index 034075fe14d..c69f5f7f260 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "type": "module", "workspaces": ["packages/*"], "scripts": { - "build": "sh scripts/build.sh", + "build": "bun x tsc --noEmit && sh scripts/build.sh", "lint": "eslint packages/**/src/*.ts", "format": "prettier --write packages/**/src/*.ts && bun run lint" }, diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 4b50d2e06ca..9e4e7219fac 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -10,7 +10,10 @@ import { type FindNotificationContextParams, type NotificationContext, type FindNotificationsParams, - type Notification + type Notification, + type ThreadID, + type Attachment, + type Reaction } from '@communication/types' import { type Client, @@ -30,16 +33,15 @@ class DbClient implements Client { private readonly personWorkspace: string ) {} - async createMessage(card: CardID, content: RichText, 
creator: SocialID): Promise { + async createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise { const created = new Date() - const id = await this.db.createMessage(content, creator, created) - await this.db.placeMessage(id, card, this.workspace) + const id = await this.db.createMessage(this.workspace, thread, content, creator, created) const event: MessageCreatedEvent = { type: EventType.MessageCreated, - card, message: { id, + thread, content, creator, created, @@ -54,42 +56,42 @@ class DbClient implements Client { return id } - async removeMessage(message: MessageID) { + async removeMessage(thread: ThreadID, message: MessageID) { await this.db.removeMessage(message) - this.onEvent({ type: EventType.MessageRemoved, message }) + this.onEvent({ type: EventType.MessageRemoved, message, thread }) } - async createPatch(message: MessageID, content: RichText, creator: SocialID): Promise { + async createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise { const created = new Date() await this.db.createPatch(message, content, creator, created) - this.onEvent({ type: EventType.PatchCreated, patch: { message, content, creator, created } }) + this.onEvent({ type: EventType.PatchCreated, thread, patch: { message, content, creator, created } }) } - async createReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { const created = new Date() await this.db.createReaction(message, reaction, creator, created) - this.onEvent({ type: EventType.ReactionCreated, reaction: { message, reaction, creator, created } }) + this.onEvent({ type: EventType.ReactionCreated, thread, reaction: { message, reaction, creator, created } }) } - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { await this.db.removeReaction(message, reaction, creator) - this.onEvent({ type: EventType.ReactionRemoved, message, reaction, creator }) + this.onEvent({ type: EventType.ReactionRemoved, thread, message, reaction, creator }) } - async createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise { + async createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: SocialID): Promise { const created = new Date() await this.db.createAttachment(message, card, creator, created) - this.onEvent({ type: EventType.AttachmentCreated, attachment: { message, card, creator, created } }) + this.onEvent({ type: EventType.AttachmentCreated, thread, attachment: { message, card, creator, created } }) } - async removeAttachment(message: MessageID, card: CardID): Promise { + async removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise { await this.db.removeAttachment(message, card) - this.onEvent({ type: EventType.AttachmentRemoved, message, card }) + this.onEvent({ type: EventType.AttachmentRemoved, message, card, thread }) } async findMessages(params: FindMessagesParams): Promise { const rawMessages = await this.db.findMessages(this.workspace, params) - return rawMessages.map(this.toMessage) + return rawMessages.map((it) => this.toMessage(it)) } async findMessage(params: FindMessagesParams): Promise { @@ -99,15 +101,33 @@ class DbClient implements Client { toMessage(raw: any): Message { return { id: raw.id, + thread: raw.thread, content: raw.content, creator: 
raw.creator, created: new Date(raw.created), edited: new Date(raw.edited), - reactions: raw.reactions, - attachments: raw.attachments + reactions: raw.reactions.map((it: any) => this.toReaction(it)), + attachments: raw.attachments.map((it: any) => this.toAttachment(it)) } } + toAttachment(raw: any): Attachment { + return { + message: raw.message, + card: raw.card, + creator: raw.creator, + created: new Date(raw.created) + } + } + + toReaction(raw: any): Reaction { + return { + message: raw.message, + reaction: raw.reaction, + creator: raw.creator, + created: new Date(raw.created) + } + } async createNotification(message: MessageID, context: ContextID): Promise { await this.db.createNotification(message, context) } @@ -117,7 +137,7 @@ class DbClient implements Client { } async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - return await this.db.createContext(this.workspace, card, this.personWorkspace, lastView, lastUpdate) + return await this.db.createContext(this.personWorkspace, this.workspace, card, lastView, lastUpdate) } async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index b8329f77550..15eef05da61 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -1,4 +1,5 @@ import { + type Attachment, type CardID, type ContextID, type FindMessagesParams, @@ -9,8 +10,10 @@ import { type Notification, type NotificationContext, type NotificationContextUpdate, + type Reaction, type RichText, - type SocialID + type SocialID, + type ThreadID } from '@communication/types' import { type BroadcastEvent, @@ -53,10 +56,10 @@ class WsClient implements Client { } } - async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { + async createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise { const event: CreateMessageEvent = { type: EventType.CreateMessage, - card, + thread, content, creator } @@ -64,17 +67,19 @@ class WsClient implements Client { return (result as CreateMessageResult).id } - async removeMessage(message: MessageID) { + async removeMessage(thread: ThreadID, message: MessageID): Promise { const event: RemoveMessageEvent = { type: EventType.RemoveMessage, + thread, message } await this.sendEvent(event) } - async createPatch(message: MessageID, content: RichText, creator: SocialID): Promise { + async createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise { const event: CreatePatchEvent = { type: EventType.CreatePatch, + thread, message, content, creator @@ -82,9 +87,10 @@ class WsClient implements Client { await this.sendEvent(event) } - async createReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: CreateReactionEvent = { type: EventType.CreateReaction, + thread, message, reaction, creator @@ -92,9 +98,10 @@ class WsClient implements Client { await this.sendEvent(event) } - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: RemoveReactionEvent = { type: EventType.RemoveReaction, + thread, message, reaction, creator @@ -102,9 +109,10 @@ class WsClient implements Client { await this.sendEvent(event) } - 
async createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise { + async createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: SocialID): Promise { const event: CreateAttachmentEvent = { type: EventType.CreateAttachment, + thread, message, card, creator @@ -112,9 +120,10 @@ class WsClient implements Client { await this.sendEvent(event) } - async removeAttachment(message: MessageID, card: CardID): Promise { + async removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise { const event: RemoveAttachmentEvent = { type: EventType.RemoveAttachment, + thread, message, card } @@ -123,18 +132,37 @@ class WsClient implements Client { async findMessages(params: FindMessagesParams, queryId?: number): Promise { const rawMessages = await this.ws.send('findMessages', [params, queryId]) - return rawMessages.map(this.toMessage) + return rawMessages.map((it: any) => this.toMessage(it)) } toMessage(raw: any): Message { return { id: raw.id, + thread: raw.thread, content: raw.content, creator: raw.creator, created: new Date(raw.created), edited: new Date(raw.edited), - reactions: raw.reactions, - attachments: raw.attachments + reactions: raw.reactions.map((it: any) => this.toReaction(it)), + attachments: raw.attachments.map((it: any) => this.toAttachment(it)) + } + } + + toAttachment(raw: any): Attachment { + return { + message: raw.message, + card: raw.card, + creator: raw.creator, + created: new Date(raw.created) + } + } + + toReaction(raw: any): Reaction { + return { + message: raw.message, + reaction: raw.reaction, + creator: raw.creator, + created: new Date(raw.created) } } diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index 1236df5ae25..e03be3b3435 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -1,24 +1,13 @@ CREATE TABLE IF NOT EXISTS message ( - id UUID NOT NULL DEFAULT gen_random_uuid(), - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, + id UUID NOT NULL DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL, + thread_id UUID NOT NULL, - PRIMARY KEY (id) -); - -CREATE TABLE IF NOT EXISTS message_place -( - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - message_id UUID NOT NULL, + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (workspace_id, card_id, message_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + PRIMARY KEY (id) ); - - -CREATE INDEX idx_message_place_workspace_card ON message_place (workspace_id, card_id); -CREATE INDEX idx_message_place_message_id ON message_place (message_id); diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 22185295daa..a1841b6252b 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -11,7 +11,8 @@ import { type FindNotificationContextParams, type NotificationContext, type FindNotificationsParams, - type Notification + type Notification, + type ThreadID } from '@communication/types' import type { DbAdapter } from '@communication/sdk-types' @@ -31,12 +32,14 @@ export class CockroachAdapter implements DbAdapter { this.notification = new NotificationsDb(this.sqlClient) } - async createMessage(content: RichText, creator: SocialID, created: Date): Promise { - return await this.message.createMessage(content, creator, created) - } - - async placeMessage(message: MessageID, card: 
CardID, workspace: string): Promise { - return await this.message.placeMessage(message, card, workspace) + async createMessage( + workspace: string, + thread: ThreadID, + content: RichText, + creator: SocialID, + created: Date + ): Promise { + return await this.message.createMessage(workspace, thread, content, creator, created) } async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 2722f68bef6..deedf63ac4f 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -6,14 +6,13 @@ import { SortOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment + Direction, type Reaction, type Attachment, type ThreadID } from '@communication/types' import {BaseDb} from './base.ts' import { TableName, type MessageDb, - type MessagePlaceDb, type AttachmentDb, type ReactionDb, type PatchDb @@ -21,8 +20,10 @@ import { export class MessagesDb extends BaseDb { //Message - async createMessage(content: RichText, creator: SocialID, created: Date): Promise { + async createMessage(workspace: string, thread: ThreadID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: MessageDb = { + workspace_id: workspace, + thread_id: thread, content: content, creator: creator, created: created, @@ -37,15 +38,6 @@ export class MessagesDb extends BaseDb { await this.remove(TableName.Message, {id: message}) } - async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { - const dbData: MessagePlaceDb = { - workspace_id: workspace, - card_id: card, - message_id: message - } - await this.insert(TableName.MessagePlace, dbData) - } - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: PatchDb = { message_id: message, @@ -104,8 +96,7 @@ export class MessagesDb extends BaseDb { ${this.subSelectPatches()}, ${this.subSelectAttachments()}, ${this.subSelectReactions()} - FROM ${TableName.Message} m - INNER JOIN ${TableName.MessagePlace} mp ON m.id = mp.message_id` + FROM ${TableName.Message} m` const {where, values} = this.buildMessageWhere(workspace, params) const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' @@ -118,40 +109,37 @@ export class MessagesDb extends BaseDb { } buildMessageWhere(workspace: string, params: FindMessagesParams): { where: string, values: any[] } { - const where: string[] = ['mp.workspace_id = $1'] + const where: string[] = ['m.workspace_id = $1'] const values: any[] = [workspace] + let index = 2 - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'id': { - where.push(`m.id = $${index++}`) - values.push(value) - break - } - case 'card': { - where.push(`mp.card_id = $${index++}`) - values.push(value) - break - } - case 'from': { - const exclude = params.excluded ?? false - const direction = params.direction ?? Direction.Forward - const getOperator = () => { - if (exclude) { - return direction === Direction.Forward ? '>' : '<' - } else { - return direction === Direction.Forward ? 
'>=' : '<=' - } - } - - where.push(`m.created ${getOperator()} $${index++}`) - values.push(value) - break + + if (params.id != null) { + where.push(`m.id = $${index++}`) + values.push(params.id) + } + + if (params.thread != null) { + where.push(`m.thread_id = $${index++}`) + values.push(params.thread) + } + + if (params.from != null) { + const exclude = params.excluded ?? false + const direction = params.direction ?? Direction.Forward + const getOperator = () => { + if (exclude) { + return direction === Direction.Forward ? '>' : '<' + } else { + return direction === Direction.Forward ? '>=' : '<=' } } + + where.push(`m.created ${getOperator()} $${index++}`) + values.push(params.from) } + return {where: `WHERE ${where.join(' AND ')}`, values} } @@ -198,6 +186,7 @@ export class MessagesDb extends BaseDb { return { id: row.id, + thread: row.thread_id, content: lastPatch?.content ?? row.content, creator: row.creator, created: new Date(row.created), diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index dfa9528d12c..a2342041d58 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -66,12 +66,12 @@ export class NotificationsDb extends BaseDb { const sql = `UPDATE ${TableName.NotificationContext} SET ${keys.map((k, idx) => `"${k}" = $${idx + 1}`).join(', ')} - WHERE id =$${keys.length + 1}` + WHERE id = $${keys.length + 1}` await this.client.unsafe(sql, [values, context]) } - async findContexts( params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { + async findContexts(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { const select = ` SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update FROM ${TableName.NotificationContext} nc`; @@ -91,6 +91,7 @@ export class NotificationsDb extends BaseDb { const select = ` SELECT n.message_id, n.context, + m.thread_id AS message_thread, m.content AS message_content, m.creator AS message_creator, m.created AS message_created, @@ -130,25 +131,19 @@ export class NotificationsDb extends BaseDb { const values: any[] = [] let index = 1 - if(workspace != null) { + if (workspace != null) { where.push(`nc.workspace_id = $${index++}`) values.push(workspace) } - if(personWorkspaces.length > 0) { + if (personWorkspaces.length > 0) { where.push(`nc.person_workspace IN (${personWorkspaces.map((it) => `$${index++}`).join(', ')})`) values.push(...personWorkspaces) } - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'card': { - where.push(`nc.card_id = $${index++}`) - values.push(value) - break - } - } + if (params.card != null) { + where.push(`nc.card_id = $${index++}`) + values.push(params.card) } return {where: `WHERE ${where.join(' AND ')}`, values} @@ -162,43 +157,34 @@ export class NotificationsDb extends BaseDb { const values: any[] = [personWorkspace] let index = 2 - if(workspace != null) { + if (workspace != null) { where.push(`nc.workspace_id = $${index++}`) values.push(workspace) } - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'context': { - where.push(`n.context = $${index++}`) - values.push(value) - break - } - case 'card': { - where.push(`nc.card_id = $${index++}`) - values.push(value) - break - } - case 'read': { - if (value === true) { - where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) - } else if (value === false) { - 
where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) - } - break - } - case 'archived': { - if (value === true) { - where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) - } else if (value === false) { - where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) - } - break - } - } + if (params.context != null) { + where.push(`n.context = $${index++}`) + values.push(params.context) } + + if (params.read === true) { + where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) + } + + if (params.read === false) { + where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) + } + + if (params.archived === true) { + where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) + } + + if (params.archived === false) { + where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) + } + + return {where: `WHERE ${where.join(' AND ')}`, values} } @@ -223,6 +209,7 @@ export class NotificationsDb extends BaseDb { return { message: { id: row.id, + thread: row.message_thread, content: lastPatch?.content ?? row.message_content, creator: row.message_creator, created, diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts index 9dab08561a0..3c7bc183729 100644 --- a/packages/cockroach/src/db/types.ts +++ b/packages/cockroach/src/db/types.ts @@ -1,9 +1,8 @@ -import type {CardID, ContextID, MessageID, RichText, SocialID } from "@communication/types" +import type {CardID, ContextID, MessageID, RichText, SocialID, ThreadID } from "@communication/types" export enum TableName { Message = 'message', Patch = 'patch', - MessagePlace = 'message_place', Attachment = 'attachment', Reaction = 'reaction', Notification = 'notification', @@ -11,6 +10,8 @@ export enum TableName { } export interface MessageDb { + workspace_id: string, + thread_id: ThreadID, content: RichText, creator: SocialID, created: Date, @@ -22,13 +23,6 @@ export interface PatchDb { creator: SocialID, created: Date, } - -export interface MessagePlaceDb { - workspace_id: string, - card_id: CardID, - message_id: MessageID -} - export interface ReactionDb { message_id: MessageID, reaction: string, diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index 1f031549822..27c7257439e 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -1,12 +1,12 @@ -import { type CardID, type Message, SortOrder, type Window } from '@communication/types' +import { type Message, type SocialID, SortOrder, type ThreadID, type Window } from '@communication/types' import { getWebsocketClient } from '@communication/client-ws' import { getSqliteClient } from '@communication/client-sqlite' import { createMessagesQuery, initLiveQueries } from '@communication/client-query' -const card1: CardID = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' +const thread = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as ThreadID const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' -const creator1 = 'email:vasya@huly.com' +const creator1 = 'email:vasya@huly.com' as SocialID async function getClient(type: 'ws' | 'sqlite') { if (type === 'ws') { @@ -26,7 +26,7 @@ export async function example() { let window: Window | undefined = undefined - query1.query({ card: card1, sort: SortOrder.Desc }, (res) => { + query1.query({ thread, sort: SortOrder.Desc }, (res) => { window = res const r = window.getResult() r.reverse() @@ -44,19 +44,19 @@ export async function example() { }) async 
function editMessage(message: Message) { - await client.createPatch(message.id, message.content + '_1_', creator1) + await client.createPatch(thread, message.id, message.content + '_1_', creator1) } async function deleteMessage(message: Message) { - await client.removeMessage(message.id) + await client.removeMessage(thread, message.id) } async function addReaction(message: Message) { - await client.createReaction(message.id, '👍', creator1) + await client.createReaction(thread, message.id, '👍', creator1) } async function removeReaction(message: Message) { - await client.removeReaction(message.id, '👍', creator1) + await client.removeReaction(thread, message.id, '👍', creator1) } function scrollToBottom() { @@ -116,7 +116,7 @@ export async function example() { const el = event.target?.getElementsByTagName('input')[0] as HTMLInputElement if (el.value == '' || el.value == null) return - await client.createMessage(card1, el.value, creator1) + await client.createMessage(thread, el.value, creator1) el.value = '' }) diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 794b346c261..a466d467019 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -1,5 +1,4 @@ import { - type CardID, type FindMessagesParams, type ID, type Message, @@ -65,7 +64,7 @@ export class MessagesQuery extends BaseQuery { const exists = this.result.get(message.id) if (exists !== undefined) return - if (!this.match(message, event.card)) return + if (!this.match(message)) return if (this.result.isTail()) { if (this.params.sort === SortOrder.Asc) { @@ -77,11 +76,11 @@ export class MessagesQuery extends BaseQuery { } } - private match(message: Message, card: CardID): boolean { + private match(message: Message): boolean { if (this.params.id != null && this.params.id !== message.id) { return false } - if (this.params.card != null && this.params.card !== card) { + if (this.params.thread != null && this.params.thread !== message.thread) { return false } return true diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index af3edce94bb..d232cb73f8d 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -9,22 +9,23 @@ import type { NotificationContextUpdate, RichText, SocialID, - Notification + Notification, + ThreadID } from '@communication/types' import type { FindMessagesParams } from '@communication/types' import type { BroadcastEvent } from './event.ts' export interface Client { - createMessage(card: CardID, content: RichText, creator: SocialID): Promise - removeMessage(id: MessageID): Promise - createPatch(message: MessageID, content: RichText, creator: SocialID): Promise + createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise + removeMessage(thread: ThreadID, id: MessageID): Promise + createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise - createReaction(message: MessageID, reaction: string, creator: SocialID): Promise - removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise + createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise + removeReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise - createAttachment(message: MessageID, card: CardID, creator: SocialID): Promise - removeAttachment(message: MessageID, card: CardID): Promise + createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: 
SocialID): Promise + removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise createNotification(message: MessageID, context: ContextID): Promise removeNotification(message: MessageID, context: ContextID): Promise diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 0e5c9c38d33..17231010164 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -10,14 +10,19 @@ import type { NotificationContextUpdate, RichText, SocialID, - Notification + Notification, + ThreadID } from '@communication/types' export interface DbAdapter { - createMessage(content: RichText, creator: SocialID, created: Date): Promise + createMessage( + workspace: string, + thread: ThreadID, + content: RichText, + creator: SocialID, + created: Date + ): Promise removeMessage(id: MessageID): Promise - - placeMessage(message: MessageID, card: CardID, workspace: string): Promise createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts index 00f49b67142..8771477c533 100644 --- a/packages/sdk-types/src/event.ts +++ b/packages/sdk-types/src/event.ts @@ -10,7 +10,8 @@ import type { Reaction, RichText, SocialID, - Notification + Notification, + ThreadID } from '@communication/types' export enum EventType { @@ -57,18 +58,20 @@ export type Event = export interface CreateMessageEvent { type: EventType.CreateMessage - card: CardID + thread: ThreadID content: RichText creator: SocialID } export interface RemoveMessageEvent { type: EventType.RemoveMessage + thread: ThreadID message: MessageID } export interface CreatePatchEvent { type: EventType.CreatePatch + thread: ThreadID message: MessageID content: RichText creator: SocialID @@ -76,6 +79,7 @@ export interface CreatePatchEvent { export interface CreateReactionEvent { type: EventType.CreateReaction + thread: ThreadID message: MessageID reaction: string creator: SocialID @@ -83,6 +87,7 @@ export interface CreateReactionEvent { export interface RemoveReactionEvent { type: EventType.RemoveReaction + thread: ThreadID message: MessageID reaction: string creator: SocialID @@ -90,6 +95,7 @@ export interface RemoveReactionEvent { export interface CreateAttachmentEvent { type: EventType.CreateAttachment + thread: ThreadID message: MessageID card: CardID creator: SocialID @@ -97,6 +103,7 @@ export interface CreateAttachmentEvent { export interface RemoveAttachmentEvent { type: EventType.RemoveAttachment + thread: ThreadID message: MessageID card: CardID } @@ -158,27 +165,30 @@ export type BroadcastEvent = export interface MessageCreatedEvent { type: EventType.MessageCreated - card: CardID message: Message } export interface MessageRemovedEvent { type: EventType.MessageRemoved + thread: ThreadID message: MessageID } export interface PatchCreatedEvent { type: EventType.PatchCreated + thread: ThreadID patch: Patch } export interface ReactionCreatedEvent { type: EventType.ReactionCreated + thread: ThreadID reaction: Reaction } export interface ReactionRemovedEvent { type: EventType.ReactionRemoved + thread: ThreadID message: MessageID reaction: string creator: SocialID @@ -186,11 +196,13 @@ export interface ReactionRemovedEvent { export interface AttachmentCreatedEvent { type: EventType.AttachmentCreated + thread: ThreadID attachment: Attachment } export interface AttachmentRemovedEvent { type: EventType.AttachmentRemoved + 
thread: ThreadID message: MessageID card: CardID } diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index ecdb2d9a5d3..cffcbdca96c 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -73,10 +73,10 @@ export class EventProcessor { private async createMessage(event: CreateMessageEvent): Promise { const created = new Date() - const id = await this.db.createMessage(event.content, event.creator, created) - await this.db.placeMessage(id, event.card, this.workspace) + const id = await this.db.createMessage(this.workspace, event.thread, event.content, event.creator, created) const message: Message = { id, + thread: event.thread, content: event.content, creator: event.creator, created: created, @@ -86,7 +86,6 @@ export class EventProcessor { } const broadcastEvent: MessageCreatedEvent = { type: EventType.MessageCreated, - card: event.card, message } return { @@ -107,6 +106,7 @@ export class EventProcessor { } const broadcastEvent: PatchCreatedEvent = { type: EventType.PatchCreated, + thread: event.thread, patch } return { @@ -120,6 +120,7 @@ export class EventProcessor { const broadcastEvent: MessageRemovedEvent = { type: EventType.MessageRemoved, + thread: event.thread, message: event.message } @@ -141,6 +142,7 @@ export class EventProcessor { } const broadcastEvent: ReactionCreatedEvent = { type: EventType.ReactionCreated, + thread: event.thread, reaction } return { @@ -153,6 +155,7 @@ export class EventProcessor { await this.db.removeReaction(event.message, event.reaction, event.creator) const broadcastEvent: ReactionRemovedEvent = { type: EventType.ReactionRemoved, + thread: event.thread, message: event.message, reaction: event.reaction, creator: event.creator @@ -175,6 +178,7 @@ export class EventProcessor { } const broadcastEvent: AttachmentCreatedEvent = { type: EventType.AttachmentCreated, + thread: event.thread, attachment } @@ -188,6 +192,7 @@ export class EventProcessor { await this.db.removeAttachment(event.message, event.card) const broadcastEvent: AttachmentRemovedEvent = { type: EventType.AttachmentRemoved, + thread: event.thread, message: event.message, card: event.card } @@ -222,9 +227,9 @@ export class EventProcessor { private async createNotificationContext(event: CreateNotificationContextEvent): Promise { const id = await this.db.createContext( + this.personWorkspace, this.workspace, event.card, - this.personWorkspace, event.lastView, event.lastUpdate ) diff --git a/packages/server/src/main.ts b/packages/server/src/main.ts index 8d22d60e433..3fa14adf07d 100644 --- a/packages/server/src/main.ts +++ b/packages/server/src/main.ts @@ -2,6 +2,7 @@ import WebSocket, { WebSocketServer, type RawData } from 'ws' import { createDbAdapter } from '@communication/cockroach' import type { Response, HelloRequest } from '@communication/sdk-types' import { decodeToken } from '@hcengineering/server-token' +import type { SocialID } from '@communication/types' import type { ConnectionInfo } from './types.ts' import { deserializeRequest, serializeResponse } from './utils/serialize.ts' @@ -137,5 +138,5 @@ async function validateToken(token: string): Promise { } const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f7' - return { workspace: info.workspaceId, personWorkspace, socialId: email } + return { workspace: info.workspaceId, personWorkspace, socialId: email as SocialID } } diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index 3bb640cc36c..6ca6a82a6ca 100644 --- 
a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -3,12 +3,16 @@ import { type BroadcastEvent, type DbAdapter, EventType, - type MessageCreatedEvent, type NotificationContextCreatedEvent, type NotificationCreatedEvent, type Response } from '@communication/sdk-types' -import type { FindMessagesParams, FindNotificationContextParams, FindNotificationsParams } from '@communication/types' +import type { + FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, + MessageID +} from '@communication/types' import { Session } from './session' import type { ConnectionInfo } from './types' @@ -112,14 +116,40 @@ export class Manager { private match(event: BroadcastEvent, info: SessionInfo): boolean { switch (event.type) { case EventType.MessageCreated: - return this.matchMessagesQuery(event, Array.from(info.messageQueries.values())) + return this.matchMessagesQuery( + { id: event.message.id, thread: event.message.thread }, + Array.from(info.messageQueries.values()) + ) case EventType.PatchCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.patch.message }, + Array.from(info.messageQueries.values()) + ) case EventType.MessageRemoved: + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) case EventType.ReactionCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.reaction.message }, + Array.from(info.messageQueries.values()) + ) case EventType.ReactionRemoved: + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) case EventType.AttachmentCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.attachment.message }, + Array.from(info.messageQueries.values()) + ) case EventType.AttachmentRemoved: - return info.messageQueries.size > 0 + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) case EventType.NotificationCreated: return ( info.session.info.personWorkspace === event.personWorkspace && @@ -139,12 +169,12 @@ export class Manager { } } - private matchMessagesQuery(event: MessageCreatedEvent, queries: FindMessagesParams[]): boolean { + private matchMessagesQuery(params: { id?: MessageID; thread?: string }, queries: FindMessagesParams[]): boolean { if (queries.length === 0) return false for (const query of queries) { - if (query.id != null && query.id !== event.message.id) continue - if (query.card != null && query.card !== event.card) continue + if (query.id != null && query.id !== params.id) continue + if (query.thread != null && query.thread !== params.thread) continue return true } diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 4f131853c66..4500a2ec20f 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -6,7 +6,7 @@ import { type NotificationContextCreatedEvent, type NotificationCreatedEvent } from '@communication/sdk-types' -import type { NotificationContext, ContextID } from '@communication/types' +import type { NotificationContext, ContextID, CardID } from '@communication/types' export class Triggers { constructor(private readonly db: DbAdapter) {} @@ -21,7 +21,7 @@ export class Triggers { } private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { - const card = event.card + const card = event.message.thread as any as CardID const 
subscribedPersonWorkspaces = ['cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8'] const res: BroadcastEvent[] = [] @@ -62,7 +62,7 @@ export class Triggers { private async getOrCreateContextId( workspace: string, - card: string, + card: CardID, personWorkspace: string, res: BroadcastEvent[], lastUpdate: Date, @@ -71,7 +71,7 @@ export class Triggers { if (context !== undefined) { return context.id } else { - const contextId = await this.db.createContext(workspace, card, personWorkspace, undefined, lastUpdate) + const contextId = await this.db.createContext(personWorkspace, workspace, card, undefined, lastUpdate) const newContext = { id: contextId, card, diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index 6043bf1f4fb..20e3b42ba2c 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -10,7 +10,8 @@ import { type FindNotificationsParams, type FindNotificationContextParams, type NotificationContext, - type Notification + type Notification, + type ThreadID } from '@communication/types' import type { DbAdapter } from '@communication/sdk-types' @@ -31,12 +32,14 @@ export class SqliteAdapter implements DbAdapter { this.notification = new NotificationsDb(worker, dbId) } - async createMessage(content: RichText, creator: SocialID, created: Date): Promise { - return await this.message.createMessage(content, creator, created) - } - - async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { - return await this.message.placeMessage(message, card, workspace) + async createMessage( + workspace: string, + thread: ThreadID, + content: RichText, + creator: SocialID, + created: Date + ): Promise { + return await this.message.createMessage(workspace, thread, content, creator, created) } async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts index b6d751893e2..581fc0f51b7 100644 --- a/packages/sqlite-wasm/src/db/message.ts +++ b/packages/sqlite-wasm/src/db/message.ts @@ -6,14 +6,13 @@ import { SortOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment + Direction, type Reaction, type Attachment, type ThreadID } from '@communication/types' import {BaseDb} from './base.ts' import { TableName, type MessageDb, - type MessagePlaceDb, type AttachmentDb, type ReactionDb, type PatchDb @@ -21,9 +20,11 @@ import { export class MessagesDb extends BaseDb { //Message - async createMessage(content: RichText, creator: SocialID, created: Date): Promise { + async createMessage(workspace: string, thread: ThreadID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: MessageDb = { id: self.crypto.randomUUID(), + workspace_id: workspace, + thread_id: thread, content: content, creator: creator, created: created, @@ -36,15 +37,6 @@ export class MessagesDb extends BaseDb { await this.remove(TableName.Message, {id: message}) } - async placeMessage(message: MessageID, card: CardID, workspace: string): Promise { - const dbData: MessagePlaceDb = { - workspace_id: workspace, - card_id: card, - message_id: message - } - await this.insert(TableName.MessagePlace, dbData) - } - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: PatchDb = { id: self.crypto.randomUUID(), @@ -97,6 +89,7 @@ export class MessagesDb extends BaseDb { //Find messages async 
find(workspace: string, params: FindMessagesParams): Promise { const select = `SELECT m.id, + m.thread_id, m.content, m.creator, m.created, @@ -124,12 +117,11 @@ export class MessagesDb extends BaseDb { ) ) AS reactions FROM ${TableName.Message} m - INNER JOIN ${TableName.MessagePlace} mp ON m.id = mp.message_id - LEFT JOIN ${TableName.Patch} p ON p.message_id = m.id - LEFT JOIN ${TableName.Attachment} a ON a.message_id = m.id - LEFT JOIN ${TableName.Reaction} r ON r.message_id = m.id` + LEFT JOIN ${TableName.Patch} p ON p.message_id = m.id + LEFT JOIN ${TableName.Attachment} a ON a.message_id = m.id + LEFT JOIN ${TableName.Reaction} r ON r.message_id = m.id` - const where= this.buildMessageWhere(workspace, params) + const where = this.buildMessageWhere(workspace, params) const groupBy = `GROUP BY m.id` const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' const limit = params.limit ? ` LIMIT ${params.limit}` : '' @@ -141,34 +133,27 @@ export class MessagesDb extends BaseDb { } buildMessageWhere(workspace: string, params: FindMessagesParams): string { - const where: string[] = [`mp.workspace_id = '${workspace}'`] - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'id': { - where.push(`m.id = '${value}'`) - break - } - case 'card': { - where.push(`mp.card_id = '${value}'`) - break - } - case 'from': { - if(value == null) continue - const exclude = params.excluded ?? false - const direction = params.direction ?? Direction.Forward - const getOperator = () => { - if (exclude) { - return direction === Direction.Forward ? '>' : '<' - } else { - return direction === Direction.Forward ? '>=' : '<=' - } - } - - where.push(`m.created ${getOperator()} ${value}`) - break + const where: string[] = [`m.workspace_id = '${workspace}'`] + + if (params.thread != null) { + where.push(`m.thread_id = '${params.thread}'`) + } + if (params.id != null) { + where.push(`m.id = '${params.id}'`) + } + + if (params.from != null) { + const exclude = params.excluded ?? false + const direction = params.direction ?? Direction.Forward + const getOperator = () => { + if (exclude) { + return direction === Direction.Forward ? '>' : '<' + } else { + return direction === Direction.Forward ? '>=' : '<=' } } + + where.push(`m.created ${getOperator()} ${params.from}`) } return `WHERE ${where.join(' AND ')}` @@ -183,6 +168,7 @@ export class MessagesDb extends BaseDb { return { id: row.id, + thread: row.thread_id, content: lastPatch?.content ?? 
row.content, creator: row.creator, created: new Date(row.created), diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index a917b62e513..2cf901a866e 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -9,7 +9,7 @@ import { } from '@communication/types' import {BaseDb} from './base.ts' -import {TableName, type ContextDb, type NotificationDb } from './types.ts' +import {TableName, type ContextDb, type NotificationDb} from './types.ts' export class NotificationsDb extends BaseDb { async createNotification(message: MessageID, context: ContextID): Promise { @@ -80,7 +80,13 @@ export class NotificationsDb extends BaseDb { async findContexts(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { const select = ` - SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update, nc.workspace_id, nc.person_workspace + SELECT nc.id, + nc.card_id, + nc.archived_from, + nc.last_view, + nc.last_update, + nc.workspace_id, + nc.person_workspace FROM ${TableName.NotificationContext} nc`; const where = this.buildContextWhere(params, personWorkspaces, workspace); // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` @@ -96,104 +102,87 @@ export class NotificationsDb extends BaseDb { async findNotifications(params: FindNotificationsParams, personWorkspace: string, workspace?: string): Promise { //TODO: should join with attachments and reactions? const select = ` - SELECT - n.message_id, - n.context_id, - m.content AS message_content, - m.creator AS message_creator, - m.created AS message_created, - nc.card_id, - nc.archived_from, - nc.last_view, - nc.last_update, - json_group_array( - json_object( - 'id', p.id, - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) - ) AS patches - FROM - ${TableName.Notification} n - JOIN - ${TableName.NotificationContext} nc ON n.context_id = nc.id - JOIN - ${TableName.Message} m ON n.message_id = m.id - LEFT JOIN - ${TableName.Patch} p ON p.message_id = m.id + SELECT n.message_id, + n.context_id, + m.thread_id AS message_thread, + m.content AS message_content, + m.creator AS message_creator, + m.created AS message_created, + nc.card_id, + nc.archived_from, + nc.last_view, + nc.last_update, + json_group_array( + json_object( + 'id', p.id, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + ) AS patches + FROM ${TableName.Notification} n + JOIN + ${TableName.NotificationContext} nc ON n.context_id = nc.id + JOIN + ${TableName.Message} m ON n.message_id = m.id + LEFT JOIN + ${TableName.Patch} p ON p.message_id = m.id `; const where = this.buildNotificationWhere(params, personWorkspace, workspace) const groupBy = `GROUP BY n.message_id, n.context_id, m.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update`; const orderBy = `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` const limit = params.limit ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, groupBy,orderBy, limit].join(' ') + const sql = [select, where, groupBy, orderBy, limit].join(' ') const result = await this.select(sql) return result.map(it => this.toNotification(it)); } - buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string, ): string { + buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): string { const where: string[] = [] - if(workspace != null) { + if (workspace != null) { where.push(`nc.workspace_id = '${workspace}'`) } - if(personWorkspaces.length > 0) { + if (personWorkspaces.length > 0) { where.push(`nc.person_workspace IN (${personWorkspaces.map(it => `'${it}'`).join(', ')})`) } - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'card': { - where.push(`nc.card_id = '${value}'`) - break - } - } + if (params.card != null) { + where.push(`nc.card_id = '${params.card}'`) } return `WHERE ${where.join(' AND ')}` } - buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string ): string { + buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string): string { const where: string[] = [`nc.person_workspace = '${personWorkspace}'`] - if(workspace != null) { + if (workspace != null) { where.push(`nc.workspace_id = '${workspace}'`) } - for (const key of Object.keys(params)) { - const value = (params as any)[key] - switch (key) { - case 'context': { - where.push(`n.context = '${value}'`) - break - } - case 'card': { - where.push(`nc.card_id = '${value}'`) - break - } - case 'read': { - if (value === true) { - where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) - } else if (value === false) { - where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) - } - break - } - case 'archived': { - if (value === true) { - where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) - } else if (value === false) { - where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) - } - break - } - } - } - - return `WHERE ${where.join(' AND ')}` + if (params.context != null) { + where.push(`n.context_id = '${params.context}'`) + } + + if (params.read === true) { + where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) + } + + if (params.read === false) { + where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) + } + + if (params.archived === true) { + where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) + } + + if (params.archived === false) { + where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) + } + + return `WHERE ${where.join(' AND ')}` } toNotificationContext(row: any): NotificationContext { @@ -217,6 +206,7 @@ export class NotificationsDb extends BaseDb { return { message: { id: row.message_id, + thread: row.message_thread, content: lastPatch?.content ?? 
row.message_content, creator: row.message_creator, created, diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts index ac6f7940fcf..4c7285fc46e 100644 --- a/packages/sqlite-wasm/src/db/types.ts +++ b/packages/sqlite-wasm/src/db/types.ts @@ -3,7 +3,6 @@ import type {CardID, ContextID, MessageID, RichText, SocialID } from "@communica export enum TableName { Message = 'message', Patch = 'patch', - MessagePlace = 'message_place', Attachment = 'attachment', Reaction = 'reaction', Notification = 'notification', @@ -12,6 +11,8 @@ export enum TableName { export interface MessageDb { id: string + workspace_id: string, + thread_id: string, content: RichText, creator: SocialID, created: Date, @@ -25,12 +26,6 @@ export interface PatchDb { created: Date, } -export interface MessagePlaceDb { - workspace_id: string, - card_id: CardID, - message_id: MessageID -} - export interface ReactionDb { message_id: MessageID, reaction: string, diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts index 3a0259c6c28..1a59020ab3c 100644 --- a/packages/sqlite-wasm/src/migrations.ts +++ b/packages/sqlite-wasm/src/migrations.ts @@ -10,40 +10,17 @@ async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promis sql: ` CREATE TABLE IF NOT EXISTS message ( - id TEXT NOT NULL, - content TEXT NOT NULL, - creator TEXT NOT NULL, - created DATETIME NOT NULL, + id TEXT NOT NULL, + workspace_id TEXT NOT NULL, + thread_id TEXT NOT NULL, + content TEXT NOT NULL, + creator TEXT NOT NULL, + created DATETIME NOT NULL, PRIMARY KEY (id) ) ` }) - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS message_place - ( - workspace_id TEXT NOT NULL, - card_id TEXT NOT NULL, - message_id TEXT NOT NULL, - - PRIMARY KEY (workspace_id, card_id, message_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE - ) - ` - }) - - await worker('exec', { - dbId, - sql: `CREATE INDEX IF NOT EXISTS idx_message_place_workspace_card ON message_place (workspace_id, card_id)` - }) - - await worker('exec', { - dbId, - sql: `CREATE INDEX IF NOT EXISTS idx_message_place_message_id ON message_place (message_id)` - }) - await worker('exec', { dbId, sql: ` @@ -110,29 +87,29 @@ async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promis await worker('exec', { dbId, sql: ` - CREATE TABLE IF NOT EXISTS notification_context - ( - id TEXT NOT NULL, - workspace_id TEXT NOT NULL, - card_id TEXT NOT NULL, - person_workspace TEXT NOT NULL, - archived_from DATETIME, - last_view DATETIME, - last_update DATETIME, + CREATE TABLE IF NOT EXISTS notification_context + ( + id TEXT NOT NULL, + workspace_id TEXT NOT NULL, + card_id TEXT NOT NULL, + person_workspace TEXT NOT NULL, + archived_from DATETIME, + last_view DATETIME, + last_update DATETIME, - PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, person_workspace) - ); + PRIMARY KEY (id), + UNIQUE (workspace_id, card_id, person_workspace) + ); - CREATE TABLE IF NOT EXISTS notification - ( - message_id TEXT NOT NULL, - context_id TEXT NOT NULL, + CREATE TABLE IF NOT EXISTS notification + ( + message_id TEXT NOT NULL, + context_id TEXT NOT NULL, - PRIMARY KEY (message_id, context_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE, - FOREIGN KEY (context_id) REFERENCES notification_context (id) ON DELETE CASCADE - ); - ` + PRIMARY KEY (message_id, context_id), + FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE, + FOREIGN KEY (context_id) 
REFERENCES notification_context (id) ON DELETE CASCADE + ); + ` }) } diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 5641439e2e2..df3c18bd420 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,5 +1,6 @@ -export type CardID = string -export type SocialID = string +export type CardID = string & { card: true } +export type SocialID = string & { social: true } +export type ThreadID = string & { thread: true } export type RichText = string export type ID = string @@ -12,6 +13,7 @@ interface Object { export interface Message extends Object { id: MessageID + thread: ThreadID content: RichText edited: Date reactions: Reaction[] diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 302893542a1..7109e761a14 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -1,4 +1,4 @@ -import type { CardID, MessageID } from './message' +import type { CardID, MessageID, ThreadID } from './message' import type { ContextID } from './notification' export enum SortOrder { @@ -31,7 +31,7 @@ interface FindParams { export interface FindMessagesParams extends FindParams { id?: MessageID - card?: CardID + thread?: ThreadID } export interface FindNotificationsParams extends FindParams { From 5a8e0cb36487d382e5d2f12cd7509ee71518cff7 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 26 Dec 2024 22:44:16 +0400 Subject: [PATCH 020/636] Add types generation (#12) --- .gitignore | 3 +++ package.json | 2 +- packages/client-query/package.json | 1 + packages/client-query/tsconfig.json | 3 ++- packages/client-sqlite/package.json | 1 + packages/client-ws/package.json | 1 + packages/client-ws/tsconfig.json | 3 ++- packages/cockroach/package.json | 1 + packages/cockroach/tsconfig.json | 2 ++ packages/examples/package.json | 1 + packages/examples/tsconfig.json | 2 ++ packages/query/package.json | 1 + packages/query/tsconfig.json | 3 ++- packages/sdk-types/package.json | 1 + packages/sdk-types/tsconfig.json | 2 ++ packages/server/package.json | 1 + packages/server/tsconfig.json | 2 ++ packages/sqlite-wasm/package.json | 1 + packages/sqlite-wasm/tsconfig.json | 2 ++ packages/types/package.json | 1 + packages/types/tsconfig.json | 2 ++ scripts/build.sh | 5 +++++ tsconfig.json | 5 ++++- 23 files changed, 41 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index cef2ceb3c7a..899d5701a52 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,9 @@ # build output dist/ lib/ +**/types/ + +!packages/types/ # VS Code settings .vscode/settings.json diff --git a/package.json b/package.json index c69f5f7f260..e413684ec9d 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "type": "module", "workspaces": ["packages/*"], "scripts": { - "build": "bun x tsc --noEmit && sh scripts/build.sh", + "build": "sh scripts/build.sh && tsc --noEmit", "lint": "eslint packages/**/src/*.ts", "format": "prettier --write packages/**/src/*.ts && bun run lint" }, diff --git a/packages/client-query/package.json b/packages/client-query/package.json index b5582f16cd6..2d40fcb6ddc 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/client-query/tsconfig.json b/packages/client-query/tsconfig.json index 3ae07cd3fa2..e7a6cb178dd 100644 --- a/packages/client-query/tsconfig.json +++ 
b/packages/client-query/tsconfig.json @@ -1,7 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { - "jsx": "react-jsx", + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 55565de3c5e..339dcef1239 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index cfe8cb09187..e8aafd03604 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/client-ws/tsconfig.json b/packages/client-ws/tsconfig.json index 3ae07cd3fa2..e7a6cb178dd 100644 --- a/packages/client-ws/tsconfig.json +++ b/packages/client-ws/tsconfig.json @@ -1,7 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { - "jsx": "react-jsx", + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 0cdf1bb26c1..4918593c650 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/cockroach/tsconfig.json b/packages/cockroach/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/cockroach/tsconfig.json +++ b/packages/cockroach/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/examples/package.json b/packages/examples/package.json index c21d65d65a4..871f2b35536 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/examples/tsconfig.json b/packages/examples/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/examples/tsconfig.json +++ b/packages/examples/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/query/package.json b/packages/query/package.json index f613302134f..b09ef90b558 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14", diff --git a/packages/query/tsconfig.json b/packages/query/tsconfig.json index 3ae07cd3fa2..e7a6cb178dd 100644 --- a/packages/query/tsconfig.json +++ b/packages/query/tsconfig.json @@ -1,7 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { - "jsx": "react-jsx", + "declarationDir": 
"./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index eccea9085e0..9e46522b2c2 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/sdk-types/tsconfig.json b/packages/sdk-types/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/sdk-types/tsconfig.json +++ b/packages/sdk-types/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/server/package.json b/packages/server/package.json index 43c5e0cd851..86de3c16c6f 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14", diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/server/tsconfig.json +++ b/packages/server/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 782d845bdfc..b89affa87f3 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/sqlite-wasm/tsconfig.json b/packages/sqlite-wasm/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/sqlite-wasm/tsconfig.json +++ b/packages/sqlite-wasm/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/packages/types/package.json b/packages/types/package.json index 9c80d980935..fa2e9fbcf46 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", + "types": "./types/index.d.ts", "type": "module", "devDependencies": { "@types/bun": "^1.1.14" diff --git a/packages/types/tsconfig.json b/packages/types/tsconfig.json index 49e05cea1ee..e7a6cb178dd 100644 --- a/packages/types/tsconfig.json +++ b/packages/types/tsconfig.json @@ -1,6 +1,8 @@ { "extends": "../../tsconfig.json", "compilerOptions": { + "declarationDir": "./types", + "emitDeclarationOnly": true, "outDir": "./dist", "rootDir": "./src" }, diff --git a/scripts/build.sh b/scripts/build.sh index 6fee4b135bb..24777cd63b5 100644 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -7,19 +7,24 @@ for package in "$PACKAGES_DIR"/*; do PACKAGE_NAME=$(basename "$package") ENTRY_POINT="$package/src/index.ts" OUT_DIR="$package/dist" + TYPES_OUT_DIR="$package/types" + TSCONFIG="$package/tsconfig.json" echo "Building package: $PACKAGE_NAME" if [ -f "$ENTRY_POINT" ]; then + tsc --project "$TSCONFIG" --emitDeclarationOnly --declarationDir "$TYPES_OUT_DIR" bun 
build "$ENTRY_POINT" --outdir "$OUT_DIR" --target bun if [ $? -eq 0 ]; then echo "Package $PACKAGE_NAME built successfully" else echo "Error building package $PACKAGE_NAME" + continue fi else echo "Entry point $ENTRY_POINT not found for package $PACKAGE_NAME" + continue fi fi done diff --git a/tsconfig.json b/tsconfig.json index 42f77bc858b..746e7f39354 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,13 +11,16 @@ "moduleResolution": "bundler", "allowImportingTsExtensions": true, "verbatimModuleSyntax": true, - "noEmit": true, // Best practices "strict": true, "skipLibCheck": true, "noFallthroughCasesInSwitch": true, + "declaration": true, + "emitDeclarationOnly": true, + "noEmit": false, + // Some stricter flags (disabled by default) "noUnusedLocals": false, "noUnusedParameters": false, From 532d1fe31b9c325e2d46beda5dd470012089b1ae Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 26 Dec 2024 22:58:58 +0400 Subject: [PATCH 021/636] Rename packages (#13) --- bun.lockb | Bin 104968 -> 105104 bytes package.json | 2 +- packages/client-query/package.json | 15 +++++++++++---- packages/client-query/src/index.ts | 4 ++-- packages/client-query/src/query.ts | 10 +++++++--- packages/client-sqlite/package.json | 15 +++++++++++---- packages/client-sqlite/src/client.ts | 6 +++--- packages/client-sqlite/src/index.ts | 2 +- packages/client-ws/package.json | 13 ++++++++++--- packages/client-ws/src/client.ts | 4 ++-- packages/client-ws/src/connection.ts | 2 +- packages/cockroach/package.json | 10 +++++++--- packages/cockroach/src/adapter.ts | 4 ++-- packages/cockroach/src/db/message.ts | 2 +- packages/cockroach/src/db/notification.ts | 2 +- packages/cockroach/src/db/types.ts | 2 +- packages/examples/package.json | 14 +++++++++----- packages/examples/src/index.ts | 8 ++++---- packages/query/package.json | 10 +++++++--- packages/query/src/lq.ts | 4 ++-- packages/query/src/messages/query.ts | 4 ++-- packages/query/src/notifications/query.ts | 4 ++-- packages/query/src/query.ts | 4 ++-- packages/query/src/result.ts | 2 +- packages/query/src/types.ts | 4 ++-- packages/query/src/window.ts | 2 +- packages/sdk-types/package.json | 11 +++++++++-- packages/sdk-types/src/client.ts | 4 ++-- packages/sdk-types/src/db.ts | 2 +- packages/sdk-types/src/event.ts | 2 +- packages/sdk-types/src/query.ts | 2 +- packages/server/package.json | 15 +++++++++++---- packages/server/src/eventProcessor.ts | 4 ++-- packages/server/src/main.ts | 6 +++--- packages/server/src/manager.ts | 4 ++-- packages/server/src/session.ts | 4 ++-- packages/server/src/triggers.ts | 4 ++-- packages/server/src/types.ts | 2 +- packages/server/src/utils/serialize.ts | 2 +- packages/sqlite-wasm/package.json | 10 +++++++--- packages/sqlite-wasm/src/adapter.ts | 4 ++-- packages/sqlite-wasm/src/db/message.ts | 2 +- packages/sqlite-wasm/src/db/notification.ts | 2 +- packages/sqlite-wasm/src/db/types.ts | 2 +- packages/types/package.json | 9 ++++++++- 45 files changed, 151 insertions(+), 89 deletions(-) diff --git a/bun.lockb b/bun.lockb index f7b7b222768b8fa236328c717b66a251fd76d9b5..3d6f8d9cb292102776bc804bc88bc5a28b16dc37 100755 GIT binary patch delta 11352 zcmb_i34D}Avj6%cljIu+i$$z1og3suD*YL2dwPE9>14g`d3}u)zwwi zRn`4<&$1n!D|dRXv|88@|Dm@jDfzGNdo8`=(dgqho%}%_wxI8!PZr&MZB5X7Yo)b) zQ*wK5mnF$hlH5*Lb7O0lB>9>o$qd{D=nL5FyrosbU!&eWQ-?PLD(IiNf3hUT_9DG%I;a5p-J0yZ|Zwskrs zX{(OUYiymH(%mk#gNXW_I&4pAZmX~Df)ve-Esb506P?t*0pm!4^FWC2yyn`jF6Ufn zfphUKq|g(PVyM(3xgP={N$zxYcQrP%$msQo73fq-+Jb=?in?qWSg#`f!sxI?v><*=ByN 
zD+OXUT)=`5>wtsQeLEltW*F2AJR0~p2txgX5t1|pFaU5A;5IM*Et()(0Y~(JFkc_wa<<&!?3APoNDSWY z^N<)ax!;Cnq`@iRG$9Gx5U31vLVrLD+TUf86wjabNa|S?ud!}N3%I+V1{@8z zQ1{jr7goJ~LFb7?%@*^MB?K#RB@B?8QAQ{~PNQ)DwLoYyb>mNWr4AA9loeL95U+MU7bv{mK%Dl!- zSJx&T-_Ez1MwuViu_bBzgvs8s1Sr|(BS2!--P+#Rj@W)n$KTM&AJ*Xqxtbj_^ECR- z+Jz}~ZF3jbE$$+RlDZbRJ1P4mj?=V^1|++;G`2WX=GI9ax-|py)$u*<)u>QRECcif zYy>22Jqt7~?S)!$Kq_ck*ovhAPTFSz4h3vzYge@o*6D7>cv8p&oPzm#@FB%M1SHS- z5&b04LO?$$Npjx@&8Xq2B5mM09kw{T=C{ptNwxL$&h{>sw1O9VjqQ1)M4P@CUJ#B^ zOUG-YTLFo^?HX;`1AydT6%#ave-1chDF0GPLM*uZfs>=p#<&2$>WP{K6tcbRNV(=ci@G$o*aw{KwG)u+`V3F- z9@~=%@d=w(sX67>fF#BzfW+gA`%5Nk>J%I@1_{4)BjVnOVklOXt z+G;Am5wh;Jx;lHc_Tlb1XuudZ@`RMI{iJuehi~Yo@>}7_fxZ1R-w#X*4t44n2Rjl2sWe zI%+G8{#Ou&(9`@nYh{CF5hM175;X1i1+$C*bjWb-(j+ON>TwooL9g+c}0Mo zZRWiJ4&@`XVqM*gAN9A%30~YBXqP*^ctxO{9p$}&4%x?>52CHco125|N}o4Us7l`# zXp@I1e0h*v9Io27BLpSSWnTHrcvZ{nm-h#)}R9vl)W z1;A*QA-ra!P3|1V&7pQ>Ga8A-5F%lp^WIR0k`B)zLYemk+2jTvZVt1vXLxxSY=SPb zsF~M<*-SBTlF58Ub%6&e<9OA`eD($}4|ga(11|!ZH$NI?Q_64*K*E3+G-wI1DcmPG zpB>}n5e~%z`5{@GE!3u@0V8?5c#XeJxg8iSfvl=?1Q-GpvXXNCewsyw@S{OCWiGG` zm68nFe-R_fgF5EH{m>Tx4@a0{2FPZ*7FaGxqdbTj&8$kPoB&31!3G!;hbSULGTs+p zlj{Qb;3&JY2aPwVBu9g7rf`Jq6u!bgUs-@!mddEl-^SMSfoKPNiz_h>B@)L2Bq*?f za;EEm6{{oGpf*Eek3~4c6^ld89?2^#cBKvJq!?WiKN@XgFYp12L-`hM*=SSUT1h~H zB3r`_u=O%vlQj%h`4||jqz8wHN*GcQ`H-g5RAAaTT9r+Nsdo4rHPRfq2HH$HIH`=| zhhd*~)NE)UPS%Gz^u{{a`+OkQp%fwoR%m=-oqNN0MVwtZg2t(6gzq7G3d8weoZWOE zlIu*q!Y|*{gmhV^*3O_dN3G3>#OYD3ZA7hdhunt_o=nAEj?5g76D zQoZaOU^EWafIp6n(R>7fiV%0k@a0K%wu=uWIoN5gBs=723$IAFD;<`LQe#c_0@D+T z#in?UmLvx_%KT`&O_>Ue_#ytrMtl_V6nW@a zzC6t?&l<}I)9mbVuB1EIDPErLkb~m+@^rgADV`6e+f5!aHVR&|Kl7msZ_`2S3W0?1 zz2S;?JdSMOHSomu!CMh03 zMyLofn|wneU!G-W8~8w$Ly1b#(ww%MtB^igF_(tPDSwN#1M9rl_u()4rdTDZ{bJu< zU|j~r*7AWIhvJi_t+$p5C#CTUn_Za;y(q*a?uUf64H%^jbpe#Kz$g^pE^w@I8Jd`C zyemt9U8D9PP}u=Kkn2!}W*WhUHIl0`c}1Sx^Z**u`Qg}n(;n0il>zz6KfkOY2M z2$#v#SYeuOU|Js{Q`rNoND~23@v}~;#k>t^gT^7BCfk%YU{H`Sj9U+kxTvC=P69*Z z1m-Kj@F8*|cwHDo%;C!m>~c>IA1tsdThXEAZg4Q21eV5q!txa#BnsjT#Zw_IFiqM% zE2ITRey7d41(+r+@|*Wpz##A?CmoG(dN+cl9+)m7^5h0y?x48qbvP6U4z5H8rck;E zn5Jf5fz7lFSO)hA%x9nR-Xe#RjoFEdMB9<<8pqAWc4XG_Vh8(#_ZB;pXyj;pQfT!Y zuqmn~0Rl_7&-e#+L?<9w5 zuViEpN*bmzZmzV;g=M^=(ypw>;*nhNWXN@j4^-l;Q+{zdDiIODG{+%hoC2l=Ghs=n zB7#XP*!xCc8ZW<6oA-KPP(fd~e8pSGH6I^4$uKId)QviZ{TefJVB{*03HkqZ!uVlC z&=07=Z=hREWUe<>y;emqwQQ>56;teTWffmO#SX&`OmQHaC^ZgtftS}fl<7!L_&%Xn zNVc&xd;mRPa%HMRPO9b=Q|-(uI@T~7oflvK9$`%E(>;U)!>+`9cwZ=@BLOV}{z&jd(SPn7d0GS&4qFVwwsI)YU7M)?+n z{=Cs3iyAWv54vA#k%sDIkU`26cbZv1Py(8WlaJozgL>$$jb?T)>j^|X0L35WLG(X> zav#bHl>1SZqpU@F1Z54%I+RwFHWa$0XhCU4`5xoVfF$Aw6w-j!iRSzo@F(g*{To2i zg)#qbbSgah*R{3QD zk4K>u4@HUGl|6#3_v(Q!x{XiHXiqb^1yKtieA8`wltz21p@WvS=!pX~K3}6f)i6WL zF}=t5`i=J7ixzl~n-(Y*(D?L?_Eh5(Ed^vP3o6D}aX8rkniua=N1CpWoE&-AfH#|y zXHCzxrb%{LoDF6na+xg5AuL0#k%c`3c!MmiBm8z*+z*_+Ew;w9P z!V=AiNH0hpxIPubLs>|;ah-Z<**jfZCl7kdY)+OnJ;RzV?GVMG7=KXA28b~3b)9#7 zwy^X1Q_p}d)0zQsq%2vi1g9M1CfHTEV8+ekmn;QE8YpnGGVYFN-5c|(yq)J?Q+v{L ztU1yU5Ahz2HEx@~Juy0H!^V#sDh1?#lX_@ZG4?s~?&Dv1zmJ}DYZ``1B_1LTCzrmk z31pepbP{)is1IkEY^x9ic8fy8TR^@l_lk0rVN zDXU`k`47nKjx=i)nOvs`H?FL&-*d+F$a`PLW6pHwfMec^Vtpi5&3HE%SaI9%VEKO* z$t(@VgIV!VB92G0j0ocaWM<8w$+f;aUI&XDkYJK_v#>s&MVa z1DBHtQFYI*s;ib+rq1$FaW0BwMi?&}t5+56KmNPxLclTG3SXhO53+ya=#7Y^Fqq1C z>!|&&nAaw*4*g$HW`PoOtE>V&RsYiJ;^k3nn%wFue4`=DK9LZOMY$kujD~NU{KcYZ zmcb5-O@u}Shyy^gudu?<7H&|4>n|McO)=L!abfAtV=x(cFtl7R4qLED#>>a&!=ginE{V+m z0hts8@nJ$9ebq@|(kY`^qHGBlw~c1e?38$PG=lcD*b@s&=$@yif-A&2&2(Oj8Usm; zw-Wi?dv06Zo7pO}TrKC|Ekc|fgV1Rn!~7$R9AZ4R%zN9hZAMD&pMBZX2(fky#MmZ& zM(+9N{MewVN?=JQm7{oc42u*qV_BA587c0MMRxiJ*3E-yvLs2<@Sjhl$Gkyky!xz< 
zbtfk+b=LVZdtsKE9qPTr4RLTSHFCn(D6tY)xbcXyCwbk8{23t?>f)(MZL8Q9$1>&F zqr`V{@VyNpX)H7jiPjEih2QMmI_R4{AM`dvBs^?#B{q*`RdW6) z@grKpjmMySy%HX)dU6}3SBhjD0k(=%O5>VE<$wq=Hy+dJ>gzdPv!Y_Gi*9}6h0L{k z#-``x5$C`&OUobnYU_j#e$id$K8k%TG!RD0RXq}W&5;CF z$(|6AiBP^NPE1Y20OP^v$wR9buWvfmh)k7h&9$C5=LMnOe+Dr3@^KUkG5Rgqc7iYE&6_or>}VJ zDWdh)ZHX&)l{8z}J@Mjn5?gjT_lPU=xJ-@+<9`1orF3#}_e(FwvpE@71e8t6OccH; zETrdhT|uGREmJy%6io*jp4RRQdP%ZnPyu_d`rBU!$w9S#~AN>O|w&H{r(Rg$lN#| zs5D=R4e8+ZgLoMr!gyS~yV|?8;I1{Zh)#`pDKK4}O~?7hc#d3X|H`?yd8L&o)XhyY z9xT`GPTFN(ymg!=2aY#VQM#~aKu!Iv(|vM$@w+E$E^MSdJeyLWq^V-qLsxg#D;dm# zg&S{_JAU)wuSVFO$0_IzQV_fFKeURbkhms`O_nX`VpSG$%y#j57E+XMZo`_I8F$rX zvna+s5O?PMorP5ch6Wn1o!|Rdb_NwaepY5<^TdxiY}_wL$#nB@6Jb2dS_}3$KV3d;=B2T5h2n#8*oIH!u`oHaP@K<$ z!Hn15{@8=gZvScf{*GMM8SAncsCaf9xEL1DS?e?r##8baTM9P>?wp|=Ei>rcZ4A&? zfvpp@`S5|K#H|3~(fE4B_nJGEaWUsmE%_ZjCajrR(h9L9A4eU04^^*mG??xO8lo@sv z^>13GSVv__|2UbN)%Ar#U?J}54=OODSf^g$q{e1bj-;^BAhhhe=uZ3SRHl(|& zL^2&6j3a~&bFMtFWU{u2ZrtwwyI0?|%)7)=9~)sj&TpJm)9d%=O&xznadEcG)^3XN zd~ZCdV>73dB{J^6x2oDwz<17E1Klp0rD+t5`M0+tmMh+lSgv?GV!3!b@-H7|ru-*5 z=s#R=FFBz7n83nzRBJyhxlF68a94k81lKe!Pb^owJh^n3yMpbe7(cWyu5~O|yw>?w zj*QwY57p*HX*%7VoBak`H2f#yH%k}#%kus9|9W3tW2?2vB}qwJPrMWN7JE1=Ra?O^ z=L3W4b3Q(EBXGKa?R)3NwGof>2F?dg_fiMhmbH-&J<-2HoSMLbuR|=<8>2R*i{X+Yu`k6zRKKA*L?Wf+b^0{vzY6RYfIyj(zuc| zPV&Zeqv3?cO`>scXgIoYC+ICS{-5qD?q{Q9a|qpP<8S^NnlEnF|Mz?^==`O%!gm4- n7p65VX@YUjYTT;sg#h$1V%(S-*Q3T|sB!gaTyz>Q+5GWm$k%1$oKElxl_SC8ni~ zF{WmjTWW5ZuiQ;nZG9xTWtO|8rE6smtF_G5Ew{)2dwvfrHuv@U^Wl8XoS8Fc&N*}D z%=~8VvYp!vW66-TV0q2Q&m8X;_~n_`rtNmTF#oaT z2CWbZ$>wUJ|e`+HXU9Z{S?uSV?MWt@0$ztf{Y%p0nzVDQM^ghN_u$^^I;Y z8T5>*y6H)C8>Dj>mD*1lxFM;gzOuXtTGUk4Ry9c{p*^(+qaPVi4T21sQB&U3W0+tbkKh9Z}GNKzp9mY|+2 zFR!d`B-eb5da`Ic%%l1@K?ea(cUR1viS}9#4JS!QP)~}zkHKg>C$#h77J1;%W=sJ2 zW>&c;32M%)15KO-0pI{$223WDS8JL}p->|Xw1t<;{aRjxTrjk50w(T9jq&FJ_Xk~V z)UOHtdHWR9Q@@Rf0Fpi$G&$^iU)|1XPf~MvZH-jkIJ2R=(c?aida^SxM91F17_X&u z9SUSPO=CG6m!RSqNMPqrT8?&$kG}XTWO#Y6+sMKET zZk*{(yBccy(4RapqukRp2Q$~`p6jWa(JX~T>7F?WOp3%tVxNC2BH%iho z(3C8y28Ob&V__LtFbOmbh=6Kps&Y5J!w`p{JMTet?RJ#tC zCTX4#t+gJ^dgbiK(!shf-c6LGA!vWWzzcyLpeGx20&o)OE8tHCJ_SrJxE(|?*yi>t_LQkd)#%?;UcNhpa%nM1GtlB zR5f~<1{m}pzST0o7GzMDlldu&+Hx7%kbiD4I?t_ZsA|C49%In?2D@V5iP^dzHvrS% zjpg%_D(a^Fj;#C>dDG&d3tg{DyW}VCrPV7Q~d^DvT|m9gJ%7M2Hy$vCxe4;{`njwfGI!hu@hX zNr>InZ$VS<_kl_L=Iiufb4Ex~H0Zs+6!4M2VZir~l%#NA8)ypjW9SzMe4tP_XEQKG z-$wY7a6`SPX(r;NffA=A4VS(jr7LhAm;(3=F!`ZjW)k_{Gonb3p7~9BSU5qGzY>7S zuXdhb9n`V`>JxQpj2@ITN^~`*0+Y0F4E!6kE1+KiCN*CitDCnIGQeDm($fJIj zp(APa7V2p>kAo(+EC42RGRA53zOCa?L4H8uk?h+~TkT$cXD3!K`?Pe0r^cCIaduzf zQf~E0@mrbjRmaRH1IzLzd|Q2c!MGLMNBevtTR$4cU)x?<^#`8q_j2Tvwb?d?F%Fi@bXAK-UY2o1m*AGecl0Gh&=zH$Y__|R^YmfTpKe&*; zyf>e>`dgXI+x%V19$Atyp^?Iu`#a@_ytpkumH+O=ivv{V;%xygR?j;ET=HfMw*{*5 z#}-~3sIn;D7U)uL@RlS@Tr1+6s+FaX-AXiOY&1k#wXRK| zutr)TD9EY204fVq4_?;SDSy<9+d@?(3JZy(_aGK_D{l*RDNm!07-haZ*eSo$o7=)v zX6Hp=@CxFNJZj@*VNUr`AKn?JT0TH!4j1P^3{$UKZd~E`p+IKpT890s)BCMcRV89~5#H zsN=y-?Ez3&s~CawI|_=V=-%#uB|xL=+LnT%@i032 ztpY{v#1scQ`Hz{vu6N2H65;YyTC zS&r3&j0G_;(DFx61zHauY)zAN@tBGVu0*@!O~Jf4T2($kQvsSJemv62Vt7ZiOPPgS zLsOvzxAHhB@;Q6~o4x~uD=4yF z3u()ZLD8fl7(kr`MWJEQ zCOj!p7t~V=x0#@*AKZb!Tn&l>#X=z^pN!;-6IGVTI}%-N8drw6W&pmYa~AE?C?T=gVMx-&jPiA>a-fwkY} zKtRO6X}U&m{@ECAOIBGVFG_YP6L7Yo2tfP<&nX9#?xohIz*D{AW)}n{~2`-RAX1$KV4g@qH8 zt2~GjMUkep{BkB=oTti{GI?j7s>GrJ{?#Q|%0Q)XpRiozZYpVg%95ciC|%p-4rmLC z;!YoRAX191EpnW-5)_m&ViY@@xY~@h@)jsVMdZq0UgV->*XD95+aaIC$Xcqu1j;Zv z&uK}-$u*7p1m&_@cw4?p*#w%TNZM}p@lb9nP?24W3S4X|Z!2&qE1;gC6Owx*vi5k* z6Az%I&n7%8AIs&7htuNd817PDMlvzZ4RB@8VY(Su;c$8`sNoooX}bLilx_p1<=vok 
zM%%4S^(qH1H@W%#W_Cd^DfiMyX1#0$rL!DT^G)t558cHx*Pur|FZyV#X zv`a<;p(G?XkK(oxRo*&^7ni6?ASRFW(pC-fSw{&@J*XoYs4KyW0Hp^GIb%|hxt57~ z0u&Z2Sq1MO2c^sMFLYW1;a*zVT@!bMQK1L6gF)P*>jA!zhF18EuX+aTFRz9fR*x2)a^%|znOJwe(FIsg124ovLHaDzQnL? z;>23Z78ze6pmuE^|K}7Lj`mW_8zUzldvKr6UO)X?>Ikr(#_0D-2qQ-R`C+T2xSy`V zF7Efrj_uQrEMoHSqWOi63hY~Do%bl_T|g8+-MO6&;LDFr^+`|Aze`l$-uz<64L;Kb z>mP2-z@ZrA&h2miF=bfM*HD%IEZ97tr{oQ|44`sP)Zt(13D62aR-?{J_0)(zh3Z6^ zjoE`=)N7<(23s%Cq2g{E3k*(26-n|jntV`B*tN;V9$+m2CvhEX$D? zI?I3_051T!!y7;*lF4KuZMI}rZvYwL3+N-x_h!ivl@Q|w%m7RS+zPl6a1&sv$n;@} zEy$vjIOtGIxrSDev>&h+un({uuoFNYTntzMXaU>=qPn24LaP=~=?UmXMriJxdP*mIK-$W*xu-XadXyj06+{Mgc|xiU4%glqZ(@ zGT|SJVgw)_K#`KLE3*%KqGyX8i_`pYjr6ubcra?l1BU~fpSF?S+LA*Jxh(|P{0NTp z))pvgHX1GFH*}>oW609m+v% z5@%kZnV;^F-rC6wE!w3UVdG0b(px(OqK0Oa#xd^#BE7YDP9zWqJN&43KVYM4!ake zpVxTfdmF%)?nr|=(ncnhK~k1^pX@1_J?XaV7TyPr6ma0!W!^yF^gz@dLw8<2q_w1G zIkKdAJ;WL6Yu;IZeQIFvx=kOrG!Ez?N#?D#I!)el;tT6v(30v%L09R;9%4usE0K@% z5O;*Z?l*gg=fhY?xOpGH`mvb3WBG=!(IZ>e^imIzKY)dZD`BkEZeII`9$BuOzVC(2 zU`uzTlDfg7($3OZtPq4!#8E=I;zv6R&AQD?zhA5ScIY=_t0&meE5niHNJBD0ivq_E zkl&wHcHu&UArCEcy+mm^Cg@%-F)JL3c6y05gim;h*Qx%rm-v+EkG(|y2+(?aNVxgr zlHH!PBI-auhs+kFI5NoP28Z3e_`Y%91}YAZ-;P%xA`2`S>SZ>NMnVx|MJg8Z!cfAGKvU;TE_ zd@(8iL)4*BD?|SV&J1v3Y)w>-Vp8hFEs==rW`8k1 zlBKZ%v6;wa0pc*o%xlarthWT}>+a;DxF6K1ixdn@o<&QjNQ#0DzZj32W^01QT~Ta@ ze7dhli-!NMJ?-cFidJygjdzmP%~4aHx^mx-F&K;@7*+;~SEDgW>yWu9az5)F8QN=M zY#JEIrC?YSD&&FxauB$5{6IEXZVwZ84P=pQl6ZU|7VR{#FBYCKBF{(#*I4T`QneTm z15L~q7x~@$?^@ZGUMI6`J?G#>Lwp&7rBf5b0>aH4Vm{c+IO*CpDJlCyKX%wIR>eS# zIPoKL&%c+)R53PzC0aCz;_(<3AtuMN40(UJSQ3lu^b6h{3fE*{ztt@K_X8Pg-eff2 zj-H5Z9g=vTyTXsD!!oq&@Rql@IS!$vuN-+xf3Xat-Fyq$H)Qpx+({wD+U#jrEmjP2i_HXU<%bW#%Czd2a>x}{8wm}#mMl=sX?1qTv2C=a; z&UdJ_pN!HI%L6?V){b4jjq)okWt<;kMKa}a-KFvab}>C3!x{D)NnUrOVor?Ve)FNt zvuD!g=jA>ZAu~fyAjWhW?(f9`>^f#QAS!$?GFTLn3-$CrlX*+>IXHeUpyv+N8U0xY5RjiwoI! z{p!B%nfk%Yb`7tIW>*G@PZQaq?$X1rEu)(r;pQ{JOG;sB!Q7X2$Fpf^4y-7rv>`$G zC9#l}?zVzM^IN*q{oE4Wb#D1ztTW$lo_qZA!WEf&&PG$h%W{YfN$fuP`vl=|AOw5j z8v&WQ_4Ul4i8sFcbx)dg95|4EzU4aThU~Z+zQ#&pNwv_qyMDXWKxtnIKoalusv^V#se?T0pvyZiHDS6g046hA4nsd1A@j= zDb}SzR=wB_6mGsz-aF1(mv_%&HxZw<=A}g`;>%PVaLiZDdFmJL{WZ%R#G&nOlKJkr zdQajmwRvlft_RLH(vB3Nrol|($+Y$Kbp`L9F1xacHt|f#fs$^D=?}x)U3=4*7qgpB zojW$Ze@7qZ^Ed`AAOo=t|DsoPgT#moRw}orh!q*gF@wY_8AwrvyG?KEZroLo$@()k zQQV#NPaf7B7#d_gnttmOy)!uf$uDKLI$L~~#d3b$OI|Tl)Htz>*A5l;JCRI`$I@1x zzTe#R<&v|%(o&&ziiY!ry)4Ryvy+&M-yZkt=!+y((HZ1pN0s*<|4wiZx)q#BST-%}=BjK6q6( z^Q|`Ii8~)#x^W`y_eSU$%7mM*yNe%_POMIgny{zz}WlgqttYpKlqqE@=2XC{3F$=7Z zD9=RqJQ&V%8>F@}NcJ z2>poJb5ZQmljm&D(^h0EEt(v$Jr7!3n_=$=ArFJ3NRc@VTipF4MfEUNl4U+sA71s+ zEiWDO#Rg_7b!~MI6pGJ>Ax^K&(QV6c{CCM+w5sf>aPv+4;mj}ke&zXU2{cQEyO7Gx zj}od28N_@=fA+=i#%)m(zxUD)7vQjoIWCrNx8r-BQvbn4Ctkbh>(@h#Op1k+AojW7 zPxD>*$Jy)s5D zBM$QsK5^7TE5gTQ(W!)X8H{Dfkc|_8-F%+ET$HnM_^Y`v%#<}l_!nS9`ldt-p`(L& zgfMW{wL6+G`kS@~{PyKz(KD`!H2Q{{Z~U8XDr@uqaPyph;^-Q!+uV&ee+6K^%wsdB zqa`x#e>bc8R3LUPjDT(3MyVbEXa4kh1cw{c{L=ME^fj+XqPwm~{^P^U`2WQQ>`I^zOGO(bv2^xq6tphVRB-hNprM=H|Ul^fm8w{*xo4KFTBI z&qe4q-J6~HM>gO02lJ;-SK3GC`X7AeKt)xZquL`$iCa&-6ZaN-bfW{y3R=Wm4hpW! 
zI(OkXXgY~3f9H3r!q>C~y$c%8j;%-7mQ@jtJk`#`dm~vtdDElf%1Bn|TUlRQJGZV1 zzj>^xuS*s;7qWrf>sA-C$n2lzb10dFWSN)i9pYa43tyBxE#_7ll&9kseH4$t)2`cHJaaf#zS8$=9OJ5-Fo8hww1zfB(v}Ou$}dgZOc~bwP#m~aqTSXR*mKVd+2z& z8^_=L$F!lv(#KfhNb^S2y#H(mLjZzlKd?Wrd68*eTV_B8en--3UP+pd)%^bewM4V1 diff --git a/package.json b/package.json index e413684ec9d..92c68b7cd15 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,5 @@ { - "name": "communication", + "name": "@hcengineering/communication", "type": "module", "workspaces": ["packages/*"], "scripts": { diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 2d40fcb6ddc..f438d35aea7 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,20 +1,27 @@ { - "name": "@communication/client-query", + "name": "@hcengineering/communication-client-query", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", - "@communication/query": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-query": "workspace:*", "fast-equals": "^5.0.1" }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index 4a1c9697018..29a5e3905c1 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -1,5 +1,5 @@ -import { LiveQueries } from '@communication/query' -import type { Client } from '@communication/sdk-types' +import { LiveQueries } from '@hcengineering/communication-query' +import type { Client } from '@hcengineering/communication-sdk-types' import { MessagesQuery, NotificationsQuery } from './query' diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index bbaad06b38e..55b2840c907 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -1,6 +1,10 @@ -import { type LiveQueries } from '@communication/query' -import type { MessagesQueryCallback, NotificationsQueryCallback, QueryCallback } from '@communication/sdk-types' -import { type FindMessagesParams, type FindNotificationsParams } from '@communication/types' +import { type LiveQueries } from '@hcengineering/communication-query' +import type { + MessagesQueryCallback, + NotificationsQueryCallback, + QueryCallback +} from '@hcengineering/communication-sdk-types' +import { type FindMessagesParams, type FindNotificationsParams } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' class BaseQuery

, C extends QueryCallback> { diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 339dcef1239..3e5de830baa 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,20 +1,27 @@ { - "name": "@communication/client-sqlite", + "name": "@hcengineering/communication-client-sqlite", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", - "@communication/sqlite-wasm": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-sqlite-wasm": "workspace:*", "fast-equals": "^5.0.1" }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 9e4e7219fac..e2645b9a855 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -14,15 +14,15 @@ import { type ThreadID, type Attachment, type Reaction -} from '@communication/types' +} from '@hcengineering/communication-types' import { type Client, type MessageCreatedEvent, type DbAdapter, EventType, type BroadcastEvent -} from '@communication/sdk-types' -import { createDbAdapter as createSqliteDbAdapter } from '@communication/sqlite-wasm' +} from '@hcengineering/communication-sdk-types' +import { createDbAdapter as createSqliteDbAdapter } from '@hcengineering/communication-sqlite-wasm' class DbClient implements Client { onEvent: (event: BroadcastEvent) => void = () => {} diff --git a/packages/client-sqlite/src/index.ts b/packages/client-sqlite/src/index.ts index 666c7eef50d..c91338aabfa 100644 --- a/packages/client-sqlite/src/index.ts +++ b/packages/client-sqlite/src/index.ts @@ -1,2 +1,2 @@ -export { type Client } from '@communication/sdk-types' +export { type Client } from '@hcengineering/communication-sdk-types' export * from './client' diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index e8aafd03604..36a0990ad72 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,19 +1,26 @@ { - "name": "@communication/client-ws", + "name": "@hcengineering/communication-client-ws", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", "@msgpack/msgpack": "^3.0.0-beta2" }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index 15eef05da61..83eb39050a7 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -14,7 +14,7 @@ import { type RichText, type SocialID, type ThreadID -} from '@communication/types' +} from '@hcengineering/communication-types' import { type BroadcastEvent, type Client, @@ -35,7 +35,7 @@ import { type 
RemoveNotificationEvent, type RemoveReactionEvent, type UpdateNotificationContextEvent -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' import { WebSocketConnection } from './connection' diff --git a/packages/client-ws/src/connection.ts b/packages/client-ws/src/connection.ts index 679d3f8d064..9844f840053 100644 --- a/packages/client-ws/src/connection.ts +++ b/packages/client-ws/src/connection.ts @@ -1,4 +1,4 @@ -import type { Response, HelloRequest, RequestId, BroadcastEvent, Request } from '@communication/sdk-types' +import type { Response, HelloRequest, RequestId, BroadcastEvent, Request } from '@hcengineering/communication-sdk-types' import { encode, decode } from '@msgpack/msgpack' const PING_TIMEOUT = 10000 diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 4918593c650..17461c75ffc 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,16 +1,20 @@ { - "name": "@communication/cockroach", + "name": "@hcengineering/communication-cockroach", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", "pg": "8.12.0", "postgres": "^3.4.4", "uuid": "^11.0.3" diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index a1841b6252b..1064bbdfe64 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -13,8 +13,8 @@ import { type FindNotificationsParams, type Notification, type ThreadID -} from '@communication/types' -import type { DbAdapter } from '@communication/sdk-types' +} from '@hcengineering/communication-types' +import type { DbAdapter } from '@hcengineering/communication-sdk-types' import { MessagesDb } from './db/message' import { NotificationsDb } from './db/notification' diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index deedf63ac4f..e8e6d5fdca1 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -7,7 +7,7 @@ import { type SocialID, type RichText, Direction, type Reaction, type Attachment, type ThreadID -} from '@communication/types' +} from '@hcengineering/communication-types' import {BaseDb} from './base.ts' import { diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index a2342041d58..ec5d2d125cc 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -6,7 +6,7 @@ import { type FindNotificationContextParams, SortOrder, type FindNotificationsParams, type Notification, type NotificationContextUpdate -} from '@communication/types' +} from '@hcengineering/communication-types' import {BaseDb} from './base.ts' import {TableName, type ContextDb, type NotificationDb} from './types.ts' diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts index 3c7bc183729..2a88a6a80a9 100644 --- a/packages/cockroach/src/db/types.ts +++ b/packages/cockroach/src/db/types.ts @@ -1,4 +1,4 @@ -import type {CardID, ContextID, MessageID, RichText, SocialID, ThreadID } from "@communication/types" +import type {CardID, ContextID, MessageID, RichText, SocialID, ThreadID } from 
"@hcengineering/communication-types" export enum TableName { Message = 'message', diff --git a/packages/examples/package.json b/packages/examples/package.json index 871f2b35536..351cf9cf8d4 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,18 +1,22 @@ { - "name": "@communication/examples", + "name": "@hcengineering/communication-examples", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/client-query": "workspace:*", - "@communication/client-ws": "workspace:*", - "@communication/client-sqlite": "workspace:*", - "@communication/types": "workspace:*" + "@hcengineering/communication-client-query": "workspace:*", + "@hcengineering/communication-client-ws": "workspace:*", + "@hcengineering/communication-client-sqlite": "workspace:*", + "@hcengineering/communication-types": "workspace:*" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index 27c7257439e..bceebf8794d 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -1,7 +1,7 @@ -import { type Message, type SocialID, SortOrder, type ThreadID, type Window } from '@communication/types' -import { getWebsocketClient } from '@communication/client-ws' -import { getSqliteClient } from '@communication/client-sqlite' -import { createMessagesQuery, initLiveQueries } from '@communication/client-query' +import { type Message, type SocialID, SortOrder, type ThreadID, type Window } from '@hcengineering/communication-types' +import { getWebsocketClient } from '@hcengineering/communication-client-ws' +import { getSqliteClient } from '@hcengineering/communication-client-sqlite' +import { createMessagesQuery, initLiveQueries } from '@hcengineering/communication-client-query' const thread = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as ThreadID const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' diff --git a/packages/query/package.json b/packages/query/package.json index b09ef90b558..665cb054414 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,17 +1,21 @@ { - "name": "@communication/query", + "name": "@hcengineering/communication-query", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14", "@types/crypto-js": "^4.2.2" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", "fast-equals": "^5.0.1" }, "peerDependencies": { diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index d19baf7c8af..19a70eba2db 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -1,11 +1,11 @@ -import { type FindMessagesParams, type FindNotificationsParams } from '@communication/types' +import { type FindMessagesParams, type FindNotificationsParams } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' import type { Client, MessagesQueryCallback, NotificationsQueryCallback, BroadcastEvent -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' import type { Query, QueryId } from './types' import { MessagesQuery } from 
'./messages/query' diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index a466d467019..75c79b82bc9 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -4,7 +4,7 @@ import { type Message, type Patch, SortOrder -} from '@communication/types' +} from '@hcengineering/communication-types' import { type AttachmentCreatedEvent, type MessageCreatedEvent, @@ -15,7 +15,7 @@ import { type AttachmentRemovedEvent, type MessageRemovedEvent, type ReactionRemovedEvent -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' import { BaseQuery } from '../query' diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index 8e9d39cfe30..7e39d4496a9 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -3,7 +3,7 @@ import { SortOrder, type Notification, type ID, -} from '@communication/types' +} from '@hcengineering/communication-types' import { type NotificationCreatedEvent, EventType, @@ -11,7 +11,7 @@ import { type NotificationContextRemovedEvent, type NotificationRemovedEvent, type NotificationContextUpdatedEvent, -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' import {BaseQuery} from '../query.ts'; diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts index bec8fc200f7..02e74aa419e 100644 --- a/packages/query/src/query.ts +++ b/packages/query/src/query.ts @@ -1,5 +1,5 @@ -import { Direction, type ID, SortOrder } from '@communication/types' -import { type BroadcastEvent, type QueryCallback, type Client } from '@communication/sdk-types' +import { Direction, type ID, SortOrder } from '@hcengineering/communication-types' +import { type BroadcastEvent, type QueryCallback, type Client } from '@hcengineering/communication-sdk-types' import { QueryResult } from './result' import { defaultQueryParams, type FindParams, type Query, type QueryId } from './types' diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts index 36395f74d51..58e288c9676 100644 --- a/packages/query/src/result.ts +++ b/packages/query/src/result.ts @@ -1,4 +1,4 @@ -import type { ID } from '@communication/types' +import type { ID } from '@hcengineering/communication-types' export class QueryResult { private objectById: Map diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 895e21d3d97..6fcbf17e09a 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -1,5 +1,5 @@ -import { type BroadcastEvent } from '@communication/sdk-types' -import { Direction, SortOrder, type Window } from '@communication/types' +import { type BroadcastEvent } from '@hcengineering/communication-sdk-types' +import { Direction, SortOrder, type Window } from '@hcengineering/communication-types' import { QueryResult } from './result.ts' diff --git a/packages/query/src/window.ts b/packages/query/src/window.ts index 508c2b5d891..824df8615a8 100644 --- a/packages/query/src/window.ts +++ b/packages/query/src/window.ts @@ -1,4 +1,4 @@ -import type { Window } from '@communication/types' +import type { Window } from '@hcengineering/communication-types' import type { Query } from './types' diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 9e46522b2c2..3a12c8e2a2e 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,17 +1,24 @@ { - "name": "@communication/sdk-types", + "name": 
"@hcengineering/communication-sdk-types", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*" + "@hcengineering/communication-types": "workspace:*" }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index d232cb73f8d..05cb861740c 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -11,8 +11,8 @@ import type { SocialID, Notification, ThreadID -} from '@communication/types' -import type { FindMessagesParams } from '@communication/types' +} from '@hcengineering/communication-types' +import type { FindMessagesParams } from '@hcengineering/communication-types' import type { BroadcastEvent } from './event.ts' diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 17231010164..621102b46cd 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -12,7 +12,7 @@ import type { SocialID, Notification, ThreadID -} from '@communication/types' +} from '@hcengineering/communication-types' export interface DbAdapter { createMessage( diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts index 8771477c533..70f287ceb59 100644 --- a/packages/sdk-types/src/event.ts +++ b/packages/sdk-types/src/event.ts @@ -12,7 +12,7 @@ import type { SocialID, Notification, ThreadID -} from '@communication/types' +} from '@hcengineering/communication-types' export enum EventType { CreateMessage = 'createMessage', diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts index c09a164af5f..ce851e16c86 100644 --- a/packages/sdk-types/src/query.ts +++ b/packages/sdk-types/src/query.ts @@ -1,4 +1,4 @@ -import type { Message, Window, Notification } from '@communication/types' +import type { Message, Window, Notification } from '@hcengineering/communication-types' export type QueryCallback = (window: Window) => void diff --git a/packages/server/package.json b/packages/server/package.json index 86de3c16c6f..77ed2402714 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,10 +1,14 @@ { - "name": "@communication/server", + "name": "@hcengineering/communication-server", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14", "@types/express": "^5.0.0", @@ -13,9 +17,9 @@ }, "dependencies": { "@hcengineering/server-token": "^0.6.377", - "@communication/cockroach": "workspace:*", - "@communication/sdk-types": "workspace:*", - "@communication/types": "workspace:*", + "@hcengineering/communication-cockroach": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", "cors": "^2.8.5", "dotenv": "^16.4.7", "express": "^4.21.2", @@ -24,5 +28,8 @@ }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index cffcbdca96c..6d7d72a1751 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -1,4 +1,4 @@ -import { type 
Message, type Patch, type Reaction, type Attachment } from '@communication/types' +import { type Message, type Patch, type Reaction, type Attachment } from '@hcengineering/communication-types' import { EventType, type CreateAttachmentEvent, @@ -28,7 +28,7 @@ import { type NotificationContextCreatedEvent, type NotificationContextRemovedEvent, type NotificationContextUpdatedEvent -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' type Result = { broadcastEvent?: BroadcastEvent diff --git a/packages/server/src/main.ts b/packages/server/src/main.ts index 3fa14adf07d..a45aca60777 100644 --- a/packages/server/src/main.ts +++ b/packages/server/src/main.ts @@ -1,8 +1,8 @@ import WebSocket, { WebSocketServer, type RawData } from 'ws' -import { createDbAdapter } from '@communication/cockroach' -import type { Response, HelloRequest } from '@communication/sdk-types' +import { createDbAdapter } from '@hcengineering/communication-cockroach' +import type { Response, HelloRequest } from '@hcengineering/communication-sdk-types' import { decodeToken } from '@hcengineering/server-token' -import type { SocialID } from '@communication/types' +import type { SocialID } from '@hcengineering/communication-types' import type { ConnectionInfo } from './types.ts' import { deserializeRequest, serializeResponse } from './utils/serialize.ts' diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index 6ca6a82a6ca..b6afedd4bfa 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -6,13 +6,13 @@ import { type NotificationContextCreatedEvent, type NotificationCreatedEvent, type Response -} from '@communication/sdk-types' +} from '@hcengineering/communication-sdk-types' import type { FindMessagesParams, FindNotificationContextParams, FindNotificationsParams, MessageID -} from '@communication/types' +} from '@hcengineering/communication-types' import { Session } from './session' import type { ConnectionInfo } from './types' diff --git a/packages/server/src/session.ts b/packages/server/src/session.ts index 3efd0a1a861..fdf487d3349 100644 --- a/packages/server/src/session.ts +++ b/packages/server/src/session.ts @@ -1,4 +1,4 @@ -import type { DbAdapter, Event, EventResult } from '@communication/sdk-types' +import type { DbAdapter, Event, EventResult } from '@hcengineering/communication-sdk-types' import type { FindMessagesParams, FindNotificationContextParams, @@ -6,7 +6,7 @@ import type { Message, Notification, NotificationContext -} from '@communication/types' +} from '@hcengineering/communication-types' import type { ConnectionInfo } from './types' import { EventProcessor } from './eventProcessor.ts' diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 4500a2ec20f..12197b98ae0 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -5,8 +5,8 @@ import { type MessageCreatedEvent, type NotificationContextCreatedEvent, type NotificationCreatedEvent -} from '@communication/sdk-types' -import type { NotificationContext, ContextID, CardID } from '@communication/types' +} from '@hcengineering/communication-sdk-types' +import type { NotificationContext, ContextID, CardID } from '@hcengineering/communication-types' export class Triggers { constructor(private readonly db: DbAdapter) {} diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 8d46b658167..69ca3515c07 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -1,4 +1,4 @@ -import 
type { SocialID } from '@communication/types' +import type { SocialID } from '@hcengineering/communication-types' export interface ConnectionInfo { workspace: string diff --git a/packages/server/src/utils/serialize.ts b/packages/server/src/utils/serialize.ts index 1aa35209da0..959b09d9dc3 100644 --- a/packages/server/src/utils/serialize.ts +++ b/packages/server/src/utils/serialize.ts @@ -1,5 +1,5 @@ import { Packr } from 'msgpackr' -import type {Response, Request} from '@communication/sdk-types' +import type {Response, Request} from '@hcengineering/communication-sdk-types' import type {RawData} from "ws"; diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index b89affa87f3..ae3f61db831 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,16 +1,20 @@ { - "name": "@communication/sqlite-wasm", + "name": "@hcengineering/communication-sqlite-wasm", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "dependencies": { - "@communication/types": "workspace:*", - "@communication/sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", "@sqlite.org/sqlite-wasm": "^3.47.1-build1", "path": "^0.12.7", "uuid": "^11.0.3" diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index 20e3b42ba2c..7a08c1dec42 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -12,8 +12,8 @@ import { type NotificationContext, type Notification, type ThreadID -} from '@communication/types' -import type { DbAdapter } from '@communication/sdk-types' +} from '@hcengineering/communication-types' +import type { DbAdapter } from '@hcengineering/communication-sdk-types' import { initializeSQLite, type Sqlite3Worker1Promiser } from './connection' import { applyMigrations } from './migrations.ts' diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts index 581fc0f51b7..95bceae0a26 100644 --- a/packages/sqlite-wasm/src/db/message.ts +++ b/packages/sqlite-wasm/src/db/message.ts @@ -7,7 +7,7 @@ import { type SocialID, type RichText, Direction, type Reaction, type Attachment, type ThreadID -} from '@communication/types' +} from '@hcengineering/communication-types' import {BaseDb} from './base.ts' import { diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index 2cf901a866e..83edb04e960 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -6,7 +6,7 @@ import { type FindNotificationContextParams, SortOrder, type FindNotificationsParams, type Notification, type NotificationContextUpdate -} from '@communication/types' +} from '@hcengineering/communication-types' import {BaseDb} from './base.ts' import {TableName, type ContextDb, type NotificationDb} from './types.ts' diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts index 4c7285fc46e..7834993bd69 100644 --- a/packages/sqlite-wasm/src/db/types.ts +++ b/packages/sqlite-wasm/src/db/types.ts @@ -1,4 +1,4 @@ -import type {CardID, ContextID, MessageID, RichText, SocialID } from "@communication/types" +import type {CardID, ContextID, MessageID, RichText, SocialID } from "@hcengineering/communication-types" export enum TableName { Message 
= 'message', diff --git a/packages/types/package.json b/packages/types/package.json index fa2e9fbcf46..e8f8b39fc3f 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,14 +1,21 @@ { - "name": "@communication/types", + "name": "@hcengineering/communication-types", "version": "0.1.0", "main": "src/index.ts", "module": "src/index.ts", "types": "./types/index.d.ts", "type": "module", + "files": [ + "./dist", + "./types" + ], "devDependencies": { "@types/bun": "^1.1.14" }, "peerDependencies": { "typescript": "^5.6.3" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } From 64c5a4c9e2d7bd038d7917d0834bc3dcf1d376d5 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 26 Dec 2024 23:26:47 +0400 Subject: [PATCH 022/636] Add repository to package.json (#14) --- .npmrc | 2 +- packages/client-query/package.json | 4 ++++ packages/client-ws/package.json | 4 ++++ packages/types/package.json | 4 ++++ 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.npmrc b/.npmrc index 07f82b72930..0999e7d0c33 100644 --- a/.npmrc +++ b/.npmrc @@ -1,2 +1,2 @@ @hcengineering:registry=https://npm.pkg.github.com/ -//npm.pkg.github.com/:_authToken=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 \ No newline at end of file +//npm.pkg.github.com/:_authToken=TOKEN \ No newline at end of file diff --git a/packages/client-query/package.json b/packages/client-query/package.json index f438d35aea7..5f4830e744b 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -21,6 +21,10 @@ "peerDependencies": { "typescript": "^5.6.3" }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, "publishConfig": { "registry": "https://npm.pkg.github.com" } diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 36a0990ad72..7c4170889fc 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -20,6 +20,10 @@ "peerDependencies": { "typescript": "^5.6.3" }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, "publishConfig": { "registry": "https://npm.pkg.github.com" } diff --git a/packages/types/package.json b/packages/types/package.json index e8f8b39fc3f..3e792e715a3 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -15,6 +15,10 @@ "peerDependencies": { "typescript": "^5.6.3" }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, "publishConfig": { "registry": "https://npm.pkg.github.com" } From af95e9a1da401717509a5e6f485e1bb0b44f8866 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Thu, 30 Jan 2025 01:41:38 +0300 Subject: [PATCH 023/636] init Signed-off-by: denis-tingaikin --- README.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 00000000000..1b7f8dd7743 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# huly-stream From 5524f00a79996f7d159f0424324aa3fea83cc873 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 3 Feb 2025 20:42:35 +0400 Subject: [PATCH 024/636] Add repository for sdk-types (#15) Signed-off-by: Kristina Fefelova --- .npmrc | 3 +-- packages/sdk-types/package.json | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.npmrc b/.npmrc index 0999e7d0c33..2d9b55b4f9d 100644 --- a/.npmrc +++ b/.npmrc @@ -1,2 +1 @@ -@hcengineering:registry=https://npm.pkg.github.com/ -//npm.pkg.github.com/:_authToken=TOKEN \ No 
newline at end of file +@hcengineering:registry=https://npm.pkg.github.com/ \ No newline at end of file diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 3a12c8e2a2e..32eb5b89288 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -18,6 +18,10 @@ "peerDependencies": { "typescript": "^5.6.3" }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, "publishConfig": { "registry": "https://npm.pkg.github.com" } From de97f778f9f39e163cf455394d1596a1c28c773d Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 4 Feb 2025 04:54:44 +0300 Subject: [PATCH 025/636] add initial version of huly-stream Signed-off-by: denis-tingaikin --- .DS_Store | Bin 0 -> 6148 bytes .github/workflows/docker-push.yaml | 44 +++ .github/workflows/main.yaml | 73 +++++ .github/yamllint.yaml | 12 + .golangci.yaml | 160 ++++++++++ Dockerfile | 36 +++ LICENSE | 277 ++++++++++++++++++ README.md | 122 +++++++- cmd/huly-stream/main.go | 104 +++++++ go.mod | 44 +++ go.sum | 84 ++++++ internal/pkg/config/config.go | 51 ++++ internal/pkg/log/zap.go | 50 ++++ internal/pkg/manifest/hls.go | 125 ++++++++ internal/pkg/manifest/hls_test.go | 143 +++++++++ internal/pkg/pprof/pprof.go | 54 ++++ internal/pkg/sharedpipe/shared_pipe.go | 116 ++++++++ .../pkg/sharedpipe/shared_pipe_bench_test.go | 248 ++++++++++++++++ internal/pkg/transcoding/command.go | 163 +++++++++++ internal/pkg/transcoding/command_test.go | 62 ++++ internal/pkg/transcoding/limiter.go | 78 +++++ internal/pkg/transcoding/limiter_test.go | 88 ++++++ internal/pkg/transcoding/scheduler.go | 129 ++++++++ internal/pkg/transcoding/worker.go | 125 ++++++++ internal/pkg/uploader/datalake.go | 124 ++++++++ internal/pkg/uploader/options.go | 19 ++ internal/pkg/uploader/postpone.go | 40 +++ internal/pkg/uploader/postpone_test.go | 44 +++ internal/pkg/uploader/s3.go | 143 +++++++++ internal/pkg/uploader/uploader.go | 219 ++++++++++++++ 30 files changed, 2976 insertions(+), 1 deletion(-) create mode 100644 .DS_Store create mode 100644 .github/workflows/docker-push.yaml create mode 100644 .github/workflows/main.yaml create mode 100644 .github/yamllint.yaml create mode 100644 .golangci.yaml create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 cmd/huly-stream/main.go create mode 100644 go.mod create mode 100644 go.sum create mode 100644 internal/pkg/config/config.go create mode 100644 internal/pkg/log/zap.go create mode 100644 internal/pkg/manifest/hls.go create mode 100644 internal/pkg/manifest/hls_test.go create mode 100644 internal/pkg/pprof/pprof.go create mode 100644 internal/pkg/sharedpipe/shared_pipe.go create mode 100644 internal/pkg/sharedpipe/shared_pipe_bench_test.go create mode 100644 internal/pkg/transcoding/command.go create mode 100644 internal/pkg/transcoding/command_test.go create mode 100644 internal/pkg/transcoding/limiter.go create mode 100644 internal/pkg/transcoding/limiter_test.go create mode 100644 internal/pkg/transcoding/scheduler.go create mode 100644 internal/pkg/transcoding/worker.go create mode 100644 internal/pkg/uploader/datalake.go create mode 100644 internal/pkg/uploader/options.go create mode 100644 internal/pkg/uploader/postpone.go create mode 100644 internal/pkg/uploader/postpone_test.go create mode 100644 internal/pkg/uploader/s3.go create mode 100644 internal/pkg/uploader/uploader.go diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 
0000000000000000000000000000000000000000..0923990b38bec54c6cb023fcb61ce6be5c8afe41 GIT binary patch literal 6148 zcmeHKyG{c^3>-s>NNG}1?k4~Z?J7#XARhn)5)DcsM5wRgyZE$>A3{1^C{m#)|UB-M9`4L$qUJ iv}10(9p6P!)-_-AycZ6ML1#YbMEwl7E;1=_Z3RxGZ4~VQ literal 0 HcmV?d00001 diff --git a/.github/workflows/docker-push.yaml b/.github/workflows/docker-push.yaml new file mode 100644 index 00000000000..0308ce654b8 --- /dev/null +++ b/.github/workflows/docker-push.yaml @@ -0,0 +1,44 @@ +--- +name: Docker push +on: + workflow_dispatch: + inputs: + version: + description: 'Version tag for the image' + required: false + default: 'latest' + type: string + +jobs: + push: + runs-on: ubuntu-latest + steps: + - name: "Checkout" + uses: actions/checkout@v4 + + - name: "Set up Docker Buildx" + uses: docker/setup-buildx-action@v1 + + - name: "Login to GitHub Container Registry" + uses: docker/login-action@v1 + with: + username: hardcoreeng + password: ${{ secrets.DOCKER_ACCESS_TOKEN }} + + - name: Docker meta + id: metaci + uses: docker/metadata-action@v3 + with: + images: hardcoreeng/huly-stream + tags: | + type=ref,event=pr + type=sha,prefix= + + - name: "Build and push" + uses: docker/build-push-action@v2 + with: + file: Dockerfile + context: . + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ inputs.version }} diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml new file mode 100644 index 00000000000..a2889730490 --- /dev/null +++ b/.github/workflows/main.yaml @@ -0,0 +1,73 @@ +--- +name: ci + +on: + push: + branches: + - main + - develop + pull_request: + +concurrency: + group: 'main' + cancel-in-progress: true + +jobs: + yamllint: + name: yamllint + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v4 + - name: yaml-lint + uses: ibiqlik/action-yamllint@v1 + with: + config_file: .github/yamllint.yaml + strict: true + build-and-test: + strategy: + matrix: + os: + - ubuntu + - windows + - macos + runs-on: ${{ matrix.os }}-latest + steps: + - name: Check out code + uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: 1.23.5 + - name: Build + run: go build -race ./... + - name: Test + run: go test -race ./... + checkgomod: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: 1.23.5 + - run: go mod tidy + - name: Check for changes in go.mod or go.sum + run: | + git diff --name-only --exit-code go.mod || ( echo "Run go mod tidy" && false ) + golangci-lint: + name: golangci-lint + runs-on: ubuntu-latest + steps: + - name: Check out code into the Go module directory + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version: 1.23.5 + - name: golangci-lint + uses: golangci/golangci-lint-action@v4 + with: + version: v1.60.3 + args: --timeout 3m --verbose diff --git a/.github/yamllint.yaml b/.github/yamllint.yaml new file mode 100644 index 00000000000..7c94f8d8fe0 --- /dev/null +++ b/.github/yamllint.yaml @@ -0,0 +1,12 @@ +--- +extends: default + +yaml-files: + - '*.yaml' + - '*.yml' + +rules: + truthy: disable + line-length: disable + comments: + min-spaces-from-content: 1 diff --git a/.golangci.yaml b/.golangci.yaml new file mode 100644 index 00000000000..0099487e6f5 --- /dev/null +++ b/.golangci.yaml @@ -0,0 +1,160 @@ +--- +run: + go: "1.23" + timeout: 2m + issues-exit-code: 1 + tests: true +linters-settings: + goheader: + template: |- + Copyright © {{ mod-year-range }} Hardcore Engineering Inc. 
+ + Licensed under the Eclipse Public License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. You may + obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + + See the License for the specific language governing permissions and + limitations under the License. + errcheck: + check-type-assertions: false + check-blank: false + govet: + enable: + - shadow + settings: + printf: + funcs: + - (github.com/sirupsen/logrus.FieldLogger).Infof + - (github.com/sirupsen/logrus.FieldLogger).Warnf + - (github.com/sirupsen/logrus.FieldLogger).Errorf + - (github.com/sirupsen/logrus.FieldLogger).Fatalf + revive: + confidence: 0.8 + rules: + - name: exported + - name: blank-imports + - name: context-as-argument + - name: context-keys-type + - name: dot-imports + - name: error-return + - name: error-strings + - name: error-naming + - name: exported + - name: increment-decrement + - name: package-comments + - name: range + - name: receiver-naming + - name: time-naming + - name: unexported-return + - name: indent-error-flow + - name: errorf + - name: superfluous-else + - name: unreachable-code + goimports: + local-prefixes: github.com/networkservicemesh/sdk + gocyclo: + min-complexity: 15 + dupl: + threshold: 150 + funlen: + lines: 120 + statements: 60 + goconst: + min-len: 2 + min-occurrences: 2 + depguard: + rules: + main: + deny: + - pkg: "errors" + desc: "Please use \"github.com/pkg/errors\" instead of \"errors\" in go imports" + misspell: + locale: US + unparam: + check-exported: false + nakedret: + max-func-lines: 30 + prealloc: + simple: true + range-loops: true + for-loops: false + gosec: + excludes: + - G115 + - G204 + - G301 + - G302 + - G306 + gocritic: + enabled-checks: + - appendCombine + - boolExprSimplify + - builtinShadow + - commentedOutCode + - commentedOutImport + - docStub + - dupImport + - emptyFallthrough + - emptyStringTest + - equalFold + - evalOrder + - hexLiteral + - importShadow + - indexAlloc + - initClause + - methodExprCall + - nestingReduce + - nilValReturn + - octalLiteral + - paramTypeCombine + - rangeExprCopy + - rangeValCopy + - regexpPattern + - sloppyReassign + - stringXbytes + - typeAssertChain + - typeUnparen + - unlabelStmt + - unnamedResult + - unnecessaryBlock + - weakCond + - yodaStyleExpr +linters: + disable-all: true + enable: + - goheader + - bodyclose + - unused + - depguard + - dogsled + - dupl + - errcheck + - funlen + - gochecknoinits + - goconst + - gocritic + - gocyclo + - gofmt + - goimports + - revive + - gosec + - gosimple + - govet + - ineffassign + - misspell + - nakedret + - copyloopvar + - staticcheck + - stylecheck + - typecheck + - unconvert + - unparam + - whitespace +issues: + exclude-use-default: false + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000000..0b4dca5d82c --- /dev/null +++ b/Dockerfile @@ -0,0 +1,36 @@ +# Copyright © 2025 Hardcore Engineering Inc. +# +# Licensed under the Eclipse Public License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
You may +# obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM --platform=$BUILDPLATFORM golang:1.23.5 AS builder +ENV GO111MODULE=on +ENV CGO_ENABLED=0 +ENV GOBIN=/bin +ARG BUILDARCH=amd64 + +COPY . ./ + +RUN set -xe && GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /go/bin/huly-stream ./cmd/huly-stream + +FROM alpine + +RUN set -xe && apk add --no-cache ffmpeg +RUN apk add --no-cache ca-certificates jq bash \ + && addgroup -g 1000 huly-stream \ + && adduser -u 1000 -G huly-stream -s /bin/sh -D huly-stream \ + && chown huly-stream:huly-stream /. +COPY --from=builder /go/bin/huly-stream /huly-stream + +EXPOSE 1080 +USER huly-stream + +ENTRYPOINT ["/huly-stream"] \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000000..e48e0963459 --- /dev/null +++ b/LICENSE @@ -0,0 +1,277 @@ +Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + +"Contributor" means any person or entity that Distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which +are necessarily infringed by the use or sale of its Contribution alone +or when combined with the Program. + +"Program" means the Contributions Distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement +or any Secondary License (as applicable), including Contributors. + +"Derivative Works" shall mean any work, whether in Source Code or other +form, that is based on (or derived from) the Program and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. + +"Modified Works" shall mean any work in Source Code or other form that +results from an addition to, deletion from, or modification of the +contents of the Program, including, for purposes of clarity any new file +in Source Code form that contains any contents of the Program. Modified +Works shall not include works that contain only declarations, +interfaces, types, classes, structures, or files of the Program solely +in each case in order to link to, bind by name, or subclass the Program +or Modified Works thereof. + +"Distribute" means the acts of a) distributing or b) making available +in any manner that enables the transfer of a copy. 
+ +"Source Code" means the form of a Program preferred for making +modifications, including but not limited to software source code, +documentation source, and configuration files. + +"Secondary License" means either the GNU General Public License, +Version 2.0, or any later versions of that license, including any +exceptions or additional permissions as identified by the initial +Contributor. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. + Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. + + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + +3. 
REQUIREMENTS + +3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. + +3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + +3.3 Contributors may not remove or alter any copyright, patent, +trademark, attribution notices, disclaimers of warranty, or limitations +of liability ("notices") contained within the Program from any copy of +the Program which they Distribute, provided that Contributors may add +their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities +with respect to end users, business partners and the like. While this +license is intended to facilitate the commercial use of the Program, +the Contributor who includes the Program in a commercial product +offering should do so in a manner which does not create potential +liability for other Contributors. Therefore, if a Contributor includes +the Program in a commercial product offering, such Contributor +("Commercial Contributor") hereby agrees to defend and indemnify every +other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits +and other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such +Commercial Contributor in connection with its distribution of the Program +in a commercial product offering. The obligations in this section do not +apply to any claims or Losses relating to any actual or alleged +intellectual property infringement. 
In order to qualify, an Indemnified +Contributor must: a) promptly notify the Commercial Contributor in +writing of such claim, and b) allow the Commercial Contributor to control, +and cooperate with the Commercial Contributor in, the defense and any +related settlement negotiations. The Indemnified Contributor may +participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial +product offering, Product X. That Contributor is then a Commercial +Contributor. If that Commercial Contributor then makes performance +claims, or offers warranties related to Product X, those performance +claims and warranties are such Commercial Contributor's responsibility +alone. Under this section, the Commercial Contributor would have to +defend claims against the other Contributors related to those performance +claims and warranties, and if a court requires any other Contributor to +pay any damages as a result, the Commercial Contributor must pay +those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the Program and assumes all +risks associated with its exercise of rights under this Agreement, +including but not limited to the risks and costs of program errors, +compliance with applicable laws, damage to or loss of data, programs +or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST +PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of +the remainder of the terms of this Agreement, and without further +action by the parties hereto, such provision shall be reformed to the +minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity +(including a cross-claim or counterclaim in a lawsuit) alleging that the +Program itself (excluding combinations of the Program with other software +or hardware) infringes such Recipient's patent(s), then such Recipient's +rights granted under Section 2(b) shall terminate as of the date such +litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it +fails to comply with any of the material terms or conditions of this +Agreement and does not cure such failure in a reasonable period of +time after becoming aware of such noncompliance. 
If all Recipient's +rights under this Agreement terminate, Recipient agrees to cease use +and distribution of the Program as soon as reasonably practicable. +However, Recipient's obligations under this Agreement and any licenses +granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, +but in order to avoid inconsistency the Agreement is copyrighted and +may only be modified in the following manner. The Agreement Steward +reserves the right to publish new versions (including revisions) of +this Agreement from time to time. No one other than the Agreement +Steward has the right to modify this Agreement. The Eclipse Foundation +is the initial Agreement Steward. The Eclipse Foundation may assign the +responsibility to serve as the Agreement Steward to a suitable separate +entity. Each new version of the Agreement will be given a distinguishing +version number. The Program (including Contributions) may always be +Distributed subject to the version of the Agreement under which it was +received. In addition, after a new version of the Agreement is published, +Contributor may elect to Distribute the Program (including its +Contributions) under the new version. + +Except as expressly stated in Sections 2(a) and 2(b) above, Recipient +receives no rights or licenses to the intellectual property of any +Contributor under this Agreement, whether expressly, by implication, +estoppel or otherwise. All rights in the Program not expressly granted +under this Agreement are reserved. Nothing in this Agreement is intended +to be enforceable by any entity that is not a Contributor or Recipient. +No third-party beneficiary rights are created under this Agreement. + +Exhibit A - Form of Secondary Licenses Notice + +"This Source Code may also be made available under the following +Secondary Licenses when the conditions for such availability set forth +in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), +version(s), and exceptions or additional permissions here}." + + Simply including a copy of this Agreement, including this Exhibit A + is not sufficient to license the Source Code under Secondary Licenses. + + If it is not possible or desirable to put the notice in a particular + file, then You may include the notice in a location (such as a LICENSE + file in a relevant directory) where a recipient would be likely to + look for such a notice. + + You may add additional accurate notices of copyright ownership. diff --git a/README.md b/README.md index 1b7f8dd7743..8eb361d4426 100644 --- a/README.md +++ b/README.md @@ -1 +1,121 @@ -# huly-stream +# Huly Stream + +[![X (formerly Twitter) Follow](https://img.shields.io/twitter/follow/huly_io?style=for-the-badge)](https://x.com/huly_io) +![GitHub License](https://img.shields.io/github/license/hcengineering/platform?style=for-the-badge) + +## About + +The Huly Stream high-performance HTTP-based transcoding service. Huly-stream is built around the **TUS protocol**, enabling reliable, resumable file uploads and downloads. Designed for seamless and consistent media processing,it supports advanced transcoding features with robust integration options. + +--- + +## Features + +### TUS Protocol Support +- **Resumable transcoding**: Leveraging the TUS protocol, Huly-stream ensures reliable and efficient stream processing. 
+
+### Input Support
+- **Supported Input Formats**:
+  - `mp4`
+  - `webm`
+
+### Output Options
+- **TUS Upload**: Resumable file uploads via the TUS protocol.
+- **S3 Upload**: Direct upload to Amazon S3.
+- **Datalake Upload**: Integration for data lake storage systems.
+- **Supported Output Formats**:
+  - `aac`
+  - `hls`
+
+### Key Functionalities
+- **Live transcoding with minimal upload time**: Transcoding results become available right after stream completion.
+- **Transcoding Cancellation**: Cancel or pause ongoing transcoding in real-time.
+- **Transcoding Resumption**: Resume incomplete transcoding tasks efficiently.
+
+---
+
+## Installation
+
+### Prerequisites
+- [Go](https://golang.org/dl/) (v1.23+ recommended)
+- [ffmpeg](https://www.ffmpeg.org/download.html) (ensure it’s installed and available in your system's PATH)
+
+### Steps
+
+1. Install dependencies:
+   ```bash
+   go mod tidy
+   ```
+
+2. Build the Docker image:
+   ```bash
+   docker build . -t hcengineering/huly-stream:latest
+   ```
+
+
+---
+
+## Configuration
+
+### App env configuration
+The following environment variables can be used:
+```
+KEY                                TYPE             DEFAULT             REQUIRED    DESCRIPTION
+STREAM_SECRET_TOKEN                String                                           secret token for authorize requests
+STREAM_LOG_LEVEL                   String           debug                           sets log level for the application
+STREAM_PPROF_ENABLED               True or False    false                           starts profile server on localhost:6060 if true
+STREAM_INSECURE                    True or False    false                           ignores authorization check if true
+STREAM_SERVE_URL                   String           0.0.0.0:1080                    app listen url
+STREAM_ENDPOINT_URL                URL                                              S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address
+STREAM_MAX_CAPACITY                Integer          6220800                         represents the amount of maximum possible capacity for the transcoding. The default value is 1920 * 1080 * 3.
+STREAM_MAX_THREADS                 Integer          4                               means upper bound for the transcoding provider.
+STREAM_OUTPUT_DIR                  String           /tmp/transcoing/                path to the directory with transcoding results.
+STREAM_REMOVE_CONTENT_ON_UPLOAD    True or False    true                            deletes all content when content delivered if true
+STREAM_UPLOAD_RAW_CONTENT          True or False    false                           uploads content in raw quality to the endpoint if true
+```
+
+### Metadata:
+
+**resolutions** if passed, sets the output resolutions, for example: `resolutions: 1920:1080, 1280:720`.
+
+**token** must be provided to be authorized in Huly's datalake service.
+
+**workspace** is required for uploading content.
+
+
+
+#### S3 Env configuration
+
+If you're working with the S3 storage type, these env variables must be provided:
+**AWS_ACCESS_KEY_ID**
+**AWS_SECRET_ACCESS_KEY**
+
+## Usage
+
+The service exposes an HTTP API. Below are some examples of how to interact with it.
+
+### Upload a File for Transcoding via TUS
+```bash
+curl -X POST http://localhost:1080/transcoding \
+  -H "Tus-Resumable: 1.0.0" \
+  -H "Upload-Length: <file size in bytes>" \
+  --data-binary @path/to/your/file.mp4
+```
+
+## Contributing
+
+We welcome contributions! To get started:
+
+1. Fork the repository.
+2. Create a new branch for your feature or bug fix.
+3. Submit a pull request describing your changes.
+
+---
+
+## License
+
+This project is licensed under the [Eclipse Public License 2.0](LICENSE).
+
+---
+
+Enjoy seamless transcoding with huly-stream! 🚀
\ No newline at end of file
diff --git a/cmd/huly-stream/main.go b/cmd/huly-stream/main.go
new file mode 100644
index 00000000000..2730cd8b35a
--- /dev/null
+++ b/cmd/huly-stream/main.go
@@ -0,0 +1,104 @@
+// Copyright © 2025 Hardcore Engineering Inc.
+// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package main provides huly-stream entry point function +package main + +import ( + "context" + "net/http" + + "os" + "os/signal" + "syscall" + + "go.uber.org/zap" + "golang.org/x/exp/slog" + + "github.com/huly-stream/internal/pkg/config" + "github.com/huly-stream/internal/pkg/log" + "github.com/huly-stream/internal/pkg/pprof" + "github.com/huly-stream/internal/pkg/transcoding" + tusd "github.com/tus/tusd/v2/pkg/handler" +) + +const basePath = "/transcoding" + +func main() { + var ctx, cancel = signal.NotifyContext( + context.Background(), + os.Interrupt, + syscall.SIGHUP, + syscall.SIGTERM, + syscall.SIGQUIT, + ) + defer cancel() + + ctx = log.WithLoggerFields(ctx) + + var logger = log.FromContext(ctx) + var conf = must(config.FromEnv()) + + logger.Sugar().Debugf("provided config is %v", conf) + + mustNoError(os.MkdirAll(conf.OutputDir, os.ModePerm)) + + if conf.PprofEnabled { + go pprof.ListenAndServe(ctx, "localhost:6060") + } + + scheduler := transcoding.NewScheduler(ctx, conf) + + tusComposer := tusd.NewStoreComposer() + tusComposer.UseCore(scheduler) + tusComposer.UseTerminater(scheduler) + tusComposer.UseConcater(scheduler) + tusComposer.UseLengthDeferrer(scheduler) + + var handler = must(tusd.NewHandler(tusd.Config{ + BasePath: basePath, + StoreComposer: tusComposer, + Logger: slog.New(slog.NewTextHandler(discardTextHandler{}, nil)), + })) + + http.Handle("/transcoding/", http.StripPrefix("/transcoding/", handler)) + http.Handle("/transcoding", http.StripPrefix("/transcoding", handler)) + + go func() { + // #nosec + var err = http.ListenAndServe(conf.ServeURL, nil) + if err != nil { + cancel() + logger.Debug("unable to listen", zap.Error(err)) + } + }() + + <-ctx.Done() +} + +type discardTextHandler struct{} + +func (discardTextHandler) Write([]byte) (int, error) { + return 0, nil +} + +func mustNoError(err error) { + if err != nil { + panic(err.Error()) + } +} + +func must[T any](val T, err error) T { + mustNoError(err) + return val +} diff --git a/go.mod b/go.mod new file mode 100644 index 00000000000..89419cb728b --- /dev/null +++ b/go.mod @@ -0,0 +1,44 @@ +module github.com/huly-stream + +go 1.23.2 + +require ( + github.com/aws/aws-sdk-go-v2 v1.32.3 + github.com/aws/aws-sdk-go-v2/config v1.28.1 + github.com/aws/aws-sdk-go-v2/credentials v1.17.42 + github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2 + github.com/aws/smithy-go v1.22.0 + github.com/fsnotify/fsnotify v1.8.0 + github.com/google/uuid v1.6.0 + github.com/kelseyhightower/envconfig v1.4.0 + github.com/pkg/errors v0.9.1 + github.com/stretchr/testify v1.9.0 + github.com/tus/tusd/v2 v2.6.0 + github.com/valyala/fasthttp v1.58.0 + go.uber.org/zap v1.27.0 + golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df +) + +require ( + github.com/andybalholm/brotli v1.1.1 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources 
v1.3.22 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/klauspost/compress v1.17.11 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/valyala/bytebufferpool v1.0.0 // indirect + go.uber.org/multierr v1.11.0 // indirect + golang.org/x/sys v0.27.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000000..11e9c85d56b --- /dev/null +++ b/go.sum @@ -0,0 +1,84 @@ +github.com/Acconut/go-httptest-recorder v1.0.0 h1:TAv2dfnqp/l+SUvIaMAUK4GeN4+wqb6KZsFFFTGhoJg= +github.com/Acconut/go-httptest-recorder v1.0.0/go.mod h1:CwQyhTH1kq/gLyWiRieo7c0uokpu3PXeyF/nZjUNtmM= +github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= +github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= +github.com/aws/aws-sdk-go-v2 v1.32.3 h1:T0dRlFBKcdaUPGNtkBSwHZxrtis8CQU17UpNBZYd0wk= +github.com/aws/aws-sdk-go-v2 v1.32.3/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA= +github.com/aws/aws-sdk-go-v2/config v1.28.1 h1:oxIvOUXy8x0U3fR//0eq+RdCKimWI900+SV+10xsCBw= +github.com/aws/aws-sdk-go-v2/config v1.28.1/go.mod h1:bRQcttQJiARbd5JZxw6wG0yIK3eLeSCPdg6uqmmlIiI= +github.com/aws/aws-sdk-go-v2/credentials v1.17.42 h1:sBP0RPjBU4neGpIYyx8mkU2QqLPl5u9cmdTWVzIpHkM= +github.com/aws/aws-sdk-go-v2/credentials v1.17.42/go.mod h1:FwZBfU530dJ26rv9saAbxa9Ej3eF/AK0OAY86k13n4M= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 h1:68jFVtt3NulEzojFesM/WVarlFpCaXLKaBxDpzkQ9OQ= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18/go.mod h1:Fjnn5jQVIo6VyedMc0/EhPpfNlPl7dHV916O6B+49aE= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 h1:Jw50LwEkVjuVzE1NzkhNKkBf9cRN7MtE1F/b2cOKTUM= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22/go.mod h1:Y/SmAyPcOTmpeVaWSzSKiILfXTVJwrGmYZhcRbhWuEY= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 h1:981MHwBaRZM7+9QSR6XamDzF/o7ouUGxFzr+nVSIhrs= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22/go.mod h1:1RA1+aBEfn+CAB/Mh0MB6LsdCYCnjZm7tKXtnk499ZQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 h1:yV+hCAHZZYJQcwAaszoBNwLbPItHvApxT0kVIw6jRgs= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22/go.mod h1:kbR1TL8llqB1eGnVbybcA4/wgScxdylOdyAd51yxPdw= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 
h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 h1:kT6BcZsmMtNkP/iYMcRG+mIEA/IbeiUimXtGmqF39y0= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3/go.mod h1:Z8uGua2k4PPaGOYn66pK02rhMrot3Xk3tpBuUFPomZU= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 h1:qcxX0JYlgWH3hpPUnd6U0ikcl6LLA9sLkXE2w1fpMvY= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3/go.mod h1:cLSNEmI45soc+Ef8K/L+8sEA3A3pYFEYf5B5UI+6bH4= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 h1:ZC7Y/XgKUxwqcdhO5LE8P6oGP1eh6xlQReWNKfhvJno= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3/go.mod h1:WqfO7M9l9yUAw0HcHaikwRd/H6gzYdz7vjejCA5e2oY= +github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2 h1:p9TNFL8bFUMd+38YIpTAXpoxyz0MxC7FlbFEH4P4E1U= +github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2/go.mod h1:fNjyo0Coen9QTwQLWeV6WO2Nytwiu+cCcWaTdKCAqqE= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 h1:UTpsIf0loCIWEbrqdLb+0RxnTXfWh2vhw4nQmFi4nPc= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.3/go.mod h1:FZ9j3PFHHAR+w0BSEjK955w5YD2UwB/l/H0yAK3MJvI= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 h1:2YCmIXv3tmiItw0LlYf6v7gEHebLY45kBEnPezbUKyU= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3/go.mod h1:u19stRyNPxGhj6dRm+Cdgu6N75qnbW7+QN0q0dsAk58= +github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 h1:wVnQ6tigGsRqSWDEEyH6lSAJ9OyFUsSnbaUWChuSGzs= +github.com/aws/aws-sdk-go-v2/service/sts v1.32.3/go.mod h1:VZa9yTFyj4o10YGsmDO4gbQJUvvhY72fhumT8W4LqsE= +github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM= +github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= +github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= +github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= +github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= +github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tus/tusd/v2 v2.6.0 h1:Je243QDKnFTvm/WkLH2bd1oQ+7trolrflRWyuI0PdWI= +github.com/tus/tusd/v2 v2.6.0/go.mod 
h1:1Eb1lBoSRBfYJ/mQfFVjyw8ZdNMdBqW17vgQKl3Ah9g= +github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.58.0 h1:GGB2dWxSbEprU9j0iMJHgdKYJVDyjrOwF9RE59PbRuE= +github.com/valyala/fasthttp v1.58.0/go.mod h1:SYXvHHaFp7QZHGKSHmoMipInhrI5StHrhDTYVEjK/Kw= +github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= +github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= +go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= +go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME= +golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go new file mode 100644 index 00000000000..1ef90f21925 --- /dev/null +++ b/internal/pkg/config/config.go @@ -0,0 +1,51 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package config provides configuration for the application +package config + +import ( + "net/url" + + "github.com/kelseyhightower/envconfig" +) + +// Config represents configuration for the huly-stream application. 
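+//
+// FromEnv fills these fields from STREAM_-prefixed environment variables via
+// envconfig (field names are upper-snake-cased), for example, with
+// illustrative values only:
+//
+//	STREAM_ENDPOINT_URL=datalake://my-datalake-host
+//	STREAM_MAX_THREADS=8
+//	STREAM_UPLOAD_RAW_CONTENT=true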
+type Config struct { + SecretToken string `split_words:"true" desc:"secret token for authorize requests"` + LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` + PprofEnabled bool `default:"false" split_words:"true" desc:"starts profile server on localhost:6060 if true"` + Insecure bool `default:"false" desc:"ignores authorization check if true"` + ServeURL string `split_words:"true" desc:"app listen url" default:"0.0.0.0:1080"` + EndpointURL *url.URL `split_words:"true" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` + MaxCapacity int64 `split_words:"true" default:"6220800" desc:"represents the amount of maximum possible capacity for the transcoding. The default value is 1920 * 1080 * 3."` + MaxThreads int `split_words:"true" default:"4" desc:"means upper bound for the transcoing provider."` + OutputDir string `split_words:"true" default:"/tmp/transcoing/" desc:"path to the directory with transcoding result."` + RemoveContentOnUpload bool `split_words:"true" default:"true" desc:"deletes all content when content delivered if true"` + UploadRawContent bool `split_words:"true" default:"false" desc:"uploads content in raw quality to the endpoint if true"` +} + +// FromEnv creates new Config from env +func FromEnv() (*Config, error) { + var result Config + + if err := envconfig.Usage("stream", &result); err != nil { + return nil, err + } + + if err := envconfig.Process("stream", &result); err != nil { + return nil, err + } + + return &result, nil +} diff --git a/internal/pkg/log/zap.go b/internal/pkg/log/zap.go new file mode 100644 index 00000000000..72e03ac379a --- /dev/null +++ b/internal/pkg/log/zap.go @@ -0,0 +1,50 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package log provides simple api for using inherited logging +package log + +import ( + "context" + + "go.uber.org/zap" +) + +type contextKey struct{} + +// WithLoggerFields createsa new context with zap.Logger and passed fields +func WithLoggerFields(ctx context.Context, fields ...zap.Field) context.Context { + var logger = FromContext(ctx) + if logger == nil { + var err error + logger, err = zap.NewDevelopment() + if err != nil { + panic(err.Error()) + } + logger.Info("zap logger was initialized") + go func() { + <-ctx.Done() + _ = logger.Sync() + }() + } + return context.WithValue(ctx, contextKey{}, logger.With(fields...)) +} + +// FromContext returns zap.Logger from the context +func FromContext(ctx context.Context) *zap.Logger { + var val = ctx.Value(contextKey{}) + if val == nil { + return nil + } + return val.(*zap.Logger) +} diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go new file mode 100644 index 00000000000..712e97f072d --- /dev/null +++ b/internal/pkg/manifest/hls.go @@ -0,0 +1,125 @@ +// Copyright © 2025 Hardcore Engineering Inc. 
+// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package manifest provides data types for manifest based media files. +package manifest + +import ( + "bufio" + "fmt" + "strconv" + "strings" +) + +// HLSManifest represents an HLS manifest file +// with metadata about the playlist and its segments. +type HLSManifest struct { + Version int + TargetDuration int + SequenceNumber int + Segments []Segment + EndList bool +} + +// Segment represents a media segment in the HLS manifest. +type Segment struct { + URI string + Duration float64 + Title string +} + +// ToM3U8 serializes the HLSManifest to an M3U8 file format. +func (m *HLSManifest) ToM3U8() string { + var builder strings.Builder + + builder.WriteString("#EXTM3U\n") + builder.WriteString(fmt.Sprintf("#EXT-X-VERSION:%d\n", m.Version)) + builder.WriteString(fmt.Sprintf("#EXT-X-TARGETDURATION:%d\n", m.TargetDuration)) + builder.WriteString(fmt.Sprintf("#EXT-X-MEDIA-SEQUENCE:%d\n", m.SequenceNumber)) + + for _, segment := range m.Segments { + if segment.Title != "" { + builder.WriteString(fmt.Sprintf("#EXTINF:%.2f,%s\n", segment.Duration, segment.Title)) + } else { + builder.WriteString(fmt.Sprintf("#EXTINF:%.2f,\n", segment.Duration)) + } + builder.WriteString(fmt.Sprintf("%s\n", segment.URI)) + } + + if m.EndList { + builder.WriteString("#EXT-X-ENDLIST\n") + } + + return builder.String() +} + +// FromM3U8 converts raw input to the hls master file +// nolint +func FromM3U8(data string) (*HLSManifest, error) { + scanner := bufio.NewScanner(strings.NewReader(data)) + manifest := &HLSManifest{} + var currentSegment *Segment + + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + if line == "" { + continue + } + + if strings.HasPrefix(line, "#EXTM3U") { + continue + } + if strings.HasPrefix(line, "#EXT-X-VERSION:") { + version, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-VERSION:")) + if err != nil { + return nil, err + } + manifest.Version = version + } else if strings.HasPrefix(line, "#EXT-X-TARGETDURATION:") { + targetDuration, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-TARGETDURATION:")) + if err != nil { + return nil, err + } + manifest.TargetDuration = targetDuration + } else if strings.HasPrefix(line, "#EXT-X-MEDIA-SEQUENCE:") { + sequenceNumber, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-MEDIA-SEQUENCE:")) + if err != nil { + return nil, err + } + manifest.SequenceNumber = sequenceNumber + } else if strings.HasPrefix(line, "#EXTINF:") { + parts := strings.SplitN(strings.TrimPrefix(line, "#EXTINF:"), ",", 2) + duration, err := strconv.ParseFloat(parts[0], 64) + if err != nil { + return nil, err + } + title := "" + if len(parts) > 1 { + title = parts[1] + } + currentSegment = &Segment{Duration: duration, Title: title} + } else if strings.HasPrefix(line, "#EXT-X-ENDLIST") { + manifest.EndList = true + } else if currentSegment != nil { + currentSegment.URI = line + manifest.Segments = append(manifest.Segments, *currentSegment) + currentSegment = nil + } + } + + if err := scanner.Err(); 
err != nil { + return nil, err + } + + return manifest, nil +} diff --git a/internal/pkg/manifest/hls_test.go b/internal/pkg/manifest/hls_test.go new file mode 100644 index 00000000000..15f3dbe022e --- /dev/null +++ b/internal/pkg/manifest/hls_test.go @@ -0,0 +1,143 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package manifest_test + +import ( + "testing" + + "github.com/huly-stream/internal/pkg/manifest" + "github.com/stretchr/testify/assert" +) + +func TestToM3U8(t *testing.T) { + tests := []struct { + name string + manifest manifest.HLSManifest + expected string + }{ + { + name: "simple manifest", + manifest: manifest.HLSManifest{ + Version: 3, + TargetDuration: 10, + SequenceNumber: 1, + Segments: []manifest.Segment{ + {URI: "segment1.ts", Duration: 9.5, Title: "Segment 1"}, + {URI: "segment2.ts", Duration: 9.0, Title: "Segment 2"}, + }, + EndList: true, + }, + expected: `#EXTM3U +#EXT-X-VERSION:3 +#EXT-X-TARGETDURATION:10 +#EXT-X-MEDIA-SEQUENCE:1 +#EXTINF:9.50,Segment 1 +segment1.ts +#EXTINF:9.00,Segment 2 +segment2.ts +#EXT-X-ENDLIST +`, + }, + { + name: "empty manifest", + manifest: manifest.HLSManifest{ + Version: 3, + TargetDuration: 10, + SequenceNumber: 1, + Segments: []manifest.Segment{}, + EndList: false, + }, + expected: `#EXTM3U +#EXT-X-VERSION:3 +#EXT-X-TARGETDURATION:10 +#EXT-X-MEDIA-SEQUENCE:1 +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual := tt.manifest.ToM3U8() + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestFromM3U8(t *testing.T) { + tests := []struct { + name string + data string + expected manifest.HLSManifest + err bool + }{ + { + name: "valid manifest", + data: `#EXTM3U +#EXT-X-VERSION:3 +#EXT-X-TARGETDURATION:10 +#EXT-X-MEDIA-SEQUENCE:1 +#EXTINF:9.50,Segment 1 +segment1.ts +#EXTINF:9.00,Segment 2 +segment2.ts +#EXT-X-ENDLIST +`, + expected: manifest.HLSManifest{ + Version: 3, + TargetDuration: 10, + SequenceNumber: 1, + Segments: []manifest.Segment{ + {URI: "segment1.ts", Duration: 9.5, Title: "Segment 1"}, + {URI: "segment2.ts", Duration: 9.0, Title: "Segment 2"}, + }, + EndList: true, + }, + err: false, + }, + // { + // name: "missing target duration", + // data: `#EXTM3U + // #EXT-X-VERSION:3 + // #EXT-X-MEDIA-SEQUENCE:1 + // #EXTINF:9.50,Segment 1 + // segment1.ts + // `, + // expected: manifest.HLSManifest{}, + // err: true, + // }, + { + name: "empty file", + data: "", + expected: manifest.HLSManifest{ + Version: 0, + TargetDuration: 0, + SequenceNumber: 0, + EndList: false, + }, + err: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := manifest.FromM3U8(tt.data) + if tt.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expected, *actual) + } + }) + } +} diff --git a/internal/pkg/pprof/pprof.go b/internal/pkg/pprof/pprof.go new file mode 100644 index 00000000000..e7a5eb03024 --- /dev/null +++ b/internal/pkg/pprof/pprof.go @@ -0,0 +1,54 @@ +// Copyright 
© 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package pprof provides all-in configured pprof server for debugging purposes +package pprof + +import ( + "context" + "net/http" + "net/http/pprof" + "time" + + "github.com/huly-stream/internal/pkg/log" + "go.uber.org/zap" +) + +// ListenAndServe - configures pprof http handlers +func ListenAndServe(ctx context.Context, listenOn string) { + log.FromContext(ctx).Debug("Profiler is enabled", zap.String("listening on", listenOn)) + mux := http.NewServeMux() + mux.HandleFunc("/debug/pprof/", pprof.Index) + mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + mux.HandleFunc("/debug/pprof/profile", pprof.Profile) + mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + mux.HandleFunc("/debug/pprof/trace", pprof.Trace) + mux.Handle("/debug/pprof/allocs", pprof.Handler("allocs")) + mux.Handle("/debug/pprof/block", pprof.Handler("block")) + mux.Handle("/debug/pprof/goroutine", pprof.Handler("goroutine")) + mux.Handle("/debug/pprof/heap", pprof.Handler("heap")) + mux.Handle("/debug/pprof/mutex", pprof.Handler("mutex")) + mux.Handle("/debug/pprof/threadcreate", pprof.Handler("threadcreate")) + server := &http.Server{ + Addr: listenOn, + Handler: mux, + ReadTimeout: 10 * time.Second, + WriteTimeout: 10 * time.Second, + } + if err := server.ListenAndServe(); err != nil { + log.FromContext(ctx).Debug("Failed to start profiler", zap.Error(err)) + } + + <-ctx.Done() + _ = server.Close() +} diff --git a/internal/pkg/sharedpipe/shared_pipe.go b/internal/pkg/sharedpipe/shared_pipe.go new file mode 100644 index 00000000000..0a698b46c83 --- /dev/null +++ b/internal/pkg/sharedpipe/shared_pipe.go @@ -0,0 +1,116 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package sharedpipe provided a shared pipe that can be used when one writer can be shared between a couple of readers. 
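+//
+// A minimal usage sketch (illustrative only): one Writer fans out to several
+// Readers created via Transpile, and every Reader observes the chunks written
+// after it was created:
+//
+//	w := sharedpipe.NewWriter()
+//	r1, r2 := w.Transpile(), w.Transpile()
+//	go func() {
+//		_, _ = w.Write([]byte("chunk-1"))
+//		_, _ = w.Write([]byte("chunk-2"))
+//		_ = w.Close()
+//	}()
+//	b1, _ := io.ReadAll(r1) // "chunk-1chunk-2"
+//	b2, _ := io.ReadAll(r2) // the same bytes, read independently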
+package sharedpipe + +import ( + "io" + "sync" + "sync/atomic" +) + +// Chunk represents a chunk of raw data for the readers +type Chunk struct { + content []byte + Next atomic.Pointer[Chunk] + ready chan struct{} + done chan struct{} +} + +// NewWriter creates a new shared pipe Writer +func NewWriter() *Writer { + var res = &Writer{ + done: make(chan struct{}), + } + res.curr.Store(&Chunk{done: res.done, ready: make(chan struct{})}) + return res +} + +// Transpile creates a new Reader +func (w *Writer) Transpile() *Reader { + var res = &Reader{ + curr: w.curr.Load(), + done: make(chan struct{}), + } + return res +} + +// Writer represents a shared pipe writer +type Writer struct { + curr atomic.Pointer[Chunk] + done chan struct{} + once sync.Once +} + +// Close closes the pipe for all readers +func (w *Writer) Close() error { + w.once.Do(func() { close(w.done) }) + return nil +} + +func (w *Writer) Write(p []byte) (n int, err error) { + var completePrevious = w.curr.Load().ready + var curr = w.curr.Load() + curr.Next.Store(&Chunk{content: p, ready: make(chan struct{}), done: w.done}) + w.curr.Store(curr.Next.Load()) + close(completePrevious) + return len(p), nil +} + +// Reader is reader from shared pipe, imlements io.Reader interface +type Reader struct { + curr *Chunk + once sync.Once + done chan struct{} + pos int +} + +// Close closes reader stream +func (s *Reader) Close() error { + s.once.Do(func() { + close(s.done) + }) + return nil +} + +func (s *Reader) Read(in []byte) (n int, err error) { + var curr = s.curr + for i := 0; i < len(in); { + for s.pos >= len(curr.content) { + select { + case <-curr.done: + curr = curr.Next.Load() + if curr == nil { + _ = s.Close() + return i, io.EOF + } + case <-s.done: + return i, io.ErrClosedPipe + case <-curr.ready: + curr = curr.Next.Load() + } + s.pos = 0 + s.curr = curr + } + var n = copy(in[i:], curr.content[s.pos:]) + s.pos += n + i += n + } + return len(in), nil +} + +var _ io.Closer = (*Writer)(nil) +var _ io.Closer = (*Reader)(nil) +var _ io.Reader = (*Reader)(nil) +var _ io.Writer = (*Writer)(nil) diff --git a/internal/pkg/sharedpipe/shared_pipe_bench_test.go b/internal/pkg/sharedpipe/shared_pipe_bench_test.go new file mode 100644 index 00000000000..3633e1fdf4f --- /dev/null +++ b/internal/pkg/sharedpipe/shared_pipe_bench_test.go @@ -0,0 +1,248 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
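To make the intended use of the sharedpipe Writer/Reader shown above concrete, here is a minimal illustrative sketch, not part of the patch itself: a single producer fanned out to several consumers. The import path is taken from the diff and assumes code living in the same module; everything else (the main function, the message contents) is an assumption for illustration only.

// Illustrative sketch only, not from the patch: one writer, several readers.
package main

import (
	"fmt"
	"io"
	"sync"

	"github.com/huly-stream/internal/pkg/sharedpipe" // internal path from the diff; assumes same module
)

func main() {
	w := sharedpipe.NewWriter()

	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		r := w.Transpile() // each reader independently observes everything written after this point
		wg.Add(1)
		go func(id int, r io.Reader) {
			defer wg.Done()
			data, _ := io.ReadAll(r) // drains the pipe; returns once the writer is closed
			fmt.Printf("reader %d got %d bytes: %q\n", id, len(data), data)
		}(i, r)
	}

	_, _ = w.Write([]byte("hello "))
	_, _ = w.Write([]byte("world"))
	_ = w.Close() // unblocks all readers with io.EOF

	wg.Wait()
}

Each reader walks the same chunk chain without copying the payload per reader, which is the property the benchmarks below compare against io.Pipe.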
+// + +package sharedpipe + +import ( + "bytes" + "io" + "math/rand" + "strings" + "sync" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +const sendMessageSize = 8 * 1000 * 1000 +const readerCount = 10 + +func TestStability(t *testing.T) { + for range 10 { + testStability(t) + } +} + +// #nosec +func testStability(t *testing.T) { + var writer = NewWriter() + var readers []io.Reader + + for range 1000 { + readers = append(readers, writer.Transpile()) + } + var buff [4]byte + for range rand.Intn(1000) { + _, _ = writer.Write([]byte("ping")) + for i := range rand.Intn(10) { + _, _ = readers[i].Read(buff[:]) + } + } + _ = writer.Close() + for _, r := range readers { + _, err := io.ReadAll(r) + require.NoError(t, err) + } +} + +func TestBasicWriteRead(t *testing.T) { + writer := NewWriter() + defer func() { _ = writer.Close() }() + reader := writer.Transpile() + + data := []byte("Hello, World!") + n, err := writer.Write(data) + if err != nil { + t.Fatalf("Unexpected error on Write: %v", err) + } + if n != len(data) { + t.Fatalf("Expected to write %d bytes, wrote %d", len(data), n) + } + + readBuf := make([]byte, len(data)) + n, err = reader.Read(readBuf) + if err != nil && err != io.EOF { + t.Fatalf("Unexpected error on Read: %v", err) + } + if !bytes.Equal(readBuf[:n], data) { + t.Fatalf("Expected to read %q, got %q", data, readBuf[:n]) + } +} + +func TestConcurrentWriteRead(t *testing.T) { + writer := NewWriter() + defer func() { _ = writer.Close() }() + reader := writer.Transpile() + + var wg sync.WaitGroup + data := []byte("Hello, Concurrent World!") + readBuf := make([]byte, len(data)) + + wg.Add(2) + + go func() { + defer wg.Done() + _, err := writer.Write(data) + if err != nil { + t.Errorf("Unexpected error on Write: %v", err) + } + }() + + // Reader goroutine + go func() { + defer wg.Done() + n, err := reader.Read(readBuf) + if err != nil && err != io.EOF { + t.Errorf("Unexpected error on Read: %v", err) + } + if n != len(data) { + t.Errorf("Expected to read %d bytes, read %d", len(data), n) + } + if !bytes.Equal(readBuf[:n], data) { + t.Errorf("Expected to read %q, got %q", data, readBuf[:n]) + } + }() + + wg.Wait() +} + +func TestWriterClose(t *testing.T) { + writer := NewWriter() + reader := writer.Transpile() + + _, _ = writer.Write([]byte("Hello")) + _ = writer.Close() + + readBuf := make([]byte, 5) + n, err := reader.Read(readBuf) + if err != nil && err != io.EOF { + t.Fatalf("Unexpected error on Read: %v", err) + } + if n != 5 { + t.Fatalf("Expected to read 5 bytes, read %d", n) + } + + n, err = reader.Read(readBuf) + if err != io.EOF { + t.Fatalf("Expected EOF after writer close, got %v", err) + } + if n != 0 { + t.Fatalf("Expected to read 0 bytes after EOF, read %d", n) + } +} + +// Test reading from an empty writer. 
+func TestReadFromEmptyWriter(t *testing.T) { + writer := NewWriter() + _ = writer.Close() + reader := writer.Transpile() + + readBuf := make([]byte, 5) + _, err := reader.Read(readBuf) + require.ErrorIs(t, io.EOF, err) +} + +func Test_PipeWait(t *testing.T) { + var writer = NewWriter() + var reader = writer.Transpile() + var buff [4]byte + go func() { + time.Sleep(time.Second / 10) + _, _ = writer.Write([]byte("test")) + }() + _, _ = reader.Read(buff[:]) + require.Equal(t, "test", string(buff[:])) +} + +func Test_Consistent(t *testing.T) { + var writer = NewWriter() + var readers []io.Reader + + for range readerCount { + readers = append(readers, writer.Transpile()) + } + + _, _ = writer.Write([]byte("Hello")) + _, _ = writer.Write([]byte(" ")) + _, _ = writer.Write([]byte("World!")) + _ = writer.Close() + + var res strings.Builder + + for i := range readerCount { + for { + var buffer = make([]byte, 2) + _, err := readers[i].Read(buffer) + if err == io.EOF { + break + } + _, _ = res.WriteString(string(buffer)) + } + require.Equal(t, "Hello World!", res.String()) + res.Reset() + } +} + +// Benchmark_DefaultPipe-8 (4 b) 61956 19177 ns/op 48 B/op 1 allocs/op +// Benchmark_DefaultPipe-8 (8 mb) 22 49187741 ns/op 118 B/op 1 allocs/op +func Benchmark_DefaultPipe(b *testing.B) { + var data [sendMessageSize]byte + var buffer = make([]byte, len(data)) + var readers []io.Reader + var writers []io.Writer + + for i := 0; i < readerCount; i++ { + r, w := io.Pipe() + readers = append(readers, r) + writers = append(writers, w) + } + + b.ReportAllocs() + b.ResetTimer() + + for range b.N { + go func() { + for i := 0; i < readerCount; i++ { + _, _ = writers[i].Write(data[:]) + } + }() + for i := 0; i < readerCount; i++ { + _, _ = readers[i].Read(buffer) + } + } +} + +// Benchmark_SharedPipe-8 (4 b) 161847 8131 ns/op 160 B/op 2 allocs/op +// Benchmark_SharedPipe-8 (8 mb) 69 15880031 ns/op 160 B/op 2 allocs/op +func Benchmark_SharedPipe(b *testing.B) { + var data [sendMessageSize]byte + var buffer = make([]byte, len(data)) + var writer = NewWriter() + var readers []io.Reader + + for range readerCount { + readers = append(readers, writer.Transpile()) + } + + b.ReportAllocs() + b.ResetTimer() + + for range b.N { + _, _ = writer.Write(data[:]) + for i := 0; i < readerCount; i++ { + _, _ = readers[i].Read(buffer) + } + } +} diff --git a/internal/pkg/transcoding/command.go b/internal/pkg/transcoding/command.go new file mode 100644 index 00000000000..f43b5cf594f --- /dev/null +++ b/internal/pkg/transcoding/command.go @@ -0,0 +1,163 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package transcoding + +import ( + "context" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "sort" + "strconv" + "strings" + + "github.com/pkg/errors" + + "github.com/huly-stream/internal/pkg/log" + "go.uber.org/zap" +) + +// Options represents configuration for the ffmpeg command +type Options struct { + OuputDir string + Resolutions []string + Threads int + UploadID string +} + +func measure(options *Options) int64 { + var res int64 + for _, resolution := range options.Resolutions { + var w, h int + var parts = strings.Split(resolution, ":") + + if len(parts) > 1 { + w, _ = strconv.Atoi(parts[0]) + w = max(w, 320) + h, _ = strconv.Atoi(parts[1]) + h = max(h, 240) + + res += int64(w) * int64(h) + } + } + + return max(res, 320*240) +} + +func newFfmpegCommand(ctx context.Context, in io.Reader, options *Options) (*exec.Cmd, error) { + if options == nil { + return nil, errors.New("options should not be nil") + } + if ctx == nil { + return nil, errors.New("ctx should not be nil") + } + + var logger = log.FromContext(ctx).With(zap.String("func", "NewFFMpegCommand")) + var args []string + + if options.Resolutions == nil { + logger.Debug("resolutions were not provided, building audio command...") + args = BuildAudioCommand(options) + } else { + logger.Debug("building video command...") + args = BuildVideoCommand(options) + } + + logger.Debug("prepared command: ", zap.Strings("args", args)) + + var result = exec.CommandContext(ctx, "ffmpeg", args...) + result.Stderr = os.Stdout + result.Stdout = os.Stdout + result.Stdin = in + + return result, nil +} + +func buildCommonComamnd(opts *Options) []string { + return []string{ + "-threads", fmt.Sprint(opts.Threads), + "-i", "pipe:0", + } +} + +// BuildAudioCommand returns flags for getting the audio from the input +func BuildAudioCommand(opts *Options) []string { + var commonPart = buildCommonComamnd(opts) + + return append(commonPart, + "-vn", "-acodec", + "copy", filepath.Join(opts.OuputDir, opts.UploadID), + ) +} + +// BuildVideoCommand returns flags for ffmpeg for video transcoding +func BuildVideoCommand(opts *Options) []string { + var result = buildCommonComamnd(opts) + + for _, res := range opts.Resolutions { + var prefix string + var w, h int + var parts = strings.Split(res, ":") + + if len(parts) > 1 { + w, _ = strconv.Atoi(parts[0]) + h, _ = strconv.Atoi(parts[1]) + } + w = max(w, 640) + h = max(h, 480) + prefix = ResolutionFromPixels(w * h) + + result = append(result, + "-vf", fmt.Sprintf("scale=%d:%d", w, h), + "-c:v", + "libx264", + "-preset", "veryfast", + "-crf", "23", + "-g", "60", + "-hls_time", "5", + "-hls_list_size", "0", + "-hls_segment_filename", filepath.Join(opts.OuputDir, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", prefix)), + filepath.Join(opts.OuputDir, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, prefix))) + } + + return result +} + +var resolutions = []struct { + pixels int + label string +}{ + {pixels: 640 * 480, label: "320p"}, + {pixels: 1280 * 720, label: "480p"}, + {pixels: 1920 * 1080, label: "720p"}, + {pixels: 2560 * 1440, label: "1k"}, + {pixels: 3840 * 2160, label: "2k"}, + {pixels: 5120 * 2160, label: "4k"}, + {pixels: 7680 * 4320, label: "5k"}, +} + +// ResolutionFromPixels converts pixel count to short string +func ResolutionFromPixels(pixels int) string { + idx := sort.Search(len(resolutions), func(i int) bool { + return pixels < resolutions[i].pixels + }) + if idx == len(resolutions) { + return "8k" + } + return resolutions[idx].label +} diff --git 
a/internal/pkg/transcoding/command_test.go b/internal/pkg/transcoding/command_test.go new file mode 100644 index 00000000000..14a7db515d1 --- /dev/null +++ b/internal/pkg/transcoding/command_test.go @@ -0,0 +1,62 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package transcoding_test + +import ( + "runtime" + "strings" + "testing" + + "github.com/huly-stream/internal/pkg/transcoding" + "github.com/stretchr/testify/require" +) + +func Test_BuildVideoCommand_Basic(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip() + } + var simpleHlsCommand = transcoding.BuildVideoCommand(&transcoding.Options{ + OuputDir: "test", + UploadID: "1", + Threads: 4, + Resolutions: []string{"1280:720"}, + }) + + const expected = `-threads 4 -i pipe:0 -vf scale=1280:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1_%03d_720p.ts test/1_720p_master.m3u8` + + require.Contains(t, expected, strings.Join(simpleHlsCommand, " ")) +} + +func TestResolutionFromPixels(t *testing.T) { + tests := []struct { + pixels int + expected string + }{ + {pixels: 320 * 240, expected: "320p"}, + {pixels: 640 * 480, expected: "480p"}, + {pixels: 1280 * 720, expected: "720p"}, + {pixels: 1920 * 1080, expected: "1k"}, + {pixels: 2560 * 1440, expected: "2k"}, + {pixels: 3840 * 2160, expected: "4k"}, + {pixels: 5120 * 2160, expected: "5k"}, + {pixels: 9000 * 4000, expected: "8k"}, + } + + for _, tt := range tests { + t.Run(tt.expected, func(t *testing.T) { + result := transcoding.ResolutionFromPixels(tt.pixels) + require.Equal(t, tt.expected, result, "ResolutionFromPixels(%d)", tt.pixels) + }) + } +} diff --git a/internal/pkg/transcoding/limiter.go b/internal/pkg/transcoding/limiter.go new file mode 100644 index 00000000000..f1fdf51d9fc --- /dev/null +++ b/internal/pkg/transcoding/limiter.go @@ -0,0 +1,78 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package transcoding + +import "sync/atomic" + +// Limiter is a simple CAS data structure for managing resources. +type Limiter struct { + capacity int64 + maxCapacity int64 +} + +// NewLimiter creates a new limiter with the given initial capacity. +func NewLimiter(capacity int64) *Limiter { + return &Limiter{ + capacity: capacity, + maxCapacity: capacity, + } +} + +// TryConsume attempts to consume the specified amount of capacity. +// Returns true if successful, false otherwise. 
+func (l *Limiter) TryConsume(amount int64) bool { + if amount <= 0 { + return false + } + + for { + current := atomic.LoadInt64(&l.capacity) + if current < amount { + return false + } + updated := current - amount + if atomic.CompareAndSwapInt64(&l.capacity, current, updated) { + return true + } + } +} + +// ReturnCapacity adds the specified amount back to the limiter's capacity. +// Does not exceed the maximum capacity. +func (l *Limiter) ReturnCapacity(amount int64) { + if amount <= 0 { + return + } + + for { + current := atomic.LoadInt64(&l.capacity) + updated := current + amount + if updated > l.maxCapacity { + updated = l.maxCapacity + } + if atomic.CompareAndSwapInt64(&l.capacity, current, updated) { + break + } + } +} + +// GetCapacity retrieves the current capacity for debugging or monitoring purposes. +func (l *Limiter) GetCapacity() int64 { + return atomic.LoadInt64(&l.capacity) +} + +// GetMaxCapacity retrieves the maximum capacity. +func (l *Limiter) GetMaxCapacity() int64 { + return l.maxCapacity +} diff --git a/internal/pkg/transcoding/limiter_test.go b/internal/pkg/transcoding/limiter_test.go new file mode 100644 index 00000000000..df7db82ba01 --- /dev/null +++ b/internal/pkg/transcoding/limiter_test.go @@ -0,0 +1,88 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
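For context, a small illustrative sketch of how the CAS-based Limiter above can gate expensive transcoding jobs; it is not part of the patch. The runJob wrapper and the capacity numbers are assumptions, while the pixel-based cost mirrors the measure() helper earlier in this patch.

// Illustrative sketch only, not from the patch: admission control for transcoding jobs.
package main

import (
	"fmt"

	"github.com/huly-stream/internal/pkg/transcoding" // internal path from the diff; assumes same module
)

// runJob is a hypothetical wrapper: reserve capacity, run the job, then give the capacity back.
func runJob(l *transcoding.Limiter, cost int64) error {
	if !l.TryConsume(cost) { // lock-free compare-and-swap, safe to call from many goroutines
		return fmt.Errorf("run out of resources: need %d, have %d", cost, l.GetCapacity())
	}
	defer l.ReturnCapacity(cost) // returned capacity is capped at the maximum

	// ... start ffmpeg / a worker here ...
	return nil
}

func main() {
	l := transcoding.NewLimiter(1920 * 1080 * 2) // capacity in pixels, matching measure()
	fmt.Println(runJob(l, 1280*720))             // fits
	fmt.Println(runJob(l, 4*3840*2160))          // rejected: exceeds remaining capacity
}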
+package transcoding_test + +import ( + "sync" + "sync/atomic" + "testing" + + "github.com/huly-stream/internal/pkg/transcoding" + "github.com/stretchr/testify/require" +) + +func TestLimiter(t *testing.T) { + limiter := transcoding.NewLimiter(10) + + t.Run("Initial capacity", func(t *testing.T) { + require.Equal(t, int64(10), limiter.GetCapacity()) + }) + + t.Run("Successful consume", func(t *testing.T) { + success := limiter.TryConsume(5) + require.True(t, success) + require.Equal(t, int64(5), limiter.GetCapacity()) + }) + + t.Run("Failed consume", func(t *testing.T) { + success := limiter.TryConsume(10) + require.False(t, success) + require.Equal(t, int64(5), limiter.GetCapacity()) + }) + + t.Run("Return capacity", func(t *testing.T) { + limiter.ReturnCapacity(3) + require.Equal(t, int64(8), limiter.GetCapacity()) + }) + + t.Run("Exceeding max capacity", func(t *testing.T) { + limiter.ReturnCapacity(10) + require.Equal(t, int64(10), limiter.GetCapacity()) + }) +} + +func TestLimiterConcurrency(t *testing.T) { + limiter := transcoding.NewLimiter(10) + var wg sync.WaitGroup + + for i := 0; i < 10; i++ { + wg.Add(1) + go func() { + defer wg.Done() + limiter.TryConsume(2) + }() + } + + wg.Wait() + require.LessOrEqual(t, limiter.GetCapacity(), int64(0)) +} + +func TestLimiterCAS(t *testing.T) { + limiter := transcoding.NewLimiter(10) + var successful int64 + var wg sync.WaitGroup + + for i := 0; i < 1000; i++ { + wg.Add(1) + go func() { + defer wg.Done() + if limiter.TryConsume(1) { + atomic.AddInt64(&successful, 1) + } + }() + } + wg.Wait() + + require.Equal(t, int64(10), successful) + require.Equal(t, int64(0), limiter.GetCapacity()) +} diff --git a/internal/pkg/transcoding/scheduler.go b/internal/pkg/transcoding/scheduler.go new file mode 100644 index 00000000000..25872a30477 --- /dev/null +++ b/internal/pkg/transcoding/scheduler.go @@ -0,0 +1,129 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package transcoding + +import ( + "context" + "strings" + "sync" + + "github.com/pkg/errors" + + "github.com/google/uuid" + "github.com/huly-stream/internal/pkg/config" + "github.com/huly-stream/internal/pkg/log" + "github.com/huly-stream/internal/pkg/sharedpipe" + "github.com/huly-stream/internal/pkg/uploader" + "github.com/tus/tusd/v2/pkg/handler" + "go.uber.org/zap" +) + +// Scheduler represents manager for worker. It creates a new worker for clients and manages its life cycle. +type Scheduler struct { + conf *config.Config + + limiter *Limiter + + mainContext context.Context + logger *zap.Logger + workers sync.Map +} + +// NewScheduler creates a new scheduler for transcode operations. 
+func NewScheduler(ctx context.Context, c *config.Config) *Scheduler { + return &Scheduler{ + conf: c, + limiter: NewLimiter(c.MaxCapacity), + mainContext: ctx, + logger: log.FromContext(ctx).With(zap.String("Scheduler", c.OutputDir)), + } +} + +// NewUpload creates a new worker with passed parameters +func (s *Scheduler) NewUpload(ctx context.Context, info handler.FileInfo) (handler.Upload, error) { + if info.ID == "" { + info.ID = uuid.NewString() + } + + s.logger.Debug("NewUpload", zap.String("ID", info.ID)) + + var result = &Worker{ + done: make(chan struct{}), + writer: sharedpipe.NewWriter(), + info: info, + logger: log.FromContext(s.mainContext).With(zap.String("Worker", info.ID)), + } + + var resolutions = strings.Split(info.MetaData["resolutions"], ",") + + var commandOptions = Options{ + OuputDir: s.conf.OutputDir, + Threads: s.conf.MaxThreads, + UploadID: info.ID, + Resolutions: resolutions, + } + + result.cost = measure(&commandOptions) + + if !s.limiter.TryConsume(result.cost) { + s.logger.Error("run out of resources") + return nil, errors.New("run out of resources") + } + + if s.conf.EndpointURL != nil { + s.logger.Debug("found endpoint url in the config, starting uploader...") + var contentUploader, err = uploader.New(s.mainContext, *s.conf, info.ID, info.MetaData) + if err != nil { + return nil, err + } + result.contentUploader = contentUploader + go func() { + var serverErr = result.contentUploader.Serve() + result.logger.Debug("content uploader has finished", zap.Error(serverErr)) + }() + } + + s.workers.Store(result.info.ID, result) + s.logger.Sugar().Debugf("New Upload: info %v", result.info) + if err := result.start(s.mainContext, &commandOptions); err != nil { + return nil, err + } + return result, nil +} + +// GetUpload returns current a worker based on upload id +func (s *Scheduler) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { + if v, ok := s.workers.Load(id); ok { + s.logger.Debug("GetUpload: found worker by id", zap.String("id", id)) + return v.(*Worker), nil + } + s.logger.Debug("GetUpload: worker not found", zap.String("id", id)) + return nil, errors.New("bad id") +} + +// AsTerminatableUpload returns tusd handler.TerminatableUpload +func (s *Scheduler) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload { + var worker = upload.(*Worker) + s.logger.Debug("AsTerminatableUpload, trying to return capacity", zap.Int64("cost", worker.cost)) + s.limiter.ReturnCapacity(worker.cost) + return worker +} + +// AsLengthDeclarableUpload returns tusd handler.LengthDeclarableUpload +func (s *Scheduler) AsLengthDeclarableUpload(upload handler.Upload) handler.LengthDeclarableUpload { + s.logger.Debug("AsLengthDeclarableUpload") + return upload.(*Worker) +} diff --git a/internal/pkg/transcoding/worker.go b/internal/pkg/transcoding/worker.go new file mode 100644 index 00000000000..60baccc4657 --- /dev/null +++ b/internal/pkg/transcoding/worker.go @@ -0,0 +1,125 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package transcoding provides objects and functions for video transcoding +package transcoding + +import ( + "context" + "io" + + "github.com/pkg/errors" + + "github.com/huly-stream/internal/pkg/sharedpipe" + "github.com/huly-stream/internal/pkg/uploader" + "github.com/tus/tusd/v2/pkg/handler" + "go.uber.org/zap" +) + +// Worker manages the client's input and transcodes it based on the passed configuration +type Worker struct { + contentUploader uploader.Uploader + logger *zap.Logger + info handler.FileInfo + writer *sharedpipe.Writer + reader *sharedpipe.Reader + cost int64 + done chan struct{} +} + +// WriteChunk is called when the client sends a chunk of raw data +func (w *Worker) WriteChunk(ctx context.Context, _ int64, src io.Reader) (int64, error) { + w.logger.Debug("Write Chunk start", zap.Int64("offset", w.info.Offset)) + var bytes, err = io.ReadAll(src) + _, _ = w.writer.Write(bytes) + var n = int64(len(bytes)) + w.info.Offset += n + w.logger.Debug("Write Chunk end", zap.Int64("offset", w.info.Offset), zap.Error(err)) + return n, err +} + +// DeclareLength sets the length of the video input +func (w *Worker) DeclareLength(ctx context.Context, length int64) error { + w.info.Size = length + w.info.SizeIsDeferred = false + w.logger.Debug("DeclareLength", zap.Int64("size", length), zap.Bool("SizeIsDeferred", w.info.SizeIsDeferred)) + return nil +} + +// GetInfo returns info about the transcoding status +func (w *Worker) GetInfo(ctx context.Context) (handler.FileInfo, error) { + w.logger.Debug("GetInfo is executed") + return w.info, nil +} + +// GetReader returns the worker's byte stream +func (w *Worker) GetReader(ctx context.Context) (io.ReadCloser, error) { + w.logger.Debug("GetReader is executed, creating current reader...") + return w.reader, nil +} + +// Terminate is called when the upload has failed +func (w *Worker) Terminate(ctx context.Context) error { + w.logger.Debug("Terminating...") + if w.contentUploader != nil { + go func() { + <-w.done + w.contentUploader.Rollback() + }() + } + return w.writer.Close() +} + +// ConcatUploads is called when an upload is resumed after a failure +func (w *Worker) ConcatUploads(ctx context.Context, partialUploads []handler.Upload) error { + w.logger.Debug("ConcatUploads was executed, it's not implemented") + // + // TODO: load raw source from the backup bucket, terminate all workers with same ID and start process again.
+ // + return errors.New("not implemented") +} + +// FinishUpload is called when the upload has finished without errors on the client side +func (w *Worker) FinishUpload(ctx context.Context) error { + w.logger.Debug("finishing upload...") + if w.contentUploader != nil { + go func() { + <-w.done + w.contentUploader.Terminate() + }() + } + return w.writer.Close() +} + +// AsConcatableUpload returns tusd handler.ConcatableUpload +func (s *Scheduler) AsConcatableUpload(upload handler.Upload) handler.ConcatableUpload { + s.logger.Debug("AsConcatableUpload is executed") + return upload.(*Worker) +} + +func (w *Worker) start(ctx context.Context, options *Options) error { + w.reader = w.writer.Transpile() + var cmd, err = newFfmpegCommand(ctx, w.reader, options) + if err != nil { + return err + } + go func() { + defer close(w.done) + if runErr := cmd.Run(); runErr != nil { + w.logger.Error("transcoding provider exited with error", zap.Error(runErr)) + } else { + w.logger.Debug("transcoding provider has finished without errors") + } + }() + return nil +} diff --git a/internal/pkg/uploader/datalake.go b/internal/pkg/uploader/datalake.go new file mode 100644 index 00000000000..6869cc9f99d --- /dev/null +++ b/internal/pkg/uploader/datalake.go @@ -0,0 +1,124 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License.
+ +package uploader + +import ( + "bytes" + "context" + "io" + "mime/multipart" + "os" + "path/filepath" + + "github.com/huly-stream/internal/pkg/log" + "github.com/pkg/errors" + "github.com/valyala/fasthttp" + "go.uber.org/zap" +) + +// DatalakeStorage represents datalake storage +type DatalakeStorage struct { + baseURL string + workspace string + token string +} + +// NewDatalakeStorage creates a new datalake client +func NewDatalakeStorage(baseURL, workspace, token string) Storage { + return &DatalakeStorage{ + baseURL: "https://" + baseURL, + token: token, + workspace: workspace, + } +} + +// UploadFile uploads file to the datalake +func (d *DatalakeStorage) UploadFile(ctx context.Context, fileName string) error { + // #nosec + file, err := os.Open(fileName) + if err != nil { + return err + } + defer func() { + _ = file.Close() + }() + + var _, objectKey = filepath.Split(fileName) + var logger = log.FromContext(ctx).With(zap.String("datalake upload", d.workspace), zap.String("fileName", fileName)) + + logger.Debug("start uploading") + + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("file", objectKey) + if err != nil { + return errors.Wrapf(err, "failed to create form file") + } + + _, err = io.Copy(part, file) + if err != nil { + return errors.Wrapf(err, "failed to copy file data") + } + + _ = writer.Close() + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + + res := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(res) + + req.SetRequestURI(d.baseURL + "/upload/form-data/" + d.workspace) + req.Header.SetMethod(fasthttp.MethodPost) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType(writer.FormDataContentType()) + req.SetBody(body.Bytes()) + + client := fasthttp.Client{} + if err := client.Do(req, res); err != nil { + return errors.Wrapf(err, "upload failed") + } + + logger.Debug("file uploaded") + + return nil +} + +// DeleteFile deletes file from the datalake +func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error { + var logger = log.FromContext(ctx).With(zap.String("datalake delete", d.workspace), zap.String("fileName", fileName)) + logger.Debug("start deleting") + + var _, objectKey = filepath.Split(fileName) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + + res := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(res) + + req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey) + req.Header.SetMethod(fasthttp.MethodDelete) + req.Header.Add("Authorization", "Bearer "+d.token) + + client := fasthttp.Client{} + if err := client.Do(req, res); err != nil { + return errors.Wrapf(err, "delete failed") + } + + logger.Debug("file deleted") + + return nil +} diff --git a/internal/pkg/uploader/options.go b/internal/pkg/uploader/options.go new file mode 100644 index 00000000000..076e2c70afb --- /dev/null +++ b/internal/pkg/uploader/options.go @@ -0,0 +1,19 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. + +package uploader + +type options struct{} + +// Option provides option for storages +type Option func(*options) diff --git a/internal/pkg/uploader/postpone.go b/internal/pkg/uploader/postpone.go new file mode 100644 index 00000000000..61b22b79b42 --- /dev/null +++ b/internal/pkg/uploader/postpone.go @@ -0,0 +1,40 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package uploader + +import ( + "context" + "time" +) + +func (u *uploader) postpone(id string, action func()) { + var ctx, cancel = context.WithCancel(context.Background()) + var startCh = time.After(u.postponeDuration) + + if v, ok := u.contexts.Load(id); ok { + (*v.(*context.CancelFunc))() + } + u.contexts.Store(id, &cancel) + + go func() { + defer cancel() + select { + case <-ctx.Done(): + return + case <-startCh: + action() + u.contexts.CompareAndDelete(id, &cancel) + } + }() +} diff --git a/internal/pkg/uploader/postpone_test.go b/internal/pkg/uploader/postpone_test.go new file mode 100644 index 00000000000..89522b4cec6 --- /dev/null +++ b/internal/pkg/uploader/postpone_test.go @@ -0,0 +1,44 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package uploader + +import ( + "sync/atomic" + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func Test_Postpone(t *testing.T) { + var u = uploader{ + postponeDuration: time.Second / 4, + } + var counter atomic.Int32 + u.postpone("1", func() { counter.Add(1) }) + time.Sleep(time.Second / 8) + u.postpone("1", func() { counter.Add(1) }) + time.Sleep(time.Second / 2) + require.Equal(t, int32(1), counter.Load()) + time.Sleep(time.Second / 2) + require.Equal(t, int32(1), counter.Load()) +} + +func Test_WithoutPostpone(t *testing.T) { + var counter atomic.Int32 + var u uploader + u.postpone("1", func() { counter.Add(1) }) + time.Sleep(time.Second / 10) + require.Equal(t, int32(1), counter.Load()) +} diff --git a/internal/pkg/uploader/s3.go b/internal/pkg/uploader/s3.go new file mode 100644 index 00000000000..ba688618f9f --- /dev/null +++ b/internal/pkg/uploader/s3.go @@ -0,0 +1,143 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package uploader + +import ( + "context" + "fmt" + + "github.com/pkg/errors" + + "os" + "path/filepath" + "strings" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + "github.com/aws/aws-sdk-go-v2/credentials" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/smithy-go" + "github.com/huly-stream/internal/pkg/log" + "go.uber.org/zap" +) + +// S3Storage represents S3 storage +type S3Storage struct { + client *s3.Client + bucketName string +} + +// NewS3 creates a new S3 storage +func NewS3(ctx context.Context, endpoint string) Storage { + var accessKeyID = os.Getenv("AWS_ACCESS_KEY_ID") + var accessKeySecret = os.Getenv("AWS_SECRET_ACCESS_KEY") + var bucketName = os.Getenv("AWS_BUCKET_NAME") + + cfg, err := config.LoadDefaultConfig(ctx, + config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKeyID, accessKeySecret, "")), + config.WithRegion("auto"), + ) + if err != nil { + panic(err.Error()) + } + + var s3Client = s3.NewFromConfig(cfg, func(o *s3.Options) { + endpoint = "https://" + endpoint + o.BaseEndpoint = &endpoint + }) + + return &S3Storage{ + client: s3Client, + bucketName: bucketName, + } +} + +func getContentType(objectKey string) string { + if strings.HasSuffix(objectKey, ".txt") { + return "txt" + } + if strings.HasSuffix(objectKey, ".ts") { + return "video/mp2t" + } + if strings.HasSuffix(objectKey, ".m3u8") { + return "application/x-mpegurl" + } + return "application/octet-stream" +} + +// DeleteFile deletes file from the s3 storage +func (u *S3Storage) DeleteFile(ctx context.Context, fileName string) error { + var _, objectKey = filepath.Split(fileName) + var logger = log.FromContext(ctx).With(zap.String("s3 delete", u.bucketName), zap.String("fileName", fileName)) + + logger.Debug("start deleting") + input := &s3.DeleteObjectInput{ + Bucket: aws.String(u.bucketName), + Key: aws.String(objectKey), + } + + _, err := u.client.DeleteObject(ctx, input) + if err != nil { + return fmt.Errorf("failed to delete file from S3: %w", err) + } + logger.Debug("file deleted") + return nil +} + +// UploadFile uploads file to the s3 storage +func (u *S3Storage) UploadFile(ctx context.Context, fileName string) error { + var _, objectKey = filepath.Split(fileName) + var logger = log.FromContext(ctx).With(zap.String("s3 upload", u.bucketName), zap.String("fileName", fileName)) + logger.Debug("start upload file") + + // #nosec + var file, err = os.Open(fileName) + + if err != nil { + logger.Error("can not open file", zap.Error(err)) + return err + } + + defer func() { + _ = file.Close() + }() + _, err = u.client.PutObject(ctx, &s3.PutObjectInput{ + Bucket: aws.String(u.bucketName), + Key: aws.String(objectKey), + Body: file, + ContentType: aws.String(getContentType(objectKey)), + }) + + if err != nil { + var apiErr smithy.APIError + if errors.As(err, &apiErr) && apiErr.ErrorCode() == "EntityTooLarge" { + logger.Error("Error while uploading object. The object is too large." 
+ + "To upload objects larger than 5GB, use the S3 console (160GB max)" + + "or the multipart upload API (5TB max).") + } else { + logger.Error("Couldn't upload file", zap.Error(err)) + } + return apiErr + } + + err = s3.NewObjectExistsWaiter(u.client).Wait( + ctx, &s3.HeadObjectInput{Bucket: aws.String(u.bucketName), Key: aws.String(objectKey)}, time.Minute) + if err != nil { + logger.Debug("Failed attempt to wait for object to exist.") + } + + logger.Debug("file has uploaded") + return err +} diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go new file mode 100644 index 00000000000..86735191a4a --- /dev/null +++ b/internal/pkg/uploader/uploader.go @@ -0,0 +1,219 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package uploader provides objects and functions to work with uploading and monitoring files +package uploader + +import ( + "context" + "net/url" + "os" + "strings" + "sync" + "time" + + "github.com/pkg/errors" + + "github.com/fsnotify/fsnotify" + "github.com/huly-stream/internal/pkg/config" + "github.com/huly-stream/internal/pkg/log" + "go.uber.org/zap" +) + +type uploader struct { + ctx context.Context + cancel context.CancelFunc + baseDir string + uploadID string + masterFiles sync.Map + postponeDuration time.Duration + sentFiles sync.Map + storage Storage + contexts sync.Map + retryCount int + removeLocalContentOnUpload bool + eventBufferCount uint + isMasterFileFunc func(s string) bool +} + +// Rollback deletes all delivered files and also deletes all local content by uploadID +func (u *uploader) Rollback() { + log.FromContext(u.ctx).Debug("cancel") + defer u.cancel() + u.sentFiles.Range(func(key, value any) bool { + log.FromContext(u.ctx).Debug("deleting remote file", zap.String("key", key.(string))) + for range u.retryCount { + var err = u.storage.DeleteFile(u.ctx, key.(string)) + if err == nil { + break + } + log.FromContext(u.ctx).Debug("can not delete file", zap.Error(err)) + } + return true + }) + if !u.removeLocalContentOnUpload { + return + } + u.sentFiles.Range(func(key, value any) bool { + log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) + _ = os.Remove(key.(string)) + return true + }) +} + +func (u *uploader) Terminate() { + log.FromContext(u.ctx).Debug("terminate") + defer u.cancel() + u.masterFiles.Range(func(key, value any) bool { + log.FromContext(u.ctx).Debug("uploading master file", zap.String("key", key.(string))) + for range u.retryCount { + var uploadErr = u.storage.UploadFile(u.ctx, key.(string)) + if uploadErr == nil { + break + } + log.FromContext(u.ctx).Debug("can not upload file", zap.Error(uploadErr)) + } + return true + }) + if !u.removeLocalContentOnUpload { + return + } + u.masterFiles.Range(func(key, value any) bool { + log.FromContext(u.ctx).Debug("deleting local master file", zap.String("key", key.(string))) + _ = os.Remove(key.(string)) + return true + }) + u.sentFiles.Range(func(key, value any) bool { + 
log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) + _ = os.Remove(key.(string)) + return true + }) +} + +func (u *uploader) Serve() error { + var logger = log.FromContext(u.ctx) + logger = logger.With(zap.String("uploader", u.uploadID), zap.String("dir", u.baseDir)) + var watcher, err = fsnotify.NewBufferedWatcher(u.eventBufferCount) + if err != nil { + logger.Error("can not start watcher") + return err + } + if err := watcher.Add(u.baseDir); err != nil { + return err + } + defer func() { + _ = watcher.Close() + }() + + logger.Debug("uploader initialized and started to watch") + + for { + select { + case <-u.ctx.Done(): + logger.Debug("done") + return u.ctx.Err() + case event, ok := <-watcher.Events: + if !strings.Contains(event.Name, u.uploadID) { + continue + } + if !ok { + return u.ctx.Err() + } + if u.isMasterFileFunc(event.Name) { + u.masterFiles.Store(event.Name, struct{}{}) + logger.Debug("found master file", zap.String("eventName", event.Name)) + continue + } + u.postpone(event.Name, func() { + logger.Debug("started to upload", zap.String("eventName", event.Name)) + for range u.retryCount { + var uploadErr = u.storage.UploadFile(u.ctx, event.Name) + if uploadErr == nil { + break + } + logger.Error("can not upload file", zap.Error(uploadErr)) + } + logger.Debug("added to sentFiles", zap.String("eventName", event.Name)) + u.sentFiles.Store(event.Name, struct{}{}) + }) + case err, ok := <-watcher.Errors: + if !ok { + return u.ctx.Err() + } + logger.Error("get an error", zap.Error(err)) + } + } +} + +// Uploader manages content delivering +type Uploader interface { + Terminate() + Rollback() + Serve() error +} + +// Storage represents file-based storage +type Storage interface { + UploadFile(ctx context.Context, fileName string) error + DeleteFile(ctx context.Context, fileName string) error +} + +// New creates a new instance of Uplaoder +func New(ctx context.Context, conf config.Config, uploadID string, metadata map[string]string) (Uploader, error) { + var uploaderCtx, uploaderCancel = context.WithCancel(ctx) + var storage Storage + var err error + + if conf.EndpointURL != nil { + storage, err = NewStorageByURL(ctx, conf.EndpointURL, metadata) + if err != nil { + uploaderCancel() + return nil, err + } + } + + return &uploader{ + ctx: uploaderCtx, + cancel: uploaderCancel, + uploadID: uploadID, + removeLocalContentOnUpload: conf.RemoveContentOnUpload, + postponeDuration: time.Second * 2, + storage: storage, + retryCount: 5, + baseDir: conf.OutputDir, + eventBufferCount: 100, + isMasterFileFunc: func(s string) bool { + return strings.HasSuffix(s, "m3u8") + }, + }, nil +} + +// NewStorageByURL creates a new storage basd on the type from the url scheme, for example "datalake://my-datalake-endpoint" +func NewStorageByURL(ctx context.Context, u *url.URL, headers map[string]string) (Storage, error) { + switch u.Scheme { + case "tus": + return nil, errors.New("not imlemented yet") + case "datalake": + if headers["workspace"] == "" { + return nil, errors.New("missed workspace in the client's metadata") + } + if headers["token"] == "" { + return nil, errors.New("missed auth token in the client's metadata") + } + return NewDatalakeStorage(u.Hostname(), headers["workspace"], headers["token"]), nil + case "s3": + return NewS3(ctx, u.Hostname()), nil + default: + return nil, errors.New("unknown scheme") + } +} From 1b8be96319b735d0196738772a6ff65d68f7a29e Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 4 Feb 2025 05:15:28 +0300 Subject: [PATCH 026/636] 
apply review comments Signed-off-by: denis-tingaikin --- .github/workflows/main.yaml | 2 -- README.md | 16 +++++++++------- internal/pkg/uploader/datalake.go | 10 ++++++++-- 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index a2889730490..635804cf495 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -29,8 +29,6 @@ jobs: matrix: os: - ubuntu - - windows - - macos runs-on: ${{ matrix.os }}-latest steps: - name: Check out code diff --git a/README.md b/README.md index 8eb361d4426..f977167dba4 100644 --- a/README.md +++ b/README.md @@ -20,13 +20,15 @@ The Huly Stream high-performance HTTP-based transcoding service. Huly-stream is - `webm` ### Output Options -- **TUS Upload**: Resumable file uploads via TUS protocol. -- **s3 Upload**: Direct upload to Amazon S3. -- **datalake Upload**: Integration for data lake storage systems. - **Supported Output Formats**: - `aac` - `hls` +### Upload options +- **TUS Upload**: Resumable file uploads via TUS protocol. +- **s3 Upload**: Direct upload to S3 storage. +- **datalake Upload**: Upload to datalake storage. + ### Key Functionalities - **Live transcoing with minimal upload time**: Transcoding results are going to be avaible after stream completion. - **Transcoding Cancelation**: Cancel or pause ongoing transcoding in real-time. @@ -74,13 +76,13 @@ STREAM_REMOVE_CONTENT_ON_UPLOAD True or False true STREAM_UPLOAD_RAW_CONTENT True or False false uploads content in raw quality to the endpoint if true ``` -### Metadata: +### Metadata -**resolutions** if passed, set the resolution for the output, for example, 'resolutions: 1920:1080, 1280:720.' +**resolutions:** if passed, set the resolution for the output, for example, 'resolutions: 1920:1080, 1280:720.' -**token** must be provided to be authorized in the Huly's datalake service. +**token:** must be provided to be authorized in the Huly's datalake service. -**workspace** required for uploading content. +**workspace:** required for uploading content to the datalake storage. 
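To illustrate the metadata keys described above, here is a small sketch, not part of the patch, of how a client could pack them into the tus Upload-Metadata header (comma-separated key/value pairs with base64-encoded values, per the tus protocol). The endpoint address, the placeholder workspace/token values, and the use of deferred length are assumptions; the exact headers depend on the tus creation flow the client uses.

// Illustrative sketch only, not from the patch: building the tus Upload-Metadata header.
package main

import (
	"encoding/base64"
	"fmt"
	"net/http"
	"strings"
)

// uploadMetadata encodes metadata as "key base64(value)" pairs joined by commas,
// as required by the tus protocol.
func uploadMetadata(meta map[string]string) string {
	pairs := make([]string, 0, len(meta))
	for k, v := range meta {
		pairs = append(pairs, k+" "+base64.StdEncoding.EncodeToString([]byte(v)))
	}
	return strings.Join(pairs, ",")
}

func main() {
	req, _ := http.NewRequest(http.MethodPost, "http://localhost:1080/files/", nil) // placeholder address
	req.Header.Set("Tus-Resumable", "1.0.0")
	req.Header.Set("Upload-Defer-Length", "1") // length can be declared later via DeclareLength
	req.Header.Set("Upload-Metadata", uploadMetadata(map[string]string{
		"resolutions": "1920:1080,1280:720",
		"workspace":   "<workspace>", // placeholder
		"token":       "<token>",     // placeholder
	}))
	fmt.Println(req.Header.Get("Upload-Metadata"))
}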
diff --git a/internal/pkg/uploader/datalake.go b/internal/pkg/uploader/datalake.go index 6869cc9f99d..43ba535efd5 100644 --- a/internal/pkg/uploader/datalake.go +++ b/internal/pkg/uploader/datalake.go @@ -18,6 +18,7 @@ import ( "context" "io" "mime/multipart" + "net/url" "os" "path/filepath" @@ -54,7 +55,7 @@ func (d *DatalakeStorage) UploadFile(ctx context.Context, fileName string) error _ = file.Close() }() - var _, objectKey = filepath.Split(fileName) + var objectKey = getObjectKey(fileName) var logger = log.FromContext(ctx).With(zap.String("datalake upload", d.workspace), zap.String("fileName", fileName)) logger.Debug("start uploading") @@ -101,7 +102,7 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error var logger = log.FromContext(ctx).With(zap.String("datalake delete", d.workspace), zap.String("fileName", fileName)) logger.Debug("start deleting") - var _, objectKey = filepath.Split(fileName) + var objectKey = getObjectKey(fileName) req := fasthttp.AcquireRequest() defer fasthttp.ReleaseRequest(req) @@ -122,3 +123,8 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error return nil } + +func getObjectKey(s string) string { + var _, objectKey = filepath.Split(s) + return url.QueryEscape(objectKey) +} From 37e39e649d1f5e7155f46206d59a992045190fab Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 4 Feb 2025 14:48:41 +0400 Subject: [PATCH 027/636] Fix package.json (#16) Signed-off-by: Kristina Fefelova --- bun.lock | 725 ++++++++++++++++++++++++++++ bun.lockb | Bin 105104 -> 0 bytes packages/client-query/package.json | 4 +- packages/client-sqlite/package.json | 4 +- packages/client-ws/package.json | 4 +- packages/cockroach/package.json | 4 +- packages/examples/package.json | 4 +- packages/query/package.json | 4 +- packages/sdk-types/package.json | 14 +- packages/server/package.json | 4 +- packages/sqlite-wasm/package.json | 4 +- packages/types/package.json | 4 +- 12 files changed, 750 insertions(+), 25 deletions(-) create mode 100644 bun.lock delete mode 100755 bun.lockb diff --git a/bun.lock b/bun.lock new file mode 100644 index 00000000000..07fb97651a9 --- /dev/null +++ b/bun.lock @@ -0,0 +1,725 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "@hcengineering/communication", + "devDependencies": { + "@eslint/js": "^9.15.0", + "@types/bun": "^1.1.14", + "bun-types": "^1.1.38", + "eslint": "^9.15.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.2.1", + "prettier": "^3.3.3", + "typescript-eslint": "^8.15.0", + }, + }, + "packages/client-query": { + "name": "@hcengineering/communication-client-query", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-query": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "fast-equals": "^5.0.1", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/client-sqlite": { + "name": "@hcengineering/communication-client-sqlite", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-sqlite-wasm": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "fast-equals": "^5.0.1", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/client-ws": { + "name": "@hcengineering/communication-client-ws", + "version": 
"0.1.0", + "dependencies": { + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@msgpack/msgpack": "^3.0.0-beta2", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/cockroach": { + "name": "@hcengineering/communication-cockroach", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "pg": "8.12.0", + "postgres": "^3.4.4", + "uuid": "^11.0.3", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/examples": { + "name": "@hcengineering/communication-examples", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-client-query": "workspace:*", + "@hcengineering/communication-client-sqlite": "workspace:*", + "@hcengineering/communication-client-ws": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/query": { + "name": "@hcengineering/communication-query", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "fast-equals": "^5.0.1", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/crypto-js": "^4.2.2", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/sdk-types": { + "name": "@hcengineering/communication-sdk-types", + "version": "0.1.1", + "dependencies": { + "@hcengineering/communication-types": "^0.1.0", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/server": { + "name": "@hcengineering/communication-server", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-cockroach": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/server-token": "^0.6.377", + "cors": "^2.8.5", + "dotenv": "^16.4.7", + "express": "^4.21.2", + "msgpackr": "^1.11.2", + "ws": "^8.18.0", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/cors": "^2.8.17", + "@types/express": "^5.0.0", + "@types/ws": "^8.5.13", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/sqlite-wasm": { + "name": "@hcengineering/communication-sqlite-wasm", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@sqlite.org/sqlite-wasm": "^3.47.1-build1", + "path": "^0.12.7", + "uuid": "^11.0.3", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/types": { + "name": "@hcengineering/communication-types", + "version": "0.1.0", + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + }, + "packages": { + "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA=="], + + "@eslint-community/regexpp": 
["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], + + "@eslint/config-array": ["@eslint/config-array@0.19.2", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w=="], + + "@eslint/core": ["@eslint/core@0.10.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw=="], + + "@eslint/eslintrc": ["@eslint/eslintrc@3.2.0", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w=="], + + "@eslint/js": ["@eslint/js@9.19.0", "", {}, "sha512-rbq9/g38qjfqFLOVPvwjIvFFdNziEC5S65jmjPw5r6A//QH+W91akh9irMwjDN8zKUTak6W9EsAv4m/7Wnw0UQ=="], + + "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], + + "@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.5", "", { "dependencies": { "@eslint/core": "^0.10.0", "levn": "^0.4.1" } }, "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A=="], + + "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@1.11.4", "", { "dependencies": { "@formatjs/intl-localematcher": "0.2.25", "tslib": "^2.1.0" } }, "sha512-EBikYFp2JCdIfGEb5G9dyCkTGDmC57KSHhRQOC3aYxoPWVZvfWCDjZwkGYHN7Lis/fmuWl906bnNTJifDQ3sXw=="], + + "@formatjs/fast-memoize": ["@formatjs/fast-memoize@1.2.1", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-Rg0e76nomkz3vF9IPlKeV+Qynok0r7YZjL6syLz4/urSg0IbjPZCB/iYUMNsYA643gh4mgrX3T7KEIFIxJBQeg=="], + + "@formatjs/icu-messageformat-parser": ["@formatjs/icu-messageformat-parser@2.1.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "@formatjs/icu-skeleton-parser": "1.3.6", "tslib": "^2.1.0" } }, "sha512-Qxv/lmCN6hKpBSss2uQ8IROVnta2r9jd3ymUEIjm2UyIkUCHVcbUVRGL/KS/wv7876edvsPe+hjHVJ4z8YuVaw=="], + + "@formatjs/icu-skeleton-parser": ["@formatjs/icu-skeleton-parser@1.3.6", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "tslib": "^2.1.0" } }, "sha512-I96mOxvml/YLrwU2Txnd4klA7V8fRhb6JG/4hm3VMNmeJo1F03IpV2L3wWt7EweqNLES59SZ4d6hVOPCSf80Bg=="], + + "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.2.25", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-YmLcX70BxoSopLFdLr1Ds99NdlTI2oWoLbaUW2M406lxOIPzE1KQhRz2fPUkq34xVZQaihCoU29h0KK7An3bhA=="], + + "@hcengineering/analytics": ["@hcengineering/analytics@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.6.411/69cc5a0cf77032394306ccc44b3332756f83687c", { "dependencies": { "@hcengineering/platform": "^0.6.411" } }, "sha512-pruHZ75es4HMefJEYPWOm/bimffnF7RP+tW+f8boJMZh8hfIb33RmwTzuneToG4kdYtiU/tcIvFwvUdcILVLcQ=="], + + "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], + + "@hcengineering/communication-client-sqlite": ["@hcengineering/communication-client-sqlite@workspace:packages/client-sqlite"], + + "@hcengineering/communication-client-ws": ["@hcengineering/communication-client-ws@workspace:packages/client-ws"], + + 
"@hcengineering/communication-cockroach": ["@hcengineering/communication-cockroach@workspace:packages/cockroach"], + + "@hcengineering/communication-examples": ["@hcengineering/communication-examples@workspace:packages/examples"], + + "@hcengineering/communication-query": ["@hcengineering/communication-query@workspace:packages/query"], + + "@hcengineering/communication-sdk-types": ["@hcengineering/communication-sdk-types@workspace:packages/sdk-types"], + + "@hcengineering/communication-server": ["@hcengineering/communication-server@workspace:packages/server"], + + "@hcengineering/communication-sqlite-wasm": ["@hcengineering/communication-sqlite-wasm@workspace:packages/sqlite-wasm"], + + "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], + + "@hcengineering/core": ["@hcengineering/core@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/core/0.6.411/ca7cde2060d1b081d76d163524296a37954dd810", { "dependencies": { "@hcengineering/analytics": "^0.6.411", "@hcengineering/platform": "^0.6.411", "fast-equals": "^5.0.1" } }, "sha512-dsRwiOqO4W1i4Z3dzTBOasUly2pFQnK1K6va0hsII/3f7uP/N7pYR8Lk+teTMfd9Gbv5o7CYozClXb3ntt8Jgg=="], + + "@hcengineering/platform": ["@hcengineering/platform@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/platform/0.6.411/e511e044a9f12e4cd281922cf827b77cde1b73be", { "dependencies": { "intl-messageformat": "^9.7.1" } }, "sha512-5S6wUT9fbZsKAm/JaAEpYrRFNVwM0QfNjf22s96Ka39nTcNApxfa0j8jSINdKRf8zQN1EAIk12Lmf/0ZhKcJyw=="], + + "@hcengineering/server-token": ["@hcengineering/server-token@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.6.411/66e75ee369f988a9dae85d24d98e0dd3e2661f8b", { "dependencies": { "@hcengineering/core": "^0.6.411", "@hcengineering/platform": "^0.6.411", "jwt-simple": "^0.5.6" } }, "sha512-KHS1ET3rxIDuj+dSN3btrMQC3BZexTZZuuy0zoevdMJERD4+aM4SoWaJLgmwrcTLOBIvGJTOfMdZb1rHJ8KOOw=="], + + "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], + + "@humanfs/node": ["@humanfs/node@0.16.6", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.3.0" } }, "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw=="], + + "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], + + "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.1", "", {}, "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA=="], + + "@msgpack/msgpack": ["@msgpack/msgpack@3.0.0-beta3", "", {}, "sha512-LZYWBmrkKO0quyjnJCeSaqHOcsuZUvE+hlIYRqFc0qI27dLnsOdnv8Fsj2cyitzQTJZmCPm53vZ/P8QTH7E84A=="], + + "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], + + "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, 
"sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="], + + "@msgpackr-extract/msgpackr-extract-linux-x64": ["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="], + + "@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="], + + "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], + + "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], + + "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + + "@pkgr/core": ["@pkgr/core@0.1.1", "", {}, "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA=="], + + "@sqlite.org/sqlite-wasm": ["@sqlite.org/sqlite-wasm@3.47.1-build1", "", { "bin": { "sqlite-wasm": "bin/index.js" } }, "sha512-3qnVGab3sjJ8ov0ce1rQGZrMkglYEQ/q5fnq1s1BpRWFMYsiJVaLD1zKepcI9E9r3qx7929+2G27Hfsasvrm6Q=="], + + "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], + + "@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="], + + "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], + + "@types/cors": ["@types/cors@2.8.17", "", { "dependencies": { "@types/node": "*" } }, "sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA=="], + + "@types/crypto-js": ["@types/crypto-js@4.2.2", "", {}, "sha512-sDOLlVbHhXpAUAL0YHDUUwDZf3iN4Bwi4W6a0W0b+QcAezUbRtH4FVb+9J4h+XFPW7l/gQ9F8qC7P+Ec4k8QVQ=="], + + "@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="], + + "@types/express": ["@types/express@5.0.0", "", { "dependencies": { "@types/body-parser": "*", "@types/express-serve-static-core": "^5.0.0", "@types/qs": "*", "@types/serve-static": "*" } }, "sha512-DvZriSMehGHL1ZNLzi6MidnsDhUZM/x2pRdDIKdwbUNqqwHxMlRdkxtn6/EPKyqKpHqTl/4nRZsRNLpZxZRpPQ=="], + + "@types/express-serve-static-core": ["@types/express-serve-static-core@5.0.6", "", { "dependencies": { "@types/node": "*", "@types/qs": "*", "@types/range-parser": "*", "@types/send": "*" } }, "sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA=="], + + 
"@types/http-errors": ["@types/http-errors@2.0.4", "", {}, "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA=="], + + "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], + + "@types/mime": ["@types/mime@1.3.5", "", {}, "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="], + + "@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="], + + "@types/qs": ["@types/qs@6.9.18", "", {}, "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA=="], + + "@types/range-parser": ["@types/range-parser@1.2.7", "", {}, "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ=="], + + "@types/send": ["@types/send@0.17.4", "", { "dependencies": { "@types/mime": "^1", "@types/node": "*" } }, "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA=="], + + "@types/serve-static": ["@types/serve-static@1.15.7", "", { "dependencies": { "@types/http-errors": "*", "@types/node": "*", "@types/send": "*" } }, "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw=="], + + "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.23.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/type-utils": "8.23.0", "@typescript-eslint/utils": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-vBz65tJgRrA1Q5gWlRfvoH+w943dq9K1p1yDBY2pc+a1nbBLZp7fB9+Hk8DaALUbzjqlMfgaqlVPT1REJdkt/w=="], + + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.23.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/types": "8.23.0", "@typescript-eslint/typescript-estree": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-h2lUByouOXFAlMec2mILeELUbME5SZRN/7R9Cw2RD2lRQQY08MWMM+PmVVKKJNK1aIwqTo9t/0CvOxwPbRIE2Q=="], + + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0" } }, "sha512-OGqo7+dXHqI7Hfm+WqkZjKjsiRtFUQHPdGMXzk5mYXhJUedO7e/Y7i8AK3MyLMgZR93TX4bIzYrfyVjLC+0VSw=="], + + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.23.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.23.0", "@typescript-eslint/utils": "8.23.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-iIuLdYpQWZKbiH+RkCGc6iu+VwscP5rCtQ1lyQ7TYuKLrcZoeJVpcLiG8DliXVkUxirW/PWlmS+d6yD51L9jvA=="], + + "@typescript-eslint/types": 
["@typescript-eslint/types@8.23.0", "", {}, "sha512-1sK4ILJbCmZOTt9k4vkoulT6/y5CHJ1qUYxqpF1K/DBAd8+ZUL4LlSCxOssuH5m4rUaaN0uS0HlVPvd45zjduQ=="], + + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-LcqzfipsB8RTvH8FX24W4UUFk1bl+0yTOf9ZA08XngFwMg4Kj8A+9hwz8Cr/ZS4KwHrmo9PJiLZkOt49vPnuvQ=="], + + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.23.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/types": "8.23.0", "@typescript-eslint/typescript-estree": "8.23.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-uB/+PSo6Exu02b5ZEiVtmY6RVYO7YU5xqgzTIVZwTHvvK3HsL8tZZHFaTLFtRG3CsV4A5mhOv+NZx5BlhXPyIA=="], + + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-oWWhcWDLwDfu++BGTZcmXWqpwtkwb5o7fxUIGksMQQDSdPW9prsSnfIOZMlsj4vBOSrcnjIUZMiIjODgGosFhQ=="], + + "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], + + "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], + + "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], + + "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], + + "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], + + "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], + + "array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="], + + "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + + "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], + + "brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], + + "braces": 
["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], + + "bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="], + + "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], + + "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="], + + "call-bound": ["call-bound@1.0.3", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "get-intrinsic": "^1.2.6" } }, "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA=="], + + "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], + + "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], + + "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], + + "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], + + "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], + + "content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="], + + "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], + + "cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="], + + "cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="], + + "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], + + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], + + "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="], + + "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], + + "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + + "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], + + "detect-libc": ["detect-libc@2.0.3", 
"", {}, "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw=="], + + "dotenv": ["dotenv@16.4.7", "", {}, "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ=="], + + "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + + "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], + + "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], + + "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + + "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], + + "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + + "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], + + "eslint": ["eslint@9.19.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.0", "@eslint/core": "^0.10.0", "@eslint/eslintrc": "^3.2.0", "@eslint/js": "9.19.0", "@eslint/plugin-kit": "^0.2.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.1", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-ug92j0LepKlbbEv6hD911THhoRHmbdXt2gX+VDABAW/Ir7D3nqKdv5Pf5vtlyY6HQMTEP2skXY43ueqTCWssEA=="], + + "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], + + "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.3", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.9.1" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw=="], + + "eslint-scope": 
["eslint-scope@8.2.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A=="], + + "eslint-visitor-keys": ["eslint-visitor-keys@4.2.0", "", {}, "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw=="], + + "espree": ["espree@10.3.0", "", { "dependencies": { "acorn": "^8.14.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^4.2.0" } }, "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg=="], + + "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], + + "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], + + "estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], + + "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], + + "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], + + "express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="], + + "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], + + "fast-diff": ["fast-diff@1.3.0", "", {}, "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw=="], + + "fast-equals": ["fast-equals@5.2.2", "", {}, "sha512-V7/RktU11J3I36Nwq2JnZEM7tNm17eBJz+u25qdxBZeCKiX6BkVSZQjwWIr+IobgnZy+ag73tTZgZi7tr0LrBw=="], + + "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="], + + "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], + + "fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="], + + "fastq": ["fastq@1.19.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-7SFSRCNjBQIZH/xZR3iy5iQYR8aGBE0h3VG6/cwlbrpdciNYBMotQav8c1XI3HjHH+NikUpP53nPdlZSdWmFzA=="], + + 
"file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="], + + "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], + + "finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="], + + "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], + + "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], + + "flatted": ["flatted@3.3.2", "", {}, "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA=="], + + "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], + + "fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="], + + "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], + + "get-intrinsic": ["get-intrinsic@1.2.7", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "get-proto": "^1.0.0", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA=="], + + "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + + "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], + + "globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], + + "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + + "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], + + "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], + + "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], + + "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + + "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": 
"2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], + + "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], + + "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], + + "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], + + "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], + + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + + "intl-messageformat": ["intl-messageformat@9.13.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "@formatjs/fast-memoize": "1.2.1", "@formatjs/icu-messageformat-parser": "2.1.0", "tslib": "^2.1.0" } }, "sha512-7sGC7QnSQGa5LZP7bXLDhVDtQOeKGeBFGHF2Y8LVBwYZoQZCgWeKoPGTa5GMG8g/TzDgeXuYJQis7Ggiw2xTOw=="], + + "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], + + "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], + + "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], + + "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], + + "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + + "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], + + "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], + + "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], + + "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], + + "jwt-simple": ["jwt-simple@0.5.6", "", {}, "sha512-40aUybvhH9t2h71ncA1/1SbtTNCVZHgsTsTgqPUxGWDmUDrXyDf2wMNQKEbdBjbf4AI+fQhbECNTV6lWxQKUzg=="], + + "keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], + + "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], + + "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, 
"sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], + + "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], + + "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + + "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="], + + "merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="], + + "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], + + "methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="], + + "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], + + "mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], + + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], + + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], + + "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], + + "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "msgpackr": ["msgpackr@1.11.2", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g=="], + + "msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="], + + "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], + + "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + + "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": "^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, 
"sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="], + + "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], + + "object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="], + + "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], + + "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], + + "p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], + + "p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="], + + "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], + + "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], + + "path": ["path@0.12.7", "", { "dependencies": { "process": "^0.11.1", "util": "^0.10.3" } }, "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q=="], + + "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], + + "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], + + "path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="], + + "pg": ["pg@8.12.0", "", { "dependencies": { "pg-connection-string": "^2.6.4", "pg-pool": "^3.6.2", "pg-protocol": "^1.6.1", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.1.1" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ=="], + + "pg-cloudflare": ["pg-cloudflare@1.1.1", "", {}, "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q=="], + + "pg-connection-string": ["pg-connection-string@2.7.0", "", {}, "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="], + + "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], + + "pg-pool": ["pg-pool@3.7.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g=="], + + "pg-protocol": ["pg-protocol@1.7.0", "", {}, "sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ=="], + + "pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": 
"~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], + + "pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="], + + "picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + + "postgres": ["postgres@3.4.5", "", {}, "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg=="], + + "postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="], + + "postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="], + + "postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="], + + "postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="], + + "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], + + "prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="], + + "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], + + "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], + + "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], + + "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + + "qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="], + + "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + + "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], + + "raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="], + + "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], + + "reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="], + + "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], + + "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + + "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], + + "semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="], + + "send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="], + + "serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="], + + "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], + + "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], + + "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], + + "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], + + "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], + + "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], + + "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + + "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], + + "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + + "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], + + "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, 
"sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], + + "synckit": ["synckit@0.9.2", "", { "dependencies": { "@pkgr/core": "^0.1.0", "tslib": "^2.6.2" } }, "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw=="], + + "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], + + "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], + + "ts-api-utils": ["ts-api-utils@2.0.1", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w=="], + + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], + + "type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="], + + "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="], + + "typescript-eslint": ["typescript-eslint@8.23.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.23.0", "@typescript-eslint/parser": "8.23.0", "@typescript-eslint/utils": "8.23.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-/LBRo3HrXr5LxmrdYSOCvoAMm7p2jNizNfbIpCgvG4HMsnoprRUOce/+8VJ9BDYWW68rqIENE/haVLWPeFZBVQ=="], + + "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], + + "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], + + "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], + + "util": ["util@0.10.4", "", { "dependencies": { "inherits": "2.0.3" } }, "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A=="], + + "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], + + "uuid": ["uuid@11.0.5", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA=="], + + "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], + + "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + + "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], + + "ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", 
"utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="], + + "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], + + "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], + + "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], + + "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], + + "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "body-parser/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "express/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], + + "finalhandler/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], + + "util/inherits": ["inherits@2.0.3", "", {}, "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="], + + "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + "express/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + "finalhandler/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + "send/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + } +} diff --git a/bun.lockb b/bun.lockb deleted file mode 100755 index 3d6f8d9cb292102776bc804bc88bc5a28b16dc37..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 105104 zcmeFaWmr|)_CLO9BqRipE>S{K6j54QQfUO~kS>)DDFp!uk&;pxBn1I!K~MyP6huS? 
[... base85-encoded GIT binary patch payload (binary lockfile data, not human-readable) omitted ...]
ze3GRFpoR+~ehXug43)EXx(^ZB2j+G9aU{05qht{)Kbv5CiEr>=1{l5N?L8v z(<^0(2Ps*eX>TC@L7Z9w&J?i6%TN1Id>tMWXpikRn#F$yJ#RA{__Na-LV`owyn@{W zokF|~t*gQ@QdcUm{ASvKAvtnPZm~+Ttt+W@z=vZY>DS9z->p_F2Gg43*EyZ+s(<-#nmYYjhL5PNRUL2cdF0lgP!VJ z%qi*PTtSPIgZD+zY6*_K!%7L?kUpo)c>FPm+%|npNzfc$qIsg%<(danUK|XB7azLY<=^5`(&~iN{H$&n$~ik(5Zvs_ z_O2S5Q)tp5rG@FzGt#7yi2-Y?HDyK+ieLduh9SK*TByH%$Qr=Lq6TGf=Q@%C+#O5# z4}6bKs^qxn4z=n}e1uNEWK49!QZiH%xf2}0!`;xEH-nzc@AmlpwVmcj2R=r$dN+t* z^^GS9-*{w|Cl-=W1@(VW##IXSd&A#YkujY#)|@g=)aWD+=5K}XL-f><9AYCyAu|fW z@NOYN`cx8lmxJyPJ9D`nm&+opUm4bfVnGC3?4pT-q2lLL)~<44lx3)DX|&b8q+(iY zACYN%BFW_Nkx6rY%569kg2)gocw#lq(3=uIMS;sfh~SBo^WqG{N9B7P!;0|{A&#Z^XtfTCgebgJ*1K&rdsNax&BiJ&b7uC7b&lddyo_*GZ-`OvfYPTV zzo$qY6~g%tWD+YXoIq-I*~x+UU!1R^m9m|rQ51ie;C}6)g6^*MZ9Fxn3Tn?s42TSW z)I4(j?OQ(OCYJ7IGH!vFYok>g@?2eHbj@^KUh~u#`uqO? Date: Tue, 4 Feb 2025 16:33:11 +0400 Subject: [PATCH 028/636] Rename person workspace (#17) Signed-off-by: Kristina Fefelova --- packages/client-sqlite/src/client.ts | 12 ++++---- .../migrations/05_notificationContext.sql | 4 +-- packages/cockroach/package.json | 7 +++++ packages/cockroach/src/adapter.ts | 12 ++++---- packages/cockroach/src/db/notification.ts | 28 +++++++++---------- packages/cockroach/src/db/types.ts | 2 +- packages/examples/src/index.ts | 4 +-- packages/sdk-types/src/db.ts | 6 ++-- packages/sdk-types/src/event.ts | 8 +++--- packages/server/src/eventProcessor.ts | 12 ++++---- packages/server/src/main.ts | 4 +-- packages/server/src/manager.ts | 10 +++---- packages/server/src/session.ts | 6 ++-- packages/server/src/triggers.ts | 21 ++++++++------ packages/server/src/types.ts | 2 +- packages/sqlite-wasm/src/adapter.ts | 12 ++++---- packages/sqlite-wasm/src/db/notification.ts | 26 ++++++++--------- packages/sqlite-wasm/src/db/types.ts | 2 +- packages/sqlite-wasm/src/migrations.ts | 4 +-- packages/types/src/notification.ts | 2 +- 20 files changed, 97 insertions(+), 87 deletions(-) diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index e2645b9a855..51b350c9c46 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -30,7 +30,7 @@ class DbClient implements Client { constructor( private readonly db: DbAdapter, private readonly workspace: string, - private readonly personWorkspace: string + private readonly personalWorkspace: string ) {} async createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise { @@ -137,7 +137,7 @@ class DbClient implements Client { } async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - return await this.db.createContext(this.personWorkspace, this.workspace, card, lastView, lastUpdate) + return await this.db.createContext(this.personalWorkspace, this.workspace, card, lastView, lastUpdate) } async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { @@ -150,12 +150,12 @@ class DbClient implements Client { async findNotificationContexts(params: FindNotificationContextParams): Promise { //TODO: should we filter by workspace? - return await this.db.findContexts(params, [this.personWorkspace]) + return await this.db.findContexts(params, [this.personalWorkspace]) } async findNotifications(params: FindNotificationsParams): Promise { //TODO: should we filter by workspace? 
- return await this.db.findNotifications(params, this.personWorkspace) + return await this.db.findNotifications(params, this.personalWorkspace) } async unsubscribeQuery() { @@ -169,9 +169,9 @@ class DbClient implements Client { export async function getSqliteClient( workspace: string, - personWorkspace: string, + personalWorkspace: string, dbUrl = 'file:communication.sqlite3?vfs=opfs' ): Promise { const db = await createSqliteDbAdapter(dbUrl) - return new DbClient(db, workspace, personWorkspace) + return new DbClient(db, workspace, personalWorkspace) } diff --git a/packages/cockroach/migrations/05_notificationContext.sql b/packages/cockroach/migrations/05_notificationContext.sql index b461c45fc58..b08a691a91c 100644 --- a/packages/cockroach/migrations/05_notificationContext.sql +++ b/packages/cockroach/migrations/05_notificationContext.sql @@ -4,12 +4,12 @@ CREATE TABLE IF NOT EXISTS notification_context workspace_id UUID NOT NULL, card_id UUID NOT NULL, - person_workspace UUID NOT NULL, + personal_workspace UUID NOT NULL, archived_from TIMESTAMPTZ, last_view TIMESTAMPTZ, last_update TIMESTAMPTZ, PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, person_workspace) + UNIQUE (workspace_id, card_id, personal_workspace) ); diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 1a52dccfd5f..ec3644aec91 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -21,5 +21,12 @@ }, "peerDependencies": { "typescript": "^5.6.3" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 1064bbdfe64..aa88a615704 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -81,11 +81,11 @@ export class CockroachAdapter implements DbAdapter { async createContext( workspace: string, card: CardID, - personWorkspace: string, + personalWorkspace: string, lastView?: Date, lastUpdate?: Date ): Promise { - return await this.notification.createContext(workspace, card, personWorkspace, lastView, lastUpdate) + return await this.notification.createContext(workspace, card, personalWorkspace, lastView, lastUpdate) } async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { @@ -98,18 +98,18 @@ export class CockroachAdapter implements DbAdapter { async findContexts( params: FindNotificationContextParams, - personWorkspaces: string[], + personalWorkspaces: string[], workspace?: string ): Promise { - return await this.notification.findContexts(params, personWorkspaces, workspace) + return await this.notification.findContexts(params, personalWorkspaces, workspace) } async findNotifications( params: FindNotificationsParams, - personWorkspace: string, + personalWorkspace: string, workspace?: string ): Promise { - return await this.notification.findNotifications(params, personWorkspace, workspace) + return await this.notification.findNotifications(params, personalWorkspace, workspace) } close(): void { diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index ec5d2d125cc..d58fa56f5e1 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -27,11 +27,11 @@ export class NotificationsDb extends BaseDb { }) } - async createContext(workspace: string, card: CardID, personWorkspace: string, lastView?: Date, 
lastUpdate?: Date): Promise { + async createContext(workspace: string, card: CardID, personalWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { const dbData: ContextDb = { workspace_id: workspace, card_id: card, - person_workspace: personWorkspace, + personal_workspace: personalWorkspace, last_view: lastView, last_update: lastUpdate } @@ -71,11 +71,11 @@ export class NotificationsDb extends BaseDb { await this.client.unsafe(sql, [values, context]) } - async findContexts(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { + async findContexts(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): Promise { const select = ` SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update FROM ${TableName.NotificationContext} nc`; - const {where, values} = this.buildContextWhere(params, personWorkspaces, workspace) + const {where, values} = this.buildContextWhere(params, personalWorkspaces, workspace) // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` const limit = params.limit ? ` LIMIT ${params.limit}` : '' const sql = [select, where, limit].join(' ') @@ -86,7 +86,7 @@ export class NotificationsDb extends BaseDb { } - async findNotifications(params: FindNotificationsParams, personWorkspace: string, workspace?: string): Promise { + async findNotifications(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): Promise { //TODO: experiment with select to improve performance, should join with attachments and reactions? const select = ` SELECT n.message_id, @@ -113,7 +113,7 @@ export class NotificationsDb extends BaseDb { JOIN ${TableName.NotificationContext} nc ON n.context = nc.id JOIN ${TableName.Message} m ON n.message_id = m.id `; - const {where, values} = this.buildNotificationWhere(params, personWorkspace, workspace) + const {where, values} = this.buildNotificationWhere(params, personalWorkspace, workspace) const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' const limit = params.limit ? 
` LIMIT ${params.limit}` : '' const sql = [select, where, orderBy, limit].join(' ') @@ -123,7 +123,7 @@ export class NotificationsDb extends BaseDb { return result.map(this.toNotification); } - buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): { + buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): { where: string, values: any[] } { @@ -136,9 +136,9 @@ export class NotificationsDb extends BaseDb { values.push(workspace) } - if (personWorkspaces.length > 0) { - where.push(`nc.person_workspace IN (${personWorkspaces.map((it) => `$${index++}`).join(', ')})`) - values.push(...personWorkspaces) + if (personalWorkspaces.length > 0) { + where.push(`nc.personal_workspace IN (${personalWorkspaces.map((it) => `$${index++}`).join(', ')})`) + values.push(...personalWorkspaces) } if (params.card != null) { @@ -149,12 +149,12 @@ export class NotificationsDb extends BaseDb { return {where: `WHERE ${where.join(' AND ')}`, values} } - buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string): { + buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): { where: string, values: any[] } { - const where: string[] = ['nc.person_workspace = $1'] - const values: any[] = [personWorkspace] + const where: string[] = ['nc.personal_workspace = $1'] + const values: any[] = [personalWorkspace] let index = 2 if (workspace != null) { @@ -193,7 +193,7 @@ export class NotificationsDb extends BaseDb { id: row.id, card: row.card_id, workspace: row.workspace_id, - personWorkspace: row.person_workspace, + personalWorkspace: row.personal_workspace, archivedFrom: row.archived_from ? new Date(row.archived_from) : undefined, lastView: row.last_view ? new Date(row.last_view) : undefined, lastUpdate: row.last_update ? 
new Date(row.last_update) : undefined diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts index 2a88a6a80a9..7742067ebf2 100644 --- a/packages/cockroach/src/db/types.ts +++ b/packages/cockroach/src/db/types.ts @@ -45,7 +45,7 @@ export interface NotificationDb { export interface ContextDb { workspace_id: string card_id: CardID - person_workspace: string + personal_workspace: string archived_from?: Date last_view?: Date diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index bceebf8794d..3b1764a9b15 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -5,7 +5,7 @@ import { createMessagesQuery, initLiveQueries } from '@hcengineering/communicati const thread = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as ThreadID const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' -const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' +const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' const creator1 = 'email:vasya@huly.com' as SocialID async function getClient(type: 'ws' | 'sqlite') { @@ -15,7 +15,7 @@ async function getClient(type: 'ws' | 'sqlite') { return await getWebsocketClient(platformUrl, token) } - return await getSqliteClient(workspace, personWorkspace) + return await getSqliteClient(workspace, personalWorkspace) } export async function example() { diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 621102b46cd..ceba22316be 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -36,7 +36,7 @@ export interface DbAdapter { createNotification(message: MessageID, context: ContextID): Promise removeNotification(message: MessageID, context: ContextID): Promise createContext( - personWorkspace: string, + personalWorkspace: string, workspace: string, card: CardID, lastView?: Date, @@ -46,12 +46,12 @@ export interface DbAdapter { removeContext(context: ContextID): Promise findContexts( params: FindNotificationContextParams, - personWorkspaces: string[], + personalWorkspaces: string[], workspace?: string ): Promise findNotifications( params: FindNotificationsParams, - personWorkspace: string, + personalWorkspace: string, workspace?: string ): Promise diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts index 70f287ceb59..0bd2927290d 100644 --- a/packages/sdk-types/src/event.ts +++ b/packages/sdk-types/src/event.ts @@ -209,13 +209,13 @@ export interface AttachmentRemovedEvent { export interface NotificationCreatedEvent { type: EventType.NotificationCreated - personWorkspace: string + personalWorkspace: string notification: Notification } export interface NotificationRemovedEvent { type: EventType.NotificationRemoved - personWorkspace: string + personalWorkspace: string message: MessageID context: ContextID } @@ -227,13 +227,13 @@ export interface NotificationContextCreatedEvent { export interface NotificationContextRemovedEvent { type: EventType.NotificationContextRemoved - personWorkspace: string + personalWorkspace: string context: ContextID } export interface NotificationContextUpdatedEvent { type: EventType.NotificationContextUpdated - personWorkspace: string + personalWorkspace: string context: ContextID update: NotificationContextUpdate } diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 6d7d72a1751..1dde23ae600 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -39,7 +39,7 @@ export class EventProcessor { constructor( 
private readonly db: DbAdapter, private readonly workspace: string, - private readonly personWorkspace: string + private readonly personalWorkspace: string ) {} async process(event: Event): Promise { @@ -215,7 +215,7 @@ export class EventProcessor { const broadcastEvent: NotificationRemovedEvent = { type: EventType.NotificationRemoved, - personWorkspace: this.personWorkspace, + personalWorkspace: this.personalWorkspace, message: event.message, context: event.context } @@ -227,7 +227,7 @@ export class EventProcessor { private async createNotificationContext(event: CreateNotificationContextEvent): Promise { const id = await this.db.createContext( - this.personWorkspace, + this.personalWorkspace, this.workspace, event.card, event.lastView, @@ -238,7 +238,7 @@ export class EventProcessor { context: { id, workspace: this.workspace, - personWorkspace: this.personWorkspace, + personalWorkspace: this.personalWorkspace, card: event.card, lastView: event.lastView, lastUpdate: event.lastUpdate @@ -254,7 +254,7 @@ export class EventProcessor { await this.db.removeContext(event.context) const broadcastEvent: NotificationContextRemovedEvent = { type: EventType.NotificationContextRemoved, - personWorkspace: this.personWorkspace, + personalWorkspace: this.personalWorkspace, context: event.context } return { @@ -268,7 +268,7 @@ export class EventProcessor { const broadcastEvent: NotificationContextUpdatedEvent = { type: EventType.NotificationContextUpdated, - personWorkspace: this.personWorkspace, + personalWorkspace: this.personalWorkspace, context: event.context, update: event.update } diff --git a/packages/server/src/main.ts b/packages/server/src/main.ts index a45aca60777..78f8af01746 100644 --- a/packages/server/src/main.ts +++ b/packages/server/src/main.ts @@ -137,6 +137,6 @@ async function validateToken(token: string): Promise { throw new Error('No workspace info') } - const personWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f7' - return { workspace: info.workspaceId, personWorkspace, socialId: email as SocialID } + const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f7' + return { workspace: info.workspaceId, personalWorkspace, socialId: email as SocialID } } diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index b6afedd4bfa..b9a85039203 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -152,20 +152,20 @@ export class Manager { ) case EventType.NotificationCreated: return ( - info.session.info.personWorkspace === event.personWorkspace && + info.session.info.personalWorkspace === event.personalWorkspace && this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) ) case EventType.NotificationRemoved: - return info.session.info.personWorkspace === event.personWorkspace && info.notificationQueries.size > 0 + return info.session.info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 case EventType.NotificationContextCreated: return ( - info.session.info.personWorkspace === event.context.personWorkspace && + info.session.info.personalWorkspace === event.context.personalWorkspace && this.matchContextQuery(event, Array.from(info.contextQueries.values())) ) case EventType.NotificationContextRemoved: - return info.session.info.personWorkspace === event.personWorkspace && info.contextQueries.size > 0 + return info.session.info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 case EventType.NotificationContextUpdated: - return 
info.session.info.personWorkspace === event.personWorkspace && info.contextQueries.size > 0 + return info.session.info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 } } diff --git a/packages/server/src/session.ts b/packages/server/src/session.ts index fdf487d3349..246294ded37 100644 --- a/packages/server/src/session.ts +++ b/packages/server/src/session.ts @@ -24,7 +24,7 @@ export class Session { private readonly db: DbAdapter, private readonly manager: Manager ) { - this.eventProcessor = new EventProcessor(db, info.workspace, info.personWorkspace) + this.eventProcessor = new EventProcessor(db, info.workspace, info.personalWorkspace) } ping(): string { @@ -46,7 +46,7 @@ export class Session { async findNotifications(params: FindNotificationsParams, queryId?: number): Promise { //TODO: do we need filter by workspace by default? - const result = await this.db.findNotifications(params, this.info.personWorkspace) + const result = await this.db.findNotifications(params, this.info.personalWorkspace) if (queryId != null) { this.manager.subscribeQuery(this.id, this.info.workspace, 'notification', queryId, params) } @@ -58,7 +58,7 @@ export class Session { queryId?: number ): Promise { //TODO: do we need filter by workspace by default? - const result = await this.db.findContexts(params, [this.info.personWorkspace]) + const result = await this.db.findContexts(params, [this.info.personalWorkspace]) if (queryId != null) { this.manager.subscribeQuery(this.id, this.info.workspace, 'context', queryId, params) } diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 12197b98ae0..589205b18fd 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -22,21 +22,24 @@ export class Triggers { private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { const card = event.message.thread as any as CardID - const subscribedPersonWorkspaces = ['cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8'] + const subscribedPersonalWorkspaces = [ + 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', + 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' + ] const res: BroadcastEvent[] = [] const contexts = await this.db.findContexts({ card }, [], workspace) res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) - for (const personWorkspace of subscribedPersonWorkspaces) { + for (const personalWorkspace of subscribedPersonalWorkspaces) { const existsContext = contexts.find( - (it) => it.card === card && it.personWorkspace === personWorkspace && workspace === it.workspace + (it) => it.card === card && it.personalWorkspace === personalWorkspace && workspace === it.workspace ) const contextId = await this.getOrCreateContextId( workspace, card, - personWorkspace, + personalWorkspace, res, event.message.created, existsContext @@ -46,7 +49,7 @@ export class Triggers { const resultEvent: NotificationCreatedEvent = { type: EventType.NotificationCreated, - personWorkspace, + personalWorkspace, notification: { context: contextId, message: event.message, @@ -63,7 +66,7 @@ export class Triggers { private async getOrCreateContextId( workspace: string, card: CardID, - personWorkspace: string, + personalWorkspace: string, res: BroadcastEvent[], lastUpdate: Date, context?: NotificationContext @@ -71,12 +74,12 @@ export class Triggers { if (context !== undefined) { return context.id } else { - const contextId = await this.db.createContext(personWorkspace, workspace, card, undefined, 
lastUpdate) + const contextId = await this.db.createContext(personalWorkspace, workspace, card, undefined, lastUpdate) const newContext = { id: contextId, card, workspace, - personWorkspace + personalWorkspace } const resultEvent: NotificationContextCreatedEvent = { type: EventType.NotificationContextCreated, @@ -99,7 +102,7 @@ export class Triggers { await this.db.updateContext(context.id, { lastUpdate }) res.push({ type: EventType.NotificationContextUpdated, - personWorkspace: context.personWorkspace, + personalWorkspace: context.personalWorkspace, context: context.id, update: { lastUpdate diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 69ca3515c07..26760ef7078 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -2,6 +2,6 @@ import type { SocialID } from '@hcengineering/communication-types' export interface ConnectionInfo { workspace: string - personWorkspace: string + personalWorkspace: string socialId: SocialID } diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index 7a08c1dec42..2ee4a3c98f1 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -80,11 +80,11 @@ export class SqliteAdapter implements DbAdapter { async createContext( workspace: string, card: CardID, - personWorkspace: string, + personalWorkspace: string, lastView?: Date, lastUpdate?: Date ): Promise { - return await this.notification.createContext(workspace, card, personWorkspace, lastView, lastUpdate) + return await this.notification.createContext(workspace, card, personalWorkspace, lastView, lastUpdate) } async removeContext(context: ContextID): Promise { @@ -97,18 +97,18 @@ export class SqliteAdapter implements DbAdapter { async findContexts( params: FindNotificationContextParams, - personWorkspaces: string[], + personalWorkspaces: string[], workspace?: string ): Promise { - return await this.notification.findContexts(params, personWorkspaces, workspace) + return await this.notification.findContexts(params, personalWorkspaces, workspace) } async findNotifications( params: FindNotificationsParams, - personWorkspace: string, + personalWorkspace: string, workspace?: string ): Promise { - return await this.notification.findNotifications(params, personWorkspace, workspace) + return await this.notification.findNotifications(params, personalWorkspace, workspace) } close(): void { diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index 83edb04e960..1b0784eb5b9 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -27,12 +27,12 @@ export class NotificationsDb extends BaseDb { }) } - async createContext(workspace: string, card: CardID, personWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { + async createContext(workspace: string, card: CardID, personalWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { const dbData: ContextDb = { id: self.crypto.randomUUID(), workspace_id: workspace, card_id: card, - person_workspace: personWorkspace, + personal_workspace: personalWorkspace, last_view: lastView, last_update: lastUpdate } @@ -78,7 +78,7 @@ export class NotificationsDb extends BaseDb { }); } - async findContexts(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): Promise { + async findContexts(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): Promise { const select = ` SELECT nc.id, 
nc.card_id, @@ -86,9 +86,9 @@ export class NotificationsDb extends BaseDb { nc.last_view, nc.last_update, nc.workspace_id, - nc.person_workspace + nc.personal_workspace FROM ${TableName.NotificationContext} nc`; - const where = this.buildContextWhere(params, personWorkspaces, workspace); + const where = this.buildContextWhere(params, personalWorkspaces, workspace); // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` const limit = params.limit ? ` LIMIT ${params.limit}` : '' const sql = [select, where, limit].join(' ') @@ -99,7 +99,7 @@ export class NotificationsDb extends BaseDb { } - async findNotifications(params: FindNotificationsParams, personWorkspace: string, workspace?: string): Promise { + async findNotifications(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): Promise { //TODO: should join with attachments and reactions? const select = ` SELECT n.message_id, @@ -128,7 +128,7 @@ export class NotificationsDb extends BaseDb { LEFT JOIN ${TableName.Patch} p ON p.message_id = m.id `; - const where = this.buildNotificationWhere(params, personWorkspace, workspace) + const where = this.buildNotificationWhere(params, personalWorkspace, workspace) const groupBy = `GROUP BY n.message_id, n.context_id, m.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update`; const orderBy = `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` const limit = params.limit ? ` LIMIT ${params.limit}` : '' @@ -139,14 +139,14 @@ export class NotificationsDb extends BaseDb { return result.map(it => this.toNotification(it)); } - buildContextWhere(params: FindNotificationContextParams, personWorkspaces: string[], workspace?: string,): string { + buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): string { const where: string[] = [] if (workspace != null) { where.push(`nc.workspace_id = '${workspace}'`) } - if (personWorkspaces.length > 0) { - where.push(`nc.person_workspace IN (${personWorkspaces.map(it => `'${it}'`).join(', ')})`) + if (personalWorkspaces.length > 0) { + where.push(`nc.personal_workspace IN (${personalWorkspaces.map(it => `'${it}'`).join(', ')})`) } if (params.card != null) { @@ -156,8 +156,8 @@ export class NotificationsDb extends BaseDb { return `WHERE ${where.join(' AND ')}` } - buildNotificationWhere(params: FindNotificationsParams, personWorkspace: string, workspace?: string): string { - const where: string[] = [`nc.person_workspace = '${personWorkspace}'`] + buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): string { + const where: string[] = [`nc.personal_workspace = '${personalWorkspace}'`] if (workspace != null) { where.push(`nc.workspace_id = '${workspace}'`) } @@ -193,7 +193,7 @@ export class NotificationsDb extends BaseDb { lastView: row.last_view ? new Date(row.last_view) : undefined, lastUpdate: row.last_update ? 
new Date(row.last_update) : undefined, workspace: row.workspace, - personWorkspace: row.person_workspace + personalWorkspace: row.personal_workspace } } diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts index 7834993bd69..5aff499ee8e 100644 --- a/packages/sqlite-wasm/src/db/types.ts +++ b/packages/sqlite-wasm/src/db/types.ts @@ -49,7 +49,7 @@ export interface ContextDb { id: string workspace_id: string card_id: CardID - person_workspace: string + personal_workspace: string archived_from?: Date last_view?: Date diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts index 1a59020ab3c..e729111a0a5 100644 --- a/packages/sqlite-wasm/src/migrations.ts +++ b/packages/sqlite-wasm/src/migrations.ts @@ -92,13 +92,13 @@ async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promis id TEXT NOT NULL, workspace_id TEXT NOT NULL, card_id TEXT NOT NULL, - person_workspace TEXT NOT NULL, + personal_workspace TEXT NOT NULL, archived_from DATETIME, last_view DATETIME, last_update DATETIME, PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, person_workspace) + UNIQUE (workspace_id, card_id, personal_workspace) ); CREATE TABLE IF NOT EXISTS notification diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 79b9b83a5ef..007e19b4c16 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -13,7 +13,7 @@ export interface NotificationContext { id: ContextID card: CardID workspace: string - personWorkspace: string + personalWorkspace: string archivedFrom?: Date lastView?: Date lastUpdate?: Date From 878e7effcb6eb81bacc7e5730cbd039743e799ff Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 4 Feb 2025 20:54:48 +0400 Subject: [PATCH 029/636] Fix package.json main (#18) Signed-off-by: Kristina Fefelova --- packages/client-query/package.json | 4 ++-- packages/client-sqlite/package.json | 4 ++-- packages/client-ws/package.json | 4 ++-- packages/cockroach/package.json | 4 ++-- packages/examples/package.json | 4 ++-- packages/query/package.json | 4 ++-- packages/server/package.json | 4 ++-- packages/sqlite-wasm/package.json | 4 ++-- packages/types/package.json | 4 ++-- 9 files changed, 18 insertions(+), 18 deletions(-) diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 064b79c0cf7..8906527b50b 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-client-query", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index e0dfa8b5ee7..5e72701c57e 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-client-sqlite", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index b9c26de6508..429950023a7 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-client-ws", "version": "0.1.0", - "main": "src/index.ts", - "module": 
"src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index ec3644aec91..e684b6954d0 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-cockroach", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/examples/package.json b/packages/examples/package.json index 8111d21e1de..c7b2c7cbd72 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-examples", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/query/package.json b/packages/query/package.json index 4c816b9f657..fafefb95c30 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-query", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/server/package.json b/packages/server/package.json index 400ae98ad6b..b24123eb9df 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-server", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index cb8baa27609..c8c2242a33b 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-sqlite-wasm", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ diff --git a/packages/types/package.json b/packages/types/package.json index 77fd4277f8e..423fa4b2798 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,8 +1,8 @@ { "name": "@hcengineering/communication-types", "version": "0.1.0", - "main": "src/index.ts", - "module": "src/index.ts", + "main": "dist/index.js", + "module": "dist/index.js", "types": "./types/index.d.ts", "type": "module", "files": [ From 2380cd129c159c67b74da535eba2a1eee028173c Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 4 Feb 2025 20:59:36 +0400 Subject: [PATCH 030/636] Fix package json 2 (#19) Signed-off-by: Kristina --- packages/sdk-types/package.json | 2 +- packages/types/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 483d1d31bae..f605ca25704 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.1", + "version": "0.1.2", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json 
b/packages/types/package.json index 423fa4b2798..64513aeaf5b 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.0", + "version": "0.1.2", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", From 2cac79abe0921d7b4138183e90fc75ec827a1ada Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 13:58:45 +0400 Subject: [PATCH 031/636] Extract server core (#20) Extract server api Signed-off-by: Kristina Fefelova --- bun.lock | 29 +- packages/sdk-types/src/index.ts | 1 + packages/sdk-types/src/serverApi.ts | 23 ++ packages/server-core/package.json | 34 +++ packages/server-core/src/eventProcessor.ts | 285 ++++++++++++++++++ packages/server-core/src/index.ts | 40 +++ packages/server-core/src/manager.ts | 219 ++++++++++++++ .../{server => server-core}/src/triggers.ts | 0 .../{server => server-core}/tsconfig.json | 0 packages/{server => server-ws}/package.json | 4 +- packages/{server => server-ws}/src/config.ts | 0 .../src/eventProcessor.ts | 0 packages/{server => server-ws}/src/index.ts | 0 packages/{server => server-ws}/src/main.ts | 0 packages/{server => server-ws}/src/manager.ts | 0 .../{server => server-ws}/src/server/error.ts | 0 .../src/server/server.ts | 0 packages/{server => server-ws}/src/session.ts | 0 packages/server-ws/src/triggers.ts | 115 +++++++ packages/{server => server-ws}/src/types.ts | 0 .../src/utils/account.ts | 0 .../{server => server-ws}/src/utils/logger.ts | 0 .../src/utils/serialize.ts | 0 packages/server-ws/tsconfig.json | 10 + 24 files changed, 754 insertions(+), 6 deletions(-) create mode 100644 packages/sdk-types/src/serverApi.ts create mode 100644 packages/server-core/package.json create mode 100644 packages/server-core/src/eventProcessor.ts create mode 100644 packages/server-core/src/index.ts create mode 100644 packages/server-core/src/manager.ts rename packages/{server => server-core}/src/triggers.ts (100%) rename packages/{server => server-core}/tsconfig.json (100%) rename packages/{server => server-ws}/package.json (88%) rename packages/{server => server-ws}/src/config.ts (100%) rename packages/{server => server-ws}/src/eventProcessor.ts (100%) rename packages/{server => server-ws}/src/index.ts (100%) rename packages/{server => server-ws}/src/main.ts (100%) rename packages/{server => server-ws}/src/manager.ts (100%) rename packages/{server => server-ws}/src/server/error.ts (100%) rename packages/{server => server-ws}/src/server/server.ts (100%) rename packages/{server => server-ws}/src/session.ts (100%) create mode 100644 packages/server-ws/src/triggers.ts rename packages/{server => server-ws}/src/types.ts (100%) rename packages/{server => server-ws}/src/utils/account.ts (100%) rename packages/{server => server-ws}/src/utils/logger.ts (100%) rename packages/{server => server-ws}/src/utils/serialize.ts (100%) create mode 100644 packages/server-ws/tsconfig.json diff --git a/bun.lock b/bun.lock index 07fb97651a9..a3ed205bbe3 100644 --- a/bun.lock +++ b/bun.lock @@ -123,14 +123,33 @@ "typescript": "^5.6.3", }, }, - "packages/server": { - "name": "@hcengineering/communication-server", + "packages/server-core": { + "name": "@hcengineering/communication-server-core", "version": "0.1.0", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/server-token": "^0.6.377", + 
"@hcengineering/core": "0.6.411", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/cors": "^2.8.17", + "@types/express": "^5.0.0", + "@types/ws": "^8.5.13", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, + "packages/server-ws": { + "name": "@hcengineering/communication-server-ws", + "version": "0.1.0", + "dependencies": { + "@hcengineering/communication-cockroach": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/server-token": "^0.6.411", "cors": "^2.8.5", "dotenv": "^16.4.7", "express": "^4.21.2", @@ -218,7 +237,9 @@ "@hcengineering/communication-sdk-types": ["@hcengineering/communication-sdk-types@workspace:packages/sdk-types"], - "@hcengineering/communication-server": ["@hcengineering/communication-server@workspace:packages/server"], + "@hcengineering/communication-server-core": ["@hcengineering/communication-server-core@workspace:packages/server-core"], + + "@hcengineering/communication-server-ws": ["@hcengineering/communication-server-ws@workspace:packages/server-ws"], "@hcengineering/communication-sqlite-wasm": ["@hcengineering/communication-sqlite-wasm@workspace:packages/sqlite-wasm"], diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 01596627a63..2823f002ade 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -3,3 +3,4 @@ export * from './event' export * from './ws' export * from './client' export * from './query' +export * from './serverApi' diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts new file mode 100644 index 00000000000..fb4923b6a4b --- /dev/null +++ b/packages/sdk-types/src/serverApi.ts @@ -0,0 +1,23 @@ +import type { FindMessagesParams, Message, SocialID } from '@hcengineering/communication-types' +import type { BroadcastEvent, EventResult, Event } from './event' + +export interface ConnectionInfo { + sessionId: string + personalWorkspace: string + socialId: SocialID +} + +export type Result = { + broadcastEvent?: BroadcastEvent + result: EventResult +} + +export interface ServerApi { + findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise + + unsubscribeQuery(info: ConnectionInfo, id: number): Promise + + event(info: ConnectionInfo, event: Event): Promise + + close(): Promise +} diff --git a/packages/server-core/package.json b/packages/server-core/package.json new file mode 100644 index 00000000000..f373bb6879b --- /dev/null +++ b/packages/server-core/package.json @@ -0,0 +1,34 @@ +{ + "name": "@hcengineering/communication-server-core", + "version": "0.1.2", + "main": "dist/index.js", + "module": "dist/index.js", + "types": "./types/index.d.ts", + "type": "module", + "files": [ + "dist/index.js", + "types/**/*.d.ts" + ], + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/express": "^5.0.0", + "@types/cors": "^2.8.17", + "@types/ws": "^8.5.13" + }, + "dependencies": { + "@hcengineering/core": "0.6.411", + "@hcengineering/communication-cockroach": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*" + }, + "peerDependencies": { + "typescript": "^5.6.3" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + } +} diff --git a/packages/server-core/src/eventProcessor.ts 
diff --git a/packages/server-core/src/eventProcessor.ts b/packages/server-core/src/eventProcessor.ts
new file mode 100644
index 00000000000..cd6ca397187
--- /dev/null
+++ b/packages/server-core/src/eventProcessor.ts
@@ -0,0 +1,285 @@
+import { type Message, type Patch, type Reaction, type Attachment } from '@hcengineering/communication-types'
+import {
+  EventType,
+  type CreateAttachmentEvent,
+  type AttachmentCreatedEvent,
+  type CreateMessageEvent,
+  type MessageCreatedEvent,
+  type CreatePatchEvent,
+  type PatchCreatedEvent,
+  type CreateReactionEvent,
+  type ReactionCreatedEvent,
+  type Event,
+  type BroadcastEvent,
+  type RemoveAttachmentEvent,
+  type AttachmentRemovedEvent,
+  type RemoveMessageEvent,
+  type MessageRemovedEvent,
+  type RemoveReactionEvent,
+  type ReactionRemovedEvent,
+  type EventResult,
+  type DbAdapter,
+  type CreateNotificationEvent,
+  type RemoveNotificationEvent,
+  type CreateNotificationContextEvent,
+  type RemoveNotificationContextEvent,
+  type UpdateNotificationContextEvent,
+  type NotificationRemovedEvent,
+  type NotificationContextCreatedEvent,
+  type NotificationContextRemovedEvent,
+  type NotificationContextUpdatedEvent
+} from '@hcengineering/communication-sdk-types'
+
+export type Result = {
+  broadcastEvent?: BroadcastEvent
+  result: EventResult
+}
+
+export class EventProcessor {
+  constructor(
+    private readonly db: DbAdapter,
+    private readonly workspace: string
+  ) {}
+
+  async process(personalWorkspace: string, event: Event): Promise {
+    switch (event.type) {
+      case EventType.CreateMessage:
+        return await this.createMessage(personalWorkspace, event)
+      case EventType.RemoveMessage:
+        return await this.removeMessage(personalWorkspace, event)
+      case EventType.CreatePatch:
+        return await this.createPatch(personalWorkspace, event)
+      case EventType.CreateReaction:
+        return await this.createReaction(personalWorkspace, event)
+      case EventType.RemoveReaction:
+        return await this.removeReaction(personalWorkspace, event)
+      case EventType.CreateAttachment:
+        return await this.createAttachment(personalWorkspace, event)
+      case EventType.RemoveAttachment:
+        return await this.removeAttachment(personalWorkspace, event)
+      case EventType.CreateNotification:
+        return await this.createNotification(personalWorkspace, event)
+      case EventType.RemoveNotification:
+        return await this.removeNotification(personalWorkspace, event)
+      case EventType.CreateNotificationContext:
+        return await this.createNotificationContext(personalWorkspace, event)
+      case EventType.RemoveNotificationContext:
+        return await this.removeNotificationContext(personalWorkspace, event)
+      case EventType.UpdateNotificationContext:
+        return await this.updateNotificationContext(personalWorkspace, event)
+    }
+  }
+
+  private async createMessage(_personalWorkspace: string, event: CreateMessageEvent): Promise {
+    const created = new Date()
+    const id = await this.db.createMessage(this.workspace, event.thread, event.content, event.creator, created)
+    const message: Message = {
+      id,
+      thread: event.thread,
+      content: event.content,
+      creator: event.creator,
+      created: created,
+      edited: created,
+      reactions: [],
+      attachments: []
+    }
+    const broadcastEvent: MessageCreatedEvent = {
+      type: EventType.MessageCreated,
+      message
+    }
+    return {
+      broadcastEvent,
+      result: { id }
+    }
+  }
+
+  private async createPatch(_personalWorkspace: string, event: CreatePatchEvent): Promise {
+    const created = new Date()
+    await this.db.createPatch(event.message, event.content, event.creator, created)
+
+    const patch: Patch = {
+      message: event.message,
+      content: event.content,
creator: event.creator, + created: created + } + const broadcastEvent: PatchCreatedEvent = { + type: EventType.PatchCreated, + thread: event.thread, + patch + } + return { + broadcastEvent, + result: {} + } + } + + private async removeMessage(_personalWorkspace: string, event: RemoveMessageEvent): Promise { + await this.db.removeMessage(event.message) + + const broadcastEvent: MessageRemovedEvent = { + type: EventType.MessageRemoved, + thread: event.thread, + message: event.message + } + + return { + broadcastEvent, + result: {} + } + } + + private async createReaction(_personalWorkspace: string, event: CreateReactionEvent): Promise { + const created = new Date() + await this.db.createReaction(event.message, event.reaction, event.creator, created) + + const reaction: Reaction = { + message: event.message, + reaction: event.reaction, + creator: event.creator, + created: created + } + const broadcastEvent: ReactionCreatedEvent = { + type: EventType.ReactionCreated, + thread: event.thread, + reaction + } + return { + broadcastEvent, + result: {} + } + } + + private async removeReaction(_personalWorkspace: string, event: RemoveReactionEvent): Promise { + await this.db.removeReaction(event.message, event.reaction, event.creator) + const broadcastEvent: ReactionRemovedEvent = { + type: EventType.ReactionRemoved, + thread: event.thread, + message: event.message, + reaction: event.reaction, + creator: event.creator + } + return { + broadcastEvent, + result: {} + } + } + + private async createAttachment(_personalWorkspace: string, event: CreateAttachmentEvent): Promise { + const created = new Date() + await this.db.createAttachment(event.message, event.card, event.creator, created) + + const attachment: Attachment = { + message: event.message, + card: event.card, + creator: event.creator, + created: created + } + const broadcastEvent: AttachmentCreatedEvent = { + type: EventType.AttachmentCreated, + thread: event.thread, + attachment + } + + return { + broadcastEvent, + result: {} + } + } + + private async removeAttachment(_personalWorkspace: string, event: RemoveAttachmentEvent): Promise { + await this.db.removeAttachment(event.message, event.card) + const broadcastEvent: AttachmentRemovedEvent = { + type: EventType.AttachmentRemoved, + thread: event.thread, + message: event.message, + card: event.card + } + return { + broadcastEvent, + result: {} + } + } + + private async createNotification(_personalWorkspace: string, event: CreateNotificationEvent): Promise { + await this.db.createNotification(event.message, event.context) + + return { + result: {} + } + } + + private async removeNotification(personalWorkspace: string, event: RemoveNotificationEvent): Promise { + await this.db.removeNotification(event.message, event.context) + + const broadcastEvent: NotificationRemovedEvent = { + type: EventType.NotificationRemoved, + personalWorkspace: personalWorkspace, + message: event.message, + context: event.context + } + return { + broadcastEvent, + result: {} + } + } + + private async createNotificationContext( + personalWorkspace: string, + event: CreateNotificationContextEvent + ): Promise { + const id = await this.db.createContext( + personalWorkspace, + this.workspace, + event.card, + event.lastView, + event.lastUpdate + ) + const broadcastEvent: NotificationContextCreatedEvent = { + type: EventType.NotificationContextCreated, + context: { + id, + workspace: this.workspace, + personalWorkspace: personalWorkspace, + card: event.card, + lastView: event.lastView, + lastUpdate: event.lastUpdate + } + } 
+ return { + broadcastEvent, + result: { id } + } + } + + private async removeNotificationContext( + personalWorkspace: string, + event: RemoveNotificationContextEvent + ): Promise { + await this.db.removeContext(event.context) + const broadcastEvent: NotificationContextRemovedEvent = { + type: EventType.NotificationContextRemoved, + personalWorkspace: personalWorkspace, + context: event.context + } + return { + broadcastEvent, + result: {} + } + } + + async updateNotificationContext(personalWorkspace: string, event: UpdateNotificationContextEvent): Promise { + await this.db.updateContext(event.context, event.update) + + const broadcastEvent: NotificationContextUpdatedEvent = { + type: EventType.NotificationContextUpdated, + personalWorkspace: personalWorkspace, + context: event.context, + update: event.update + } + return { + broadcastEvent, + result: {} + } + } +} diff --git a/packages/server-core/src/index.ts b/packages/server-core/src/index.ts new file mode 100644 index 00000000000..9c919919e6b --- /dev/null +++ b/packages/server-core/src/index.ts @@ -0,0 +1,40 @@ +import type { MeasureContext } from '@hcengineering/core' +import type { FindMessagesParams, Message } from '@hcengineering/communication-types' +import { createDbAdapter } from '@hcengineering/communication-cockroach' +import type { ConnectionInfo, DbAdapter, Event, ServerApi } from '@hcengineering/communication-sdk-types' + +import { type Result } from './eventProcessor.ts' +import { Manager } from './manager.ts' + +export class Api implements ServerApi { + private readonly manager: Manager + + private constructor( + private readonly ctx: MeasureContext, + private readonly workspace: string, + db: DbAdapter + ) { + this.manager = new Manager(this.ctx, db, this.workspace) + } + + static async create(ctx: MeasureContext, workspace: string, dbUrl: string): Promise { + const db = await createDbAdapter(dbUrl) + return new Api(ctx, workspace, db) + } + + async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { + return await this.manager.findMessages(info, params, queryId) + } + + async unsubscribeQuery(info: ConnectionInfo, id: number): Promise { + this.manager.unsubscribeQuery(info, id) + } + + async event(info: ConnectionInfo, event: Event): Promise { + return await this.manager.event(info, event) + } + + async close(): Promise { + this.manager.close() + } +} diff --git a/packages/server-core/src/manager.ts b/packages/server-core/src/manager.ts new file mode 100644 index 00000000000..f2066d88e5b --- /dev/null +++ b/packages/server-core/src/manager.ts @@ -0,0 +1,219 @@ +import { + type BroadcastEvent, + type ConnectionInfo, + type DbAdapter, + EventType, + type NotificationContextCreatedEvent, + type NotificationCreatedEvent, + type Event +} from '@hcengineering/communication-sdk-types' +import type { + FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, + Message, + MessageID +} from '@hcengineering/communication-types' + +import { Triggers } from './triggers.ts' +import { EventProcessor, type Result } from './eventProcessor.ts' +import type { MeasureContext } from '@hcengineering/core' + +type QueryId = number | string +type QueryType = 'message' | 'notification' | 'context' + +type SessionInfo = { + personalWorkspace: string + messageQueries: Map + notificationQueries: Map + contextQueries: Map +} + +export class Manager { + private dataBySessionId: Map = new Map() + private triggers: Triggers + private readonly eventProcessor: EventProcessor + + 
constructor( + private readonly ctx: MeasureContext, + private readonly db: DbAdapter, + private readonly workspace: string + ) { + this.eventProcessor = new EventProcessor(db, this.workspace) + this.triggers = new Triggers(db) + } + + async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { + const result = await this.db.findMessages(this.workspace, params) + if (queryId != null && info.sessionId != null && info.sessionId !== '') { + this.subscribeQuery(info, 'message', queryId, params) + } + return result + } + + async event(info: ConnectionInfo, event: Event): Promise { + return await this.eventProcessor.process(info.personalWorkspace, event) + // const { result, broadcastEvent } = await this.eventProcessor.process(personalWorkspace, event) + // if (broadcastEvent !== undefined) { + // void this.manager.next(broadcastEvent) + // } + // return result + } + // + // async broadcastEvent (ctx: MeasureContext, personalWorkspace: string, event: BroadcastEvent): Promise { + // void this.manager.next(event, personalWorkspace) + // } + + subscribeQuery(info: ConnectionInfo, type: QueryType, queryId: number, params: Record): void { + const { sessionId, personalWorkspace } = info + const data = this.dataBySessionId.get(sessionId) ?? { + personalWorkspace, + messageQueries: new Map(), + notificationQueries: new Map(), + contextQueries: new Map() + } + if (!this.dataBySessionId.has(sessionId)) { + this.dataBySessionId.set(sessionId, data) + } + + if (type === 'message') { + data.messageQueries.set(queryId, params) + } else if (type === 'notification') { + data.notificationQueries.set(queryId, params) + } else if (type === 'context') { + data.contextQueries.set(queryId, params) + } + } + + unsubscribeQuery(info: ConnectionInfo, queryId: number): void { + const data = this.dataBySessionId.get(info.sessionId) + if (data == null) return + + data.messageQueries.delete(queryId) + data.notificationQueries.delete(queryId) + data.contextQueries.delete(queryId) + } + + closeSession(sessionId: string): void { + this.dataBySessionId.delete(sessionId) + } + + async next(event: BroadcastEvent, workspace: string): Promise { + // await this.broadcast(event, workspace) + // const derived = await this.triggers.process(event, workspace) + // const derivedPromises: Promise[] = [] + // for (const d of derived) { + // derivedPromises.push(this.next(d, workspace)) + // } + // await Promise.all(derivedPromises) + } + + private async broadcast(event: BroadcastEvent, workspace: string): Promise { + // const sessions = this.sessionsByWorkspace.get(workspace) ?? 
[] + // const response: Response = { result: event } + // for (const session of sessions) { + // const msg = serializeResponse(response, session.session.binary) + // if (this.match(event, session)) { + // session.ws.send(msg) + // } + // } + } + + private match(event: BroadcastEvent, info: SessionInfo): boolean { + switch (event.type) { + case EventType.MessageCreated: + return this.matchMessagesQuery( + { id: event.message.id, thread: event.message.thread }, + Array.from(info.messageQueries.values()) + ) + case EventType.PatchCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.patch.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.MessageRemoved: + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.ReactionCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.reaction.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.ReactionRemoved: + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.AttachmentCreated: + return this.matchMessagesQuery( + { thread: event.thread, id: event.attachment.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.AttachmentRemoved: + return this.matchMessagesQuery( + { thread: event.thread, id: event.message }, + Array.from(info.messageQueries.values()) + ) + case EventType.NotificationCreated: + return ( + info.personalWorkspace === event.personalWorkspace && + this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) + ) + case EventType.NotificationRemoved: + return info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 + case EventType.NotificationContextCreated: + return ( + info.personalWorkspace === event.context.personalWorkspace && + this.matchContextQuery(event, Array.from(info.contextQueries.values())) + ) + case EventType.NotificationContextRemoved: + return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + case EventType.NotificationContextUpdated: + return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + } + } + + private matchMessagesQuery(params: { id?: MessageID; thread?: string }, queries: FindMessagesParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.id != null && query.id !== params.id) continue + if (query.thread != null && query.thread !== params.thread) continue + return true + } + + return false + } + + private matchNotificationQuery(event: NotificationCreatedEvent, queries: FindNotificationsParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.context != null && query.context !== event.notification.context) continue + if (query.message != null && query.message !== event.notification.message.id) continue + if (query.read != null && query.read !== event.notification.read) continue + if (query.archived != null && query.archived !== event.notification.archived) continue + return true + } + + return false + } + + private matchContextQuery(event: NotificationContextCreatedEvent, queries: FindNotificationContextParams[]): boolean { + if (queries.length === 0) return false + + for (const query of queries) { + if (query.id != null && query.id !== event.context.id) continue + if (query.card != null && query.card 
!== event.context.card) continue + return true + } + + return false + } + + close(): void { + this.db.close() + } +} diff --git a/packages/server/src/triggers.ts b/packages/server-core/src/triggers.ts similarity index 100% rename from packages/server/src/triggers.ts rename to packages/server-core/src/triggers.ts diff --git a/packages/server/tsconfig.json b/packages/server-core/tsconfig.json similarity index 100% rename from packages/server/tsconfig.json rename to packages/server-core/tsconfig.json diff --git a/packages/server/package.json b/packages/server-ws/package.json similarity index 88% rename from packages/server/package.json rename to packages/server-ws/package.json index b24123eb9df..234db94dc48 100644 --- a/packages/server/package.json +++ b/packages/server-ws/package.json @@ -1,5 +1,5 @@ { - "name": "@hcengineering/communication-server", + "name": "@hcengineering/communication-server-ws", "version": "0.1.0", "main": "dist/index.js", "module": "dist/index.js", @@ -16,7 +16,7 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/server-token": "^0.6.377", + "@hcengineering/server-token": "^0.6.411", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", diff --git a/packages/server/src/config.ts b/packages/server-ws/src/config.ts similarity index 100% rename from packages/server/src/config.ts rename to packages/server-ws/src/config.ts diff --git a/packages/server/src/eventProcessor.ts b/packages/server-ws/src/eventProcessor.ts similarity index 100% rename from packages/server/src/eventProcessor.ts rename to packages/server-ws/src/eventProcessor.ts diff --git a/packages/server/src/index.ts b/packages/server-ws/src/index.ts similarity index 100% rename from packages/server/src/index.ts rename to packages/server-ws/src/index.ts diff --git a/packages/server/src/main.ts b/packages/server-ws/src/main.ts similarity index 100% rename from packages/server/src/main.ts rename to packages/server-ws/src/main.ts diff --git a/packages/server/src/manager.ts b/packages/server-ws/src/manager.ts similarity index 100% rename from packages/server/src/manager.ts rename to packages/server-ws/src/manager.ts diff --git a/packages/server/src/server/error.ts b/packages/server-ws/src/server/error.ts similarity index 100% rename from packages/server/src/server/error.ts rename to packages/server-ws/src/server/error.ts diff --git a/packages/server/src/server/server.ts b/packages/server-ws/src/server/server.ts similarity index 100% rename from packages/server/src/server/server.ts rename to packages/server-ws/src/server/server.ts diff --git a/packages/server/src/session.ts b/packages/server-ws/src/session.ts similarity index 100% rename from packages/server/src/session.ts rename to packages/server-ws/src/session.ts diff --git a/packages/server-ws/src/triggers.ts b/packages/server-ws/src/triggers.ts new file mode 100644 index 00000000000..589205b18fd --- /dev/null +++ b/packages/server-ws/src/triggers.ts @@ -0,0 +1,115 @@ +import { + type BroadcastEvent, + type DbAdapter, + EventType, + type MessageCreatedEvent, + type NotificationContextCreatedEvent, + type NotificationCreatedEvent +} from '@hcengineering/communication-sdk-types' +import type { NotificationContext, ContextID, CardID } from '@hcengineering/communication-types' + +export class Triggers { + constructor(private readonly db: DbAdapter) {} + + async process(event: BroadcastEvent, workspace: string): Promise { + switch (event.type) 
{ + case EventType.MessageCreated: + return this.createNotifications(event, workspace) + } + + return [] + } + + private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { + const card = event.message.thread as any as CardID + const subscribedPersonalWorkspaces = [ + 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', + 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' + ] + + const res: BroadcastEvent[] = [] + const contexts = await this.db.findContexts({ card }, [], workspace) + + res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) + + for (const personalWorkspace of subscribedPersonalWorkspaces) { + const existsContext = contexts.find( + (it) => it.card === card && it.personalWorkspace === personalWorkspace && workspace === it.workspace + ) + const contextId = await this.getOrCreateContextId( + workspace, + card, + personalWorkspace, + res, + event.message.created, + existsContext + ) + + await this.db.createNotification(event.message.id, contextId) + + const resultEvent: NotificationCreatedEvent = { + type: EventType.NotificationCreated, + personalWorkspace, + notification: { + context: contextId, + message: event.message, + read: false, + archived: false + } + } + res.push(resultEvent) + } + + return res + } + + private async getOrCreateContextId( + workspace: string, + card: CardID, + personalWorkspace: string, + res: BroadcastEvent[], + lastUpdate: Date, + context?: NotificationContext + ): Promise { + if (context !== undefined) { + return context.id + } else { + const contextId = await this.db.createContext(personalWorkspace, workspace, card, undefined, lastUpdate) + const newContext = { + id: contextId, + card, + workspace, + personalWorkspace + } + const resultEvent: NotificationContextCreatedEvent = { + type: EventType.NotificationContextCreated, + context: newContext + } + + res.push(resultEvent) + + return contextId + } + } + + private async updateNotificationContexts( + lastUpdate: Date, + contexts: NotificationContext[] + ): Promise { + const res: BroadcastEvent[] = [] + for (const context of contexts) { + if (context.lastUpdate === undefined || context.lastUpdate < lastUpdate) { + await this.db.updateContext(context.id, { lastUpdate }) + res.push({ + type: EventType.NotificationContextUpdated, + personalWorkspace: context.personalWorkspace, + context: context.id, + update: { + lastUpdate + } + }) + } + } + return res + } +} diff --git a/packages/server/src/types.ts b/packages/server-ws/src/types.ts similarity index 100% rename from packages/server/src/types.ts rename to packages/server-ws/src/types.ts diff --git a/packages/server/src/utils/account.ts b/packages/server-ws/src/utils/account.ts similarity index 100% rename from packages/server/src/utils/account.ts rename to packages/server-ws/src/utils/account.ts diff --git a/packages/server/src/utils/logger.ts b/packages/server-ws/src/utils/logger.ts similarity index 100% rename from packages/server/src/utils/logger.ts rename to packages/server-ws/src/utils/logger.ts diff --git a/packages/server/src/utils/serialize.ts b/packages/server-ws/src/utils/serialize.ts similarity index 100% rename from packages/server/src/utils/serialize.ts rename to packages/server-ws/src/utils/serialize.ts diff --git a/packages/server-ws/tsconfig.json b/packages/server-ws/tsconfig.json new file mode 100644 index 00000000000..e7a6cb178dd --- /dev/null +++ b/packages/server-ws/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "declarationDir": "./types", + 
"emitDeclarationOnly": true, + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src"] +} From 3dc234b89961f1396c40e58a24f39354fd2a70fb Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 15:35:50 +0400 Subject: [PATCH 032/636] Fix db schema (#21) Update db schema Signed-off-by: Kristina Fefelova --- bun.lock | 9 ++- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-sqlite/src/client.ts | 37 ++++++----- packages/client-ws/package.json | 2 +- packages/client-ws/src/client.ts | 37 ++++++----- packages/cockroach/migrations/01_message.sql | 20 +++++- packages/cockroach/migrations/02_patch.sql | 7 ++- .../cockroach/migrations/03_attachment.sql | 7 ++- packages/cockroach/migrations/04_reaction.sql | 9 +-- .../migrations/05_notificationContext.sql | 15 ----- .../migrations/05_notification_context.sql | 16 +++++ .../cockroach/migrations/06_notification.sql | 7 ++- packages/cockroach/package.json | 2 +- packages/cockroach/src/adapter.ts | 25 +++++--- packages/cockroach/src/db/message.ts | 30 ++++++--- packages/cockroach/src/db/notification.ts | 4 +- packages/cockroach/src/db/types.ts | 26 +++++--- packages/examples/package.json | 2 +- packages/examples/src/index.ts | 16 ++--- packages/query/package.json | 2 +- packages/query/src/messages/query.ts | 2 +- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/client.ts | 17 +++-- packages/sdk-types/src/db.ts | 17 +++-- packages/sdk-types/src/event.ts | 35 +++++------ packages/sdk-types/src/serverApi.ts | 1 + packages/server-core/package.json | 2 +- packages/server-core/src/eventProcessor.ts | 18 +++--- packages/server-core/src/manager.ts | 62 +++++++++---------- packages/server-core/src/triggers.ts | 2 +- packages/server-ws/package.json | 2 +- packages/server-ws/src/eventProcessor.ts | 18 +++--- packages/server-ws/src/manager.ts | 18 +++--- packages/server-ws/src/server/server.ts | 2 - packages/server-ws/src/triggers.ts | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/sqlite-wasm/src/adapter.ts | 26 +++++--- packages/sqlite-wasm/src/db/message.ts | 14 ++--- packages/sqlite-wasm/src/db/notification.ts | 4 +- packages/sqlite-wasm/src/db/types.ts | 2 +- packages/sqlite-wasm/src/migrations.ts | 2 +- packages/types/package.json | 5 +- packages/types/src/message.ts | 16 ++++- packages/types/src/query.ts | 4 +- 45 files changed, 322 insertions(+), 230 deletions(-) delete mode 100644 packages/cockroach/migrations/05_notificationContext.sql create mode 100644 packages/cockroach/migrations/05_notification_context.sql diff --git a/bun.lock b/bun.lock index a3ed205bbe3..6b3b71842a3 100644 --- a/bun.lock +++ b/bun.lock @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.1", + "version": "0.1.2", "dependencies": { "@hcengineering/communication-types": "^0.1.0", }, @@ -125,7 +125,7 @@ }, "packages/server-core": { "name": "@hcengineering/communication-server-core", - "version": "0.1.0", + "version": "0.1.2", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -185,7 +185,10 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.0", + "version": "0.1.2", + "dependencies": { + "@hcengineering/core": "0.6.411", + }, "devDependencies": { "@types/bun": "^1.1.14", }, diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 8906527b50b..8d0cd5c1036 100644 --- 
a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 5e72701c57e..d3237c0ed6e 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 51b350c9c46..29f8624bf35 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -11,7 +11,6 @@ import { type NotificationContext, type FindNotificationsParams, type Notification, - type ThreadID, type Attachment, type Reaction } from '@hcengineering/communication-types' @@ -33,15 +32,15 @@ class DbClient implements Client { private readonly personalWorkspace: string ) {} - async createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise { + async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { const created = new Date() - const id = await this.db.createMessage(this.workspace, thread, content, creator, created) + const id = await this.db.createMessage(this.workspace, card, content, creator, created) const event: MessageCreatedEvent = { type: EventType.MessageCreated, message: { id, - thread, + card, content, creator, created, @@ -56,37 +55,41 @@ class DbClient implements Client { return id } - async removeMessage(thread: ThreadID, message: MessageID) { + async removeMessage(card: CardID, message: MessageID) { await this.db.removeMessage(message) - this.onEvent({ type: EventType.MessageRemoved, message, thread }) + this.onEvent({ type: EventType.MessageRemoved, message, card }) } - async createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise { + async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { const created = new Date() await this.db.createPatch(message, content, creator, created) - this.onEvent({ type: EventType.PatchCreated, thread, patch: { message, content, creator, created } }) + this.onEvent({ type: EventType.PatchCreated, card, patch: { message, content, creator, created } }) } - async createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { + async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const created = new Date() await this.db.createReaction(message, reaction, creator, created) - this.onEvent({ type: EventType.ReactionCreated, thread, reaction: { message, reaction, creator, created } }) + this.onEvent({ type: EventType.ReactionCreated, card, reaction: { message, reaction, creator, created } }) } - async removeReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { await this.db.removeReaction(message, reaction, creator) - this.onEvent({ type: EventType.ReactionRemoved, thread, message, reaction, creator }) + this.onEvent({ type: EventType.ReactionRemoved, card, message, reaction, creator }) } - 
async createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: SocialID): Promise { + async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { const created = new Date() await this.db.createAttachment(message, card, creator, created) - this.onEvent({ type: EventType.AttachmentCreated, thread, attachment: { message, card, creator, created } }) + this.onEvent({ + type: EventType.AttachmentCreated, + card, + attachment: { message, card: attachment, creator, created } + }) } - async removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise { + async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { await this.db.removeAttachment(message, card) - this.onEvent({ type: EventType.AttachmentRemoved, message, card, thread }) + this.onEvent({ type: EventType.AttachmentRemoved, message, card, attachment }) } async findMessages(params: FindMessagesParams): Promise { @@ -101,7 +104,7 @@ class DbClient implements Client { toMessage(raw: any): Message { return { id: raw.id, - thread: raw.thread, + card: raw.card, content: raw.content, creator: raw.creator, created: new Date(raw.created), diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 429950023a7..b33ff3a07a8 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index 83eb39050a7..6af6606df14 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -12,8 +12,7 @@ import { type NotificationContextUpdate, type Reaction, type RichText, - type SocialID, - type ThreadID + type SocialID } from '@hcengineering/communication-types' import { type BroadcastEvent, @@ -56,10 +55,10 @@ class WsClient implements Client { } } - async createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise { + async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { const event: CreateMessageEvent = { type: EventType.CreateMessage, - thread, + card, content, creator } @@ -67,19 +66,19 @@ class WsClient implements Client { return (result as CreateMessageResult).id } - async removeMessage(thread: ThreadID, message: MessageID): Promise { + async removeMessage(card: CardID, message: MessageID): Promise { const event: RemoveMessageEvent = { type: EventType.RemoveMessage, - thread, + card, message } await this.sendEvent(event) } - async createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise { + async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { const event: CreatePatchEvent = { type: EventType.CreatePatch, - thread, + card, message, content, creator @@ -87,10 +86,10 @@ class WsClient implements Client { await this.sendEvent(event) } - async createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { + async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: CreateReactionEvent = { type: EventType.CreateReaction, - thread, + card, message, reaction, creator @@ -98,10 +97,10 @@ class WsClient implements Client { await this.sendEvent(event) } - async removeReaction(thread: 
ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: RemoveReactionEvent = { type: EventType.RemoveReaction, - thread, + card, message, reaction, creator @@ -109,23 +108,23 @@ class WsClient implements Client { await this.sendEvent(event) } - async createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: SocialID): Promise { + async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { const event: CreateAttachmentEvent = { type: EventType.CreateAttachment, - thread, - message, card, + message, + attachment, creator } await this.sendEvent(event) } - async removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise { + async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { const event: RemoveAttachmentEvent = { type: EventType.RemoveAttachment, - thread, + card, message, - card + attachment } await this.sendEvent(event) } @@ -138,7 +137,7 @@ class WsClient implements Client { toMessage(raw: any): Message { return { id: raw.id, - thread: raw.thread, + card: raw.card, content: raw.content, creator: raw.creator, created: new Date(raw.created), diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index e03be3b3435..4622f819b59 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -1,8 +1,11 @@ -CREATE TABLE IF NOT EXISTS message +DROP TABLE IF EXISTS c_message CASCADE; +DROP TABLE IF EXISTS c_messages_group CASCADE; + +CREATE TABLE IF NOT EXISTS c_message ( id UUID NOT NULL DEFAULT gen_random_uuid(), workspace_id UUID NOT NULL, - thread_id UUID NOT NULL, + card_id UUID NOT NULL, content TEXT NOT NULL, creator VARCHAR(255) NOT NULL, @@ -11,3 +14,16 @@ CREATE TABLE IF NOT EXISTS message PRIMARY KEY (id) ); + +CREATE TABLE IF NOT EXISTS c_messages_group +( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + start_at TIMESTAMPTZ NOT NULL, + end_at TIMESTAMPTZ NOT NULL, + blob_id UUID NOT NULL, + count INT NOT NULL, + + UNIQUE (workspace_id, card_id, blob_id) +); diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql index 0f3baa57315..be2e04210db 100644 --- a/packages/cockroach/migrations/02_patch.sql +++ b/packages/cockroach/migrations/02_patch.sql @@ -1,4 +1,5 @@ -CREATE TABLE IF NOT EXISTS patch +DROP TABLE IF EXISTS c_patch CASCADE; +CREATE TABLE IF NOT EXISTS c_patch ( id INT8 NOT NULL DEFAULT unique_rowid(), message_id UUID NOT NULL, @@ -7,7 +8,7 @@ CREATE TABLE IF NOT EXISTS patch created TIMESTAMPTZ NOT NULL, PRIMARY KEY (id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE ); -CREATE INDEX idx_patch_message_id ON patch (message_id); \ No newline at end of file +CREATE INDEX idx_patch_message_id ON c_patch (message_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql index 6f732045484..3029eec5699 100644 --- a/packages/cockroach/migrations/03_attachment.sql +++ b/packages/cockroach/migrations/03_attachment.sql @@ -1,4 +1,5 @@ -CREATE TABLE IF NOT EXISTS attachment +DROP TABLE IF EXISTS c_attachment CASCADE; +CREATE TABLE IF NOT EXISTS c_attachment 
( message_id UUID NOT NULL, card_id UUID NOT NULL, @@ -6,7 +7,7 @@ CREATE TABLE IF NOT EXISTS attachment created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (message_id, card_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE ); -CREATE INDEX IF NOT EXISTS attachment_message_idx ON attachment (message_id); +CREATE INDEX IF NOT EXISTS attachment_message_idx ON c_attachment (message_id); diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql index 3d3af2c0872..a698f9ad672 100644 --- a/packages/cockroach/migrations/04_reaction.sql +++ b/packages/cockroach/migrations/04_reaction.sql @@ -1,12 +1,13 @@ -CREATE TABLE IF NOT EXISTS reaction +DROP TABLE IF EXISTS c_reaction CASCADE; +CREATE TABLE IF NOT EXISTS c_reaction ( message_id UUID NOT NULL, - reaction VARCHAR(100) NOT NULL, + reaction VARCHAR(100) NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (message_id, creator, reaction), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE + FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE ); -CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id); +CREATE INDEX IF NOT EXISTS reaction_message_idx ON c_reaction (message_id); diff --git a/packages/cockroach/migrations/05_notificationContext.sql b/packages/cockroach/migrations/05_notificationContext.sql deleted file mode 100644 index b08a691a91c..00000000000 --- a/packages/cockroach/migrations/05_notificationContext.sql +++ /dev/null @@ -1,15 +0,0 @@ -CREATE TABLE IF NOT EXISTS notification_context -( - id UUID NOT NULL DEFAULT gen_random_uuid(), - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - - personal_workspace UUID NOT NULL, - - archived_from TIMESTAMPTZ, - last_view TIMESTAMPTZ, - last_update TIMESTAMPTZ, - - PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, personal_workspace) -); diff --git a/packages/cockroach/migrations/05_notification_context.sql b/packages/cockroach/migrations/05_notification_context.sql new file mode 100644 index 00000000000..df42a3da378 --- /dev/null +++ b/packages/cockroach/migrations/05_notification_context.sql @@ -0,0 +1,16 @@ +DROP TABLE IF EXISTS c_notification_context CASCADE; +CREATE TABLE IF NOT EXISTS c_notification_context +( + id UUID NOT NULL DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL, + card_id UUID NOT NULL, + + personal_workspace UUID NOT NULL, + + archived_from TIMESTAMPTZ, + last_view TIMESTAMPTZ, + last_update TIMESTAMPTZ, + + PRIMARY KEY (id), + UNIQUE (workspace_id, card_id, personal_workspace) +); diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql index df4bcce202f..905d6c9a508 100644 --- a/packages/cockroach/migrations/06_notification.sql +++ b/packages/cockroach/migrations/06_notification.sql @@ -1,9 +1,10 @@ -CREATE TABLE IF NOT EXISTS notification +DROP TABLE IF EXISTS c_notification CASCADE; +CREATE TABLE IF NOT EXISTS c_notification ( message_id UUID NOT NULL, context UUID NOT NULL, PRIMARY KEY (message_id, context), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE, - FOREIGN KEY (context) REFERENCES notification_context (id) ON DELETE CASCADE + FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE, + FOREIGN KEY (context) REFERENCES c_notification_context (id) ON DELETE CASCADE ); diff --git a/packages/cockroach/package.json 
b/packages/cockroach/package.json index e684b6954d0..0057ad6b2bc 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index aa88a615704..3ee7b6f5692 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -12,7 +12,7 @@ import { type NotificationContext, type FindNotificationsParams, type Notification, - type ThreadID + type BlobID } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' @@ -34,12 +34,12 @@ export class CockroachAdapter implements DbAdapter { async createMessage( workspace: string, - thread: ThreadID, + card: CardID, content: RichText, creator: SocialID, created: Date ): Promise { - return await this.message.createMessage(workspace, thread, content, creator, created) + return await this.message.createMessage(workspace, card, content, creator, created) } async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { @@ -50,6 +50,17 @@ export class CockroachAdapter implements DbAdapter { return await this.message.removeMessage(message) } + async createMessagesGroup( + workspace: string, + card: CardID, + startAt: Date, + endAt: Date, + blobId: BlobID, + count: number + ): Promise { + return await this.message.createMessagesGroup(workspace, card, startAt, endAt, blobId, count) + } + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { return await this.message.createReaction(message, reaction, creator, created) } @@ -58,12 +69,12 @@ export class CockroachAdapter implements DbAdapter { return await this.message.removeReaction(message, reaction, creator) } - async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - return await this.message.createAttachment(message, card, creator, created) + async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { + return await this.message.createAttachment(message, attachment, creator, created) } - async removeAttachment(message: MessageID, card: CardID): Promise { - return await this.message.removeAttachment(message, card) + async removeAttachment(message: MessageID, attachment: CardID): Promise { + return await this.message.removeAttachment(message, attachment) } async findMessages(workspace: string, params: FindMessagesParams): Promise { diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index e8e6d5fdca1..492e355f9d5 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -6,7 +6,7 @@ import { SortOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment, type ThreadID + Direction, type Reaction, type Attachment, type BlobID } from '@hcengineering/communication-types' import {BaseDb} from './base.ts' @@ -15,15 +15,16 @@ import { type MessageDb, type AttachmentDb, type ReactionDb, - type PatchDb + type PatchDb, + type MessagesGroupDb } from './types.ts' export class MessagesDb extends BaseDb { //Message - async createMessage(workspace: string, thread: ThreadID, content: RichText, creator: SocialID, created: Date): Promise { + async createMessage(workspace: string, 
card: CardID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: MessageDb = { workspace_id: workspace, - thread_id: thread, + card_id: card, content: content, creator: creator, created: created, @@ -49,6 +50,19 @@ export class MessagesDb extends BaseDb { await this.insert(TableName.Patch, dbData) } + + async createMessagesGroup(workspace: string,card: CardID, startAt: Date, endAt: Date, blobId: BlobID, count: number): Promise { + const dbData: MessagesGroupDb = { + workspace_id: workspace, + card_id: card, + start_at: startAt, + end_at: endAt, + blob_id: blobId, + count + } + await this.insert(TableName.MessagesGroup, dbData) + } + //Attachment async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { const dbData: AttachmentDb = { @@ -119,9 +133,9 @@ export class MessagesDb extends BaseDb { values.push(params.id) } - if (params.thread != null) { - where.push(`m.thread_id = $${index++}`) - values.push(params.thread) + if (params.card != null) { + where.push(`m.card_id = $${index++}`) + values.push(params.card) } if (params.from != null) { @@ -186,7 +200,7 @@ export class MessagesDb extends BaseDb { return { id: row.id, - thread: row.thread_id, + card: row.card_id, content: lastPatch?.content ?? row.content, creator: row.creator, created: new Date(row.created), diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index d58fa56f5e1..638a77ab467 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -91,7 +91,7 @@ export class NotificationsDb extends BaseDb { const select = ` SELECT n.message_id, n.context, - m.thread_id AS message_thread, + m.card_id AS message_card, m.content AS message_content, m.creator AS message_creator, m.created AS message_created, @@ -209,7 +209,7 @@ export class NotificationsDb extends BaseDb { return { message: { id: row.id, - thread: row.message_thread, + card: row.message_card, content: lastPatch?.content ?? 
row.message_content, creator: row.message_creator, created, diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts index 7742067ebf2..d8d1e7d0b1a 100644 --- a/packages/cockroach/src/db/types.ts +++ b/packages/cockroach/src/db/types.ts @@ -1,22 +1,32 @@ -import type {CardID, ContextID, MessageID, RichText, SocialID, ThreadID } from "@hcengineering/communication-types" +import type {ContextID, MessageID, RichText, SocialID, CardID, BlobID } from "@hcengineering/communication-types" export enum TableName { - Message = 'message', - Patch = 'patch', - Attachment = 'attachment', - Reaction = 'reaction', - Notification = 'notification', - NotificationContext = 'notification_context' + Message = 'c_message', + MessagesGroup = 'c_messages_group', + Patch = 'c_patch', + Attachment = 'c_attachment', + Reaction = 'c_reaction', + Notification = 'c_notification', + NotificationContext = 'c_notification_context' } export interface MessageDb { workspace_id: string, - thread_id: ThreadID, + card_id: CardID, content: RichText, creator: SocialID, created: Date, } +export interface MessagesGroupDb { + workspace_id: string, + card_id: CardID, + start_at: Date, + end_at: Date, + blob_id: BlobID, + count: number +} + export interface PatchDb { message_id: MessageID, content: RichText, diff --git a/packages/examples/package.json b/packages/examples/package.json index c7b2c7cbd72..4cf5e1e2bb6 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index 3b1764a9b15..2f6c9fbf2e5 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -1,9 +1,9 @@ -import { type Message, type SocialID, SortOrder, type ThreadID, type Window } from '@hcengineering/communication-types' +import { type Message, type SocialID, SortOrder, type CardID, type Window } from '@hcengineering/communication-types' import { getWebsocketClient } from '@hcengineering/communication-client-ws' import { getSqliteClient } from '@hcengineering/communication-client-sqlite' import { createMessagesQuery, initLiveQueries } from '@hcengineering/communication-client-query' -const thread = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as ThreadID +const card = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as CardID const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' const creator1 = 'email:vasya@huly.com' as SocialID @@ -26,7 +26,7 @@ export async function example() { let window: Window | undefined = undefined - query1.query({ thread, sort: SortOrder.Desc }, (res) => { + query1.query({ card, sort: SortOrder.Desc }, (res) => { window = res const r = window.getResult() r.reverse() @@ -44,19 +44,19 @@ export async function example() { }) async function editMessage(message: Message) { - await client.createPatch(thread, message.id, message.content + '_1_', creator1) + await client.createPatch(card, message.id, message.content + '_1_', creator1) } async function deleteMessage(message: Message) { - await client.removeMessage(thread, message.id) + await client.removeMessage(card, message.id) } async function addReaction(message: Message) { - await client.createReaction(thread, message.id, '👍', creator1) + await client.createReaction(card, message.id, '👍', creator1) } 
async function removeReaction(message: Message) { - await client.removeReaction(thread, message.id, '👍', creator1) + await client.removeReaction(card, message.id, '👍', creator1) } function scrollToBottom() { @@ -116,7 +116,7 @@ export async function example() { const el = event.target?.getElementsByTagName('input')[0] as HTMLInputElement if (el.value == '' || el.value == null) return - await client.createMessage(thread, el.value, creator1) + await client.createMessage(card, el.value, creator1) el.value = '' }) diff --git a/packages/query/package.json b/packages/query/package.json index fafefb95c30..9bbdc5937ee 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 75c79b82bc9..1616cb24dfd 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -80,7 +80,7 @@ export class MessagesQuery extends BaseQuery { if (this.params.id != null && this.params.id !== message.id) { return false } - if (this.params.thread != null && this.params.thread !== message.thread) { + if (this.params.card != null && this.params.card !== message.card) { return false } return true diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index f605ca25704..a8fb2b61bc2 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.2", + "version": "0.1.3", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index 05cb861740c..aa9593f0258 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -9,23 +9,22 @@ import type { NotificationContextUpdate, RichText, SocialID, - Notification, - ThreadID + Notification } from '@hcengineering/communication-types' import type { FindMessagesParams } from '@hcengineering/communication-types' import type { BroadcastEvent } from './event.ts' export interface Client { - createMessage(thread: ThreadID, content: RichText, creator: SocialID): Promise - removeMessage(thread: ThreadID, id: MessageID): Promise - createPatch(thread: ThreadID, message: MessageID, content: RichText, creator: SocialID): Promise + createMessage(card: CardID, content: RichText, creator: SocialID): Promise + removeMessage(card: CardID, id: MessageID): Promise + createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise - createReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise - removeReaction(thread: ThreadID, message: MessageID, reaction: string, creator: SocialID): Promise + createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise + removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise - createAttachment(thread: ThreadID, message: MessageID, card: CardID, creator: SocialID): Promise - removeAttachment(thread: ThreadID, message: MessageID, card: CardID): Promise + createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise + removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise 
createNotification(message: MessageID, context: ContextID): Promise removeNotification(message: MessageID, context: ContextID): Promise diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index ceba22316be..a453b8566ef 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -11,13 +11,13 @@ import type { RichText, SocialID, Notification, - ThreadID + BlobID } from '@hcengineering/communication-types' export interface DbAdapter { createMessage( workspace: string, - thread: ThreadID, + card: CardID, content: RichText, creator: SocialID, created: Date @@ -25,11 +25,20 @@ export interface DbAdapter { removeMessage(id: MessageID): Promise createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise + createMessagesGroup( + workspace: string, + card: CardID, + startAt: Date, + endAt: Date, + blobId: BlobID, + count: number + ): Promise + createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise - createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise - removeAttachment(message: MessageID, card: CardID): Promise + createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise + removeAttachment(message: MessageID, attachment: CardID): Promise findMessages(workspace: string, query: FindMessagesParams): Promise diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts index 0bd2927290d..2190829c7ce 100644 --- a/packages/sdk-types/src/event.ts +++ b/packages/sdk-types/src/event.ts @@ -10,8 +10,7 @@ import type { Reaction, RichText, SocialID, - Notification, - ThreadID + Notification } from '@hcengineering/communication-types' export enum EventType { @@ -58,20 +57,20 @@ export type Event = export interface CreateMessageEvent { type: EventType.CreateMessage - thread: ThreadID + card: CardID content: RichText creator: SocialID } export interface RemoveMessageEvent { type: EventType.RemoveMessage - thread: ThreadID + card: CardID message: MessageID } export interface CreatePatchEvent { type: EventType.CreatePatch - thread: ThreadID + card: CardID message: MessageID content: RichText creator: SocialID @@ -79,7 +78,7 @@ export interface CreatePatchEvent { export interface CreateReactionEvent { type: EventType.CreateReaction - thread: ThreadID + card: CardID message: MessageID reaction: string creator: SocialID @@ -87,7 +86,7 @@ export interface CreateReactionEvent { export interface RemoveReactionEvent { type: EventType.RemoveReaction - thread: ThreadID + card: CardID message: MessageID reaction: string creator: SocialID @@ -95,17 +94,17 @@ export interface RemoveReactionEvent { export interface CreateAttachmentEvent { type: EventType.CreateAttachment - thread: ThreadID - message: MessageID card: CardID + message: MessageID + attachment: CardID creator: SocialID } export interface RemoveAttachmentEvent { type: EventType.RemoveAttachment - thread: ThreadID - message: MessageID card: CardID + message: MessageID + attachment: CardID } export interface CreateNotificationEvent { @@ -170,25 +169,25 @@ export interface MessageCreatedEvent { export interface MessageRemovedEvent { type: EventType.MessageRemoved - thread: ThreadID + card: CardID message: MessageID } export interface PatchCreatedEvent { type: EventType.PatchCreated - thread: ThreadID + card: CardID patch: Patch } export interface ReactionCreatedEvent { type: 
EventType.ReactionCreated - thread: ThreadID + card: CardID reaction: Reaction } export interface ReactionRemovedEvent { type: EventType.ReactionRemoved - thread: ThreadID + card: CardID message: MessageID reaction: string creator: SocialID @@ -196,15 +195,15 @@ export interface ReactionRemovedEvent { export interface AttachmentCreatedEvent { type: EventType.AttachmentCreated - thread: ThreadID + card: CardID attachment: Attachment } export interface AttachmentRemovedEvent { type: EventType.AttachmentRemoved - thread: ThreadID - message: MessageID card: CardID + message: MessageID + attachment: CardID } export interface NotificationCreatedEvent { diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index fb4923b6a4b..c26f7106c13 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -1,4 +1,5 @@ import type { FindMessagesParams, Message, SocialID } from '@hcengineering/communication-types' + import type { BroadcastEvent, EventResult, Event } from './event' export interface ConnectionInfo { diff --git a/packages/server-core/package.json b/packages/server-core/package.json index f373bb6879b..3c18a6d6cb5 100644 --- a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-core", - "version": "0.1.2", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-core/src/eventProcessor.ts b/packages/server-core/src/eventProcessor.ts index cd6ca397187..6efeb0a81fa 100644 --- a/packages/server-core/src/eventProcessor.ts +++ b/packages/server-core/src/eventProcessor.ts @@ -72,10 +72,10 @@ export class EventProcessor { private async createMessage(_personalWorkspace: string, event: CreateMessageEvent): Promise { const created = new Date() - const id = await this.db.createMessage(this.workspace, event.thread, event.content, event.creator, created) + const id = await this.db.createMessage(this.workspace, event.card, event.content, event.creator, created) const message: Message = { id, - thread: event.thread, + card: event.card, content: event.content, creator: event.creator, created: created, @@ -105,7 +105,7 @@ export class EventProcessor { } const broadcastEvent: PatchCreatedEvent = { type: EventType.PatchCreated, - thread: event.thread, + card: event.card, patch } return { @@ -119,7 +119,7 @@ export class EventProcessor { const broadcastEvent: MessageRemovedEvent = { type: EventType.MessageRemoved, - thread: event.thread, + card: event.card, message: event.message } @@ -141,7 +141,7 @@ export class EventProcessor { } const broadcastEvent: ReactionCreatedEvent = { type: EventType.ReactionCreated, - thread: event.thread, + card: event.card, reaction } return { @@ -154,7 +154,7 @@ export class EventProcessor { await this.db.removeReaction(event.message, event.reaction, event.creator) const broadcastEvent: ReactionRemovedEvent = { type: EventType.ReactionRemoved, - thread: event.thread, + card: event.card, message: event.message, reaction: event.reaction, creator: event.creator @@ -177,7 +177,7 @@ export class EventProcessor { } const broadcastEvent: AttachmentCreatedEvent = { type: EventType.AttachmentCreated, - thread: event.thread, + card: event.card, attachment } @@ -191,9 +191,9 @@ export class EventProcessor { await this.db.removeAttachment(event.message, event.card) const broadcastEvent: AttachmentRemovedEvent = { type: EventType.AttachmentRemoved, - thread: event.thread, 
+ card: event.card, message: event.message, - card: event.card + attachment: event.attachment } return { broadcastEvent, diff --git a/packages/server-core/src/manager.ts b/packages/server-core/src/manager.ts index f2066d88e5b..85f3e6b2c60 100644 --- a/packages/server-core/src/manager.ts +++ b/packages/server-core/src/manager.ts @@ -94,66 +94,66 @@ export class Manager { data.contextQueries.delete(queryId) } - closeSession(sessionId: string): void { - this.dataBySessionId.delete(sessionId) - } + // closeSession(sessionId: string): void { + // this.dataBySessionId.delete(sessionId) + // } - async next(event: BroadcastEvent, workspace: string): Promise { - // await this.broadcast(event, workspace) - // const derived = await this.triggers.process(event, workspace) - // const derivedPromises: Promise[] = [] - // for (const d of derived) { - // derivedPromises.push(this.next(d, workspace)) - // } - // await Promise.all(derivedPromises) - } + // async next(event: BroadcastEvent, workspace: string): Promise { + // // await this.broadcast(event, workspace) + // // const derived = await this.triggers.process(event, workspace) + // // const derivedPromises: Promise[] = [] + // // for (const d of derived) { + // // derivedPromises.push(this.next(d, workspace)) + // // } + // // await Promise.all(derivedPromises) + // } - private async broadcast(event: BroadcastEvent, workspace: string): Promise { - // const sessions = this.sessionsByWorkspace.get(workspace) ?? [] - // const response: Response = { result: event } - // for (const session of sessions) { - // const msg = serializeResponse(response, session.session.binary) - // if (this.match(event, session)) { - // session.ws.send(msg) - // } - // } - } + // private async broadcast(event: BroadcastEvent, workspace: string): Promise { + // // const sessions = this.sessionsByWorkspace.get(workspace) ?? 
[] + // // const response: Response = { result: event } + // // for (const session of sessions) { + // // const msg = serializeResponse(response, session.session.binary) + // // if (this.match(event, session)) { + // // session.ws.send(msg) + // // } + // // } + // } private match(event: BroadcastEvent, info: SessionInfo): boolean { switch (event.type) { case EventType.MessageCreated: return this.matchMessagesQuery( - { id: event.message.id, thread: event.message.thread }, + { id: event.message.id, card: event.message.card }, Array.from(info.messageQueries.values()) ) case EventType.PatchCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.patch.message }, + { card: event.card, id: event.patch.message }, Array.from(info.messageQueries.values()) ) case EventType.MessageRemoved: return this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.ReactionCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.reaction.message }, + { card: event.card, id: event.reaction.message }, Array.from(info.messageQueries.values()) ) case EventType.ReactionRemoved: return this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.AttachmentCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.attachment.message }, + { card: event.card, id: event.attachment.message }, Array.from(info.messageQueries.values()) ) case EventType.AttachmentRemoved: return this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.NotificationCreated: @@ -175,12 +175,12 @@ export class Manager { } } - private matchMessagesQuery(params: { id?: MessageID; thread?: string }, queries: FindMessagesParams[]): boolean { + private matchMessagesQuery(params: { id?: MessageID; card?: string }, queries: FindMessagesParams[]): boolean { if (queries.length === 0) return false for (const query of queries) { if (query.id != null && query.id !== params.id) continue - if (query.thread != null && query.thread !== params.thread) continue + if (query.card != null && query.card !== params.card) continue return true } diff --git a/packages/server-core/src/triggers.ts b/packages/server-core/src/triggers.ts index 589205b18fd..cf512903d44 100644 --- a/packages/server-core/src/triggers.ts +++ b/packages/server-core/src/triggers.ts @@ -21,7 +21,7 @@ export class Triggers { } private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { - const card = event.message.thread as any as CardID + const card = event.message.card as any as CardID const subscribedPersonalWorkspaces = [ 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json index 234db94dc48..d6d0fa5259a 100644 --- a/packages/server-ws/package.json +++ b/packages/server-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-ws", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-ws/src/eventProcessor.ts b/packages/server-ws/src/eventProcessor.ts index 1dde23ae600..512fb52ef54 100644 --- a/packages/server-ws/src/eventProcessor.ts +++ 
b/packages/server-ws/src/eventProcessor.ts @@ -73,10 +73,10 @@ export class EventProcessor { private async createMessage(event: CreateMessageEvent): Promise { const created = new Date() - const id = await this.db.createMessage(this.workspace, event.thread, event.content, event.creator, created) + const id = await this.db.createMessage(this.workspace, event.card, event.content, event.creator, created) const message: Message = { id, - thread: event.thread, + card: event.card, content: event.content, creator: event.creator, created: created, @@ -106,7 +106,7 @@ export class EventProcessor { } const broadcastEvent: PatchCreatedEvent = { type: EventType.PatchCreated, - thread: event.thread, + card: event.card, patch } return { @@ -120,7 +120,7 @@ export class EventProcessor { const broadcastEvent: MessageRemovedEvent = { type: EventType.MessageRemoved, - thread: event.thread, + card: event.card, message: event.message } @@ -142,7 +142,7 @@ export class EventProcessor { } const broadcastEvent: ReactionCreatedEvent = { type: EventType.ReactionCreated, - thread: event.thread, + card: event.card, reaction } return { @@ -155,7 +155,7 @@ export class EventProcessor { await this.db.removeReaction(event.message, event.reaction, event.creator) const broadcastEvent: ReactionRemovedEvent = { type: EventType.ReactionRemoved, - thread: event.thread, + card: event.card, message: event.message, reaction: event.reaction, creator: event.creator @@ -178,7 +178,7 @@ export class EventProcessor { } const broadcastEvent: AttachmentCreatedEvent = { type: EventType.AttachmentCreated, - thread: event.thread, + card: event.card, attachment } @@ -192,9 +192,9 @@ export class EventProcessor { await this.db.removeAttachment(event.message, event.card) const broadcastEvent: AttachmentRemovedEvent = { type: EventType.AttachmentRemoved, - thread: event.thread, + card: event.card, message: event.message, - card: event.card + attachment: event.attachment } return { broadcastEvent, diff --git a/packages/server-ws/src/manager.ts b/packages/server-ws/src/manager.ts index b9a85039203..2fbb7eea5c0 100644 --- a/packages/server-ws/src/manager.ts +++ b/packages/server-ws/src/manager.ts @@ -117,37 +117,37 @@ export class Manager { switch (event.type) { case EventType.MessageCreated: return this.matchMessagesQuery( - { id: event.message.id, thread: event.message.thread }, + { id: event.message.id, card: event.message.card }, Array.from(info.messageQueries.values()) ) case EventType.PatchCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.patch.message }, + { card: event.card, id: event.patch.message }, Array.from(info.messageQueries.values()) ) case EventType.MessageRemoved: return this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.ReactionCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.reaction.message }, + { card: event.card, id: event.reaction.message }, Array.from(info.messageQueries.values()) ) case EventType.ReactionRemoved: return this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.AttachmentCreated: return this.matchMessagesQuery( - { thread: event.thread, id: event.attachment.message }, + { card: event.card, id: event.attachment.message }, Array.from(info.messageQueries.values()) ) case EventType.AttachmentRemoved: return 
this.matchMessagesQuery( - { thread: event.thread, id: event.message }, + { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) case EventType.NotificationCreated: @@ -169,12 +169,12 @@ export class Manager { } } - private matchMessagesQuery(params: { id?: MessageID; thread?: string }, queries: FindMessagesParams[]): boolean { + private matchMessagesQuery(params: { id?: MessageID; card?: string }, queries: FindMessagesParams[]): boolean { if (queries.length === 0) return false for (const query of queries) { if (query.id != null && query.id !== params.id) continue - if (query.thread != null && query.thread !== params.thread) continue + if (query.card != null && query.card !== params.card) continue return true } diff --git a/packages/server-ws/src/server/server.ts b/packages/server-ws/src/server/server.ts index 0941596c973..f26916359e8 100644 --- a/packages/server-ws/src/server/server.ts +++ b/packages/server-ws/src/server/server.ts @@ -2,8 +2,6 @@ import cors from 'cors' import express, { type Express } from 'express' import { Server } from 'http' -import { ApiError } from './error' - export function createServer (): Express { const app = express() diff --git a/packages/server-ws/src/triggers.ts b/packages/server-ws/src/triggers.ts index 589205b18fd..cf512903d44 100644 --- a/packages/server-ws/src/triggers.ts +++ b/packages/server-ws/src/triggers.ts @@ -21,7 +21,7 @@ export class Triggers { } private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { - const card = event.message.thread as any as CardID + const card = event.message.card as any as CardID const subscribedPersonalWorkspaces = [ 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index c8c2242a33b..944828f56de 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.0", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index 2ee4a3c98f1..a3430bdb6d9 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -11,7 +11,7 @@ import { type FindNotificationContextParams, type NotificationContext, type Notification, - type ThreadID + type BlobID } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' @@ -34,12 +34,12 @@ export class SqliteAdapter implements DbAdapter { async createMessage( workspace: string, - thread: ThreadID, + card: CardID, content: RichText, creator: SocialID, created: Date ): Promise { - return await this.message.createMessage(workspace, thread, content, creator, created) + return await this.message.createMessage(workspace, card, content, creator, created) } async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { @@ -50,6 +50,18 @@ export class SqliteAdapter implements DbAdapter { return await this.message.removeMessage(message) } + /* eslint-disable @typescript-eslint/no-unused-vars */ + async createMessagesGroup( + workspace: string, + card: CardID, + startAt: Date, + endAt: Date, + blobId: BlobID, + count: number + ): Promise { + //TODO: implement + } + async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): 
Promise { return await this.message.createReaction(message, reaction, creator, created) } @@ -58,12 +70,12 @@ export class SqliteAdapter implements DbAdapter { return await this.message.removeReaction(message, reaction, creator) } - async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - return await this.message.createAttachment(message, card, creator, created) + async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { + return await this.message.createAttachment(message, attachment, creator, created) } - async removeAttachment(message: MessageID, card: CardID): Promise { - return await this.message.removeAttachment(message, card) + async removeAttachment(message: MessageID, attachment: CardID): Promise { + return await this.message.removeAttachment(message, attachment) } async findMessages(workspace: string, params: FindMessagesParams): Promise { diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts index 95bceae0a26..cf39b447e07 100644 --- a/packages/sqlite-wasm/src/db/message.ts +++ b/packages/sqlite-wasm/src/db/message.ts @@ -6,7 +6,7 @@ import { SortOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment, type ThreadID + Direction, type Reaction, type Attachment } from '@hcengineering/communication-types' import {BaseDb} from './base.ts' @@ -20,11 +20,11 @@ import { export class MessagesDb extends BaseDb { //Message - async createMessage(workspace: string, thread: ThreadID, content: RichText, creator: SocialID, created: Date): Promise { + async createMessage(workspace: string, card: CardID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: MessageDb = { id: self.crypto.randomUUID(), workspace_id: workspace, - thread_id: thread, + card_id: card, content: content, creator: creator, created: created, @@ -89,7 +89,7 @@ export class MessagesDb extends BaseDb { //Find messages async find(workspace: string, params: FindMessagesParams): Promise { const select = `SELECT m.id, - m.thread_id, + m.card_id, m.content, m.creator, m.created, @@ -135,8 +135,8 @@ export class MessagesDb extends BaseDb { buildMessageWhere(workspace: string, params: FindMessagesParams): string { const where: string[] = [`m.workspace_id = '${workspace}'`] - if (params.thread != null) { - where.push(`m.thread_id = '${params.thread}'`) + if (params.card != null) { + where.push(`m.card_id = '${params.card}'`) } if (params.id != null) { where.push(`m.id = '${params.id}'`) @@ -168,7 +168,7 @@ export class MessagesDb extends BaseDb { return { id: row.id, - thread: row.thread_id, + card: row.card_id, content: lastPatch?.content ?? row.content, creator: row.creator, created: new Date(row.created), diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index 1b0784eb5b9..d1daf8ce17c 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -104,7 +104,7 @@ export class NotificationsDb extends BaseDb { const select = ` SELECT n.message_id, n.context_id, - m.thread_id AS message_thread, + m.card_id AS message_card, m.content AS message_content, m.creator AS message_creator, m.created AS message_created, @@ -206,7 +206,7 @@ export class NotificationsDb extends BaseDb { return { message: { id: row.message_id, - thread: row.message_thread, + card: row.message_card, content: lastPatch?.content ?? 
row.message_content, creator: row.message_creator, created, diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts index 5aff499ee8e..ee6c3d5adb8 100644 --- a/packages/sqlite-wasm/src/db/types.ts +++ b/packages/sqlite-wasm/src/db/types.ts @@ -12,7 +12,7 @@ export enum TableName { export interface MessageDb { id: string workspace_id: string, - thread_id: string, + card_id: string, content: RichText, creator: SocialID, created: Date, diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts index e729111a0a5..a1aee473477 100644 --- a/packages/sqlite-wasm/src/migrations.ts +++ b/packages/sqlite-wasm/src/migrations.ts @@ -12,7 +12,7 @@ async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promis ( id TEXT NOT NULL, workspace_id TEXT NOT NULL, - thread_id TEXT NOT NULL, + card_id TEXT NOT NULL, content TEXT NOT NULL, creator TEXT NOT NULL, created DATETIME NOT NULL, diff --git a/packages/types/package.json b/packages/types/package.json index 64513aeaf5b..bf9c4ed3a5b 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.2", + "version": "0.1.3", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -15,6 +15,9 @@ "peerDependencies": { "typescript": "^5.6.3" }, + "dependencies": { + "@hcengineering/core": "0.6.411" + }, "repository": { "type": "git", "url": "git+https://github.com/hcengineering/communication.git" diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index df3c18bd420..32cfee8179b 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,6 +1,8 @@ -export type CardID = string & { card: true } +import type { Card, Ref, Blob } from '@hcengineering/core' + +export type BlobID = Ref +export type CardID = Ref export type SocialID = string & { social: true } -export type ThreadID = string & { thread: true } export type RichText = string export type ID = string @@ -13,13 +15,21 @@ interface Object { export interface Message extends Object { id: MessageID - thread: ThreadID + card: CardID content: RichText edited: Date reactions: Reaction[] attachments: Attachment[] } +export interface MessagesGroup { + card: CardID + startAt: Date + endAt: Date + blobId: Ref + count: number +} + export interface Patch extends Object { message: MessageID content: RichText diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 7109e761a14..302893542a1 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -1,4 +1,4 @@ -import type { CardID, MessageID, ThreadID } from './message' +import type { CardID, MessageID } from './message' import type { ContextID } from './notification' export enum SortOrder { @@ -31,7 +31,7 @@ interface FindParams { export interface FindMessagesParams extends FindParams { id?: MessageID - thread?: ThreadID + card?: CardID } export interface FindNotificationsParams extends FindParams { From 458c9b14b5861de24bfdfe1123aa21325ba8cc02 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 16:32:19 +0400 Subject: [PATCH 033/636] Update version (#22) Signed-off-by: Kristina Fefelova --- bun.lock | 26 +++++++++++++------------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- 
packages/sdk-types/package.json | 2 +- packages/server-core/package.json | 2 +- packages/server-ws/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 2 +- 12 files changed, 24 insertions(+), 24 deletions(-) diff --git a/bun.lock b/bun.lock index 6b3b71842a3..b7b75d82bbb 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -80,7 +80,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -96,7 +96,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.2", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-types": "^0.1.0", }, @@ -125,7 +125,7 @@ }, "packages/server-core": { "name": "@hcengineering/communication-server-core", - "version": "0.1.2", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -144,7 +144,7 @@ }, "packages/server-ws": { "name": "@hcengineering/communication-server-ws", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -168,7 +168,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.0", + "version": "0.1.4", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -185,7 +185,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.2", + "version": "0.1.4", "dependencies": { "@hcengineering/core": "0.6.411", }, @@ -262,7 +262,7 @@ "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.1", "", {}, "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA=="], - "@msgpack/msgpack": ["@msgpack/msgpack@3.0.0-beta3", "", {}, "sha512-LZYWBmrkKO0quyjnJCeSaqHOcsuZUvE+hlIYRqFc0qI27dLnsOdnv8Fsj2cyitzQTJZmCPm53vZ/P8QTH7E84A=="], + 
"@msgpack/msgpack": ["@msgpack/msgpack@3.0.0-beta4", "", {}, "sha512-pIHfHZefX0jX8enNRdkwc08TXoRdBm/gLrc09M9374bkvlRJMxs2jbek0MKb4o7msDQEhIpGv3lfJWjBh/CuQQ=="], "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], @@ -566,7 +566,7 @@ "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], - "object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="], + "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 8d0cd5c1036..5b1834f7bc9 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index d3237c0ed6e..a1fe8a26f6b 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index b33ff3a07a8..c5d682eae91 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 0057ad6b2bc..33c53a3028f 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index 4cf5e1e2bb6..351076565ec 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index 9bbdc5937ee..c331e13ae54 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index a8fb2b61bc2..0e2d2655023 100644 --- 
a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.3", + "version": "0.1.4", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-core/package.json b/packages/server-core/package.json index 3c18a6d6cb5..f731ff2fbf2 100644 --- a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-core", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json index d6d0fa5259a..0f313d72ecc 100644 --- a/packages/server-ws/package.json +++ b/packages/server-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-ws", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 944828f56de..d14a8a11f6b 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index bf9c4ed3a5b..39778187f5b 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.3", + "version": "0.1.4", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", From 0875a9d06fd447c5676cb94ba8c6c86a63d34354 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 20:58:30 +0400 Subject: [PATCH 034/636] V0.1.4 (#23) Signed-off-by: Kristina Fefelova --- packages/query/package.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/query/package.json b/packages/query/package.json index c331e13ae54..8ba544a91db 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -20,5 +20,12 @@ }, "peerDependencies": { "typescript": "^5.6.3" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" } } From ed3f99a3e5d773c71b68ad2137777f80a3f89e7a Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 22:06:51 +0400 Subject: [PATCH 035/636] Update version and server-core (#24) Signed-off-by: Kristina Fefelova --- bun.lock | 22 ++++---- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/serverApi.ts | 12 ++--- packages/server-core/package.json | 2 +- packages/server-core/src/index.ts | 25 +++++++--- packages/server-core/src/manager.ts | 75 +++++++++++++--------------- packages/server-core/src/triggers.ts | 17 ++++--- packages/server-ws/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 2 +- 16 files changed, 89 insertions(+), 84 deletions(-) diff --git a/bun.lock b/bun.lock index b7b75d82bbb..7b233e4c85a 100644 --- a/bun.lock +++ 
b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -80,7 +80,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -96,7 +96,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-types": "^0.1.0", }, @@ -125,7 +125,7 @@ }, "packages/server-core": { "name": "@hcengineering/communication-server-core", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -144,7 +144,7 @@ }, "packages/server-ws": { "name": "@hcengineering/communication-server-ws", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -168,7 +168,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -185,7 +185,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.4", + "version": "0.1.5", "dependencies": { "@hcengineering/core": "0.6.411", }, diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 5b1834f7bc9..10d80ae805d 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index a1fe8a26f6b..a5b3b7c72cf 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": 
"0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index c5d682eae91..3140dc508be 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 33c53a3028f..f59ea50c070 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index 351076565ec..42af3af812e 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index 8ba544a91db..bedab9911b9 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 0e2d2655023..b2d4bef0f48 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.4", + "version": "0.1.5", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index c26f7106c13..77bc2ea4855 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -1,6 +1,6 @@ import type { FindMessagesParams, Message, SocialID } from '@hcengineering/communication-types' -import type { BroadcastEvent, EventResult, Event } from './event' +import type { EventResult, Event } from './event' export interface ConnectionInfo { sessionId: string @@ -8,17 +8,13 @@ export interface ConnectionInfo { socialId: SocialID } -export type Result = { - broadcastEvent?: BroadcastEvent - result: EventResult -} - export interface ServerApi { findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise - unsubscribeQuery(info: ConnectionInfo, id: number): Promise + event(info: ConnectionInfo, event: Event): Promise - event(info: ConnectionInfo, event: Event): Promise + closeSession(sessionId: string): Promise + unsubscribeQuery(info: ConnectionInfo, id: number): Promise close(): Promise } diff --git a/packages/server-core/package.json b/packages/server-core/package.json index f731ff2fbf2..c0ef6fb0ad6 100644 --- a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-core", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-core/src/index.ts 
b/packages/server-core/src/index.ts index 9c919919e6b..9df66f2b2fc 100644 --- a/packages/server-core/src/index.ts +++ b/packages/server-core/src/index.ts @@ -1,10 +1,9 @@ import type { MeasureContext } from '@hcengineering/core' import type { FindMessagesParams, Message } from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' -import type { ConnectionInfo, DbAdapter, Event, ServerApi } from '@hcengineering/communication-sdk-types' +import type { ConnectionInfo, DbAdapter, Event, EventResult, ServerApi } from '@hcengineering/communication-sdk-types' -import { type Result } from './eventProcessor.ts' -import { Manager } from './manager.ts' +import { Manager, type BroadcastSessionsFunc } from './manager.ts' export class Api implements ServerApi { private readonly manager: Manager @@ -12,14 +11,20 @@ export class Api implements ServerApi { private constructor( private readonly ctx: MeasureContext, private readonly workspace: string, - db: DbAdapter + db: DbAdapter, + broadcast: BroadcastSessionsFunc ) { - this.manager = new Manager(this.ctx, db, this.workspace) + this.manager = new Manager(this.ctx, db, this.workspace, broadcast) } - static async create(ctx: MeasureContext, workspace: string, dbUrl: string): Promise { + static async create( + ctx: MeasureContext, + workspace: string, + dbUrl: string, + broadcast: BroadcastSessionsFunc + ): Promise { const db = await createDbAdapter(dbUrl) - return new Api(ctx, workspace, db) + return new Api(ctx, workspace, db, broadcast) } async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { @@ -30,10 +35,14 @@ export class Api implements ServerApi { this.manager.unsubscribeQuery(info, id) } - async event(info: ConnectionInfo, event: Event): Promise { + async event(info: ConnectionInfo, event: Event): Promise { return await this.manager.event(info, event) } + async closeSession(sessionId: string): Promise { + this.manager.closeSession(sessionId) + } + async close(): Promise { this.manager.close() } diff --git a/packages/server-core/src/manager.ts b/packages/server-core/src/manager.ts index 85f3e6b2c60..e66af2e92b1 100644 --- a/packages/server-core/src/manager.ts +++ b/packages/server-core/src/manager.ts @@ -5,7 +5,8 @@ import { EventType, type NotificationContextCreatedEvent, type NotificationCreatedEvent, - type Event + type Event, + type EventResult } from '@hcengineering/communication-sdk-types' import type { FindMessagesParams, @@ -16,12 +17,14 @@ import type { } from '@hcengineering/communication-types' import { Triggers } from './triggers.ts' -import { EventProcessor, type Result } from './eventProcessor.ts' +import { EventProcessor } from './eventProcessor.ts' import type { MeasureContext } from '@hcengineering/core' type QueryId = number | string type QueryType = 'message' | 'notification' | 'context' +export type BroadcastSessionsFunc = (ctx: MeasureContext, sessionIds: string[], result: any) => void + type SessionInfo = { personalWorkspace: string messageQueries: Map @@ -37,10 +40,11 @@ export class Manager { constructor( private readonly ctx: MeasureContext, private readonly db: DbAdapter, - private readonly workspace: string + private readonly workspace: string, + private readonly broadcast: BroadcastSessionsFunc ) { this.eventProcessor = new EventProcessor(db, this.workspace) - this.triggers = new Triggers(db) + this.triggers = new Triggers(db, this.workspace) } async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: 
number): Promise { @@ -51,18 +55,13 @@ export class Manager { return result } - async event(info: ConnectionInfo, event: Event): Promise { - return await this.eventProcessor.process(info.personalWorkspace, event) - // const { result, broadcastEvent } = await this.eventProcessor.process(personalWorkspace, event) - // if (broadcastEvent !== undefined) { - // void this.manager.next(broadcastEvent) - // } - // return result + async event(info: ConnectionInfo, event: Event): Promise { + const { result, broadcastEvent } = await this.eventProcessor.process(info.personalWorkspace, event) + if (broadcastEvent !== undefined) { + void this.next(broadcastEvent) + } + return result } - // - // async broadcastEvent (ctx: MeasureContext, personalWorkspace: string, event: BroadcastEvent): Promise { - // void this.manager.next(event, personalWorkspace) - // } subscribeQuery(info: ConnectionInfo, type: QueryType, queryId: number, params: Record): void { const { sessionId, personalWorkspace } = info @@ -94,30 +93,28 @@ export class Manager { data.contextQueries.delete(queryId) } - // closeSession(sessionId: string): void { - // this.dataBySessionId.delete(sessionId) - // } - - // async next(event: BroadcastEvent, workspace: string): Promise { - // // await this.broadcast(event, workspace) - // // const derived = await this.triggers.process(event, workspace) - // // const derivedPromises: Promise[] = [] - // // for (const d of derived) { - // // derivedPromises.push(this.next(d, workspace)) - // // } - // // await Promise.all(derivedPromises) - // } - - // private async broadcast(event: BroadcastEvent, workspace: string): Promise { - // // const sessions = this.sessionsByWorkspace.get(workspace) ?? [] - // // const response: Response = { result: event } - // // for (const session of sessions) { - // // const msg = serializeResponse(response, session.session.binary) - // // if (this.match(event, session)) { - // // session.ws.send(msg) - // // } - // // } - // } + closeSession(sessionId: string): void { + this.dataBySessionId.delete(sessionId) + } + + async next(event: BroadcastEvent): Promise { + await this.broadcastEvent(event) + const derived = await this.triggers.process(event) + await Promise.all(derived.map((it) => this.next(it))) + } + + private async broadcastEvent(event: BroadcastEvent): Promise { + const sessionIds: string[] = [] + for (const [sessionId, session] of this.dataBySessionId.entries()) { + if (this.match(event, session)) { + sessionIds.push(sessionId) + } + } + + if (sessionIds.length > 0) { + this.broadcast(this.ctx, sessionIds, event) + } + } private match(event: BroadcastEvent, info: SessionInfo): boolean { switch (event.type) { diff --git a/packages/server-core/src/triggers.ts b/packages/server-core/src/triggers.ts index cf512903d44..abc13967186 100644 --- a/packages/server-core/src/triggers.ts +++ b/packages/server-core/src/triggers.ts @@ -9,18 +9,21 @@ import { import type { NotificationContext, ContextID, CardID } from '@hcengineering/communication-types' export class Triggers { - constructor(private readonly db: DbAdapter) {} + constructor( + private readonly db: DbAdapter, + private readonly workspace: string + ) {} - async process(event: BroadcastEvent, workspace: string): Promise { + async process(event: BroadcastEvent): Promise { switch (event.type) { case EventType.MessageCreated: - return this.createNotifications(event, workspace) + return this.createNotifications(event) } return [] } - private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise 
{ + private async createNotifications(event: MessageCreatedEvent): Promise { const card = event.message.card as any as CardID const subscribedPersonalWorkspaces = [ 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', @@ -28,16 +31,16 @@ export class Triggers { ] const res: BroadcastEvent[] = [] - const contexts = await this.db.findContexts({ card }, [], workspace) + const contexts = await this.db.findContexts({ card }, [], this.workspace) res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) for (const personalWorkspace of subscribedPersonalWorkspaces) { const existsContext = contexts.find( - (it) => it.card === card && it.personalWorkspace === personalWorkspace && workspace === it.workspace + (it) => it.card === card && it.personalWorkspace === personalWorkspace && this.workspace === it.workspace ) const contextId = await this.getOrCreateContextId( - workspace, + this.workspace, card, personalWorkspace, res, diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json index 0f313d72ecc..0d3d8b8592c 100644 --- a/packages/server-ws/package.json +++ b/packages/server-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-ws", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index d14a8a11f6b..2a3ac5ff70d 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 39778187f5b..b0e085b1ec4 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.4", + "version": "0.1.5", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", From 2e1a003ee6a78d72f9e80422b974ae5acf3f0591 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 5 Feb 2025 23:15:55 +0400 Subject: [PATCH 036/636] Fix undefined card in messages (#25) Signed-off-by: Kristina Fefelova --- bun.lock | 22 +++++++++++----------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/cockroach/src/db/message.ts | 1 + packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/server-core/package.json | 2 +- packages/server-ws/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 2 +- 13 files changed, 23 insertions(+), 22 deletions(-) diff --git a/bun.lock b/bun.lock index 7b233e4c85a..334a1c3cef8 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ 
}, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -80,7 +80,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -96,7 +96,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-types": "^0.1.0", }, @@ -125,7 +125,7 @@ }, "packages/server-core": { "name": "@hcengineering/communication-server-core", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -144,7 +144,7 @@ }, "packages/server-ws": { "name": "@hcengineering/communication-server-ws", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -168,7 +168,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -185,7 +185,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.5", + "version": "0.1.6", "dependencies": { "@hcengineering/core": "0.6.411", }, diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 10d80ae805d..5b53ab7e8f2 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index a5b3b7c72cf..148c2f9acb1 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 3140dc508be..94b4beba79c 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json 
index f59ea50c070..dfe6abf8c7a 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 492e355f9d5..5257fee0fbe 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -104,6 +104,7 @@ export class MessagesDb extends BaseDb { async find(workspace: string, params: FindMessagesParams): Promise { //TODO: experiment with select to improve performance const select = `SELECT m.id, + m.card_id, m.content, m.creator, m.created, diff --git a/packages/examples/package.json b/packages/examples/package.json index 42af3af812e..19b969a0cc5 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index bedab9911b9..2bd148635d7 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index b2d4bef0f48..de40bac425e 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.5", + "version": "0.1.6", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-core/package.json b/packages/server-core/package.json index c0ef6fb0ad6..0e5f3e8870d 100644 --- a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-core", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json index 0d3d8b8592c..be65ebb48a7 100644 --- a/packages/server-ws/package.json +++ b/packages/server-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-ws", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 2a3ac5ff70d..dc8c419d486 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index b0e085b1ec4..4fd34511ba2 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.5", + "version": "0.1.6", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", From aacb92a877b02ce5aed984600726055652fe7dad Mon Sep 17 00:00:00 2001 From: 
Kristina Date: Thu, 6 Feb 2025 00:03:40 +0400 Subject: [PATCH 037/636] card_id UUID -> VARCHAR (#26) Signed-off-by: Kristina Fefelova --- packages/cockroach/migrations/01_message.sql | 14 +++++++------- packages/cockroach/migrations/03_attachment.sql | 2 +- .../migrations/05_notification_context.sql | 8 ++++---- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index 4622f819b59..3e255f21b9b 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -5,7 +5,7 @@ CREATE TABLE IF NOT EXISTS c_message ( id UUID NOT NULL DEFAULT gen_random_uuid(), workspace_id UUID NOT NULL, - card_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, content TEXT NOT NULL, creator VARCHAR(255) NOT NULL, @@ -18,12 +18,12 @@ CREATE TABLE IF NOT EXISTS c_message CREATE TABLE IF NOT EXISTS c_messages_group ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, - start_at TIMESTAMPTZ NOT NULL, - end_at TIMESTAMPTZ NOT NULL, - blob_id UUID NOT NULL, - count INT NOT NULL, + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + start_at TIMESTAMPTZ NOT NULL, + end_at TIMESTAMPTZ NOT NULL, + blob_id UUID NOT NULL, + count INT NOT NULL, UNIQUE (workspace_id, card_id, blob_id) ); diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql index 3029eec5699..00fa9294413 100644 --- a/packages/cockroach/migrations/03_attachment.sql +++ b/packages/cockroach/migrations/03_attachment.sql @@ -2,7 +2,7 @@ DROP TABLE IF EXISTS c_attachment CASCADE; CREATE TABLE IF NOT EXISTS c_attachment ( message_id UUID NOT NULL, - card_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL DEFAULT now(), diff --git a/packages/cockroach/migrations/05_notification_context.sql b/packages/cockroach/migrations/05_notification_context.sql index df42a3da378..7514d5febfe 100644 --- a/packages/cockroach/migrations/05_notification_context.sql +++ b/packages/cockroach/migrations/05_notification_context.sql @@ -1,11 +1,11 @@ DROP TABLE IF EXISTS c_notification_context CASCADE; CREATE TABLE IF NOT EXISTS c_notification_context ( - id UUID NOT NULL DEFAULT gen_random_uuid(), - workspace_id UUID NOT NULL, - card_id UUID NOT NULL, + id UUID NOT NULL DEFAULT gen_random_uuid(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, - personal_workspace UUID NOT NULL, + personal_workspace UUID NOT NULL, archived_from TIMESTAMPTZ, last_view TIMESTAMPTZ, From 54a5840b8192e8c7c156825351a0bf9bc1fcac76 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 6 Feb 2025 12:06:12 +0400 Subject: [PATCH 038/636] Fix types (#27) Signed-off-by: Kristina Fefelova --- bun.lock | 53 ++++++++++++++++------------- packages/client-query/package.json | 2 +- packages/client-query/src/index.ts | 4 +-- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/query/src/lq.ts | 8 ++--- packages/query/src/query.ts | 4 +-- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/query.ts | 7 ++++ packages/server-core/package.json | 4 +-- packages/server-ws/package.json | 4 +-- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 5 +-- packages/types/src/message.ts | 7 ++-- 17 files changed, 63 
insertions(+), 49 deletions(-) diff --git a/bun.lock b/bun.lock index 334a1c3cef8..b7b027bcb24 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -80,7 +80,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -96,7 +96,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-types": "^0.1.0", }, @@ -125,12 +125,12 @@ }, "packages/server-core": { "name": "@hcengineering/communication-server-core", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.6.411", + "@hcengineering/core": "0.6.429", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -144,12 +144,12 @@ }, "packages/server-ws": { "name": "@hcengineering/communication-server-ws", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/server-token": "^0.6.411", + "@hcengineering/server-token": "^0.6.429", "cors": "^2.8.5", "dotenv": "^16.4.7", "express": "^4.21.2", @@ -168,7 +168,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -185,9 +185,10 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.6", + "version": "0.1.7", "dependencies": { - "@hcengineering/core": "0.6.411", + "@hcengineering/card": "0.6.429", + "@hcengineering/core": "0.6.429", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -214,17 +215,19 @@ "@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.5", 
"", { "dependencies": { "@eslint/core": "^0.10.0", "levn": "^0.4.1" } }, "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A=="], - "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@1.11.4", "", { "dependencies": { "@formatjs/intl-localematcher": "0.2.25", "tslib": "^2.1.0" } }, "sha512-EBikYFp2JCdIfGEb5G9dyCkTGDmC57KSHhRQOC3aYxoPWVZvfWCDjZwkGYHN7Lis/fmuWl906bnNTJifDQ3sXw=="], + "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@2.3.2", "", { "dependencies": { "@formatjs/fast-memoize": "2.2.6", "@formatjs/intl-localematcher": "0.5.10", "decimal.js": "10", "tslib": "2" } }, "sha512-6sE5nyvDloULiyOMbOTJEEgWL32w+VHkZQs8S02Lnn8Y/O5aQhjOEXwWzvR7SsBE/exxlSpY2EsWZgqHbtLatg=="], - "@formatjs/fast-memoize": ["@formatjs/fast-memoize@1.2.1", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-Rg0e76nomkz3vF9IPlKeV+Qynok0r7YZjL6syLz4/urSg0IbjPZCB/iYUMNsYA643gh4mgrX3T7KEIFIxJBQeg=="], + "@formatjs/fast-memoize": ["@formatjs/fast-memoize@2.2.6", "", { "dependencies": { "tslib": "2" } }, "sha512-luIXeE2LJbQnnzotY1f2U2m7xuQNj2DA8Vq4ce1BY9ebRZaoPB1+8eZ6nXpLzsxuW5spQxr7LdCg+CApZwkqkw=="], - "@formatjs/icu-messageformat-parser": ["@formatjs/icu-messageformat-parser@2.1.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "@formatjs/icu-skeleton-parser": "1.3.6", "tslib": "^2.1.0" } }, "sha512-Qxv/lmCN6hKpBSss2uQ8IROVnta2r9jd3ymUEIjm2UyIkUCHVcbUVRGL/KS/wv7876edvsPe+hjHVJ4z8YuVaw=="], + "@formatjs/icu-messageformat-parser": ["@formatjs/icu-messageformat-parser@2.11.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "@formatjs/icu-skeleton-parser": "1.8.12", "tslib": "2" } }, "sha512-Hp81uTjjdTk3FLh/dggU5NK7EIsVWc5/ZDWrIldmf2rBuPejuZ13CZ/wpVE2SToyi4EiroPTQ1XJcJuZFIxTtw=="], - "@formatjs/icu-skeleton-parser": ["@formatjs/icu-skeleton-parser@1.3.6", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "tslib": "^2.1.0" } }, "sha512-I96mOxvml/YLrwU2Txnd4klA7V8fRhb6JG/4hm3VMNmeJo1F03IpV2L3wWt7EweqNLES59SZ4d6hVOPCSf80Bg=="], + "@formatjs/icu-skeleton-parser": ["@formatjs/icu-skeleton-parser@1.8.12", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "tslib": "2" } }, "sha512-QRAY2jC1BomFQHYDMcZtClqHR55EEnB96V7Xbk/UiBodsuFc5kujybzt87+qj1KqmJozFhk6n4KiT1HKwAkcfg=="], - "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.2.25", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-YmLcX70BxoSopLFdLr1Ds99NdlTI2oWoLbaUW2M406lxOIPzE1KQhRz2fPUkq34xVZQaihCoU29h0KK7An3bhA=="], + "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.5.10", "", { "dependencies": { "tslib": "2" } }, "sha512-af3qATX+m4Rnd9+wHcjJ4w2ijq+rAVP3CCinJQvFv1kgSu1W6jypUmvleJxcewdxmutM8dmIRZFxO/IQBZmP2Q=="], - "@hcengineering/analytics": ["@hcengineering/analytics@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.6.411/69cc5a0cf77032394306ccc44b3332756f83687c", { "dependencies": { "@hcengineering/platform": "^0.6.411" } }, "sha512-pruHZ75es4HMefJEYPWOm/bimffnF7RP+tW+f8boJMZh8hfIb33RmwTzuneToG4kdYtiU/tcIvFwvUdcILVLcQ=="], + "@hcengineering/analytics": ["@hcengineering/analytics@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.6.429/e60824bdd42d809011c95bf1cc0806e7408a70e9", { "dependencies": { "@hcengineering/platform": "^0.6.429" } }, "sha512-RZ9Scw2YZoDXcOoTv9FCktGgNXyzJl+IJ2j3/cv5y8M3LNK6YmUlZ4ZA/9+h+ASn2g4VcVstVQ5RLECeHTmPpA=="], + + "@hcengineering/card": ["@hcengineering/card@0.6.429", 
"https://npm.pkg.github.com/download/@hcengineering/card/0.6.429/320bd1a194865367557dd9560f364e9adfdd1e05", { "dependencies": { "@hcengineering/core": "^0.6.429", "@hcengineering/platform": "^0.6.429" } }, "sha512-8MHLLCw8htEGIvCfL0pLKq0AObtCVXBDzWJ4tr8hgkZiRn6mvlODvFDECWfgmis3bBVv3FbCOTBlFGxq+v0Tlw=="], "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], @@ -248,11 +251,11 @@ "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], - "@hcengineering/core": ["@hcengineering/core@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/core/0.6.411/ca7cde2060d1b081d76d163524296a37954dd810", { "dependencies": { "@hcengineering/analytics": "^0.6.411", "@hcengineering/platform": "^0.6.411", "fast-equals": "^5.0.1" } }, "sha512-dsRwiOqO4W1i4Z3dzTBOasUly2pFQnK1K6va0hsII/3f7uP/N7pYR8Lk+teTMfd9Gbv5o7CYozClXb3ntt8Jgg=="], + "@hcengineering/core": ["@hcengineering/core@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/core/0.6.429/7fec78487f3cda2d117f1fbd32b624424bf26b5d", { "dependencies": { "@hcengineering/analytics": "^0.6.429", "@hcengineering/platform": "^0.6.429", "fast-equals": "^5.2.2" } }, "sha512-RkUFIyCJnDnOWfCYwKNAL1HfCswXGc4xox0Ge+vUdkyXTSsEDvjQ5vu+jJiy9FP8cxxXZNU40GuJs/L/yrnk2w=="], - "@hcengineering/platform": ["@hcengineering/platform@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/platform/0.6.411/e511e044a9f12e4cd281922cf827b77cde1b73be", { "dependencies": { "intl-messageformat": "^9.7.1" } }, "sha512-5S6wUT9fbZsKAm/JaAEpYrRFNVwM0QfNjf22s96Ka39nTcNApxfa0j8jSINdKRf8zQN1EAIk12Lmf/0ZhKcJyw=="], + "@hcengineering/platform": ["@hcengineering/platform@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/platform/0.6.429/cc119c4d1f69e4b6e245ad62e4dec9d538c38714", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-y2BxHl+kCac26pCl0l1n2BFAIsL9aG2Na3qEo6ekOisAsepR0nqY7th1WOb2uEMK/U7LBrTPcyvKwM/tQpBWzQ=="], - "@hcengineering/server-token": ["@hcengineering/server-token@0.6.411", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.6.411/66e75ee369f988a9dae85d24d98e0dd3e2661f8b", { "dependencies": { "@hcengineering/core": "^0.6.411", "@hcengineering/platform": "^0.6.411", "jwt-simple": "^0.5.6" } }, "sha512-KHS1ET3rxIDuj+dSN3btrMQC3BZexTZZuuy0zoevdMJERD4+aM4SoWaJLgmwrcTLOBIvGJTOfMdZb1rHJ8KOOw=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.6.429/2b60e0c9b8e3b8dc2c1517491e8dae497075b4e7", { "dependencies": { "@hcengineering/core": "^0.6.429", "@hcengineering/platform": "^0.6.429", "jwt-simple": "^0.5.6" } }, "sha512-NPD3PSYjv3U+J5kfbq6xlaUmokpHthT8PiSbGGinghVK1xnKQ/pDCno82NK0xi2iDbaI7xJG4k82zp5uE6KReA=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], @@ -390,6 +393,8 @@ "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="], + "decimal.js": ["decimal.js@10.5.0", "", {}, "sha512-8vDa8Qxvr/+d94hSh5P3IJwI5t8/c0KsMp+g8bNw9cY2icONa5aPfvKeieW1WlG0WQYwwhJ7mjui2xtiePQSXw=="], + "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], "depd": ["depd@2.0.0", "", {}, 
"sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], @@ -502,7 +507,7 @@ "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], - "intl-messageformat": ["intl-messageformat@9.13.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "1.11.4", "@formatjs/fast-memoize": "1.2.1", "@formatjs/icu-messageformat-parser": "2.1.0", "tslib": "^2.1.0" } }, "sha512-7sGC7QnSQGa5LZP7bXLDhVDtQOeKGeBFGHF2Y8LVBwYZoQZCgWeKoPGTa5GMG8g/TzDgeXuYJQis7Ggiw2xTOw=="], + "intl-messageformat": ["intl-messageformat@10.7.14", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "@formatjs/fast-memoize": "2.2.6", "@formatjs/icu-messageformat-parser": "2.11.0", "tslib": "2" } }, "sha512-mMGnE4E1otdEutV5vLUdCxRJygHB5ozUBxsPB5qhitewssrS/qGruq9bmvIRkkGsNeK5ZWLfYRld18UHGTIifQ=="], "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 5b53ab7e8f2..2d706a8633a 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index 29a5e3905c1..c48319513fb 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -1,5 +1,5 @@ import { LiveQueries } from '@hcengineering/communication-query' -import type { Client } from '@hcengineering/communication-sdk-types' +import type { QueryClient } from '@hcengineering/communication-sdk-types' import { MessagesQuery, NotificationsQuery } from './query' @@ -13,7 +13,7 @@ export function createNotificationsQuery(): NotificationsQuery { return new NotificationsQuery(lq) } -export function initLiveQueries(client: Client) { +export function initLiveQueries(client: QueryClient) { if (lq != null) { lq.close() } diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 148c2f9acb1..97249a08cae 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 94b4beba79c..2980c0174f0 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index dfe6abf8c7a..0a1701fbdb8 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index 19b969a0cc5..d5c25cad89e 100644 --- a/packages/examples/package.json +++ 
b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index 2bd148635d7..2d885ac1eb9 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 19a70eba2db..b787d60d6a9 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -1,10 +1,10 @@ import { type FindMessagesParams, type FindNotificationsParams } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' import type { - Client, MessagesQueryCallback, NotificationsQueryCallback, - BroadcastEvent + BroadcastEvent, + QueryClient } from '@hcengineering/communication-sdk-types' import type { Query, QueryId } from './types' @@ -18,12 +18,12 @@ interface CreateQueryResult { const maxQueriesCache = 10 export class LiveQueries { - private readonly client: Client + private readonly client: QueryClient private readonly queries = new Map() private readonly unsubscribed = new Set() private counter: number = 0 - constructor(client: Client) { + constructor(client: QueryClient) { this.client = client } diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts index 02e74aa419e..3258756679f 100644 --- a/packages/query/src/query.ts +++ b/packages/query/src/query.ts @@ -1,5 +1,5 @@ import { Direction, type ID, SortOrder } from '@hcengineering/communication-types' -import { type BroadcastEvent, type QueryCallback, type Client } from '@hcengineering/communication-sdk-types' +import { type BroadcastEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' import { QueryResult } from './result' import { defaultQueryParams, type FindParams, type Query, type QueryId } from './types' @@ -11,7 +11,7 @@ export class BaseQuery implements Query { private backward: Promise | T[] = [] constructor( - protected readonly client: Client, + protected readonly client: QueryClient, public readonly id: QueryId, public readonly params: P, private callback?: QueryCallback, diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index de40bac425e..7e598f01b8a 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.6", + "version": "0.1.7", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts index ce851e16c86..29fbe8eb9e6 100644 --- a/packages/sdk-types/src/query.ts +++ b/packages/sdk-types/src/query.ts @@ -1,5 +1,12 @@ import type { Message, Window, Notification } from '@hcengineering/communication-types' +import type { Client } from './client' + +export type QueryClient = Pick< + Client, + 'onEvent' | 'findMessages' | 'findNotificationContexts' | 'findNotifications' | 'unsubscribeQuery' | 'close' +> + export type QueryCallback = (window: Window) => void export type MessagesQueryCallback = QueryCallback diff --git a/packages/server-core/package.json b/packages/server-core/package.json index 0e5f3e8870d..51e1b86dd78 100644 --- 
a/packages/server-core/package.json +++ b/packages/server-core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-core", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -16,7 +16,7 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/core": "0.6.411", + "@hcengineering/core": "0.6.429", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*" diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json index be65ebb48a7..41c68d7427a 100644 --- a/packages/server-ws/package.json +++ b/packages/server-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server-ws", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -16,7 +16,7 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/server-token": "^0.6.411", + "@hcengineering/server-token": "^0.6.429", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index dc8c419d486..1fb98b52056 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 4fd34511ba2..c7b3d0dbd9c 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.6", + "version": "0.1.7", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -16,7 +16,8 @@ "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/core": "0.6.411" + "@hcengineering/core": "0.6.429", + "@hcengineering/card": "0.6.429" }, "repository": { "type": "git", diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 32cfee8179b..7f264dede5b 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,8 +1,9 @@ -import type { Card, Ref, Blob } from '@hcengineering/core' +import type { Ref, Blob } from '@hcengineering/core' +import type { Card } from '@hcengineering/card' export type BlobID = Ref export type CardID = Ref -export type SocialID = string & { social: true } +export type SocialID = string export type RichText = string export type ID = string @@ -26,7 +27,7 @@ export interface MessagesGroup { card: CardID startAt: Date endAt: Date - blobId: Ref + blobId: BlobID count: number } From 75a00086cea2017ba26d1625ffea259e0822f902 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 17 Feb 2025 21:49:08 +0400 Subject: [PATCH 039/636] Change message id type, init messages_groups (#28) Signed-off-by: Kristina Fefelova --- .vscode/launch.json | 24 -- bun.lock | 314 +++++----------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-sqlite/src/client.ts | 46 +-- packages/client-ws/package.json | 2 +- packages/client-ws/src/client.ts | 34 +- packages/client-ws/src/connection.ts | 24 +- packages/cockroach/migrations/00_schema.sql | 2 + 
packages/cockroach/migrations/01_message.sql | 21 +- packages/cockroach/migrations/02_patch.sql | 20 +- .../cockroach/migrations/03_attachment.sql | 10 +- packages/cockroach/migrations/04_reaction.sql | 18 +- .../migrations/05_notification_context.sql | 3 +- .../cockroach/migrations/06_notification.sql | 8 +- packages/cockroach/package.json | 5 +- packages/cockroach/src/adapter.ts | 80 ++-- packages/cockroach/src/db/base.ts | 46 ++- packages/cockroach/src/db/message.ts | 166 ++++++--- packages/cockroach/src/db/notification.ts | 17 +- packages/cockroach/src/db/schema.ts | 122 ++++++ packages/cockroach/src/db/types.ts | 63 ---- packages/cockroach/src/db/utils.ts | 22 ++ packages/core/package.json | 28 ++ packages/core/src/index.ts | 1 + packages/core/src/message.ts | 24 ++ packages/{server-core => core}/tsconfig.json | 0 packages/examples/package.json | 2 +- packages/examples/src/index.ts | 13 +- packages/query/package.json | 2 +- packages/query/src/lq.ts | 4 +- packages/query/src/messages/query.ts | 34 +- packages/query/src/notifications/query.ts | 18 +- packages/query/src/query.ts | 4 +- packages/query/src/types.ts | 4 +- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/client.ts | 4 +- packages/sdk-types/src/db.ts | 59 +-- packages/sdk-types/src/event.ts | 238 ------------ packages/sdk-types/src/index.ts | 6 +- packages/sdk-types/src/requestEvent.ts | 146 ++++++++ packages/sdk-types/src/responseEvent.ts | 125 +++++++ packages/sdk-types/src/serverApi.ts | 18 +- packages/sdk-types/src/ws.ts | 17 - packages/server-core/src/eventProcessor.ts | 285 -------------- packages/server-ws/package.json | 35 -- packages/server-ws/src/config.ts | 29 -- packages/server-ws/src/eventProcessor.ts | 280 -------------- packages/server-ws/src/index.ts | 6 - packages/server-ws/src/main.ts | 142 ------- packages/server-ws/src/manager.ts | 209 ----------- packages/server-ws/src/server/error.ts | 8 - packages/server-ws/src/server/server.ts | 24 -- packages/server-ws/src/session.ts | 76 ---- packages/server-ws/src/triggers.ts | 115 ------ packages/server-ws/src/types.ts | 7 - packages/server-ws/src/utils/account.ts | 23 -- packages/server-ws/src/utils/logger.ts | 24 -- packages/server-ws/src/utils/serialize.ts | 41 -- packages/{server-core => server}/package.json | 6 +- packages/server/src/eventProcessor.ts | 351 ++++++++++++++++++ packages/{server-core => server}/src/index.ts | 32 +- .../{server-core => server}/src/manager.ts | 66 ++-- .../{server-core => server}/src/triggers.ts | 42 +-- packages/{server-ws => server}/tsconfig.json | 0 packages/sqlite-wasm/package.json | 2 +- packages/sqlite-wasm/src/adapter.ts | 61 ++- packages/sqlite-wasm/src/db/message.ts | 3 +- packages/types/package.json | 6 +- packages/types/src/message.ts | 17 +- packages/types/src/notification.ts | 8 +- packages/types/src/query.ts | 20 +- 72 files changed, 1507 insertions(+), 2211 deletions(-) delete mode 100644 .vscode/launch.json create mode 100644 packages/cockroach/migrations/00_schema.sql create mode 100644 packages/cockroach/src/db/schema.ts delete mode 100644 packages/cockroach/src/db/types.ts create mode 100644 packages/cockroach/src/db/utils.ts create mode 100644 packages/core/package.json create mode 100644 packages/core/src/index.ts create mode 100644 packages/core/src/message.ts rename packages/{server-core => core}/tsconfig.json (100%) delete mode 100644 packages/sdk-types/src/event.ts create mode 100644 packages/sdk-types/src/requestEvent.ts create mode 100644 packages/sdk-types/src/responseEvent.ts delete mode 
100644 packages/sdk-types/src/ws.ts delete mode 100644 packages/server-core/src/eventProcessor.ts delete mode 100644 packages/server-ws/package.json delete mode 100644 packages/server-ws/src/config.ts delete mode 100644 packages/server-ws/src/eventProcessor.ts delete mode 100644 packages/server-ws/src/index.ts delete mode 100644 packages/server-ws/src/main.ts delete mode 100644 packages/server-ws/src/manager.ts delete mode 100644 packages/server-ws/src/server/error.ts delete mode 100644 packages/server-ws/src/server/server.ts delete mode 100644 packages/server-ws/src/session.ts delete mode 100644 packages/server-ws/src/triggers.ts delete mode 100644 packages/server-ws/src/types.ts delete mode 100644 packages/server-ws/src/utils/account.ts delete mode 100644 packages/server-ws/src/utils/logger.ts delete mode 100644 packages/server-ws/src/utils/serialize.ts rename packages/{server-core => server}/package.json (86%) create mode 100644 packages/server/src/eventProcessor.ts rename packages/{server-core => server}/src/index.ts (59%) rename packages/{server-core => server}/src/manager.ts (78%) rename packages/{server-core => server}/src/triggers.ts (75%) rename packages/{server-ws => server}/tsconfig.json (100%) diff --git a/.vscode/launch.json b/.vscode/launch.json deleted file mode 100644 index 65434e460a5..00000000000 --- a/.vscode/launch.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "version": "0.2.0", - "configurations": [ - { - "type": "node", - "request": "launch", - "name": "Debug Server", - "env": { - "DB_URL": "postgresql://root@127.0.0.1:26257/my_database?sslmode=disable", - "ACCOUNTS_URL": "http://localhost:3000", - "SECRET": "secret" - }, - "runtimeExecutable": "bun", - "runtimeArgs": ["run"], - "args": ["src/index.ts"], - "cwd": "${workspaceFolder}/packages/server", - "protocol": "inspector", - "runtimeVersion": "20", - "showAsyncStacks": true, - "outputCapture": "std", - "sourceMaps": true - } - ] -} diff --git a/bun.lock b/bun.lock index b7b027bcb24..4fe1f65a2e4 100644 --- a/bun.lock +++ b/bun.lock @@ -65,6 +65,7 @@ "name": "@hcengineering/communication-cockroach", "version": "0.1.7", "dependencies": { + "@hcengineering/communication-core": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", "pg": "8.12.0", @@ -78,6 +79,19 @@ "typescript": "^5.6.3", }, }, + "packages/core": { + "name": "@hcengineering/communication-core", + "version": "0.1.7", + "dependencies": { + "@hcengineering/communication-types": "workspace:*", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, "packages/examples": { "name": "@hcengineering/communication-examples", "version": "0.1.7", @@ -123,38 +137,14 @@ "typescript": "^5.6.3", }, }, - "packages/server-core": { - "name": "@hcengineering/communication-server-core", + "packages/server": { + "name": "@hcengineering/communication-server", "version": "0.1.7", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.6.429", - }, - "devDependencies": { - "@types/bun": "^1.1.14", - "@types/cors": "^2.8.17", - "@types/express": "^5.0.0", - "@types/ws": "^8.5.13", - }, - "peerDependencies": { - "typescript": "^5.6.3", - }, - }, - "packages/server-ws": { - "name": "@hcengineering/communication-server-ws", - "version": "0.1.7", - "dependencies": { - 
"@hcengineering/communication-cockroach": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-types": "workspace:*", - "@hcengineering/server-token": "^0.6.429", - "cors": "^2.8.5", - "dotenv": "^16.4.7", - "express": "^4.21.2", - "msgpackr": "^1.11.2", - "ws": "^8.18.0", + "@hcengineering/core": "0.7.2", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -187,8 +177,8 @@ "name": "@hcengineering/communication-types", "version": "0.1.7", "dependencies": { - "@hcengineering/card": "0.6.429", - "@hcengineering/core": "0.6.429", + "@hcengineering/card": "0.7.2", + "@hcengineering/core": "0.7.2", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -199,35 +189,39 @@ }, }, "packages": { + "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="], + + "@babel/runtime": ["@babel/runtime@7.26.7", "", { "dependencies": { "regenerator-runtime": "^0.14.0" } }, "sha512-AOPI3D+a8dXnja+iwsUqGRjr1BbZIe771sXdapOtYI531gSqpi92vXivKcq2asu/DFpdl1ceFAKZyRzK2PCVcQ=="], + "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA=="], "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], "@eslint/config-array": ["@eslint/config-array@0.19.2", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w=="], - "@eslint/core": ["@eslint/core@0.10.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw=="], + "@eslint/core": ["@eslint/core@0.11.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-DWUB2pksgNEb6Bz2fggIy1wh6fGgZP4Xyy/Mt0QZPiloKKXerbqq9D3SBQTlCRYOrcRPu4vuz+CGjwdfqxnoWA=="], "@eslint/eslintrc": ["@eslint/eslintrc@3.2.0", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w=="], - "@eslint/js": ["@eslint/js@9.19.0", "", {}, "sha512-rbq9/g38qjfqFLOVPvwjIvFFdNziEC5S65jmjPw5r6A//QH+W91akh9irMwjDN8zKUTak6W9EsAv4m/7Wnw0UQ=="], + "@eslint/js": ["@eslint/js@9.20.0", "", {}, "sha512-iZA07H9io9Wn836aVTytRaNqh00Sad+EamwOVJT12GTLw1VGMFV/4JaME+JjLtr9fiGaoWgYnS54wrfWsSs4oQ=="], "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], "@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.5", "", { "dependencies": { "@eslint/core": "^0.10.0", "levn": "^0.4.1" } }, "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A=="], - "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@2.3.2", "", { "dependencies": { "@formatjs/fast-memoize": "2.2.6", 
"@formatjs/intl-localematcher": "0.5.10", "decimal.js": "10", "tslib": "2" } }, "sha512-6sE5nyvDloULiyOMbOTJEEgWL32w+VHkZQs8S02Lnn8Y/O5aQhjOEXwWzvR7SsBE/exxlSpY2EsWZgqHbtLatg=="], + "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@2.3.3", "", { "dependencies": { "@formatjs/fast-memoize": "2.2.6", "@formatjs/intl-localematcher": "0.6.0", "decimal.js": "10", "tslib": "2" } }, "sha512-pJT1OkhplSmvvr6i3CWTPvC/FGC06MbN5TNBfRO6Ox62AEz90eMq+dVvtX9Bl3jxCEkS0tATzDarRZuOLw7oFg=="], "@formatjs/fast-memoize": ["@formatjs/fast-memoize@2.2.6", "", { "dependencies": { "tslib": "2" } }, "sha512-luIXeE2LJbQnnzotY1f2U2m7xuQNj2DA8Vq4ce1BY9ebRZaoPB1+8eZ6nXpLzsxuW5spQxr7LdCg+CApZwkqkw=="], - "@formatjs/icu-messageformat-parser": ["@formatjs/icu-messageformat-parser@2.11.0", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "@formatjs/icu-skeleton-parser": "1.8.12", "tslib": "2" } }, "sha512-Hp81uTjjdTk3FLh/dggU5NK7EIsVWc5/ZDWrIldmf2rBuPejuZ13CZ/wpVE2SToyi4EiroPTQ1XJcJuZFIxTtw=="], + "@formatjs/icu-messageformat-parser": ["@formatjs/icu-messageformat-parser@2.11.1", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.3", "@formatjs/icu-skeleton-parser": "1.8.13", "tslib": "2" } }, "sha512-o0AhSNaOfKoic0Sn1GkFCK4MxdRsw7mPJ5/rBpIqdvcC7MIuyUSW8WChUEvrK78HhNpYOgqCQbINxCTumJLzZA=="], - "@formatjs/icu-skeleton-parser": ["@formatjs/icu-skeleton-parser@1.8.12", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "tslib": "2" } }, "sha512-QRAY2jC1BomFQHYDMcZtClqHR55EEnB96V7Xbk/UiBodsuFc5kujybzt87+qj1KqmJozFhk6n4KiT1HKwAkcfg=="], + "@formatjs/icu-skeleton-parser": ["@formatjs/icu-skeleton-parser@1.8.13", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.3", "tslib": "2" } }, "sha512-N/LIdTvVc1TpJmMt2jVg0Fr1F7Q1qJPdZSCs19unMskCmVQ/sa0H9L8PWt13vq+gLdLg1+pPsvBLydL1Apahjg=="], - "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.5.10", "", { "dependencies": { "tslib": "2" } }, "sha512-af3qATX+m4Rnd9+wHcjJ4w2ijq+rAVP3CCinJQvFv1kgSu1W6jypUmvleJxcewdxmutM8dmIRZFxO/IQBZmP2Q=="], + "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.6.0", "", { "dependencies": { "tslib": "2" } }, "sha512-4rB4g+3hESy1bHSBG3tDFaMY2CH67iT7yne1e+0CLTsGLDcmoEWWpJjjpWVaYgYfYuohIRuo0E+N536gd2ZHZA=="], - "@hcengineering/analytics": ["@hcengineering/analytics@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.6.429/e60824bdd42d809011c95bf1cc0806e7408a70e9", { "dependencies": { "@hcengineering/platform": "^0.6.429" } }, "sha512-RZ9Scw2YZoDXcOoTv9FCktGgNXyzJl+IJ2j3/cv5y8M3LNK6YmUlZ4ZA/9+h+ASn2g4VcVstVQ5RLECeHTmPpA=="], + "@hcengineering/analytics": ["@hcengineering/analytics@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.2/6169c108c11d7903a30eca0e09dfc79facdc1170", { "dependencies": { "@hcengineering/platform": "^0.7.2" } }, "sha512-2+6O2f3yrrtho0SXVSTd44t2RFOdeR6v9IXimYhyvy+vQaj6i5W2i5rGNyxjS7wZj7u/ft/LwcusbL+yvR7rQA=="], - "@hcengineering/card": ["@hcengineering/card@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/card/0.6.429/320bd1a194865367557dd9560f364e9adfdd1e05", { "dependencies": { "@hcengineering/core": "^0.6.429", "@hcengineering/platform": "^0.6.429" } }, "sha512-8MHLLCw8htEGIvCfL0pLKq0AObtCVXBDzWJ4tr8hgkZiRn6mvlODvFDECWfgmis3bBVv3FbCOTBlFGxq+v0Tlw=="], + "@hcengineering/card": ["@hcengineering/card@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.2/f4f615f288a91cc875a6c20d75f8383010328003", { "dependencies": { "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", 
"@hcengineering/ui": "^0.7.2" } }, "sha512-e1H1Q6ncH+wyXXD9nLNjADd+6TFaPBuWywuP2gLPYubHPlJyeB7KVCIt81H+3axKTTDLCMjKqWpBXKHawcK5pQ=="], "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], @@ -237,25 +231,27 @@ "@hcengineering/communication-cockroach": ["@hcengineering/communication-cockroach@workspace:packages/cockroach"], + "@hcengineering/communication-core": ["@hcengineering/communication-core@workspace:packages/core"], + "@hcengineering/communication-examples": ["@hcengineering/communication-examples@workspace:packages/examples"], "@hcengineering/communication-query": ["@hcengineering/communication-query@workspace:packages/query"], "@hcengineering/communication-sdk-types": ["@hcengineering/communication-sdk-types@workspace:packages/sdk-types"], - "@hcengineering/communication-server-core": ["@hcengineering/communication-server-core@workspace:packages/server-core"], - - "@hcengineering/communication-server-ws": ["@hcengineering/communication-server-ws@workspace:packages/server-ws"], + "@hcengineering/communication-server": ["@hcengineering/communication-server@workspace:packages/server"], "@hcengineering/communication-sqlite-wasm": ["@hcengineering/communication-sqlite-wasm@workspace:packages/sqlite-wasm"], "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], - "@hcengineering/core": ["@hcengineering/core@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/core/0.6.429/7fec78487f3cda2d117f1fbd32b624424bf26b5d", { "dependencies": { "@hcengineering/analytics": "^0.6.429", "@hcengineering/platform": "^0.6.429", "fast-equals": "^5.2.2" } }, "sha512-RkUFIyCJnDnOWfCYwKNAL1HfCswXGc4xox0Ge+vUdkyXTSsEDvjQ5vu+jJiy9FP8cxxXZNU40GuJs/L/yrnk2w=="], + "@hcengineering/core": ["@hcengineering/core@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.2/4c8d66ef6468b2ed9d4d43857b11a78f6e71d48f", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/platform": "^0.7.2", "fast-equals": "^5.2.2" } }, "sha512-K8wg4c3hkn98wo/C+lS9z3QD35lIgUc0APpzOE0La6XFeV06H4Iztgvd4xR/IgVGRX3tFbyycsJcSgQXj/Z/Kw=="], - "@hcengineering/platform": ["@hcengineering/platform@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/platform/0.6.429/cc119c4d1f69e4b6e245ad62e4dec9d538c38714", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-y2BxHl+kCac26pCl0l1n2BFAIsL9aG2Na3qEo6ekOisAsepR0nqY7th1WOb2uEMK/U7LBrTPcyvKwM/tQpBWzQ=="], + "@hcengineering/platform": ["@hcengineering/platform@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.2/854c94ef4607aba2306ec92a32f41cce30139de5", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-RtUWhC0MPjFABojyOSGoOCLZGPVKiZkfZB9n+7Mwbc7tBznVQKeXBBOVUq6QpC50cLyx7ce8WVJBNVIpggwaeQ=="], - "@hcengineering/server-token": ["@hcengineering/server-token@0.6.429", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.6.429/2b60e0c9b8e3b8dc2c1517491e8dae497075b4e7", { "dependencies": { "@hcengineering/core": "^0.6.429", "@hcengineering/platform": "^0.6.429", "jwt-simple": "^0.5.6" } }, "sha512-NPD3PSYjv3U+J5kfbq6xlaUmokpHthT8PiSbGGinghVK1xnKQ/pDCno82NK0xi2iDbaI7xJG4k82zp5uE6KReA=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.2/a6f832fd9ddbb3e5683f361e54d8bb4f3e096be8", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/platform": "^0.7.2", "svelte": "^4.2.19" } }, 
"sha512-Sqrt6ETmBKGk8SiSD/gvfitqP1GUaSpfiGy4I7xaAgXuyeWENni+YJ9x4C+9yTWT7caRnKu5WKbv/zN1PpWuqg=="], + + "@hcengineering/ui": ["@hcengineering/ui@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.2/40ec34d2d251af4ad2b8a26aa89c12894cbe5eda", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "@hcengineering/theme": "^0.7.2", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emoji-regex": "^10.1.0", "fast-equals": "^5.2.2", "svelte": "^4.2.19" } }, "sha512-5t6JFO65Tm6iZqveEvBjfTM8DDZ9ca9uBXj7GbtfTD/1K8WC1Yf3REsNDW2I9DFYzxKPBayx9M4TRrJBseHlWg=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], @@ -265,19 +261,17 @@ "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.1", "", {}, "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA=="], - "@msgpack/msgpack": ["@msgpack/msgpack@3.0.0-beta4", "", {}, "sha512-pIHfHZefX0jX8enNRdkwc08TXoRdBm/gLrc09M9374bkvlRJMxs2jbek0MKb4o7msDQEhIpGv3lfJWjBh/CuQQ=="], - - "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], + "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], - "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], + "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], - "@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="], + "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], - "@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="], + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], - "@msgpackr-extract/msgpackr-extract-linux-x64": ["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="], + "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], - 
"@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="], + "@msgpack/msgpack": ["@msgpack/msgpack@3.0.1", "", {}, "sha512-9qysoVTITLcOFIIJeXbdtUgvvY25ojUp+WWfLc0O4H4KKWeamUNAqkjS5mej/PnVDnH70llWKNa7pzv5U4TqVQ=="], "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], @@ -311,7 +305,7 @@ "@types/mime": ["@types/mime@1.3.5", "", {}, "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="], - "@types/node": ["@types/node@22.13.1", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-jK8uzQlrvXqEU91UxiK5J7pKHyzgnI1Qnl0QDHIgVGuolJhRb9EEl28Cj9b3rGR8B2lhFCtvIm5os8lFnO/1Ew=="], + "@types/node": ["@types/node@22.13.4", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-ywP2X0DYtX3y08eFVx5fNIw7/uIv8hYUKgXoK8oayJlLnKcRfEYCxWMVE1XagUdVtCJlZT1AU4LXEABW+L1Peg=="], "@types/qs": ["@types/qs@6.9.18", "", {}, "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA=="], @@ -321,25 +315,25 @@ "@types/serve-static": ["@types/serve-static@1.15.7", "", { "dependencies": { "@types/http-errors": "*", "@types/node": "*", "@types/send": "*" } }, "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw=="], - "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.23.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/type-utils": "8.23.0", "@typescript-eslint/utils": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-vBz65tJgRrA1Q5gWlRfvoH+w943dq9K1p1yDBY2pc+a1nbBLZp7fB9+Hk8DaALUbzjqlMfgaqlVPT1REJdkt/w=="], + "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.23.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/types": "8.23.0", "@typescript-eslint/typescript-estree": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-h2lUByouOXFAlMec2mILeELUbME5SZRN/7R9Cw2RD2lRQQY08MWMM+PmVVKKJNK1aIwqTo9t/0CvOxwPbRIE2Q=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.24.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/type-utils": "8.24.0", "@typescript-eslint/utils": "8.24.0", 
"@typescript-eslint/visitor-keys": "8.24.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-aFcXEJJCI4gUdXgoo/j9udUYIHgF23MFkg09LFz2dzEmU0+1Plk4rQWv/IYKvPHAtlkkGoB3m5e6oUp+JPsNaQ=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0" } }, "sha512-OGqo7+dXHqI7Hfm+WqkZjKjsiRtFUQHPdGMXzk5mYXhJUedO7e/Y7i8AK3MyLMgZR93TX4bIzYrfyVjLC+0VSw=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.24.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/types": "8.24.0", "@typescript-eslint/typescript-estree": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-MFDaO9CYiard9j9VepMNa9MTcqVvSny2N4hkY6roquzj8pdCBRENhErrteaQuu7Yjn1ppk0v1/ZF9CG3KIlrTA=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.23.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.23.0", "@typescript-eslint/utils": "8.23.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-iIuLdYpQWZKbiH+RkCGc6iu+VwscP5rCtQ1lyQ7TYuKLrcZoeJVpcLiG8DliXVkUxirW/PWlmS+d6yD51L9jvA=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0" } }, "sha512-HZIX0UByphEtdVBKaQBgTDdn9z16l4aTUz8e8zPQnyxwHBtf5vtl1L+OhH+m1FGV9DrRmoDuYKqzVrvWDcDozw=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.23.0", "", {}, "sha512-1sK4ILJbCmZOTt9k4vkoulT6/y5CHJ1qUYxqpF1K/DBAd8+ZUL4LlSCxOssuH5m4rUaaN0uS0HlVPvd45zjduQ=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.24.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.24.0", "@typescript-eslint/utils": "8.24.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-8fitJudrnY8aq0F1wMiPM1UUgiXQRJ5i8tFjq9kGfRajU+dbPyOuHbl0qRopLEidy0MwqgTHDt6CnSeXanNIwA=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "@typescript-eslint/visitor-keys": "8.23.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-LcqzfipsB8RTvH8FX24W4UUFk1bl+0yTOf9ZA08XngFwMg4Kj8A+9hwz8Cr/ZS4KwHrmo9PJiLZkOt49vPnuvQ=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.24.0", "", {}, "sha512-VacJCBTyje7HGAw7xp11q439A+zeGG0p0/p2zsZwpnMzjPB5WteaWqt4g2iysgGFafrqvyLWqq6ZPZAOCoefCw=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.23.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.23.0", "@typescript-eslint/types": "8.23.0", "@typescript-eslint/typescript-estree": "8.23.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, 
"sha512-uB/+PSo6Exu02b5ZEiVtmY6RVYO7YU5xqgzTIVZwTHvvK3HsL8tZZHFaTLFtRG3CsV4A5mhOv+NZx5BlhXPyIA=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ITjYcP0+8kbsvT9bysygfIfb+hBj6koDsu37JZG7xrCiy3fPJyNmfVtaGsgTUSEuTzcvME5YI5uyL5LD1EV5ZQ=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.23.0", "", { "dependencies": { "@typescript-eslint/types": "8.23.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-oWWhcWDLwDfu++BGTZcmXWqpwtkwb5o7fxUIGksMQQDSdPW9prsSnfIOZMlsj4vBOSrcnjIUZMiIjODgGosFhQ=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.24.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/types": "8.24.0", "@typescript-eslint/typescript-estree": "8.24.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-07rLuUBElvvEb1ICnafYWr4hk8/U7X9RDCOqd9JcAMtjh/9oRmcfN4yGzbPVirgMR0+HLVHehmu19CWeh7fsmQ=="], - "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-kArLq83QxGLbuHrTMoOEWO+l2MwsNS2TGISEdx8xgqpkbytB07XmlQyQdNDrCc1ecSqx0cnmhGvpX+VBwqqSkg=="], "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], @@ -351,11 +345,13 @@ "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - "array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="], + "aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="], - "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "autolinker": ["autolinker@4.0.0", "", { "dependencies": { "tslib": "^2.3.0" } }, "sha512-fl5Kh6BmEEZx+IWBfEirnRUU5+cOiV0OK7PEt0RBKvJMJ8GaRseIOeDU3FKf4j3CE5HVefcjHmhYPOcaVt0bZw=="], - "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], + "axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="], + + "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], "brace-expansion": ["brace-expansion@1.1.11", "", { 
"dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], @@ -363,33 +359,25 @@ "bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="], - "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], - - "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="], - - "call-bound": ["call-bound@1.0.3", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "get-intrinsic": "^1.2.6" } }, "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA=="], - "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], + "code-red": ["code-red@1.0.4", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.4.15", "@types/estree": "^1.0.1", "acorn": "^8.10.0", "estree-walker": "^3.0.3", "periscopic": "^3.1.0" } }, "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw=="], + "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], - "content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="], - - "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], - - "cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="], + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], - "cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="], + "css-tree": ["css-tree@2.3.1", "", { "dependencies": { "mdn-data": "2.0.30", "source-map-js": "^1.0.1" } }, "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw=="], - "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], + "date-fns": ["date-fns@2.30.0", "", { "dependencies": { "@babel/runtime": "^7.21.0" } }, 
"sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw=="], - "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], + "date-fns-tz": ["date-fns-tz@2.0.1", "", { "peerDependencies": { "date-fns": "2.x" } }, "sha512-fJCG3Pwx8HUoLhkepdsP7Z5RsucUi+ZBOxyM5d0ZZ6c4SdYustq0VMmOu6Wf7bli+yS/Jwp91TOCqn9jMcVrUA=="], "debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="], @@ -397,31 +385,13 @@ "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], - "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], - - "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], - - "detect-libc": ["detect-libc@2.0.3", "", {}, "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw=="], - - "dotenv": ["dotenv@16.4.7", "", {}, "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ=="], - - "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - - "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + "dompurify": ["dompurify@3.2.4", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg=="], - "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], - - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], - - "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], - - "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], - - "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + "emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.19.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.0", "@eslint/core": "^0.10.0", "@eslint/eslintrc": "^3.2.0", "@eslint/js": "9.19.0", "@eslint/plugin-kit": "^0.2.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.1", "@types/estree": "^1.0.6", 
"@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-ug92j0LepKlbbEv6hD911THhoRHmbdXt2gX+VDABAW/Ir7D3nqKdv5Pf5vtlyY6HQMTEP2skXY43ueqTCWssEA=="], + "eslint": ["eslint@9.20.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.0", "@eslint/core": "^0.11.0", "@eslint/eslintrc": "^3.2.0", "@eslint/js": "9.20.0", "@eslint/plugin-kit": "^0.2.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.1", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-m1mM33o6dBUjxl2qb6wv6nGNwCAsns1eKtaQ4l/NPHeTvhiUPbtdfMyktxN4B3fgHIgsYh1VT3V9txblpQHq+g=="], "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], @@ -439,11 +409,9 @@ "estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], - "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], - - "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], + "estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g=="], - "express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", 
"serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="], + "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], @@ -463,53 +431,29 @@ "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], - "finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="], - "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], "flatted": ["flatted@3.3.2", "", {}, "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA=="], - "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], - - "fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="], - - "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - - "get-intrinsic": ["get-intrinsic@1.2.7", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "get-proto": "^1.0.0", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA=="], - - "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], - "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], "globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], - "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], - "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], - "has-symbols": ["has-symbols@1.1.0", "", {}, 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], - - "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], - - "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], - - "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], - "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], - - "intl-messageformat": ["intl-messageformat@10.7.14", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.2", "@formatjs/fast-memoize": "2.2.6", "@formatjs/icu-messageformat-parser": "2.11.0", "tslib": "2" } }, "sha512-mMGnE4E1otdEutV5vLUdCxRJygHB5ozUBxsPB5qhitewssrS/qGruq9bmvIRkkGsNeK5ZWLfYRld18UHGTIifQ=="], + "inherits": ["inherits@2.0.3", "", {}, "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="], - "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], + "intl-messageformat": ["intl-messageformat@10.7.15", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.3", "@formatjs/fast-memoize": "2.2.6", "@formatjs/icu-messageformat-parser": "2.11.1", "tslib": "2" } }, "sha512-LRyExsEsefQSBjU2p47oAheoKz+EOJxSLDdjOaEjdriajfHsMXOmV/EhMvYSg9bAgCUHasuAC+mcUBe/95PfIg=="], "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], @@ -517,6 +461,8 @@ "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], + "is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="], + "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], @@ -527,54 +473,30 @@ "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], - "jwt-simple": ["jwt-simple@0.5.6", "", {}, "sha512-40aUybvhH9t2h71ncA1/1SbtTNCVZHgsTsTgqPUxGWDmUDrXyDf2wMNQKEbdBjbf4AI+fQhbECNTV6lWxQKUzg=="], - 
"keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], + "locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="], + "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], - "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="], - - "merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="], + "mdn-data": ["mdn-data@2.0.30", "", {}, "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="], "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], - "methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="], - "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], - "mime": ["mime@1.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="], - - "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], - - "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], - "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], - "msgpackr": ["msgpackr@1.11.2", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-F9UngXRlPyWCDEASDpTf6c9uNhGPTqnTeLVt7bN+bU1eajoR/8V9ys2BRaV5C/e5ihE6sJ9uPIKaYt6bFuO32g=="], - - "msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": 
"3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="], - "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], - "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], - - "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": "^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="], - - "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], - - "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], - - "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], - "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], "p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], @@ -583,15 +505,13 @@ "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], - "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], - "path": ["path@0.12.7", "", { "dependencies": { "process": "^0.11.1", "util": "^0.10.3" } }, "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q=="], "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], - "path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="], + "periscopic": ["periscopic@3.1.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^3.0.0", "is-reference": "^3.0.0" } }, "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw=="], "pg": ["pg@8.12.0", "", { "dependencies": { "pg-connection-string": "^2.6.4", "pg-pool": "^3.6.2", "pg-protocol": "^1.6.1", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.1.1" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, 
"sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ=="], @@ -601,9 +521,9 @@ "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], - "pg-pool": ["pg-pool@3.7.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-ZOBQForurqh4zZWjrgSwwAtzJ7QiRX0ovFkZr2klsen3Nm0aoh33Ls0fzfv3imeH/nw/O27cjdz5kzYJfeGp/g=="], + "pg-pool": ["pg-pool@3.7.1", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-xIOsFoh7Vdhojas6q3596mXFsR8nwBQBXX5JiV7p9buEVAGqYL4yFzclON5P9vFrpu1u7Zwl2oriyDa89n0wbw=="], - "pg-protocol": ["pg-protocol@1.7.0", "", {}, "sha512-hTK/mE36i8fDDhgDFjy6xNOG+LCorxLG3WO17tku+ij6sVHXh1jQUJ8hYAnRhNla4QVD2H8er/FOjc/+EgC6yQ=="], + "pg-protocol": ["pg-protocol@1.7.1", "", {}, "sha512-gjTHWGYWsEgy9MsY0Gp6ZJxV24IjDqdpTW7Eh0x+WfJLFsm/TJx1MzL6T0D88mBvkpxotCQ6TwW6N+Kko7lhgQ=="], "pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], @@ -623,23 +543,17 @@ "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], - "prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="], + "prettier": ["prettier@3.5.1", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw=="], "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], - "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], - "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], - "qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="], - "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], - "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], - - "raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="], + "regenerator-runtime": ["regenerator-runtime@0.14.1", "", {}, "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="], "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], @@ -647,108 +561,62 @@ "run-parallel": ["run-parallel@1.2.0", "", { 
"dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], - "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], - - "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], - "semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="], - "send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="], - - "serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="], - - "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], - "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], - "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], - - "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], - - "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], - - "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], - "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], - "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], 
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], + "svelte": ["svelte@4.2.19", "", { "dependencies": { "@ampproject/remapping": "^2.2.1", "@jridgewell/sourcemap-codec": "^1.4.15", "@jridgewell/trace-mapping": "^0.3.18", "@types/estree": "^1.0.1", "acorn": "^8.9.0", "aria-query": "^5.3.0", "axobject-query": "^4.0.0", "code-red": "^1.0.3", "css-tree": "^2.3.1", "estree-walker": "^3.0.3", "is-reference": "^3.0.1", "locate-character": "^3.0.0", "magic-string": "^0.30.4", "periscopic": "^3.1.0" } }, "sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw=="], + "synckit": ["synckit@0.9.2", "", { "dependencies": { "@pkgr/core": "^0.1.0", "tslib": "^2.6.2" } }, "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw=="], "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], - "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], - "ts-api-utils": ["ts-api-utils@2.0.1", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w=="], "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], - "type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="], - "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="], - "typescript-eslint": ["typescript-eslint@8.23.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.23.0", "@typescript-eslint/parser": "8.23.0", "@typescript-eslint/utils": "8.23.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-/LBRo3HrXr5LxmrdYSOCvoAMm7p2jNizNfbIpCgvG4HMsnoprRUOce/+8VJ9BDYWW68rqIENE/haVLWPeFZBVQ=="], + "typescript-eslint": ["typescript-eslint@8.24.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.24.0", "@typescript-eslint/parser": "8.24.0", "@typescript-eslint/utils": "8.24.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-/lmv4366en/qbB32Vz5+kCNZEMf6xYHwh1z48suBwZvAtnXKbP+YhGe8OLE2BqC67LMqKkCNLtjejdwsdW6uOQ=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], - "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], "util": ["util@0.10.4", "", { "dependencies": { "inherits": "2.0.3" } }, 
"sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A=="], - "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], - "uuid": ["uuid@11.0.5", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA=="], - "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], - "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - "ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="], - "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], + "@eslint/plugin-kit/@eslint/core": ["@eslint/core@0.10.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw=="], + "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - "body-parser/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "express/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - "finalhandler/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "send/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], - - "util/inherits": ["inherits@2.0.3", "", {}, "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="], - 
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "express/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "finalhandler/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "send/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 2d706a8633a..60168a7e13e 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 97249a08cae..2866014d61b 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 29f8624bf35..2dc53627e99 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -12,32 +12,33 @@ import { type FindNotificationsParams, type Notification, type Attachment, - type Reaction + type Reaction, + type WorkspaceID } from '@hcengineering/communication-types' import { type Client, type MessageCreatedEvent, type DbAdapter, - EventType, - type BroadcastEvent + type ResponseEvent, + ResponseEventType } from '@hcengineering/communication-sdk-types' import { createDbAdapter as createSqliteDbAdapter } from '@hcengineering/communication-sqlite-wasm' class DbClient implements Client { - onEvent: (event: BroadcastEvent) => void = () => {} + onEvent: (event: ResponseEvent) => void = () => {} constructor( private readonly db: DbAdapter, - private readonly workspace: string, - private readonly personalWorkspace: string + private readonly workspace: WorkspaceID, + private readonly personalWorkspace: WorkspaceID ) {} async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { const created = new Date() - const id = await this.db.createMessage(this.workspace, card, content, creator, created) + const id = await this.db.createMessage(card, content, creator, created) const event: MessageCreatedEvent = { - type: EventType.MessageCreated, + type: ResponseEventType.MessageCreated, message: { id, card, @@ -56,32 +57,32 @@ class DbClient implements Client { } async removeMessage(card: CardID, message: MessageID) { - await this.db.removeMessage(message) - this.onEvent({ type: EventType.MessageRemoved, message, card }) + await this.db.removeMessage(card, message) + this.onEvent({ type: ResponseEventType.MessageRemoved, message, card }) } async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): 
Promise { const created = new Date() - await this.db.createPatch(message, content, creator, created) - this.onEvent({ type: EventType.PatchCreated, card, patch: { message, content, creator, created } }) + await this.db.createPatch(card, message, content, creator, created) + this.onEvent({ type: ResponseEventType.PatchCreated, card, patch: { message, content, creator, created } }) } async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const created = new Date() - await this.db.createReaction(message, reaction, creator, created) - this.onEvent({ type: EventType.ReactionCreated, card, reaction: { message, reaction, creator, created } }) + await this.db.createReaction(card, message, reaction, creator, created) + this.onEvent({ type: ResponseEventType.ReactionCreated, card, reaction: { message, reaction, creator, created } }) } async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - await this.db.removeReaction(message, reaction, creator) - this.onEvent({ type: EventType.ReactionRemoved, card, message, reaction, creator }) + await this.db.removeReaction(card, message, reaction, creator) + this.onEvent({ type: ResponseEventType.ReactionRemoved, card, message, reaction, creator }) } async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { const created = new Date() await this.db.createAttachment(message, card, creator, created) this.onEvent({ - type: EventType.AttachmentCreated, + type: ResponseEventType.AttachmentCreated, card, attachment: { message, card: attachment, creator, created } }) @@ -89,11 +90,11 @@ class DbClient implements Client { async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { await this.db.removeAttachment(message, card) - this.onEvent({ type: EventType.AttachmentRemoved, message, card, attachment }) + this.onEvent({ type: ResponseEventType.AttachmentRemoved, message, card, attachment }) } async findMessages(params: FindMessagesParams): Promise { - const rawMessages = await this.db.findMessages(this.workspace, params) + const rawMessages = await this.db.findMessages(params) return rawMessages.map((it) => this.toMessage(it)) } @@ -131,6 +132,7 @@ class DbClient implements Client { created: new Date(raw.created) } } + async createNotification(message: MessageID, context: ContextID): Promise { await this.db.createNotification(message, context) } @@ -140,7 +142,7 @@ class DbClient implements Client { } async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - return await this.db.createContext(this.personalWorkspace, this.workspace, card, lastView, lastUpdate) + return await this.db.createContext(this.personalWorkspace, card, lastView, lastUpdate) } async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { @@ -171,8 +173,8 @@ class DbClient implements Client { } export async function getSqliteClient( - workspace: string, - personalWorkspace: string, + workspace: WorkspaceID, + personalWorkspace: WorkspaceID, dbUrl = 'file:communication.sqlite3?vfs=opfs' ): Promise { const db = await createSqliteDbAdapter(dbUrl) diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 2980c0174f0..5b015e5a24e 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.7", + "version": "0.1.8", "main": 
"dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index 6af6606df14..74d28284371 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -15,7 +15,7 @@ import { type SocialID } from '@hcengineering/communication-types' import { - type BroadcastEvent, + RequestEventType, type Client, type CreateAttachmentEvent, type CreateMessageEvent, @@ -25,14 +25,14 @@ import { type CreateNotificationEvent, type CreatePatchEvent, type CreateReactionEvent, - type Event, type EventResult, - EventType, type RemoveAttachmentEvent, type RemoveMessageEvent, type RemoveNotificationContextEvent, type RemoveNotificationEvent, type RemoveReactionEvent, + type RequestEvent, + type ResponseEvent, type UpdateNotificationContextEvent } from '@hcengineering/communication-sdk-types' @@ -41,7 +41,7 @@ import { WebSocketConnection } from './connection' class WsClient implements Client { private readonly ws: WebSocketConnection - onEvent: (event: BroadcastEvent) => void = () => {} + onEvent: (event: ResponseEvent) => void = () => {} constructor( private readonly url: string, @@ -57,7 +57,7 @@ class WsClient implements Client { async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { const event: CreateMessageEvent = { - type: EventType.CreateMessage, + type: RequestEventType.CreateMessage, card, content, creator @@ -68,7 +68,7 @@ class WsClient implements Client { async removeMessage(card: CardID, message: MessageID): Promise { const event: RemoveMessageEvent = { - type: EventType.RemoveMessage, + type: RequestEventType.RemoveMessage, card, message } @@ -77,7 +77,7 @@ class WsClient implements Client { async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { const event: CreatePatchEvent = { - type: EventType.CreatePatch, + type: RequestEventType.CreatePatch, card, message, content, @@ -88,7 +88,7 @@ class WsClient implements Client { async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: CreateReactionEvent = { - type: EventType.CreateReaction, + type: RequestEventType.CreateReaction, card, message, reaction, @@ -99,7 +99,7 @@ class WsClient implements Client { async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { const event: RemoveReactionEvent = { - type: EventType.RemoveReaction, + type: RequestEventType.RemoveReaction, card, message, reaction, @@ -110,7 +110,7 @@ class WsClient implements Client { async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { const event: CreateAttachmentEvent = { - type: EventType.CreateAttachment, + type: RequestEventType.CreateAttachment, card, message, attachment, @@ -121,7 +121,7 @@ class WsClient implements Client { async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { const event: RemoveAttachmentEvent = { - type: EventType.RemoveAttachment, + type: RequestEventType.RemoveAttachment, card, message, attachment @@ -167,7 +167,7 @@ class WsClient implements Client { async createNotification(message: MessageID, context: ContextID): Promise { const event: CreateNotificationEvent = { - type: EventType.CreateNotification, + type: RequestEventType.CreateNotification, message, context } @@ -176,7 +176,7 @@ class WsClient implements Client { async removeNotification(message: MessageID, context: 
ContextID): Promise { const event: RemoveNotificationEvent = { - type: EventType.RemoveNotification, + type: RequestEventType.RemoveNotification, message, context } @@ -185,7 +185,7 @@ class WsClient implements Client { async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { const event: CreateNotificationContextEvent = { - type: EventType.CreateNotificationContext, + type: RequestEventType.CreateNotificationContext, card, lastView, lastUpdate @@ -196,7 +196,7 @@ class WsClient implements Client { async removeNotificationContext(context: ContextID): Promise { const event: RemoveNotificationContextEvent = { - type: EventType.RemoveNotificationContext, + type: RequestEventType.RemoveNotificationContext, context } await this.sendEvent(event) @@ -204,7 +204,7 @@ class WsClient implements Client { async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { const event: UpdateNotificationContextEvent = { - type: EventType.UpdateNotificationContext, + type: RequestEventType.UpdateNotificationContext, context, update } @@ -226,7 +226,7 @@ class WsClient implements Client { await this.ws.send('unsubscribeQuery', [id]) } - private async sendEvent(event: Event): Promise { + private async sendEvent(event: RequestEvent): Promise { return await this.ws.send('event', [event]) } diff --git a/packages/client-ws/src/connection.ts b/packages/client-ws/src/connection.ts index 9844f840053..98a0df175d4 100644 --- a/packages/client-ws/src/connection.ts +++ b/packages/client-ws/src/connection.ts @@ -1,9 +1,27 @@ -import type { Response, HelloRequest, RequestId, BroadcastEvent, Request } from '@hcengineering/communication-sdk-types' +import type { ResponseEvent } from '@hcengineering/communication-sdk-types' import { encode, decode } from '@msgpack/msgpack' const PING_TIMEOUT = 10000 const RECONNECT_TIMEOUT = 1000 +export type RequestId = string + +export interface Response { + id?: RequestId + result?: any + error?: string +} + +export interface Request { + id?: RequestId + method: string + params: any[] +} + +export interface HelloRequest extends Request { + binary?: boolean +} + export class WebSocketConnection { private ws!: WebSocket | Promise private requests: { [key: RequestId]: { resolve: (response: any) => void; reject: (reason: any) => void } } = {} @@ -12,7 +30,7 @@ export class WebSocketConnection { private pingInterval: any private reconnectTimeout: any - onEvent: (event: BroadcastEvent) => void = () => {} + onEvent: (event: ResponseEvent) => void = () => {} constructor( private url: string, @@ -40,7 +58,7 @@ export class WebSocketConnection { if (response.error !== undefined) { console.error('Websocket error', response.error) } else { - const event = response.result as BroadcastEvent + const event = response.result as ResponseEvent this.onEvent(event) } } diff --git a/packages/cockroach/migrations/00_schema.sql b/packages/cockroach/migrations/00_schema.sql new file mode 100644 index 00000000000..0f19be60a3c --- /dev/null +++ b/packages/cockroach/migrations/00_schema.sql @@ -0,0 +1,2 @@ +DROP SCHEMA IF EXISTS communication CASCADE; +CREATE SCHEMA IF NOT EXISTS communication; diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index 3e255f21b9b..a6c5224d504 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -1,9 +1,6 @@ -DROP TABLE IF EXISTS c_message CASCADE; -DROP TABLE IF EXISTS c_messages_group CASCADE; - -CREATE 
TABLE IF NOT EXISTS c_message +CREATE TABLE IF NOT EXISTS communication.message ( - id UUID NOT NULL DEFAULT gen_random_uuid(), + id INT8 NOT NULL, workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, @@ -11,19 +8,21 @@ CREATE TABLE IF NOT EXISTS c_message creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (id) + PRIMARY KEY (id, workspace_id, card_id) ); -CREATE TABLE IF NOT EXISTS c_messages_group +CREATE TABLE IF NOT EXISTS communication.messages_group ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, - start_at TIMESTAMPTZ NOT NULL, - end_at TIMESTAMPTZ NOT NULL, blob_id UUID NOT NULL, + + from_id INT8 NOT NULL, + to_id INT8 NOT NULL, + from_date TIMESTAMPTZ NOT NULL, + to_date TIMESTAMPTZ NOT NULL, count INT NOT NULL, - UNIQUE (workspace_id, card_id, blob_id) + PRIMARY KEY (workspace_id, card_id, blob_id) ); diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql index be2e04210db..0513090d97f 100644 --- a/packages/cockroach/migrations/02_patch.sql +++ b/packages/cockroach/migrations/02_patch.sql @@ -1,14 +1,14 @@ -DROP TABLE IF EXISTS c_patch CASCADE; -CREATE TABLE IF NOT EXISTS c_patch +CREATE TABLE IF NOT EXISTS communication.patch ( - id INT8 NOT NULL DEFAULT unique_rowid(), - message_id UUID NOT NULL, - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, + id INT8 NOT NULL DEFAULT unique_rowid(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (id), - FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE + PRIMARY KEY (id) ); -CREATE INDEX idx_patch_message_id ON c_patch (message_id); \ No newline at end of file +CREATE INDEX idx_patch_message_id ON communication.patch (message_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql index 00fa9294413..7fc727dd47e 100644 --- a/packages/cockroach/migrations/03_attachment.sql +++ b/packages/cockroach/migrations/03_attachment.sql @@ -1,13 +1,11 @@ -DROP TABLE IF EXISTS c_attachment CASCADE; -CREATE TABLE IF NOT EXISTS c_attachment +CREATE TABLE IF NOT EXISTS communication.attachment ( - message_id UUID NOT NULL, + message_id INT8 NOT NULL, card_id VARCHAR(255) NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL DEFAULT now(), - PRIMARY KEY (message_id, card_id), - FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE + PRIMARY KEY (message_id, card_id) ); -CREATE INDEX IF NOT EXISTS attachment_message_idx ON c_attachment (message_id); +CREATE INDEX IF NOT EXISTS attachment_message_idx ON communication.attachment (message_id); diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql index a698f9ad672..5719fb8de2d 100644 --- a/packages/cockroach/migrations/04_reaction.sql +++ b/packages/cockroach/migrations/04_reaction.sql @@ -1,13 +1,13 @@ -DROP TABLE IF EXISTS c_reaction CASCADE; -CREATE TABLE IF NOT EXISTS c_reaction +CREATE TABLE IF NOT EXISTS communication.reaction ( - message_id UUID NOT NULL, - reaction VARCHAR(100) NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + reaction VARCHAR(100) NOT NULL, + 
creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), - PRIMARY KEY (message_id, creator, reaction), - FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE + PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction) ); -CREATE INDEX IF NOT EXISTS reaction_message_idx ON c_reaction (message_id); +CREATE INDEX IF NOT EXISTS reaction_message_idx ON communication.reaction (message_id); diff --git a/packages/cockroach/migrations/05_notification_context.sql b/packages/cockroach/migrations/05_notification_context.sql index 7514d5febfe..33fc3866494 100644 --- a/packages/cockroach/migrations/05_notification_context.sql +++ b/packages/cockroach/migrations/05_notification_context.sql @@ -1,5 +1,4 @@ -DROP TABLE IF EXISTS c_notification_context CASCADE; -CREATE TABLE IF NOT EXISTS c_notification_context +CREATE TABLE IF NOT EXISTS communication.notification_context ( id UUID NOT NULL DEFAULT gen_random_uuid(), workspace_id UUID NOT NULL, diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql index 905d6c9a508..3a89fb8be38 100644 --- a/packages/cockroach/migrations/06_notification.sql +++ b/packages/cockroach/migrations/06_notification.sql @@ -1,10 +1,10 @@ -DROP TABLE IF EXISTS c_notification CASCADE; -CREATE TABLE IF NOT EXISTS c_notification +CREATE TABLE IF NOT EXISTS communication.notification ( message_id UUID NOT NULL, context UUID NOT NULL, PRIMARY KEY (message_id, context), - FOREIGN KEY (message_id) REFERENCES c_message (id) ON DELETE CASCADE, - FOREIGN KEY (context) REFERENCES c_notification_context (id) ON DELETE CASCADE + FOREIGN KEY (context) REFERENCES communication.notification_context (id) ON DELETE CASCADE ); + +CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notification (context); \ No newline at end of file diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 0a1701fbdb8..bf1b86497fa 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -13,8 +13,9 @@ "@types/bun": "^1.1.14" }, "dependencies": { - "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-core": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-types": "workspace:*", "pg": "8.12.0", "postgres": "^3.4.4", "uuid": "^11.0.3" diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 3ee7b6f5692..0a805aab03b 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -12,7 +12,10 @@ import { type NotificationContext, type FindNotificationsParams, type Notification, - type BlobID + type BlobID, + type MessagesGroup, + type FindMessagesGroupsParams, + type WorkspaceID } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' @@ -26,47 +29,59 @@ export class CockroachAdapter implements DbAdapter { constructor( private readonly db: PostgresClientReference, - private readonly sqlClient: postgres.Sql + private readonly sqlClient: postgres.Sql, + private readonly workspace: WorkspaceID ) { - this.message = new MessagesDb(this.sqlClient) - this.notification = new NotificationsDb(this.sqlClient) + this.message = new MessagesDb(this.sqlClient, 
this.workspace) + this.notification = new NotificationsDb(this.sqlClient, this.workspace) } - async createMessage( - workspace: string, + async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { + return await this.message.createMessage(card, content, creator, created) + } + + async createPatch( card: CardID, + message: MessageID, content: RichText, creator: SocialID, created: Date - ): Promise { - return await this.message.createMessage(workspace, card, content, creator, created) + ): Promise { + return await this.message.createPatch(card, message, content, creator, created) } - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { - return await this.message.createPatch(message, content, creator, created) + async removeMessage(card: CardID, message: MessageID): Promise { + return await this.message.removeMessage(card, message) } - async removeMessage(message: MessageID): Promise { - return await this.message.removeMessage(message) + async removeMessages(card: CardID, ids: MessageID[]): Promise { + return await this.message.removeMessages(card, ids) } async createMessagesGroup( - workspace: string, card: CardID, - startAt: Date, - endAt: Date, blobId: BlobID, + from_id: MessageID, + to_id: MessageID, + from_date: Date, + to_date: Date, count: number ): Promise { - return await this.message.createMessagesGroup(workspace, card, startAt, endAt, blobId, count) + return await this.message.createMessagesGroup(card, blobId, from_id, to_id, from_date, to_date, count) } - async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { - return await this.message.createReaction(message, reaction, creator, created) + async createReaction( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + return await this.message.createReaction(card, message, reaction, creator, created) } - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { - return await this.message.removeReaction(message, reaction, creator) + async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { + return await this.message.removeReaction(card, message, reaction, creator) } async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { @@ -77,8 +92,12 @@ export class CockroachAdapter implements DbAdapter { return await this.message.removeAttachment(message, attachment) } - async findMessages(workspace: string, params: FindMessagesParams): Promise { - return await this.message.find(workspace, params) + async findMessages(params: FindMessagesParams): Promise { + return await this.message.find(params) + } + + async findMessagesGroups(params: FindMessagesGroupsParams): Promise { + return await this.message.findGroups(params) } async createNotification(message: MessageID, context: ContextID): Promise { @@ -90,13 +109,12 @@ export class CockroachAdapter implements DbAdapter { } async createContext( - workspace: string, + personalWorkspace: WorkspaceID, card: CardID, - personalWorkspace: string, lastView?: Date, lastUpdate?: Date ): Promise { - return await this.notification.createContext(workspace, card, personalWorkspace, lastView, lastUpdate) + return await this.notification.createContext(personalWorkspace, card, lastView, lastUpdate) } async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { @@ -109,16 
+127,16 @@ export class CockroachAdapter implements DbAdapter { async findContexts( params: FindNotificationContextParams, - personalWorkspaces: string[], - workspace?: string + personalWorkspaces: WorkspaceID[], + workspace?: WorkspaceID ): Promise { return await this.notification.findContexts(params, personalWorkspaces, workspace) } async findNotifications( params: FindNotificationsParams, - personalWorkspace: string, - workspace?: string + personalWorkspace: WorkspaceID, + workspace?: WorkspaceID ): Promise { return await this.notification.findNotifications(params, personalWorkspace, workspace) } @@ -128,9 +146,9 @@ export class CockroachAdapter implements DbAdapter { } } -export async function createDbAdapter(connectionString: string): Promise { +export async function createDbAdapter(connectionString: string, workspace: WorkspaceID): Promise { const db = connect(connectionString) const sqlClient = await db.getClient() - return new CockroachAdapter(db, sqlClient) + return new CockroachAdapter(db, sqlClient, workspace) } diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index 671ffc80dc7..1c71ecb7348 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -1,9 +1,12 @@ import type postgres from 'postgres' +import type {WorkspaceID} from "@hcengineering/communication-types"; export class BaseDb { constructor( - readonly client: postgres.Sql - ) {} + readonly client: postgres.Sql, + readonly workspace: WorkspaceID + ) { + } async insert(table: string, data: Record): Promise { const keys = Object.keys(data) @@ -15,14 +18,14 @@ export class BaseDb { await this.client.unsafe(sql, values) } - async insertWithReturn(table: string, data: Record, returnField : string): Promise { + async insertWithReturn(table: string, data: Record, returnField: string): Promise { const keys = Object.keys(data) const values = Object.values(data) const sql = ` INSERT INTO ${table} (${keys.map((k) => `"${k}"`).join(', ')}) VALUES (${keys.map((_, idx) => `$${idx + 1}`).join(', ')}) RETURNING ${returnField};` - const result =await this.client.unsafe(sql, values) + const result = await this.client.unsafe(sql, values) return result[0][returnField] } @@ -31,6 +34,10 @@ export class BaseDb { const keys = Object.keys(where) const values = Object.values(where) + if (keys.length === 0) { + throw new Error("WHERE condition cannot be empty"); + } + const sql = ` DELETE FROM ${table} @@ -38,4 +45,35 @@ export class BaseDb { await this.client.unsafe(sql, values) } + + async removeWithReturn(table: string, where: Record, returnField: string): Promise { + const keys = Object.keys(where); + const values: any[] = []; + + if (keys.length === 0) { + throw new Error("WHERE condition cannot be empty"); + } + + const whereClause = keys.map((key) => { + const value = where[key]; + if (Array.isArray(value)) { + const placeholders = value.map((_, i) => `$${values.length + i + 1}`).join(", "); + values.push(...value); + return `"${key}" IN (${placeholders})`; + } else { + values.push(value); + return `"${key}" = $${values.length}`; + } + }).join(" AND "); + + const sql = ` + DELETE FROM ${table} + WHERE ${whereClause} + RETURNING ${returnField}; + `; + + const result = await this.client.unsafe(sql, values); + + return result.map((it: any) => it[returnField]); + } } diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 5257fee0fbe..79462c4372c 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts 
@@ -6,8 +6,12 @@ import { SortOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment, type BlobID + Direction, + type BlobID, + type MessagesGroup, + type FindMessagesGroupsParams } from '@hcengineering/communication-types' +import {generateMessageId} from "@hcengineering/communication-core"; import {BaseDb} from './base.ts' import { @@ -16,31 +20,48 @@ import { type AttachmentDb, type ReactionDb, type PatchDb, - type MessagesGroupDb -} from './types.ts' + type MessagesGroupDb, + toMessage, + toMessagesGroup +} from './schema.ts' +import {getCondition} from './utils.ts'; + export class MessagesDb extends BaseDb { //Message - async createMessage(workspace: string, card: CardID, content: RichText, creator: SocialID, created: Date): Promise { + async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: MessageDb = { - workspace_id: workspace, + id: generateMessageId(), + workspace_id: this.workspace, card_id: card, content: content, creator: creator, created: created, } - const id = await this.insertWithReturn(TableName.Message, dbData, 'id') + await this.insert(TableName.Message, dbData) - return id as MessageID + return dbData.id as MessageID } - async removeMessage(message: MessageID): Promise { - await this.remove(TableName.Message, {id: message}) + async removeMessage(card: CardID, message: MessageID): Promise { + const result = await this.removeWithReturn(TableName.Message, {id: message, workspace_id: this.workspace, card_id: card}, "id") + return result[0] as MessageID | undefined } - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + async removeMessages(card: CardID, ids: MessageID[]): Promise { + const result = await this.removeWithReturn(TableName.Message, { + workspace_id: this.workspace, + card_id: card, + id: ids + }, "id") + return result.map((it: any) => it.id) + } + + async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { const dbData: PatchDb = { + workspace_id: this.workspace, + card_id: card, message_id: message, content: content, creator: creator, @@ -50,14 +71,16 @@ export class MessagesDb extends BaseDb { await this.insert(TableName.Patch, dbData) } - - async createMessagesGroup(workspace: string,card: CardID, startAt: Date, endAt: Date, blobId: BlobID, count: number): Promise { + //MessagesGroup + async createMessagesGroup(card: CardID, blobId: BlobID, from_id: MessageID, to_id: MessageID, from_date: Date, to_date: Date, count: number): Promise { const dbData: MessagesGroupDb = { - workspace_id: workspace, + workspace_id: this.workspace, card_id: card, - start_at: startAt, - end_at: endAt, blob_id: blobId, + from_id, + to_id, + from_date, + to_date, count } await this.insert(TableName.MessagesGroup, dbData) @@ -82,8 +105,10 @@ export class MessagesDb extends BaseDb { } //Reaction - async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { const dbData: ReactionDb = { + workspace_id: this.workspace, + card_id: card, message_id: message, reaction: reaction, creator: creator, @@ -92,8 +117,10 @@ export class MessagesDb extends BaseDb { await this.insert(TableName.Reaction, dbData) } - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction( card: CardID, message: 
MessageID, reaction: string, creator: SocialID): Promise<void> {
     await this.remove(TableName.Reaction, {
+      workspace_id: this.workspace,
+      card_id: card,
       message_id: message,
       reaction: reaction,
       creator: creator
@@ -101,7 +128,7 @@
   }
 
   //Find messages
-  async find(workspace: string, params: FindMessagesParams): Promise<Message[]> {
+  async find(params: FindMessagesParams): Promise<Message[]> {
     //TODO: experiment with select to improve performance
     const select = `SELECT m.id,
                            m.card_id,
@@ -113,19 +140,19 @@
                            ${this.subSelectReactions()}
                     FROM ${TableName.Message} m`
 
-    const {where, values} = this.buildMessageWhere(workspace, params)
+    const {where, values} = this.buildMessageWhere(params)
     const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : ''
     const limit = params.limit ? ` LIMIT ${params.limit}` : ''
     const sql = [select, where, orderBy, limit].join(' ')
 
     const result = await this.client.unsafe(sql, values)
-    return result.map(it => this.toMessage(it)) as Message[]
+    return result.map((it: any) => toMessage(it))
   }
 
-  buildMessageWhere(workspace: string, params: FindMessagesParams): { where: string, values: any[] } {
+  buildMessageWhere(params: FindMessagesParams): { where: string, values: any[] } {
     const where: string[] = ['m.workspace_id = $1']
-    const values: any[] = [workspace]
+    const values: any[] = [this.workspace]
 
     let index = 2
 
@@ -166,7 +193,7 @@
                 'created', p.created
             )
      FROM ${TableName.Patch} p
-     WHERE p.message_id = m.id
+     WHERE p.message_id = m.id AND p.workspace_id = m.workspace_id AND p.card_id = m.card_id
     ) AS patches`
   }
 
@@ -192,41 +219,84 @@
                 'created', r.created
             )
      FROM ${TableName.Reaction} r
-     WHERE r.message_id = m.id
+     WHERE r.message_id = m.id AND r.workspace_id = m.workspace_id AND r.card_id = m.card_id
     ) AS reactions`
   }
 
-  toMessage(row: any): Message {
-    const lastPatch = row.patches?.[0]
-    return {
-      id: row.id,
-      card: row.card_id,
-      content: lastPatch?.content ?? row.content,
-      creator: row.creator,
-      created: new Date(row.created),
-      edited: new Date(lastPatch?.created ?? row.created),
-      reactions: (row.reactions ?? []).map(this.toReaction),
-      attachments: (row.attachments ?? []).map(this.toAttachment)
+  //Find messages groups
+  async findGroups(params: FindMessagesGroupsParams): Promise<MessagesGroup[]> {
+    const select = `SELECT mg.card_id,
+                           mg.blob_id,
+                           mg.from_id,
+                           mg.to_id,
+                           mg.from_date,
+                           mg.to_date,
+                           mg.count
+                    FROM ${TableName.MessagesGroup} mg`
+
+    const {where, values, index} = this.buildMessagesGroupWhere(this.workspace, params)
+    const orderBy = params.sortBy ? `ORDER BY $${index} ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : ''
+    if (params.sortBy) {
+      values.push(params.sortBy)
    }
+    const limit = params.limit ?
` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.client.unsafe(sql, values) + + return result.map((it: any) => toMessagesGroup(it)) } - toReaction(row: any): Reaction { - return { - message: row.message_id, - reaction: row.reaction, - creator: row.creator, - created: new Date(row.created) + buildMessagesGroupWhere(workspace: string, params: FindMessagesGroupsParams): { + where: string, + values: any[], + index: number + } { + const where: string[] = ['mg.workspace_id = $1'] + const values: any[] = [workspace] + + let index = 2 + + if (params.card != null) { + where.push(`mg.card_id = $${index++}`) + values.push(params.card) + } + + if (params.blobId != null) { + where.push(`mg.blob_id = $${index++}`) + values.push(params.blobId) + } + + const fromIdCondition = getCondition("mg", "from_id", index, params.fromId); + if (fromIdCondition != null) { + where.push(fromIdCondition.where); + values.push(fromIdCondition.value); + index++; + } + + const toIdCondition = getCondition("mg", "to_id", index, params.toId); + + if (toIdCondition != null) { + where.push(toIdCondition.where); + values.push(toIdCondition.value); + index++; } - } - toAttachment(row: any): Attachment { - return { - message: row.message_id, - card: row.card_id, - creator: row.creator, - created: new Date(row.created) + const fromDateCondition = getCondition("mg", "from_date", index, params.fromDate); + if (fromDateCondition != null) { + where.push(fromDateCondition.where); + values.push(fromDateCondition.value); + index++; } + + const toDateCondition = getCondition("mg", "to_date", index, params.toDate); + if (toDateCondition != null) { + where.push(toDateCondition.where); + values.push(toDateCondition.value); + index++; + } + + + return {where: `WHERE ${where.join(' AND ')}`, values, index} } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 638a77ab467..ba43fa769eb 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -5,11 +5,12 @@ import { type NotificationContext, type FindNotificationContextParams, SortOrder, type FindNotificationsParams, type Notification, - type NotificationContextUpdate + type NotificationContextUpdate, + type WorkspaceID } from '@hcengineering/communication-types' import {BaseDb} from './base.ts' -import {TableName, type ContextDb, type NotificationDb} from './types.ts' +import {TableName, type ContextDb, type NotificationDb} from './schema.ts' export class NotificationsDb extends BaseDb { async createNotification(message: MessageID, context: ContextID): Promise { @@ -27,9 +28,9 @@ export class NotificationsDb extends BaseDb { }) } - async createContext(workspace: string, card: CardID, personalWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { + async createContext(personalWorkspace: WorkspaceID,card: CardID, lastView?: Date, lastUpdate?: Date): Promise { const dbData: ContextDb = { - workspace_id: workspace, + workspace_id: this.workspace, card_id: card, personal_workspace: personalWorkspace, last_view: lastView, @@ -71,7 +72,7 @@ export class NotificationsDb extends BaseDb { await this.client.unsafe(sql, [values, context]) } - async findContexts(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): Promise { + async findContexts(params: FindNotificationContextParams, personalWorkspaces: WorkspaceID[], workspace?: WorkspaceID,): Promise { const select = ` SELECT nc.id, 
nc.card_id, nc.archived_from, nc.last_view, nc.last_update FROM ${TableName.NotificationContext} nc`; @@ -86,7 +87,7 @@ export class NotificationsDb extends BaseDb { } - async findNotifications(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): Promise { + async findNotifications(params: FindNotificationsParams, personalWorkspace: WorkspaceID, workspace?: WorkspaceID): Promise { //TODO: experiment with select to improve performance, should join with attachments and reactions? const select = ` SELECT n.message_id, @@ -123,7 +124,7 @@ export class NotificationsDb extends BaseDb { return result.map(this.toNotification); } - buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): { + buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: WorkspaceID[], workspace?: WorkspaceID): { where: string, values: any[] } { @@ -149,7 +150,7 @@ export class NotificationsDb extends BaseDb { return {where: `WHERE ${where.join(' AND ')}`, values} } - buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): { + buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: WorkspaceID, workspace?: WorkspaceID): { where: string, values: any[] } { diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts new file mode 100644 index 00000000000..19dbd4ecc89 --- /dev/null +++ b/packages/cockroach/src/db/schema.ts @@ -0,0 +1,122 @@ +import type {ContextID, MessageID, RichText, SocialID, CardID, BlobID, Message, Reaction, Attachment, MessagesGroup, WorkspaceID } from "@hcengineering/communication-types" + +export enum TableName { + Message = 'communication.message', + MessagesGroup = 'communication.messages_group', + Patch = 'communication.patch', + Attachment = 'communication.attachment', + Reaction = 'communication.reaction', + Notification = 'communication.notification', + NotificationContext = 'communication.notification_context' +} + +export interface MessageDb { + id: MessageID, + workspace_id: WorkspaceID, + card_id: CardID, + content: RichText, + creator: SocialID, + created: Date, +} + +export interface MessagesGroupDb { + workspace_id: WorkspaceID, + card_id: CardID, + blob_id: BlobID, + from_id: MessageID, + to_id: MessageID, + from_date: Date, + to_date: Date, + count: number +} + +export interface PatchDb { + workspace_id: WorkspaceID, + card_id: CardID, + message_id: MessageID, + content: RichText, + creator: SocialID, + created: Date, +} + +export interface ReactionDb { + workspace_id: WorkspaceID, + card_id: CardID, + message_id: MessageID, + reaction: string, + creator: SocialID + created: Date +} + +export interface AttachmentDb { + message_id: MessageID, + card_id: CardID, + creator: SocialID + created: Date +} + +export interface NotificationDb { + message_id: MessageID, + context: ContextID +} + +export interface ContextDb { + workspace_id: WorkspaceID + card_id: CardID + personal_workspace: WorkspaceID + + archived_from?: Date + last_view?: Date + last_update?: Date +} + + +interface RawMessage extends MessageDb { + patches?: PatchDb[] + attachments?: AttachmentDb[] + reactions?: ReactionDb[] +} + +export function toMessage(raw: RawMessage): Message { + const lastPatch = raw.patches?.[0] + return { + id: raw.id, + card: raw.card_id, + content: lastPatch?.content ?? raw.content, + creator: raw.creator, + created: new Date(raw.created), + edited: lastPatch?.created ? 
new Date(lastPatch.created) : undefined, + reactions: (raw.reactions ?? []).map(toReaction), + attachments: (raw.attachments ?? []).map(toAttachment) + } +} + +export function toReaction(raw: ReactionDb): Reaction { + return { + message: raw.message_id, + reaction: raw.reaction, + creator: raw.creator, + created: new Date(raw.created) + } +} + +export function toAttachment(raw: AttachmentDb): Attachment { + return { + message: raw.message_id, + card: raw.card_id, + creator: raw.creator, + created: new Date(raw.created) + } +} + +export function toMessagesGroup(raw: MessagesGroupDb): MessagesGroup { + return { + card: raw.card_id, + blobId: raw.blob_id, + fromId: raw.from_id, + toId: raw.to_id, + fromDate: new Date(raw.from_date), + toDate: new Date(raw.to_date), + count: raw.count + } +} \ No newline at end of file diff --git a/packages/cockroach/src/db/types.ts b/packages/cockroach/src/db/types.ts deleted file mode 100644 index d8d1e7d0b1a..00000000000 --- a/packages/cockroach/src/db/types.ts +++ /dev/null @@ -1,63 +0,0 @@ -import type {ContextID, MessageID, RichText, SocialID, CardID, BlobID } from "@hcengineering/communication-types" - -export enum TableName { - Message = 'c_message', - MessagesGroup = 'c_messages_group', - Patch = 'c_patch', - Attachment = 'c_attachment', - Reaction = 'c_reaction', - Notification = 'c_notification', - NotificationContext = 'c_notification_context' -} - -export interface MessageDb { - workspace_id: string, - card_id: CardID, - content: RichText, - creator: SocialID, - created: Date, -} - -export interface MessagesGroupDb { - workspace_id: string, - card_id: CardID, - start_at: Date, - end_at: Date, - blob_id: BlobID, - count: number -} - -export interface PatchDb { - message_id: MessageID, - content: RichText, - creator: SocialID, - created: Date, -} -export interface ReactionDb { - message_id: MessageID, - reaction: string, - creator: SocialID - created: Date -} - -export interface AttachmentDb { - message_id: MessageID, - card_id: CardID, - creator: SocialID - created: Date -} - -export interface NotificationDb { - message_id: MessageID, - context: ContextID -} - -export interface ContextDb { - workspace_id: string - card_id: CardID - personal_workspace: string - - archived_from?: Date - last_view?: Date - last_update?: Date -} \ No newline at end of file diff --git a/packages/cockroach/src/db/utils.ts b/packages/cockroach/src/db/utils.ts new file mode 100644 index 00000000000..4b56ad3f865 --- /dev/null +++ b/packages/cockroach/src/db/utils.ts @@ -0,0 +1,22 @@ +export function getCondition(table: string, dbField: string, index: number, param: any): { where: string, value: any } | undefined { + if (typeof param === 'object') { + if (param.less != null) { + return {where: `${table}.${dbField} < $${index}`, value: param.less}; + } + if (param.lessOrEqual != null) { + return {where: `${table}.${dbField} <= $${index}`, value: param.lessOrEqual}; + } + if (param.greater != null) { + return {where: `${table}.${dbField} > $${index}`, value: param.greater}; + } + if (param.greaterOrEqual != null) { + return {where: `${table}.${dbField} >= $${index}`, value: param.greaterOrEqual}; + } + } + + if(param != null) { + return {where: `${table}.${dbField} = $${index}`, value: param}; + } + + return undefined +} \ No newline at end of file diff --git a/packages/core/package.json b/packages/core/package.json new file mode 100644 index 00000000000..db801ed38ad --- /dev/null +++ b/packages/core/package.json @@ -0,0 +1,28 @@ +{ + "name": 
"@hcengineering/communication-core", + "version": "0.1.8", + "main": "dist/index.js", + "module": "dist/index.js", + "types": "./types/index.d.ts", + "type": "module", + "files": [ + "dist/index.js", + "types/**/*.d.ts" + ], + "devDependencies": { + "@types/bun": "^1.1.14" + }, + "peerDependencies": { + "typescript": "^5.6.3" + }, + "dependencies": { + "@hcengineering/communication-types": "workspace:*" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + } +} diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts new file mode 100644 index 00000000000..0560e4bd55b --- /dev/null +++ b/packages/core/src/index.ts @@ -0,0 +1 @@ +export * from './message.ts' diff --git a/packages/core/src/message.ts b/packages/core/src/message.ts new file mode 100644 index 00000000000..48a43227a53 --- /dev/null +++ b/packages/core/src/message.ts @@ -0,0 +1,24 @@ +import type { MessageID } from '@hcengineering/communication-types' + +let lastTimestamp = Math.floor(Date.now() / 1000) +let counter = 0 + +export function generateMessageId(): MessageID { + const timestamp = Math.floor(Date.now() / 1000) + + if (timestamp !== lastTimestamp) { + lastTimestamp = timestamp + counter = 0 + } + + counter++ + + return ((timestamp << 24) | counter) as MessageID +} + +export function parseMessageId(id: MessageID): { timestamp: number; counter: number } { + const timestamp = id >> 24 + const counter = id & 0xffffff + + return { timestamp, counter } +} diff --git a/packages/server-core/tsconfig.json b/packages/core/tsconfig.json similarity index 100% rename from packages/server-core/tsconfig.json rename to packages/core/tsconfig.json diff --git a/packages/examples/package.json b/packages/examples/package.json index d5c25cad89e..d7e579df670 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index 2f6c9fbf2e5..414362ed149 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -1,11 +1,18 @@ -import { type Message, type SocialID, SortOrder, type CardID, type Window } from '@hcengineering/communication-types' +import { + type Message, + type SocialID, + SortOrder, + type CardID, + type Window, + type WorkspaceID +} from '@hcengineering/communication-types' import { getWebsocketClient } from '@hcengineering/communication-client-ws' import { getSqliteClient } from '@hcengineering/communication-client-sqlite' import { createMessagesQuery, initLiveQueries } from '@hcengineering/communication-client-query' const card = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as CardID -const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' -const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' +const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as WorkspaceID +const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' as WorkspaceID const creator1 = 'email:vasya@huly.com' as SocialID async function getClient(type: 'ws' | 'sqlite') { diff --git a/packages/query/package.json b/packages/query/package.json index 2d885ac1eb9..c02e9747bd0 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", 
- "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index b787d60d6a9..9dbae0845b7 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -3,7 +3,7 @@ import { deepEqual } from 'fast-equals' import type { MessagesQueryCallback, NotificationsQueryCallback, - BroadcastEvent, + ResponseEvent, QueryClient } from '@hcengineering/communication-sdk-types' @@ -27,7 +27,7 @@ export class LiveQueries { this.client = client } - async onEvent(event: BroadcastEvent): Promise { + async onEvent(event: ResponseEvent): Promise { for (const q of this.queries.values()) { await q.onEvent(event) } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 1616cb24dfd..08279eeaaf3 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -1,6 +1,6 @@ import { type FindMessagesParams, - type ID, + type MessageID, type Message, type Patch, SortOrder @@ -10,8 +10,8 @@ import { type MessageCreatedEvent, type PatchCreatedEvent, type ReactionCreatedEvent, - EventType, - type BroadcastEvent, + type ResponseEvent, + ResponseEventType, type AttachmentRemovedEvent, type MessageRemovedEvent, type ReactionRemovedEvent @@ -24,7 +24,7 @@ export class MessagesQuery extends BaseQuery { return this.client.findMessages(params, this.id) } - override getObjectId(object: Message): ID { + override getObjectId(object: Message): MessageID { return object.id } @@ -32,21 +32,21 @@ export class MessagesQuery extends BaseQuery { return object.created } - override async onEvent(event: BroadcastEvent): Promise { + override async onEvent(event: ResponseEvent): Promise { switch (event.type) { - case EventType.MessageCreated: + case ResponseEventType.MessageCreated: return await this.onCreateMessageEvent(event) - case EventType.MessageRemoved: + case ResponseEventType.MessageRemoved: return await this.onRemoveMessageEvent(event) - case EventType.PatchCreated: + case ResponseEventType.PatchCreated: return await this.onCreatePatchEvent(event) - case EventType.ReactionCreated: + case ResponseEventType.ReactionCreated: return await this.onCreateReactionEvent(event) - case EventType.ReactionRemoved: + case ResponseEventType.ReactionRemoved: return await this.onRemoveReactionEvent(event) - case EventType.AttachmentCreated: + case ResponseEventType.AttachmentCreated: return await this.onCreateAttachmentEvent(event) - case EventType.AttachmentRemoved: + case ResponseEventType.AttachmentRemoved: return await this.onRemoveAttachmentEvent(event) } } @@ -58,8 +58,8 @@ export class MessagesQuery extends BaseQuery { const message = { ...event.message, - edited: new Date(event.message.edited), - created: new Date(event.message.created) + edited: event.message.edited, + created: event.message.created } const exists = this.result.get(message.id) @@ -93,7 +93,7 @@ export class MessagesQuery extends BaseQuery { const patch = { ...event.patch, - created: new Date(event.patch.created) + created: event.patch.created } const message = this.result.get(patch.message) @@ -125,7 +125,7 @@ export class MessagesQuery extends BaseQuery { const reaction = { ...event.reaction, - created: new Date(event.reaction.created) + created: event.reaction.created } const message = this.result.get(reaction.message) if (message === undefined) return @@ -161,7 +161,7 @@ export class MessagesQuery extends BaseQuery { const attachment = { ...event.attachment, - 
created: new Date(event.attachment.created) + created: event.attachment.created } const message = this.result.get(attachment.message) if (message === undefined) return diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index 7e39d4496a9..e318a01660b 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -2,12 +2,12 @@ import { type FindNotificationsParams, SortOrder, type Notification, - type ID, + type MessageID, } from '@hcengineering/communication-types' import { type NotificationCreatedEvent, - EventType, - type BroadcastEvent, + ResponseEventType, + type ResponseEvent, type NotificationContextRemovedEvent, type NotificationRemovedEvent, type NotificationContextUpdatedEvent, @@ -20,7 +20,7 @@ export class NotificationQuery extends BaseQuery { + override async onEvent(event: ResponseEvent): Promise { switch (event.type) { - case EventType.NotificationCreated: + case ResponseEventType.NotificationCreated: return await this.onCreateNotificationEvent(event) - case EventType.NotificationRemoved: + case ResponseEventType.NotificationRemoved: return await this.onRemoveNotificationEvent(event) - case EventType.NotificationContextUpdated: + case ResponseEventType.NotificationContextUpdated: return await this.onUpdateNotificationContextEvent(event) - case EventType.NotificationContextRemoved: + case ResponseEventType.NotificationContextRemoved: return await this.onRemoveNotificationContextEvent(event) } } diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts index 3258756679f..fd95c80cf3b 100644 --- a/packages/query/src/query.ts +++ b/packages/query/src/query.ts @@ -1,5 +1,5 @@ import { Direction, type ID, SortOrder } from '@hcengineering/communication-types' -import { type BroadcastEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' +import { type ResponseEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' import { QueryResult } from './result' import { defaultQueryParams, type FindParams, type Query, type QueryId } from './types' @@ -72,7 +72,7 @@ export class BaseQuery implements Query { } // eslint-disable-next-line @typescript-eslint/no-unused-vars - async onEvent(event: BroadcastEvent): Promise { + async onEvent(event: ResponseEvent): Promise { /*Implement in subclass*/ } diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 6fcbf17e09a..31ddd46f956 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -1,4 +1,4 @@ -import { type BroadcastEvent } from '@hcengineering/communication-sdk-types' +import { type ResponseEvent } from '@hcengineering/communication-sdk-types' import { Direction, SortOrder, type Window } from '@hcengineering/communication-types' import { QueryResult } from './result.ts' @@ -20,7 +20,7 @@ export interface Query { readonly id: QueryId readonly params: P - onEvent(event: BroadcastEvent): Promise + onEvent(event: ResponseEvent): Promise loadForward(): Promise loadBackward(): Promise diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 7e598f01b8a..d6d019f1e7e 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.7", + "version": "0.1.8", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/src/client.ts 
b/packages/sdk-types/src/client.ts index aa9593f0258..8d03ea2e708 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -13,7 +13,7 @@ import type { } from '@hcengineering/communication-types' import type { FindMessagesParams } from '@hcengineering/communication-types' -import type { BroadcastEvent } from './event.ts' +import type { ResponseEvent } from './responseEvent.ts' export interface Client { createMessage(card: CardID, content: RichText, creator: SocialID): Promise @@ -33,7 +33,7 @@ export interface Client { removeNotificationContext(context: ContextID): Promise updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise - onEvent(event: BroadcastEvent): void + onEvent(event: ResponseEvent): void findMessages(params: FindMessagesParams, queryId?: number): Promise findNotificationContexts(params: FindNotificationContextParams, queryId?: number): Promise diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index a453b8566ef..24d9a1cb852 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -11,57 +11,62 @@ import type { RichText, SocialID, Notification, - BlobID + BlobID, + FindMessagesGroupsParams, + MessagesGroup, + WorkspaceID } from '@hcengineering/communication-types' export interface DbAdapter { - createMessage( - workspace: string, - card: CardID, - content: RichText, - creator: SocialID, - created: Date - ): Promise - removeMessage(id: MessageID): Promise - createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise + createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise + + removeMessage(card: CardID, id: MessageID): Promise + removeMessages(card: CardID, ids: MessageID[]): Promise + + createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID, created: Date): Promise createMessagesGroup( - workspace: string, card: CardID, - startAt: Date, - endAt: Date, blobId: BlobID, + from_id: MessageID, + to_id: MessageID, + from_date: Date, + to_date: Date, count: number ): Promise - createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise - removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise + createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID, created: Date): Promise + + removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise + removeAttachment(message: MessageID, attachment: CardID): Promise - findMessages(workspace: string, query: FindMessagesParams): Promise + findMessages(query: FindMessagesParams): Promise + + findMessagesGroups(query: FindMessagesGroupsParams): Promise createNotification(message: MessageID, context: ContextID): Promise + removeNotification(message: MessageID, context: ContextID): Promise - createContext( - personalWorkspace: string, - workspace: string, - card: CardID, - lastView?: Date, - lastUpdate?: Date - ): Promise + + createContext(personalWorkspace: WorkspaceID, card: CardID, lastView?: Date, lastUpdate?: Date): Promise + updateContext(context: ContextID, update: NotificationContextUpdate): Promise + removeContext(context: ContextID): Promise + findContexts( params: FindNotificationContextParams, - personalWorkspaces: string[], - workspace?: string + personalWorkspaces: WorkspaceID[], + workspace?: WorkspaceID ): 
Promise + findNotifications( params: FindNotificationsParams, - personalWorkspace: string, - workspace?: string + personalWorkspace: WorkspaceID, + workspace?: WorkspaceID ): Promise close(): void diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts deleted file mode 100644 index 2190829c7ce..00000000000 --- a/packages/sdk-types/src/event.ts +++ /dev/null @@ -1,238 +0,0 @@ -import type { - Attachment, - CardID, - ContextID, - Message, - MessageID, - NotificationContext, - NotificationContextUpdate, - Patch, - Reaction, - RichText, - SocialID, - Notification -} from '@hcengineering/communication-types' - -export enum EventType { - CreateMessage = 'createMessage', - RemoveMessage = 'removeMessage', - CreatePatch = 'createPatch', - CreateReaction = 'createReaction', - RemoveReaction = 'removeReaction', - CreateAttachment = 'createAttachment', - RemoveAttachment = 'removeAttachment', - CreateNotification = 'createNotification', - RemoveNotification = 'removeNotification', - CreateNotificationContext = 'createNotificationContext', - RemoveNotificationContext = 'removeNotificationContext', - UpdateNotificationContext = 'updateNotificationContext', - - MessageCreated = 'messageCreated', - MessageRemoved = 'messageRemoved', - PatchCreated = 'patchCreated', - ReactionCreated = 'reactionCreated', - ReactionRemoved = 'reactionRemoved', - AttachmentCreated = 'attachmentCreated', - AttachmentRemoved = 'attachmentRemoved', - NotificationCreated = 'notificationCreated', - NotificationRemoved = 'notificationRemoved', - NotificationContextCreated = 'notificationContextCreated', - NotificationContextRemoved = 'notificationContextRemoved', - NotificationContextUpdated = 'notificationContextUpdated' -} - -export type Event = - | CreateMessageEvent - | RemoveMessageEvent - | CreatePatchEvent - | CreateReactionEvent - | RemoveReactionEvent - | CreateAttachmentEvent - | RemoveAttachmentEvent - | CreateNotificationEvent - | RemoveNotificationEvent - | CreateNotificationContextEvent - | RemoveNotificationContextEvent - | UpdateNotificationContextEvent - -export interface CreateMessageEvent { - type: EventType.CreateMessage - card: CardID - content: RichText - creator: SocialID -} - -export interface RemoveMessageEvent { - type: EventType.RemoveMessage - card: CardID - message: MessageID -} - -export interface CreatePatchEvent { - type: EventType.CreatePatch - card: CardID - message: MessageID - content: RichText - creator: SocialID -} - -export interface CreateReactionEvent { - type: EventType.CreateReaction - card: CardID - message: MessageID - reaction: string - creator: SocialID -} - -export interface RemoveReactionEvent { - type: EventType.RemoveReaction - card: CardID - message: MessageID - reaction: string - creator: SocialID -} - -export interface CreateAttachmentEvent { - type: EventType.CreateAttachment - card: CardID - message: MessageID - attachment: CardID - creator: SocialID -} - -export interface RemoveAttachmentEvent { - type: EventType.RemoveAttachment - card: CardID - message: MessageID - attachment: CardID -} - -export interface CreateNotificationEvent { - type: EventType.CreateNotification - message: MessageID - context: ContextID -} - -export interface RemoveNotificationEvent { - type: EventType.RemoveNotification - message: MessageID - context: ContextID -} - -export interface CreateNotificationContextEvent { - type: EventType.CreateNotificationContext - card: CardID - lastView?: Date - lastUpdate?: Date -} - -export interface RemoveNotificationContextEvent { - type: 
EventType.RemoveNotificationContext - context: ContextID -} - -export interface UpdateNotificationContextEvent { - type: EventType.UpdateNotificationContext - context: ContextID - update: NotificationContextUpdate -} - -export type EventResult = CreateMessageResult | CreateNotificationContextResult | {} - -export interface CreateMessageResult { - id: MessageID -} - -export interface CreateNotificationContextResult { - id: ContextID -} - -//TODO: THINK ABOUT BETTER NAMES -export type BroadcastEvent = - | MessageCreatedEvent - | MessageRemovedEvent - | PatchCreatedEvent - | ReactionCreatedEvent - | ReactionRemovedEvent - | AttachmentCreatedEvent - | AttachmentRemovedEvent - | NotificationCreatedEvent - | NotificationRemovedEvent - | NotificationContextCreatedEvent - | NotificationContextRemovedEvent - | NotificationContextUpdatedEvent - -export interface MessageCreatedEvent { - type: EventType.MessageCreated - message: Message -} - -export interface MessageRemovedEvent { - type: EventType.MessageRemoved - card: CardID - message: MessageID -} - -export interface PatchCreatedEvent { - type: EventType.PatchCreated - card: CardID - patch: Patch -} - -export interface ReactionCreatedEvent { - type: EventType.ReactionCreated - card: CardID - reaction: Reaction -} - -export interface ReactionRemovedEvent { - type: EventType.ReactionRemoved - card: CardID - message: MessageID - reaction: string - creator: SocialID -} - -export interface AttachmentCreatedEvent { - type: EventType.AttachmentCreated - card: CardID - attachment: Attachment -} - -export interface AttachmentRemovedEvent { - type: EventType.AttachmentRemoved - card: CardID - message: MessageID - attachment: CardID -} - -export interface NotificationCreatedEvent { - type: EventType.NotificationCreated - personalWorkspace: string - notification: Notification -} - -export interface NotificationRemovedEvent { - type: EventType.NotificationRemoved - personalWorkspace: string - message: MessageID - context: ContextID -} - -export interface NotificationContextCreatedEvent { - type: EventType.NotificationContextCreated - context: NotificationContext -} - -export interface NotificationContextRemovedEvent { - type: EventType.NotificationContextRemoved - personalWorkspace: string - context: ContextID -} - -export interface NotificationContextUpdatedEvent { - type: EventType.NotificationContextUpdated - personalWorkspace: string - context: ContextID - update: NotificationContextUpdate -} diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 2823f002ade..2a57740ddfd 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -1,6 +1,6 @@ -export * from './db' -export * from './event' -export * from './ws' export * from './client' +export * from './db' export * from './query' +export * from './requestEvent.ts' +export * from './responseEvent.ts' export * from './serverApi' diff --git a/packages/sdk-types/src/requestEvent.ts b/packages/sdk-types/src/requestEvent.ts new file mode 100644 index 00000000000..d2f50edc5f7 --- /dev/null +++ b/packages/sdk-types/src/requestEvent.ts @@ -0,0 +1,146 @@ +import type { + CardID, + ContextID, + MessageID, + NotificationContextUpdate, + RichText, + SocialID, + MessagesGroup +} from '@hcengineering/communication-types' + +export enum RequestEventType { + CreateMessage = 'createMessage', + RemoveMessage = 'removeMessage', + RemoveMessages = 'removeMessages', + CreatePatch = 'createPatch', + CreateReaction = 'createReaction', + RemoveReaction = 'removeReaction', + 
CreateAttachment = 'createAttachment', + RemoveAttachment = 'removeAttachment', + CreateNotification = 'createNotification', + RemoveNotification = 'removeNotification', + CreateNotificationContext = 'createNotificationContext', + RemoveNotificationContext = 'removeNotificationContext', + UpdateNotificationContext = 'updateNotificationContext', + CreateMessagesGroup = 'createMessagesGroup' +} + +export type RequestEvent = + | CreateMessageEvent + | RemoveMessageEvent + | RemoveMessagesEvent + | CreatePatchEvent + | CreateReactionEvent + | RemoveReactionEvent + | CreateAttachmentEvent + | RemoveAttachmentEvent + | CreateNotificationEvent + | RemoveNotificationEvent + | CreateNotificationContextEvent + | RemoveNotificationContextEvent + | UpdateNotificationContextEvent + | CreateMessagesGroupEvent + +export interface CreateMessageEvent { + type: RequestEventType.CreateMessage + card: CardID + content: RichText + creator: SocialID +} + +export interface RemoveMessageEvent { + type: RequestEventType.RemoveMessage + card: CardID + message: MessageID +} + +export interface RemoveMessagesEvent { + type: RequestEventType.RemoveMessages + card: CardID + messages: MessageID[] + silent?: boolean +} + +export interface CreatePatchEvent { + type: RequestEventType.CreatePatch + card: CardID + message: MessageID + content: RichText + creator: SocialID +} + +export interface CreateReactionEvent { + type: RequestEventType.CreateReaction + card: CardID + message: MessageID + reaction: string + creator: SocialID +} + +export interface RemoveReactionEvent { + type: RequestEventType.RemoveReaction + card: CardID + message: MessageID + reaction: string + creator: SocialID +} + +export interface CreateAttachmentEvent { + type: RequestEventType.CreateAttachment + card: CardID + message: MessageID + attachment: CardID + creator: SocialID +} + +export interface RemoveAttachmentEvent { + type: RequestEventType.RemoveAttachment + card: CardID + message: MessageID + attachment: CardID +} + +export interface CreateNotificationEvent { + type: RequestEventType.CreateNotification + message: MessageID + context: ContextID +} + +export interface RemoveNotificationEvent { + type: RequestEventType.RemoveNotification + message: MessageID + context: ContextID +} + +export interface CreateNotificationContextEvent { + type: RequestEventType.CreateNotificationContext + card: CardID + lastView?: Date + lastUpdate?: Date +} + +export interface RemoveNotificationContextEvent { + type: RequestEventType.RemoveNotificationContext + context: ContextID +} + +export interface UpdateNotificationContextEvent { + type: RequestEventType.UpdateNotificationContext + context: ContextID + update: NotificationContextUpdate +} + +export interface CreateMessagesGroupEvent { + type: RequestEventType.CreateMessagesGroup + group: MessagesGroup +} + +export type EventResult = CreateMessageResult | CreateNotificationContextResult | {} + +export interface CreateMessageResult { + id: MessageID +} + +export interface CreateNotificationContextResult { + id: ContextID +} diff --git a/packages/sdk-types/src/responseEvent.ts b/packages/sdk-types/src/responseEvent.ts new file mode 100644 index 00000000000..f6b41d09fac --- /dev/null +++ b/packages/sdk-types/src/responseEvent.ts @@ -0,0 +1,125 @@ +import type { + Attachment, + CardID, + ContextID, + Message, + MessageID, + NotificationContext, + NotificationContextUpdate, + Patch, + Reaction, + SocialID, + Notification +} from '@hcengineering/communication-types' + +export enum ResponseEventType { + MessageCreated 
= 'messageCreated', + MessageRemoved = 'messageRemoved', + MessagesRemoved = 'messagesRemoved', + PatchCreated = 'patchCreated', + ReactionCreated = 'reactionCreated', + ReactionRemoved = 'reactionRemoved', + AttachmentCreated = 'attachmentCreated', + AttachmentRemoved = 'attachmentRemoved', + NotificationCreated = 'notificationCreated', + NotificationRemoved = 'notificationRemoved', + NotificationContextCreated = 'notificationContextCreated', + NotificationContextRemoved = 'notificationContextRemoved', + NotificationContextUpdated = 'notificationContextUpdated' +} + +export type ResponseEvent = + | MessageCreatedEvent + | MessageRemovedEvent + | MessagesRemovedEvent + | PatchCreatedEvent + | ReactionCreatedEvent + | ReactionRemovedEvent + | AttachmentCreatedEvent + | AttachmentRemovedEvent + | NotificationCreatedEvent + | NotificationRemovedEvent + | NotificationContextCreatedEvent + | NotificationContextRemovedEvent + | NotificationContextUpdatedEvent + +export interface MessageCreatedEvent { + type: ResponseEventType.MessageCreated + message: Message +} + +export interface MessageRemovedEvent { + type: ResponseEventType.MessageRemoved + card: CardID + message: MessageID +} + +export interface MessagesRemovedEvent { + type: ResponseEventType.MessagesRemoved + card: CardID + messages: MessageID[] +} + +export interface PatchCreatedEvent { + type: ResponseEventType.PatchCreated + card: CardID + patch: Patch +} + +export interface ReactionCreatedEvent { + type: ResponseEventType.ReactionCreated + card: CardID + reaction: Reaction +} + +export interface ReactionRemovedEvent { + type: ResponseEventType.ReactionRemoved + card: CardID + message: MessageID + reaction: string + creator: SocialID +} + +export interface AttachmentCreatedEvent { + type: ResponseEventType.AttachmentCreated + card: CardID + attachment: Attachment +} + +export interface AttachmentRemovedEvent { + type: ResponseEventType.AttachmentRemoved + card: CardID + message: MessageID + attachment: CardID +} + +export interface NotificationCreatedEvent { + type: ResponseEventType.NotificationCreated + personalWorkspace: string + notification: Notification +} + +export interface NotificationRemovedEvent { + type: ResponseEventType.NotificationRemoved + personalWorkspace: string + message: MessageID + context: ContextID +} + +export interface NotificationContextCreatedEvent { + type: ResponseEventType.NotificationContextCreated + context: NotificationContext +} + +export interface NotificationContextRemovedEvent { + type: ResponseEventType.NotificationContextRemoved + personalWorkspace: string + context: ContextID +} + +export interface NotificationContextUpdatedEvent { + type: ResponseEventType.NotificationContextUpdated + personalWorkspace: string + context: ContextID + update: NotificationContextUpdate +} diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 77bc2ea4855..4a9d1f918d5 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -1,17 +1,25 @@ -import type { FindMessagesParams, Message, SocialID } from '@hcengineering/communication-types' +import type { + FindMessagesGroupsParams, + FindMessagesParams, + Message, + MessagesGroup, + SocialID, + WorkspaceID +} from '@hcengineering/communication-types' -import type { EventResult, Event } from './event' +import type { EventResult, RequestEvent } from './requestEvent.ts' export interface ConnectionInfo { sessionId: string - personalWorkspace: string - socialId: SocialID + personalWorkspace: 
WorkspaceID + socialIds: SocialID[] } export interface ServerApi { findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise + findMessagesGroups(info: ConnectionInfo, params: FindMessagesGroupsParams): Promise - event(info: ConnectionInfo, event: Event): Promise + event(info: ConnectionInfo, event: RequestEvent): Promise closeSession(sessionId: string): Promise unsubscribeQuery(info: ConnectionInfo, id: number): Promise diff --git a/packages/sdk-types/src/ws.ts b/packages/sdk-types/src/ws.ts deleted file mode 100644 index ed91b811b7a..00000000000 --- a/packages/sdk-types/src/ws.ts +++ /dev/null @@ -1,17 +0,0 @@ -export type RequestId = string - -export interface Response { - id?: RequestId - result?: any - error?: string //TODO: Use platform error -} - -export interface Request { - id?: RequestId - method: string - params: any[] -} - -export interface HelloRequest extends Request { - binary?: boolean -} diff --git a/packages/server-core/src/eventProcessor.ts b/packages/server-core/src/eventProcessor.ts deleted file mode 100644 index 6efeb0a81fa..00000000000 --- a/packages/server-core/src/eventProcessor.ts +++ /dev/null @@ -1,285 +0,0 @@ -import { type Message, type Patch, type Reaction, type Attachment } from '@hcengineering/communication-types' -import { - EventType, - type CreateAttachmentEvent, - type AttachmentCreatedEvent, - type CreateMessageEvent, - type MessageCreatedEvent, - type CreatePatchEvent, - type PatchCreatedEvent, - type CreateReactionEvent, - type ReactionCreatedEvent, - type Event, - type BroadcastEvent, - type RemoveAttachmentEvent, - type AttachmentRemovedEvent, - type RemoveMessageEvent, - type MessageRemovedEvent, - type RemoveReactionEvent, - type ReactionRemovedEvent, - type EventResult, - type DbAdapter, - type CreateNotificationEvent, - type RemoveNotificationEvent, - type CreateNotificationContextEvent, - type RemoveNotificationContextEvent, - type UpdateNotificationContextEvent, - type NotificationRemovedEvent, - type NotificationContextCreatedEvent, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent -} from '@hcengineering/communication-sdk-types' - -export type Result = { - broadcastEvent?: BroadcastEvent - result: EventResult -} - -export class EventProcessor { - constructor( - private readonly db: DbAdapter, - private readonly workspace: string - ) {} - - async process(personalWorkspace: string, event: Event): Promise { - switch (event.type) { - case EventType.CreateMessage: - return await this.createMessage(personalWorkspace, event) - case EventType.RemoveMessage: - return await this.removeMessage(personalWorkspace, event) - case EventType.CreatePatch: - return await this.createPatch(personalWorkspace, event) - case EventType.CreateReaction: - return await this.createReaction(personalWorkspace, event) - case EventType.RemoveReaction: - return await this.removeReaction(personalWorkspace, event) - case EventType.CreateAttachment: - return await this.createAttachment(personalWorkspace, event) - case EventType.RemoveAttachment: - return await this.removeAttachment(personalWorkspace, event) - case EventType.CreateNotification: - return await this.createNotification(personalWorkspace, event) - case EventType.RemoveNotification: - return await this.removeNotification(personalWorkspace, event) - case EventType.CreateNotificationContext: - return await this.createNotificationContext(personalWorkspace, event) - case EventType.RemoveNotificationContext: - return await 
this.removeNotificationContext(personalWorkspace, event) - case EventType.UpdateNotificationContext: - return await this.updateNotificationContext(personalWorkspace, event) - } - } - - private async createMessage(_personalWorkspace: string, event: CreateMessageEvent): Promise { - const created = new Date() - const id = await this.db.createMessage(this.workspace, event.card, event.content, event.creator, created) - const message: Message = { - id, - card: event.card, - content: event.content, - creator: event.creator, - created: created, - edited: created, - reactions: [], - attachments: [] - } - const broadcastEvent: MessageCreatedEvent = { - type: EventType.MessageCreated, - message - } - return { - broadcastEvent, - result: { id } - } - } - - private async createPatch(_personalWorkspace: string, event: CreatePatchEvent): Promise { - const created = new Date() - await this.db.createPatch(event.message, event.content, event.creator, created) - - const patch: Patch = { - message: event.message, - content: event.content, - creator: event.creator, - created: created - } - const broadcastEvent: PatchCreatedEvent = { - type: EventType.PatchCreated, - card: event.card, - patch - } - return { - broadcastEvent, - result: {} - } - } - - private async removeMessage(_personalWorkspace: string, event: RemoveMessageEvent): Promise { - await this.db.removeMessage(event.message) - - const broadcastEvent: MessageRemovedEvent = { - type: EventType.MessageRemoved, - card: event.card, - message: event.message - } - - return { - broadcastEvent, - result: {} - } - } - - private async createReaction(_personalWorkspace: string, event: CreateReactionEvent): Promise { - const created = new Date() - await this.db.createReaction(event.message, event.reaction, event.creator, created) - - const reaction: Reaction = { - message: event.message, - reaction: event.reaction, - creator: event.creator, - created: created - } - const broadcastEvent: ReactionCreatedEvent = { - type: EventType.ReactionCreated, - card: event.card, - reaction - } - return { - broadcastEvent, - result: {} - } - } - - private async removeReaction(_personalWorkspace: string, event: RemoveReactionEvent): Promise { - await this.db.removeReaction(event.message, event.reaction, event.creator) - const broadcastEvent: ReactionRemovedEvent = { - type: EventType.ReactionRemoved, - card: event.card, - message: event.message, - reaction: event.reaction, - creator: event.creator - } - return { - broadcastEvent, - result: {} - } - } - - private async createAttachment(_personalWorkspace: string, event: CreateAttachmentEvent): Promise { - const created = new Date() - await this.db.createAttachment(event.message, event.card, event.creator, created) - - const attachment: Attachment = { - message: event.message, - card: event.card, - creator: event.creator, - created: created - } - const broadcastEvent: AttachmentCreatedEvent = { - type: EventType.AttachmentCreated, - card: event.card, - attachment - } - - return { - broadcastEvent, - result: {} - } - } - - private async removeAttachment(_personalWorkspace: string, event: RemoveAttachmentEvent): Promise { - await this.db.removeAttachment(event.message, event.card) - const broadcastEvent: AttachmentRemovedEvent = { - type: EventType.AttachmentRemoved, - card: event.card, - message: event.message, - attachment: event.attachment - } - return { - broadcastEvent, - result: {} - } - } - - private async createNotification(_personalWorkspace: string, event: CreateNotificationEvent): Promise { - await 
this.db.createNotification(event.message, event.context) - - return { - result: {} - } - } - - private async removeNotification(personalWorkspace: string, event: RemoveNotificationEvent): Promise { - await this.db.removeNotification(event.message, event.context) - - const broadcastEvent: NotificationRemovedEvent = { - type: EventType.NotificationRemoved, - personalWorkspace: personalWorkspace, - message: event.message, - context: event.context - } - return { - broadcastEvent, - result: {} - } - } - - private async createNotificationContext( - personalWorkspace: string, - event: CreateNotificationContextEvent - ): Promise { - const id = await this.db.createContext( - personalWorkspace, - this.workspace, - event.card, - event.lastView, - event.lastUpdate - ) - const broadcastEvent: NotificationContextCreatedEvent = { - type: EventType.NotificationContextCreated, - context: { - id, - workspace: this.workspace, - personalWorkspace: personalWorkspace, - card: event.card, - lastView: event.lastView, - lastUpdate: event.lastUpdate - } - } - return { - broadcastEvent, - result: { id } - } - } - - private async removeNotificationContext( - personalWorkspace: string, - event: RemoveNotificationContextEvent - ): Promise { - await this.db.removeContext(event.context) - const broadcastEvent: NotificationContextRemovedEvent = { - type: EventType.NotificationContextRemoved, - personalWorkspace: personalWorkspace, - context: event.context - } - return { - broadcastEvent, - result: {} - } - } - - async updateNotificationContext(personalWorkspace: string, event: UpdateNotificationContextEvent): Promise { - await this.db.updateContext(event.context, event.update) - - const broadcastEvent: NotificationContextUpdatedEvent = { - type: EventType.NotificationContextUpdated, - personalWorkspace: personalWorkspace, - context: event.context, - update: event.update - } - return { - broadcastEvent, - result: {} - } - } -} diff --git a/packages/server-ws/package.json b/packages/server-ws/package.json deleted file mode 100644 index 41c68d7427a..00000000000 --- a/packages/server-ws/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "name": "@hcengineering/communication-server-ws", - "version": "0.1.7", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "./types/index.d.ts", - "type": "module", - "files": [ - "dist/index.js", - "types/**/*.d.ts" - ], - "devDependencies": { - "@types/bun": "^1.1.14", - "@types/express": "^5.0.0", - "@types/cors": "^2.8.17", - "@types/ws": "^8.5.13" - }, - "dependencies": { - "@hcengineering/server-token": "^0.6.429", - "@hcengineering/communication-cockroach": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-types": "workspace:*", - "cors": "^2.8.5", - "dotenv": "^16.4.7", - "express": "^4.21.2", - "msgpackr": "^1.11.2", - "ws": "^8.18.0" - }, - "peerDependencies": { - "typescript": "^5.6.3" - }, - "publishConfig": { - "registry": "https://npm.pkg.github.com" - } -} diff --git a/packages/server-ws/src/config.ts b/packages/server-ws/src/config.ts deleted file mode 100644 index 2f835bc72a3..00000000000 --- a/packages/server-ws/src/config.ts +++ /dev/null @@ -1,29 +0,0 @@ -interface Config { - Port: number - DbUrl: string - AccountsUrl: string - Secret: string -} - -const parseNumber = (str: string | undefined): number | undefined => (str !== undefined ? Number(str) : undefined) - -const config: Config = (() => { - const params: Partial = { - Port: parseNumber(process.env.PORT) ?? 
8090, - DbUrl: process.env.DB_URL, - AccountsUrl: process.env.ACCOUNTS_URL, - Secret: process.env.SECRET - } - - const missingEnv = Object.entries(params) - .filter(([, value]) => value === undefined) - .map(([key]) => key) - - if (missingEnv.length > 0) { - throw Error(`Missing env variables: ${missingEnv.join(', ')}`) - } - - return params as Config -})() - -export default config diff --git a/packages/server-ws/src/eventProcessor.ts b/packages/server-ws/src/eventProcessor.ts deleted file mode 100644 index 512fb52ef54..00000000000 --- a/packages/server-ws/src/eventProcessor.ts +++ /dev/null @@ -1,280 +0,0 @@ -import { type Message, type Patch, type Reaction, type Attachment } from '@hcengineering/communication-types' -import { - EventType, - type CreateAttachmentEvent, - type AttachmentCreatedEvent, - type CreateMessageEvent, - type MessageCreatedEvent, - type CreatePatchEvent, - type PatchCreatedEvent, - type CreateReactionEvent, - type ReactionCreatedEvent, - type Event, - type BroadcastEvent, - type RemoveAttachmentEvent, - type AttachmentRemovedEvent, - type RemoveMessageEvent, - type MessageRemovedEvent, - type RemoveReactionEvent, - type ReactionRemovedEvent, - type EventResult, - type DbAdapter, - type CreateNotificationEvent, - type RemoveNotificationEvent, - type CreateNotificationContextEvent, - type RemoveNotificationContextEvent, - type UpdateNotificationContextEvent, - type NotificationRemovedEvent, - type NotificationContextCreatedEvent, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent -} from '@hcengineering/communication-sdk-types' - -type Result = { - broadcastEvent?: BroadcastEvent - result: EventResult -} - -export class EventProcessor { - constructor( - private readonly db: DbAdapter, - private readonly workspace: string, - private readonly personalWorkspace: string - ) {} - - async process(event: Event): Promise { - switch (event.type) { - case EventType.CreateMessage: - return await this.createMessage(event) - case EventType.RemoveMessage: - return await this.removeMessage(event) - case EventType.CreatePatch: - return await this.createPatch(event) - case EventType.CreateReaction: - return await this.createReaction(event) - case EventType.RemoveReaction: - return await this.removeReaction(event) - case EventType.CreateAttachment: - return await this.createAttachment(event) - case EventType.RemoveAttachment: - return await this.removeAttachment(event) - case EventType.CreateNotification: - return await this.createNotification(event) - case EventType.RemoveNotification: - return await this.removeNotification(event) - case EventType.CreateNotificationContext: - return await this.createNotificationContext(event) - case EventType.RemoveNotificationContext: - return await this.removeNotificationContext(event) - case EventType.UpdateNotificationContext: - return await this.updateNotificationContext(event) - } - } - - private async createMessage(event: CreateMessageEvent): Promise { - const created = new Date() - const id = await this.db.createMessage(this.workspace, event.card, event.content, event.creator, created) - const message: Message = { - id, - card: event.card, - content: event.content, - creator: event.creator, - created: created, - edited: created, - reactions: [], - attachments: [] - } - const broadcastEvent: MessageCreatedEvent = { - type: EventType.MessageCreated, - message - } - return { - broadcastEvent, - result: { id } - } - } - - private async createPatch(event: CreatePatchEvent): Promise { - const created = new Date() - await 
this.db.createPatch(event.message, event.content, event.creator, created) - - const patch: Patch = { - message: event.message, - content: event.content, - creator: event.creator, - created: created - } - const broadcastEvent: PatchCreatedEvent = { - type: EventType.PatchCreated, - card: event.card, - patch - } - return { - broadcastEvent, - result: {} - } - } - - private async removeMessage(event: RemoveMessageEvent): Promise { - await this.db.removeMessage(event.message) - - const broadcastEvent: MessageRemovedEvent = { - type: EventType.MessageRemoved, - card: event.card, - message: event.message - } - - return { - broadcastEvent, - result: {} - } - } - - private async createReaction(event: CreateReactionEvent): Promise { - const created = new Date() - await this.db.createReaction(event.message, event.reaction, event.creator, created) - - const reaction: Reaction = { - message: event.message, - reaction: event.reaction, - creator: event.creator, - created: created - } - const broadcastEvent: ReactionCreatedEvent = { - type: EventType.ReactionCreated, - card: event.card, - reaction - } - return { - broadcastEvent, - result: {} - } - } - - private async removeReaction(event: RemoveReactionEvent): Promise { - await this.db.removeReaction(event.message, event.reaction, event.creator) - const broadcastEvent: ReactionRemovedEvent = { - type: EventType.ReactionRemoved, - card: event.card, - message: event.message, - reaction: event.reaction, - creator: event.creator - } - return { - broadcastEvent, - result: {} - } - } - - private async createAttachment(event: CreateAttachmentEvent): Promise { - const created = new Date() - await this.db.createAttachment(event.message, event.card, event.creator, created) - - const attachment: Attachment = { - message: event.message, - card: event.card, - creator: event.creator, - created: created - } - const broadcastEvent: AttachmentCreatedEvent = { - type: EventType.AttachmentCreated, - card: event.card, - attachment - } - - return { - broadcastEvent, - result: {} - } - } - - private async removeAttachment(event: RemoveAttachmentEvent): Promise { - await this.db.removeAttachment(event.message, event.card) - const broadcastEvent: AttachmentRemovedEvent = { - type: EventType.AttachmentRemoved, - card: event.card, - message: event.message, - attachment: event.attachment - } - return { - broadcastEvent, - result: {} - } - } - - private async createNotification(event: CreateNotificationEvent): Promise { - await this.db.createNotification(event.message, event.context) - - return { - result: {} - } - } - - private async removeNotification(event: RemoveNotificationEvent): Promise { - await this.db.removeNotification(event.message, event.context) - - const broadcastEvent: NotificationRemovedEvent = { - type: EventType.NotificationRemoved, - personalWorkspace: this.personalWorkspace, - message: event.message, - context: event.context - } - return { - broadcastEvent, - result: {} - } - } - - private async createNotificationContext(event: CreateNotificationContextEvent): Promise { - const id = await this.db.createContext( - this.personalWorkspace, - this.workspace, - event.card, - event.lastView, - event.lastUpdate - ) - const broadcastEvent: NotificationContextCreatedEvent = { - type: EventType.NotificationContextCreated, - context: { - id, - workspace: this.workspace, - personalWorkspace: this.personalWorkspace, - card: event.card, - lastView: event.lastView, - lastUpdate: event.lastUpdate - } - } - return { - broadcastEvent, - result: { id } - } - } - - private async 
removeNotificationContext(event: RemoveNotificationContextEvent): Promise { - await this.db.removeContext(event.context) - const broadcastEvent: NotificationContextRemovedEvent = { - type: EventType.NotificationContextRemoved, - personalWorkspace: this.personalWorkspace, - context: event.context - } - return { - broadcastEvent, - result: {} - } - } - - async updateNotificationContext(event: UpdateNotificationContextEvent): Promise { - await this.db.updateContext(event.context, event.update) - - const broadcastEvent: NotificationContextUpdatedEvent = { - type: EventType.NotificationContextUpdated, - personalWorkspace: this.personalWorkspace, - context: event.context, - update: event.update - } - return { - broadcastEvent, - result: {} - } - } -} diff --git a/packages/server-ws/src/index.ts b/packages/server-ws/src/index.ts deleted file mode 100644 index f959c1eb964..00000000000 --- a/packages/server-ws/src/index.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { config } from 'dotenv' - -import { main } from './main.ts' - -config() -void main() diff --git a/packages/server-ws/src/main.ts b/packages/server-ws/src/main.ts deleted file mode 100644 index 78f8af01746..00000000000 --- a/packages/server-ws/src/main.ts +++ /dev/null @@ -1,142 +0,0 @@ -import WebSocket, { WebSocketServer, type RawData } from 'ws' -import { createDbAdapter } from '@hcengineering/communication-cockroach' -import type { Response, HelloRequest } from '@hcengineering/communication-sdk-types' -import { decodeToken } from '@hcengineering/server-token' -import type { SocialID } from '@hcengineering/communication-types' - -import type { ConnectionInfo } from './types.ts' -import { deserializeRequest, serializeResponse } from './utils/serialize.ts' -import config from './config' -import { listen, createServer } from './server/server' -import { ConsoleLogger } from './utils/logger' -import { Manager } from './manager.ts' -import type { Session } from './session.ts' -import { getWorkspaceInfo } from './utils/account.ts' - -const logger = new ConsoleLogger() - -const pingTimeout = 10000 -const requestTimeout = 60 * 1000 - -//TODO: use platform errors -const UNAUTHORIZED_ERROR = 'Unauthorized' -const UNKNOWN_ERROR = 'Unknown' - -export const main = async (): Promise => { - const server = listen(createServer(), config.Port) - const wss = new WebSocketServer({ noServer: true }) - const db = await createDbAdapter(config.DbUrl) - const manager = new Manager(db) - - server.on('upgrade', async (req, socket, head) => { - const url = new URL('http://localhost' + (req.url ?? '')) - const token = url.searchParams.get('token') ?? 
'' - - try { - const info = await validateToken(token) - wss.handleUpgrade(req, socket, head, (ws) => { - handleConnection(ws, manager, info) - }) - } catch (error: any) { - logger.error('Invalid token', { error }) - wss.handleUpgrade(req, socket, head, (ws) => { - const resp: Response = { - result: UNAUTHORIZED_ERROR, - error - } - sendResponse(ws, resp, false) - socket.destroy() - }) - } - }) - - const shutdown = (): void => { - db.close() - server.close(() => { - process.exit() - }) - } - - process.on('SIGINT', shutdown) - process.on('SIGTERM', shutdown) - process.on('uncaughtException', (e) => { - console.error(e) - }) - process.on('unhandledRejection', (e) => { - console.error(e) - }) -} - -function handleConnection(ws: WebSocket, manager: Manager, info: ConnectionInfo) { - const session = manager.createSession(ws, info) - const pingInterval = setInterval(() => { - const now = Date.now() - const lastRequestDiff = now - session.lastRequest - - if (lastRequestDiff > requestTimeout) { - console.warn(`Connection inactive for ${lastRequestDiff}ms, closing`, info) - manager.closeSession(ws, info.workspace) - ws.close() - clearInterval(pingInterval) - return - } - sendResponse(ws, { id: 'ping', result: 'ping' }, session.binary) - }, pingTimeout) - - ws.on('message', async (raw: RawData) => { - try { - await handleRequest(raw, session, ws) - } catch (err: any) { - logger.error('Error during message handling', { err }) - } - }) - - ws.on('close', () => { - manager.closeSession(ws, info.workspace) - clearInterval(pingInterval) - }) - - ws.on('error', (error) => { - logger.log('Error', { error, ...info }) - }) -} - -function sendResponse(ws: WebSocket, resp: Response, binary: boolean) { - ws.send(serializeResponse(resp, binary), { binary }) -} - -async function handleRequest(raw: RawData, session: Session, ws: WebSocket) { - const request = deserializeRequest(raw, session.binary) - if (request === undefined) return - - if (request.id === 'hello') { - const hello = request as HelloRequest - session.binary = hello.binary ?? 
false - sendResponse(ws, { id: 'hello', result: 'hello' }, false) - return - } - - try { - const fn = (session as any)[request.method] - const params = [...request.params] - const result = await fn.apply(session, params) - const response: Response = { id: request.id, result } - sendResponse(ws, response, session.binary) - } catch (err: any) { - const response: Response = { id: request.id, result: UNKNOWN_ERROR, error: err } - sendResponse(ws, response, session.binary) - } -} - -//TODO: decodeToken or authorize with account service or both -async function validateToken(token: string): Promise { - const { email } = decodeToken(token, true, config.Secret) - const info = await getWorkspaceInfo(token) - - if (info === undefined) { - throw new Error('No workspace info') - } - - const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f7' - return { workspace: info.workspaceId, personalWorkspace, socialId: email as SocialID } -} diff --git a/packages/server-ws/src/manager.ts b/packages/server-ws/src/manager.ts deleted file mode 100644 index 2fbb7eea5c0..00000000000 --- a/packages/server-ws/src/manager.ts +++ /dev/null @@ -1,209 +0,0 @@ -import WebSocket from 'ws' -import { - type BroadcastEvent, - type DbAdapter, - EventType, - type NotificationContextCreatedEvent, - type NotificationCreatedEvent, - type Response -} from '@hcengineering/communication-sdk-types' -import type { - FindMessagesParams, - FindNotificationContextParams, - FindNotificationsParams, - MessageID -} from '@hcengineering/communication-types' - -import { Session } from './session' -import type { ConnectionInfo } from './types' -import { serializeResponse } from './utils/serialize.ts' -import { Triggers } from './triggers.ts' - -type QueryId = number | string -type QueryType = 'message' | 'notification' | 'context' - -type SessionInfo = { - session: Session - ws: WebSocket - messageQueries: Map - notificationQueries: Map - contextQueries: Map -} - -export class Manager { - private sessionsByWorkspace: Map = new Map() - private triggers: Triggers - private lastSessionId: number = 0 - - constructor(private readonly db: DbAdapter) { - this.triggers = new Triggers(db) - } - - createSession(ws: WebSocket, info: ConnectionInfo): Session { - const current = this.sessionsByWorkspace.get(info.workspace) ?? [] - this.lastSessionId++ - const session = new Session(this.lastSessionId, info, this.db, this) - current.push({ session, ws, messageQueries: new Map(), notificationQueries: new Map(), contextQueries: new Map() }) - this.sessionsByWorkspace.set(info.workspace, current) - return session - } - - closeSession(ws: WebSocket, workspace: string): void { - const sessions = this.sessionsByWorkspace.get(workspace) ?? [] - if (sessions.length === 0) return - const newSessions = sessions.filter((it) => it.ws !== ws) - if (newSessions.length === 0) { - this.sessionsByWorkspace.delete(workspace) - } else { - this.sessionsByWorkspace.set(workspace, newSessions) - } - } - - getSessionInfo(sessionId: number, workspace: string): SessionInfo | undefined { - const sessions = this.sessionsByWorkspace.get(workspace) ?? 
[] - return sessions.find((it) => it.session.id === sessionId) - } - - subscribeQuery( - sessionId: number, - workspace: string, - type: QueryType, - queryId: number, - params: Record - ): void { - const info = this.getSessionInfo(sessionId, workspace) - if (info == null) return - - if (type === 'message') { - info.messageQueries.set(queryId, params) - } else if (type === 'notification') { - info.notificationQueries.set(queryId, params) - } else if (type === 'context') { - info.contextQueries.set(queryId, params) - } - } - - unsubscribeQuery(sessionId: number, workspace: string, queryId: number): void { - const info = this.getSessionInfo(sessionId, workspace) - if (info == null) return - - info.messageQueries.delete(queryId) - info.notificationQueries.delete(queryId) - info.contextQueries.delete(queryId) - } - - async next(event: BroadcastEvent, workspace: string): Promise { - await this.broadcast(event, workspace) - const derived = await this.triggers.process(event, workspace) - const derivedPromises: Promise[] = [] - for (const d of derived) { - derivedPromises.push(this.next(d, workspace)) - } - await Promise.all(derivedPromises) - } - - private async broadcast(event: BroadcastEvent, workspace: string): Promise { - const sessions = this.sessionsByWorkspace.get(workspace) ?? [] - const response: Response = { result: event } - for (const session of sessions) { - const msg = serializeResponse(response, session.session.binary) - if (this.match(event, session)) { - session.ws.send(msg) - } - } - } - - private match(event: BroadcastEvent, info: SessionInfo): boolean { - switch (event.type) { - case EventType.MessageCreated: - return this.matchMessagesQuery( - { id: event.message.id, card: event.message.card }, - Array.from(info.messageQueries.values()) - ) - case EventType.PatchCreated: - return this.matchMessagesQuery( - { card: event.card, id: event.patch.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.MessageRemoved: - return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.ReactionCreated: - return this.matchMessagesQuery( - { card: event.card, id: event.reaction.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.ReactionRemoved: - return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.AttachmentCreated: - return this.matchMessagesQuery( - { card: event.card, id: event.attachment.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.AttachmentRemoved: - return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) - ) - case EventType.NotificationCreated: - return ( - info.session.info.personalWorkspace === event.personalWorkspace && - this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) - ) - case EventType.NotificationRemoved: - return info.session.info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 - case EventType.NotificationContextCreated: - return ( - info.session.info.personalWorkspace === event.context.personalWorkspace && - this.matchContextQuery(event, Array.from(info.contextQueries.values())) - ) - case EventType.NotificationContextRemoved: - return info.session.info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 - case EventType.NotificationContextUpdated: - return 
info.session.info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 - } - } - - private matchMessagesQuery(params: { id?: MessageID; card?: string }, queries: FindMessagesParams[]): boolean { - if (queries.length === 0) return false - - for (const query of queries) { - if (query.id != null && query.id !== params.id) continue - if (query.card != null && query.card !== params.card) continue - return true - } - - return false - } - - private matchNotificationQuery(event: NotificationCreatedEvent, queries: FindNotificationsParams[]): boolean { - if (queries.length === 0) return false - - for (const query of queries) { - if (query.context != null && query.context !== event.notification.context) continue - if (query.message != null && query.message !== event.notification.message.id) continue - if (query.read != null && query.read !== event.notification.read) continue - if (query.archived != null && query.archived !== event.notification.archived) continue - return true - } - - return false - } - - private matchContextQuery(event: NotificationContextCreatedEvent, queries: FindNotificationContextParams[]): boolean { - if (queries.length === 0) return false - - for (const query of queries) { - if (query.id != null && query.id !== event.context.id) continue - if (query.card != null && query.card !== event.context.card) continue - return true - } - - return false - } -} diff --git a/packages/server-ws/src/server/error.ts b/packages/server-ws/src/server/error.ts deleted file mode 100644 index 55af5f0946f..00000000000 --- a/packages/server-ws/src/server/error.ts +++ /dev/null @@ -1,8 +0,0 @@ -export class ApiError extends Error { - constructor ( - readonly code: string, - readonly message: string - ) { - super(message) - } -} \ No newline at end of file diff --git a/packages/server-ws/src/server/server.ts b/packages/server-ws/src/server/server.ts deleted file mode 100644 index f26916359e8..00000000000 --- a/packages/server-ws/src/server/server.ts +++ /dev/null @@ -1,24 +0,0 @@ -import cors from 'cors' -import express, { type Express } from 'express' -import { Server } from 'http' - -export function createServer (): Express { - const app = express() - - app.use(cors()) - app.use(express.json()) - - app.use((_req, res, _next) => { - res.status(404).send({ message: 'Not found' }) - }) - - return app -} - -export function listen (e: Express, port: number, host?: string): Server { - const cb = (): void => { - console.log(`Communication server has been started at ${host ?? '*'}:${port}`) - } - - return host !== undefined ? 
e.listen(port, host, cb) : e.listen(port, cb) -} diff --git a/packages/server-ws/src/session.ts b/packages/server-ws/src/session.ts deleted file mode 100644 index 246294ded37..00000000000 --- a/packages/server-ws/src/session.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type { DbAdapter, Event, EventResult } from '@hcengineering/communication-sdk-types' -import type { - FindMessagesParams, - FindNotificationContextParams, - FindNotificationsParams, - Message, - Notification, - NotificationContext -} from '@hcengineering/communication-types' - -import type { ConnectionInfo } from './types' -import { EventProcessor } from './eventProcessor.ts' -import type { Manager } from './manager.ts' - -export class Session { - binary: boolean = false - lastRequest: number = Date.now() - - private readonly eventProcessor: EventProcessor - - constructor( - readonly id: number, - readonly info: ConnectionInfo, - private readonly db: DbAdapter, - private readonly manager: Manager - ) { - this.eventProcessor = new EventProcessor(db, info.workspace, info.personalWorkspace) - } - - ping(): string { - this.lastRequest = Date.now() - return 'pong' - } - - async findMessages(params: FindMessagesParams, queryId?: number): Promise { - const result = await this.db.findMessages(this.info.workspace, params) - if (queryId != null) { - this.manager.subscribeQuery(this.id, this.info.workspace, 'message', queryId, params) - } - return result - } - - async unsubscribeQuery(id: number): Promise { - this.manager.unsubscribeQuery(this.id, this.info.workspace, id) - } - - async findNotifications(params: FindNotificationsParams, queryId?: number): Promise { - //TODO: do we need filter by workspace by default? - const result = await this.db.findNotifications(params, this.info.personalWorkspace) - if (queryId != null) { - this.manager.subscribeQuery(this.id, this.info.workspace, 'notification', queryId, params) - } - return result - } - - async findNotificationContexts( - params: FindNotificationContextParams, - queryId?: number - ): Promise { - //TODO: do we need filter by workspace by default? 
- const result = await this.db.findContexts(params, [this.info.personalWorkspace]) - if (queryId != null) { - this.manager.subscribeQuery(this.id, this.info.workspace, 'context', queryId, params) - } - - return result - } - - async event(event: Event): Promise { - const { result, broadcastEvent } = await this.eventProcessor.process(event) - if (broadcastEvent !== undefined) { - void this.manager.next(broadcastEvent, this.info.workspace) - } - return result - } -} diff --git a/packages/server-ws/src/triggers.ts b/packages/server-ws/src/triggers.ts deleted file mode 100644 index cf512903d44..00000000000 --- a/packages/server-ws/src/triggers.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { - type BroadcastEvent, - type DbAdapter, - EventType, - type MessageCreatedEvent, - type NotificationContextCreatedEvent, - type NotificationCreatedEvent -} from '@hcengineering/communication-sdk-types' -import type { NotificationContext, ContextID, CardID } from '@hcengineering/communication-types' - -export class Triggers { - constructor(private readonly db: DbAdapter) {} - - async process(event: BroadcastEvent, workspace: string): Promise { - switch (event.type) { - case EventType.MessageCreated: - return this.createNotifications(event, workspace) - } - - return [] - } - - private async createNotifications(event: MessageCreatedEvent, workspace: string): Promise { - const card = event.message.card as any as CardID - const subscribedPersonalWorkspaces = [ - 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', - 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' - ] - - const res: BroadcastEvent[] = [] - const contexts = await this.db.findContexts({ card }, [], workspace) - - res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) - - for (const personalWorkspace of subscribedPersonalWorkspaces) { - const existsContext = contexts.find( - (it) => it.card === card && it.personalWorkspace === personalWorkspace && workspace === it.workspace - ) - const contextId = await this.getOrCreateContextId( - workspace, - card, - personalWorkspace, - res, - event.message.created, - existsContext - ) - - await this.db.createNotification(event.message.id, contextId) - - const resultEvent: NotificationCreatedEvent = { - type: EventType.NotificationCreated, - personalWorkspace, - notification: { - context: contextId, - message: event.message, - read: false, - archived: false - } - } - res.push(resultEvent) - } - - return res - } - - private async getOrCreateContextId( - workspace: string, - card: CardID, - personalWorkspace: string, - res: BroadcastEvent[], - lastUpdate: Date, - context?: NotificationContext - ): Promise { - if (context !== undefined) { - return context.id - } else { - const contextId = await this.db.createContext(personalWorkspace, workspace, card, undefined, lastUpdate) - const newContext = { - id: contextId, - card, - workspace, - personalWorkspace - } - const resultEvent: NotificationContextCreatedEvent = { - type: EventType.NotificationContextCreated, - context: newContext - } - - res.push(resultEvent) - - return contextId - } - } - - private async updateNotificationContexts( - lastUpdate: Date, - contexts: NotificationContext[] - ): Promise { - const res: BroadcastEvent[] = [] - for (const context of contexts) { - if (context.lastUpdate === undefined || context.lastUpdate < lastUpdate) { - await this.db.updateContext(context.id, { lastUpdate }) - res.push({ - type: EventType.NotificationContextUpdated, - personalWorkspace: context.personalWorkspace, - context: context.id, - update: { - lastUpdate - } - 
}) - } - } - return res - } -} diff --git a/packages/server-ws/src/types.ts b/packages/server-ws/src/types.ts deleted file mode 100644 index 26760ef7078..00000000000 --- a/packages/server-ws/src/types.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { SocialID } from '@hcengineering/communication-types' - -export interface ConnectionInfo { - workspace: string - personalWorkspace: string - socialId: SocialID -} diff --git a/packages/server-ws/src/utils/account.ts b/packages/server-ws/src/utils/account.ts deleted file mode 100644 index 882e437f257..00000000000 --- a/packages/server-ws/src/utils/account.ts +++ /dev/null @@ -1,23 +0,0 @@ -import config from "../config.ts"; - - -type WorkspaceInfo = { -workspaceId: string -} - -export async function getWorkspaceInfo (token: string): Promise { - const accountsUrl = config.AccountsUrl - const response = await fetch(accountsUrl, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: 'Bearer ' + token - }, - body: JSON.stringify({ - method: 'getWorkspaceInfo', - params: [] - }) - }) - const result = await response.json() - return result.result as WorkspaceInfo | undefined -} \ No newline at end of file diff --git a/packages/server-ws/src/utils/logger.ts b/packages/server-ws/src/utils/logger.ts deleted file mode 100644 index 828c27e4327..00000000000 --- a/packages/server-ws/src/utils/logger.ts +++ /dev/null @@ -1,24 +0,0 @@ -export interface Logger { - log: (message: string, data?: Record) => void - warn: (message: string, data?: Record) => void - error: (message: string, data?: Record) => void - debug: (message: string, data?: Record) => void -} - -export class ConsoleLogger implements Logger { - log (message: string, data?: Record): void { - console.log({ message, ...data }) - } - - warn (message: string, data?: Record): void { - console.warn({ message, ...data }) - } - - error (message: string, data?: Record): void { - console.error({ message, ...data }) - } - - debug (message: string, data?: Record): void { - console.debug({ message, ...data }) - } -} \ No newline at end of file diff --git a/packages/server-ws/src/utils/serialize.ts b/packages/server-ws/src/utils/serialize.ts deleted file mode 100644 index 959b09d9dc3..00000000000 --- a/packages/server-ws/src/utils/serialize.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Packr } from 'msgpackr' -import type {Response, Request} from '@hcengineering/communication-sdk-types' - -import type {RawData} from "ws"; - -const packr = new Packr({ structuredClone: true, bundleStrings: true, copyBuffers: false }) - -export function serializeResponse(resp: Response, binary: boolean) { - return binary ? serializeBinary(resp) : serializeJson(resp) -} - -export function deserializeRequest(raw: RawData, binary: boolean): Request | undefined { - let buff: Buffer | undefined - if (raw instanceof Buffer) { - buff = raw - } else if (Array.isArray(raw)) { - buff = Buffer.concat(raw.map(it => new Uint8Array(it))) - } - - if(buff === undefined) { - return undefined - } - - return binary ? 
deserializeBinary(buff) : deserializeJson(buff) -} - -function deserializeBinary(data: any): any { - return packr.decode(data) -} - -function deserializeJson(data: any): any { - return JSON.parse(data.toString()) -} - -function serializeBinary(data: any) { - return new Uint8Array(packr.encode(data)) -} - -function serializeJson(data: any) { - return JSON.stringify(data) -} \ No newline at end of file diff --git a/packages/server-core/package.json b/packages/server/package.json similarity index 86% rename from packages/server-core/package.json rename to packages/server/package.json index 51e1b86dd78..345e2e7d372 100644 --- a/packages/server-core/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { - "name": "@hcengineering/communication-server-core", - "version": "0.1.7", + "name": "@hcengineering/communication-server", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -16,7 +16,7 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/core": "0.6.429", + "@hcengineering/core": "0.7.2", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*" diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts new file mode 100644 index 00000000000..5e37920c49d --- /dev/null +++ b/packages/server/src/eventProcessor.ts @@ -0,0 +1,351 @@ +import { + type Message, + type Patch, + type Reaction, + type Attachment, + type SocialID, + type WorkspaceID +} from '@hcengineering/communication-types' +import { + type CreateAttachmentEvent, + type AttachmentCreatedEvent, + type CreateMessageEvent, + type MessageCreatedEvent, + type CreatePatchEvent, + type PatchCreatedEvent, + type CreateReactionEvent, + type ReactionCreatedEvent, + type RemoveAttachmentEvent, + type AttachmentRemovedEvent, + type RemoveMessageEvent, + type MessageRemovedEvent, + type RemoveReactionEvent, + type ReactionRemovedEvent, + type EventResult, + type DbAdapter, + type CreateNotificationEvent, + type RemoveNotificationEvent, + type CreateNotificationContextEvent, + type RemoveNotificationContextEvent, + type UpdateNotificationContextEvent, + type NotificationRemovedEvent, + type NotificationContextCreatedEvent, + type NotificationContextRemovedEvent, + type NotificationContextUpdatedEvent, + type ResponseEvent, + RequestEventType, + type RequestEvent, + ResponseEventType, + type CreateMessagesGroupEvent, + type RemoveMessagesEvent, + type MessagesRemovedEvent +} from '@hcengineering/communication-sdk-types' + +export type Result = { + responseEvent?: ResponseEvent + result: EventResult +} + +export type UserInfo = { + personalWorkspace: WorkspaceID + socialIds: SocialID[] +} + +export class EventProcessor { + constructor( + private readonly db: DbAdapter, + private readonly workspace: WorkspaceID + ) {} + + async process(user: UserInfo, event: RequestEvent): Promise { + switch (event.type) { + case RequestEventType.CreateMessage: + return await this.createMessage(event, user) + case RequestEventType.RemoveMessage: + return await this.removeMessage(event, user) + case RequestEventType.RemoveMessages: + return await this.removeMessages(event, user) + case RequestEventType.CreatePatch: + return await this.createPatch(event, user) + case RequestEventType.CreateReaction: + return await this.createReaction(event, user) + case RequestEventType.RemoveReaction: + return await this.removeReaction(event, user) + case 
RequestEventType.CreateAttachment: + return await this.createAttachment(event, user) + case RequestEventType.RemoveAttachment: + return await this.removeAttachment(event, user) + case RequestEventType.CreateNotification: + return await this.createNotification(event, user) + case RequestEventType.RemoveNotification: + return await this.removeNotification(event, user) + case RequestEventType.CreateNotificationContext: + return await this.createNotificationContext(event, user) + case RequestEventType.RemoveNotificationContext: + return await this.removeNotificationContext(event, user) + case RequestEventType.UpdateNotificationContext: + return await this.updateNotificationContext(event, user) + case RequestEventType.CreateMessagesGroup: + return await this.createMessagesGroup(event, user) + } + } + + private async createMessage(event: CreateMessageEvent, user: UserInfo): Promise { + if (!user.socialIds.includes(event.creator)) { + throw new Error('Forbidden') + } + + const created = new Date() + const id = await this.db.createMessage(event.card, event.content, event.creator, created) + const message: Message = { + id, + card: event.card, + content: event.content, + creator: event.creator, + created: created, + edited: created, + reactions: [], + attachments: [] + } + const responseEvent: MessageCreatedEvent = { + type: ResponseEventType.MessageCreated, + message + } + return { + responseEvent, + result: { id } + } + } + + private async createPatch(event: CreatePatchEvent, user: UserInfo): Promise { + if (!user.socialIds.includes(event.creator)) { + throw new Error('Forbidden') + } + const created = new Date() + await this.db.createPatch(event.card, event.message, event.content, event.creator, created) + + const patch: Patch = { + message: event.message, + content: event.content, + creator: event.creator, + created: created + } + const responseEvent: PatchCreatedEvent = { + type: ResponseEventType.PatchCreated, + card: event.card, + patch + } + return { + responseEvent, + result: {} + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeMessage(event: RemoveMessageEvent, _: UserInfo): Promise { + const res = await this.db.removeMessage(event.card, event.message) + + if (res === undefined) { + return { + responseEvent: undefined, + result: { id: res } + } + } + + const responseEvent: MessageRemovedEvent = { + type: ResponseEventType.MessageRemoved, + card: event.card, + message: event.message + } + + return { + responseEvent, + result: { id: res } + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeMessages(event: RemoveMessagesEvent, _: UserInfo): Promise { + const ids = await this.db.removeMessages(event.card, event.messages) + + if (event.silent === true) { + return { + responseEvent: undefined, + result: { ids } + } + } + + const responseEvent: MessagesRemovedEvent = { + type: ResponseEventType.MessagesRemoved, + card: event.card, + messages: ids + } + + return { + responseEvent, + result: { ids } + } + } + + private async createReaction(event: CreateReactionEvent, user: UserInfo): Promise { + if (!user.socialIds.includes(event.creator)) { + throw new Error('Forbidden') + } + const created = new Date() + await this.db.createReaction(event.card, event.message, event.reaction, event.creator, created) + + const reaction: Reaction = { + message: event.message, + reaction: event.reaction, + creator: event.creator, + created: created + } + const responseEvent: ReactionCreatedEvent = { + type: 
ResponseEventType.ReactionCreated, + card: event.card, + reaction + } + return { + responseEvent, + result: {} + } + } + + private async removeReaction(event: RemoveReactionEvent, user: UserInfo): Promise { + if (!user.socialIds.includes(event.creator)) { + throw new Error('Forbidden') + } + await this.db.removeReaction(event.card, event.message, event.reaction, event.creator) + const responseEvent: ReactionRemovedEvent = { + type: ResponseEventType.ReactionRemoved, + card: event.card, + message: event.message, + reaction: event.reaction, + creator: event.creator + } + return { + responseEvent, + result: {} + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createAttachment(event: CreateAttachmentEvent, _: UserInfo): Promise { + const created = new Date() + await this.db.createAttachment(event.message, event.card, event.creator, created) + + const attachment: Attachment = { + message: event.message, + card: event.card, + creator: event.creator, + created: created + } + const responseEvent: AttachmentCreatedEvent = { + type: ResponseEventType.AttachmentCreated, + card: event.card, + attachment + } + + return { + responseEvent, + result: {} + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeAttachment(event: RemoveAttachmentEvent, _: UserInfo): Promise { + await this.db.removeAttachment(event.message, event.card) + const responseEvent: AttachmentRemovedEvent = { + type: ResponseEventType.AttachmentRemoved, + card: event.card, + message: event.message, + attachment: event.attachment + } + return { + responseEvent, + result: {} + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createNotification(event: CreateNotificationEvent, _: UserInfo): Promise { + await this.db.createNotification(event.message, event.context) + + return { + result: {} + } + } + + private async removeNotification(event: RemoveNotificationEvent, user: UserInfo): Promise { + await this.db.removeNotification(event.message, event.context) + + const responseEvent: NotificationRemovedEvent = { + type: ResponseEventType.NotificationRemoved, + personalWorkspace: user.personalWorkspace, + message: event.message, + context: event.context + } + return { + responseEvent, + result: {} + } + } + + private async createNotificationContext(event: CreateNotificationContextEvent, user: UserInfo): Promise { + const id = await this.db.createContext(user.personalWorkspace, event.card, event.lastView, event.lastUpdate) + const responseEvent: NotificationContextCreatedEvent = { + type: ResponseEventType.NotificationContextCreated, + context: { + id, + workspace: this.workspace, + personalWorkspace: user.personalWorkspace, + card: event.card, + lastView: event.lastView, + lastUpdate: event.lastUpdate + } + } + return { + responseEvent, + result: { id } + } + } + + private async removeNotificationContext(event: RemoveNotificationContextEvent, user: UserInfo): Promise { + await this.db.removeContext(event.context) + const responseEvent: NotificationContextRemovedEvent = { + type: ResponseEventType.NotificationContextRemoved, + personalWorkspace: user.personalWorkspace, + context: event.context + } + return { + responseEvent, + result: {} + } + } + + async updateNotificationContext(event: UpdateNotificationContextEvent, user: UserInfo): Promise { + await this.db.updateContext(event.context, event.update) + + const responseEvent: NotificationContextUpdatedEvent = { + type: ResponseEventType.NotificationContextUpdated, + 
personalWorkspace: user.personalWorkspace, + context: event.context, + update: event.update + } + return { + responseEvent, + result: {} + } + } + + //eslint-disable-next-line @typescript-eslint/no-unused-vars + async createMessagesGroup(event: CreateMessagesGroupEvent, _: UserInfo): Promise { + const { fromId, toId, fromDate, toDate, count } = event.group + await this.db.createMessagesGroup(event.group.card, event.group.blobId, fromId, toId, fromDate, toDate, count) + + return { + responseEvent: undefined, + result: {} + } + } +} diff --git a/packages/server-core/src/index.ts b/packages/server/src/index.ts similarity index 59% rename from packages/server-core/src/index.ts rename to packages/server/src/index.ts index 9df66f2b2fc..8e1c99d13b7 100644 --- a/packages/server-core/src/index.ts +++ b/packages/server/src/index.ts @@ -1,7 +1,19 @@ import type { MeasureContext } from '@hcengineering/core' -import type { FindMessagesParams, Message } from '@hcengineering/communication-types' +import type { + FindMessagesGroupsParams, + FindMessagesParams, + Message, + MessagesGroup, + WorkspaceID +} from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' -import type { ConnectionInfo, DbAdapter, Event, EventResult, ServerApi } from '@hcengineering/communication-sdk-types' +import type { + ConnectionInfo, + DbAdapter, + EventResult, + RequestEvent, + ServerApi +} from '@hcengineering/communication-sdk-types' import { Manager, type BroadcastSessionsFunc } from './manager.ts' @@ -10,20 +22,20 @@ export class Api implements ServerApi { private constructor( private readonly ctx: MeasureContext, - private readonly workspace: string, - db: DbAdapter, - broadcast: BroadcastSessionsFunc + private readonly workspace: WorkspaceID, + private readonly db: DbAdapter, + private readonly broadcast: BroadcastSessionsFunc ) { this.manager = new Manager(this.ctx, db, this.workspace, broadcast) } static async create( ctx: MeasureContext, - workspace: string, + workspace: WorkspaceID, dbUrl: string, broadcast: BroadcastSessionsFunc ): Promise { - const db = await createDbAdapter(dbUrl) + const db = await createDbAdapter(dbUrl, workspace) return new Api(ctx, workspace, db, broadcast) } @@ -31,11 +43,15 @@ export class Api implements ServerApi { return await this.manager.findMessages(info, params, queryId) } + async findMessagesGroups(info: ConnectionInfo, params: FindMessagesGroupsParams): Promise { + return await this.manager.findMessagesGroups(info, params) + } + async unsubscribeQuery(info: ConnectionInfo, id: number): Promise { this.manager.unsubscribeQuery(info, id) } - async event(info: ConnectionInfo, event: Event): Promise { + async event(info: ConnectionInfo, event: RequestEvent): Promise { return await this.manager.event(info, event) } diff --git a/packages/server-core/src/manager.ts b/packages/server/src/manager.ts similarity index 78% rename from packages/server-core/src/manager.ts rename to packages/server/src/manager.ts index e66af2e92b1..b77c01c7416 100644 --- a/packages/server-core/src/manager.ts +++ b/packages/server/src/manager.ts @@ -1,19 +1,22 @@ import { - type BroadcastEvent, type ConnectionInfo, type DbAdapter, - EventType, + type EventResult, type NotificationContextCreatedEvent, type NotificationCreatedEvent, - type Event, - type EventResult + type RequestEvent, + type ResponseEvent, + ResponseEventType } from '@hcengineering/communication-sdk-types' import type { + FindMessagesGroupsParams, FindMessagesParams, 
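The request/response split in the EventProcessor above means every handler returns the client-facing EventResult plus an optional ResponseEvent for broadcast. A small sketch of driving it directly, assuming a DbAdapter instance from one of the adapters and that CreateMessageEvent carries only the fields the handler reads (card, content, creator):

```ts
import { RequestEventType, type DbAdapter } from '@hcengineering/communication-sdk-types'
import type { CardID, SocialID, WorkspaceID } from '@hcengineering/communication-types'
import { EventProcessor, type UserInfo } from './eventProcessor.ts'

async function postMessage (
  db: DbAdapter,
  workspace: WorkspaceID,
  user: UserInfo,
  creator: SocialID,
  card: CardID,
  text: string
): Promise<void> {
  const processor = new EventProcessor(db, workspace)

  // creator must be one of user.socialIds, otherwise process() throws 'Forbidden'
  const { result, responseEvent } = await processor.process(user, {
    type: RequestEventType.CreateMessage,
    card,
    content: text,
    creator
  })

  console.log('create message result:', result, 'broadcast needed:', responseEvent !== undefined)
}
```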
FindNotificationContextParams, FindNotificationsParams, Message, - MessageID + MessageID, + MessagesGroup, + WorkspaceID } from '@hcengineering/communication-types' import { Triggers } from './triggers.ts' @@ -40,7 +43,7 @@ export class Manager { constructor( private readonly ctx: MeasureContext, private readonly db: DbAdapter, - private readonly workspace: string, + private readonly workspace: WorkspaceID, private readonly broadcast: BroadcastSessionsFunc ) { this.eventProcessor = new EventProcessor(db, this.workspace) @@ -48,17 +51,24 @@ export class Manager { } async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { - const result = await this.db.findMessages(this.workspace, params) + const result = await this.db.findMessages(params) if (queryId != null && info.sessionId != null && info.sessionId !== '') { this.subscribeQuery(info, 'message', queryId, params) } return result } - async event(info: ConnectionInfo, event: Event): Promise { - const { result, broadcastEvent } = await this.eventProcessor.process(info.personalWorkspace, event) - if (broadcastEvent !== undefined) { - void this.next(broadcastEvent) + async findMessagesGroups(info: ConnectionInfo, params: FindMessagesGroupsParams): Promise { + return await this.db.findMessagesGroups(params) + } + + async event(info: ConnectionInfo, event: RequestEvent): Promise { + const { result, responseEvent } = await this.eventProcessor.process( + { personalWorkspace: info.personalWorkspace, socialIds: info.socialIds }, + event + ) + if (responseEvent !== undefined) { + void this.next(responseEvent) } return result } @@ -97,13 +107,13 @@ export class Manager { this.dataBySessionId.delete(sessionId) } - async next(event: BroadcastEvent): Promise { - await this.broadcastEvent(event) + async next(event: ResponseEvent): Promise { + await this.responseEvent(event) const derived = await this.triggers.process(event) await Promise.all(derived.map((it) => this.next(it))) } - private async broadcastEvent(event: BroadcastEvent): Promise { + private async responseEvent(event: ResponseEvent): Promise { const sessionIds: string[] = [] for (const [sessionId, session] of this.dataBySessionId.entries()) { if (this.match(event, session)) { @@ -116,58 +126,60 @@ export class Manager { } } - private match(event: BroadcastEvent, info: SessionInfo): boolean { + private match(event: ResponseEvent, info: SessionInfo): boolean { switch (event.type) { - case EventType.MessageCreated: + case ResponseEventType.MessageCreated: return this.matchMessagesQuery( { id: event.message.id, card: event.message.card }, Array.from(info.messageQueries.values()) ) - case EventType.PatchCreated: + case ResponseEventType.PatchCreated: return this.matchMessagesQuery( { card: event.card, id: event.patch.message }, Array.from(info.messageQueries.values()) ) - case EventType.MessageRemoved: + case ResponseEventType.MessageRemoved: return this.matchMessagesQuery( { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) - case EventType.ReactionCreated: + case ResponseEventType.MessagesRemoved: + return this.matchMessagesQuery({ card: event.card }, Array.from(info.messageQueries.values())) + case ResponseEventType.ReactionCreated: return this.matchMessagesQuery( { card: event.card, id: event.reaction.message }, Array.from(info.messageQueries.values()) ) - case EventType.ReactionRemoved: + case ResponseEventType.ReactionRemoved: return this.matchMessagesQuery( { card: event.card, id: event.message }, 
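Passing a queryId into findMessages above is what turns a one-off read into a live query: the params are remembered per session and later ResponseEvents are matched against them. A short sketch, with the Manager, connection info and card assumed to come from the hosting transport:

```ts
import type { ConnectionInfo } from '@hcengineering/communication-sdk-types'
import type { CardID, Message } from '@hcengineering/communication-types'
import type { Manager } from './manager.ts'

async function watchCard (manager: Manager, info: ConnectionInfo, card: CardID): Promise<Message[]> {
  // queryId 1 registers { card } as a live message query for info.sessionId
  // (requires a non-empty sessionId); matching events are then delivered through
  // the broadcast callback until the query is unsubscribed
  return await manager.findMessages(info, { card }, 1)
}

function stopWatching (manager: Manager, info: ConnectionInfo): void {
  manager.unsubscribeQuery(info, 1)
}
```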
Array.from(info.messageQueries.values()) ) - case EventType.AttachmentCreated: + case ResponseEventType.AttachmentCreated: return this.matchMessagesQuery( { card: event.card, id: event.attachment.message }, Array.from(info.messageQueries.values()) ) - case EventType.AttachmentRemoved: + case ResponseEventType.AttachmentRemoved: return this.matchMessagesQuery( { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) - case EventType.NotificationCreated: + case ResponseEventType.NotificationCreated: return ( info.personalWorkspace === event.personalWorkspace && this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) ) - case EventType.NotificationRemoved: + case ResponseEventType.NotificationRemoved: return info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 - case EventType.NotificationContextCreated: + case ResponseEventType.NotificationContextCreated: return ( info.personalWorkspace === event.context.personalWorkspace && this.matchContextQuery(event, Array.from(info.contextQueries.values())) ) - case EventType.NotificationContextRemoved: + case ResponseEventType.NotificationContextRemoved: return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 - case EventType.NotificationContextUpdated: + case ResponseEventType.NotificationContextUpdated: return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 } } diff --git a/packages/server-core/src/triggers.ts b/packages/server/src/triggers.ts similarity index 75% rename from packages/server-core/src/triggers.ts rename to packages/server/src/triggers.ts index abc13967186..c4f61b597a0 100644 --- a/packages/server-core/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -1,36 +1,36 @@ import { - type BroadcastEvent, + ResponseEventType, type DbAdapter, - EventType, type MessageCreatedEvent, type NotificationContextCreatedEvent, - type NotificationCreatedEvent + type NotificationCreatedEvent, + type ResponseEvent } from '@hcengineering/communication-sdk-types' -import type { NotificationContext, ContextID, CardID } from '@hcengineering/communication-types' +import type { NotificationContext, ContextID, CardID, WorkspaceID } from '@hcengineering/communication-types' export class Triggers { constructor( private readonly db: DbAdapter, - private readonly workspace: string + private readonly workspace: WorkspaceID ) {} - async process(event: BroadcastEvent): Promise { + async process(event: ResponseEvent): Promise { switch (event.type) { - case EventType.MessageCreated: + case ResponseEventType.MessageCreated: return this.createNotifications(event) } return [] } - private async createNotifications(event: MessageCreatedEvent): Promise { + private async createNotifications(event: MessageCreatedEvent): Promise { const card = event.message.card as any as CardID const subscribedPersonalWorkspaces = [ 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' - ] + ] as WorkspaceID[] - const res: BroadcastEvent[] = [] + const res: ResponseEvent[] = [] const contexts = await this.db.findContexts({ card }, [], this.workspace) res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) @@ -40,9 +40,8 @@ export class Triggers { (it) => it.card === card && it.personalWorkspace === personalWorkspace && this.workspace === it.workspace ) const contextId = await this.getOrCreateContextId( - this.workspace, - card, personalWorkspace, + card, res, event.message.created, 
existsContext @@ -51,7 +50,7 @@ export class Triggers { await this.db.createNotification(event.message.id, contextId) const resultEvent: NotificationCreatedEvent = { - type: EventType.NotificationCreated, + type: ResponseEventType.NotificationCreated, personalWorkspace, notification: { context: contextId, @@ -67,25 +66,24 @@ export class Triggers { } private async getOrCreateContextId( - workspace: string, + personalWorkspace: WorkspaceID, card: CardID, - personalWorkspace: string, - res: BroadcastEvent[], + res: ResponseEvent[], lastUpdate: Date, context?: NotificationContext ): Promise { if (context !== undefined) { return context.id } else { - const contextId = await this.db.createContext(personalWorkspace, workspace, card, undefined, lastUpdate) + const contextId = await this.db.createContext(personalWorkspace, card, undefined, lastUpdate) const newContext = { id: contextId, card, - workspace, + workspace: this.workspace, personalWorkspace } const resultEvent: NotificationContextCreatedEvent = { - type: EventType.NotificationContextCreated, + type: ResponseEventType.NotificationContextCreated, context: newContext } @@ -98,13 +96,13 @@ export class Triggers { private async updateNotificationContexts( lastUpdate: Date, contexts: NotificationContext[] - ): Promise { - const res: BroadcastEvent[] = [] + ): Promise { + const res: ResponseEvent[] = [] for (const context of contexts) { if (context.lastUpdate === undefined || context.lastUpdate < lastUpdate) { await this.db.updateContext(context.id, { lastUpdate }) res.push({ - type: EventType.NotificationContextUpdated, + type: ResponseEventType.NotificationContextUpdated, personalWorkspace: context.personalWorkspace, context: context.id, update: { diff --git a/packages/server-ws/tsconfig.json b/packages/server/tsconfig.json similarity index 100% rename from packages/server-ws/tsconfig.json rename to packages/server/tsconfig.json diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 1fb98b52056..d00cba0e858 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index a3430bdb6d9..c5a7ed5eb85 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -11,7 +11,9 @@ import { type FindNotificationContextParams, type NotificationContext, type Notification, - type BlobID + type BlobID, + type MessagesGroup, + type FindMessagesGroupsParams } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' @@ -20,7 +22,9 @@ import { applyMigrations } from './migrations.ts' import { MessagesDb } from './db/message.ts' import { NotificationsDb } from './db/notification.ts' -export class SqliteAdapter implements DbAdapter { +//TODO: FIXME +//export class SqliteAdapter implements DbAdapter +export class SqliteAdapter { private readonly message: MessagesDb private readonly notification: NotificationsDb @@ -42,31 +46,67 @@ export class SqliteAdapter implements DbAdapter { return await this.message.createMessage(workspace, card, content, creator, created) } - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { + async createPatch( + workspace: string, + card: CardID, + message: 
MessageID, + content: RichText, + creator: SocialID, + created: Date + ): Promise { + //TODO: FIXME return await this.message.createPatch(message, content, creator, created) } - async removeMessage(message: MessageID): Promise { - return await this.message.removeMessage(message) + async removeMessage(workspace: string, card: CardID, id: MessageID): Promise { + await this.message.removeMessage(id) + return id + } + + async removeMessages(workspace: string, card: CardID, ids: MessageID[]): Promise { + //TODO: implement + return ids } /* eslint-disable @typescript-eslint/no-unused-vars */ async createMessagesGroup( workspace: string, card: CardID, - startAt: Date, - endAt: Date, blobId: BlobID, + from_id: MessageID, + to_id: MessageID, + from_date: Date, + to_date: Date, count: number ): Promise { //TODO: implement } - async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { + async findMessagesGroups(workspace: string, params: FindMessagesGroupsParams): Promise { + //TODO: implement + return [] + } + + async createReaction( + workspace: string, + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + //TODO: FIXME return await this.message.createReaction(message, reaction, creator, created) } - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { + async removeReaction( + workspace: string, + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID + ): Promise { + //TODO: FIXME return await this.message.removeReaction(message, reaction, creator) } @@ -132,5 +172,6 @@ export async function createDbAdapter(connectionString: string): Promise { diff --git a/packages/types/package.json b/packages/types/package.json index c7b3d0dbd9c..209bf93ebb0 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.7", + "version": "0.1.8", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -16,8 +16,8 @@ "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/core": "0.6.429", - "@hcengineering/card": "0.6.429" + "@hcengineering/core": "0.7.2", + "@hcengineering/card": "0.7.2" }, "repository": { "type": "git", diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 7f264dede5b..e848df14a7c 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,13 +1,14 @@ -import type { Ref, Blob } from '@hcengineering/core' +import type { Ref, Blob, PersonId, WorkspaceUuid } from '@hcengineering/core' import type { Card } from '@hcengineering/card' export type BlobID = Ref export type CardID = Ref -export type SocialID = string +export type SocialID = PersonId +export type WorkspaceID = WorkspaceUuid export type RichText = string -export type ID = string -export type MessageID = ID & { message: true } +export type ID = string | number +export type MessageID = number & { message: true } interface Object { creator: SocialID @@ -18,16 +19,18 @@ export interface Message extends Object { id: MessageID card: CardID content: RichText - edited: Date + edited?: Date reactions: Reaction[] attachments: Attachment[] } export interface MessagesGroup { card: CardID - startAt: Date - endAt: Date blobId: BlobID + fromId: MessageID + toId: MessageID + fromDate: Date + toDate: Date count: number } diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 
007e19b4c16..11bb3be7e45 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -1,6 +1,6 @@ -import type { Message, CardID, ID } from './message' +import type { Message, CardID, WorkspaceID } from './message' -export type ContextID = ID & { context: true } +export type ContextID = string & { context: true } export interface Notification { message: Message @@ -12,8 +12,8 @@ export interface Notification { export interface NotificationContext { id: ContextID card: CardID - workspace: string - personalWorkspace: string + workspace: WorkspaceID + personalWorkspace: WorkspaceID archivedFrom?: Date lastView?: Date lastUpdate?: Date diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 302893542a1..63e08dca8b5 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -1,4 +1,4 @@ -import type { CardID, MessageID } from './message' +import type { BlobID, CardID, MessageID } from './message' import type { ContextID } from './notification' export enum SortOrder { @@ -45,3 +45,21 @@ export interface FindNotificationContextParams extends FindParams { id?: ContextID card?: CardID } + +export type ComparisonOperator = 'less' | 'lessOrEqual' | 'greater' | 'greaterOrEqual' | 'notEqual' + +type Exclusive = { + [K in keyof T]: Record & Partial, never>> +}[keyof T] + +export interface FindMessagesGroupsParams { + card?: CardID + blobId?: BlobID + fromId?: Exclusive> | MessageID + toId?: Exclusive> | MessageID + fromDate?: Exclusive> | Date + toDate?: Exclusive> | Date + limit?: number + sortBy?: 'fromId' | 'toId' | 'fromDate' | 'toDate' + sort?: SortOrder +} From 3d43e04eecb6889478f72b87ead12fd087284de0 Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 18 Feb 2025 14:12:47 +0400 Subject: [PATCH 040/636] Fix version (#29) Signed-off-by: Kristina Fefelova --- bun.lock | 46 ++++++++++++++++----------------- packages/sdk-types/package.json | 2 +- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/bun.lock b/bun.lock index 4fe1f65a2e4..4bed3a011c8 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-core": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -81,7 +81,7 @@ }, "packages/core": { "name": "@hcengineering/communication-core", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -94,7 +94,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { 
"@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -110,7 +110,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -126,9 +126,9 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { - "@hcengineering/communication-types": "^0.1.0", + "@hcengineering/communication-types": "workspace:*", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -139,7 +139,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -158,7 +158,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -175,7 +175,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.7", + "version": "0.1.8", "dependencies": { "@hcengineering/card": "0.7.2", "@hcengineering/core": "0.7.2", @@ -191,7 +191,7 @@ "packages": { "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="], - "@babel/runtime": ["@babel/runtime@7.26.7", "", { "dependencies": { "regenerator-runtime": "^0.14.0" } }, "sha512-AOPI3D+a8dXnja+iwsUqGRjr1BbZIe771sXdapOtYI531gSqpi92vXivKcq2asu/DFpdl1ceFAKZyRzK2PCVcQ=="], + "@babel/runtime": ["@babel/runtime@7.26.9", "", { "dependencies": { "regenerator-runtime": "^0.14.0" } }, "sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg=="], "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA=="], @@ -319,21 +319,21 @@ "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.24.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/type-utils": "8.24.0", "@typescript-eslint/utils": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-aFcXEJJCI4gUdXgoo/j9udUYIHgF23MFkg09LFz2dzEmU0+1Plk4rQWv/IYKvPHAtlkkGoB3m5e6oUp+JPsNaQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.24.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.24.1", 
"@typescript-eslint/type-utils": "8.24.1", "@typescript-eslint/utils": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ll1StnKtBigWIGqvYDVuDmXJHVH4zLVot1yQ4fJtLpL7qacwkxJc1T0bptqw+miBQ/QfUbhl1TcQ4accW5KUyA=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.24.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/types": "8.24.0", "@typescript-eslint/typescript-estree": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-MFDaO9CYiard9j9VepMNa9MTcqVvSny2N4hkY6roquzj8pdCBRENhErrteaQuu7Yjn1ppk0v1/ZF9CG3KIlrTA=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.24.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/types": "8.24.1", "@typescript-eslint/typescript-estree": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-Tqoa05bu+t5s8CTZFaGpCH2ub3QeT9YDkXbPd3uQ4SfsLoh1/vv2GEYAioPoxCWJJNsenXlC88tRjwoHNts1oQ=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0" } }, "sha512-HZIX0UByphEtdVBKaQBgTDdn9z16l4aTUz8e8zPQnyxwHBtf5vtl1L+OhH+m1FGV9DrRmoDuYKqzVrvWDcDozw=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1" } }, "sha512-OdQr6BNBzwRjNEXMQyaGyZzgg7wzjYKfX2ZBV3E04hUCBDv3GQCHiz9RpqdUIiVrMgJGkXm3tcEh4vFSHreS2Q=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.24.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.24.0", "@typescript-eslint/utils": "8.24.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-8fitJudrnY8aq0F1wMiPM1UUgiXQRJ5i8tFjq9kGfRajU+dbPyOuHbl0qRopLEidy0MwqgTHDt6CnSeXanNIwA=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.24.1", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.24.1", "@typescript-eslint/utils": "8.24.1", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-/Do9fmNgCsQ+K4rCz0STI7lYB4phTtEXqqCAs3gZW0pnK7lWNkvWd5iW545GSmApm4AzmQXmSqXPO565B4WVrw=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.24.0", "", {}, "sha512-VacJCBTyje7HGAw7xp11q439A+zeGG0p0/p2zsZwpnMzjPB5WteaWqt4g2iysgGFafrqvyLWqq6ZPZAOCoefCw=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.24.1", "", {}, "sha512-9kqJ+2DkUXiuhoiYIUvIYjGcwle8pcPpdlfkemGvTObzgmYfJ5d0Qm6jwb4NBXP9W1I5tss0VIAnWFumz3mC5A=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "@typescript-eslint/visitor-keys": "8.24.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, 
"peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ITjYcP0+8kbsvT9bysygfIfb+hBj6koDsu37JZG7xrCiy3fPJyNmfVtaGsgTUSEuTzcvME5YI5uyL5LD1EV5ZQ=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-UPyy4MJ/0RE648DSKQe9g0VDSehPINiejjA6ElqnFaFIhI6ZEiZAkUI0D5MCk0bQcTf/LVqZStvQ6K4lPn/BRg=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.24.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.24.0", "@typescript-eslint/types": "8.24.0", "@typescript-eslint/typescript-estree": "8.24.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-07rLuUBElvvEb1ICnafYWr4hk8/U7X9RDCOqd9JcAMtjh/9oRmcfN4yGzbPVirgMR0+HLVHehmu19CWeh7fsmQ=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.24.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/types": "8.24.1", "@typescript-eslint/typescript-estree": "8.24.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-OOcg3PMMQx9EXspId5iktsI3eMaXVwlhC8BvNnX6B5w9a4dVgpkQZuU8Hy67TolKcl+iFWq0XX+jbDGN4xWxjQ=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.24.0", "", { "dependencies": { "@typescript-eslint/types": "8.24.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-kArLq83QxGLbuHrTMoOEWO+l2MwsNS2TGISEdx8xgqpkbytB07XmlQyQdNDrCc1ecSqx0cnmhGvpX+VBwqqSkg=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "eslint-visitor-keys": "^4.2.0" } }, "sha512-EwVHlp5l+2vp8CoqJm9KikPZgi3gbdZAtabKT9KPShGeOcJhsv4Zdo3oc8T8I0uKEmYoU4ItyxbptjF08enaxg=="], "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], @@ -435,7 +435,7 @@ "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], - "flatted": ["flatted@3.3.2", "", {}, "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA=="], + "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], @@ -589,7 +589,7 @@ "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="], - "typescript-eslint": ["typescript-eslint@8.24.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.24.0", "@typescript-eslint/parser": "8.24.0", "@typescript-eslint/utils": "8.24.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, 
"sha512-/lmv4366en/qbB32Vz5+kCNZEMf6xYHwh1z48suBwZvAtnXKbP+YhGe8OLE2BqC67LMqKkCNLtjejdwsdW6uOQ=="], + "typescript-eslint": ["typescript-eslint@8.24.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.24.1", "@typescript-eslint/parser": "8.24.1", "@typescript-eslint/utils": "8.24.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-cw3rEdzDqBs70TIcb0Gdzbt6h11BSs2pS0yaq7hDWDBtCCSei1pPSUXE9qUdQ/Wm9NgFg8mKtMt1b8fTHIl1jA=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index d6d019f1e7e..9dde5468104 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -13,7 +13,7 @@ "@types/bun": "^1.1.14" }, "dependencies": { - "@hcengineering/communication-types": "^0.1.0" + "@hcengineering/communication-types": "workspace:*" }, "peerDependencies": { "typescript": "^5.6.3" From 197c3b4c278ce8eab6c1367ec4d0cad0cc4d6adf Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 18 Feb 2025 15:25:15 +0400 Subject: [PATCH 041/636] Fix version (#30) Signed-off-by: Kristina Fefelova --- bun.lock | 22 +++++++++---------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/migrations/01_message.sql | 8 +++---- .../cockroach/migrations/03_attachment.sql | 4 ++-- packages/cockroach/migrations/04_reaction.sql | 4 ++-- .../cockroach/migrations/06_notification.sql | 4 ++-- packages/cockroach/package.json | 2 +- packages/cockroach/src/db/schema.ts | 10 ++++----- packages/core/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/server/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 2 +- 17 files changed, 37 insertions(+), 37 deletions(-) diff --git a/bun.lock b/bun.lock index 4bed3a011c8..098028ab1c2 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -63,7 +63,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-core": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -81,7 +81,7 @@ }, "packages/core": { "name": "@hcengineering/communication-core", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -94,7 +94,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.8", + "version": "0.1.9", 
"dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-sqlite": "workspace:*", @@ -110,7 +110,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -126,7 +126,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -139,7 +139,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -158,7 +158,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -175,7 +175,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.8", + "version": "0.1.9", "dependencies": { "@hcengineering/card": "0.7.2", "@hcengineering/core": "0.7.2", diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 60168a7e13e..79c0faedee0 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 2866014d61b..97743c2e9b5 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 5b015e5a24e..e9c75bc5f95 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index a6c5224d504..65ee7b4aabb 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -1,18 +1,18 @@ -CREATE TABLE IF NOT EXISTS communication.message +CREATE TABLE IF NOT EXISTS communication.messages ( - id INT8 NOT NULL, workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, + id INT8 NOT NULL, content TEXT NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (id, workspace_id, card_id) + PRIMARY KEY (id, card_id, workspace_id) ); -CREATE TABLE IF NOT EXISTS communication.messages_group +CREATE TABLE IF NOT EXISTS communication.messages_groups ( workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql index 7fc727dd47e..3a45d1fcb99 100644 --- 
a/packages/cockroach/migrations/03_attachment.sql +++ b/packages/cockroach/migrations/03_attachment.sql @@ -1,4 +1,4 @@ -CREATE TABLE IF NOT EXISTS communication.attachment +CREATE TABLE IF NOT EXISTS communication.attachments ( message_id INT8 NOT NULL, card_id VARCHAR(255) NOT NULL, @@ -8,4 +8,4 @@ CREATE TABLE IF NOT EXISTS communication.attachment PRIMARY KEY (message_id, card_id) ); -CREATE INDEX IF NOT EXISTS attachment_message_idx ON communication.attachment (message_id); +CREATE INDEX IF NOT EXISTS attachment_message_idx ON communication.attachments (message_id); diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql index 5719fb8de2d..a8b4fc9496d 100644 --- a/packages/cockroach/migrations/04_reaction.sql +++ b/packages/cockroach/migrations/04_reaction.sql @@ -1,4 +1,4 @@ -CREATE TABLE IF NOT EXISTS communication.reaction +CREATE TABLE IF NOT EXISTS communication.reactions ( workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, @@ -10,4 +10,4 @@ CREATE TABLE IF NOT EXISTS communication.reaction PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction) ); -CREATE INDEX IF NOT EXISTS reaction_message_idx ON communication.reaction (message_id); +CREATE INDEX IF NOT EXISTS reaction_message_idx ON communication.reactions (message_id); diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql index 3a89fb8be38..0bb7b2c9466 100644 --- a/packages/cockroach/migrations/06_notification.sql +++ b/packages/cockroach/migrations/06_notification.sql @@ -1,4 +1,4 @@ -CREATE TABLE IF NOT EXISTS communication.notification +CREATE TABLE IF NOT EXISTS communication.notifications ( message_id UUID NOT NULL, context UUID NOT NULL, @@ -7,4 +7,4 @@ CREATE TABLE IF NOT EXISTS communication.notification FOREIGN KEY (context) REFERENCES communication.notification_context (id) ON DELETE CASCADE ); -CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notification (context); \ No newline at end of file +CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notifications (context); \ No newline at end of file diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index bf1b86497fa..619b223cf47 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts index 19dbd4ecc89..755cadc9e83 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/db/schema.ts @@ -1,12 +1,12 @@ import type {ContextID, MessageID, RichText, SocialID, CardID, BlobID, Message, Reaction, Attachment, MessagesGroup, WorkspaceID } from "@hcengineering/communication-types" export enum TableName { - Message = 'communication.message', - MessagesGroup = 'communication.messages_group', + Message = 'communication.messages', + MessagesGroup = 'communication.messages_groups', Patch = 'communication.patch', - Attachment = 'communication.attachment', - Reaction = 'communication.reaction', - Notification = 'communication.notification', + Attachment = 'communication.attachments', + Reaction = 'communication.reactions', + Notification = 'communication.notifications', NotificationContext = 'communication.notification_context' } diff --git 
a/packages/core/package.json b/packages/core/package.json index db801ed38ad..ba3abbb3724 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-core", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index d7e579df670..9dfa63f4697 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index c02e9747bd0..1a3669fcb8f 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 9dde5468104..4295ff71495 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.8", + "version": "0.1.9", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server/package.json b/packages/server/package.json index 345e2e7d372..e215f29720b 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index d00cba0e858..991c97f1dbd 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 209bf93ebb0..988ac8491dd 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.8", + "version": "0.1.9", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", From ac22070f89801603b4ffb6dd745bd1250c079ca5 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Fri, 21 Feb 2025 05:03:12 +0300 Subject: [PATCH 042/636] apply fixes after testing Signed-off-by: denis-tingaikin --- .github/workflows/docker-push.yaml | 2 +- cmd/huly-stream/main.go | 11 +- go.mod | 44 +++---- go.sum | 96 +++++++-------- internal/pkg/config/config.go | 27 +++-- internal/pkg/manifest/hls.go | 120 +++++-------------- internal/pkg/manifest/hls_test.go | 142 ++++------------------- internal/pkg/resconv/resconv.go | 129 ++++++++++++++++++++ internal/pkg/resconv/resconv_test.go | 120 +++++++++++++++++++ internal/pkg/transcoding/command.go | 111 +++++------------- internal/pkg/transcoding/command_test.go | 53 ++++----- internal/pkg/transcoding/scheduler.go | 94 ++++++++++----- internal/pkg/transcoding/worker.go | 56 +++++++-- internal/pkg/uploader/datalake.go | 4 +- internal/pkg/uploader/s3.go | 3 
+ internal/pkg/uploader/uploader.go | 123 ++++++++++---------- 16 files changed, 621 insertions(+), 514 deletions(-) create mode 100644 internal/pkg/resconv/resconv.go create mode 100644 internal/pkg/resconv/resconv_test.go diff --git a/.github/workflows/docker-push.yaml b/.github/workflows/docker-push.yaml index 0308ce654b8..ad67bcee7cc 100644 --- a/.github/workflows/docker-push.yaml +++ b/.github/workflows/docker-push.yaml @@ -41,4 +41,4 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: ${{ inputs.version }} + tags: hardcoreeng/huly-stream:${{ inputs.version }} diff --git a/cmd/huly-stream/main.go b/cmd/huly-stream/main.go index 2730cd8b35a..f660a2511f1 100644 --- a/cmd/huly-stream/main.go +++ b/cmd/huly-stream/main.go @@ -32,7 +32,7 @@ import ( tusd "github.com/tus/tusd/v2/pkg/handler" ) -const basePath = "/transcoding" +const basePath = "/recording" func main() { var ctx, cancel = signal.NotifyContext( @@ -43,16 +43,13 @@ func main() { syscall.SIGQUIT, ) defer cancel() - ctx = log.WithLoggerFields(ctx) var logger = log.FromContext(ctx) var conf = must(config.FromEnv()) - logger.Sugar().Debugf("provided config is %v", conf) mustNoError(os.MkdirAll(conf.OutputDir, os.ModePerm)) - if conf.PprofEnabled { go pprof.ListenAndServe(ctx, "localhost:6060") } @@ -71,10 +68,12 @@ func main() { Logger: slog.New(slog.NewTextHandler(discardTextHandler{}, nil)), })) - http.Handle("/transcoding/", http.StripPrefix("/transcoding/", handler)) - http.Handle("/transcoding", http.StripPrefix("/transcoding", handler)) + http.Handle("/recording/", http.StripPrefix("/recording/", handler)) + http.Handle("/recording", http.StripPrefix("/recording", handler)) go func() { + logger.Info("started to listen") + defer logger.Info("server has finished") // #nosec var err = http.ListenAndServe(conf.ServeURL, nil) if err != nil { diff --git a/go.mod b/go.mod index 89419cb728b..3775572bbff 100644 --- a/go.mod +++ b/go.mod @@ -3,42 +3,42 @@ module github.com/huly-stream go 1.23.2 require ( - github.com/aws/aws-sdk-go-v2 v1.32.3 - github.com/aws/aws-sdk-go-v2/config v1.28.1 - github.com/aws/aws-sdk-go-v2/credentials v1.17.42 - github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2 - github.com/aws/smithy-go v1.22.0 + github.com/aws/aws-sdk-go-v2 v1.36.1 + github.com/aws/aws-sdk-go-v2/config v1.29.6 + github.com/aws/aws-sdk-go-v2/credentials v1.17.59 + github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0 + github.com/aws/smithy-go v1.22.3 github.com/fsnotify/fsnotify v1.8.0 github.com/google/uuid v1.6.0 github.com/kelseyhightower/envconfig v1.4.0 github.com/pkg/errors v0.9.1 - github.com/stretchr/testify v1.9.0 + github.com/stretchr/testify v1.10.0 github.com/tus/tusd/v2 v2.6.0 - github.com/valyala/fasthttp v1.58.0 + github.com/valyala/fasthttp v1.59.0 go.uber.org/zap v1.27.0 - golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df + golang.org/x/exp v0.0.0-20250215185904-eff6e970281f ) require ( github.com/andybalholm/brotli v1.1.1 // indirect - github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 // indirect - github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 // indirect - github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 // indirect - github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 // indirect - github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect - github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 // indirect - 
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 // indirect - github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 // indirect - github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 // indirect - github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 // indirect - github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 // indirect + github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.9 // indirect + github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.28 // indirect + github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.32 // indirect + github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.32 // indirect + github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.32 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.2 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.0 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.13 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.13 // indirect + github.com/aws/aws-sdk-go-v2/service/sso v1.24.15 // indirect + github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.14 // indirect + github.com/aws/aws-sdk-go-v2/service/sts v1.33.14 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/sys v0.27.0 // indirect + golang.org/x/sys v0.30.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 11e9c85d56b..1ffa95ca7c2 100644 --- a/go.sum +++ b/go.sum @@ -2,42 +2,42 @@ github.com/Acconut/go-httptest-recorder v1.0.0 h1:TAv2dfnqp/l+SUvIaMAUK4GeN4+wqb github.com/Acconut/go-httptest-recorder v1.0.0/go.mod h1:CwQyhTH1kq/gLyWiRieo7c0uokpu3PXeyF/nZjUNtmM= github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= -github.com/aws/aws-sdk-go-v2 v1.32.3 h1:T0dRlFBKcdaUPGNtkBSwHZxrtis8CQU17UpNBZYd0wk= -github.com/aws/aws-sdk-go-v2 v1.32.3/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6 h1:pT3hpW0cOHRJx8Y0DfJUEQuqPild8jRGmSFmBgvydr0= -github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.6/go.mod h1:j/I2++U0xX+cr44QjHay4Cvxj6FUbnxrgmqN3H1jTZA= -github.com/aws/aws-sdk-go-v2/config v1.28.1 h1:oxIvOUXy8x0U3fR//0eq+RdCKimWI900+SV+10xsCBw= -github.com/aws/aws-sdk-go-v2/config v1.28.1/go.mod h1:bRQcttQJiARbd5JZxw6wG0yIK3eLeSCPdg6uqmmlIiI= -github.com/aws/aws-sdk-go-v2/credentials v1.17.42 h1:sBP0RPjBU4neGpIYyx8mkU2QqLPl5u9cmdTWVzIpHkM= -github.com/aws/aws-sdk-go-v2/credentials v1.17.42/go.mod h1:FwZBfU530dJ26rv9saAbxa9Ej3eF/AK0OAY86k13n4M= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18 h1:68jFVtt3NulEzojFesM/WVarlFpCaXLKaBxDpzkQ9OQ= -github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.18/go.mod h1:Fjnn5jQVIo6VyedMc0/EhPpfNlPl7dHV916O6B+49aE= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22 h1:Jw50LwEkVjuVzE1NzkhNKkBf9cRN7MtE1F/b2cOKTUM= -github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.22/go.mod h1:Y/SmAyPcOTmpeVaWSzSKiILfXTVJwrGmYZhcRbhWuEY= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22 h1:981MHwBaRZM7+9QSR6XamDzF/o7ouUGxFzr+nVSIhrs= -github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.22/go.mod 
h1:1RA1+aBEfn+CAB/Mh0MB6LsdCYCnjZm7tKXtnk499ZQ= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= -github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22 h1:yV+hCAHZZYJQcwAaszoBNwLbPItHvApxT0kVIw6jRgs= -github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.22/go.mod h1:kbR1TL8llqB1eGnVbybcA4/wgScxdylOdyAd51yxPdw= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g= -github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3 h1:kT6BcZsmMtNkP/iYMcRG+mIEA/IbeiUimXtGmqF39y0= -github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.4.3/go.mod h1:Z8uGua2k4PPaGOYn66pK02rhMrot3Xk3tpBuUFPomZU= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3 h1:qcxX0JYlgWH3hpPUnd6U0ikcl6LLA9sLkXE2w1fpMvY= -github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.3/go.mod h1:cLSNEmI45soc+Ef8K/L+8sEA3A3pYFEYf5B5UI+6bH4= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3 h1:ZC7Y/XgKUxwqcdhO5LE8P6oGP1eh6xlQReWNKfhvJno= -github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.3/go.mod h1:WqfO7M9l9yUAw0HcHaikwRd/H6gzYdz7vjejCA5e2oY= -github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2 h1:p9TNFL8bFUMd+38YIpTAXpoxyz0MxC7FlbFEH4P4E1U= -github.com/aws/aws-sdk-go-v2/service/s3 v1.66.2/go.mod h1:fNjyo0Coen9QTwQLWeV6WO2Nytwiu+cCcWaTdKCAqqE= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.3 h1:UTpsIf0loCIWEbrqdLb+0RxnTXfWh2vhw4nQmFi4nPc= -github.com/aws/aws-sdk-go-v2/service/sso v1.24.3/go.mod h1:FZ9j3PFHHAR+w0BSEjK955w5YD2UwB/l/H0yAK3MJvI= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3 h1:2YCmIXv3tmiItw0LlYf6v7gEHebLY45kBEnPezbUKyU= -github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.3/go.mod h1:u19stRyNPxGhj6dRm+Cdgu6N75qnbW7+QN0q0dsAk58= -github.com/aws/aws-sdk-go-v2/service/sts v1.32.3 h1:wVnQ6tigGsRqSWDEEyH6lSAJ9OyFUsSnbaUWChuSGzs= -github.com/aws/aws-sdk-go-v2/service/sts v1.32.3/go.mod h1:VZa9yTFyj4o10YGsmDO4gbQJUvvhY72fhumT8W4LqsE= -github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM= -github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= +github.com/aws/aws-sdk-go-v2 v1.36.1 h1:iTDl5U6oAhkNPba0e1t1hrwAo02ZMqbrGq4k5JBWM5E= +github.com/aws/aws-sdk-go-v2 v1.36.1/go.mod h1:5PMILGVKiW32oDzjj6RU52yrNrDPUHcbZQYr1sM7qmM= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.9 h1:VZPDrbzdsU1ZxhyWrvROqLY0nxFWgMCAzhn/nYz3X48= +github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.9/go.mod h1:3XkePX5dSaxveLAYY7nsbsZZrKxCyEuE5pM4ziFxyGg= +github.com/aws/aws-sdk-go-v2/config v1.29.6 h1:fqgqEKK5HaZVWLQoLiC9Q+xDlSp+1LYidp6ybGE2OGg= +github.com/aws/aws-sdk-go-v2/config v1.29.6/go.mod h1:Ft+WLODzDQmCTHDvqAH1JfC2xxbZ0MxpZAcJqmE1LTQ= +github.com/aws/aws-sdk-go-v2/credentials v1.17.59 h1:9btwmrt//Q6JcSdgJOLI98sdr5p7tssS9yAsGe8aKP4= +github.com/aws/aws-sdk-go-v2/credentials v1.17.59/go.mod h1:NM8fM6ovI3zak23UISdWidyZuI1ghNe2xjzUZAyT+08= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.28 h1:KwsodFKVQTlI5EyhRSugALzsV6mG/SGrdjlMXSZSdso= +github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.28/go.mod h1:EY3APf9MzygVhKuPXAc5H+MkGb8k/DOSQjWS0LgkKqI= +github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.32 h1:BjUcr3X3K0wZPGFg2bxOWW3VPN8rkE3/61zhP+IHviA= 
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.32/go.mod h1:80+OGC/bgzzFFTUmcuwD0lb4YutwQeKLFpmt6hoWapU= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.32 h1:m1GeXHVMJsRsUAqG6HjZWx9dj7F5TR+cF1bjyfYyBd4= +github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.32/go.mod h1:IitoQxGfaKdVLNg0hD8/DXmAqNy0H4K2H2Sf91ti8sI= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2 h1:Pg9URiobXy85kgFev3og2CuOZ8JZUBENF+dcgWBaYNk= +github.com/aws/aws-sdk-go-v2/internal/ini v1.8.2/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.32 h1:OIHj/nAhVzIXGzbAE+4XmZ8FPvro3THr6NlqErJc3wY= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.32/go.mod h1:LiBEsDo34OJXqdDlRGsilhlIiXR7DL+6Cx2f4p1EgzI= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.2 h1:D4oz8/CzT9bAEYtVhSBmFj2dNOtaHOtMKc2vHBwYizA= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.2/go.mod h1:Za3IHqTQ+yNcRHxu1OFucBh0ACZT4j4VQFF0BqpZcLY= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.0 h1:kT2WeWcFySdYpPgyqJMSUE7781Qucjtn6wBvrgm9P+M= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.6.0/go.mod h1:WYH1ABybY7JK9TITPnk6ZlP7gQB8psI4c9qDmMsnLSA= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.13 h1:SYVGSFQHlchIcy6e7x12bsrxClCXSP5et8cqVhL8cuw= +github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.13/go.mod h1:kizuDaLX37bG5WZaoxGPQR/LNFXpxp0vsUnqfkWXfNE= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.13 h1:OBsrtam3rk8NfBEq7OLOMm5HtQ9Yyw32X4UQMya/wjw= +github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.13/go.mod h1:3U4gFA5pmoCOja7aq4nSaIAGbaOHv2Yl2ug018cmC+Q= +github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0 h1:RCOi1rDmLqOICym/6UeS2cqKED4T4m966w2rl1HfL+g= +github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0/go.mod h1:VC4EKSHqT3nzOcU955VWHMGsQ+w67wfAUBSjC8NOo8U= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.15 h1:/eE3DogBjYlvlbhd2ssWyeuovWunHLxfgw3s/OJa4GQ= +github.com/aws/aws-sdk-go-v2/service/sso v1.24.15/go.mod h1:2PCJYpi7EKeA5SkStAmZlF6fi0uUABuhtF8ILHjGc3Y= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.14 h1:M/zwXiL2iXUrHputuXgmO94TVNmcenPHxgLXLutodKE= +github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.14/go.mod h1:RVwIw3y/IqxC2YEXSIkAzRDdEU1iRabDPaYjpGCbCGQ= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.14 h1:TzeR06UCMUq+KA3bDkujxK1GVGy+G8qQN/QVYzGLkQE= +github.com/aws/aws-sdk-go-v2/service/sts v1.33.14/go.mod h1:dspXf/oYWGWo6DEvj98wpaTeqt5+DMidZD0A9BYTizc= +github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= +github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= @@ -54,14 +54,14 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.10.0 
h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tus/tusd/v2 v2.6.0 h1:Je243QDKnFTvm/WkLH2bd1oQ+7trolrflRWyuI0PdWI= github.com/tus/tusd/v2 v2.6.0/go.mod h1:1Eb1lBoSRBfYJ/mQfFVjyw8ZdNMdBqW17vgQKl3Ah9g= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.58.0 h1:GGB2dWxSbEprU9j0iMJHgdKYJVDyjrOwF9RE59PbRuE= -github.com/valyala/fasthttp v1.58.0/go.mod h1:SYXvHHaFp7QZHGKSHmoMipInhrI5StHrhDTYVEjK/Kw= +github.com/valyala/fasthttp v1.59.0 h1:Qu0qYHfXvPk1mSLNqcFtEk6DpxgA26hy6bmydotDpRI= +github.com/valyala/fasthttp v1.59.0/go.mod h1:GTxNb9Bc6r2a9D0TWNSPwDz78UxnTGBViY3xZNEqyYU= github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU= github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= @@ -70,14 +70,14 @@ go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df h1:UA2aFVmmsIlefxMk29Dp2juaUSth8Pyn3Tq5Y5mJGME= -golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= -golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= -golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= -golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= -golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= -golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/exp v0.0.0-20250215185904-eff6e970281f h1:oFMYAjX0867ZD2jcNiLBrI9BdpmEkvPyi5YrBGXbamg= +golang.org/x/exp v0.0.0-20250215185904-eff6e970281f/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk= +golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= +golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= +golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= +golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= +golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 1ef90f21925..9c296309c60 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -16,23 +16,24 @@ package config import ( "net/url" + "time" "github.com/kelseyhightower/envconfig" ) // Config represents configuration for the huly-stream application. 
type Config struct { - SecretToken string `split_words:"true" desc:"secret token for authorize requests"` - LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` - PprofEnabled bool `default:"false" split_words:"true" desc:"starts profile server on localhost:6060 if true"` - Insecure bool `default:"false" desc:"ignores authorization check if true"` - ServeURL string `split_words:"true" desc:"app listen url" default:"0.0.0.0:1080"` - EndpointURL *url.URL `split_words:"true" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` - MaxCapacity int64 `split_words:"true" default:"6220800" desc:"represents the amount of maximum possible capacity for the transcoding. The default value is 1920 * 1080 * 3."` - MaxThreads int `split_words:"true" default:"4" desc:"means upper bound for the transcoing provider."` - OutputDir string `split_words:"true" default:"/tmp/transcoing/" desc:"path to the directory with transcoding result."` - RemoveContentOnUpload bool `split_words:"true" default:"true" desc:"deletes all content when content delivered if true"` - UploadRawContent bool `split_words:"true" default:"false" desc:"uploads content in raw quality to the endpoint if true"` + SecretToken string `split_words:"true" desc:"secret token for authorize requests"` + LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` + PprofEnabled bool `default:"false" split_words:"true" desc:"starts profile server on localhost:6060 if true"` + Insecure bool `default:"false" desc:"ignores authorization check if true"` + ServeURL string `split_words:"true" desc:"app listen url" default:"0.0.0.0:1080"` + EndpointURL *url.URL `split_words:"true" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` + AuthURL *url.URL `split_words:"true" desc:"url to auth the upload"` + MaxCapacity int64 `split_words:"true" default:"6220800" desc:"represents the amount of maximum possible capacity for the transcoding. The default value is 1920 * 1080 * 3."` + MaxThreads int `split_words:"true" default:"4" desc:"means upper bound for the transcoing provider."` + OutputDir string `split_words:"true" default:"/tmp/transcoing/" desc:"path to the directory with transcoding result."` + Timeout time.Duration `default:"5m" desc:"timeout for the upload"` } // FromEnv creates new Config from env @@ -47,5 +48,9 @@ func FromEnv() (*Config, error) { return nil, err } + if *result.EndpointURL == (url.URL{}) { + result.EndpointURL = nil + } + return &result, nil } diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index 712e97f072d..d8dd7a53613 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -15,111 +15,45 @@ package manifest import ( - "bufio" "fmt" - "strconv" + "os" + "path/filepath" "strings" -) - -// HLSManifest represents an HLS manifest file -// with metadata about the playlist and its segments. -type HLSManifest struct { - Version int - TargetDuration int - SequenceNumber int - Segments []Segment - EndList bool -} -// Segment represents a media segment in the HLS manifest. -type Segment struct { - URI string - Duration float64 - Title string -} - -// ToM3U8 serializes the HLSManifest to an M3U8 file format. 
-func (m *HLSManifest) ToM3U8() string { - var builder strings.Builder - - builder.WriteString("#EXTM3U\n") - builder.WriteString(fmt.Sprintf("#EXT-X-VERSION:%d\n", m.Version)) - builder.WriteString(fmt.Sprintf("#EXT-X-TARGETDURATION:%d\n", m.TargetDuration)) - builder.WriteString(fmt.Sprintf("#EXT-X-MEDIA-SEQUENCE:%d\n", m.SequenceNumber)) + "github.com/huly-stream/internal/pkg/resconv" +) - for _, segment := range m.Segments { - if segment.Title != "" { - builder.WriteString(fmt.Sprintf("#EXTINF:%.2f,%s\n", segment.Duration, segment.Title)) - } else { - builder.WriteString(fmt.Sprintf("#EXTINF:%.2f,\n", segment.Duration)) - } - builder.WriteString(fmt.Sprintf("%s\n", segment.URI)) +// GenerateHLSPlaylist generates master file for master files for resolution levels +func GenerateHLSPlaylist(levels []string, outputPath, uploadID string) error { + p := filepath.Join(outputPath, uploadID, fmt.Sprintf("%v_master.m3u8", uploadID)) + d := filepath.Dir(p) + _ = os.MkdirAll(d, os.ModePerm) + // #nosec + file, err := os.Create(p) + if err != nil { + return err } + defer func() { _ = file.Close() }() - if m.EndList { - builder.WriteString("#EXT-X-ENDLIST\n") + _, err = file.WriteString("#EXTM3U\n") + if err != nil { + return err } - return builder.String() -} - -// FromM3U8 converts raw input to the hls master file -// nolint -func FromM3U8(data string) (*HLSManifest, error) { - scanner := bufio.NewScanner(strings.NewReader(data)) - manifest := &HLSManifest{} - var currentSegment *Segment + for _, res := range levels { + var bandwidth = resconv.Bandwidth(res) + var resolution = strings.ReplaceAll(resconv.Resolution(res), ":", "x") - for scanner.Scan() { - line := strings.TrimSpace(scanner.Text()) - if line == "" { - continue + _, err = file.WriteString(fmt.Sprintf("#EXT-X-STREAM-INF:BANDWIDTH=%d,RESOLUTION=%v\n", bandwidth, resolution)) + if err != nil { + return err } - if strings.HasPrefix(line, "#EXTM3U") { - continue + _, err = file.WriteString(fmt.Sprintf("%s_%s_master.m3u8\n", uploadID, res)) + if err != nil { + return err } - if strings.HasPrefix(line, "#EXT-X-VERSION:") { - version, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-VERSION:")) - if err != nil { - return nil, err - } - manifest.Version = version - } else if strings.HasPrefix(line, "#EXT-X-TARGETDURATION:") { - targetDuration, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-TARGETDURATION:")) - if err != nil { - return nil, err - } - manifest.TargetDuration = targetDuration - } else if strings.HasPrefix(line, "#EXT-X-MEDIA-SEQUENCE:") { - sequenceNumber, err := strconv.Atoi(strings.TrimPrefix(line, "#EXT-X-MEDIA-SEQUENCE:")) - if err != nil { - return nil, err - } - manifest.SequenceNumber = sequenceNumber - } else if strings.HasPrefix(line, "#EXTINF:") { - parts := strings.SplitN(strings.TrimPrefix(line, "#EXTINF:"), ",", 2) - duration, err := strconv.ParseFloat(parts[0], 64) - if err != nil { - return nil, err - } - title := "" - if len(parts) > 1 { - title = parts[1] - } - currentSegment = &Segment{Duration: duration, Title: title} - } else if strings.HasPrefix(line, "#EXT-X-ENDLIST") { - manifest.EndList = true - } else if currentSegment != nil { - currentSegment.URI = line - manifest.Segments = append(manifest.Segments, *currentSegment) - currentSegment = nil - } - } - - if err := scanner.Err(); err != nil { - return nil, err } - return manifest, nil + return nil } diff --git a/internal/pkg/manifest/hls_test.go b/internal/pkg/manifest/hls_test.go index 15f3dbe022e..624e3a27a98 100644 --- 
a/internal/pkg/manifest/hls_test.go +++ b/internal/pkg/manifest/hls_test.go @@ -14,130 +14,38 @@ package manifest_test import ( + "os" + "path/filepath" "testing" "github.com/huly-stream/internal/pkg/manifest" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestToM3U8(t *testing.T) { - tests := []struct { - name string - manifest manifest.HLSManifest - expected string - }{ - { - name: "simple manifest", - manifest: manifest.HLSManifest{ - Version: 3, - TargetDuration: 10, - SequenceNumber: 1, - Segments: []manifest.Segment{ - {URI: "segment1.ts", Duration: 9.5, Title: "Segment 1"}, - {URI: "segment2.ts", Duration: 9.0, Title: "Segment 2"}, - }, - EndList: true, - }, - expected: `#EXTM3U -#EXT-X-VERSION:3 -#EXT-X-TARGETDURATION:10 -#EXT-X-MEDIA-SEQUENCE:1 -#EXTINF:9.50,Segment 1 -segment1.ts -#EXTINF:9.00,Segment 2 -segment2.ts -#EXT-X-ENDLIST -`, - }, - { - name: "empty manifest", - manifest: manifest.HLSManifest{ - Version: 3, - TargetDuration: 10, - SequenceNumber: 1, - Segments: []manifest.Segment{}, - EndList: false, - }, - expected: `#EXTM3U -#EXT-X-VERSION:3 -#EXT-X-TARGETDURATION:10 -#EXT-X-MEDIA-SEQUENCE:1 -`, - }, - } +func TestGenerateHLSPlaylist(t *testing.T) { + resolutions := []string{"320p", "480p", "720p", "1080p", "4k", "8k"} + uploadID := "test123" - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - actual := tt.manifest.ToM3U8() - assert.Equal(t, tt.expected, actual) - }) - } -} + err := manifest.GenerateHLSPlaylist(resolutions, "", uploadID) + require.NoError(t, err) -func TestFromM3U8(t *testing.T) { - tests := []struct { - name string - data string - expected manifest.HLSManifest - err bool - }{ - { - name: "valid manifest", - data: `#EXTM3U -#EXT-X-VERSION:3 -#EXT-X-TARGETDURATION:10 -#EXT-X-MEDIA-SEQUENCE:1 -#EXTINF:9.50,Segment 1 -segment1.ts -#EXTINF:9.00,Segment 2 -segment2.ts -#EXT-X-ENDLIST -`, - expected: manifest.HLSManifest{ - Version: 3, - TargetDuration: 10, - SequenceNumber: 1, - Segments: []manifest.Segment{ - {URI: "segment1.ts", Duration: 9.5, Title: "Segment 1"}, - {URI: "segment2.ts", Duration: 9.0, Title: "Segment 2"}, - }, - EndList: true, - }, - err: false, - }, - // { - // name: "missing target duration", - // data: `#EXTM3U - // #EXT-X-VERSION:3 - // #EXT-X-MEDIA-SEQUENCE:1 - // #EXTINF:9.50,Segment 1 - // segment1.ts - // `, - // expected: manifest.HLSManifest{}, - // err: true, - // }, - { - name: "empty file", - data: "", - expected: manifest.HLSManifest{ - Version: 0, - TargetDuration: 0, - SequenceNumber: 0, - EndList: false, - }, - err: false, - }, - } + outputPath := filepath.Join(uploadID, uploadID+"_master.m3u8") + + _, err = os.Stat(outputPath) + require.NoError(t, err, "Master playlist file should exist") - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - actual, err := manifest.FromM3U8(tt.data) - if tt.err { - assert.Error(t, err) - } else { - assert.NoError(t, err) - assert.Equal(t, tt.expected, *actual) - } - }) + // #nosec + data, err := os.ReadFile(outputPath) + require.NoError(t, err, "Error reading the generated file") + + playlistContent := string(data) + + require.Contains(t, playlistContent, "#EXTM3U", "File must start with #EXTM3U") + + for _, res := range resolutions { + expectedLine := uploadID + "_" + res + "_master.m3u8" + require.Contains(t, playlistContent, expectedLine, "Missing expected reference: "+expectedLine) } + + _ = os.RemoveAll(uploadID) } diff --git a/internal/pkg/resconv/resconv.go b/internal/pkg/resconv/resconv.go new file mode 
100644 index 00000000000..4e33a8db440 --- /dev/null +++ b/internal/pkg/resconv/resconv.go @@ -0,0 +1,129 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +// Package resconv implements conversions to and from string representations of video resolutions. +package resconv + +import ( + "sort" + "strconv" + "strings" +) + +const defaultLevel = "320p" + +var prefixes = []struct { + pixels int + label string +}{ + {pixels: 640 * 480, label: "320p"}, + {pixels: 1280 * 720, label: "480p"}, + {pixels: 1920 * 1080, label: "720p"}, + {pixels: 2560 * 1440, label: "1080p"}, + {pixels: 3840 * 2160, label: "2k"}, + {pixels: 5120 * 2880, label: "4k"}, + {pixels: 7680 * 4320, label: "5k"}, +} + +var bandwidthMap = map[string]int{ + "320p": 300000, + "360p": 500000, + "480p": 2000000, + "720p": 5000000, + "1080p": 8000000, + "1440p": 16000000, + "4k": 25000000, + "8k": 50000000, +} + +var resolutions = map[string]string{ + "320p": "480:240", + "480p": "640:480", + "720p": "1280:720", + "1080p": "1920:1080", + "2k": "2048:1080", + "4k": "3840:2160", + "5k": "5120:2880", + "8k": "7680:4320", +} + +// SubLevels returns sublevels for the resolution +func SubLevels(resolution string) (res []string) { + var pixels = Pixels(resolution) + var idx = sort.Search(len(prefixes), func(i int) bool { + return pixels < prefixes[i].pixels + }) + if idx < 2 { + return res + } + + idx-- + idx = min(idx, 3) + + for idx >= 1 { + res = append(res, prefixes[idx].label) + idx-- + if len(res) == 2 { + break + } + } + + return res +} + +// Resolution returns default resolution based on the level +func Resolution(level string) string { + if v, ok := resolutions[level]; ok { + return v + } + return Resolution(defaultLevel) +} + +// Level converts the resolution to short prefix +func Level(resolution string) string { + var pixels = Pixels(resolution) + idx := sort.Search(len(prefixes), func(i int) bool { + return pixels < prefixes[i].pixels + }) + if idx == len(prefixes) { + return "8k" + } + + return prefixes[idx].label +} + +// Pixels returns amount of pixels for the resolution +func Pixels(resolution string) int { + var parts = strings.Split(resolution, ":") + var w, h = 420, 240 + + if len(parts) > 1 { + var _w, _ = strconv.Atoi(parts[0]) + var _h, _ = strconv.Atoi(parts[1]) + w = max(w, _w) + h = max(h, _h) + } + + return w * h +} + +// Bandwidth returns default bandwidth for the resolution +func Bandwidth(resolution string) int { + if v, ok := bandwidthMap[resolution]; ok { + return v + } + + return bandwidthMap[defaultLevel] +} diff --git a/internal/pkg/resconv/resconv_test.go b/internal/pkg/resconv/resconv_test.go new file mode 100644 index 00000000000..4139e21370a --- /dev/null +++ b/internal/pkg/resconv/resconv_test.go @@ -0,0 +1,120 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +package resconv_test + +import ( + "testing" + + "github.com/huly-stream/internal/pkg/resconv" + "github.com/stretchr/testify/require" +) + +func Test_Resconv_ShouldReturnCorrectPrefix(t *testing.T) { + tests := []struct { + res string + expected string + }{ + {res: "320:240", expected: "320p"}, + {res: "640:480", expected: "480p"}, + {res: "1280:720", expected: "720p"}, + {res: "1920:1080", expected: "1080p"}, + {res: "2560:1440", expected: "2k"}, + {res: "3840:2160", expected: "4k"}, + {res: "5120:2880", expected: "5k"}, + {res: "9000:4000", expected: "8k"}, + } + + for _, tt := range tests { + t.Run(tt.expected, func(t *testing.T) { + result := resconv.Level(tt.res) + require.Equal(t, tt.expected, result, "ResolutionFromPixels(%d)", tt.res) + }) + } +} + +func Test_Resconv_ShouldReturnCorrectPrefixes(t *testing.T) { + tests := []struct { + name string + res string + expected []string + }{ + { + name: "pixels below smallest resolution", + res: "640:479", + expected: nil, + }, + { + name: "pixels equal to smallest resolution", + res: "640:480", + expected: nil, + }, + { + name: "pixels just above smallest resolution", + res: "641:480", + expected: nil, + }, + { + name: "pixels equal to 720p", + res: "1280:720", + expected: []string{"480p"}, + }, + { + name: "pixels just above 480p", + res: "1280:721", + expected: []string{"480p"}, + }, + { + name: "pixels equal to 1k", + res: "1920:1080", + expected: []string{"720p", "480p"}, + }, + { + name: "pixels just above 1k", + res: "1920:1081", + expected: []string{"720p", "480p"}, + }, + { + name: "pixels equal to 1k", + res: "2560:1440", + expected: []string{"1080p", "720p"}, + }, + { + name: "pixels equal to 2k", + res: "3840:2160", + expected: []string{"1080p", "720p"}, + }, + { + name: "pixels equal to 4k", + res: "5120:2160", + expected: []string{"1080p", "720p"}, + }, + { + name: "pixels equal to 5k", + res: "7680:4320", + expected: []string{"1080p", "720p"}, + }, + { + name: "pixels above largest resolution", + res: "7681:4320", + expected: []string{"1080p", "720p"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := resconv.SubLevels(tt.res) + require.Equal(t, tt.expected, result, "SubResolutionsFromPixels(%d) returned unexpected result", tt.res) + }) + } +} diff --git a/internal/pkg/transcoding/command.go b/internal/pkg/transcoding/command.go index f43b5cf594f..0065fa31969 100644 --- a/internal/pkg/transcoding/command.go +++ b/internal/pkg/transcoding/command.go @@ -22,61 +22,29 @@ import ( "os" "os/exec" "path/filepath" - "sort" - "strconv" - "strings" "github.com/pkg/errors" "github.com/huly-stream/internal/pkg/log" + "github.com/huly-stream/internal/pkg/resconv" "go.uber.org/zap" ) // Options represents configuration for the ffmpeg command type Options struct { - OuputDir string - Resolutions []string - Threads int - UploadID string + OuputDir string + ScalingLevels []string + Level string + Threads int + UploadID string } -func measure(options *Options) int64 { - var res int64 - for _, resolution := range options.Resolutions { - var w, h int - var parts = strings.Split(resolution, 
":") - - if len(parts) > 1 { - w, _ = strconv.Atoi(parts[0]) - w = max(w, 320) - h, _ = strconv.Atoi(parts[1]) - h = max(h, 240) - - res += int64(w) * int64(h) - } - } - - return max(res, 320*240) -} - -func newFfmpegCommand(ctx context.Context, in io.Reader, options *Options) (*exec.Cmd, error) { - if options == nil { - return nil, errors.New("options should not be nil") - } +func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.Cmd, error) { if ctx == nil { return nil, errors.New("ctx should not be nil") } - var logger = log.FromContext(ctx).With(zap.String("func", "NewFFMpegCommand")) - var args []string - - if options.Resolutions == nil { - logger.Debug("resolutions were not provided, building audio command...") - args = BuildAudioCommand(options) - } else { - logger.Debug("building video command...") - args = BuildVideoCommand(options) - } + var logger = log.FromContext(ctx).With(zap.String("func", "newFFMpegCommand")) logger.Debug("prepared command: ", zap.Strings("args", args)) @@ -90,6 +58,7 @@ func newFfmpegCommand(ctx context.Context, in io.Reader, options *Options) (*exe func buildCommonComamnd(opts *Options) []string { return []string{ + "-nostdin", "-threads", fmt.Sprint(opts.Threads), "-i", "pipe:0", } @@ -105,25 +74,25 @@ func BuildAudioCommand(opts *Options) []string { ) } -// BuildVideoCommand returns flags for ffmpeg for video transcoding -func BuildVideoCommand(opts *Options) []string { - var result = buildCommonComamnd(opts) - - for _, res := range opts.Resolutions { - var prefix string - var w, h int - var parts = strings.Split(res, ":") - - if len(parts) > 1 { - w, _ = strconv.Atoi(parts[0]) - h, _ = strconv.Atoi(parts[1]) - } - w = max(w, 640) - h = max(h, 480) - prefix = ResolutionFromPixels(w * h) +// BuildRawVideoCommand returns an extremely lightweight ffmpeg command for converting raw video without extra cost. 
+func BuildRawVideoCommand(opts *Options) []string { + return append(buildCommonComamnd(opts), + "-c:v", + "copy", + "-fps_mode", + "vfr", + "-hls_time", "5", + "-hls_list_size", "0", + "-hls_segment_filename", filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), + filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) +} +// BuildScalingVideoCommand returns flags for ffmpeg for video scaling +func BuildScalingVideoCommand(opts *Options) []string { + var result = buildCommonComamnd(opts) + for _, level := range opts.ScalingLevels { result = append(result, - "-vf", fmt.Sprintf("scale=%d:%d", w, h), + "-vf", "scale="+resconv.Resolution(level), "-c:v", "libx264", "-preset", "veryfast", @@ -131,33 +100,9 @@ func BuildVideoCommand(opts *Options) []string { "-g", "60", "-hls_time", "5", "-hls_list_size", "0", - "-hls_segment_filename", filepath.Join(opts.OuputDir, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", prefix)), - filepath.Join(opts.OuputDir, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, prefix))) + "-hls_segment_filename", filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", level)), + filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, level))) } return result } - -var resolutions = []struct { - pixels int - label string -}{ - {pixels: 640 * 480, label: "320p"}, - {pixels: 1280 * 720, label: "480p"}, - {pixels: 1920 * 1080, label: "720p"}, - {pixels: 2560 * 1440, label: "1k"}, - {pixels: 3840 * 2160, label: "2k"}, - {pixels: 5120 * 2160, label: "4k"}, - {pixels: 7680 * 4320, label: "5k"}, -} - -// ResolutionFromPixels converts pixel count to short string -func ResolutionFromPixels(pixels int) string { - idx := sort.Search(len(resolutions), func(i int) bool { - return pixels < resolutions[i].pixels - }) - if idx == len(resolutions) { - return "8k" - } - return resolutions[idx].label -} diff --git a/internal/pkg/transcoding/command_test.go b/internal/pkg/transcoding/command_test.go index 14a7db515d1..719c38467a7 100644 --- a/internal/pkg/transcoding/command_test.go +++ b/internal/pkg/transcoding/command_test.go @@ -14,49 +14,36 @@ package transcoding_test import ( - "runtime" "strings" "testing" + "github.com/huly-stream/internal/pkg/resconv" "github.com/huly-stream/internal/pkg/transcoding" "github.com/stretchr/testify/require" ) -func Test_BuildVideoCommand_Basic(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip() - } - var simpleHlsCommand = transcoding.BuildVideoCommand(&transcoding.Options{ - OuputDir: "test", - UploadID: "1", - Threads: 4, - Resolutions: []string{"1280:720"}, +func Test_BuildVideoCommand_Scaling(t *testing.T) { + var scaleCommand = transcoding.BuildScalingVideoCommand(&transcoding.Options{ + OuputDir: "test", + UploadID: "1", + Threads: 4, + ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -vf scale=1280:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1_%03d_720p.ts test/1_720p_master.m3u8` + const expected = `-nostdin -threads 4 -i pipe:0 -vf scale=1280:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -vf scale=640:480 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` - require.Contains(t, 
expected, strings.Join(simpleHlsCommand, " ")) + require.Contains(t, expected, strings.Join(scaleCommand, " ")) } -func TestResolutionFromPixels(t *testing.T) { - tests := []struct { - pixels int - expected string - }{ - {pixels: 320 * 240, expected: "320p"}, - {pixels: 640 * 480, expected: "480p"}, - {pixels: 1280 * 720, expected: "720p"}, - {pixels: 1920 * 1080, expected: "1k"}, - {pixels: 2560 * 1440, expected: "2k"}, - {pixels: 3840 * 2160, expected: "4k"}, - {pixels: 5120 * 2160, expected: "5k"}, - {pixels: 9000 * 4000, expected: "8k"}, - } - - for _, tt := range tests { - t.Run(tt.expected, func(t *testing.T) { - result := transcoding.ResolutionFromPixels(tt.pixels) - require.Equal(t, tt.expected, result, "ResolutionFromPixels(%d)", tt.pixels) - }) - } +func Test_BuildVideoCommand_Raw(t *testing.T) { + var rawCommand = transcoding.BuildRawVideoCommand(&transcoding.Options{ + OuputDir: "test", + UploadID: "1", + Threads: 4, + Level: resconv.Level("651:490"), + }) + + const expected = `-nostdin -threads 4 -i pipe:0 -c:v copy -fps_mode vfr -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + + require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/transcoding/scheduler.go b/internal/pkg/transcoding/scheduler.go index 25872a30477..080aeedbb58 100644 --- a/internal/pkg/transcoding/scheduler.go +++ b/internal/pkg/transcoding/scheduler.go @@ -17,14 +17,15 @@ package transcoding import ( "context" - "strings" "sync" + "time" "github.com/pkg/errors" "github.com/google/uuid" "github.com/huly-stream/internal/pkg/config" "github.com/huly-stream/internal/pkg/log" + "github.com/huly-stream/internal/pkg/resconv" "github.com/huly-stream/internal/pkg/sharedpipe" "github.com/huly-stream/internal/pkg/uploader" "github.com/tus/tusd/v2/pkg/handler" @@ -40,6 +41,7 @@ type Scheduler struct { mainContext context.Context logger *zap.Logger workers sync.Map + cancels sync.Map } // NewScheduler creates a new scheduler for transcode operations. 
@@ -57,58 +59,95 @@ func (s *Scheduler) NewUpload(ctx context.Context, info handler.FileInfo) (handl if info.ID == "" { info.ID = uuid.NewString() } - + s.logger.Sugar().Debugf("upload: %v", info) s.logger.Debug("NewUpload", zap.String("ID", info.ID)) - var result = &Worker{ - done: make(chan struct{}), + var worker = &Worker{ writer: sharedpipe.NewWriter(), info: info, - logger: log.FromContext(s.mainContext).With(zap.String("Worker", info.ID)), + logger: log.FromContext(s.mainContext).With(zap.String("worker", info.ID)), + done: make(chan struct{}), } - var resolutions = strings.Split(info.MetaData["resolutions"], ",") + var scaling = resconv.SubLevels(info.MetaData["resolution"]) + var level = resconv.Level(info.MetaData["resolution"]) + var cost int64 - var commandOptions = Options{ - OuputDir: s.conf.OutputDir, - Threads: s.conf.MaxThreads, - UploadID: info.ID, - Resolutions: resolutions, + for _, scale := range scaling { + cost += int64(resconv.Pixels(resconv.Resolution(scale))) } - result.cost = measure(&commandOptions) + if !s.limiter.TryConsume(cost) { + s.logger.Debug("run out of resources for scaling") + scaling = nil + } - if !s.limiter.TryConsume(result.cost) { - s.logger.Error("run out of resources") - return nil, errors.New("run out of resources") + var commandOptions = Options{ + OuputDir: s.conf.OutputDir, + Threads: s.conf.MaxThreads, + UploadID: info.ID, + Level: level, + ScalingLevels: scaling, } if s.conf.EndpointURL != nil { - s.logger.Debug("found endpoint url in the config, starting uploader...") - var contentUploader, err = uploader.New(s.mainContext, *s.conf, info.ID, info.MetaData) + s.logger.Sugar().Debugf("initializing uploader for %v", info) + var contentUploader, err = uploader.New(s.mainContext, s.conf.OutputDir, s.conf.EndpointURL, info) if err != nil { + s.logger.Error("can not create uploader", zap.Error(err)) return nil, err } - result.contentUploader = contentUploader + + worker.contentUploader = contentUploader go func() { - var serverErr = result.contentUploader.Serve() - result.logger.Debug("content uploader has finished", zap.Error(serverErr)) + var serverErr = worker.contentUploader.Serve() + worker.logger.Debug("content uploader has finished", zap.Error(serverErr)) }() } - - s.workers.Store(result.info.ID, result) - s.logger.Sugar().Debugf("New Upload: info %v", result.info) - if err := result.start(s.mainContext, &commandOptions); err != nil { + s.workers.Store(worker.info.ID, worker) + if err := worker.start(s.mainContext, &commandOptions); err != nil { return nil, err } - return result, nil + + go func() { + worker.wg.Wait() + s.limiter.ReturnCapacity(cost) + s.logger.Debug("returned capacity", zap.Int64("capacity", cost)) + close(worker.done) + }() + + s.logger.Debug("NewUpload", zap.String("done", info.ID)) + return worker, nil } // GetUpload returns current a worker based on upload id func (s *Scheduler) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { if v, ok := s.workers.Load(id); ok { s.logger.Debug("GetUpload: found worker by id", zap.String("id", id)) - return v.(*Worker), nil + var w = v.(*Worker) + var cancelCtx, cancel = context.WithCancel(context.Background()) + if v, ok := s.cancels.Load(id); ok { + v.(context.CancelFunc)() + } + s.cancels.Store(id, cancel) + go func() { + select { + case <-w.done: + w.logger.Debug("upload timeout just canceled") + s.cancels.Delete(id) + return + case <-cancelCtx.Done(): + w.logger.Debug("upload refreshed") + return + case <-time.After(s.conf.Timeout): + 
w.logger.Debug("upload timeout") + s.cancels.Delete(id) + var terminateCtx, terminateCancel = context.WithTimeout(context.Background(), s.conf.Timeout) + defer terminateCancel() + _ = w.Terminate(terminateCtx) + } + }() + return w, nil } s.logger.Debug("GetUpload: worker not found", zap.String("id", id)) return nil, errors.New("bad id") @@ -117,8 +156,7 @@ func (s *Scheduler) GetUpload(ctx context.Context, id string) (upload handler.Up // AsTerminatableUpload returns tusd handler.TerminatableUpload func (s *Scheduler) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload { var worker = upload.(*Worker) - s.logger.Debug("AsTerminatableUpload, trying to return capacity", zap.Int64("cost", worker.cost)) - s.limiter.ReturnCapacity(worker.cost) + s.logger.Debug("AsTerminatableUpload") return worker } diff --git a/internal/pkg/transcoding/worker.go b/internal/pkg/transcoding/worker.go index 60baccc4657..1235bfcf022 100644 --- a/internal/pkg/transcoding/worker.go +++ b/internal/pkg/transcoding/worker.go @@ -17,9 +17,11 @@ package transcoding import ( "context" "io" + "sync" "github.com/pkg/errors" + "github.com/huly-stream/internal/pkg/manifest" "github.com/huly-stream/internal/pkg/sharedpipe" "github.com/huly-stream/internal/pkg/uploader" "github.com/tus/tusd/v2/pkg/handler" @@ -33,8 +35,9 @@ type Worker struct { info handler.FileInfo writer *sharedpipe.Writer reader *sharedpipe.Reader - cost int64 - done chan struct{} + + wg sync.WaitGroup + done chan struct{} } // WriteChunk calls when client sends a chunk of raw data @@ -73,7 +76,7 @@ func (w *Worker) Terminate(ctx context.Context) error { w.logger.Debug("Terminating...") if w.contentUploader != nil { go func() { - <-w.done + w.wg.Wait() w.contentUploader.Rollback() }() } @@ -94,7 +97,7 @@ func (w *Worker) FinishUpload(ctx context.Context) error { w.logger.Debug("finishing upload...") if w.contentUploader != nil { go func() { - <-w.done + w.wg.Wait() w.contentUploader.Terminate() }() } @@ -108,18 +111,49 @@ func (s *Scheduler) AsConcatableUpload(upload handler.Upload) handler.Concatable } func (w *Worker) start(ctx context.Context, options *Options) error { + defer w.logger.Debug("start done") w.reader = w.writer.Transpile() - var cmd, err = newFfmpegCommand(ctx, w.reader, options) - if err != nil { + + if err := manifest.GenerateHLSPlaylist(append(options.ScalingLevels, options.Level), options.OuputDir, options.UploadID); err != nil { return err } + + w.wg.Add(1) go func() { - defer close(w.done) - if runErr := cmd.Run(); runErr != nil { - w.logger.Error("transoding provider is exited with error", zap.Error(err)) - } else { - w.logger.Debug("transoding provider has finished without errors") + defer w.wg.Done() + var logger = w.logger.With(zap.String("command", "raw")) + defer logger.Debug("done") + + var args = BuildRawVideoCommand(options) + var convertSourceCommand, err = newFfmpegCommand(ctx, w.reader, args) + if err != nil { + logger.Debug("can not start", zap.Error(err)) + } + err = convertSourceCommand.Run() + if err != nil { + logger.Debug("finished with error", zap.Error(err)) } }() + + if len(options.ScalingLevels) > 0 { + w.wg.Add(1) + var scalingCommandReader = w.writer.Transpile() + go func() { + defer w.wg.Done() + var logger = w.logger.With(zap.String("command", "scaling")) + defer logger.Debug("done") + + var args = BuildScalingVideoCommand(options) + var convertSourceCommand, err = newFfmpegCommand(ctx, scalingCommandReader, args) + if err != nil { + logger.Debug("can not start", zap.Error(err)) + } + err = 
convertSourceCommand.Run() + if err != nil { + logger.Debug("finished with error", zap.Error(err)) + } + }() + } + return nil } diff --git a/internal/pkg/uploader/datalake.go b/internal/pkg/uploader/datalake.go index 43ba535efd5..dfff6970128 100644 --- a/internal/pkg/uploader/datalake.go +++ b/internal/pkg/uploader/datalake.go @@ -18,7 +18,6 @@ import ( "context" "io" "mime/multipart" - "net/url" "os" "path/filepath" @@ -116,6 +115,7 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error client := fasthttp.Client{} if err := client.Do(req, res); err != nil { + logger.Error("failed to del", zap.Error(err)) return errors.Wrapf(err, "delete failed") } @@ -126,5 +126,5 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error func getObjectKey(s string) string { var _, objectKey = filepath.Split(s) - return url.QueryEscape(objectKey) + return objectKey } diff --git a/internal/pkg/uploader/s3.go b/internal/pkg/uploader/s3.go index ba688618f9f..faf15e22d0c 100644 --- a/internal/pkg/uploader/s3.go +++ b/internal/pkg/uploader/s3.go @@ -37,6 +37,7 @@ import ( type S3Storage struct { client *s3.Client bucketName string + logger *zap.Logger } // NewS3 creates a new S3 storage @@ -44,6 +45,7 @@ func NewS3(ctx context.Context, endpoint string) Storage { var accessKeyID = os.Getenv("AWS_ACCESS_KEY_ID") var accessKeySecret = os.Getenv("AWS_SECRET_ACCESS_KEY") var bucketName = os.Getenv("AWS_BUCKET_NAME") + var logger = log.FromContext(ctx).With(zap.String("s3", "storage")) cfg, err := config.LoadDefaultConfig(ctx, config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKeyID, accessKeySecret, "")), @@ -61,6 +63,7 @@ func NewS3(ctx context.Context, endpoint string) Storage { return &S3Storage{ client: s3Client, bucketName: bucketName, + logger: logger, } } diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index 86735191a4a..f61a6396ee9 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -18,52 +18,54 @@ import ( "context" "net/url" "os" + "path/filepath" "strings" "sync" "time" "github.com/pkg/errors" + "github.com/tus/tusd/v2/pkg/handler" "github.com/fsnotify/fsnotify" - "github.com/huly-stream/internal/pkg/config" "github.com/huly-stream/internal/pkg/log" "go.uber.org/zap" ) type uploader struct { - ctx context.Context - cancel context.CancelFunc - baseDir string - uploadID string - masterFiles sync.Map - postponeDuration time.Duration - sentFiles sync.Map - storage Storage - contexts sync.Map - retryCount int - removeLocalContentOnUpload bool - eventBufferCount uint - isMasterFileFunc func(s string) bool + ctx context.Context + cancel context.CancelFunc + baseDir string + uploadID string + masterFiles sync.Map + postponeDuration time.Duration + sentFiles sync.Map + storage Storage + contexts sync.Map + retryCount int + eventBufferCount uint + isMasterFileFunc func(s string) bool +} + +func (u *uploader) retry(action func() error) { + for range u.retryCount { + if err := action(); err == nil { + return + } + } } // Rollback deletes all delivered files and also deletes all local content by uploadID func (u *uploader) Rollback() { - log.FromContext(u.ctx).Debug("cancel") + logger := log.FromContext(u.ctx).With(zap.String("uploader", "Rollback")) + logger.Debug("starting") defer u.cancel() + u.sentFiles.Range(func(key, value any) bool { - log.FromContext(u.ctx).Debug("deleting remote file", zap.String("key", key.(string))) - for range u.retryCount { - var err = 
u.storage.DeleteFile(u.ctx, key.(string)) - if err == nil { - break - } - log.FromContext(u.ctx).Debug("can not delete file", zap.Error(err)) - } + logger.Debug("deleting remote file", zap.String("key", key.(string))) + u.retry(func() error { return u.storage.DeleteFile(u.ctx, key.(string)) }) return true }) - if !u.removeLocalContentOnUpload { - return - } + u.sentFiles.Range(func(key, value any) bool { log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) _ = os.Remove(key.(string)) @@ -72,50 +74,47 @@ func (u *uploader) Rollback() { } func (u *uploader) Terminate() { - log.FromContext(u.ctx).Debug("terminate") + logger := log.FromContext(u.ctx).With(zap.String("uploader", "Terminate")) + logger.Debug("starting") defer u.cancel() + u.masterFiles.Range(func(key, value any) bool { log.FromContext(u.ctx).Debug("uploading master file", zap.String("key", key.(string))) - for range u.retryCount { - var uploadErr = u.storage.UploadFile(u.ctx, key.(string)) - if uploadErr == nil { - break - } - log.FromContext(u.ctx).Debug("can not upload file", zap.Error(uploadErr)) - } + go u.retry(func() error { return u.storage.UploadFile(u.ctx, key.(string)) }) return true }) - if !u.removeLocalContentOnUpload { - return - } + u.masterFiles.Range(func(key, value any) bool { - log.FromContext(u.ctx).Debug("deleting local master file", zap.String("key", key.(string))) _ = os.Remove(key.(string)) return true }) + u.sentFiles.Range(func(key, value any) bool { - log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) _ = os.Remove(key.(string)) return true }) } func (u *uploader) Serve() error { - var logger = log.FromContext(u.ctx) - logger = logger.With(zap.String("uploader", u.uploadID), zap.String("dir", u.baseDir)) + var logger = log.FromContext(u.ctx).With(zap.String("uploader", u.uploadID), zap.String("dir", u.baseDir)) var watcher, err = fsnotify.NewBufferedWatcher(u.eventBufferCount) + if err != nil { logger.Error("can not start watcher") return err } + + _ = os.MkdirAll(u.baseDir, os.ModePerm) + if err := watcher.Add(u.baseDir); err != nil { return err } + defer func() { _ = watcher.Close() }() - logger.Debug("uploader initialized and started to watch") + logger.Debug("the uploader has initialized and started watching") for { select { @@ -123,6 +122,9 @@ func (u *uploader) Serve() error { logger.Debug("done") return u.ctx.Err() case event, ok := <-watcher.Events: + if strings.HasSuffix(event.Name, "tmp") { + continue + } if !strings.Contains(event.Name, u.uploadID) { continue } @@ -169,29 +171,32 @@ type Storage interface { } // New creates a new instance of Uplaoder -func New(ctx context.Context, conf config.Config, uploadID string, metadata map[string]string) (Uploader, error) { - var uploaderCtx, uploaderCancel = context.WithCancel(ctx) +func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo handler.FileInfo) (Uploader, error) { + var uploaderCtx, uploadCancel = context.WithCancel(context.Background()) + uploaderCtx = log.WithLoggerFields(uploaderCtx) + go func() { + <-ctx.Done() + time.Sleep(time.Minute * 2) + uploadCancel() + }() var storage Storage var err error - if conf.EndpointURL != nil { - storage, err = NewStorageByURL(ctx, conf.EndpointURL, metadata) - if err != nil { - uploaderCancel() - return nil, err - } + storage, err = NewStorageByURL(ctx, endpointURL, uploadInfo.MetaData) + if err != nil { + uploadCancel() + return nil, err } return &uploader{ - ctx: uploaderCtx, - cancel: uploaderCancel, - uploadID: 
uploadID, - removeLocalContentOnUpload: conf.RemoveContentOnUpload, - postponeDuration: time.Second * 2, - storage: storage, - retryCount: 5, - baseDir: conf.OutputDir, - eventBufferCount: 100, + ctx: uploaderCtx, + cancel: uploadCancel, + uploadID: uploadInfo.ID, + postponeDuration: time.Second * 2, + storage: storage, + retryCount: 5, + baseDir: filepath.Join(baseDir, uploadInfo.ID), + eventBufferCount: 100, isMasterFileFunc: func(s string) bool { return strings.HasSuffix(s, "m3u8") }, From a6e428cef0c7bd71c95ccb0821ec781016d405f1 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 25 Feb 2025 03:03:29 +0300 Subject: [PATCH 043/636] fix: correctly handle uploader termination Signed-off-by: denis-tingaikin --- cmd/huly-stream/main.go | 3 +- internal/pkg/config/config.go | 11 +++++ internal/pkg/manifest/hls.go | 5 ++ internal/pkg/transcoding/worker.go | 2 - internal/pkg/uploader/datalake.go | 2 +- internal/pkg/uploader/postpone.go | 2 + internal/pkg/uploader/s3.go | 2 +- internal/pkg/uploader/uploader.go | 73 ++++++++++++------------------ 8 files changed, 51 insertions(+), 49 deletions(-) diff --git a/cmd/huly-stream/main.go b/cmd/huly-stream/main.go index f660a2511f1..d7c5d9690bf 100644 --- a/cmd/huly-stream/main.go +++ b/cmd/huly-stream/main.go @@ -49,11 +49,12 @@ func main() { var conf = must(config.FromEnv()) logger.Sugar().Debugf("provided config is %v", conf) + logger.Sugar().Info(conf.Endpoint()) + mustNoError(os.MkdirAll(conf.OutputDir, os.ModePerm)) if conf.PprofEnabled { go pprof.ListenAndServe(ctx, "localhost:6060") } - scheduler := transcoding.NewScheduler(ctx, conf) tusComposer := tusd.NewStoreComposer() diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 9c296309c60..4eb601749e5 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -54,3 +54,14 @@ func FromEnv() (*Config, error) { return &result, nil } + +func (c *Config) Endpoint() *url.URL { + var scheme = "https" + if c.Insecure { + scheme = "http" + } + return &url.URL{ + Scheme: scheme, + Host: c.EndpointURL.Host, + } +} diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index d8dd7a53613..c301e4f4f51 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -53,6 +53,11 @@ func GenerateHLSPlaylist(levels []string, outputPath, uploadID string) error { if err != nil { return err } + + _, err = file.WriteString("#EXT-X-ENDLIST") + if err != nil { + return err + } } return nil diff --git a/internal/pkg/transcoding/worker.go b/internal/pkg/transcoding/worker.go index 1235bfcf022..c2aaca3d349 100644 --- a/internal/pkg/transcoding/worker.go +++ b/internal/pkg/transcoding/worker.go @@ -113,11 +113,9 @@ func (s *Scheduler) AsConcatableUpload(upload handler.Upload) handler.Concatable func (w *Worker) start(ctx context.Context, options *Options) error { defer w.logger.Debug("start done") w.reader = w.writer.Transpile() - if err := manifest.GenerateHLSPlaylist(append(options.ScalingLevels, options.Level), options.OuputDir, options.UploadID); err != nil { return err } - w.wg.Add(1) go func() { defer w.wg.Done() diff --git a/internal/pkg/uploader/datalake.go b/internal/pkg/uploader/datalake.go index dfff6970128..097bf0a6b5d 100644 --- a/internal/pkg/uploader/datalake.go +++ b/internal/pkg/uploader/datalake.go @@ -37,7 +37,7 @@ type DatalakeStorage struct { // NewDatalakeStorage creates a new datalake client func NewDatalakeStorage(baseURL, workspace, token string) Storage { return &DatalakeStorage{ - baseURL: "https://" + 
baseURL, + baseURL: baseURL, token: token, workspace: workspace, } diff --git a/internal/pkg/uploader/postpone.go b/internal/pkg/uploader/postpone.go index 61b22b79b42..3486996ec18 100644 --- a/internal/pkg/uploader/postpone.go +++ b/internal/pkg/uploader/postpone.go @@ -19,6 +19,7 @@ import ( ) func (u *uploader) postpone(id string, action func()) { + u.wg.Add(1) var ctx, cancel = context.WithCancel(context.Background()) var startCh = time.After(u.postponeDuration) @@ -28,6 +29,7 @@ func (u *uploader) postpone(id string, action func()) { u.contexts.Store(id, &cancel) go func() { + u.wg.Done() defer cancel() select { case <-ctx.Done(): diff --git a/internal/pkg/uploader/s3.go b/internal/pkg/uploader/s3.go index faf15e22d0c..016e6fc87c3 100644 --- a/internal/pkg/uploader/s3.go +++ b/internal/pkg/uploader/s3.go @@ -56,8 +56,8 @@ func NewS3(ctx context.Context, endpoint string) Storage { } var s3Client = s3.NewFromConfig(cfg, func(o *s3.Options) { - endpoint = "https://" + endpoint o.BaseEndpoint = &endpoint + o.UsePathStyle = true }) return &S3Storage{ diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index f61a6396ee9..a2c76811cda 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -27,23 +27,24 @@ import ( "github.com/tus/tusd/v2/pkg/handler" "github.com/fsnotify/fsnotify" + "github.com/huly-stream/internal/pkg/config" "github.com/huly-stream/internal/pkg/log" "go.uber.org/zap" ) type uploader struct { + done chan struct{} + wg sync.WaitGroup ctx context.Context cancel context.CancelFunc baseDir string uploadID string - masterFiles sync.Map postponeDuration time.Duration sentFiles sync.Map storage Storage contexts sync.Map retryCount int eventBufferCount uint - isMasterFileFunc func(s string) bool } func (u *uploader) retry(action func() error) { @@ -60,13 +61,11 @@ func (u *uploader) Rollback() { logger.Debug("starting") defer u.cancel() + u.wg.Wait() + u.sentFiles.Range(func(key, value any) bool { logger.Debug("deleting remote file", zap.String("key", key.(string))) u.retry(func() error { return u.storage.DeleteFile(u.ctx, key.(string)) }) - return true - }) - - u.sentFiles.Range(func(key, value any) bool { log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) _ = os.Remove(key.(string)) return true @@ -78,16 +77,7 @@ func (u *uploader) Terminate() { logger.Debug("starting") defer u.cancel() - u.masterFiles.Range(func(key, value any) bool { - log.FromContext(u.ctx).Debug("uploading master file", zap.String("key", key.(string))) - go u.retry(func() error { return u.storage.UploadFile(u.ctx, key.(string)) }) - return true - }) - - u.masterFiles.Range(func(key, value any) bool { - _ = os.Remove(key.(string)) - return true - }) + u.wg.Wait() u.sentFiles.Range(func(key, value any) bool { _ = os.Remove(key.(string)) @@ -98,6 +88,7 @@ func (u *uploader) Terminate() { func (u *uploader) Serve() error { var logger = log.FromContext(u.ctx).With(zap.String("uploader", u.uploadID), zap.String("dir", u.baseDir)) var watcher, err = fsnotify.NewBufferedWatcher(u.eventBufferCount) + defer close(u.done) if err != nil { logger.Error("can not start watcher") @@ -105,6 +96,16 @@ func (u *uploader) Serve() error { } _ = os.MkdirAll(u.baseDir, os.ModePerm) + initFiles, _ := os.ReadDir(u.baseDir) + for _, f := range initFiles { + var name = filepath.Join(u.baseDir, f.Name()) + u.postpone(name, func() { + logger.Debug("started uploading", zap.String("eventName", name)) + u.retry(func() error { return 
u.storage.UploadFile(u.ctx, name) }) + logger.Debug("added to sentFiles", zap.String("eventName", name)) + u.sentFiles.Store(name, struct{}{}) + }) + } if err := watcher.Add(u.baseDir); err != nil { return err @@ -114,37 +115,26 @@ func (u *uploader) Serve() error { _ = watcher.Close() }() - logger.Debug("the uploader has initialized and started watching") + logger.Debug("uploader has initialized and started watching") + defer logger.Debug("done") for { select { case <-u.ctx.Done(): - logger.Debug("done") return u.ctx.Err() case event, ok := <-watcher.Events: + if !ok { + return u.ctx.Err() + } if strings.HasSuffix(event.Name, "tmp") { continue } if !strings.Contains(event.Name, u.uploadID) { continue } - if !ok { - return u.ctx.Err() - } - if u.isMasterFileFunc(event.Name) { - u.masterFiles.Store(event.Name, struct{}{}) - logger.Debug("found master file", zap.String("eventName", event.Name)) - continue - } u.postpone(event.Name, func() { - logger.Debug("started to upload", zap.String("eventName", event.Name)) - for range u.retryCount { - var uploadErr = u.storage.UploadFile(u.ctx, event.Name) - if uploadErr == nil { - break - } - logger.Error("can not upload file", zap.Error(uploadErr)) - } + logger.Debug("started uploading", zap.String("eventName", event.Name)) + u.retry(func() error { return u.storage.UploadFile(u.ctx, event.Name) }) logger.Debug("added to sentFiles", zap.String("eventName", event.Name)) u.sentFiles.Store(event.Name, struct{}{}) }) @@ -174,11 +164,7 @@ type Storage interface { func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo handler.FileInfo) (Uploader, error) { var uploaderCtx, uploadCancel = context.WithCancel(context.Background()) uploaderCtx = log.WithLoggerFields(uploaderCtx) - go func() { - <-ctx.Done() - time.Sleep(time.Minute * 2) - uploadCancel() - }() + var storage Storage var err error @@ -191,20 +177,19 @@ func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo h return &uploader{ ctx: uploaderCtx, cancel: uploadCancel, + done: make(chan struct{}), uploadID: uploadInfo.ID, postponeDuration: time.Second * 2, storage: storage, retryCount: 5, baseDir: filepath.Join(baseDir, uploadInfo.ID), eventBufferCount: 100, - isMasterFileFunc: func(s string) bool { - return strings.HasSuffix(s, "m3u8") - }, }, nil } // NewStorageByURL creates a new storage basd on the type from the url scheme, for example "datalake://my-datalake-endpoint" func NewStorageByURL(ctx context.Context, u *url.URL, headers map[string]string) (Storage, error) { + c, _ := config.FromEnv() switch u.Scheme { case "tus": return nil, errors.New("not imlemented yet") @@ -215,9 +200,9 @@ func NewStorageByURL(ctx context.Context, u *url.URL, headers map[string]string) if headers["token"] == "" { return nil, errors.New("missed auth token in the client's metadata") } - return NewDatalakeStorage(u.Hostname(), headers["workspace"], headers["token"]), nil + return NewDatalakeStorage(c.Endpoint().String(), headers["workspace"], headers["token"]), nil case "s3": - return NewS3(ctx, u.Hostname()), nil + return NewS3(ctx, c.Endpoint().String()), nil default: return nil, errors.New("unknown scheme") } From f25bd82709b1f94be948948564fc4aeb33756c25 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 25 Feb 2025 03:06:03 +0300 Subject: [PATCH 044/636] fix linter Signed-off-by: denis-tingaikin --- internal/pkg/config/config.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 
4eb601749e5..057a4b0a6e4 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -55,6 +55,7 @@ func FromEnv() (*Config, error) { return &result, nil } +// Endpoint returns upload address func (c *Config) Endpoint() *url.URL { var scheme = "https" if c.Insecure { From c7184a2bb08db68540bc304fc83821ea6155ec1b Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 27 Feb 2025 11:14:55 +0400 Subject: [PATCH 045/636] Adjust groups (#31) * Messages groups & threads --- bun.lock | 141 ++-- package.json | 15 +- packages/client-query/package.json | 2 +- packages/client-query/src/index.ts | 5 +- packages/client-query/src/query.ts | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-sqlite/src/client.ts | 30 +- packages/client-ws/package.json | 3 +- packages/client-ws/src/client.ts | 59 +- packages/client-ws/src/index.ts | 3 + packages/cockroach/migrations/00_schema.sql | 1 - packages/cockroach/migrations/01_message.sql | 11 +- packages/cockroach/migrations/02_patch.sql | 3 +- .../cockroach/migrations/03_attachment.sql | 2 +- packages/cockroach/migrations/04_reaction.sql | 3 +- .../migrations/05_notification_context.sql | 15 - packages/cockroach/migrations/05_thread.sql | 15 + .../cockroach/migrations/06_notification.sql | 17 + packages/cockroach/package.json | 5 +- packages/cockroach/src/adapter.ts | 197 ++++- packages/cockroach/src/connection.ts | 9 +- packages/cockroach/src/db/base.ts | 108 +-- packages/cockroach/src/db/message.ts | 717 ++++++++++------ packages/cockroach/src/db/notification.ts | 387 +++++---- packages/cockroach/src/db/schema.ts | 251 +++--- packages/cockroach/src/db/utils.ts | 44 +- packages/cockroach/src/index.ts | 2 +- packages/cockroach/src/types.ts | 16 + packages/core/src/index.ts | 1 - packages/core/src/message.ts | 24 - packages/examples/package.json | 4 +- packages/examples/src/index.ts | 32 +- packages/query/package.json | 7 +- packages/query/src/lq.ts | 29 +- packages/query/src/messages/query.ts | 778 ++++++++++++++---- packages/query/src/messages/utils.ts | 54 ++ packages/query/src/notifications/query.ts | 215 ++--- packages/query/src/query.ts | 41 +- packages/query/src/result.ts | 10 +- packages/query/src/types.ts | 31 +- packages/query/src/window.ts | 8 +- packages/sdk-types/package.json | 4 +- packages/sdk-types/src/client.ts | 9 +- packages/sdk-types/src/db.ts | 41 +- packages/sdk-types/src/query.ts | 8 +- packages/sdk-types/src/requestEvent.ts | 44 +- packages/sdk-types/src/responseEvent.ts | 18 +- packages/sdk-types/src/serverApi.ts | 1 + packages/server/package.json | 2 +- packages/server/src/eventProcessor.ts | 214 +++-- packages/server/src/index.ts | 6 +- packages/server/src/manager.ts | 38 +- packages/server/src/triggers.ts | 138 ++-- packages/{core => shared}/package.json | 10 +- packages/shared/src/files.ts | 58 ++ packages/shared/src/index.ts | 3 + packages/shared/src/message.ts | 24 + packages/shared/src/retry.ts | 20 + packages/{core => shared}/tsconfig.json | 0 packages/sqlite-wasm/package.json | 2 +- packages/sqlite-wasm/src/adapter.ts | 2 - packages/sqlite-wasm/src/db/message.ts | 33 +- packages/sqlite-wasm/src/db/notification.ts | 4 +- packages/types/package.json | 2 +- packages/types/src/file.ts | 36 + packages/types/src/index.ts | 1 + packages/types/src/message.ts | 47 +- packages/types/src/query.ts | 43 +- 68 files changed, 2602 insertions(+), 1505 deletions(-) delete mode 100644 packages/cockroach/migrations/05_notification_context.sql create mode 100644 packages/cockroach/migrations/05_thread.sql create mode 
100644 packages/cockroach/src/types.ts delete mode 100644 packages/core/src/index.ts delete mode 100644 packages/core/src/message.ts create mode 100644 packages/query/src/messages/utils.ts rename packages/{core => shared}/package.json (68%) create mode 100644 packages/shared/src/files.ts create mode 100644 packages/shared/src/index.ts create mode 100644 packages/shared/src/message.ts create mode 100644 packages/shared/src/retry.ts rename packages/{core => shared}/tsconfig.json (100%) create mode 100644 packages/types/src/file.ts diff --git a/bun.lock b/bun.lock index 098028ab1c2..238a096925a 100644 --- a/bun.lock +++ b/bun.lock @@ -4,19 +4,19 @@ "": { "name": "@hcengineering/communication", "devDependencies": { - "@eslint/js": "^9.15.0", - "@types/bun": "^1.1.14", - "bun-types": "^1.1.38", - "eslint": "^9.15.0", + "@eslint/js": "^9.21.0", + "@types/bun": "^1.2.4", + "bun-types": "^1.2.4", + "eslint": "^9.21.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.1", - "prettier": "^3.3.3", - "typescript-eslint": "^8.15.0", + "eslint-plugin-prettier": "^5.2.3", + "prettier": "^3.5.2", + "typescript-eslint": "^8.25.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,8 +48,9 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { + "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", "@msgpack/msgpack": "^3.0.0-beta2", @@ -63,12 +64,11 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { - "@hcengineering/communication-core": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "pg": "8.12.0", "postgres": "^3.4.4", "uuid": "^11.0.3", }, @@ -79,25 +79,10 @@ "typescript": "^5.6.3", }, }, - "packages/core": { - "name": "@hcengineering/communication-core", - "version": "0.1.9", - "dependencies": { - "@hcengineering/communication-types": "workspace:*", - }, - "devDependencies": { - "@types/bun": "^1.1.14", - }, - "peerDependencies": { - "typescript": "^5.6.3", - }, - }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { - "@hcengineering/communication-client-query": "workspace:*", - "@hcengineering/communication-client-sqlite": "workspace:*", "@hcengineering/communication-client-ws": "workspace:*", "@hcengineering/communication-types": "workspace:*", }, @@ -110,9 +95,10 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", "fast-equals": "^5.0.1", 
}, @@ -126,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -139,7 +125,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -156,9 +142,24 @@ "typescript": "^5.6.3", }, }, + "packages/shared": { + "name": "@hcengineering/communication-shared", + "version": "0.1.47", + "dependencies": { + "@hcengineering/communication-types": "workspace:*", + "js-yaml": "^4.1.0", + }, + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/js-yaml": "^4.0.9", + }, + "peerDependencies": { + "typescript": "^5.6.3", + }, + }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -175,7 +176,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.9", + "version": "0.1.47", "dependencies": { "@hcengineering/card": "0.7.2", "@hcengineering/core": "0.7.2", @@ -199,15 +200,15 @@ "@eslint/config-array": ["@eslint/config-array@0.19.2", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w=="], - "@eslint/core": ["@eslint/core@0.11.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-DWUB2pksgNEb6Bz2fggIy1wh6fGgZP4Xyy/Mt0QZPiloKKXerbqq9D3SBQTlCRYOrcRPu4vuz+CGjwdfqxnoWA=="], + "@eslint/core": ["@eslint/core@0.12.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg=="], - "@eslint/eslintrc": ["@eslint/eslintrc@3.2.0", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w=="], + "@eslint/eslintrc": ["@eslint/eslintrc@3.3.0", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-yaVPAiNAalnCZedKLdR21GOGILMLKPyqSLWaAjQFvYA2i/ciDi8ArYVr69Anohb6cH2Ukhqti4aFnYyPm8wdwQ=="], - "@eslint/js": ["@eslint/js@9.20.0", "", {}, "sha512-iZA07H9io9Wn836aVTytRaNqh00Sad+EamwOVJT12GTLw1VGMFV/4JaME+JjLtr9fiGaoWgYnS54wrfWsSs4oQ=="], + "@eslint/js": ["@eslint/js@9.21.0", "", {}, "sha512-BqStZ3HX8Yz6LvsF5ByXYrtigrV5AXADWLAGc7PH/1SxOb7/FIYYMszZZWiUou/GB9P2lXWk2SV4d+Z8h0nknw=="], "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], - "@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.5", "", { "dependencies": { "@eslint/core": "^0.10.0", "levn": "^0.4.1" } }, "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A=="], + "@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.7", "", { "dependencies": 
{ "@eslint/core": "^0.12.0", "levn": "^0.4.1" } }, "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g=="], "@formatjs/ecma402-abstract": ["@formatjs/ecma402-abstract@2.3.3", "", { "dependencies": { "@formatjs/fast-memoize": "2.2.6", "@formatjs/intl-localematcher": "0.6.0", "decimal.js": "10", "tslib": "2" } }, "sha512-pJT1OkhplSmvvr6i3CWTPvC/FGC06MbN5TNBfRO6Ox62AEz90eMq+dVvtX9Bl3jxCEkS0tATzDarRZuOLw7oFg=="], @@ -231,8 +232,6 @@ "@hcengineering/communication-cockroach": ["@hcengineering/communication-cockroach@workspace:packages/cockroach"], - "@hcengineering/communication-core": ["@hcengineering/communication-core@workspace:packages/core"], - "@hcengineering/communication-examples": ["@hcengineering/communication-examples@workspace:packages/examples"], "@hcengineering/communication-query": ["@hcengineering/communication-query@workspace:packages/query"], @@ -241,6 +240,8 @@ "@hcengineering/communication-server": ["@hcengineering/communication-server@workspace:packages/server"], + "@hcengineering/communication-shared": ["@hcengineering/communication-shared@workspace:packages/shared"], + "@hcengineering/communication-sqlite-wasm": ["@hcengineering/communication-sqlite-wasm@workspace:packages/sqlite-wasm"], "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], @@ -259,7 +260,7 @@ "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], - "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.1", "", {}, "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA=="], + "@humanwhocodes/retry": ["@humanwhocodes/retry@0.4.2", "", {}, "sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ=="], "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], @@ -285,7 +286,7 @@ "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.2", "", { "dependencies": { "bun-types": "1.2.2" } }, "sha512-tr74gdku+AEDN5ergNiBnplr7hpDp3V1h7fqI2GcR/rsUaM39jpSeKH0TFibRvU0KwniRx5POgaYnaXbk0hU+w=="], + "@types/bun": ["@types/bun@1.2.4", "", { "dependencies": { "bun-types": "1.2.4" } }, "sha512-QtuV5OMR8/rdKJs213iwXDpfVvnskPXY/S0ZiFbsTjQZycuqPbMW8Gf/XhLfwE5njW8sxI2WjISURXPlHypMFA=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -301,6 +302,8 @@ "@types/http-errors": ["@types/http-errors@2.0.4", "", {}, "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA=="], + "@types/js-yaml": ["@types/js-yaml@4.0.9", "", {}, "sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg=="], + "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], "@types/mime": ["@types/mime@1.3.5", "", 
{}, "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="], @@ -319,21 +322,21 @@ "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.24.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/type-utils": "8.24.1", "@typescript-eslint/utils": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ll1StnKtBigWIGqvYDVuDmXJHVH4zLVot1yQ4fJtLpL7qacwkxJc1T0bptqw+miBQ/QfUbhl1TcQ4accW5KUyA=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.25.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/type-utils": "8.25.0", "@typescript-eslint/utils": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-VM7bpzAe7JO/BFf40pIT1lJqS/z1F8OaSsUB3rpFJucQA4cOSuH2RVVVkFULN+En0Djgr29/jb4EQnedUo95KA=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.24.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/types": "8.24.1", "@typescript-eslint/typescript-estree": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-Tqoa05bu+t5s8CTZFaGpCH2ub3QeT9YDkXbPd3uQ4SfsLoh1/vv2GEYAioPoxCWJJNsenXlC88tRjwoHNts1oQ=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.25.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/types": "8.25.0", "@typescript-eslint/typescript-estree": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1" } }, "sha512-OdQr6BNBzwRjNEXMQyaGyZzgg7wzjYKfX2ZBV3E04hUCBDv3GQCHiz9RpqdUIiVrMgJGkXm3tcEh4vFSHreS2Q=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0" } }, "sha512-6PPeiKIGbgStEyt4NNXa2ru5pMzQ8OYKO1hX1z53HMomrmiSB+R5FmChgQAP1ro8jMtNawz+TRQo/cSXrauTpg=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.24.1", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.24.1", "@typescript-eslint/utils": "8.24.1", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, 
"sha512-/Do9fmNgCsQ+K4rCz0STI7lYB4phTtEXqqCAs3gZW0pnK7lWNkvWd5iW545GSmApm4AzmQXmSqXPO565B4WVrw=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.25.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.25.0", "@typescript-eslint/utils": "8.25.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-d77dHgHWnxmXOPJuDWO4FDWADmGQkN5+tt6SFRZz/RtCWl4pHgFl3+WdYCn16+3teG09DY6XtEpf3gGD0a186g=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.24.1", "", {}, "sha512-9kqJ+2DkUXiuhoiYIUvIYjGcwle8pcPpdlfkemGvTObzgmYfJ5d0Qm6jwb4NBXP9W1I5tss0VIAnWFumz3mC5A=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.25.0", "", {}, "sha512-+vUe0Zb4tkNgznQwicsvLUJgZIRs6ITeWSCclX1q85pR1iOiaj+4uZJIUp//Z27QWu5Cseiw3O3AR8hVpax7Aw=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "@typescript-eslint/visitor-keys": "8.24.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-UPyy4MJ/0RE648DSKQe9g0VDSehPINiejjA6ElqnFaFIhI6ZEiZAkUI0D5MCk0bQcTf/LVqZStvQ6K4lPn/BRg=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ZPaiAKEZ6Blt/TPAx5Ot0EIB/yGtLI2EsGoY6F7XKklfMxYQyvtL+gT/UCqkMzO0BVFHLDlzvFqQzurYahxv9Q=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.24.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.24.1", "@typescript-eslint/types": "8.24.1", "@typescript-eslint/typescript-estree": "8.24.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-OOcg3PMMQx9EXspId5iktsI3eMaXVwlhC8BvNnX6B5w9a4dVgpkQZuU8Hy67TolKcl+iFWq0XX+jbDGN4xWxjQ=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.25.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/types": "8.25.0", "@typescript-eslint/typescript-estree": "8.25.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-syqRbrEv0J1wywiLsK60XzHnQe/kRViI3zwFALrNEgnntn1l24Ra2KvOAWwWbWZ1lBZxZljPDGOq967dsl6fkA=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.24.1", "", { "dependencies": { "@typescript-eslint/types": "8.24.1", "eslint-visitor-keys": "^4.2.0" } }, "sha512-EwVHlp5l+2vp8CoqJm9KikPZgi3gbdZAtabKT9KPShGeOcJhsv4Zdo3oc8T8I0uKEmYoU4ItyxbptjF08enaxg=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-kCYXKAum9CecGVHGij7muybDfTS2sD3t0L4bJsEZLkyrXUImiCTq1M3LG2SRtOhiHFwMR9wAFplpT6XHYjTkwQ=="], "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], @@ -357,7 +360,7 @@ "braces": ["braces@3.0.3", "", { 
"dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="], + "bun-types": ["bun-types@1.2.4", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-nDPymR207ZZEoWD4AavvEaa/KZe/qlrbMSchqpQwovPZCKc7pwMoENjEtHgMKaAjJhy+x6vfqSBA1QU3bJgs0Q=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -391,7 +394,7 @@ "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.20.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.0", "@eslint/core": "^0.11.0", "@eslint/eslintrc": "^3.2.0", "@eslint/js": "9.20.0", "@eslint/plugin-kit": "^0.2.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.1", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-m1mM33o6dBUjxl2qb6wv6nGNwCAsns1eKtaQ4l/NPHeTvhiUPbtdfMyktxN4B3fgHIgsYh1VT3V9txblpQHq+g=="], + "eslint": ["eslint@9.21.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.2", "@eslint/core": "^0.12.0", "@eslint/eslintrc": "^3.3.0", "@eslint/js": "9.21.0", "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-KjeihdFqTPhOMXTt7StsDxriV4n66ueuF/jfPNC3j/lduHwr/ijDwJMsF+wyMJethgiKi5wniIE243vi07d3pg=="], "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, 
"sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], @@ -513,37 +516,13 @@ "periscopic": ["periscopic@3.1.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^3.0.0", "is-reference": "^3.0.0" } }, "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw=="], - "pg": ["pg@8.12.0", "", { "dependencies": { "pg-connection-string": "^2.6.4", "pg-pool": "^3.6.2", "pg-protocol": "^1.6.1", "pg-types": "^2.1.0", "pgpass": "1.x" }, "optionalDependencies": { "pg-cloudflare": "^1.1.1" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ=="], - - "pg-cloudflare": ["pg-cloudflare@1.1.1", "", {}, "sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q=="], - - "pg-connection-string": ["pg-connection-string@2.7.0", "", {}, "sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA=="], - - "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], - - "pg-pool": ["pg-pool@3.7.1", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-xIOsFoh7Vdhojas6q3596mXFsR8nwBQBXX5JiV7p9buEVAGqYL4yFzclON5P9vFrpu1u7Zwl2oriyDa89n0wbw=="], - - "pg-protocol": ["pg-protocol@1.7.1", "", {}, "sha512-gjTHWGYWsEgy9MsY0Gp6ZJxV24IjDqdpTW7Eh0x+WfJLFsm/TJx1MzL6T0D88mBvkpxotCQ6TwW6N+Kko7lhgQ=="], - - "pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], - - "pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="], - "picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], "postgres": ["postgres@3.4.5", "", {}, "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg=="], - "postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="], - - "postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="], - - "postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="], - - "postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="], - "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], - "prettier": ["prettier@3.5.1", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-hPpFQvHwL3Qv5AdRvBFMhnKo4tYxp0ReXiPn2bxkiohEX6mBeBwEpBSQTkD458RaaDKQMYSp4hX4UtfUTA5wDw=="], + "prettier": ["prettier@3.5.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg=="], "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", 
"", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], @@ -569,8 +548,6 @@ "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], - "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], - "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], @@ -589,7 +566,7 @@ "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="], - "typescript-eslint": ["typescript-eslint@8.24.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.24.1", "@typescript-eslint/parser": "8.24.1", "@typescript-eslint/utils": "8.24.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-cw3rEdzDqBs70TIcb0Gdzbt6h11BSs2pS0yaq7hDWDBtCCSei1pPSUXE9qUdQ/Wm9NgFg8mKtMt1b8fTHIl1jA=="], + "typescript-eslint": ["typescript-eslint@8.25.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.25.0", "@typescript-eslint/parser": "8.25.0", "@typescript-eslint/utils": "8.25.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-TxRdQQLH4g7JkoFlYG3caW5v1S6kEkz8rqt80iQJZUYPq1zD1Ra7HfQBJJ88ABRaMvHAXnwRvRB4V+6sQ9xN5Q=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], @@ -603,14 +580,10 @@ "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], - "yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], - "@eslint/plugin-kit/@eslint/core": ["@eslint/core@0.10.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw=="], - "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], diff --git a/package.json b/package.json index 92c68b7cd15..1d53a75e857 100644 --- a/package.json +++ b/package.json @@ -3,18 +3,19 @@ "type": "module", "workspaces": ["packages/*"], "scripts": { + "publish": "sh scripts/publish.sh", "build": "sh scripts/build.sh && tsc --noEmit", "lint": "eslint packages/**/src/*.ts", 
"format": "prettier --write packages/**/src/*.ts && bun run lint" }, "devDependencies": { - "@eslint/js": "^9.15.0", - "@types/bun": "^1.1.14", - "bun-types": "^1.1.38", - "eslint": "^9.15.0", + "@eslint/js": "^9.21.0", + "@types/bun": "^1.2.4", + "bun-types": "^1.2.4", + "eslint": "^9.21.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.1", - "prettier": "^3.3.3", - "typescript-eslint": "^8.15.0" + "eslint-plugin-prettier": "^5.2.3", + "prettier": "^3.5.2", + "typescript-eslint": "^8.25.0" } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 79c0faedee0..df4ca1bc4c1 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index c48319513fb..b1df62dcd80 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -1,5 +1,6 @@ import { LiveQueries } from '@hcengineering/communication-query' import type { QueryClient } from '@hcengineering/communication-sdk-types' +import type { WorkspaceID } from '@hcengineering/communication-types' import { MessagesQuery, NotificationsQuery } from './query' @@ -13,12 +14,12 @@ export function createNotificationsQuery(): NotificationsQuery { return new NotificationsQuery(lq) } -export function initLiveQueries(client: QueryClient) { +export function initLiveQueries(client: QueryClient, workspace: WorkspaceID, filesUrl: string): void { if (lq != null) { lq.close() } - lq = new LiveQueries(client) + lq = new LiveQueries(client, workspace, filesUrl) client.onEvent = (event) => { void lq.onEvent(event) diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 55b2840c907..400e1e4d722 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -44,7 +44,7 @@ class BaseQuery

, C extends QueryCallback> { } } - private needUpdate(params: FindMessagesParams, callback: MessagesQueryCallback): boolean { + private needUpdate(params: P, callback: C): boolean { if (!deepEqual(params, this.oldQuery)) return true if (!deepEqual(callback.toString(), this.oldCallback?.toString())) return true return false diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 97743c2e9b5..ee0fa824cbb 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 2dc53627e99..2393490f9f0 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -13,7 +13,10 @@ import { type Notification, type Attachment, type Reaction, - type WorkspaceID + type WorkspaceID, + type FindMessagesGroupsParams, + type MessagesGroup, + PatchType } from '@hcengineering/communication-types' import { type Client, @@ -24,7 +27,8 @@ import { } from '@hcengineering/communication-sdk-types' import { createDbAdapter as createSqliteDbAdapter } from '@hcengineering/communication-sqlite-wasm' -class DbClient implements Client { +//TODO: FIXME +class DbClient { onEvent: (event: ResponseEvent) => void = () => {} constructor( @@ -61,10 +65,14 @@ class DbClient implements Client { this.onEvent({ type: ResponseEventType.MessageRemoved, message, card }) } - async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { + async updateMessage(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { const created = new Date() - await this.db.createPatch(card, message, content, creator, created) - this.onEvent({ type: ResponseEventType.PatchCreated, card, patch: { message, content, creator, created } }) + await this.db.createPatch(card, message, PatchType.update, content, creator, created) + this.onEvent({ + type: ResponseEventType.PatchCreated, + card, + patch: { message, type: PatchType.update, content, creator, created } + }) } async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { @@ -98,6 +106,10 @@ class DbClient implements Client { return rawMessages.map((it) => this.toMessage(it)) } + async findMessagesGroups(params: FindMessagesGroupsParams): Promise { + return await this.db.findMessagesGroups(params) + } + async findMessage(params: FindMessagesParams): Promise { return (await this.findMessages({ ...params, limit: 1 }))[0] } @@ -158,11 +170,17 @@ class DbClient implements Client { return await this.db.findContexts(params, [this.personalWorkspace]) } + //eslint-disable-next-line @typescript-eslint/no-unused-vars async findNotifications(params: FindNotificationsParams): Promise { //TODO: should we filter by workspace? 
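    // A possible shape for that filter, sketched as a comment only: this assumes the adapter
    // accepted a workspace argument alongside the personal workspace (not part of this patch), e.g.
    //   return await this.db.findNotifications({ ...params, workspace: this.workspace }, this.personalWorkspace)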
return await this.db.findNotifications(params, this.personalWorkspace) } + //eslint-disable-next-line @typescript-eslint/no-unused-vars + async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + //TODO: implement + } + async unsubscribeQuery() { //ignore } @@ -178,5 +196,5 @@ export async function getSqliteClient( dbUrl = 'file:communication.sqlite3?vfs=opfs' ): Promise { const db = await createSqliteDbAdapter(dbUrl) - return new DbClient(db, workspace, personalWorkspace) + return new DbClient(db, workspace, personalWorkspace) as unknown as Client } diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index e9c75bc5f95..86e4dadcb1f 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -14,6 +14,7 @@ }, "dependencies": { "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@msgpack/msgpack": "^3.0.0-beta2" }, diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index 74d28284371..d11674f83ba 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -1,18 +1,19 @@ import { - type Attachment, type CardID, type ContextID, + type FindMessagesGroupsParams, type FindMessagesParams, type FindNotificationContextParams, type FindNotificationsParams, type Message, type MessageID, + type MessagesGroup, type Notification, type NotificationContext, type NotificationContextUpdate, - type Reaction, type RichText, - type SocialID + type SocialID, + type WorkspaceID } from '@hcengineering/communication-types' import { RequestEventType, @@ -35,6 +36,7 @@ import { type ResponseEvent, type UpdateNotificationContextEvent } from '@hcengineering/communication-sdk-types' +import { initLiveQueries } from '@hcengineering/communication-client-query' import { WebSocketConnection } from './connection' @@ -75,7 +77,7 @@ class WsClient implements Client { await this.sendEvent(event) } - async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { + async updateMessage(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { const event: CreatePatchEvent = { type: RequestEventType.CreatePatch, card, @@ -130,39 +132,11 @@ class WsClient implements Client { } async findMessages(params: FindMessagesParams, queryId?: number): Promise { - const rawMessages = await this.ws.send('findMessages', [params, queryId]) - return rawMessages.map((it: any) => this.toMessage(it)) - } - - toMessage(raw: any): Message { - return { - id: raw.id, - card: raw.card, - content: raw.content, - creator: raw.creator, - created: new Date(raw.created), - edited: new Date(raw.edited), - reactions: raw.reactions.map((it: any) => this.toReaction(it)), - attachments: raw.attachments.map((it: any) => this.toAttachment(it)) - } - } - - toAttachment(raw: any): Attachment { - return { - message: raw.message, - card: raw.card, - creator: raw.creator, - created: new Date(raw.created) - } + return await this.ws.send('findMessages', [params, queryId]) } - toReaction(raw: any): Reaction { - return { - message: raw.message, - reaction: raw.reaction, - creator: raw.creator, - created: new Date(raw.created) - } + async 
findMessagesGroups(params: FindMessagesGroupsParams): Promise { + return await this.ws.send('findMessagesGroups', [params]) } async createNotification(message: MessageID, context: ContextID): Promise { @@ -229,12 +203,23 @@ class WsClient implements Client { private async sendEvent(event: RequestEvent): Promise { return await this.ws.send('event', [event]) } + //eslint-disable-next-line @typescript-eslint/no-unused-vars + async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + //TODO: implement + } close() { void this.ws.close() } } -export async function getWebsocketClient(url: string, token: string): Promise { - return new WsClient(url, token) +export async function getWebsocketClient( + url: string, + token: string, + workspace: WorkspaceID, + filesUrl: string +): Promise { + const client = new WsClient(url, token) + initLiveQueries(client, workspace, filesUrl) + return client } diff --git a/packages/client-ws/src/index.ts b/packages/client-ws/src/index.ts index 83dae7638cc..226fb61584a 100644 --- a/packages/client-ws/src/index.ts +++ b/packages/client-ws/src/index.ts @@ -1 +1,4 @@ +import { createMessagesQuery, createNotificationsQuery } from '@hcengineering/communication-client-query' + export * from './client' +export { createMessagesQuery, createNotificationsQuery } diff --git a/packages/cockroach/migrations/00_schema.sql b/packages/cockroach/migrations/00_schema.sql index 0f19be60a3c..ed4ea1dc0c5 100644 --- a/packages/cockroach/migrations/00_schema.sql +++ b/packages/cockroach/migrations/00_schema.sql @@ -1,2 +1 @@ -DROP SCHEMA IF EXISTS communication CASCADE; CREATE SCHEMA IF NOT EXISTS communication; diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index 65ee7b4aabb..d113fe2fd38 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -8,9 +8,11 @@ CREATE TABLE IF NOT EXISTS communication.messages creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (id, card_id, workspace_id) + PRIMARY KEY (workspace_id, card_id, id) ); +CREATE INDEX IF NOT EXISTS idx_messages_workspace_card ON communication.messages (workspace_id, card_id); +CREATE INDEX IF NOT EXISTS idx_messages_workspace_card_id ON communication.messages (workspace_id, card_id, id); CREATE TABLE IF NOT EXISTS communication.messages_groups ( @@ -18,11 +20,14 @@ CREATE TABLE IF NOT EXISTS communication.messages_groups card_id VARCHAR(255) NOT NULL, blob_id UUID NOT NULL, - from_id INT8 NOT NULL, - to_id INT8 NOT NULL, from_date TIMESTAMPTZ NOT NULL, to_date TIMESTAMPTZ NOT NULL, + from_id INT8 NOT NULL, + to_id INT8 NOT NULL, count INT NOT NULL, PRIMARY KEY (workspace_id, card_id, blob_id) ); + + +CREATE INDEX IF NOT EXISTS idx_messages_groups_workspace_card ON communication.messages_groups (workspace_id, card_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql index 0513090d97f..527df14bf03 100644 --- a/packages/cockroach/migrations/02_patch.sql +++ b/packages/cockroach/migrations/02_patch.sql @@ -4,6 +4,7 @@ CREATE TABLE IF NOT EXISTS communication.patch workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, message_id INT8 NOT NULL, + type VARCHAR(255) NOT NULL, content TEXT NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, @@ -11,4 +12,4 @@ CREATE TABLE IF NOT EXISTS communication.patch PRIMARY KEY (id) ); -CREATE INDEX idx_patch_message_id ON 
communication.patch (message_id);
\ No newline at end of file
+CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message ON communication.patch (workspace_id, card_id, message_id);
\ No newline at end of file
diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql
index 3a45d1fcb99..780002f3355 100644
--- a/packages/cockroach/migrations/03_attachment.sql
+++ b/packages/cockroach/migrations/03_attachment.sql
@@ -5,7 +5,7 @@ CREATE TABLE IF NOT EXISTS communication.attachments
     creator VARCHAR(255) NOT NULL,
     created TIMESTAMPTZ NOT NULL DEFAULT now(),

-    PRIMARY KEY (message_id, card_id)
+    PRIMARY KEY (card_id, message_id)
 );

 CREATE INDEX IF NOT EXISTS attachment_message_idx ON communication.attachments (message_id);
diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql
index a8b4fc9496d..30e8ce02a70 100644
--- a/packages/cockroach/migrations/04_reaction.sql
+++ b/packages/cockroach/migrations/04_reaction.sql
@@ -7,7 +7,8 @@ CREATE TABLE IF NOT EXISTS communication.reactions
     creator VARCHAR(255) NOT NULL,
     created TIMESTAMPTZ NOT NULL DEFAULT now(),

+    FOREIGN KEY (workspace_id, card_id, message_id) REFERENCES communication.messages (workspace_id, card_id, id) ON DELETE CASCADE,
     PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction)
 );

-CREATE INDEX IF NOT EXISTS reaction_message_idx ON communication.reactions (message_id);
+CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message ON communication.reactions (workspace_id, card_id, message_id);
\ No newline at end of file
diff --git a/packages/cockroach/migrations/05_notification_context.sql b/packages/cockroach/migrations/05_notification_context.sql
deleted file mode 100644
index 33fc3866494..00000000000
--- a/packages/cockroach/migrations/05_notification_context.sql
+++ /dev/null
@@ -1,15 +0,0 @@
-CREATE TABLE IF NOT EXISTS communication.notification_context
-(
-    id UUID NOT NULL DEFAULT gen_random_uuid(),
-    workspace_id UUID NOT NULL,
-    card_id VARCHAR(255) NOT NULL,
-
-    personal_workspace UUID NOT NULL,
-
-    archived_from TIMESTAMPTZ,
-    last_view TIMESTAMPTZ,
-    last_update TIMESTAMPTZ,
-
-    PRIMARY KEY (id),
-    UNIQUE (workspace_id, card_id, personal_workspace)
-);
diff --git a/packages/cockroach/migrations/05_thread.sql b/packages/cockroach/migrations/05_thread.sql
new file mode 100644
index 00000000000..73d6e50120b
--- /dev/null
+++ b/packages/cockroach/migrations/05_thread.sql
@@ -0,0 +1,15 @@
+CREATE TABLE IF NOT EXISTS communication.thread
+(
+    workspace_id UUID NOT NULL,
+    card_id VARCHAR(255) NOT NULL,
+    message_id INT8 NOT NULL,
+    thread_id VARCHAR(255) NOT NULL,
+    replies_count INT NOT NULL,
+    last_reply TIMESTAMPTZ NOT NULL,
+
+    PRIMARY KEY (workspace_id, thread_id),
+    UNIQUE (workspace_id, card_id, message_id)
+);
+
+CREATE INDEX IF NOT EXISTS idx_thread_workspace_thread ON communication.thread (workspace_id, thread_id);
+CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, card_id, message_id);
\ No newline at end of file
diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql
index 0bb7b2c9466..df565632a1d 100644
--- a/packages/cockroach/migrations/06_notification.sql
+++ b/packages/cockroach/migrations/06_notification.sql
@@ -1,3 +1,20 @@
+CREATE TABLE IF NOT EXISTS communication.notification_context
+(
+    id UUID NOT NULL DEFAULT gen_random_uuid(),
+    workspace_id UUID NOT NULL,
+    card_id
VARCHAR(255) NOT NULL, + + personal_workspace UUID NOT NULL, + + archived_from TIMESTAMPTZ, + last_view TIMESTAMPTZ, + last_update TIMESTAMPTZ, + + PRIMARY KEY (id), + UNIQUE (workspace_id, card_id, personal_workspace) +); + + CREATE TABLE IF NOT EXISTS communication.notifications ( message_id UUID NOT NULL, diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 619b223cf47..b34725c8ed0 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -13,10 +13,9 @@ "@types/bun": "^1.1.14" }, "dependencies": { - "@hcengineering/communication-core": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "pg": "8.12.0", "postgres": "^3.4.4", "uuid": "^11.0.3" }, diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 0a805aab03b..0ca7ef20b1e 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -1,73 +1,88 @@ +import type { ParameterOrJSON, Row } from 'postgres' import type postgres from 'postgres' import { - type Message, - type FindMessagesParams, + type BlobID, type CardID, - type RichText, - type SocialID, - type MessageID, type ContextID, - type NotificationContextUpdate, + type FindMessagesGroupsParams, + type FindMessagesParams, type FindNotificationContextParams, - type NotificationContext, type FindNotificationsParams, - type Notification, - type BlobID, + type Message, + type MessageID, type MessagesGroup, - type FindMessagesGroupsParams, - type WorkspaceID + type Notification, + type NotificationContext, + type NotificationContextUpdate, + type PatchType, + type RichText, + type SocialID, + type WorkspaceID, + type Thread } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' +import { retry } from '@hcengineering/communication-shared' import { MessagesDb } from './db/message' import { NotificationsDb } from './db/notification' import { connect, type PostgresClientReference } from './connection' +import { type Options, type Logger, type SqlClient } from './types' export class CockroachAdapter implements DbAdapter { private readonly message: MessagesDb private readonly notification: NotificationsDb constructor( - private readonly db: PostgresClientReference, - private readonly sqlClient: postgres.Sql, - private readonly workspace: WorkspaceID + private readonly sql: SqlClient, + private readonly workspace: WorkspaceID, + private readonly logger?: Logger, + private readonly options?: Options ) { - this.message = new MessagesDb(this.sqlClient, this.workspace) - this.notification = new NotificationsDb(this.sqlClient, this.workspace) + this.message = new MessagesDb(this.sql, this.workspace, logger, options) + this.notification = new NotificationsDb(this.sql, this.workspace, logger, options) } async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { return await this.message.createMessage(card, content, creator, created) } + async removeMessage(card: CardID, message: MessageID, socialIds?: SocialID[]): Promise { + await this.message.removeMessage(card, message, socialIds) + } + + async removeMessages(card: CardID, fromId: MessageID, toId: MessageID): 
Promise { + await this.message.removeMessages(card, fromId, toId) + } + async createPatch( card: CardID, message: MessageID, + type: PatchType, content: RichText, creator: SocialID, created: Date ): Promise { - return await this.message.createPatch(card, message, content, creator, created) - } - - async removeMessage(card: CardID, message: MessageID): Promise { - return await this.message.removeMessage(card, message) + await this.message.createPatch(card, message, type, content, creator, created) } - async removeMessages(card: CardID, ids: MessageID[]): Promise { - return await this.message.removeMessages(card, ids) + async removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise { + await this.message.removePatches(card, fromId, toId) } async createMessagesGroup( card: CardID, blobId: BlobID, - from_id: MessageID, - to_id: MessageID, - from_date: Date, - to_date: Date, + fromDate: Date, + toDate: Date, + fromId: MessageID, + toId: MessageID, count: number ): Promise { - return await this.message.createMessagesGroup(card, blobId, from_id, to_id, from_date, to_date, count) + await this.message.createMessagesGroup(card, blobId, fromDate, toDate, fromId, toId, count) + } + + async removeMessagesGroup(card: CardID, blobId: BlobID): Promise { + await this.message.removeMessagesGroup(card, blobId) } async createReaction( @@ -77,35 +92,35 @@ export class CockroachAdapter implements DbAdapter { creator: SocialID, created: Date ): Promise { - return await this.message.createReaction(card, message, reaction, creator, created) + await this.message.createReaction(card, message, reaction, creator, created) } async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - return await this.message.removeReaction(card, message, reaction, creator) + await this.message.removeReaction(card, message, reaction, creator, new Date()) } async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { - return await this.message.createAttachment(message, attachment, creator, created) + await this.message.createAttachment(message, attachment, creator, created) } async removeAttachment(message: MessageID, attachment: CardID): Promise { - return await this.message.removeAttachment(message, attachment) + await this.message.removeAttachment(message, attachment) } - async findMessages(params: FindMessagesParams): Promise { - return await this.message.find(params) + async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + await this.message.createThread(card, message, thread, created) } - async findMessagesGroups(params: FindMessagesGroupsParams): Promise { - return await this.message.findGroups(params) + async updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise { + await this.message.updateThread(thread, lastReply, op) } async createNotification(message: MessageID, context: ContextID): Promise { - return await this.notification.createNotification(message, context) + await this.notification.createNotification(message, context) } async removeNotification(message: MessageID, context: ContextID): Promise { - return await this.notification.removeNotification(message, context) + await this.notification.removeNotification(message, context) } async createContext( @@ -118,11 +133,24 @@ export class CockroachAdapter implements DbAdapter { } async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { - return await 
this.notification.updateContext(context, update) + await this.notification.updateContext(context, update) } async removeContext(context: ContextID): Promise { - return await this.notification.removeContext(context) + await this.notification.removeContext(context) + } + + // Finds + async findMessages(params: FindMessagesParams): Promise { + return await this.message.find(params) + } + + async findMessagesGroups(params: FindMessagesGroupsParams): Promise { + return await this.message.findMessagesGroups(params) + } + + async findThread(thread: CardID): Promise { + return await this.message.findThread(thread) } async findContexts( @@ -142,13 +170,92 @@ export class CockroachAdapter implements DbAdapter { } close(): void { - this.db.close() + this.sql.close() + } +} + +export async function createDbAdapter( + connectionString: string, + workspace: WorkspaceID, + logger?: Logger, + options?: Options +): Promise { + const greenUrl = process.env.GREEN_URL ?? '' + if (greenUrl !== '') { + const client = new GreenClient(greenUrl) + return new CockroachAdapter(client, workspace, logger, options) + } else { + const connection = connect(connectionString) + const sql = await connection.getClient() + const client = new CockroachClient(connection, sql) + + return new CockroachAdapter(client, workspace, logger, options) } } -export async function createDbAdapter(connectionString: string, workspace: WorkspaceID): Promise { - const db = connect(connectionString) - const sqlClient = await db.getClient() +class GreenClient implements SqlClient { + private readonly url: string + private readonly token: string + constructor(endpoint: string) { + const url = new URL(endpoint) + this.token = url.searchParams.get('token') ?? 'secret' + + const compression = url.searchParams.get('compression') ?? '' + + const newHost = url.host + const newPathname = url.pathname + const newSearchParams = new URLSearchParams() + + if (compression !== '') { + newSearchParams.set('compression', compression) + } + + this.url = `${url.protocol}//${newHost}${newPathname}${newSearchParams.size > 0 ? '?' + newSearchParams.toString() : ''}` + } + + async execute)[]>(query: string, params?: ParameterOrJSON[]): Promise { + return await retry(() => this.fetch(query, params), { retries: 5 }) + } - return new CockroachAdapter(db, sqlClient, workspace) + private async fetch)[]>( + query: string, + params?: ParameterOrJSON[] + ): Promise { + const url = this.url.endsWith('/') ? this.url + 'api/v1/sql' : this.url + '/api/v1/sql' + + const response = await fetch(url, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'Bearer ' + this.token, + Connection: 'keep-alive' + }, + body: JSON.stringify({ query, params }, (_, value) => (typeof value === 'bigint' ? 
value.toString() : value)) + }) + + if (!response.ok) { + throw new Error(`Failed to execute sql: ${response.status} ${response.statusText}`) + } + + return await response.json() + } + + close(): void { + // do nothing + } +} + +class CockroachClient implements SqlClient { + constructor( + private readonly db: PostgresClientReference, + private readonly sql: postgres.Sql + ) {} + + async execute)[]>(query: string, params?: ParameterOrJSON[]): Promise { + return await this.sql.unsafe(query, params) + } + + close(): void { + this.db.close() + } } diff --git a/packages/cockroach/src/connection.ts b/packages/cockroach/src/connection.ts index a9aa16e74b0..b14941e5162 100644 --- a/packages/cockroach/src/connection.ts +++ b/packages/cockroach/src/connection.ts @@ -1,4 +1,4 @@ -//Full copy from @hcengineering/postgres +// Full copy from @hcengineering/postgres import postgres from 'postgres' import { v4 as uuid } from 'uuid' @@ -86,10 +86,9 @@ export function connect(connectionString: string, database?: string): PostgresCl application_name: 'communication' }, database, - max: 10, - transform: { - undefined: null - }, + max: 5, + fetch_types: false, + prepare: true, ...extraOptions }) diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index 1c71ecb7348..e81dad2d219 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -1,79 +1,41 @@ -import type postgres from 'postgres' -import type {WorkspaceID} from "@hcengineering/communication-types"; +import { type ParameterOrJSON, type Row } from 'postgres' +import type { WorkspaceID } from '@hcengineering/communication-types' -export class BaseDb { - constructor( - readonly client: postgres.Sql, - readonly workspace: WorkspaceID - ) { - } - - async insert(table: string, data: Record): Promise { - const keys = Object.keys(data) - const values = Object.values(data) - const sql = ` - INSERT INTO ${table} (${keys.map((k) => `"${k}"`).join(', ')}) - VALUES (${keys.map((_, idx) => `$${idx + 1}`).join(', ')}); - ` - await this.client.unsafe(sql, values) - } - - async insertWithReturn(table: string, data: Record, returnField: string): Promise { - const keys = Object.keys(data) - const values = Object.values(data) - const sql = ` - INSERT INTO ${table} (${keys.map((k) => `"${k}"`).join(', ')}) - VALUES (${keys.map((_, idx) => `$${idx + 1}`).join(', ')}) - RETURNING ${returnField};` - const result = await this.client.unsafe(sql, values) - - return result[0][returnField] - } - - async remove(table: string, where: Record): Promise { - const keys = Object.keys(where) - const values = Object.values(where) +import { type SqlClient, type Logger, type Options } from '../types' - if (keys.length === 0) { - throw new Error("WHERE condition cannot be empty"); - } - - const sql = ` - DELETE - FROM ${table} - WHERE ${keys.map((k, idx) => `"${k}" = $${idx + 1}`).join(' AND ')};` - - await this.client.unsafe(sql, values) +export class BaseDb { + constructor ( + readonly client: SqlClient, + readonly workspace: WorkspaceID, + readonly logger?: Logger, + readonly options?: Options + ) {} + + async execute)[]>( + sql: string, + params?: ParameterOrJSON[], + name?: string + ): Promise { + if (this.options?.withLogs === true && this.logger !== undefined) { + return await this.executeWithLogs(name ?? 
'execute sql', this.logger, sql, params) } - async removeWithReturn(table: string, where: Record, returnField: string): Promise { - const keys = Object.keys(where); - const values: any[] = []; - - if (keys.length === 0) { - throw new Error("WHERE condition cannot be empty"); - } - - const whereClause = keys.map((key) => { - const value = where[key]; - if (Array.isArray(value)) { - const placeholders = value.map((_, i) => `$${values.length + i + 1}`).join(", "); - values.push(...value); - return `"${key}" IN (${placeholders})`; - } else { - values.push(value); - return `"${key}" = $${values.length}`; - } - }).join(" AND "); - - const sql = ` - DELETE FROM ${table} - WHERE ${whereClause} - RETURNING ${returnField}; - `; - - const result = await this.client.unsafe(sql, values); - - return result.map((it: any) => it[returnField]); + return await this.client.execute(sql, params) + } + + private async executeWithLogs)[]>( + name: string, + logger: Logger, + sql: string, + params?: ParameterOrJSON[] + ): Promise { + const start = performance.now() + + try { + return await this.client.execute(sql, params) + } finally { + const time = performance.now() - start + logger.info(name, { time }) } + } } diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 79462c4372c..1ddad0af650 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -1,302 +1,481 @@ import { - type Message, - type MessageID, - type CardID, - type FindMessagesParams, - SortOrder, - type SocialID, - type RichText, - Direction, - type BlobID, - type MessagesGroup, - type FindMessagesGroupsParams + type Message, + type MessageID, + type CardID, + type FindMessagesParams, + type SocialID, + type RichText, + SortingOrder, + PatchType, + type Thread, + type BlobID, + type FindMessagesGroupsParams, + type MessagesGroup } from '@hcengineering/communication-types' -import {generateMessageId} from "@hcengineering/communication-core"; +import { generateMessageId } from '@hcengineering/communication-shared' -import {BaseDb} from './base.ts' +import { BaseDb } from './base' import { - TableName, - type MessageDb, - type AttachmentDb, - type ReactionDb, - type PatchDb, - type MessagesGroupDb, - toMessage, - toMessagesGroup -} from './schema.ts' -import {getCondition} from './utils.ts'; - + TableName, + type MessageDb, + type AttachmentDb, + type ReactionDb, + type PatchDb, + toMessage, + type ThreadDb, + toThread, + type MessagesGroupDb, + toMessagesGroup +} from './schema' +import { getCondition } from './utils' export class MessagesDb extends BaseDb { - //Message - async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { - const dbData: MessageDb = { - id: generateMessageId(), - workspace_id: this.workspace, - card_id: card, - content: content, - creator: creator, - created: created, - } - - await this.insert(TableName.Message, dbData) - - return dbData.id as MessageID + // Message + async createMessage (card: CardID, content: RichText, creator: SocialID, created: Date): Promise { + const id = generateMessageId() + const db: MessageDb = { + id, + workspace_id: this.workspace, + card_id: card, + content, + creator, + created } - async removeMessage(card: CardID, message: MessageID): Promise { - const result = await this.removeWithReturn(TableName.Message, {id: message, workspace_id: this.workspace, card_id: card}, "id") - return result[0] as MessageID | undefined + const sql = `INSERT INTO ${TableName.Message} (workspace_id, 
card_id, id, content, creator, created)
+                 VALUES ($1::uuid, $2::varchar, $3::bigint, $4::text, $5::varchar, $6::timestamptz)`
+
+    await this.execute(sql, [db.workspace_id, db.card_id, db.id, db.content, db.creator, db.created], 'insert message')
+
+    return id
+  }
+
+  async removeMessage (card: CardID, message: MessageID, socialIds?: SocialID[]): Promise<void> {
+    if (socialIds === undefined || socialIds.length === 0) {
+      const sql = `DELETE
+                   FROM ${TableName.Message}
+                   WHERE workspace_id = $1::uuid
+                     AND card_id = $2::varchar
+                     AND id = $3::bigint;`
+      await this.execute(sql, [this.workspace, card, message], 'remove message')
+    } else if (socialIds.length === 1) {
+      const sql = `DELETE
+                   FROM ${TableName.Message}
+                   WHERE workspace_id = $1::uuid
+                     AND card_id = $2::varchar
+                     AND id = $3::bigint
+                     AND creator = $4::varchar;`
+      await this.execute(sql, [this.workspace, card, message, socialIds[0]], 'remove message')
+    } else {
+      const sql = `DELETE
+                   FROM ${TableName.Message}
+                   WHERE workspace_id = $1::uuid
+                     AND card_id = $2::varchar
+                     AND id = $3::bigint
+                     AND creator = ANY ($4::varchar[]);`
+
+      await this.execute(sql, [this.workspace, card, message, socialIds], 'remove message')
    }
-
-    async removeMessages(card: CardID, ids: MessageID[]): Promise<MessageID[]> {
-        const result = await this.removeWithReturn(TableName.Message, {
-            workspace_id: this.workspace,
-            card_id: card,
-            id: ids
-        }, "id")
-        return result.map((it: any) => it.id)
+  }
+
+  async removeMessages (card: CardID, fromId: MessageID, toId: MessageID): Promise<void> {
+    const sql = `DELETE
+                 FROM ${TableName.Message}
+                 WHERE workspace_id = $1::uuid
+                   AND card_id = $2::varchar
+                   AND id >= $3::bigint
+                   AND id <= $4::bigint;`
+
+    await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove messages')
+  }
+
+  async createPatch (
+    card: CardID,
+    message: MessageID,
+    type: PatchType,
+    content: string,
+    creator: SocialID,
+    created: Date
+  ): Promise<void> {
+    const db: PatchDb = {
+      workspace_id: this.workspace,
+      card_id: card,
+      message_id: message,
+      type,
+      content,
+      creator,
+      created
    }
-    async createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID, created: Date): Promise<void> {
-        const dbData: PatchDb = {
-            workspace_id: this.workspace,
-            card_id: card,
-            message_id: message,
-            content: content,
-            creator: creator,
-            created: created
-        }
-
-        await this.insert(TableName.Patch, dbData)
+    const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created)
+                 VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz)`
+
+    await this.execute(
+      sql,
+      [db.workspace_id, db.card_id, db.message_id, db.type, db.content, db.creator, db.created],
+      'insert patch'
+    )
+  }
+
+  async removePatches (card: CardID, fromId: MessageID, toId: MessageID): Promise<void> {
+    const sql = `DELETE
+                 FROM ${TableName.Patch}
+                 WHERE workspace_id = $1::uuid
+                   AND card_id = $2::varchar
+                   AND message_id >= $3::bigint
+                   AND message_id <= $4::bigint;`
+
+    await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove patches')
+  }
+
+  // Attachment
+  async createAttachment (message: MessageID, card: CardID, creator: SocialID, created: Date): Promise<void> {
+    const db: AttachmentDb = {
+      message_id: message,
+      card_id: card,
+      creator,
+      created
    }
-    //MessagesGroup
-    async createMessagesGroup(card: CardID, blobId: BlobID, from_id: MessageID, to_id: MessageID, from_date: Date, to_date: Date, count: number): Promise<void> {
-        const dbData: MessagesGroupDb
= { - workspace_id: this.workspace, - card_id: card, - blob_id: blobId, - from_id, - to_id, - from_date, - to_date, - count - } - await this.insert(TableName.MessagesGroup, dbData) + const sql = `INSERT INTO ${TableName.Attachment} (message_id, card_id, creator, created) + VALUES ($1::bigint, $2::varchar, $3::varchar, $4::timestamptz)` + + await this.execute(sql, [db.message_id, db.card_id, db.creator, db.created], 'insert attachment') + } + + async removeAttachment (message: MessageID, card: CardID): Promise { + const sql = `DELETE + FROM ${TableName.Attachment} + WHERE message_id = $1::bigint + AND card_id = $2::varchar` + await this.execute(sql, [message, card], 'remove attachment') + } + + // Reaction + async createReaction ( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id + FROM ${TableName.Message} m + WHERE m.id = $1::bigint` + + const messageDb = await this.execute(select, [message], 'select message') + + if (messageDb.length > 0) { + const db: ReactionDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + reaction, + creator, + created + } + const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` + + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], + 'insert reaction' + ) + } else { + await this.createPatch(card, message, PatchType.addReaction, reaction, creator, created) } - - //Attachment - async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - const dbData: AttachmentDb = { - message_id: message, - card_id: card, - creator: creator, - created: created - } - await this.insert(TableName.Attachment, dbData) + } + + async removeReaction ( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id + FROM ${TableName.Message} m + WHERE m.id = $1::bigint` + + const messageDb = await this.execute(select, [message], 'select message') + + if (messageDb.length > 0) { + const sql = `DELETE + FROM ${TableName.Reaction} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::bigint + AND reaction = $4::varchar + AND creator = $5::varchar` + await this.execute(sql, [this.workspace, card, message, reaction, creator], 'remove reaction') + } else { + await this.createPatch(card, message, PatchType.removeReaction, reaction, creator, created) } - - async removeAttachment(message: MessageID, card: CardID): Promise { - await this.remove(TableName.Attachment, { - message_id: message, - card_id: card - }) + } + + // Thread + async createThread (card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + const db: ThreadDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + thread_id: thread, + replies_count: 0, + last_reply: created } - - //Reaction - async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { - const dbData: ReactionDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - reaction: reaction, - creator: creator, - created: created - } - await this.insert(TableName.Reaction, dbData) + const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, 
+ last_reply) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::int, $6::timestamptz)` + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply], + 'insert thread' + ) + } + + async updateThread (thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise { + if (op === 'increment') { + const sql = `UPDATE ${TableName.Thread} + SET replies_count = replies_count + 1, + last_reply = $3::timestamptz + WHERE workspace_id = $1::uuid + AND thread_id = $2::varchar` + await this.execute(sql, [this.workspace, thread, lastReply], 'update thread') + } else if (op === 'decrement') { + const sql = `UPDATE ${TableName.Thread} + SET replies_count = GREATEST(replies_count - 1, 0) + WHERE workspace_id = $1::uuid + AND thread_id = $2::varchar` + await this.execute(sql, [this.workspace, thread], 'update thread') } - - async removeReaction( card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - await this.remove(TableName.Reaction, { - workspace_id: this.workspace, - card_id: card, - message_id: message, - reaction: reaction, - creator: creator - }) + } + + // MessagesGroup + async createMessagesGroup ( + card: CardID, + blobId: BlobID, + fromDate: Date, + toDate: Date, + fromId: MessageID, + toId: MessageID, + count: number + ): Promise { + const db: MessagesGroupDb = { + workspace_id: this.workspace, + card_id: card, + blob_id: blobId, + from_date: fromDate, + to_date: toDate, + from_id: fromId, + to_id: toId, + count } - //Find messages - async find(params: FindMessagesParams): Promise { - //TODO: experiment with select to improve performance - const select = `SELECT m.id, - m.card_id, - m.content, - m.creator, - m.created, - ${this.subSelectPatches()}, - ${this.subSelectAttachments()}, - ${this.subSelectReactions()} - FROM ${TableName.Message} m` - - const {where, values} = this.buildMessageWhere( params) - const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' - const limit = params.limit ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') - - const result = await this.client.unsafe(sql, values) - - return result.map((it: any) => toMessage(it)) + const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, from_id, + to_id, count) + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::bigint, $7::bigint, + $8::int)` + await this.execute( + sql, + [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.from_id, db.to_id, db.count], + 'insert messages group' + ) + } + + async removeMessagesGroup (card: CardID, blobId: BlobID): Promise { + const sql = `DELETE + FROM ${TableName.MessagesGroup} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND blob_id = $3::uuid` + await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') + } + + // Find messages + async find (params: FindMessagesParams): Promise { + // TODO: experiment with select to improve performance + const select = `SELECT m.id, + m.card_id, + m.content, + m.creator, + m.created, + t.thread_id as thread_id, + t.replies_count as replies_count, + t.last_reply as last_reply, + ${this.subSelectPatches()}, + ${this.subSelectReactions()} + FROM ${TableName.Message} m + LEFT JOIN ${TableName.Thread} t + ON t.workspace_id = m.workspace_id AND t.card_id = m.card_id AND + t.message_id = m.id` + + const { where, values } = this.buildMessageWhere(params) + const orderBy = + params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.execute(sql, values, 'find messages') + + return result.map((it: any) => toMessage(it)) + } + + buildMessageWhere (params: FindMessagesParams): { where: string, values: any[] } { + const where: string[] = ['m.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + + let index = 2 + + if (params.id != null) { + where.push(`m.id = $${index++}::bigint`) + values.push(params.id) } - buildMessageWhere(params: FindMessagesParams): { where: string, values: any[] } { - const where: string[] = ['m.workspace_id = $1'] - const values: any[] = [this.workspace] - - let index = 2 - - if (params.id != null) { - where.push(`m.id = $${index++}`) - values.push(params.id) - } - - if (params.card != null) { - where.push(`m.card_id = $${index++}`) - values.push(params.card) - } - - if (params.from != null) { - const exclude = params.excluded ?? false - const direction = params.direction ?? Direction.Forward - const getOperator = () => { - if (exclude) { - return direction === Direction.Forward ? '>' : '<' - } else { - return direction === Direction.Forward ? 
'>=' : '<=' - } - } - - where.push(`m.created ${getOperator()} $${index++}`) - values.push(params.from) - } + if (params.card != null) { + where.push(`m.card_id = $${index++}::varchar`) + values.push(params.card) + } + const createdCondition = getCondition('m', 'created', index, params.created, 'timestamptz') - return {where: `WHERE ${where.join(' AND ')}`, values} + if (createdCondition != null) { + where.push(createdCondition.where) + values.push(createdCondition.value) + index++ } - subSelectPatches(): string { - return `array( - SELECT jsonb_build_object( - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) - FROM ${TableName.Patch} p - WHERE p.message_id = m.id AND p.workspace_id = m.workspace_id AND p.card_id = m.card_id - ) AS patches` + return { where: `WHERE ${where.join(' AND ')}`, values } + } + + subSelectPatches (): string { + return `COALESCE( + (SELECT jsonb_agg(jsonb_build_object( + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created DESC) + FROM ${TableName.Patch} p + WHERE p.message_id = m.id + AND p.workspace_id = m.workspace_id + AND p.card_id = m.card_id + AND p.type = 'update' + ), '[]'::jsonb) AS patches` + } + + subSelectAttachments (): string { + return `COALESCE( + (SELECT jsonb_agg(jsonb_build_object( + 'card_id', a.card_id, + 'message_id', a.message_id, + 'creator', a.creator, + 'created', a.created + )) + FROM ${TableName.Attachment} a + WHERE a.message_id = m.id + ), '[]'::jsonb) AS attachments` + } + + subSelectReactions (): string { + return `COALESCE( + (SELECT jsonb_agg(jsonb_build_object( + 'message_id', r.message_id, + 'reaction', r.reaction, + 'creator', r.creator, + 'created', r.created + )) + FROM ${TableName.Reaction} r + WHERE r.workspace_id = m.workspace_id + AND r.card_id = m.card_id + AND r.message_id = m.id + ), '[]'::jsonb) AS reactions` + } + + // Find thread + async findThread (thread: CardID): Promise { + const sql = `SELECT t.card_id, + t.message_id, + t.thread_id, + t.replies_count, + t.last_reply + FROM ${TableName.Thread} t + WHERE t.workspace_id = $1::uuid + AND t.thread_id = $2::varchar + LIMIT 1;` + + const result = await this.execute(sql, [this.workspace, thread], 'find thread') + return result.map((it: any) => toThread(it))[0] + } + + // Find messages groups + async findMessagesGroups (params: FindMessagesGroupsParams): Promise { + const select = `SELECT mg.card_id, + mg.blob_id, + mg.from_date, + mg.to_date, + mg.from_id, + mg.to_id, + mg.count, + jsonb_agg(jsonb_build_object( + 'message_id', p.message_id, + 'type', p.type, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created) AS patches + FROM ${TableName.MessagesGroup} mg + LEFT JOIN ${TableName.Patch} p + ON p.workspace_id = mg.workspace_id + AND p.card_id = mg.card_id + AND p.message_id BETWEEN mg.from_id AND mg.to_id` + + const { where, values } = this.buildMessagesGroupWhere(params) + const orderBy = + params.orderBy === 'toDate' + ? `ORDER BY mg.to_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + : `ORDER BY mg.from_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + const groupBy = 'GROUP BY mg.card_id, mg.blob_id, mg.from_date, mg.to_date, mg.from_id, mg.to_id, mg.count' + + const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' + const sql = [select, where, groupBy, orderBy, limit].join(' ') + + const result = await this.execute(sql, values, 'find messages groups') + + return result.map((it: any) => toMessagesGroup(it)) + } + + buildMessagesGroupWhere (params: FindMessagesGroupsParams): { + where: string + values: any[] + } { + const where: string[] = ['mg.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + + let index = 2 + + if (params.card != null) { + where.push(`mg.card_id = $${index++}::varchar`) + values.push(params.card) } - subSelectAttachments(): string { - return `array( - SELECT jsonb_build_object( - 'card_id', a.card_id, - 'message_id', a.message_id, - 'creator', a.creator, - 'created', a.created - ) - FROM ${TableName.Attachment} a - WHERE a.message_id = m.id - ) AS attachments` + if (params.blobId != null) { + where.push(`mg.blob_id = $${index++}`) + values.push(params.blobId) } - subSelectReactions(): string { - return `array( - SELECT jsonb_build_object( - 'message_id', r.message_id, - 'reaction', r.reaction, - 'creator', r.creator, - 'created', r.created - ) - FROM ${TableName.Reaction} r - WHERE r.message_id = m.id AND r.workspace_id = m.workspace_id AND r.card_id = m.card_id - ) AS reactions` + const fromDateCondition = getCondition('mg', 'from_date', index, params.fromDate, 'timestamptz') + if (fromDateCondition != null) { + where.push(fromDateCondition.where) + values.push(fromDateCondition.value) + index++ } - - //Find messages groups - async findGroups(params: FindMessagesGroupsParams): Promise { - const select = `SELECT mg.card_id, - mg.blob_id, - mg.from_id, - mg.to_id, - mg.count - FROM ${TableName.MessagesGroup} mg` - - const {where, values, index} = this.buildMessagesGroupWhere(this.workspace, params) - const orderBy = params.sortBy ? `ORDER BY ${index} ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' - if(params.sortBy) { - values.push(params.sortBy) - } - const limit = params.limit ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') - - const result = await this.client.unsafe(sql, values) - - return result.map((it: any) => toMessagesGroup(it)) + const toDateCondition = getCondition('mg', 'to_date', index, params.toDate, 'timestamptz') + if (toDateCondition != null) { + where.push(toDateCondition.where) + values.push(toDateCondition.value) + index++ } - buildMessagesGroupWhere(workspace: string, params: FindMessagesGroupsParams): { - where: string, - values: any[], - index: number - } { - const where: string[] = ['mg.workspace_id = $1'] - const values: any[] = [workspace] - - let index = 2 - - if (params.card != null) { - where.push(`mg.card_id = $${index++}`) - values.push(params.card) - } - - if (params.blobId != null) { - where.push(`mg.blob_id = $${index++}`) - values.push(params.blobId) - } - - const fromIdCondition = getCondition("mg", "from_id", index, params.fromId); - if (fromIdCondition != null) { - where.push(fromIdCondition.where); - values.push(fromIdCondition.value); - index++; - } - - const toIdCondition = getCondition("mg", "to_id", index, params.toId); - - if (toIdCondition != null) { - where.push(toIdCondition.where); - values.push(toIdCondition.value); - index++; - } - - const fromDateCondition = getCondition("mg", "from_date", index, params.fromDate); - if (fromDateCondition != null) { - where.push(fromDateCondition.where); - values.push(fromDateCondition.value); - index++; - } - - const toDateCondition = getCondition("mg", "to_date", index, params.toDate); - if (toDateCondition != null) { - where.push(toDateCondition.where); - values.push(toDateCondition.value); - index++; - } - - - return {where: `WHERE ${where.join(' AND ')}`, values, index} - } + return { where: `WHERE ${where.join(' AND ')}`, values } + } } - diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index ba43fa769eb..705edc24f76 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -1,95 +1,120 @@ import { - type MessageID, - type ContextID, - type CardID, - type NotificationContext, - type FindNotificationContextParams, SortOrder, - type FindNotificationsParams, type Notification, - type NotificationContextUpdate, - type WorkspaceID + type MessageID, + type ContextID, + type CardID, + type NotificationContext, + type FindNotificationContextParams, + SortingOrder, + type FindNotificationsParams, + type Notification, + type NotificationContextUpdate, + type WorkspaceID } from '@hcengineering/communication-types' -import {BaseDb} from './base.ts' -import {TableName, type ContextDb, type NotificationDb} from './schema.ts' +import { BaseDb } from './base' +import { TableName, type ContextDb, type NotificationDb } from './schema' export class NotificationsDb extends BaseDb { - async createNotification(message: MessageID, context: ContextID): Promise { - const dbData: NotificationDb = { - message_id: message, - context - } - await this.insert(TableName.Notification, dbData) + async createNotification (message: MessageID, context: ContextID): Promise { + const db: NotificationDb = { + message_id: message, + context } - - async removeNotification(message: MessageID, context: ContextID): Promise { - await this.remove(TableName.Notification, { - message_id: message, - context - }) + const sql = `INSERT INTO ${TableName.Notification} (message_id, context_id) + VALUES ($1::bigint, $2::uuid)` + await this.execute(sql, [db.message_id, db.context], 'insert 
notification') + } + + async removeNotification (message: MessageID, context: ContextID): Promise { + // TODO: do we need to remove notifications? + const sql = `DELETE + FROM ${TableName.NotificationContext} + WHERE id = $1::uuid` + await this.execute(sql, [context], 'remove notification') + } + + async createContext ( + personalWorkspace: WorkspaceID, + card: CardID, + lastView?: Date, + lastUpdate?: Date + ): Promise { + const db: ContextDb = { + workspace_id: this.workspace, + card_id: card, + personal_workspace: personalWorkspace, + last_view: lastView, + last_update: lastUpdate } - - async createContext(personalWorkspace: WorkspaceID,card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - const dbData: ContextDb = { - workspace_id: this.workspace, - card_id: card, - personal_workspace: personalWorkspace, - last_view: lastView, - last_update: lastUpdate - } - return await this.insertWithReturn(TableName.NotificationContext, dbData, 'id') as ContextID + const sql = `INSERT INTO ${TableName.NotificationContext} (workspace_id, card_id, personal_workspace, last_view, last_update) + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz) + RETURNING id` + const result = await this.execute( + sql, + [db.workspace_id, db.card_id, db.personal_workspace, db.last_view, db.last_update], + 'insert notification context' + ) + return result[0].id as ContextID + } + + async removeContext (context: ContextID): Promise { + const sql = `DELETE + FROM ${TableName.Notification} + WHERE context = $1::uuid` + await this.execute(sql, [context], 'remove notification context') + } + + async updateContext (context: ContextID, update: NotificationContextUpdate): Promise { + const dbData: Partial = {} + + if (update.archivedFrom != null) { + dbData.archived_from = update.archivedFrom } - - async removeContext(context: ContextID): Promise { - await this.remove(TableName.NotificationContext, { - id: context - }) + if (update.lastView != null) { + dbData.last_view = update.lastView } - - async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { - const dbData: Partial = {} - - if (update.archivedFrom != null) { - dbData.archived_from = update.archivedFrom - } - if (update.lastView != null) { - dbData.last_view = update.lastView - } - if (update.lastUpdate != null) { - dbData.last_update = update.lastUpdate - } - - if (Object.keys(dbData).length === 0) { - return - } - - const keys = Object.keys(dbData) - const values = Object.values(dbData) - - const sql = `UPDATE ${TableName.NotificationContext} - SET ${keys.map((k, idx) => `"${k}" = $${idx + 1}`).join(', ')} - WHERE id = $${keys.length + 1}` - - await this.client.unsafe(sql, [values, context]) + if (update.lastUpdate != null) { + dbData.last_update = update.lastUpdate } - async findContexts(params: FindNotificationContextParams, personalWorkspaces: WorkspaceID[], workspace?: WorkspaceID,): Promise { - const select = ` - SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update - FROM ${TableName.NotificationContext} nc`; - const {where, values} = this.buildContextWhere(params, personalWorkspaces, workspace) - // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` - const limit = params.limit ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, limit].join(' ') - - const result = await this.client.unsafe(sql, values); - - return result.map(this.toNotificationContext); + if (Object.keys(dbData).length === 0) { + return } - - async findNotifications(params: FindNotificationsParams, personalWorkspace: WorkspaceID, workspace?: WorkspaceID): Promise { - //TODO: experiment with select to improve performance, should join with attachments and reactions? - const select = ` + const keys = Object.keys(dbData) + const values = Object.values(dbData) + + const sql = `UPDATE ${TableName.NotificationContext} + SET ${keys.map((k, idx) => `"${k}" = $${idx + 1}::timestamptz`).join(', ')} + WHERE id = $${keys.length + 1}::uuid;` + await this.execute(sql, [values, context], 'update notification context') + } + + async findContexts ( + params: FindNotificationContextParams, + personalWorkspaces: WorkspaceID[], + workspace?: WorkspaceID + ): Promise { + const select = ` + SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update + FROM ${TableName.NotificationContext} nc` + const { where, values } = this.buildContextWhere(params, personalWorkspaces, workspace) + // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` + const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' + const sql = [select, where, limit].join(' ') + + const result = await this.execute(sql, values, 'find notification contexts') + + return result.map((it) => this.toNotificationContext(it)) + } + + async findNotifications ( + params: FindNotificationsParams, + personalWorkspace: WorkspaceID, + workspace?: WorkspaceID + ): Promise { + // TODO: experiment with select to improve performance, should join with attachments and reactions? + const select = ` SELECT n.message_id, n.context, m.card_id AS message_card, @@ -113,115 +138,121 @@ export class NotificationsDb extends BaseDb { FROM ${TableName.Notification} n JOIN ${TableName.NotificationContext} nc ON n.context = nc.id JOIN ${TableName.Message} m ON n.message_id = m.id - `; - const {where, values} = this.buildNotificationWhere(params, personalWorkspace, workspace) - const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' - const limit = params.limit ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') + ` + const { where, values } = this.buildNotificationWhere(params, personalWorkspace, workspace) + const orderBy = + params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.execute(sql, values, 'find notifications') + + return result.map((it) => this.toNotification(it)) + } + + buildContextWhere ( + params: FindNotificationContextParams, + personalWorkspaces: WorkspaceID[], + workspace?: WorkspaceID + ): { + where: string + values: any[] + } { + const where: string[] = [] + const values: any[] = [] + let index = 1 - const result = await this.client.unsafe(sql, values); + if (workspace != null) { + where.push(`nc.workspace_id = $${index++}::uuid`) + values.push(workspace) + } + + if (personalWorkspaces.length > 0) { + where.push(`nc.personal_workspace IN (${personalWorkspaces.map((it) => `$${index++}::uuid`).join(', ')})`) + values.push(...personalWorkspaces) + } - return result.map(this.toNotification); + if (params.card != null) { + where.push(`nc.card_id = $${index++}::varchar`) + values.push(params.card) } - buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: WorkspaceID[], workspace?: WorkspaceID): { - where: string, - values: any[] + return { where: `WHERE ${where.join(' AND ')}`, values } + } + + buildNotificationWhere ( + params: FindNotificationsParams, + personalWorkspace: WorkspaceID, + workspace?: WorkspaceID + ): { + where: string + values: any[] } { - const where: string[] = [] - const values: any[] = [] - let index = 1 + const where: string[] = ['nc.personal_workspace = $1::uuid'] + const values: any[] = [personalWorkspace] + let index = 2 + + if (workspace != null) { + where.push(`nc.workspace_id = $${index++}::uuid`) + values.push(workspace) + } + + if (params.context != null) { + where.push(`n.context = $${index++}::uuid`) + values.push(params.context) + } - if (workspace != null) { - where.push(`nc.workspace_id = $${index++}`) - values.push(workspace) - } + if (params.read === true) { + where.push('nc.last_view IS NOT NULL AND nc.last_view >= m.created') + } - if (personalWorkspaces.length > 0) { - where.push(`nc.personal_workspace IN (${personalWorkspaces.map((it) => `$${index++}`).join(', ')})`) - values.push(...personalWorkspaces) - } + if (params.read === false) { + where.push('(nc.last_view IS NULL OR nc.last_view > m.created)') + } - if (params.card != null) { - where.push(`nc.card_id = $${index++}`) - values.push(params.card) - } + if (params.archived === true) { + where.push('nc.archived_from IS NOT NULL AND nc.archived_from >= m.created') + } - return {where: `WHERE ${where.join(' AND ')}`, values} + if (params.archived === false) { + where.push('(nc.archived_from IS NULL OR nc.archived_from > m.created)') } - buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: WorkspaceID, workspace?: WorkspaceID): { - where: string, - values: any[] - } { - const where: string[] = ['nc.personal_workspace = $1'] - const values: any[] = [personalWorkspace] - let index = 2 - - if (workspace != null) { - where.push(`nc.workspace_id = $${index++}`) - values.push(workspace) - } - - if (params.context != null) { - where.push(`n.context = $${index++}`) - values.push(params.context) - } - - - if (params.read === true) { - where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) - } - - if (params.read === false) { - where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) - } - - if (params.archived === true) { - where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) - } - - if (params.archived === false) { - where.push(`(nc.archived_from IS NULL OR 
nc.archived_from > m.created)`) - } - - - return {where: `WHERE ${where.join(' AND ')}`, values} - } - - toNotificationContext(row: any): NotificationContext { - return { - id: row.id, - card: row.card_id, - workspace: row.workspace_id, - personalWorkspace: row.personal_workspace, - archivedFrom: row.archived_from ? new Date(row.archived_from) : undefined, - lastView: row.last_view ? new Date(row.last_view) : undefined, - lastUpdate: row.last_update ? new Date(row.last_update) : undefined - } - } - - toNotification(row: any): Notification { - const lastPatch = row.patches?.[0] - const lastView = row.last_view ? new Date(row.last_view) : undefined - const archivedFrom = row.archived_from ? new Date(row.archived_from) : undefined - const created = new Date(row.message_created) - - return { - message: { - id: row.id, - card: row.message_card, - content: lastPatch?.content ?? row.message_content, - creator: row.message_creator, - created, - edited: new Date(lastPatch?.created ?? row.message_created), - reactions: row.reactions ?? [], - attachments: row.attachments ?? [] - }, - context: row.context, - read: lastView != null && lastView >= created, - archived: archivedFrom != null && archivedFrom >= created - } + return { where: `WHERE ${where.join(' AND ')}`, values } + } + + toNotificationContext (row: any): NotificationContext { + return { + id: row.id, + card: row.card_id, + workspace: row.workspace_id, + personalWorkspace: row.personal_workspace, + archivedFrom: row.archived_from != null ? new Date(row.archived_from) : undefined, + lastView: row.last_view != null ? new Date(row.last_view) : undefined, + lastUpdate: row.last_update != null ? new Date(row.last_update) : undefined + } + } + + toNotification (row: any): Notification { + const lastPatch = row.patches?.[0] + const lastView = row.last_view != null ? new Date(row.last_view) : undefined + const archivedFrom = row.archived_from != null ? new Date(row.archived_from) : undefined + const created = new Date(row.message_created) + + return { + message: { + id: row.id, + card: row.message_card, + content: lastPatch?.content ?? row.message_content, + creator: row.message_creator, + created, + edited: new Date(lastPatch?.created ?? row.message_created), + reactions: row.reactions ?? [], + attachments: row.attachments ?? 
[] + }, + context: row.context, + read: lastView != null && lastView >= created, + archived: archivedFrom != null && archivedFrom >= created } + } } - diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts index 755cadc9e83..1dfc6578c20 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/db/schema.ts @@ -1,122 +1,183 @@ -import type {ContextID, MessageID, RichText, SocialID, CardID, BlobID, Message, Reaction, Attachment, MessagesGroup, WorkspaceID } from "@hcengineering/communication-types" +import { + type ContextID, + type MessageID, + type RichText, + type SocialID, + type CardID, + type BlobID, + type Message, + type Reaction, + type Attachment, + type MessagesGroup, + type WorkspaceID, + type PatchType, + type Patch, + type Thread +} from '@hcengineering/communication-types' export enum TableName { - Message = 'communication.messages', - MessagesGroup = 'communication.messages_groups', - Patch = 'communication.patch', - Attachment = 'communication.attachments', - Reaction = 'communication.reactions', - Notification = 'communication.notifications', - NotificationContext = 'communication.notification_context' + Attachment = 'communication.attachments', + Message = 'communication.messages', + MessagesGroup = 'communication.messages_groups', + Notification = 'communication.notifications', + NotificationContext = 'communication.notification_context', + Patch = 'communication.patch', + Reaction = 'communication.reactions', + Thread = 'communication.thread' } export interface MessageDb { - id: MessageID, - workspace_id: WorkspaceID, - card_id: CardID, - content: RichText, - creator: SocialID, - created: Date, + id: MessageID + workspace_id: WorkspaceID + card_id: CardID + content: RichText + creator: SocialID + created: Date + thread_id?: CardID + replies_count?: number + last_reply?: Date } export interface MessagesGroupDb { - workspace_id: WorkspaceID, - card_id: CardID, - blob_id: BlobID, - from_id: MessageID, - to_id: MessageID, - from_date: Date, - to_date: Date, - count: number + workspace_id: WorkspaceID + card_id: CardID + blob_id: BlobID + from_date: Date + to_date: Date + from_id: MessageID + to_id: MessageID + count: number + patches?: PatchDb[] } export interface PatchDb { - workspace_id: WorkspaceID, - card_id: CardID, - message_id: MessageID, - content: RichText, - creator: SocialID, - created: Date, + workspace_id: WorkspaceID + card_id: CardID + message_id: MessageID + type: PatchType + content: RichText + creator: SocialID + created: Date } export interface ReactionDb { - workspace_id: WorkspaceID, - card_id: CardID, - message_id: MessageID, - reaction: string, - creator: SocialID - created: Date + workspace_id: WorkspaceID + card_id: CardID + message_id: MessageID + reaction: string + creator: SocialID + created: Date } export interface AttachmentDb { - message_id: MessageID, - card_id: CardID, - creator: SocialID - created: Date + message_id: MessageID + card_id: CardID + creator: SocialID + created: Date +} + +export interface ThreadDb { + workspace_id: WorkspaceID + card_id: CardID + message_id: MessageID + thread_id: CardID + replies_count: number + last_reply: Date } export interface NotificationDb { - message_id: MessageID, - context: ContextID + message_id: MessageID + context: ContextID } export interface ContextDb { - workspace_id: WorkspaceID - card_id: CardID - personal_workspace: WorkspaceID + workspace_id: WorkspaceID + card_id: CardID + personal_workspace: WorkspaceID - archived_from?: Date - 
last_view?: Date - last_update?: Date + archived_from?: Date + last_view?: Date + last_update?: Date } - interface RawMessage extends MessageDb { - patches?: PatchDb[] - attachments?: AttachmentDb[] - reactions?: ReactionDb[] -} - -export function toMessage(raw: RawMessage): Message { - const lastPatch = raw.patches?.[0] - return { - id: raw.id, - card: raw.card_id, - content: lastPatch?.content ?? raw.content, - creator: raw.creator, - created: new Date(raw.created), - edited: lastPatch?.created ? new Date(lastPatch.created) : undefined, - reactions: (raw.reactions ?? []).map(toReaction), - attachments: (raw.attachments ?? []).map(toAttachment) - } -} - -export function toReaction(raw: ReactionDb): Reaction { - return { - message: raw.message_id, - reaction: raw.reaction, - creator: raw.creator, - created: new Date(raw.created) - } -} - -export function toAttachment(raw: AttachmentDb): Attachment { - return { - message: raw.message_id, - card: raw.card_id, - creator: raw.creator, - created: new Date(raw.created) - } -} - -export function toMessagesGroup(raw: MessagesGroupDb): MessagesGroup { - return { - card: raw.card_id, - blobId: raw.blob_id, - fromId: raw.from_id, - toId: raw.to_id, - fromDate: new Date(raw.from_date), - toDate: new Date(raw.to_date), - count: raw.count - } -} \ No newline at end of file + patches?: PatchDb[] + attachments?: AttachmentDb[] + reactions?: ReactionDb[] +} + +export function toMessage (raw: RawMessage): Message { + const lastPatch = raw.patches?.[0] + + return { + id: String(raw.id) as MessageID, + card: raw.card_id, + content: lastPatch?.content ?? raw.content, + creator: raw.creator, + created: raw.created, + edited: lastPatch?.created ?? undefined, + thread: + raw.thread_id != null + ? { + card: raw.card_id, + message: String(raw.id) as MessageID, + thread: raw.thread_id, + repliesCount: raw.replies_count ?? 0, + lastReply: raw.last_reply ?? new Date() + } + : undefined, + reactions: (raw.reactions ?? []).map(toReaction), + attachments: (raw.attachments ?? []).map(toAttachment) + } +} + +export function toReaction (raw: ReactionDb): Reaction { + return { + message: String(raw.message_id) as MessageID, + reaction: raw.reaction, + creator: raw.creator, + created: raw.created + } +} + +export function toAttachment (raw: AttachmentDb): Attachment { + return { + message: String(raw.message_id) as MessageID, + card: raw.card_id, + creator: raw.creator, + created: raw.created + } +} + +export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { + return { + card: raw.card_id, + blobId: raw.blob_id, + fromDate: raw.from_date, + toDate: raw.to_date, + fromId: String(raw.from_id) as MessageID, + toId: String(raw.to_id) as MessageID, + count: raw.count, + patches: raw.patches == null ? 
[] : raw.patches.filter((it: any) => it.message_id != null).map(toPatch) + } +} + +export function toPatch (raw: PatchDb): Patch { + return { + type: raw.type, + message: String(raw.message_id) as MessageID, + content: raw.content, + creator: raw.creator, + created: new Date(raw.created) + } +} + +export function toThread (raw: ThreadDb): Thread { + return { + card: raw.card_id, + message: String(raw.message_id) as MessageID, + thread: raw.thread_id, + repliesCount: raw.replies_count, + lastReply: raw.last_reply + } +} diff --git a/packages/cockroach/src/db/utils.ts b/packages/cockroach/src/db/utils.ts index 4b56ad3f865..9f4ac0a1b1d 100644 --- a/packages/cockroach/src/db/utils.ts +++ b/packages/cockroach/src/db/utils.ts @@ -1,22 +1,28 @@ -export function getCondition(table: string, dbField: string, index: number, param: any): { where: string, value: any } | undefined { - if (typeof param === 'object') { - if (param.less != null) { - return {where: `${table}.${dbField} < $${index}`, value: param.less}; - } - if (param.lessOrEqual != null) { - return {where: `${table}.${dbField} <= $${index}`, value: param.lessOrEqual}; - } - if (param.greater != null) { - return {where: `${table}.${dbField} > $${index}`, value: param.greater}; - } - if (param.greaterOrEqual != null) { - return {where: `${table}.${dbField} >= $${index}`, value: param.greaterOrEqual}; - } +export function getCondition ( + table: string, + dbField: string, + index: number, + param: any, + type: string +): { where: string, value: any } | undefined { + if (typeof param === 'object') { + if (param.less != null) { + return { where: `${table}.${dbField} < $${index}::${type}`, value: param.less } } - - if(param != null) { - return {where: `${table}.${dbField} = $${index}`, value: param}; + if (param.lessOrEqual != null) { + return { where: `${table}.${dbField} <= $${index}::${type}`, value: param.lessOrEqual } + } + if (param.greater != null) { + return { where: `${table}.${dbField} > $${index}::${type}`, value: param.greater } } + if (param.greaterOrEqual != null) { + return { where: `${table}.${dbField} >= $${index}::${type}`, value: param.greaterOrEqual } + } + } + + if (param != null) { + return { where: `${table}.${dbField} = $${index}::${type}`, value: param } + } - return undefined -} \ No newline at end of file + return undefined +} diff --git a/packages/cockroach/src/index.ts b/packages/cockroach/src/index.ts index 03eeab5ffaf..32544c6dfa9 100644 --- a/packages/cockroach/src/index.ts +++ b/packages/cockroach/src/index.ts @@ -1 +1 @@ -export * from './adapter.ts' +export * from './adapter' diff --git a/packages/cockroach/src/types.ts b/packages/cockroach/src/types.ts new file mode 100644 index 00000000000..d15e572af3e --- /dev/null +++ b/packages/cockroach/src/types.ts @@ -0,0 +1,16 @@ +import { type ParameterOrJSON, type Row } from 'postgres' + +export interface Logger { + info: (message: string, data?: Record) => void + warn: (message: string, data?: Record) => void + error: (message: string, data?: Record) => void +} + +export interface Options { + withLogs?: boolean +} + +export interface SqlClient { + execute: )[]>(query: string, params?: ParameterOrJSON[]) => Promise + close: () => void +} diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts deleted file mode 100644 index 0560e4bd55b..00000000000 --- a/packages/core/src/index.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './message.ts' diff --git a/packages/core/src/message.ts b/packages/core/src/message.ts deleted file mode 100644 index 
48a43227a53..00000000000 --- a/packages/core/src/message.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { MessageID } from '@hcengineering/communication-types' - -let lastTimestamp = Math.floor(Date.now() / 1000) -let counter = 0 - -export function generateMessageId(): MessageID { - const timestamp = Math.floor(Date.now() / 1000) - - if (timestamp !== lastTimestamp) { - lastTimestamp = timestamp - counter = 0 - } - - counter++ - - return ((timestamp << 24) | counter) as MessageID -} - -export function parseMessageId(id: MessageID): { timestamp: number; counter: number } { - const timestamp = id >> 24 - const counter = id & 0xffffff - - return { timestamp, counter } -} diff --git a/packages/examples/package.json b/packages/examples/package.json index 9dfa63f4697..e79ec57aa20 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -13,9 +13,7 @@ "@types/bun": "^1.1.14" }, "dependencies": { - "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-client-ws": "workspace:*", - "@hcengineering/communication-client-sqlite": "workspace:*", "@hcengineering/communication-types": "workspace:*" }, "peerDependencies": { diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts index 414362ed149..d72db680f71 100644 --- a/packages/examples/src/index.ts +++ b/packages/examples/src/index.ts @@ -1,39 +1,33 @@ import { type Message, type SocialID, - SortOrder, + SortingOrder, type CardID, type Window, type WorkspaceID } from '@hcengineering/communication-types' -import { getWebsocketClient } from '@hcengineering/communication-client-ws' -import { getSqliteClient } from '@hcengineering/communication-client-sqlite' -import { createMessagesQuery, initLiveQueries } from '@hcengineering/communication-client-query' +import { getWebsocketClient, createMessagesQuery } from '@hcengineering/communication-client-ws' const card = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as CardID const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as WorkspaceID -const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' as WorkspaceID +// const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' as WorkspaceID const creator1 = 'email:vasya@huly.com' as SocialID -async function getClient(type: 'ws' | 'sqlite') { - if (type === 'ws') { - const platformUrl = 'ws://localhost:8090' - const token = 'token' - return await getWebsocketClient(platformUrl, token) - } - - return await getSqliteClient(workspace, personalWorkspace) -} - export async function example() { - const client = await getClient('sqlite') - initLiveQueries(client) + const platformUrl = 'ws://localhost:8090' + const token = 'token' + const client = await getWebsocketClient( + platformUrl, + token, + workspace, + 'http://localhost:4022/blob/:workspace/:blobId/:filename' + ) const query1 = createMessagesQuery() let window: Window | undefined = undefined - query1.query({ card, sort: SortOrder.Desc }, (res) => { + query1.query({ card, order: SortingOrder.Descending }, (res) => { window = res const r = window.getResult() r.reverse() @@ -51,7 +45,7 @@ export async function example() { }) async function editMessage(message: Message) { - await client.createPatch(card, message.id, message.content + '_1_', creator1) + await client.updateMessage(card, message.id, message.content + '_1_', creator1) } 
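  // A minimal usage sketch of the query API already shown above (createMessagesQuery,
  // SortingOrder, Window.getResult); printLatestMessages is a hypothetical helper and
  // the limit of 10 is illustrative, not part of the original example.
  function printLatestMessages(): void {
    const query = createMessagesQuery()
    query.query({ card, order: SortingOrder.Descending, limit: 10 }, (window) => {
      // The window holds the currently loaded page of messages, newest first.
      for (const message of window.getResult()) {
        console.log(message.created.toISOString(), message.creator, message.content)
      }
    })
  }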
async function deleteMessage(message: Message) { diff --git a/packages/query/package.json b/packages/query/package.json index 1a3669fcb8f..87dbd672de3 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -11,11 +11,12 @@ ], "devDependencies": { "@types/bun": "^1.1.14", - "@types/crypto-js": "^4.2.2" + "@types/crypto-js": "^4.2.2" }, "dependencies": { - "@hcengineering/communication-types": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", + "@hcengineering/communication-types": "workspace:*", "fast-equals": "^5.0.1" }, "peerDependencies": { diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 9dbae0845b7..44563787ec1 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -1,4 +1,8 @@ -import { type FindMessagesParams, type FindNotificationsParams } from '@hcengineering/communication-types' +import { + type FindMessagesParams, + type FindNotificationsParams, + type WorkspaceID +} from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' import type { MessagesQueryCallback, @@ -7,7 +11,7 @@ import type { QueryClient } from '@hcengineering/communication-sdk-types' -import type { Query, QueryId } from './types' +import type { FindParams, PagedQuery, QueryId } from './types' import { MessagesQuery } from './messages/query' import { NotificationQuery } from './notifications/query' @@ -18,14 +22,15 @@ interface CreateQueryResult { const maxQueriesCache = 10 export class LiveQueries { - private readonly client: QueryClient - private readonly queries = new Map() + private readonly queries = new Map() private readonly unsubscribed = new Set() private counter: number = 0 - constructor(client: QueryClient) { - this.client = client - } + constructor( + private readonly client: QueryClient, + private readonly workspace: WorkspaceID, + private readonly filesUrl: string + ) {} async onEvent(event: ResponseEvent): Promise { for (const q of this.queries.values()) { @@ -66,11 +71,11 @@ export class LiveQueries { return exists } else { const result = exists.copyResult() - return new MessagesQuery(this.client, id, params, callback, result) + return new MessagesQuery(this.client, this.workspace, this.filesUrl, id, params, callback, result) } } - return new MessagesQuery(this.client, id, params, callback) + return new MessagesQuery(this.client, this.workspace, this.filesUrl, id, params, callback) } private createNotificationQuery( @@ -103,7 +108,7 @@ export class LiveQueries { } } - private findNotificationQuery(params: FindMessagesParams): NotificationQuery | undefined { + private findNotificationQuery(params: FindNotificationsParams): NotificationQuery | undefined { for (const query of this.queries.values()) { if (query instanceof NotificationQuery) { if (!this.queryCompare(params, query.params)) continue @@ -112,7 +117,7 @@ export class LiveQueries { } } - private queryCompare(q1: FindMessagesParams, q2: FindMessagesParams): boolean { + private queryCompare(q1: FindParams, q2: FindParams): boolean { if (Object.keys(q1).length !== Object.keys(q2).length) { return false } @@ -136,7 +141,7 @@ export class LiveQueries { this.unsubscribed.delete(id) } - private unsubscribeQuery(query: Query): void { + private unsubscribeQuery(query: PagedQuery): void { 
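    // Sketch of the assumed query-cache lifecycle (based on findMessagesQuery above):
    // the query id is parked in `unsubscribed` and its callback detached, so a later
    // createMessagesQuery call with equal params (compared via queryCompare/deepEqual)
    // can be answered from this cache, either by returning the parked query directly
    // or by seeding a fresh MessagesQuery from copyResult(). When the set exceeds
    // maxQueriesCache (10), older cached queries are presumably evicted.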
this.unsubscribed.add(query.id) query.removeCallback() if (this.unsubscribed.size > maxQueriesCache) { diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 08279eeaaf3..073527450b7 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -1,201 +1,693 @@ import { - type FindMessagesParams, - type MessageID, - type Message, - type Patch, - SortOrder + type FindMessagesParams, + type Message, + type MessagesGroup, + type WorkspaceID, + type MessageID, + SortingOrder, + type Patch, + type ParsedFile, + PatchType, + type CardID } from '@hcengineering/communication-types' import { - type AttachmentCreatedEvent, - type MessageCreatedEvent, - type PatchCreatedEvent, - type ReactionCreatedEvent, - type ResponseEvent, - ResponseEventType, - type AttachmentRemovedEvent, - type MessageRemovedEvent, - type ReactionRemovedEvent + ResponseEventType, + type AttachmentCreatedEvent, + type AttachmentRemovedEvent, + type MessageCreatedEvent, + type MessageRemovedEvent, + type PatchCreatedEvent, + type QueryCallback, + type QueryClient, + type ReactionCreatedEvent, + type ReactionRemovedEvent, + type ResponseEvent, + type ThreadCreatedEvent } from '@hcengineering/communication-sdk-types' +import { loadGroupFile, parseMessageId } from '@hcengineering/communication-shared' + +import { QueryResult } from '../result' +import { defaultQueryParams, Direction, type QueryId, type PagedQuery } from '../types' +import { WindowImpl } from '../window' +import { addReaction, addReply, removeReaction, removeReply } from './utils' + +const GROUPS_LIMIT = 20 + +export class MessagesQuery implements PagedQuery { + protected result: Promise> | QueryResult + + private messagesFromFiles: Message[] = [] + + private readonly groupsBuffer: MessagesGroup[] = [] + + private firstGroup?: MessagesGroup + private lastGroup?: MessagesGroup + + private readonly limit: number + + private readonly next = { + hasMessages: true, + hasGroups: true + } + + private readonly prev = { + hasMessages: true, + hasGroups: true + } + + constructor ( + protected readonly client: QueryClient, + private readonly workspace: WorkspaceID, + private readonly filesUrl: string, + public readonly id: QueryId, + public readonly params: FindMessagesParams, + private callback?: QueryCallback, + initialResult?: QueryResult + ) { + const baseLimit = params.id != null ? 1 : this.params.limit ?? defaultQueryParams.limit + this.limit = baseLimit + 1 + this.params = { + ...params, + order: params.order ?? 
defaultQueryParams.order + } + if (initialResult !== undefined) { + const messages = initialResult.getResult() + const count = messages.length + + if (count < this.limit) { + this.result = initialResult + } else { + if (this.params.order === SortingOrder.Ascending) { + this.result = new QueryResult(messages.slice(0, baseLimit), (x) => x.id) + this.result.setHead(true) + this.result.setTail(false) + } else { + this.result = new QueryResult(messages.slice(0, baseLimit), (x) => x.id) + this.result.setHead(false) + this.result.setTail(true) + } + } + void this.notify() + } else { + this.result = new QueryResult([] as Message[], (x) => x.id) + + if (this.isInitLoadingForward()) { + this.result.setHead(true) + void this.requestLoadNextPage() + } else { + this.result.setTail(true) + void this.requestLoadPrevPage() + } + } + } + + setCallback (callback: QueryCallback): void { + this.callback = callback + void this.notify() + } + + removeCallback (): void { + this.callback = () => {} + } + + async requestLoadNextPage (): Promise { + if (this.result instanceof Promise) this.result = await this.result + + if (!this.result.isTail()) { + this.result = this.loadPage(Direction.Forward, this.result) + void this.result + .then(() => this.notify()) + .catch((error) => { + console.error('Failed to load messages', error) + }) + } + } + + async requestLoadPrevPage (): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (!this.result.isHead()) { + this.result = this.loadPage(Direction.Backward, this.result) + void this.result + .then(() => this.notify()) + .catch((error) => { + console.error('Failed to load messages', error) + }) + } + } -import { BaseQuery } from '../query' + private isInitLoadingForward (): boolean { + const { order, created, id } = this.params -export class MessagesQuery extends BaseQuery { - override async find(params: FindMessagesParams): Promise { - return this.client.findMessages(params, this.id) + if (id != null) { + return false } - override getObjectId(object: Message): MessageID { - return object.id + if (created == null) return order === SortingOrder.Ascending + if (created instanceof Date) return order === SortingOrder.Ascending + // TODO: fix me + if (created.less != null) return order !== SortingOrder.Ascending + if (created.lessOrEqual != null) return order !== SortingOrder.Ascending + if (created.greater != null) return order === SortingOrder.Ascending + if (created.greaterOrEqual != null) return order === SortingOrder.Ascending + + return false + } + + private async loadPage (direction: Direction, result: QueryResult): Promise> { + const { messages, fromDb } = + direction === Direction.Forward ? await this.loadNextMessages(result) : await this.loadPrevMessages(result) + + if (!result.isHead() && direction === Direction.Backward) { + result.setHead(messages.length < this.limit) + } + if (!result.isTail() && direction === Direction.Forward) { + result.setTail(messages.length < this.limit) } - override getObjectDate(object: Message): Date { - return object.created + if (messages.length === this.limit && this.limit > 1) { + const lastMessage = messages.pop() + if (lastMessage != null && !fromDb) { + direction === Direction.Forward + ? 
this.messagesFromFiles.unshift(lastMessage) + : this.messagesFromFiles.push(lastMessage) + } } - override async onEvent(event: ResponseEvent): Promise { - switch (event.type) { - case ResponseEventType.MessageCreated: - return await this.onCreateMessageEvent(event) - case ResponseEventType.MessageRemoved: - return await this.onRemoveMessageEvent(event) - case ResponseEventType.PatchCreated: - return await this.onCreatePatchEvent(event) - case ResponseEventType.ReactionCreated: - return await this.onCreateReactionEvent(event) - case ResponseEventType.ReactionRemoved: - return await this.onRemoveReactionEvent(event) - case ResponseEventType.AttachmentCreated: - return await this.onCreateAttachmentEvent(event) - case ResponseEventType.AttachmentRemoved: - return await this.onRemoveAttachmentEvent(event) - } + result.append(messages) + + return result + } + + // Load next + private async loadNextMessages (result: QueryResult): Promise<{ messages: Message[], fromDb: boolean }> { + const messages: Message[] = this.messagesFromFiles.splice(0, this.limit) + + if (messages.length >= this.limit) return { messages, fromDb: false } + + while (this.next.hasGroups || this.groupsBuffer.length > 0) { + await this.loadGroups(Direction.Forward, result) + + messages.push(...this.messagesFromFiles.splice(0, this.limit - messages.length)) + + if (messages.length >= this.limit) return { messages, fromDb: false } } - async onCreateMessageEvent(event: MessageCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + const dbMessages = await this.findNextMessages(this.limit - messages.length, result) + this.next.hasMessages = dbMessages.length > 0 + messages.push(...dbMessages) + return { messages, fromDb: dbMessages.length > 0 } + } - const message = { - ...event.message, - edited: event.message.edited, - created: event.message.created - } - const exists = this.result.get(message.id) + private async findNextMessages (limit: number, result: QueryResult): Promise { + if (this.next.hasGroups) { + return [] + } + + if (result.isTail()) return [] - if (exists !== undefined) return - if (!this.match(message)) return + const last = result.getLast() - if (this.result.isTail()) { - if (this.params.sort === SortOrder.Asc) { - this.result.push(message) - } else { - this.result.unshift(message) + return await this.find({ + ...this.params, + created: + last != null + ? 
{ + greater: last?.created } - await this.notify() - } + : undefined, + limit, + order: SortingOrder.Ascending + }) + } + + // Load prev + private async loadPrevMessages (result: QueryResult): Promise<{ messages: Message[], fromDb: boolean }> { + const messages: Message[] = [] + + if (this.prev.hasMessages) { + const prevMessages = await this.findPrevMessages(this.limit, result) + this.prev.hasMessages = prevMessages.length > 0 + messages.push(...prevMessages) } - private match(message: Message): boolean { - if (this.params.id != null && this.params.id !== message.id) { - return false + if (messages.length >= this.limit) return { messages, fromDb: true } + + const restLimit = this.limit - messages.length + const fromBuffer = this.messagesFromFiles.splice(-restLimit, restLimit).reverse() + messages.push(...fromBuffer) + + if (messages.length >= this.limit) return { messages, fromDb: false } + + while (this.prev.hasGroups || this.groupsBuffer.length > 0) { + await this.loadGroups(Direction.Backward, result) + + const rest = this.limit - messages.length + const fromBuffer2 = this.messagesFromFiles.splice(-rest, rest).reverse() + + messages.push(...fromBuffer2) + if (messages.length >= this.limit) return { messages, fromDb: false } + } + + return { messages, fromDb: false } + } + + private async findPrevMessages (limit: number, result: QueryResult): Promise { + if (!this.prev.hasMessages || result.isHead()) return [] + + const first = result.getLast() + + return await this.find({ + ...this.params, + created: + first != null + ? { + less: first?.created + } + : undefined, + limit, + order: SortingOrder.Descending + }) + } + + private async loadGroups (direction: Direction, result: QueryResult): Promise { + let messagesCount = 0 + const lastResult = result.getLast() + const toLoad: MessagesGroup[] = [] + const toBuffer: MessagesGroup[] = [] + + while (messagesCount < this.limit) { + const currentGroups = this.groupsBuffer.splice(direction === Direction.Forward ? 0 : -GROUPS_LIMIT, GROUPS_LIMIT) + const hasGroups = direction === Direction.Forward ? this.next.hasGroups : this.prev.hasGroups + if (currentGroups.length === 0 && !hasGroups) break + + const groups = + currentGroups.length > 0 + ? currentGroups + : await this.findGroups( + direction, + direction === Direction.Forward ? this.lastGroup?.fromDate : this.firstGroup?.fromDate + ) + + if (currentGroups.length === 0) { + this.firstGroup = direction === Direction.Forward ? this.firstGroup ?? groups[0] : groups[groups.length - 1] + this.lastGroup = + direction === Direction.Forward ? groups[groups.length - 1] ?? this.lastGroup : this.lastGroup ?? groups[0] + + if (direction === Direction.Forward) { + this.next.hasGroups = groups.length >= GROUPS_LIMIT + } else { + this.prev.hasGroups = groups.length >= GROUPS_LIMIT } - if (this.params.card != null && this.params.card !== message.card) { - return false + if (this.params.id != null) { + this.next.hasGroups = false + this.prev.hasGroups = false } - return true + } + + const orderedGroups = direction === Direction.Forward ? groups : groups.reverse() + while (messagesCount < this.limit && orderedGroups.length > 0) { + const group = direction === Direction.Forward ? orderedGroups.shift() : orderedGroups.pop() + if (group == null) break + toLoad.push(group) + messagesCount += group.count + } + + while (orderedGroups.length > 0) { + const group = direction === Direction.Forward ? 
orderedGroups.shift() : orderedGroups.pop() + if (group == null) break + toBuffer.push(group) + messagesCount += group.count + } } - private async onCreatePatchEvent(event: PatchCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + if (direction === Direction.Forward) { + this.groupsBuffer.push(...toBuffer) + } else { + this.groupsBuffer.unshift(...toBuffer) + } - const patch = { - ...event.patch, - created: event.patch.created - } + const parsedFiles = await Promise.all(toLoad.map((group) => this.loadMessagesFromFiles(group))) + + for (const file of parsedFiles) { + if (file.messages.length === 0) continue + if (direction === Direction.Forward) { + const firstInFile = file.messages[0] + const queryDate = + lastResult != null && firstInFile.created < lastResult?.created ? lastResult?.created : undefined + this.messagesFromFiles.push(...this.matchFileMessages(file, queryDate)) + } else { + const lastInFile = file.messages[file.messages.length - 1] + const queryDate = + lastResult != null && lastInFile.created > lastResult?.created ? lastResult?.created : undefined + this.messagesFromFiles.unshift(...this.matchFileMessages(file, queryDate)) + } + } + } + + private matchFileMessages (file: ParsedFile, created?: Date): Message[] { + let result: Message[] = file.messages + if (this.params.id != null) { + const msg = file.messages.find((it) => it.id === this.params.id) + result = msg != null ? [msg] : [] + } - const message = this.result.get(patch.message) + if (created != null) { + result = + this.params.order === SortingOrder.Ascending + ? result.filter((it) => it.created > created) + : result.filter((it) => it.created < created) + } - if (message === undefined) return + return result + } - if (message.created < patch.created) { - this.result.update(this.applyPatch(message, patch)) - await this.notify() - } + private async loadMessagesFromFiles (group: MessagesGroup): Promise { + const parsedFile = await loadGroupFile(this.workspace, this.filesUrl, group, { retries: 5 }) + + const patches = group.patches ?? [] + + const patchesMap = new Map() + for (const patch of patches) { + patchesMap.set(patch.message, [...(patchesMap.get(patch.message) ?? []), patch]) } - private async onRemoveMessageEvent(event: MessageRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + return { + metadata: parsedFile.metadata, + messages: + patches.length > 0 + ? parsedFile.messages.map((message) => this.applyPatches(message, patchesMap.get(message.id) ?? [])) + : parsedFile.messages + } + } + + private async findGroupByMessage (id: MessageID): Promise { + const date = parseMessageId(id) + const group1 = (await this.client.findMessagesGroups({ + card: this.params.card, + limit: 1, + toDate: { + greaterOrEqual: date + }, + order: SortingOrder.Descending, + orderBy: 'fromDate' + }))[0] + + if (group1 !== undefined) { + return group1 + } - const deleted = this.result.delete(event.message) + return (await this.client.findMessagesGroups({ + card: this.params.card, + limit: 1, + fromDate: { + lessOrEqual: date + }, + order: SortingOrder.Ascending, + orderBy: 'fromDate' + }))[0] + } + + private async findGroups (direction: Direction, fromDate?: Date): Promise { + if (this.params.id != null) { + const group = await this.findGroupByMessage(this.params.id) + return group !== undefined ? 
[group] : [] + } - if (deleted !== undefined) { - await this.notify() - } + if (fromDate == null) { + return await this.client.findMessagesGroups({ + card: this.params.card, + limit: GROUPS_LIMIT, + order: direction === Direction.Forward ? SortingOrder.Ascending : SortingOrder.Descending, + orderBy: 'fromDate' + }) } - private async onCreateReactionEvent(event: ReactionCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + return await this.client.findMessagesGroups({ + card: this.params.card, + limit: GROUPS_LIMIT, + order: direction === Direction.Forward ? SortingOrder.Ascending : SortingOrder.Descending, + orderBy: 'fromDate', + fromDate: + direction === Direction.Forward + ? { + greater: fromDate + } + : { + less: fromDate + } + }) + } + + private async find (params: FindMessagesParams): Promise { + return await this.client.findMessages(params, this.id) + } + + private async notify (): Promise { + if (this.callback == null) return + if (this.result instanceof Promise) this.result = await this.result + const result = this.result.getResult() + this.callback(new WindowImpl(result, this.result.isTail(), this.result.isHead(), this)) + } + + async unsubscribe (): Promise { + await this.client.unsubscribeQuery(this.id) + } + + async onEvent (event: ResponseEvent): Promise { + switch (event.type) { + case ResponseEventType.MessageCreated: { + await this.onCreateMessageEvent(event) + return + } + case ResponseEventType.MessageRemoved: { + await this.onRemoveMessageEvent(event) + return + } + case ResponseEventType.PatchCreated: { + await this.onCreatePatchEvent(event) + return + } + case ResponseEventType.ReactionCreated: { + await this.onCreateReactionEvent(event) + return + } + case ResponseEventType.ReactionRemoved: { + await this.onRemoveReactionEvent(event) + return + } + case ResponseEventType.AttachmentCreated: { + await this.onCreateAttachmentEvent(event) + return + } + case ResponseEventType.AttachmentRemoved: { + await this.onRemoveAttachmentEvent(event) + return + } + case ResponseEventType.ThreadCreated: { + await this.onCreateThreadEvent(event) + } + } + } - const reaction = { - ...event.reaction, - created: event.reaction.created - } - const message = this.result.get(reaction.message) - if (message === undefined) return + async onCreateThreadEvent (event: ThreadCreatedEvent): Promise { + if (this.params.card !== event.thread.card) return + if (this.result instanceof Promise) this.result = await this.result + + const message = this.result.get(event.thread.message) + if (message !== undefined) { + const updated: Message = { + ...message, + thread: event.thread + } - message.reactions.push(reaction) - this.result.update(message) - await this.notify() + this.result.update(updated) + void this.notify() } - private async onRemoveReactionEvent(event: ReactionRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result + this.messagesFromFiles = this.messagesFromFiles.map((it) => { + if (it.id === event.thread.message) { + return { + ...it, + thread: event.thread } + } + return it + }) + } + + async onCreateMessageEvent (event: MessageCreatedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (this.params.card !== event.message.card) return + const { message } = event + const exists = this.result.get(message.id) + + if (exists !== undefined) return + if (!this.match(message)) return + + if (this.result.isTail()) { + if (this.params.order === 
SortingOrder.Ascending) { + this.result.push(message) + } else { + this.result.unshift(message) + } + await this.notify() + } + } + + private match (message: Message): boolean { + if (this.params.id != null && this.params.id !== message.id) { + return false + } + if (this.params.card !== message.card) { + return false + } + return true + } - const message = this.result.get(event.message) - if (message === undefined) return + private async onCreatePatchEvent (event: PatchCreatedEvent): Promise { + if (this.params.card !== event.card) return + if (this.result instanceof Promise) this.result = await this.result - const reactions = message.reactions.filter((it) => it.reaction !== event.reaction && it.creator !== event.creator) - if (reactions.length === message.reactions.length) return + const { patch } = event + const messageId = BigInt(patch.message) + const group = this.groupsBuffer.find((it) => BigInt(it.fromId) <= messageId && BigInt(it.toId) >= messageId) - const updated = { - ...message, - reactions - } - this.result.update(updated) - await this.notify() + if (group != null) { + group.patches.push(patch) } - private async onCreateAttachmentEvent(event: AttachmentCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + const message = this.result.get(patch.message) + if (message === undefined) return - const attachment = { - ...event.attachment, - created: event.attachment.created - } - const message = this.result.get(attachment.message) - if (message === undefined) return + if (message.created < patch.created) { + this.result.update(this.applyPatch(message, patch)) + await this.notify() + } + } + + private async onRemoveMessageEvent (event: MessageRemovedEvent): Promise { + if (this.params.card !== event.card) return + if (this.result instanceof Promise) this.result = await this.result + + const deleted = this.result.delete(event.message) - message.attachments.push(attachment) - this.result.update(message) - await this.notify() + if (deleted !== undefined) { + void this.notify() } - private async onRemoveAttachmentEvent(event: AttachmentRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + this.messagesFromFiles = this.messagesFromFiles.filter((it) => it.id !== event.message) + } + + private async onCreateReactionEvent (event: ReactionCreatedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (this.params.card !== event.card) return - const message = this.result.get(event.message) - if (message === undefined) return + const reaction = { + ...event.reaction, + created: event.reaction.created + } - const attachments = message.attachments.filter((it) => it.card !== event.card) - if (attachments.length === message.attachments.length) return + const message = this.result.get(reaction.message) + if (message !== undefined) { + this.result.update(addReaction(message, reaction)) + void this.notify() + } - const updated = { - ...message, - attachments - } + const fromBuffer = this.messagesFromFiles.find((it) => it.id === reaction.message) + if (fromBuffer !== undefined) { + addReaction(fromBuffer, reaction) + } + } + + private async onRemoveReactionEvent (event: ReactionRemovedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (this.params.card !== event.card) return + + const message = this.result.get(event.message) + if (message !== undefined) { + const updated = removeReaction(message, event.reaction, 
event.creator) + if (updated.reactions.length !== message.reactions.length) { this.result.update(updated) - await this.notify() + void this.notify() + } + } + this.messagesFromFiles = this.messagesFromFiles.map((it) => + it.id === event.message ? removeReaction(it, event.reaction, event.creator) : it + ) + } + + private async onCreateAttachmentEvent (event: AttachmentCreatedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + + const attachment = { + ...event.attachment, + created: event.attachment.created + } + const message = this.result.get(attachment.message) + if (message === undefined) return + + message.attachments.push(attachment) + this.result.update(message) + await this.notify() + } + + private async onRemoveAttachmentEvent (event: AttachmentRemovedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + + const message = this.result.get(event.message) + if (message === undefined) return + + const attachments = message.attachments.filter((it) => it.card !== event.card) + if (attachments.length === message.attachments.length) return + + const updated = { + ...message, + attachments } + this.result.update(updated) + await this.notify() + } - private applyPatch(message: Message, patch: Patch): Message { + private applyPatch (message: Message, patch: Patch): Message { + switch (patch.type) { + case PatchType.update: return { - ...message, - content: patch.content, - creator: patch.creator, - created: patch.created + ...message, + edited: patch.created, + content: patch.content } + case PatchType.addReaction: + return addReaction(message, { + message: message.id, + reaction: patch.content, + creator: patch.creator, + created: patch.created + }) + case PatchType.removeReaction: + return removeReaction(message, patch.content, patch.creator) + case PatchType.addReply: + return addReply(message, patch.content as CardID, patch.created) + case PatchType.removeReply: + return removeReply(message, patch.content as CardID) } + + return message + } + + private applyPatches (message: Message, patches: Patch[]): Message { + if (patches.length === 0) return message + + for (const p of patches) { + message = this.applyPatch(message, p) + } + return message + } + + copyResult (): QueryResult | undefined { + if (this.result instanceof Promise) { + return undefined + } + + return this.result.copy() + } } diff --git a/packages/query/src/messages/utils.ts b/packages/query/src/messages/utils.ts new file mode 100644 index 00000000000..1db6a1260df --- /dev/null +++ b/packages/query/src/messages/utils.ts @@ -0,0 +1,54 @@ +import type { CardID, Message, Reaction, SocialID } from '@hcengineering/communication-types' + +export function addReaction (message: Message, reaction: Reaction): Message { + message.reactions.push(reaction) + return message +} + +export function removeReaction (message: Message, emoji: string, creator: SocialID): Message { + const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) + if (reactions.length === message.reactions.length) return message + + return { + ...message, + reactions + } +} + +export function addReply (message: Message, thread: CardID, created: Date): Message { + if (message.thread === undefined) { + return { + ...message, + thread: { + card: message.card, + message: message.id, + thread, + repliesCount: 1, + lastReply: created + } + } + } + + if (message.thread.thread !== thread) return message + + return { + ...message, + thread: { + ...message.thread, + 
repliesCount: message.thread.repliesCount + 1, + lastReply: created + } + } +} + +export function removeReply (message: Message, thread: CardID): Message { + if (message.thread === undefined || message.thread.thread !== thread) return message + + return { + ...message, + thread: { + ...message.thread, + repliesCount: message.thread.repliesCount - 1 + } + } +} diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index e318a01660b..7b0d5b6a505 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -1,129 +1,138 @@ import { - type FindNotificationsParams, - SortOrder, - type Notification, - type MessageID, + type FindNotificationsParams, + SortingOrder, + type Notification, + type MessageID } from '@hcengineering/communication-types' import { - type NotificationCreatedEvent, - ResponseEventType, - type ResponseEvent, - type NotificationContextRemovedEvent, - type NotificationRemovedEvent, - type NotificationContextUpdatedEvent, + type NotificationCreatedEvent, + ResponseEventType, + type ResponseEvent, + type NotificationContextRemovedEvent, + type NotificationRemovedEvent, + type NotificationContextUpdatedEvent } from '@hcengineering/communication-sdk-types' -import {BaseQuery} from '../query.ts'; +import { BaseQuery } from '../query' export class NotificationQuery extends BaseQuery { - override async find(params: FindNotificationsParams): Promise { - return this.client.findNotifications(params, this.id) + override async find (params: FindNotificationsParams): Promise { + return await this.client.findNotifications(params, this.id) + } + + override getObjectId (object: Notification): MessageID { + return object.message.id + } + + override getObjectDate (object: Notification): Date { + return object.message.created + } + + override async onEvent (event: ResponseEvent): Promise { + switch (event.type) { + case ResponseEventType.NotificationCreated: { + await this.onCreateNotificationEvent(event) + return + } + case ResponseEventType.NotificationRemoved: { + await this.onRemoveNotificationEvent(event) + return + } + case ResponseEventType.NotificationContextUpdated: { + await this.onUpdateNotificationContextEvent(event) + return + } + case ResponseEventType.NotificationContextRemoved: { + await this.onRemoveNotificationContextEvent(event) + } } + } - override getObjectId(object: Notification): MessageID { - return object.message.id + async onCreateNotificationEvent (event: NotificationCreatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result } - override getObjectDate(object: Notification): Date { - return object.message.created - } - - override async onEvent(event: ResponseEvent): Promise { - switch (event.type) { - case ResponseEventType.NotificationCreated: - return await this.onCreateNotificationEvent(event) - case ResponseEventType.NotificationRemoved: - return await this.onRemoveNotificationEvent(event) - case ResponseEventType.NotificationContextUpdated: - return await this.onUpdateNotificationContextEvent(event) - case ResponseEventType.NotificationContextRemoved: - return await this.onRemoveNotificationContextEvent(event) - } - } + const exists = this.result.get(event.notification.message.id) + if (exists !== undefined) return - async onCreateNotificationEvent(event: NotificationCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + if (this.params.message != null && this.params.message !== 
event.notification.message.id) return + if (this.params.context != null && this.params.context !== event.notification.context) return - const exists = this.result.get(event.notification.message.id) - if (exists !== undefined) return + if (this.result.isTail()) { + if (this.params.order === SortingOrder.Ascending) { + this.result.push(event.notification) + } else { + this.result.unshift(event.notification) + } + await this.notify() + } + } - if (this.params.message != null && this.params.message !== event.notification.message.id) return - if (this.params.context != null && this.params.context !== event.notification.context) return + private async onUpdateNotificationContextEvent (event: NotificationContextUpdatedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } - if (this.result.isTail()) { - if (this.params.sort === SortOrder.Asc) { - this.result.push(event.notification) - } else { - this.result.unshift(event.notification) + if (this.params.context != null && this.params.context !== event.context) return + if (event.update.lastView === undefined && event.update.archivedFrom === undefined) return + + const toUpdate = + this.params.context === event.context + ? this.result.getResult() + : this.result.getResult().filter((it) => it.context === event.context) + if (toUpdate.length === 0) return + + for (const notification of toUpdate) { + this.result.update({ + ...notification, + ...(event.update.lastView !== undefined + ? { + read: event.update.lastView < notification.message.created + } + : {}), + ...(event.update.archivedFrom !== undefined + ? { + archived: event.update.archivedFrom < notification.message.created } - await this.notify() - } + : {}) + }) } + } - - private async onUpdateNotificationContextEvent(event: NotificationContextUpdatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } - - if (this.params.context != null && this.params.context !== event.context) return - if (event.update.lastView === undefined && event.update.archivedFrom === undefined) return - - const toUpdate = this.params.context === event.context ? - this.result.getResult() - : this.result.getResult().filter(it => it.context === event.context) - if (toUpdate.length === 0) return - - for (const notification of toUpdate) { - this.result.update({ - ...notification, - ...event.update.lastView !== undefined ? { - read: event.update.lastView < notification.message.created - } : {}, - ...event.update.archivedFrom !== undefined ? 
{ - archived: event.update.archivedFrom < notification.message.created - } : {} - }) - } + private async onRemoveNotificationEvent (event: NotificationRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result } - private async onRemoveNotificationEvent(event: NotificationRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } - - const deleted = this.result.delete(event.message) + const deleted = this.result.delete(event.message) - if (deleted !== undefined) { - await this.notify() - } + if (deleted !== undefined) { + await this.notify() } + } - private async onRemoveNotificationContextEvent(event: NotificationContextRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } - - if (this.params.context != null && this.params.context !== event.context) return - - if (event.context === this.params.context) { - if (this.result.length === 0) return - this.result.deleteAll() - this.result.setHead(true) - this.result.setTail(true) - await this.notify() - } else { - const toRemove = this.result.getResult().filter(it => it.context === event.context) - if (toRemove.length === 0) return - - for (const notification of toRemove) { - this.result.delete(notification.message.id) - } - await this.notify() - } - + private async onRemoveNotificationContextEvent (event: NotificationContextRemovedEvent): Promise { + if (this.result instanceof Promise) { + this.result = await this.result } + if (this.params.context != null && this.params.context !== event.context) return + + if (event.context === this.params.context) { + if (this.result.length === 0) return + this.result.deleteAll() + this.result.setHead(true) + this.result.setTail(true) + await this.notify() + } else { + const toRemove = this.result.getResult().filter((it) => it.context === event.context) + if (toRemove.length === 0) return + + for (const notification of toRemove) { + this.result.delete(notification.message.id) + } + await this.notify() + } + } } diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts index fd95c80cf3b..a770f1b0ef5 100644 --- a/packages/query/src/query.ts +++ b/packages/query/src/query.ts @@ -1,11 +1,11 @@ -import { Direction, type ID, SortOrder } from '@hcengineering/communication-types' +import { SortingOrder, type ID } from '@hcengineering/communication-types' import { type ResponseEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' import { QueryResult } from './result' -import { defaultQueryParams, type FindParams, type Query, type QueryId } from './types' +import { defaultQueryParams, type FindParams, type PagedQuery, type QueryId } from './types' import { WindowImpl } from './window' -export class BaseQuery implements Query { +export class BaseQuery implements PagedQuery { protected result: QueryResult | Promise> private forward: Promise | T[] = [] private backward: Promise | T[] = [] @@ -24,20 +24,18 @@ export class BaseQuery implements Query { const limit = this.params.limit ?? defaultQueryParams.limit const findParams = { ...this.params, - excluded: this.params.excluded ?? defaultQueryParams.excluded, - direction: this.params.direction ?? defaultQueryParams.direction, - sort: this.params.sort ?? defaultQueryParams.sort, + sort: this.params.order ?? defaultQueryParams.order, limit: limit + 1 } const findPromise = this.find(findParams) this.result = findPromise.then((res) => { - const isTail = params.from ? 
res.length <= limit : params.sort === SortOrder.Desc - const isHead = params.from === undefined && params.sort === SortOrder.Asc + const isTail = false + const isHead = false if (!isTail) { res.pop() } - const qResult = new QueryResult(res, this.getObjectId) + const qResult = new QueryResult(res, (x) => this.getObjectId(x)) qResult.setTail(isTail) qResult.setHead(isHead) @@ -55,25 +53,25 @@ export class BaseQuery implements Query { // eslint-disable-next-line @typescript-eslint/no-unused-vars protected async find(params: FindParams): Promise { - /*Implement in subclass*/ + /* Implement in subclass */ return [] as T[] } // eslint-disable-next-line @typescript-eslint/no-unused-vars protected getObjectId(object: T): ID { - /*Implement in subclass*/ + /* Implement in subclass */ return '' as ID } // eslint-disable-next-line @typescript-eslint/no-unused-vars protected getObjectDate(object: T): Date { - /*Implement in subclass*/ - return new Date(0) as Date + /* Implement in subclass */ + return new Date(0) } // eslint-disable-next-line @typescript-eslint/no-unused-vars async onEvent(event: ResponseEvent): Promise { - /*Implement in subclass*/ + /* Implement in subclass */ } setCallback(callback: QueryCallback): void { @@ -99,7 +97,7 @@ export class BaseQuery implements Query { this.callback(window) } - async loadForward() { + async requestLoadNextPage(): Promise { if (this.result instanceof Promise) { this.result = await this.result } @@ -117,9 +115,8 @@ export class BaseQuery implements Query { ...this.params, from: this.getObjectDate(last), excluded: true, - direction: Direction.Forward, limit: limit + 1, - sort: SortOrder.Asc + order: SortingOrder.Ascending } const forward = this.find(findParams) @@ -139,7 +136,7 @@ export class BaseQuery implements Query { }) } - async loadBackward() { + async requestLoadPrevPage(): Promise { if (this.result instanceof Promise) { this.result = await this.result } @@ -149,17 +146,15 @@ export class BaseQuery implements Query { if (this.result.isHead()) return - const first = this.params.sort === SortOrder.Asc ? this.result.getFirst() : this.result.getLast() + const first = this.params.order === SortingOrder.Ascending ? this.result.getFirst() : this.result.getLast() if (first === undefined) return const limit = this.params.limit ?? 
defaultQueryParams.limit const findParams: FindParams = { ...this.params, from: this.getObjectDate(first), - excluded: true, - direction: Direction.Backward, limit: limit + 1, - sort: SortOrder.Desc + order: SortingOrder.Descending } const backward = this.find(findParams) @@ -172,7 +167,7 @@ export class BaseQuery implements Query { res.pop() } - if (this.params.sort === SortOrder.Asc) { + if (this.params.order === SortingOrder.Ascending) { const reversed = res.reverse() this.result.prepend(reversed) } else { diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts index 58e288c9676..5186ccebd1a 100644 --- a/packages/query/src/result.ts +++ b/packages/query/src/result.ts @@ -25,11 +25,11 @@ export class QueryResult { return this.head } - setHead(head: boolean) { + setHead(head: boolean): void { this.head = head } - setTail(tail: boolean) { + setTail(tail: boolean): void { this.tail = tail } @@ -47,7 +47,7 @@ export class QueryResult { return object } - deleteAll() { + deleteAll(): void { this.objectById.clear() } @@ -71,12 +71,12 @@ export class QueryResult { return Array.from(this.objectById.values())[this.objectById.size - 1] } - prepend(objects: T[]) { + prepend(objects: T[]): void { const current = Array.from(this.objectById.entries()) this.objectById = new Map([...objects.map<[ID, T]>((object) => [this.getId(object), object]), ...current]) } - append(objects: T[]) { + append(objects: T[]): void { for (const object of objects) { this.objectById.set(this.getId(object), object) } diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 31ddd46f956..cf4a6b8e4f2 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -1,33 +1,34 @@ import { type ResponseEvent } from '@hcengineering/communication-sdk-types' -import { Direction, SortOrder, type Window } from '@hcengineering/communication-types' +import { SortingOrder, type Window } from '@hcengineering/communication-types' -import { QueryResult } from './result.ts' +import { QueryResult } from './result' export type QueryId = number export const defaultQueryParams = { limit: 50, - excluded: false, - direction: Direction.Forward, - sort: SortOrder.Desc + order: SortingOrder.Ascending } -export type FindParams = Partial & { - from?: Date +export enum Direction { + Forward = 1, + Backward = -1 } -export interface Query { +export type FindParams = Partial + +export interface PagedQuery { readonly id: QueryId readonly params: P - onEvent(event: ResponseEvent): Promise + onEvent: (event: ResponseEvent) => Promise - loadForward(): Promise - loadBackward(): Promise + requestLoadNextPage: () => Promise + requestLoadPrevPage: () => Promise - unsubscribe(): Promise + unsubscribe: () => Promise - setCallback(callback: (window: Window) => void): void - removeCallback(): void - copyResult(): QueryResult | undefined + setCallback: (callback: (window: Window) => void) => void + removeCallback: () => void + copyResult: () => QueryResult | undefined } diff --git a/packages/query/src/window.ts b/packages/query/src/window.ts index 824df8615a8..a31d7026cb8 100644 --- a/packages/query/src/window.ts +++ b/packages/query/src/window.ts @@ -1,13 +1,13 @@ import type { Window } from '@hcengineering/communication-types' -import type { Query } from './types' +import type { PagedQuery } from './types' export class WindowImpl implements Window { constructor( private readonly result: T[], private readonly isTail: boolean, private readonly isHead: boolean, - private readonly query: Query + private readonly query: 
PagedQuery ) {} getResult(): T[] { @@ -16,12 +16,12 @@ export class WindowImpl implements Window { async loadNextPage(): Promise { if (!this.hasNextPage()) return - await this.query.loadForward() + await this.query.requestLoadNextPage() } async loadPrevPage(): Promise { if (!this.hasPrevPage()) return - await this.query.loadBackward() + await this.query.requestLoadPrevPage() } hasNextPage(): boolean { diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 4295ff71495..084cb2176c4 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.9", + "version": "0.1.47", "main": "./dist/index.js", "module": "./dist/index.js", "types": "./types/index.d.ts", @@ -20,7 +20,7 @@ }, "repository": { "type": "git", - "url": "git+https://github.com/hcengineering/communication.git", + "url": "git+https://github.com/hcengineering/communication.git" }, "publishConfig": { "registry": "https://npm.pkg.github.com" diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index 8d03ea2e708..36415edfb65 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -9,7 +9,9 @@ import type { NotificationContextUpdate, RichText, SocialID, - Notification + Notification, + FindMessagesGroupsParams, + MessagesGroup } from '@hcengineering/communication-types' import type { FindMessagesParams } from '@hcengineering/communication-types' @@ -18,7 +20,7 @@ import type { ResponseEvent } from './responseEvent.ts' export interface Client { createMessage(card: CardID, content: RichText, creator: SocialID): Promise removeMessage(card: CardID, id: MessageID): Promise - createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise + updateMessage(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise @@ -26,6 +28,8 @@ export interface Client { createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise + createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise + createNotification(message: MessageID, context: ContextID): Promise removeNotification(message: MessageID, context: ContextID): Promise @@ -36,6 +40,7 @@ export interface Client { onEvent(event: ResponseEvent): void findMessages(params: FindMessagesParams, queryId?: number): Promise + findMessagesGroups(params: FindMessagesGroupsParams): Promise findNotificationContexts(params: FindNotificationContextParams, queryId?: number): Promise findNotifications(params: FindNotificationsParams, queryId?: number): Promise diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 24d9a1cb852..fecd2478405 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -14,47 +14,55 @@ import type { BlobID, FindMessagesGroupsParams, MessagesGroup, - WorkspaceID + WorkspaceID, + PatchType, + Thread } from '@hcengineering/communication-types' export interface DbAdapter { createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise + removeMessage(card: CardID, id: MessageID, socialIds?: SocialID[]): Promise + removeMessages(card: CardID, fromId: 
MessageID, toId: MessageID): Promise - removeMessage(card: CardID, id: MessageID): Promise - removeMessages(card: CardID, ids: MessageID[]): Promise - - createPatch(card: CardID, message: MessageID, content: RichText, creator: SocialID, created: Date): Promise + createPatch( + card: CardID, + message: MessageID, + type: PatchType, + content: RichText, + creator: SocialID, + created: Date + ): Promise + removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise createMessagesGroup( card: CardID, blobId: BlobID, - from_id: MessageID, - to_id: MessageID, - from_date: Date, - to_date: Date, + fromDate: Date, + toDate: Date, + fromID: MessageID, + toID: MessageID, count: number ): Promise + removeMessagesGroup(card: CardID, blobId: BlobID): Promise createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID, created: Date): Promise - removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise - removeAttachment(message: MessageID, attachment: CardID): Promise - findMessages(query: FindMessagesParams): Promise + createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise + updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise - findMessagesGroups(query: FindMessagesGroupsParams): Promise + findMessages(params: FindMessagesParams): Promise + findMessagesGroups(params: FindMessagesGroupsParams): Promise + findThread(thread: CardID): Promise createNotification(message: MessageID, context: ContextID): Promise - removeNotification(message: MessageID, context: ContextID): Promise createContext(personalWorkspace: WorkspaceID, card: CardID, lastView?: Date, lastUpdate?: Date): Promise - updateContext(context: ContextID, update: NotificationContextUpdate): Promise - removeContext(context: ContextID): Promise findContexts( @@ -62,7 +70,6 @@ export interface DbAdapter { personalWorkspaces: WorkspaceID[], workspace?: WorkspaceID ): Promise - findNotifications( params: FindNotificationsParams, personalWorkspace: WorkspaceID, diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts index 29fbe8eb9e6..156a1ef67db 100644 --- a/packages/sdk-types/src/query.ts +++ b/packages/sdk-types/src/query.ts @@ -4,7 +4,13 @@ import type { Client } from './client' export type QueryClient = Pick< Client, - 'onEvent' | 'findMessages' | 'findNotificationContexts' | 'findNotifications' | 'unsubscribeQuery' | 'close' + | 'onEvent' + | 'findMessages' + | 'findMessagesGroups' + | 'findNotificationContexts' + | 'findNotifications' + | 'unsubscribeQuery' + | 'close' > export type QueryCallback = (window: Window) => void diff --git a/packages/sdk-types/src/requestEvent.ts b/packages/sdk-types/src/requestEvent.ts index d2f50edc5f7..ee305b6888c 100644 --- a/packages/sdk-types/src/requestEvent.ts +++ b/packages/sdk-types/src/requestEvent.ts @@ -5,24 +5,35 @@ import type { NotificationContextUpdate, RichText, SocialID, - MessagesGroup + MessagesGroup, + BlobID } from '@hcengineering/communication-types' export enum RequestEventType { CreateMessage = 'createMessage', RemoveMessage = 'removeMessage', RemoveMessages = 'removeMessages', + CreatePatch = 'createPatch', + RemovePatches = 'removePatches', + CreateReaction = 'createReaction', RemoveReaction = 'removeReaction', + CreateAttachment = 'createAttachment', RemoveAttachment = 'removeAttachment', + + CreateThread = 'createThread', + + 
CreateMessagesGroup = 'createMessagesGroup', + RemoveMessagesGroup = 'removeMessagesGroup', + CreateNotification = 'createNotification', RemoveNotification = 'removeNotification', + CreateNotificationContext = 'createNotificationContext', RemoveNotificationContext = 'removeNotificationContext', - UpdateNotificationContext = 'updateNotificationContext', - CreateMessagesGroup = 'createMessagesGroup' + UpdateNotificationContext = 'updateNotificationContext' } export type RequestEvent = @@ -30,6 +41,7 @@ export type RequestEvent = | RemoveMessageEvent | RemoveMessagesEvent | CreatePatchEvent + | RemovePatchesEvent | CreateReactionEvent | RemoveReactionEvent | CreateAttachmentEvent @@ -40,6 +52,8 @@ export type RequestEvent = | RemoveNotificationContextEvent | UpdateNotificationContextEvent | CreateMessagesGroupEvent + | RemoveMessagesGroupEvent + | CreateThreadEvent export interface CreateMessageEvent { type: RequestEventType.CreateMessage @@ -57,8 +71,8 @@ export interface RemoveMessageEvent { export interface RemoveMessagesEvent { type: RequestEventType.RemoveMessages card: CardID - messages: MessageID[] - silent?: boolean + fromId: MessageID + toId: MessageID } export interface CreatePatchEvent { @@ -69,6 +83,13 @@ export interface CreatePatchEvent { creator: SocialID } +export interface RemovePatchesEvent { + type: RequestEventType.RemovePatches + card: CardID + fromId: MessageID + toId: MessageID +} + export interface CreateReactionEvent { type: RequestEventType.CreateReaction card: CardID @@ -100,6 +121,13 @@ export interface RemoveAttachmentEvent { attachment: CardID } +export interface CreateThreadEvent { + type: RequestEventType.CreateThread + card: CardID + message: MessageID + thread: CardID +} + export interface CreateNotificationEvent { type: RequestEventType.CreateNotification message: MessageID @@ -135,6 +163,12 @@ export interface CreateMessagesGroupEvent { group: MessagesGroup } +export interface RemoveMessagesGroupEvent { + type: RequestEventType.RemoveMessagesGroup + card: CardID + blobId: BlobID +} + export type EventResult = CreateMessageResult | CreateNotificationContextResult | {} export interface CreateMessageResult { diff --git a/packages/sdk-types/src/responseEvent.ts b/packages/sdk-types/src/responseEvent.ts index f6b41d09fac..b1fd2cf2fb6 100644 --- a/packages/sdk-types/src/responseEvent.ts +++ b/packages/sdk-types/src/responseEvent.ts @@ -9,18 +9,19 @@ import type { Patch, Reaction, SocialID, - Notification + Notification, + Thread } from '@hcengineering/communication-types' export enum ResponseEventType { MessageCreated = 'messageCreated', MessageRemoved = 'messageRemoved', - MessagesRemoved = 'messagesRemoved', PatchCreated = 'patchCreated', ReactionCreated = 'reactionCreated', ReactionRemoved = 'reactionRemoved', AttachmentCreated = 'attachmentCreated', AttachmentRemoved = 'attachmentRemoved', + ThreadCreated = 'threadCreated', NotificationCreated = 'notificationCreated', NotificationRemoved = 'notificationRemoved', NotificationContextCreated = 'notificationContextCreated', @@ -31,7 +32,6 @@ export enum ResponseEventType { export type ResponseEvent = | MessageCreatedEvent | MessageRemovedEvent - | MessagesRemovedEvent | PatchCreatedEvent | ReactionCreatedEvent | ReactionRemovedEvent @@ -42,6 +42,7 @@ export type ResponseEvent = | NotificationContextCreatedEvent | NotificationContextRemovedEvent | NotificationContextUpdatedEvent + | ThreadCreatedEvent export interface MessageCreatedEvent { type: ResponseEventType.MessageCreated @@ -54,12 +55,6 @@ export interface 
MessageRemovedEvent { message: MessageID } -export interface MessagesRemovedEvent { - type: ResponseEventType.MessagesRemoved - card: CardID - messages: MessageID[] -} - export interface PatchCreatedEvent { type: ResponseEventType.PatchCreated card: CardID @@ -93,6 +88,11 @@ export interface AttachmentRemovedEvent { attachment: CardID } +export interface ThreadCreatedEvent { + type: ResponseEventType.ThreadCreated + thread: Thread +} + export interface NotificationCreatedEvent { type: ResponseEventType.NotificationCreated personalWorkspace: string diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 4a9d1f918d5..a1d4217488d 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -13,6 +13,7 @@ export interface ConnectionInfo { sessionId: string personalWorkspace: WorkspaceID socialIds: SocialID[] + isSystem: boolean } export interface ServerApi { diff --git a/packages/server/package.json b/packages/server/package.json index e215f29720b..bd98818cb87 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 5e37920c49d..473a2d7ec47 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -4,7 +4,8 @@ import { type Reaction, type Attachment, type SocialID, - type WorkspaceID + type WorkspaceID, + PatchType } from '@hcengineering/communication-types' import { type CreateAttachmentEvent, @@ -38,62 +39,65 @@ import { ResponseEventType, type CreateMessagesGroupEvent, type RemoveMessagesEvent, - type MessagesRemovedEvent + type ThreadCreatedEvent, + type CreateThreadEvent, + type ConnectionInfo, + type RemovePatchesEvent, + type RemoveMessagesGroupEvent } from '@hcengineering/communication-sdk-types' -export type Result = { +export interface Result { responseEvent?: ResponseEvent result: EventResult } -export type UserInfo = { - personalWorkspace: WorkspaceID - socialIds: SocialID[] -} - export class EventProcessor { constructor( private readonly db: DbAdapter, private readonly workspace: WorkspaceID ) {} - async process(user: UserInfo, event: RequestEvent): Promise { + async process(info: ConnectionInfo, event: RequestEvent): Promise { switch (event.type) { case RequestEventType.CreateMessage: - return await this.createMessage(event, user) + return await this.createMessage(event, info) case RequestEventType.RemoveMessage: - return await this.removeMessage(event, user) + return await this.removeMessage(event, info) case RequestEventType.RemoveMessages: - return await this.removeMessages(event, user) + return await this.removeMessages(event, info) case RequestEventType.CreatePatch: - return await this.createPatch(event, user) + return await this.createPatch(event, info) case RequestEventType.CreateReaction: - return await this.createReaction(event, user) + return await this.createReaction(event, info) case RequestEventType.RemoveReaction: - return await this.removeReaction(event, user) + return await this.removeReaction(event, info) case RequestEventType.CreateAttachment: - return await this.createAttachment(event, user) + return await this.createAttachment(event, info) case RequestEventType.RemoveAttachment: - return await this.removeAttachment(event, user) + return await 
this.removeAttachment(event, info) case RequestEventType.CreateNotification: - return await this.createNotification(event, user) + return await this.createNotification(event, info) case RequestEventType.RemoveNotification: - return await this.removeNotification(event, user) + return await this.removeNotification(event, info) case RequestEventType.CreateNotificationContext: - return await this.createNotificationContext(event, user) + return await this.createNotificationContext(event, info) case RequestEventType.RemoveNotificationContext: - return await this.removeNotificationContext(event, user) + return await this.removeNotificationContext(event, info) case RequestEventType.UpdateNotificationContext: - return await this.updateNotificationContext(event, user) + return await this.updateNotificationContext(event, info) case RequestEventType.CreateMessagesGroup: - return await this.createMessagesGroup(event, user) + return await this.createMessagesGroup(event, info) + case RequestEventType.CreateThread: + return await this.createThread(event, info) + case RequestEventType.RemovePatches: + return await this.removePatches(event, info) + case RequestEventType.RemoveMessagesGroup: + return await this.removeMessagesGroup(event, info) } } - private async createMessage(event: CreateMessageEvent, user: UserInfo): Promise { - if (!user.socialIds.includes(event.creator)) { - throw new Error('Forbidden') - } + private async createMessage(event: CreateMessageEvent, info: ConnectionInfo): Promise { + this.checkCreator(info, event.creator) const created = new Date() const id = await this.db.createMessage(event.card, event.content, event.creator, created) @@ -102,8 +106,7 @@ export class EventProcessor { card: event.card, content: event.content, creator: event.creator, - created: created, - edited: created, + created, reactions: [], attachments: [] } @@ -117,18 +120,17 @@ export class EventProcessor { } } - private async createPatch(event: CreatePatchEvent, user: UserInfo): Promise { - if (!user.socialIds.includes(event.creator)) { - throw new Error('Forbidden') - } + private async createPatch(event: CreatePatchEvent, info: ConnectionInfo): Promise { + this.checkCreator(info, event.creator) const created = new Date() - await this.db.createPatch(event.card, event.message, event.content, event.creator, created) + await this.db.createPatch(event.card, event.message, PatchType.update, event.content, event.creator, created) const patch: Patch = { + type: PatchType.update, message: event.message, content: event.content, creator: event.creator, - created: created + created } const responseEvent: PatchCreatedEvent = { type: ResponseEventType.PatchCreated, @@ -141,16 +143,9 @@ export class EventProcessor { } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async removeMessage(event: RemoveMessageEvent, _: UserInfo): Promise { - const res = await this.db.removeMessage(event.card, event.message) - - if (res === undefined) { - return { - responseEvent: undefined, - result: { id: res } - } - } + private async removeMessage(event: RemoveMessageEvent, info: ConnectionInfo): Promise { + const socialIds = info.isSystem ? 
undefined : info.socialIds + await this.db.removeMessage(event.card, event.message, socialIds) const responseEvent: MessageRemovedEvent = { type: ResponseEventType.MessageRemoved, @@ -160,37 +155,34 @@ export class EventProcessor { return { responseEvent, - result: { id: res } + result: {} } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async removeMessages(event: RemoveMessagesEvent, _: UserInfo): Promise { - const ids = await this.db.removeMessages(event.card, event.messages) + private async removeMessages(event: RemoveMessagesEvent, info: ConnectionInfo): Promise { + if (!info.isSystem) { + throw new Error('Forbidden') + } + await this.db.removeMessages(event.card, event.fromId, event.toId) - if (event.silent === true) { - return { - responseEvent: undefined, - result: { ids } - } + return { + result: {} } + } - const responseEvent: MessagesRemovedEvent = { - type: ResponseEventType.MessagesRemoved, - card: event.card, - messages: ids + private async removePatches(event: RemovePatchesEvent, info: ConnectionInfo): Promise { + if (!info.isSystem) { + throw new Error('Forbidden') } + await this.db.removePatches(event.card, event.fromId, event.toId) return { - responseEvent, - result: { ids } + result: {} } } - private async createReaction(event: CreateReactionEvent, user: UserInfo): Promise { - if (!user.socialIds.includes(event.creator)) { - throw new Error('Forbidden') - } + private async createReaction(event: CreateReactionEvent, info: ConnectionInfo): Promise { + this.checkCreator(info, event.creator) const created = new Date() await this.db.createReaction(event.card, event.message, event.reaction, event.creator, created) @@ -198,7 +190,7 @@ export class EventProcessor { message: event.message, reaction: event.reaction, creator: event.creator, - created: created + created } const responseEvent: ReactionCreatedEvent = { type: ResponseEventType.ReactionCreated, @@ -211,10 +203,8 @@ export class EventProcessor { } } - private async removeReaction(event: RemoveReactionEvent, user: UserInfo): Promise { - if (!user.socialIds.includes(event.creator)) { - throw new Error('Forbidden') - } + private async removeReaction(event: RemoveReactionEvent, info: ConnectionInfo): Promise { + this.checkCreator(info, event.creator) await this.db.removeReaction(event.card, event.message, event.reaction, event.creator) const responseEvent: ReactionRemovedEvent = { type: ResponseEventType.ReactionRemoved, @@ -229,8 +219,8 @@ export class EventProcessor { } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async createAttachment(event: CreateAttachmentEvent, _: UserInfo): Promise { + private async createAttachment(event: CreateAttachmentEvent, info: ConnectionInfo): Promise { + this.checkCreator(info, event.creator) const created = new Date() await this.db.createAttachment(event.message, event.card, event.creator, created) @@ -238,7 +228,7 @@ export class EventProcessor { message: event.message, card: event.card, creator: event.creator, - created: created + created } const responseEvent: AttachmentCreatedEvent = { type: ResponseEventType.AttachmentCreated, @@ -252,8 +242,8 @@ export class EventProcessor { } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async removeAttachment(event: RemoveAttachmentEvent, _: UserInfo): Promise { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeAttachment(event: RemoveAttachmentEvent, _: ConnectionInfo): Promise { await this.db.removeAttachment(event.message, 
event.card) const responseEvent: AttachmentRemovedEvent = { type: ResponseEventType.AttachmentRemoved, @@ -267,8 +257,8 @@ export class EventProcessor { } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async createNotification(event: CreateNotificationEvent, _: UserInfo): Promise { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createNotification(event: CreateNotificationEvent, _: ConnectionInfo): Promise { await this.db.createNotification(event.message, event.context) return { @@ -276,12 +266,12 @@ export class EventProcessor { } } - private async removeNotification(event: RemoveNotificationEvent, user: UserInfo): Promise { + private async removeNotification(event: RemoveNotificationEvent, info: ConnectionInfo): Promise { await this.db.removeNotification(event.message, event.context) const responseEvent: NotificationRemovedEvent = { type: ResponseEventType.NotificationRemoved, - personalWorkspace: user.personalWorkspace, + personalWorkspace: info.personalWorkspace, message: event.message, context: event.context } @@ -291,14 +281,17 @@ export class EventProcessor { } } - private async createNotificationContext(event: CreateNotificationContextEvent, user: UserInfo): Promise { - const id = await this.db.createContext(user.personalWorkspace, event.card, event.lastView, event.lastUpdate) + private async createNotificationContext( + event: CreateNotificationContextEvent, + info: ConnectionInfo + ): Promise { + const id = await this.db.createContext(info.personalWorkspace, event.card, event.lastView, event.lastUpdate) const responseEvent: NotificationContextCreatedEvent = { type: ResponseEventType.NotificationContextCreated, context: { id, workspace: this.workspace, - personalWorkspace: user.personalWorkspace, + personalWorkspace: info.personalWorkspace, card: event.card, lastView: event.lastView, lastUpdate: event.lastUpdate @@ -310,11 +303,14 @@ export class EventProcessor { } } - private async removeNotificationContext(event: RemoveNotificationContextEvent, user: UserInfo): Promise { + private async removeNotificationContext( + event: RemoveNotificationContextEvent, + info: ConnectionInfo + ): Promise { await this.db.removeContext(event.context) const responseEvent: NotificationContextRemovedEvent = { type: ResponseEventType.NotificationContextRemoved, - personalWorkspace: user.personalWorkspace, + personalWorkspace: info.personalWorkspace, context: event.context } return { @@ -323,12 +319,12 @@ export class EventProcessor { } } - async updateNotificationContext(event: UpdateNotificationContextEvent, user: UserInfo): Promise { + async updateNotificationContext(event: UpdateNotificationContextEvent, info: ConnectionInfo): Promise { await this.db.updateContext(event.context, event.update) const responseEvent: NotificationContextUpdatedEvent = { type: ResponseEventType.NotificationContextUpdated, - personalWorkspace: user.personalWorkspace, + personalWorkspace: info.personalWorkspace, context: event.context, update: event.update } @@ -338,14 +334,54 @@ export class EventProcessor { } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - async createMessagesGroup(event: CreateMessagesGroupEvent, _: UserInfo): Promise { - const { fromId, toId, fromDate, toDate, count } = event.group - await this.db.createMessagesGroup(event.group.card, event.group.blobId, fromId, toId, fromDate, toDate, count) + async createMessagesGroup(event: CreateMessagesGroupEvent, info: ConnectionInfo): Promise { + if (!info.isSystem) { + throw new 
Error('Forbidden') + } + const { fromDate, toDate, count, fromId, toId, card, blobId } = event.group + await this.db.createMessagesGroup(card, blobId, fromDate, toDate, fromId, toId, count) + + return { + responseEvent: undefined, + result: {} + } + } + + async removeMessagesGroup(event: RemoveMessagesGroupEvent, info: ConnectionInfo): Promise { + if (!info.isSystem) { + throw new Error('Forbidden') + } + await this.db.removeMessagesGroup(event.card, event.blobId) return { responseEvent: undefined, result: {} } } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createThread(event: CreateThreadEvent, _: ConnectionInfo): Promise { + const date = new Date() + await this.db.createThread(event.card, event.message, event.thread, date) + const responseEvent: ThreadCreatedEvent = { + type: ResponseEventType.ThreadCreated, + thread: { + card: event.card, + thread: event.thread, + message: event.message, + repliesCount: 0, + lastReply: date + } + } + return { + responseEvent, + result: {} + } + } + + private checkCreator(info: ConnectionInfo, creator: SocialID): void { + if (!info.socialIds.includes(creator) && !info.isSystem) { + throw new Error('Forbidden') + } + } } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 8e1c99d13b7..586d47b99b0 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -15,7 +15,7 @@ import type { ServerApi } from '@hcengineering/communication-sdk-types' -import { Manager, type BroadcastSessionsFunc } from './manager.ts' +import { Manager, type BroadcastSessionsFunc } from './manager' export class Api implements ServerApi { private readonly manager: Manager @@ -26,7 +26,7 @@ export class Api implements ServerApi { private readonly db: DbAdapter, private readonly broadcast: BroadcastSessionsFunc ) { - this.manager = new Manager(this.ctx, db, this.workspace, broadcast) + this.manager = new Manager(this.ctx, this.db, this.workspace, this.broadcast) } static async create( @@ -35,7 +35,7 @@ export class Api implements ServerApi { dbUrl: string, broadcast: BroadcastSessionsFunc ): Promise { - const db = await createDbAdapter(dbUrl, workspace) + const db = await createDbAdapter(dbUrl, workspace, ctx, { withLogs: true }) return new Api(ctx, workspace, db, broadcast) } diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index b77c01c7416..08ac1a523e7 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -18,17 +18,17 @@ import type { MessagesGroup, WorkspaceID } from '@hcengineering/communication-types' - -import { Triggers } from './triggers.ts' -import { EventProcessor } from './eventProcessor.ts' import type { MeasureContext } from '@hcengineering/core' +import { Triggers } from './triggers' +import { EventProcessor } from './eventProcessor' + type QueryId = number | string type QueryType = 'message' | 'notification' | 'context' export type BroadcastSessionsFunc = (ctx: MeasureContext, sessionIds: string[], result: any) => void -type SessionInfo = { +interface SessionInfo { personalWorkspace: string messageQueries: Map notificationQueries: Map @@ -36,8 +36,8 @@ type SessionInfo = { } export class Manager { - private dataBySessionId: Map = new Map() - private triggers: Triggers + private readonly dataBySessionId = new Map() + private readonly triggers: Triggers private readonly eventProcessor: EventProcessor constructor( @@ -58,17 +58,16 @@ export class Manager { return result } - async findMessagesGroups(info: ConnectionInfo, params: 
FindMessagesGroupsParams): Promise { + async findMessagesGroups(_: ConnectionInfo, params: FindMessagesGroupsParams): Promise { return await this.db.findMessagesGroups(params) } async event(info: ConnectionInfo, event: RequestEvent): Promise { - const { result, responseEvent } = await this.eventProcessor.process( - { personalWorkspace: info.personalWorkspace, socialIds: info.socialIds }, - event - ) + const eventResult = await this.eventProcessor.process(info, event) + + const { result, responseEvent } = eventResult if (responseEvent !== undefined) { - void this.next(responseEvent) + void this.next(info, responseEvent) } return result } @@ -86,7 +85,7 @@ export class Manager { } if (type === 'message') { - data.messageQueries.set(queryId, params) + data.messageQueries.set(queryId, params as FindMessagesParams) } else if (type === 'notification') { data.notificationQueries.set(queryId, params) } else if (type === 'context') { @@ -107,10 +106,10 @@ export class Manager { this.dataBySessionId.delete(sessionId) } - async next(event: ResponseEvent): Promise { + async next(info: ConnectionInfo, event: ResponseEvent): Promise { await this.responseEvent(event) - const derived = await this.triggers.process(event) - await Promise.all(derived.map((it) => this.next(it))) + const derived = await this.triggers.process(event, info) + await Promise.all(derived.map((it) => this.next(info, it))) } private async responseEvent(event: ResponseEvent): Promise { @@ -143,8 +142,6 @@ export class Manager { { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) - case ResponseEventType.MessagesRemoved: - return this.matchMessagesQuery({ card: event.card }, Array.from(info.messageQueries.values())) case ResponseEventType.ReactionCreated: return this.matchMessagesQuery( { card: event.card, id: event.reaction.message }, @@ -165,6 +162,11 @@ export class Manager { { card: event.card, id: event.message }, Array.from(info.messageQueries.values()) ) + case ResponseEventType.ThreadCreated: + return this.matchMessagesQuery( + { card: event.thread.card, id: event.thread.message }, + Array.from(info.messageQueries.values()) + ) case ResponseEventType.NotificationCreated: return ( info.personalWorkspace === event.personalWorkspace && diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index c4f61b597a0..80c8b1d9f32 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -1,12 +1,12 @@ import { - ResponseEventType, - type DbAdapter, type MessageCreatedEvent, - type NotificationContextCreatedEvent, - type NotificationCreatedEvent, - type ResponseEvent + type DbAdapter, + type ResponseEvent, + ResponseEventType, + type MessageRemovedEvent, + type ConnectionInfo } from '@hcengineering/communication-sdk-types' -import type { NotificationContext, ContextID, CardID, WorkspaceID } from '@hcengineering/communication-types' +import { type WorkspaceID, PatchType, type Patch } from '@hcengineering/communication-types' export class Triggers { constructor( @@ -14,103 +14,65 @@ export class Triggers { private readonly workspace: WorkspaceID ) {} - async process(event: ResponseEvent): Promise { + async process(event: ResponseEvent, info: ConnectionInfo): Promise { switch (event.type) { case ResponseEventType.MessageCreated: - return this.createNotifications(event) + return await this.onMessageCreated(event) + case ResponseEventType.MessageRemoved: + return await this.onMessageRemoved(event, info) } - return [] } - private async createNotifications(event: 
MessageCreatedEvent): Promise { - const card = event.message.card as any as CardID - const subscribedPersonalWorkspaces = [ - 'cd0aba36-1c4f-4170-95f2-27a12a5415f7', - 'cd0aba36-1c4f-4170-95f2-27a12a5415f8' - ] as WorkspaceID[] - - const res: ResponseEvent[] = [] - const contexts = await this.db.findContexts({ card }, [], this.workspace) - - res.push(...(await this.updateNotificationContexts(event.message.created, contexts))) - - for (const personalWorkspace of subscribedPersonalWorkspaces) { - const existsContext = contexts.find( - (it) => it.card === card && it.personalWorkspace === personalWorkspace && this.workspace === it.workspace - ) - const contextId = await this.getOrCreateContextId( - personalWorkspace, - card, - res, - event.message.created, - existsContext - ) + async onMessageRemoved(event: MessageRemovedEvent, info: ConnectionInfo): Promise { + const { card } = event + const thread = await this.db.findThread(card) + if (thread === undefined) return [] - await this.db.createNotification(event.message.id, contextId) + const date = new Date() + const socialId = info.socialIds[0] - const resultEvent: NotificationCreatedEvent = { - type: ResponseEventType.NotificationCreated, - personalWorkspace, - notification: { - context: contextId, - message: event.message, - read: false, - archived: false - } - } - res.push(resultEvent) + const patch: Patch = { + message: thread.message, + type: PatchType.removeReply, + content: thread.thread, + creator: socialId, + created: date } + await this.db.updateThread(thread.thread, date, 'decrement') + await this.db.createPatch(thread.card, patch.message, patch.type, patch.content, patch.creator, patch.created) - return res - } - - private async getOrCreateContextId( - personalWorkspace: WorkspaceID, - card: CardID, - res: ResponseEvent[], - lastUpdate: Date, - context?: NotificationContext - ): Promise { - if (context !== undefined) { - return context.id - } else { - const contextId = await this.db.createContext(personalWorkspace, card, undefined, lastUpdate) - const newContext = { - id: contextId, - card, - workspace: this.workspace, - personalWorkspace - } - const resultEvent: NotificationContextCreatedEvent = { - type: ResponseEventType.NotificationContextCreated, - context: newContext + return [ + { + type: ResponseEventType.PatchCreated, + card: thread.card, + patch } + ] + } - res.push(resultEvent) + async onMessageCreated(event: MessageCreatedEvent): Promise { + const { message } = event + const thread = await this.db.findThread(message.card) + if (thread === undefined) return [] - return contextId + const date = new Date() + const patch: Patch = { + message: thread.message, + type: PatchType.addReply, + content: thread.thread, + creator: message.creator, + created: date } - } + await this.db.updateThread(thread.thread, date, 'increment') + await this.db.createPatch(thread.card, patch.message, patch.type, patch.content, patch.creator, patch.created) - private async updateNotificationContexts( - lastUpdate: Date, - contexts: NotificationContext[] - ): Promise { - const res: ResponseEvent[] = [] - for (const context of contexts) { - if (context.lastUpdate === undefined || context.lastUpdate < lastUpdate) { - await this.db.updateContext(context.id, { lastUpdate }) - res.push({ - type: ResponseEventType.NotificationContextUpdated, - personalWorkspace: context.personalWorkspace, - context: context.id, - update: { - lastUpdate - } - }) + return [ + { + type: ResponseEventType.PatchCreated, + card: thread.card, + patch } - } - return res + ] } } 
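Note on the trigger rewrite above: the old per-subscriber notification fan-out is replaced by thread reply patches. When a message is created or removed in a card that backs a thread, the trigger updates the thread counters in the database and records an addReply/removeReply patch, which is then broadcast as a PatchCreated event. The sketch below is a minimal, hypothetical illustration of how a consumer could fold such patches into cached thread state. The local Patch, PatchType, and ThreadInfo shapes mirror the types introduced later in this series, but the applyThreadPatch helper and its field handling are assumptions for illustration, not part of the SDK.

// apply-thread-patch.ts (standalone TypeScript sketch, assumed shapes)
enum PatchType {
  addReply = 'addReply',
  removeReply = 'removeReply'
}

interface Patch {
  message: string
  type: PatchType
  content: string // for reply patches this carries the thread card id
  creator: string
  created: Date
}

interface ThreadInfo {
  thread: string
  repliesCount: number
  lastReply: Date
}

// Hypothetical helper: fold one reply patch into cached thread info.
function applyThreadPatch (info: ThreadInfo, patch: Patch): ThreadInfo {
  if (patch.content !== info.thread) return info
  switch (patch.type) {
    case PatchType.addReply:
      return { ...info, repliesCount: info.repliesCount + 1, lastReply: patch.created }
    case PatchType.removeReply:
      return { ...info, repliesCount: Math.max(0, info.repliesCount - 1) }
    default:
      return info
  }
}

// Usage: two replies added, one removed, leaving a count of 1.
let info: ThreadInfo = { thread: 'thread-card-1', repliesCount: 0, lastReply: new Date(0) }
const now = new Date()
info = applyThreadPatch(info, { message: 'm1', type: PatchType.addReply, content: 'thread-card-1', creator: 's1', created: now })
info = applyThreadPatch(info, { message: 'm2', type: PatchType.addReply, content: 'thread-card-1', creator: 's2', created: now })
info = applyThreadPatch(info, { message: 'm2', type: PatchType.removeReply, content: 'thread-card-1', creator: 's2', created: now })
console.log(info.repliesCount) // 1

This mirrors the server-side behaviour only at the level shown in the diff (increment on create, decrement on remove, lastReply refreshed on create); how the real query layer merges PatchCreated events is not specified here.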
diff --git a/packages/core/package.json b/packages/shared/package.json similarity index 68% rename from packages/core/package.json rename to packages/shared/package.json index ba3abbb3724..ac4216412de 100644 --- a/packages/core/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { - "name": "@hcengineering/communication-core", - "version": "0.1.9", + "name": "@hcengineering/communication-shared", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -10,13 +10,15 @@ "types/**/*.d.ts" ], "devDependencies": { - "@types/bun": "^1.1.14" + "@types/bun": "^1.1.14", + "@types/js-yaml": "^4.0.9" }, "peerDependencies": { "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/communication-types": "workspace:*" + "@hcengineering/communication-types": "workspace:*", + "js-yaml": "^4.1.0" }, "repository": { "type": "git", diff --git a/packages/shared/src/files.ts b/packages/shared/src/files.ts new file mode 100644 index 00000000000..0d73c081043 --- /dev/null +++ b/packages/shared/src/files.ts @@ -0,0 +1,58 @@ +import type { + FileMessage, + FileMetadata, + MessagesGroup, + ParsedFile, + WorkspaceID +} from '@hcengineering/communication-types' +import yaml from 'js-yaml' + +import { retry, type RetryOptions } from './retry' + +export async function loadGroupFile( + workspace: WorkspaceID, + filesUrl: string, + group: MessagesGroup, + options: RetryOptions +): Promise { + const url = getFileUrl(workspace, filesUrl, group.blobId) + + const file = await retry(() => fetchFile(url), options) + const [metadata, messages] = yaml.loadAll(file) as [FileMetadata, FileMessage[]] + + return { + metadata, + messages: messages.map((message) => ({ + id: message.id, + card: metadata.card, + content: message.content, + edited: message.edited, + creator: message.creator, + created: message.created, + attachments: [], + reactions: message.reactions.map((reaction) => ({ + message: message.id, + reaction: reaction.reaction, + creator: reaction.creator, + created: reaction.created + })) + })) + } +} + +async function fetchFile(url: string): Promise { + const res = await fetch(url) + + if (!res.ok) { + throw new Error(`Failed to fetch file: ${res.statusText}`) + } + + return await res.text() +} + +function getFileUrl(workspace: WorkspaceID, urlTemplate: string, file: string): string { + return urlTemplate + .replaceAll(':filename', encodeURIComponent(file)) + .replaceAll(':workspace', encodeURIComponent(workspace)) + .replaceAll(':blobId', encodeURIComponent(file)) +} diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts new file mode 100644 index 00000000000..4cb8058974e --- /dev/null +++ b/packages/shared/src/index.ts @@ -0,0 +1,3 @@ +export * from './message.ts' +export * from './retry.ts' +export * from './files.ts' diff --git a/packages/shared/src/message.ts b/packages/shared/src/message.ts new file mode 100644 index 00000000000..394acc4f22e --- /dev/null +++ b/packages/shared/src/message.ts @@ -0,0 +1,24 @@ +import type { MessageID } from '@hcengineering/communication-types' + +let lastTimestamp = 0 +let counter = 0n + +export function generateMessageId(): MessageID { + const timestamp = Math.floor(Date.now() / 1000) + + if (timestamp !== lastTimestamp) { + lastTimestamp = timestamp + counter = 0n + } + + const id = (BigInt(timestamp) << 20n) | counter + counter++ + + return id.toString() as MessageID +} + +export function parseMessageId(messageId: MessageID): Date { + const timestamp = Number(BigInt(messageId) >> 20n) + + 
return new Date(timestamp * 1000) +} diff --git a/packages/shared/src/retry.ts b/packages/shared/src/retry.ts new file mode 100644 index 00000000000..eb533dca696 --- /dev/null +++ b/packages/shared/src/retry.ts @@ -0,0 +1,20 @@ +export interface RetryOptions { + retries: number + delay?: number +} + +export async function retry(op: () => Promise, { retries, delay }: RetryOptions): Promise { + let error: any + while (retries > 0) { + retries-- + try { + return await op() + } catch (err: any) { + error = err + if (retries !== 0 && delay !== undefined && delay > 0) { + await new Promise((resolve) => setTimeout(resolve, delay)) + } + } + } + throw error +} diff --git a/packages/core/tsconfig.json b/packages/shared/tsconfig.json similarity index 100% rename from packages/core/tsconfig.json rename to packages/shared/tsconfig.json diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 991c97f1dbd..4f2d9621c23 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index c5a7ed5eb85..75620ee2501 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -73,8 +73,6 @@ export class SqliteAdapter { workspace: string, card: CardID, blobId: BlobID, - from_id: MessageID, - to_id: MessageID, from_date: Date, to_date: Date, count: number diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts index f02d0b33ab1..28b1b031007 100644 --- a/packages/sqlite-wasm/src/db/message.ts +++ b/packages/sqlite-wasm/src/db/message.ts @@ -3,10 +3,10 @@ import { type MessageID, type CardID, type FindMessagesParams, - SortOrder, + SortingOrder, type SocialID, type RichText, - Direction, type Reaction, type Attachment + type Reaction, type Attachment } from '@hcengineering/communication-types' import {BaseDb} from './base.ts' @@ -124,7 +124,7 @@ export class MessagesDb extends BaseDb { const where = this.buildMessageWhere(workspace, params) const groupBy = `GROUP BY m.id` - const orderBy = params.sort ? `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` : '' + const orderBy = params.order ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' const limit = params.limit ? ` LIMIT ${params.limit}` : '' const sql = [select, where, groupBy, orderBy, limit].join(' ') @@ -143,19 +143,20 @@ export class MessagesDb extends BaseDb { where.push(`m.id = '${params.id}'`) } - if (params.from != null) { - const exclude = params.excluded ?? false - const direction = params.direction ?? Direction.Forward - const getOperator = () => { - if (exclude) { - return direction === Direction.Forward ? '>' : '<' - } else { - return direction === Direction.Forward ? '>=' : '<=' - } - } - - where.push(`m.created ${getOperator()} ${params.from}`) - } + //TODO: FIX ME + // if (params.from != null) { + // const exclude = params.excluded ?? false + // const direction = params.direction ?? Direction.Forward + // const getOperator = () => { + // if (exclude) { + // return direction === Direction.Forward ? '>' : '<' + // } else { + // return direction === Direction.Forward ? 
'>=' : '<=' + // } + // } + // + // where.push(`m.created ${getOperator()} ${params.from}`) + // } return `WHERE ${where.join(' AND ')}` } diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index d1daf8ce17c..9decaa56757 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -3,7 +3,7 @@ import { type ContextID, type CardID, type NotificationContext, - type FindNotificationContextParams, SortOrder, + type FindNotificationContextParams, SortingOrder, type FindNotificationsParams, type Notification, type NotificationContextUpdate } from '@hcengineering/communication-types' @@ -130,7 +130,7 @@ export class NotificationsDb extends BaseDb { `; const where = this.buildNotificationWhere(params, personalWorkspace, workspace) const groupBy = `GROUP BY n.message_id, n.context_id, m.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update`; - const orderBy = `ORDER BY m.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` + const orderBy = `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` const limit = params.limit ? ` LIMIT ${params.limit}` : '' const sql = [select, where, groupBy, orderBy, limit].join(' ') diff --git a/packages/types/package.json b/packages/types/package.json index 988ac8491dd..2f07b5b9cf8 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.9", + "version": "0.1.47", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts new file mode 100644 index 00000000000..7cc12760645 --- /dev/null +++ b/packages/types/src/file.ts @@ -0,0 +1,36 @@ +import type { CardID, Message, MessageID, RichText, SocialID } from './message' + +export interface FileMetadata { + card: CardID + title: string + fromDate: Date + toDate: Date +} + +export interface FileMessage { + id: MessageID + content: RichText + edited?: Date + creator: SocialID + created: Date + reactions: FileReaction[] + thread?: FileThread +} + +export interface FileReaction { + reaction: string + creator: SocialID + created: Date +} + +export interface FileThread { + thread: CardID + repliesCount: number + lastReply: Date + replied: SocialID[] +} + +export interface ParsedFile { + metadata: FileMetadata + messages: Message[] +} diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 0d1134f9e28..d8fd44fd1c1 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -1,3 +1,4 @@ export * from './message.ts' export * from './notification.ts' export * from './query.ts' +export * from './file.ts' diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index e848df14a7c..d299ae7fe94 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -7,19 +7,18 @@ export type SocialID = PersonId export type WorkspaceID = WorkspaceUuid export type RichText = string -export type ID = string | number -export type MessageID = number & { message: true } +export type ID = string +export type MessageID = string & { message: true } -interface Object { - creator: SocialID - created: Date -} - -export interface Message extends Object { +export interface Message { id: MessageID card: CardID content: RichText + creator: SocialID + created: Date + edited?: Date + thread?: Thread reactions: Reaction[] attachments: Attachment[] } @@ -32,19 
+31,43 @@ export interface MessagesGroup { fromDate: Date toDate: Date count: number + patches: Patch[] } -export interface Patch extends Object { +export interface Patch { message: MessageID - content: RichText + type: PatchType + content: string + creator: SocialID + created: Date } -export interface Reaction extends Object { +export enum PatchType { + update = 'update', + addReaction = 'addReaction', + removeReaction = 'removeReaction', + addReply = 'addReply', + removeReply = 'removeReply' +} + +export interface Reaction { message: MessageID reaction: string + creator: SocialID + created: Date } -export interface Attachment extends Object { +export interface Attachment { message: MessageID card: CardID + creator: SocialID + created: Date +} + +export interface Thread { + card: CardID + message: MessageID + thread: CardID + repliesCount: number + lastReply: Date } diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 63e08dca8b5..34dce7a0ce8 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -1,15 +1,15 @@ +import { SortingOrder } from '@hcengineering/core' + import type { BlobID, CardID, MessageID } from './message' import type { ContextID } from './notification' -export enum SortOrder { - Asc = 1, - Desc = -1 -} +export { SortingOrder } -export enum Direction { - Backward = 1, - Forward = -1 -} +export type ComparisonOperator = 'less' | 'lessOrEqual' | 'greater' | 'greaterOrEqual' | 'notEqual' + +type Exclusive = { + [K in keyof T]: Record & Partial, never>> +}[keyof T] export interface Window { getResult(): T[] @@ -22,16 +22,14 @@ export interface Window { } interface FindParams { - from?: Date - excluded?: boolean - direction?: Direction - sort?: SortOrder + order?: SortingOrder limit?: number } export interface FindMessagesParams extends FindParams { id?: MessageID - card?: CardID + card: CardID + created?: Exclusive> | Date } export interface FindNotificationsParams extends FindParams { @@ -39,6 +37,7 @@ export interface FindNotificationsParams extends FindParams { message?: MessageID read?: boolean archived?: boolean + created?: Exclusive> | Date } export interface FindNotificationContextParams extends FindParams { @@ -46,20 +45,10 @@ export interface FindNotificationContextParams extends FindParams { card?: CardID } -export type ComparisonOperator = 'less' | 'lessOrEqual' | 'greater' | 'greaterOrEqual' | 'notEqual' - -type Exclusive = { - [K in keyof T]: Record & Partial, never>> -}[keyof T] - -export interface FindMessagesGroupsParams { +export interface FindMessagesGroupsParams extends FindParams { card?: CardID blobId?: BlobID - fromId?: Exclusive> | MessageID - toId?: Exclusive> | MessageID - fromDate?: Exclusive> | Date - toDate?: Exclusive> | Date - limit?: number - sortBy?: 'fromId' | 'toId' | 'fromDate' | 'toDate' - sort?: SortOrder + fromDate?: Partial> | Date + toDate?: Partial> | Date + orderBy?: 'fromDate' | 'toDate' } From c8f489b3a8a1d661e506a3464cca24f8f2748200 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 27 Feb 2025 11:23:03 +0400 Subject: [PATCH 046/636] Add copyright (#32) --- packages/client-query/src/index.ts | 15 +++++++++++++++ packages/client-query/src/query.ts | 15 +++++++++++++++ packages/client-sqlite/src/client.ts | 15 +++++++++++++++ packages/client-sqlite/src/index.ts | 15 +++++++++++++++ packages/client-ws/src/client.ts | 15 +++++++++++++++ packages/client-ws/src/connection.ts | 15 +++++++++++++++ packages/client-ws/src/index.ts | 15 +++++++++++++++ packages/cockroach/src/adapter.ts | 15 
+++++++++++++++ packages/cockroach/src/connection.ts | 15 +++++++++++++++ packages/cockroach/src/db/base.ts | 15 +++++++++++++++ packages/cockroach/src/db/message.ts | 15 +++++++++++++++ packages/cockroach/src/db/notification.ts | 15 +++++++++++++++ packages/cockroach/src/db/schema.ts | 15 +++++++++++++++ packages/cockroach/src/db/utils.ts | 15 +++++++++++++++ packages/cockroach/src/index.ts | 15 +++++++++++++++ packages/cockroach/src/types.ts | 15 +++++++++++++++ packages/query/src/index.ts | 15 +++++++++++++++ packages/query/src/lq.ts | 15 +++++++++++++++ packages/query/src/messages/query.ts | 15 +++++++++++++++ packages/query/src/messages/utils.ts | 15 +++++++++++++++ packages/query/src/notifications/query.ts | 15 +++++++++++++++ packages/query/src/query.ts | 15 +++++++++++++++ packages/query/src/result.ts | 15 +++++++++++++++ packages/query/src/types.ts | 15 +++++++++++++++ packages/query/src/window.ts | 15 +++++++++++++++ packages/sdk-types/src/client.ts | 15 +++++++++++++++ packages/sdk-types/src/db.ts | 15 +++++++++++++++ packages/sdk-types/src/index.ts | 15 +++++++++++++++ packages/sdk-types/src/query.ts | 15 +++++++++++++++ packages/sdk-types/src/requestEvent.ts | 15 +++++++++++++++ packages/sdk-types/src/responseEvent.ts | 15 +++++++++++++++ packages/sdk-types/src/serverApi.ts | 15 +++++++++++++++ packages/server/src/eventProcessor.ts | 15 +++++++++++++++ packages/server/src/index.ts | 15 +++++++++++++++ packages/server/src/manager.ts | 15 +++++++++++++++ packages/server/src/triggers.ts | 15 +++++++++++++++ packages/shared/src/files.ts | 15 +++++++++++++++ packages/shared/src/index.ts | 15 +++++++++++++++ packages/shared/src/message.ts | 15 +++++++++++++++ packages/shared/src/retry.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/adapter.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/connection.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/db/base.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/db/message.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/db/notification.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/db/types.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/index.ts | 15 +++++++++++++++ packages/sqlite-wasm/src/migrations.ts | 15 +++++++++++++++ packages/types/src/file.ts | 15 +++++++++++++++ packages/types/src/index.ts | 15 +++++++++++++++ packages/types/src/message.ts | 15 +++++++++++++++ packages/types/src/notification.ts | 15 +++++++++++++++ packages/types/src/query.ts | 15 +++++++++++++++ 53 files changed, 795 insertions(+) diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index b1df62dcd80..ee5d04972ed 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + import { LiveQueries } from '@hcengineering/communication-query' import type { QueryClient } from '@hcengineering/communication-sdk-types' import type { WorkspaceID } from '@hcengineering/communication-types' diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 400e1e4d722..76a7df614ee 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type LiveQueries } from '@hcengineering/communication-query' import type { MessagesQueryCallback, diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts index 2393490f9f0..7884b77a4d9 100644 --- a/packages/client-sqlite/src/client.ts +++ b/packages/client-sqlite/src/client.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type CardID, type Message, diff --git a/packages/client-sqlite/src/index.ts b/packages/client-sqlite/src/index.ts index c91338aabfa..9ad2a8e4bac 100644 --- a/packages/client-sqlite/src/index.ts +++ b/packages/client-sqlite/src/index.ts @@ -1,2 +1,17 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export { type Client } from '@hcengineering/communication-sdk-types' export * from './client' diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts index d11674f83ba..3fa8e0101db 100644 --- a/packages/client-ws/src/client.ts +++ b/packages/client-ws/src/client.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type CardID, type ContextID, diff --git a/packages/client-ws/src/connection.ts b/packages/client-ws/src/connection.ts index 98a0df175d4..9d0a8a80544 100644 --- a/packages/client-ws/src/connection.ts +++ b/packages/client-ws/src/connection.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { ResponseEvent } from '@hcengineering/communication-sdk-types' import { encode, decode } from '@msgpack/msgpack' diff --git a/packages/client-ws/src/index.ts b/packages/client-ws/src/index.ts index 226fb61584a..a4b18e3dca9 100644 --- a/packages/client-ws/src/index.ts +++ b/packages/client-ws/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { createMessagesQuery, createNotificationsQuery } from '@hcengineering/communication-client-query' export * from './client' diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 0ca7ef20b1e..e83b2c22c37 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + import type { ParameterOrJSON, Row } from 'postgres' import type postgres from 'postgres' import { diff --git a/packages/cockroach/src/connection.ts b/packages/cockroach/src/connection.ts index b14941e5162..f7223da5a75 100644 --- a/packages/cockroach/src/connection.ts +++ b/packages/cockroach/src/connection.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + // Full copy from @hcengineering/postgres import postgres from 'postgres' import { v4 as uuid } from 'uuid' diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index e81dad2d219..572c5c3e434 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type ParameterOrJSON, type Row } from 'postgres' import type { WorkspaceID } from '@hcengineering/communication-types' diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 1ddad0af650..eadbf34c661 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type Message, type MessageID, diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 705edc24f76..5f6a0e2dc5c 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type MessageID, type ContextID, diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts index 1dfc6578c20..298a35ba9d6 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/db/schema.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type ContextID, type MessageID, diff --git a/packages/cockroach/src/db/utils.ts b/packages/cockroach/src/db/utils.ts index 9f4ac0a1b1d..950ef1bb5aa 100644 --- a/packages/cockroach/src/db/utils.ts +++ b/packages/cockroach/src/db/utils.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export function getCondition ( table: string, dbField: string, diff --git a/packages/cockroach/src/index.ts b/packages/cockroach/src/index.ts index 32544c6dfa9..ad167c694a7 100644 --- a/packages/cockroach/src/index.ts +++ b/packages/cockroach/src/index.ts @@ -1 +1,16 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export * from './adapter' diff --git a/packages/cockroach/src/types.ts b/packages/cockroach/src/types.ts index d15e572af3e..f8545d41294 100644 --- a/packages/cockroach/src/types.ts +++ b/packages/cockroach/src/types.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type ParameterOrJSON, type Row } from 'postgres' export interface Logger { diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts index 57ad51bd4b0..45ba5768222 100644 --- a/packages/query/src/index.ts +++ b/packages/query/src/index.ts @@ -1 +1,16 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export * from './lq.ts' diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 44563787ec1..5ace34515d4 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type FindMessagesParams, type FindNotificationsParams, diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 073527450b7..67f6d7c742e 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type FindMessagesParams, type Message, diff --git a/packages/query/src/messages/utils.ts b/packages/query/src/messages/utils.ts index 1db6a1260df..41a9ce4c7ef 100644 --- a/packages/query/src/messages/utils.ts +++ b/packages/query/src/messages/utils.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { CardID, Message, Reaction, SocialID } from '@hcengineering/communication-types' export function addReaction (message: Message, reaction: Reaction): Message { diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index 7b0d5b6a505..33a29525377 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type FindNotificationsParams, SortingOrder, diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts index a770f1b0ef5..084f2af846c 100644 --- a/packages/query/src/query.ts +++ b/packages/query/src/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { SortingOrder, type ID } from '@hcengineering/communication-types' import { type ResponseEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts index 5186ccebd1a..31a45ccb814 100644 --- a/packages/query/src/result.ts +++ b/packages/query/src/result.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + import type { ID } from '@hcengineering/communication-types' export class QueryResult { diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index cf4a6b8e4f2..95d32a17632 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type ResponseEvent } from '@hcengineering/communication-sdk-types' import { SortingOrder, type Window } from '@hcengineering/communication-types' diff --git a/packages/query/src/window.ts b/packages/query/src/window.ts index a31d7026cb8..3bc4be36065 100644 --- a/packages/query/src/window.ts +++ b/packages/query/src/window.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Window } from '@hcengineering/communication-types' import type { PagedQuery } from './types' diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index 36415edfb65..75de33b38ce 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { CardID, ContextID, diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index fecd2478405..a19e0d0ec18 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { CardID, ContextID, diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 2a57740ddfd..2a4a8c1995c 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export * from './client' export * from './db' export * from './query' diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts index 156a1ef67db..60c479a024c 100644 --- a/packages/sdk-types/src/query.ts +++ b/packages/sdk-types/src/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Message, Window, Notification } from '@hcengineering/communication-types' import type { Client } from './client' diff --git a/packages/sdk-types/src/requestEvent.ts b/packages/sdk-types/src/requestEvent.ts index ee305b6888c..9b8b9cc03b9 100644 --- a/packages/sdk-types/src/requestEvent.ts +++ b/packages/sdk-types/src/requestEvent.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { CardID, ContextID, diff --git a/packages/sdk-types/src/responseEvent.ts b/packages/sdk-types/src/responseEvent.ts index b1fd2cf2fb6..731e796f297 100644 --- a/packages/sdk-types/src/responseEvent.ts +++ b/packages/sdk-types/src/responseEvent.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. 
+// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Attachment, CardID, diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index a1d4217488d..7ddf2e0f9b9 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { FindMessagesGroupsParams, FindMessagesParams, diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 473a2d7ec47..7dae3e18a4f 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type Message, type Patch, diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 586d47b99b0..f4282ade666 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + import type { MeasureContext } from '@hcengineering/core' import type { FindMessagesGroupsParams, diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index 08ac1a523e7..6b8c3a57095 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type ConnectionInfo, type DbAdapter, diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 80c8b1d9f32..57e0c379620 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type MessageCreatedEvent, type DbAdapter, diff --git a/packages/shared/src/files.ts b/packages/shared/src/files.ts index 0d73c081043..7601e96c77e 100644 --- a/packages/shared/src/files.ts +++ b/packages/shared/src/files.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { FileMessage, FileMetadata, diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index 4cb8058974e..bd41801a46a 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + export * from './message.ts' export * from './retry.ts' export * from './files.ts' diff --git a/packages/shared/src/message.ts b/packages/shared/src/message.ts index 394acc4f22e..0dac007bc2f 100644 --- a/packages/shared/src/message.ts +++ b/packages/shared/src/message.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { MessageID } from '@hcengineering/communication-types' let lastTimestamp = 0 diff --git a/packages/shared/src/retry.ts b/packages/shared/src/retry.ts index eb533dca696..7d956582c3f 100644 --- a/packages/shared/src/retry.ts +++ b/packages/shared/src/retry.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export interface RetryOptions { retries: number delay?: number diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts index 75620ee2501..628a435443d 100644 --- a/packages/sqlite-wasm/src/adapter.ts +++ b/packages/sqlite-wasm/src/adapter.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type Message, type FindMessagesParams, diff --git a/packages/sqlite-wasm/src/connection.ts b/packages/sqlite-wasm/src/connection.ts index 7f82b3b91d1..281b7f9c783 100644 --- a/packages/sqlite-wasm/src/connection.ts +++ b/packages/sqlite-wasm/src/connection.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. +// + // @ts-expect-error error import { sqlite3Worker1Promiser } from '@sqlite.org/sqlite-wasm' diff --git a/packages/sqlite-wasm/src/db/base.ts b/packages/sqlite-wasm/src/db/base.ts index 63d1ff3c35b..be348e544bf 100644 --- a/packages/sqlite-wasm/src/db/base.ts +++ b/packages/sqlite-wasm/src/db/base.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type {Sqlite3Worker1Promiser} from "../connection"; export class BaseDb { diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts index 28b1b031007..44c820224ba 100644 --- a/packages/sqlite-wasm/src/db/message.ts +++ b/packages/sqlite-wasm/src/db/message.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type Message, type MessageID, diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts index 9decaa56757..b3215a2e5af 100644 --- a/packages/sqlite-wasm/src/db/notification.ts +++ b/packages/sqlite-wasm/src/db/notification.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { type MessageID, type ContextID, diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts index ee6c3d5adb8..edde495119d 100644 --- a/packages/sqlite-wasm/src/db/types.ts +++ b/packages/sqlite-wasm/src/db/types.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type {CardID, ContextID, MessageID, RichText, SocialID } from "@hcengineering/communication-types" export enum TableName { diff --git a/packages/sqlite-wasm/src/index.ts b/packages/sqlite-wasm/src/index.ts index 03eeab5ffaf..566be6a0660 100644 --- a/packages/sqlite-wasm/src/index.ts +++ b/packages/sqlite-wasm/src/index.ts @@ -1 +1,16 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export * from './adapter.ts' diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts index a1aee473477..20db224d33c 100644 --- a/packages/sqlite-wasm/src/migrations.ts +++ b/packages/sqlite-wasm/src/migrations.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Sqlite3Worker1Promiser } from './connection' export async function applyMigrations(worker: Sqlite3Worker1Promiser, dbId: string): Promise { diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts index 7cc12760645..b2af880b6e8 100644 --- a/packages/types/src/file.ts +++ b/packages/types/src/file.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + import type { CardID, Message, MessageID, RichText, SocialID } from './message' export interface FileMetadata { diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index d8fd44fd1c1..b95c2f6b615 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export * from './message.ts' export * from './notification.ts' export * from './query.ts' diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index d299ae7fe94..b1165699912 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Ref, Blob, PersonId, WorkspaceUuid } from '@hcengineering/core' import type { Card } from '@hcengineering/card' diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 11bb3be7e45..c860d2e24bb 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import type { Message, CardID, WorkspaceID } from './message' export type ContextID = string & { context: true } diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 34dce7a0ce8..3c46b4d2d7c 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + import { SortingOrder } from '@hcengineering/core' import type { BlobID, CardID, MessageID } from './message' From 4c38713a66174e9686e41dd1391601f1c72d80e7 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Sat, 1 Mar 2025 15:58:55 +0300 Subject: [PATCH 047/636] fix issues with recordings with 1440p and above Signed-off-by: denis-tingaikin --- internal/pkg/config/config.go | 2 +- internal/pkg/manifest/hls.go | 5 -- internal/pkg/resconv/resconv.go | 27 +++++----- internal/pkg/resconv/resconv_test.go | 9 ++-- internal/pkg/uploader/postpone.go | 20 ++++--- internal/pkg/uploader/postpone_test.go | 7 +-- internal/pkg/uploader/s3.go | 6 +-- internal/pkg/uploader/uploader.go | 75 ++++++++++++++------------ 8 files changed, 76 insertions(+), 75 deletions(-) diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 057a4b0a6e4..6a748d5bae6 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -28,7 +28,7 @@ type Config struct { PprofEnabled bool `default:"false" split_words:"true" desc:"starts profile server on localhost:6060 if true"` Insecure bool `default:"false" desc:"ignores authorization check if true"` ServeURL string `split_words:"true" desc:"app listen url" default:"0.0.0.0:1080"` - EndpointURL *url.URL `split_words:"true" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` + EndpointURL *url.URL `split_words:"true" default:"s3://127.0.0.1:9000" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` AuthURL *url.URL `split_words:"true" desc:"url to auth the upload"` MaxCapacity int64 `split_words:"true" default:"6220800" desc:"represents the amount of maximum possible capacity for the transcoding. 
The default value is 1920 * 1080 * 3."` MaxThreads int `split_words:"true" default:"4" desc:"means upper bound for the transcoing provider."` diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index c301e4f4f51..d8dd7a53613 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -53,11 +53,6 @@ func GenerateHLSPlaylist(levels []string, outputPath, uploadID string) error { if err != nil { return err } - - _, err = file.WriteString("#EXT-X-ENDLIST") - if err != nil { - return err - } } return nil diff --git a/internal/pkg/resconv/resconv.go b/internal/pkg/resconv/resconv.go index 4e33a8db440..1c00db3d23b 100644 --- a/internal/pkg/resconv/resconv.go +++ b/internal/pkg/resconv/resconv.go @@ -28,35 +28,32 @@ var prefixes = []struct { pixels int label string }{ - {pixels: 640 * 480, label: "320p"}, + {pixels: 640 * 360, label: "360p"}, {pixels: 1280 * 720, label: "480p"}, {pixels: 1920 * 1080, label: "720p"}, {pixels: 2560 * 1440, label: "1080p"}, - {pixels: 3840 * 2160, label: "2k"}, - {pixels: 5120 * 2880, label: "4k"}, - {pixels: 7680 * 4320, label: "5k"}, + {pixels: 3840 * 2160, label: "1440p"}, + {pixels: 7680 * 4320, label: "2160p"}, } var bandwidthMap = map[string]int{ - "320p": 300000, "360p": 500000, "480p": 2000000, "720p": 5000000, "1080p": 8000000, "1440p": 16000000, - "4k": 25000000, - "8k": 50000000, + "2160p": 25000000, + "4320p": 50000000, } var resolutions = map[string]string{ - "320p": "480:240", + "360p": "640:360", "480p": "640:480", "720p": "1280:720", "1080p": "1920:1080", - "2k": "2048:1080", - "4k": "3840:2160", - "5k": "5120:2880", - "8k": "7680:4320", + "1440p": "2560:1440", + "2160p": "3840:2160", + "4320p": "7680:4320", } // SubLevels returns sublevels for the resolution @@ -85,8 +82,8 @@ func SubLevels(resolution string) (res []string) { // Resolution returns default resolution based on the level func Resolution(level string) string { - if v, ok := resolutions[level]; ok { - return v + if res, ok := resolutions[level]; ok { + return res } return Resolution(defaultLevel) } @@ -98,7 +95,7 @@ func Level(resolution string) string { return pixels < prefixes[i].pixels }) if idx == len(prefixes) { - return "8k" + return "4320p" } return prefixes[idx].label diff --git a/internal/pkg/resconv/resconv_test.go b/internal/pkg/resconv/resconv_test.go index 4139e21370a..f1dc27b8002 100644 --- a/internal/pkg/resconv/resconv_test.go +++ b/internal/pkg/resconv/resconv_test.go @@ -29,10 +29,11 @@ func Test_Resconv_ShouldReturnCorrectPrefix(t *testing.T) { {res: "640:480", expected: "480p"}, {res: "1280:720", expected: "720p"}, {res: "1920:1080", expected: "1080p"}, - {res: "2560:1440", expected: "2k"}, - {res: "3840:2160", expected: "4k"}, - {res: "5120:2880", expected: "5k"}, - {res: "9000:4000", expected: "8k"}, + {res: "2880:1800", expected: "1440p"}, + {res: "2560:1440", expected: "1440p"}, + {res: "3840:2160", expected: "2160p"}, + {res: "5120:2880", expected: "2160p"}, + {res: "9000:4000", expected: "4320p"}, } for _, tt := range tests { diff --git a/internal/pkg/uploader/postpone.go b/internal/pkg/uploader/postpone.go index 3486996ec18..4cfaa81b37c 100644 --- a/internal/pkg/uploader/postpone.go +++ b/internal/pkg/uploader/postpone.go @@ -16,27 +16,33 @@ package uploader import ( "context" "time" + + "github.com/huly-stream/internal/pkg/log" + "go.uber.org/zap" ) -func (u *uploader) postpone(id string, action func()) { - u.wg.Add(1) +func (u *uploader) postpone(fileName string, action func(ctx context.Context)) { + u.waitJobs.Add(1) 
var ctx, cancel = context.WithCancel(context.Background()) + ctx = log.WithLoggerFields(ctx, zap.String("pospone", "action")) var startCh = time.After(u.postponeDuration) - if v, ok := u.contexts.Load(id); ok { + if v, ok := u.contexts.Load(fileName); ok { (*v.(*context.CancelFunc))() } - u.contexts.Store(id, &cancel) + u.contexts.Store(fileName, &cancel) go func() { - u.wg.Done() + u.waitJobs.Done() defer cancel() select { case <-ctx.Done(): return case <-startCh: - action() - u.contexts.CompareAndDelete(id, &cancel) + action(ctx) + if u.ctx.Err() == nil { + u.contexts.CompareAndDelete(fileName, &cancel) + } } }() } diff --git a/internal/pkg/uploader/postpone_test.go b/internal/pkg/uploader/postpone_test.go index 89522b4cec6..68758451e79 100644 --- a/internal/pkg/uploader/postpone_test.go +++ b/internal/pkg/uploader/postpone_test.go @@ -14,6 +14,7 @@ package uploader import ( + "context" "sync/atomic" "testing" "time" @@ -26,9 +27,9 @@ func Test_Postpone(t *testing.T) { postponeDuration: time.Second / 4, } var counter atomic.Int32 - u.postpone("1", func() { counter.Add(1) }) + u.postpone("1", func(context.Context) { counter.Add(1) }) time.Sleep(time.Second / 8) - u.postpone("1", func() { counter.Add(1) }) + u.postpone("1", func(context.Context) { counter.Add(1) }) time.Sleep(time.Second / 2) require.Equal(t, int32(1), counter.Load()) time.Sleep(time.Second / 2) @@ -38,7 +39,7 @@ func Test_Postpone(t *testing.T) { func Test_WithoutPostpone(t *testing.T) { var counter atomic.Int32 var u uploader - u.postpone("1", func() { counter.Add(1) }) + u.postpone("1", func(context.Context) { counter.Add(1) }) time.Sleep(time.Second / 10) require.Equal(t, int32(1), counter.Load()) } diff --git a/internal/pkg/uploader/s3.go b/internal/pkg/uploader/s3.go index 016e6fc87c3..d211e0347f6 100644 --- a/internal/pkg/uploader/s3.go +++ b/internal/pkg/uploader/s3.go @@ -41,10 +41,9 @@ type S3Storage struct { } // NewS3 creates a new S3 storage -func NewS3(ctx context.Context, endpoint string) Storage { +func NewS3(ctx context.Context, endpoint, bucketName string) Storage { var accessKeyID = os.Getenv("AWS_ACCESS_KEY_ID") var accessKeySecret = os.Getenv("AWS_SECRET_ACCESS_KEY") - var bucketName = os.Getenv("AWS_BUCKET_NAME") var logger = log.FromContext(ctx).With(zap.String("s3", "storage")) cfg, err := config.LoadDefaultConfig(ctx, @@ -68,9 +67,6 @@ func NewS3(ctx context.Context, endpoint string) Storage { } func getContentType(objectKey string) string { - if strings.HasSuffix(objectKey, ".txt") { - return "txt" - } if strings.HasSuffix(objectKey, ".ts") { return "video/mp2t" } diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index a2c76811cda..8d04618f574 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -34,7 +34,7 @@ import ( type uploader struct { done chan struct{} - wg sync.WaitGroup + waitJobs sync.WaitGroup ctx context.Context cancel context.CancelFunc baseDir string @@ -48,10 +48,13 @@ type uploader struct { } func (u *uploader) retry(action func() error) { + var delay = time.Millisecond * 50 for range u.retryCount { if err := action(); err == nil { return } + time.Sleep(delay) + delay *= 2 } } @@ -59,30 +62,41 @@ func (u *uploader) retry(action func() error) { func (u *uploader) Rollback() { logger := log.FromContext(u.ctx).With(zap.String("uploader", "Rollback")) logger.Debug("starting") - defer u.cancel() - - u.wg.Wait() + defer logger.Debug("done") - u.sentFiles.Range(func(key, value any) bool { - logger.Debug("deleting 
remote file", zap.String("key", key.(string))) - u.retry(func() error { return u.storage.DeleteFile(u.ctx, key.(string)) }) - log.FromContext(u.ctx).Debug("deleting local file", zap.String("key", key.(string))) - _ = os.Remove(key.(string)) - return true + u.postpone("", func(ctx context.Context) { + u.sentFiles.Range(func(key, value any) bool { + logger.Debug("deleting remote file", zap.String("key", key.(string))) + u.retry(func() error { return u.storage.DeleteFile(ctx, key.(string)) }) + return true + }) }) + + u.Terminate() } +// Terminate deletes func (u *uploader) Terminate() { logger := log.FromContext(u.ctx).With(zap.String("uploader", "Terminate")) logger.Debug("starting") - defer u.cancel() - u.wg.Wait() + go func() { + defer logger.Debug("done") + u.waitJobs.Wait() + u.cancel() + }() +} - u.sentFiles.Range(func(key, value any) bool { - _ = os.Remove(key.(string)) - return true +func (u *uploader) uploadAndDelte(fileName string) { + u.postpone(fileName+"-del", func(context.Context) {}) + u.postpone(fileName, func(ctx context.Context) { + u.retry(func() error { return u.storage.UploadFile(ctx, fileName) }) + u.postpone(fileName+"-del", func(context.Context) { + _ = os.Remove(fileName) + }) }) + + u.sentFiles.Store(fileName, struct{}{}) } func (u *uploader) Serve() error { @@ -98,13 +112,8 @@ func (u *uploader) Serve() error { _ = os.MkdirAll(u.baseDir, os.ModePerm) initFiles, _ := os.ReadDir(u.baseDir) for _, f := range initFiles { - var name = filepath.Join(u.baseDir, f.Name()) - u.postpone(name, func() { - logger.Debug("started uploading", zap.String("eventName", name)) - u.retry(func() error { return u.storage.UploadFile(u.ctx, name) }) - logger.Debug("added to sentFiles", zap.String("eventName", name)) - u.sentFiles.Store(name, struct{}{}) - }) + var filePath = filepath.Join(u.baseDir, f.Name()) + u.uploadAndDelte(filePath) } if err := watcher.Add(u.baseDir); err != nil { @@ -132,12 +141,7 @@ func (u *uploader) Serve() error { if !strings.Contains(event.Name, u.uploadID) { continue } - u.postpone(event.Name, func() { - logger.Debug("started uploading", zap.String("eventName", event.Name)) - u.retry(func() error { return u.storage.UploadFile(u.ctx, event.Name) }) - logger.Debug("added to sentFiles", zap.String("eventName", event.Name)) - u.sentFiles.Store(event.Name, struct{}{}) - }) + u.uploadAndDelte(event.Name) case err, ok := <-watcher.Errors: if !ok { return u.ctx.Err() @@ -168,7 +172,7 @@ func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo h var storage Storage var err error - storage, err = NewStorageByURL(ctx, endpointURL, uploadInfo.MetaData) + storage, err = NewStorageByURL(uploaderCtx, endpointURL, uploadInfo.MetaData) if err != nil { uploadCancel() return nil, err @@ -181,7 +185,7 @@ func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo h uploadID: uploadInfo.ID, postponeDuration: time.Second * 2, storage: storage, - retryCount: 5, + retryCount: 10, baseDir: filepath.Join(baseDir, uploadInfo.ID), eventBufferCount: 100, }, nil @@ -189,20 +193,21 @@ func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo h // NewStorageByURL creates a new storage basd on the type from the url scheme, for example "datalake://my-datalake-endpoint" func NewStorageByURL(ctx context.Context, u *url.URL, headers map[string]string) (Storage, error) { + var workspace = headers["workspace"] + if workspace == "" { + return nil, errors.New("missed workspace in the client's metadata") + } c, _ := config.FromEnv() switch 
u.Scheme { case "tus": return nil, errors.New("not imlemented yet") case "datalake": - if headers["workspace"] == "" { - return nil, errors.New("missed workspace in the client's metadata") - } if headers["token"] == "" { return nil, errors.New("missed auth token in the client's metadata") } - return NewDatalakeStorage(c.Endpoint().String(), headers["workspace"], headers["token"]), nil + return NewDatalakeStorage(c.Endpoint().String(), workspace, headers["token"]), nil case "s3": - return NewS3(ctx, c.Endpoint().String()), nil + return NewS3(ctx, c.Endpoint().String(), workspace), nil default: return nil, errors.New("unknown scheme") } From ce86585484c47142544b4b3b838820911a748bf1 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Mon, 3 Mar 2025 06:39:23 +0300 Subject: [PATCH 048/636] fix tests Signed-off-by: denis-tingaikin --- internal/pkg/resconv/resconv.go | 2 +- internal/pkg/resconv/resconv_test.go | 3 ++- internal/pkg/uploader/postpone.go | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/internal/pkg/resconv/resconv.go b/internal/pkg/resconv/resconv.go index 1c00db3d23b..3ed4a0b3c22 100644 --- a/internal/pkg/resconv/resconv.go +++ b/internal/pkg/resconv/resconv.go @@ -22,7 +22,7 @@ import ( "strings" ) -const defaultLevel = "320p" +const defaultLevel = "360p" var prefixes = []struct { pixels int diff --git a/internal/pkg/resconv/resconv_test.go b/internal/pkg/resconv/resconv_test.go index f1dc27b8002..547fab05821 100644 --- a/internal/pkg/resconv/resconv_test.go +++ b/internal/pkg/resconv/resconv_test.go @@ -25,7 +25,8 @@ func Test_Resconv_ShouldReturnCorrectPrefix(t *testing.T) { res string expected string }{ - {res: "320:240", expected: "320p"}, + {res: "320:240", expected: "360p"}, + {res: "320:360", expected: "360p"}, {res: "640:480", expected: "480p"}, {res: "1280:720", expected: "720p"}, {res: "1920:1080", expected: "1080p"}, diff --git a/internal/pkg/uploader/postpone.go b/internal/pkg/uploader/postpone.go index 4cfaa81b37c..29b308dc5e8 100644 --- a/internal/pkg/uploader/postpone.go +++ b/internal/pkg/uploader/postpone.go @@ -40,7 +40,7 @@ func (u *uploader) postpone(fileName string, action func(ctx context.Context)) { return case <-startCh: action(ctx) - if u.ctx.Err() == nil { + if ctx.Err() == nil { u.contexts.CompareAndDelete(fileName, &cancel) } } From 6a73b5ad698797c7ea009ff54baf9e8cacea8706 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 3 Mar 2025 18:57:38 +0400 Subject: [PATCH 049/636] Update adapter (#33) * Register cards to dump Signed-off-by: Kristina Fefelova --- bun.lock | 31 +- eslint.config.js => eslint.config.mjs | 0 package.json | 5 +- packages/client-query/package.json | 8 +- packages/client-sqlite/package.json | 8 +- packages/client-ws/package.json | 8 +- packages/cockroach/package.json | 8 +- packages/cockroach/src/adapter.ts | 4 +- packages/cockroach/src/db/message.ts | 831 ++++++++++++------------ packages/cockroach/src/utils.ts | 50 ++ packages/examples/package.json | 8 +- packages/query/package.json | 8 +- packages/query/src/messages/query.ts | 4 +- packages/sdk-types/package.json | 11 +- packages/sdk-types/src/responseEvent.ts | 10 +- packages/server/package.json | 9 +- packages/server/src/eventProcessor.ts | 17 +- packages/server/src/index.ts | 7 +- packages/server/src/manager.ts | 12 +- packages/server/src/metadata.ts | 9 + packages/server/src/triggers.ts | 67 +- packages/shared/package.json | 11 +- packages/shared/src/files.ts | 53 +- packages/sqlite-wasm/package.json | 8 +- packages/types/package.json | 11 +- 
packages/types/src/file.ts | 1 - packages/types/src/message.ts | 2 +- packages/types/src/query.ts | 3 +- scripts/{build.sh => bundle.sh} | 17 +- 29 files changed, 723 insertions(+), 498 deletions(-) rename eslint.config.js => eslint.config.mjs (100%) create mode 100644 packages/cockroach/src/utils.ts create mode 100644 packages/server/src/metadata.ts rename scripts/{build.sh => bundle.sh} (67%) diff --git a/bun.lock b/bun.lock index 238a096925a..29ea0a0d256 100644 --- a/bun.lock +++ b/bun.lock @@ -10,13 +10,13 @@ "eslint": "^9.21.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.3", - "prettier": "^3.5.2", + "prettier": "^3.5.3", "typescript-eslint": "^8.25.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -64,7 +64,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -81,7 +81,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-client-ws": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -95,7 +95,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -125,12 +125,13 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "0.7.2", + "@hcengineering/server-token": "0.7.2", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -144,7 +145,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-types": "workspace:*", "js-yaml": "^4.1.0", @@ -159,7 +160,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -176,7 +177,7 @@ }, "packages/types": { "name": 
"@hcengineering/communication-types", - "version": "0.1.47", + "version": "0.1.59", "dependencies": { "@hcengineering/card": "0.7.2", "@hcengineering/core": "0.7.2", @@ -250,6 +251,8 @@ "@hcengineering/platform": ["@hcengineering/platform@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.2/854c94ef4607aba2306ec92a32f41cce30139de5", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-RtUWhC0MPjFABojyOSGoOCLZGPVKiZkfZB9n+7Mwbc7tBznVQKeXBBOVUq6QpC50cLyx7ce8WVJBNVIpggwaeQ=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.2/2bf94b717348f9ce7256fd3a3fec39869c044e4e", { "dependencies": { "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "jwt-simple": "^0.5.6" } }, "sha512-WcGkUOhJJvRZZbPYSIuG0S57r5f1sgrQEm+kz8D2Ki4C+RMEwWYjRCYyZhWiE9QxbetBJtcwneKI+jfpDItvag=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.2/a6f832fd9ddbb3e5683f361e54d8bb4f3e096be8", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/platform": "^0.7.2", "svelte": "^4.2.19" } }, "sha512-Sqrt6ETmBKGk8SiSD/gvfitqP1GUaSpfiGy4I7xaAgXuyeWENni+YJ9x4C+9yTWT7caRnKu5WKbv/zN1PpWuqg=="], "@hcengineering/ui": ["@hcengineering/ui@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.2/40ec34d2d251af4ad2b8a26aa89c12894cbe5eda", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "@hcengineering/theme": "^0.7.2", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emoji-regex": "^10.1.0", "fast-equals": "^5.2.2", "svelte": "^4.2.19" } }, "sha512-5t6JFO65Tm6iZqveEvBjfTM8DDZ9ca9uBXj7GbtfTD/1K8WC1Yf3REsNDW2I9DFYzxKPBayx9M4TRrJBseHlWg=="], @@ -476,6 +479,8 @@ "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], + "jwt-simple": ["jwt-simple@0.5.6", "", {}, "sha512-40aUybvhH9t2h71ncA1/1SbtTNCVZHgsTsTgqPUxGWDmUDrXyDf2wMNQKEbdBjbf4AI+fQhbECNTV6lWxQKUzg=="], + "keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], @@ -522,7 +527,7 @@ "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], - "prettier": ["prettier@3.5.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-lc6npv5PH7hVqozBR7lkBNOGXV9vMwROAPlumdBkX0wTbbzPu/U1hk5yL8p2pt4Xoc+2mkT8t/sow2YrV/M5qg=="], + "prettier": ["prettier@3.5.3", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw=="], "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], diff --git a/eslint.config.js b/eslint.config.mjs similarity index 100% rename from eslint.config.js rename to eslint.config.mjs diff --git a/package.json b/package.json index 
1d53a75e857..7e3a9002f7a 100644 --- a/package.json +++ b/package.json @@ -1,10 +1,9 @@ { "name": "@hcengineering/communication", - "type": "module", "workspaces": ["packages/*"], "scripts": { "publish": "sh scripts/publish.sh", - "build": "sh scripts/build.sh && tsc --noEmit", + "bundle": "sh scripts/bundle.sh && tsc --noEmit", "lint": "eslint packages/**/src/*.ts", "format": "prettier --write packages/**/src/*.ts && bun run lint" }, @@ -15,7 +14,7 @@ "eslint": "^9.21.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.3", - "prettier": "^3.5.2", + "prettier": "^3.5.3", "typescript-eslint": "^8.25.0" } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index df4ca1bc4c1..8e4c9b04561 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index ee0fa824cbb..37cedb24ab5 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 86e4dadcb1f..b6e08be8b06 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index b34725c8ed0..888cf8ec652 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:node", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.js" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index e83b2c22c37..f5d9a846a67 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -42,6 +42,7 @@ 
import { MessagesDb } from './db/message' import { NotificationsDb } from './db/notification' import { connect, type PostgresClientReference } from './connection' import { type Options, type Logger, type SqlClient } from './types' +import { injectVars } from './utils.ts' export class CockroachAdapter implements DbAdapter { private readonly message: MessagesDb @@ -267,7 +268,8 @@ class CockroachClient implements SqlClient { ) {} async execute)[]>(query: string, params?: ParameterOrJSON[]): Promise { - return await this.sql.unsafe(query, params) + const sql = params !== undefined && params.length > 0 ? injectVars(query, params) : query + return await this.sql.unsafe(sql) } close(): void { diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index eadbf34c661..e94d089650c 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -14,356 +14,357 @@ // import { - type Message, - type MessageID, - type CardID, - type FindMessagesParams, - type SocialID, - type RichText, - SortingOrder, - PatchType, - type Thread, - type BlobID, - type FindMessagesGroupsParams, - type MessagesGroup + type Message, + type MessageID, + type CardID, + type FindMessagesParams, + type SocialID, + type RichText, + SortingOrder, + PatchType, + type Thread, + type BlobID, + type FindMessagesGroupsParams, + type MessagesGroup } from '@hcengineering/communication-types' -import { generateMessageId } from '@hcengineering/communication-shared' +import {generateMessageId} from '@hcengineering/communication-shared' -import { BaseDb } from './base' +import {BaseDb} from './base' import { - TableName, - type MessageDb, - type AttachmentDb, - type ReactionDb, - type PatchDb, - toMessage, - type ThreadDb, - toThread, - type MessagesGroupDb, - toMessagesGroup + TableName, + type MessageDb, + type AttachmentDb, + type ReactionDb, + type PatchDb, + toMessage, + type ThreadDb, + toThread, + type MessagesGroupDb, + toMessagesGroup } from './schema' -import { getCondition } from './utils' +import {getCondition} from './utils' export class MessagesDb extends BaseDb { - // Message - async createMessage (card: CardID, content: RichText, creator: SocialID, created: Date): Promise { - const id = generateMessageId() - const db: MessageDb = { - id, - workspace_id: this.workspace, - card_id: card, - content, - creator, - created + // Message + async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { + const id = generateMessageId() + const db: MessageDb = { + id, + workspace_id: this.workspace, + card_id: card, + content, + creator, + created + } + + const sql = `INSERT INTO ${TableName.Message} (workspace_id, card_id, id, content, creator, created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::text, $5::varchar, $6::timestamptz)` + + await this.execute(sql, [db.workspace_id, db.card_id, db.id, db.content, db.creator, db.created], 'insert message') + + return id } - const sql = `INSERT INTO ${TableName.Message} (workspace_id, card_id, id, content, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::text, $5::varchar, $6::timestamptz)` - - await this.execute(sql, [db.workspace_id, db.card_id, db.id, db.content, db.creator, db.created], 'insert message') - - return id - } - - async removeMessage (card: CardID, message: MessageID, socialIds?: SocialID[]): Promise { - if (socialIds === undefined || socialIds.length === 0) { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id 
= $2::varchar - AND id = $2::bigint;` - await this.execute(sql, [this.workspace, card, message], 'remove message') - } else if (socialIds.length === 1) { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id = $2::bigint - AND creator = $3::varchar;` - await this.execute(sql, [this.workspace, card, message, socialIds[0]], 'remove message') - } else { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id = $2::bigint - AND creator = ANY ($3::varchar[]);` - - await this.execute(sql, [this.workspace, card, message, socialIds], 'remove message') + async removeMessage(card: CardID, message: MessageID, socialIds?: SocialID[]): Promise { + if (socialIds === undefined || socialIds.length === 0) { + const sql = `DELETE + FROM ${TableName.Message} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND id = $2::bigint;` + await this.execute(sql, [this.workspace, card, message], 'remove message') + } else if (socialIds.length === 1) { + const sql = `DELETE + FROM ${TableName.Message} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND id = $2::bigint + AND creator = $3::varchar;` + await this.execute(sql, [this.workspace, card, message, socialIds[0]], 'remove message') + } else { + const sql = `DELETE + FROM ${TableName.Message} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND id = $2::bigint + AND creator = ANY ($3::varchar[]);` + + await this.execute(sql, [this.workspace, card, message, socialIds], 'remove message') + } } - } - - async removeMessages (card: CardID, fromId: MessageID, toId: MessageID): Promise { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id >= $3::bigint - AND id <= $4::bigint;` - - await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove messages') - } - - async createPatch ( - card: CardID, - message: MessageID, - type: PatchType, - content: string, - creator: SocialID, - created: Date - ): Promise { - const db: PatchDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - type, - content, - creator, - created + + async removeMessages(card: CardID, fromId: MessageID, toId: MessageID): Promise { + const sql = `DELETE + FROM ${TableName.Message} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND id >= $3::bigint + AND id <= $4::bigint;` + + await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove messages') } - const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz)` - - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.type, db.content, db.creator, db.created], - 'insert patch' - ) - } - - async removePatches (card: CardID, fromId: MessageID, toId: MessageID): Promise { - const sql = `DELETE - FROM ${TableName.Patch} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id >= $3::bigint - AND message_id <= $4::bigint;` - - await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove patches') - } - - // Attachment - async createAttachment (message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - const db: AttachmentDb = { - message_id: message, - card_id: card, - creator, - created + async 
createPatch( + card: CardID, + message: MessageID, + type: PatchType, + content: string, + creator: SocialID, + created: Date + ): Promise { + const db: PatchDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + type, + content, + creator, + created + } + + const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz)` + + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.type, db.content, db.creator, db.created], + 'insert patch' + ) } - const sql = `INSERT INTO ${TableName.Attachment} (message_id, card_id, creator, created) - VALUES ($1::bigint, $2::varchar, $3::varchar, $4::timestamptz)` - - await this.execute(sql, [db.message_id, db.card_id, db.creator, db.created], 'insert attachment') - } - - async removeAttachment (message: MessageID, card: CardID): Promise { - const sql = `DELETE - FROM ${TableName.Attachment} - WHERE message_id = $1::bigint - AND card_id = $2::varchar` - await this.execute(sql, [message, card], 'remove attachment') - } - - // Reaction - async createReaction ( - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID, - created: Date - ): Promise { - const select = `SELECT m.id - FROM ${TableName.Message} m - WHERE m.id = $1::bigint` - - const messageDb = await this.execute(select, [message], 'select message') - - if (messageDb.length > 0) { - const db: ReactionDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - reaction, - creator, - created - } - const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` - - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], - 'insert reaction' - ) - } else { - await this.createPatch(card, message, PatchType.addReaction, reaction, creator, created) + + async removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise { + const sql = `DELETE + FROM ${TableName.Patch} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id >= $3::bigint + AND message_id <= $4::bigint;` + + await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove patches') } - } - - async removeReaction ( - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID, - created: Date - ): Promise { - const select = `SELECT m.id - FROM ${TableName.Message} m - WHERE m.id = $1::bigint` - - const messageDb = await this.execute(select, [message], 'select message') - - if (messageDb.length > 0) { - const sql = `DELETE - FROM ${TableName.Reaction} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::bigint - AND reaction = $4::varchar - AND creator = $5::varchar` - await this.execute(sql, [this.workspace, card, message, reaction, creator], 'remove reaction') - } else { - await this.createPatch(card, message, PatchType.removeReaction, reaction, creator, created) + + // Attachment + async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { + const db: AttachmentDb = { + message_id: message, + card_id: card, + creator, + created + } + const sql = `INSERT INTO ${TableName.Attachment} (message_id, card_id, creator, created) + VALUES ($1::bigint, $2::varchar, $3::varchar, 
$4::timestamptz)` + + await this.execute(sql, [db.message_id, db.card_id, db.creator, db.created], 'insert attachment') } - } - - // Thread - async createThread (card: CardID, message: MessageID, thread: CardID, created: Date): Promise { - const db: ThreadDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - thread_id: thread, - replies_count: 0, - last_reply: created + + async removeAttachment(message: MessageID, card: CardID): Promise { + const sql = `DELETE + FROM ${TableName.Attachment} + WHERE message_id = $1::bigint + AND card_id = $2::varchar` + await this.execute(sql, [message, card], 'remove attachment') } - const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, - last_reply) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::int, $6::timestamptz)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply], - 'insert thread' - ) - } - - async updateThread (thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise { - if (op === 'increment') { - const sql = `UPDATE ${TableName.Thread} - SET replies_count = replies_count + 1, - last_reply = $3::timestamptz - WHERE workspace_id = $1::uuid - AND thread_id = $2::varchar` - await this.execute(sql, [this.workspace, thread, lastReply], 'update thread') - } else if (op === 'decrement') { - const sql = `UPDATE ${TableName.Thread} - SET replies_count = GREATEST(replies_count - 1, 0) - WHERE workspace_id = $1::uuid - AND thread_id = $2::varchar` - await this.execute(sql, [this.workspace, thread], 'update thread') + + // Reaction + async createReaction( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id + FROM ${TableName.Message} m + WHERE m.id = $1::bigint` + + const messageDb = await this.execute(select, [message], 'select message') + + if (messageDb.length > 0) { + const db: ReactionDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + reaction, + creator, + created + } + const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` + + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], + 'insert reaction' + ) + } else { + await this.createPatch(card, message, PatchType.addReaction, reaction, creator, created) + } } - } - - // MessagesGroup - async createMessagesGroup ( - card: CardID, - blobId: BlobID, - fromDate: Date, - toDate: Date, - fromId: MessageID, - toId: MessageID, - count: number - ): Promise { - const db: MessagesGroupDb = { - workspace_id: this.workspace, - card_id: card, - blob_id: blobId, - from_date: fromDate, - to_date: toDate, - from_id: fromId, - to_id: toId, - count + + async removeReaction( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id + FROM ${TableName.Message} m + WHERE m.id = $1::bigint` + + const messageDb = await this.execute(select, [message], 'select message') + + if (messageDb.length > 0) { + const sql = `DELETE + FROM ${TableName.Reaction} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::bigint + AND reaction = $4::varchar + AND creator = $5::varchar` + await this.execute(sql, [this.workspace, card, message, 
reaction, creator], 'remove reaction') + } else { + await this.createPatch(card, message, PatchType.removeReaction, reaction, creator, created) + } } - const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, from_id, - to_id, count) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::bigint, $7::bigint, - $8::int)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.from_id, db.to_id, db.count], - 'insert messages group' - ) - } - - async removeMessagesGroup (card: CardID, blobId: BlobID): Promise { - const sql = `DELETE - FROM ${TableName.MessagesGroup} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND blob_id = $3::uuid` - await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') - } - - // Find messages - async find (params: FindMessagesParams): Promise { - // TODO: experiment with select to improve performance - const select = `SELECT m.id, - m.card_id, - m.content, - m.creator, - m.created, - t.thread_id as thread_id, - t.replies_count as replies_count, - t.last_reply as last_reply, - ${this.subSelectPatches()}, - ${this.subSelectReactions()} - FROM ${TableName.Message} m - LEFT JOIN ${TableName.Thread} t - ON t.workspace_id = m.workspace_id AND t.card_id = m.card_id AND - t.message_id = m.id` - - const { where, values } = this.buildMessageWhere(params) - const orderBy = - params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' - const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') - - const result = await this.execute(sql, values, 'find messages') - - return result.map((it: any) => toMessage(it)) - } - - buildMessageWhere (params: FindMessagesParams): { where: string, values: any[] } { - const where: string[] = ['m.workspace_id = $1::uuid'] - const values: any[] = [this.workspace] - - let index = 2 - - if (params.id != null) { - where.push(`m.id = $${index++}::bigint`) - values.push(params.id) + // Thread + async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + const db: ThreadDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + thread_id: thread, + replies_count: 0, + last_reply: created + } + const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, + last_reply) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::int, $6::timestamptz)` + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply], + 'insert thread' + ) } - if (params.card != null) { - where.push(`m.card_id = $${index++}::varchar`) - values.push(params.card) + async updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise { + if (op === 'increment') { + const sql = `UPDATE ${TableName.Thread} + SET replies_count = replies_count + 1, + last_reply = $3::timestamptz + WHERE workspace_id = $1::uuid + AND thread_id = $2::varchar` + await this.execute(sql, [this.workspace, thread, lastReply], 'update thread') + } else if (op === 'decrement') { + const sql = `UPDATE ${TableName.Thread} + SET replies_count = GREATEST(replies_count - 1, 0) + WHERE workspace_id = $1::uuid + AND thread_id = $2::varchar` + await this.execute(sql, [this.workspace, thread], 'update thread') + } } - const createdCondition = getCondition('m', 
'created', index, params.created, 'timestamptz') + // MessagesGroup + async createMessagesGroup( + card: CardID, + blobId: BlobID, + fromDate: Date, + toDate: Date, + fromId: MessageID, + toId: MessageID, + count: number + ): Promise { + const db: MessagesGroupDb = { + workspace_id: this.workspace, + card_id: card, + blob_id: blobId, + from_date: fromDate, + to_date: toDate, + from_id: fromId, + to_id: toId, + count + } + + const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, + from_id, + to_id, count) + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::bigint, $7::bigint, + $8::int)` + await this.execute( + sql, + [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.from_id, db.to_id, db.count], + 'insert messages group' + ) + } + + async removeMessagesGroup(card: CardID, blobId: BlobID): Promise { + const sql = `DELETE + FROM ${TableName.MessagesGroup} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND blob_id = $3::uuid` + await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') + } - if (createdCondition != null) { - where.push(createdCondition.where) - values.push(createdCondition.value) - index++ + // Find messages + async find(params: FindMessagesParams): Promise { + // TODO: experiment with select to improve performance + const select = `SELECT m.id, + m.card_id, + m.content, + m.creator, + m.created, + t.thread_id as thread_id, + t.replies_count as replies_count, + t.last_reply as last_reply, + ${this.subSelectPatches()}, + ${this.subSelectReactions()} + FROM ${TableName.Message} m + LEFT JOIN ${TableName.Thread} t + ON t.workspace_id = m.workspace_id AND t.card_id = m.card_id AND + t.message_id = m.id` + + const {where, values} = this.buildMessageWhere(params) + const orderBy = + params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' + const sql = [select, where, orderBy, limit].join(' ') + + const result = await this.execute(sql, values, 'find messages') + + return result.map((it: any) => toMessage(it)) } - return { where: `WHERE ${where.join(' AND ')}`, values } - } + buildMessageWhere(params: FindMessagesParams): { where: string, values: any[] } { + const where: string[] = ['m.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] - subSelectPatches (): string { - return `COALESCE( + let index = 2 + + if (params.id != null) { + where.push(`m.id = $${index++}::bigint`) + values.push(params.id) + } + + if (params.card != null) { + where.push(`m.card_id = $${index++}::varchar`) + values.push(params.card) + } + + const createdCondition = getCondition('m', 'created', index, params.created, 'timestamptz') + + if (createdCondition != null) { + where.push(createdCondition.where) + values.push(createdCondition.value) + index++ + } + + return {where: `WHERE ${where.join(' AND ')}`, values} + } + + subSelectPatches(): string { + return `COALESCE( (SELECT jsonb_agg(jsonb_build_object( 'content', p.content, 'creator', p.creator, @@ -375,10 +376,10 @@ export class MessagesDb extends BaseDb { AND p.card_id = m.card_id AND p.type = 'update' ), '[]'::jsonb) AS patches` - } + } - subSelectAttachments (): string { - return `COALESCE( + subSelectAttachments(): string { + return `COALESCE( (SELECT jsonb_agg(jsonb_build_object( 'card_id', a.card_id, 'message_id', a.message_id, @@ -388,10 +389,10 @@ export class MessagesDb extends BaseDb { FROM ${TableName.Attachment} a WHERE a.message_id = m.id ), '[]'::jsonb) AS attachments` - } + } - subSelectReactions (): string { - return `COALESCE( + subSelectReactions(): string { + return `COALESCE( (SELECT jsonb_agg(jsonb_build_object( 'message_id', r.message_id, 'reaction', r.reaction, @@ -403,94 +404,98 @@ export class MessagesDb extends BaseDb { AND r.card_id = m.card_id AND r.message_id = m.id ), '[]'::jsonb) AS reactions` - } - - // Find thread - async findThread (thread: CardID): Promise { - const sql = `SELECT t.card_id, - t.message_id, - t.thread_id, - t.replies_count, - t.last_reply - FROM ${TableName.Thread} t - WHERE t.workspace_id = $1::uuid - AND t.thread_id = $2::varchar - LIMIT 1;` - - const result = await this.execute(sql, [this.workspace, thread], 'find thread') - return result.map((it: any) => toThread(it))[0] - } - - // Find messages groups - async findMessagesGroups (params: FindMessagesGroupsParams): Promise { - const select = `SELECT mg.card_id, - mg.blob_id, - mg.from_date, - mg.to_date, - mg.from_id, - mg.to_id, - mg.count, - jsonb_agg(jsonb_build_object( - 'message_id', p.message_id, - 'type', p.type, - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) ORDER BY p.created) AS patches - FROM ${TableName.MessagesGroup} mg - LEFT JOIN ${TableName.Patch} p - ON p.workspace_id = mg.workspace_id - AND p.card_id = mg.card_id - AND p.message_id BETWEEN mg.from_id AND mg.to_id` - - const { where, values } = this.buildMessagesGroupWhere(params) - const orderBy = - params.orderBy === 'toDate' - ? `ORDER BY mg.to_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - : `ORDER BY mg.from_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - const groupBy = 'GROUP BY mg.card_id, mg.blob_id, mg.from_date, mg.to_date, mg.from_id, mg.to_id, mg.count' - - const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, groupBy, orderBy, limit].join(' ') - - const result = await this.execute(sql, values, 'find messages groups') - - return result.map((it: any) => toMessagesGroup(it)) - } - - buildMessagesGroupWhere (params: FindMessagesGroupsParams): { - where: string - values: any[] - } { - const where: string[] = ['mg.workspace_id = $1::uuid'] - const values: any[] = [this.workspace] - - let index = 2 - - if (params.card != null) { - where.push(`mg.card_id = $${index++}::varchar`) - values.push(params.card) } - if (params.blobId != null) { - where.push(`mg.blob_id = $${index++}`) - values.push(params.blobId) + // Find thread + async findThread(thread: CardID): Promise { + const sql = `SELECT t.card_id, + t.message_id, + t.thread_id, + t.replies_count, + t.last_reply + FROM ${TableName.Thread} t + WHERE t.workspace_id = $1::uuid + AND t.thread_id = $2::varchar + LIMIT 1;` + + const result = await this.execute(sql, [this.workspace, thread], 'find thread') + return result.map((it: any) => toThread(it))[0] } - const fromDateCondition = getCondition('mg', 'from_date', index, params.fromDate, 'timestamptz') - if (fromDateCondition != null) { - where.push(fromDateCondition.where) - values.push(fromDateCondition.value) - index++ + // Find messages groups + async findMessagesGroups(params: FindMessagesGroupsParams): Promise { + const select = ` + SELECT mg.card_id, + mg.blob_id, + mg.from_date, + mg.to_date, + mg.from_id, + mg.to_id, + mg.count, + patches + FROM ${TableName.MessagesGroup} mg + CROSS JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object( + 'message_id', p.message_id, + 'type', p.type, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created) AS patches + FROM ${TableName.Patch} p + WHERE p.workspace_id = mg.workspace_id + AND p.card_id = mg.card_id + AND p.message_id BETWEEN mg.from_id AND mg.to_id + ) sub` + + const {where, values} = this.buildMessagesGroupWhere(params) + const orderBy = + params.orderBy === 'toDate' + ? `ORDER BY mg.to_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + : `ORDER BY mg.from_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + const limit = params.limit != null ? 
` LIMIT ${params.limit}` : ''; + + const sql = [select, where, orderBy, limit].join(' ') + const result = await this.execute(sql, values, 'find messages groups') + + return result.map((it: any) => toMessagesGroup(it)) } - const toDateCondition = getCondition('mg', 'to_date', index, params.toDate, 'timestamptz') - if (toDateCondition != null) { - where.push(toDateCondition.where) - values.push(toDateCondition.value) - index++ + buildMessagesGroupWhere(params: FindMessagesGroupsParams): { + where: string + values: any[] + } { + const where: string[] = ['mg.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + + let index = 2 + + where.push(`mg.card_id = $${index++}::varchar`) + values.push(params.card) + + if (params.blobId != null) { + where.push(`mg.blob_id = $${index++}`) + values.push(params.blobId) + } + + const fromDateCondition = getCondition('mg', 'from_date', index, params.fromDate, 'timestamptz') + if (fromDateCondition != null) { + where.push(fromDateCondition.where) + values.push(fromDateCondition.value) + index++ + } + + const toDateCondition = getCondition('mg', 'to_date', index, params.toDate, 'timestamptz') + if (toDateCondition != null) { + where.push(toDateCondition.where) + values.push(toDateCondition.value) + index++ + } + + if (params.withPatches === true) { + where.push(`sub.patches IS NOT NULL`) + } + + return {where: `WHERE ${where.join(' AND ')}`, values} } - - return { where: `WHERE ${where.join(' AND ')}`, values } - } } diff --git a/packages/cockroach/src/utils.ts b/packages/cockroach/src/utils.ts new file mode 100644 index 00000000000..6ffc522d3fe --- /dev/null +++ b/packages/cockroach/src/utils.ts @@ -0,0 +1,50 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +export function injectVars(sql: string, values: any[]): string { + return sql.replaceAll(/(\$\d+)/g, (_, idx) => { + return escape(values[parseInt(idx.substring(1)) - 1]) + }) +} + +function escape(value: any): string { + if (value === null || value === undefined) { + return 'NULL' + } + + if (Array.isArray(value)) { + return 'ARRAY[' + value.map(escape).join(',') + ']' + } + + if (value instanceof Date) { + return `'${value.toISOString()}'` + } + + switch (typeof value) { + case 'number': + if (isNaN(value) || !isFinite(value)) { + throw new Error('Invalid number value') + } + return value.toString() + case 'boolean': + return value ? 
'TRUE' : 'FALSE' + case 'string': + return `'${value.replace(/'/g, "''")}'` + case 'bigint': + return value.toString() + default: + throw new Error(`Unsupported value type: ${typeof value}`) + } +} diff --git a/packages/examples/package.json b/packages/examples/package.json index e79ec57aa20..51a0de928af 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/query/package.json b/packages/query/package.json index 87dbd672de3..642afe860b9 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14", "@types/crypto-js": "^4.2.2" diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 67f6d7c742e..877fe3363e1 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -134,6 +134,7 @@ export class MessagesQuery implements PagedQuery { .then(() => this.notify()) .catch((error) => { console.error('Failed to load messages', error) + void this.notify() }) } } @@ -146,6 +147,7 @@ export class MessagesQuery implements PagedQuery { .then(() => this.notify()) .catch((error) => { console.error('Failed to load messages', error) + void this.notify() }) } } @@ -567,7 +569,7 @@ export class MessagesQuery implements PagedQuery { const messageId = BigInt(patch.message) const group = this.groupsBuffer.find((it) => BigInt(it.fromId) <= messageId && BigInt(it.toId) >= messageId) - if (group != null) { + if (group != null && group.patches != null) { group.patches.push(patch) } diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 084cb2176c4..4af7d6f0bc6 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,14 +1,19 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.47", - "main": "./dist/index.js", + "version": "0.1.59", + "main": "./dist/index.cjs", "module": "./dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser && bun run bundle:node", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/sdk-types/src/responseEvent.ts b/packages/sdk-types/src/responseEvent.ts index 731e796f297..32ebcf03926 100644 --- a/packages/sdk-types/src/responseEvent.ts +++ b/packages/sdk-types/src/responseEvent.ts @@ -25,7 +25,8 @@ import type { Reaction, SocialID, Notification, 
- Thread + Thread, + MessagesGroup } from '@hcengineering/communication-types' export enum ResponseEventType { @@ -37,6 +38,7 @@ export enum ResponseEventType { AttachmentCreated = 'attachmentCreated', AttachmentRemoved = 'attachmentRemoved', ThreadCreated = 'threadCreated', + MessagesGroupCreated = 'messagesGroupCreated', NotificationCreated = 'notificationCreated', NotificationRemoved = 'notificationRemoved', NotificationContextCreated = 'notificationContextCreated', @@ -58,6 +60,7 @@ export type ResponseEvent = | NotificationContextRemovedEvent | NotificationContextUpdatedEvent | ThreadCreatedEvent + | MessagesGroupCreatedEvent export interface MessageCreatedEvent { type: ResponseEventType.MessageCreated @@ -103,6 +106,11 @@ export interface AttachmentRemovedEvent { attachment: CardID } +export interface MessagesGroupCreatedEvent { + type: ResponseEventType.MessagesGroupCreated + group: MessagesGroup +} + export interface ThreadCreatedEvent { type: ResponseEventType.ThreadCreated thread: Thread diff --git a/packages/server/package.json b/packages/server/package.json index bd98818cb87..3bd53817579 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:node", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.js" + }, "devDependencies": { "@types/bun": "^1.1.14", "@types/express": "^5.0.0", @@ -17,6 +21,7 @@ }, "dependencies": { "@hcengineering/core": "0.7.2", + "@hcengineering/server-token": "0.7.2", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*" diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 7dae3e18a4f..08451608dba 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -58,7 +58,8 @@ import { type CreateThreadEvent, type ConnectionInfo, type RemovePatchesEvent, - type RemoveMessagesGroupEvent + type RemoveMessagesGroupEvent, + type MessagesGroupCreatedEvent } from '@hcengineering/communication-sdk-types' export interface Result { @@ -356,8 +357,20 @@ export class EventProcessor { const { fromDate, toDate, count, fromId, toId, card, blobId } = event.group await this.db.createMessagesGroup(card, blobId, fromDate, toDate, fromId, toId, count) + const responseEvent: MessagesGroupCreatedEvent = { + type: ResponseEventType.MessagesGroupCreated, + group: { + card, + blobId, + fromDate, + toDate, + fromId, + toId, + count + } + } return { - responseEvent: undefined, + responseEvent, result: {} } } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index f4282ade666..183580da18e 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -31,17 +31,19 @@ import type { } from '@hcengineering/communication-sdk-types' import { Manager, type BroadcastSessionsFunc } from './manager' +import { getMetadata, type Metadata } from './metadata' export class Api implements ServerApi { private readonly manager: Manager private constructor( private readonly ctx: MeasureContext, + protected readonly metadata: Metadata, private readonly workspace: WorkspaceID, private readonly db: 
DbAdapter, private readonly broadcast: BroadcastSessionsFunc ) { - this.manager = new Manager(this.ctx, this.db, this.workspace, this.broadcast) + this.manager = new Manager(this.ctx, this.metadata, this.db, this.workspace, this.broadcast) } static async create( @@ -51,7 +53,8 @@ export class Api implements ServerApi { broadcast: BroadcastSessionsFunc ): Promise { const db = await createDbAdapter(dbUrl, workspace, ctx, { withLogs: true }) - return new Api(ctx, workspace, db, broadcast) + const metadata = getMetadata() + return new Api(ctx, metadata, workspace, db, broadcast) } async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index 6b8c3a57095..a9936d93aa0 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -37,6 +37,7 @@ import type { MeasureContext } from '@hcengineering/core' import { Triggers } from './triggers' import { EventProcessor } from './eventProcessor' +import type { Metadata } from './metadata.ts' type QueryId = number | string type QueryType = 'message' | 'notification' | 'context' @@ -57,12 +58,13 @@ export class Manager { constructor( private readonly ctx: MeasureContext, + private readonly metadata: Metadata, private readonly db: DbAdapter, private readonly workspace: WorkspaceID, private readonly broadcast: BroadcastSessionsFunc ) { this.eventProcessor = new EventProcessor(db, this.workspace) - this.triggers = new Triggers(db, this.workspace) + this.triggers = new Triggers(this.metadata, db, this.workspace) } async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { @@ -136,7 +138,11 @@ export class Manager { } if (sessionIds.length > 0) { - this.broadcast(this.ctx, sessionIds, event) + try { + this.broadcast(this.ctx, sessionIds, event) + } catch (e) { + console.error(e) + } } } @@ -198,6 +204,8 @@ export class Manager { return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 case ResponseEventType.NotificationContextUpdated: return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + case ResponseEventType.MessagesGroupCreated: + return false } } diff --git a/packages/server/src/metadata.ts b/packages/server/src/metadata.ts new file mode 100644 index 00000000000..bb261412e28 --- /dev/null +++ b/packages/server/src/metadata.ts @@ -0,0 +1,9 @@ +export interface Metadata { + msg2fileUrl: string +} + +export function getMetadata(): Metadata { + return { + msg2fileUrl: process.env.MSG2FILE_URL ?? 
'' + } +} diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 57e0c379620..680cf8a054a 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -19,23 +19,47 @@ import { type ResponseEvent, ResponseEventType, type MessageRemovedEvent, - type ConnectionInfo + type ConnectionInfo, + type PatchCreatedEvent, + type MessagesGroupCreatedEvent } from '@hcengineering/communication-sdk-types' -import { type WorkspaceID, PatchType, type Patch } from '@hcengineering/communication-types' +import { type WorkspaceID, PatchType, type Patch, type CardID } from '@hcengineering/communication-types' +import { concatLink, systemAccountUuid } from '@hcengineering/core' +import { generateToken } from '@hcengineering/server-token' + +import type { Metadata } from './metadata.ts' export class Triggers { + private readonly registeredCards: Set = new Set() + constructor( + private readonly metadata: Metadata, private readonly db: DbAdapter, private readonly workspace: WorkspaceID ) {} async process(event: ResponseEvent, info: ConnectionInfo): Promise { - switch (event.type) { - case ResponseEventType.MessageCreated: - return await this.onMessageCreated(event) - case ResponseEventType.MessageRemoved: - return await this.onMessageRemoved(event, info) + try { + switch (event.type) { + case ResponseEventType.MessageCreated: + return await this.onMessageCreated(event) + case ResponseEventType.MessageRemoved: + return await this.onMessageRemoved(event, info) + case ResponseEventType.PatchCreated: + return await this.onPatchCreated(event, info) + case ResponseEventType.MessagesGroupCreated: + return await this.onMessagesGroupCreated(event) + } + } catch (err: any) { + console.error(err) + return [] } + + return [] + } + + async onMessagesGroupCreated(event: MessagesGroupCreatedEvent): Promise { + this.registeredCards.delete(event.group.card) return [] } @@ -67,6 +91,15 @@ export class Triggers { } async onMessageCreated(event: MessageCreatedEvent): Promise { + return (await Promise.all([this.registerCard(event.message.card), this.updateThread(event)])).flat() + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async onPatchCreated(event: PatchCreatedEvent, info: ConnectionInfo): Promise { + return this.registerCard(event.card) + } + + async updateThread(event: MessageCreatedEvent): Promise { const { message } = event const thread = await this.db.findThread(message.card) if (thread === undefined) return [] @@ -90,4 +123,24 @@ export class Triggers { } ] } + + async registerCard(card: CardID): Promise { + if (this.registeredCards.has(card) || this.metadata.msg2fileUrl === '') return [] + + try { + const token = generateToken(systemAccountUuid, this.workspace) + await fetch(concatLink(this.metadata.msg2fileUrl, '/register/:card').replaceAll(':card', card), { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'Bearer ' + token + } + }) + this.registeredCards.add(card) + } catch (e) { + console.error(e) + } + + return [] + } } diff --git a/packages/shared/package.json b/packages/shared/package.json index ac4216412de..6936a9129bd 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,14 +1,19 @@ { "name": "@hcengineering/communication-shared", - "version": "0.1.47", - "main": "dist/index.js", + "version": "0.1.59", + "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" 
], + "scripts": { + "bundle": "bun run bundle:browser && bun run bundle:node", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs" + }, "devDependencies": { "@types/bun": "^1.1.14", "@types/js-yaml": "^4.0.9" diff --git a/packages/shared/src/files.ts b/packages/shared/src/files.ts index 7601e96c77e..8159556901a 100644 --- a/packages/shared/src/files.ts +++ b/packages/shared/src/files.ts @@ -33,7 +33,32 @@ export async function loadGroupFile( const url = getFileUrl(workspace, filesUrl, group.blobId) const file = await retry(() => fetchFile(url), options) - const [metadata, messages] = yaml.loadAll(file) as [FileMetadata, FileMessage[]] + return parseYaml(file) +} + +async function fetchFile(url: string): Promise { + const res = await fetch(url) + + if (!res.ok) { + throw new Error(`Failed to fetch file: ${res.statusText}`) + } + + if (res.body == null) { + throw new Error('Missing response body') + } + + return await res.text() +} + +function getFileUrl(workspace: WorkspaceID, urlTemplate: string, file: string): string { + return urlTemplate + .replaceAll(':filename', encodeURIComponent(file)) + .replaceAll(':workspace', encodeURIComponent(workspace)) + .replaceAll(':blobId', encodeURIComponent(file)) +} + +export function parseYaml(data: string): ParsedFile { + const [metadata, messages] = yaml.loadAll(data) as [FileMetadata, FileMessage[]] return { metadata, @@ -44,6 +69,15 @@ export async function loadGroupFile( edited: message.edited, creator: message.creator, created: message.created, + thread: message.thread + ? { + card: metadata.card, + message: message.id, + thread: message.thread.thread, + repliesCount: message.thread.repliesCount, + lastReply: message.thread.lastReply + } + : undefined, attachments: [], reactions: message.reactions.map((reaction) => ({ message: message.id, @@ -54,20 +88,3 @@ export async function loadGroupFile( })) } } - -async function fetchFile(url: string): Promise { - const res = await fetch(url) - - if (!res.ok) { - throw new Error(`Failed to fetch file: ${res.statusText}`) - } - - return await res.text() -} - -function getFileUrl(workspace: WorkspaceID, urlTemplate: string, file: string): string { - return urlTemplate - .replaceAll(':filename', encodeURIComponent(file)) - .replaceAll(':workspace', encodeURIComponent(workspace)) - .replaceAll(':blobId', encodeURIComponent(file)) -} diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 4f2d9621c23..11d2e493684 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,14 +1,18 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.47", + "version": "0.1.59", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", - "type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/types/package.json b/packages/types/package.json index 2f07b5b9cf8..14e12b16337 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,14 +1,19 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.47", - "main": "dist/index.js", + "version": "0.1.59", + "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", - 
"type": "module", "files": [ "dist/index.js", + "dist/index.cjs", "types/**/*.d.ts" ], + "scripts": { + "bundle": "bun run bundle:browser && bun run bundle:node", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs" + }, "devDependencies": { "@types/bun": "^1.1.14" }, diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts index b2af880b6e8..50688055884 100644 --- a/packages/types/src/file.ts +++ b/packages/types/src/file.ts @@ -42,7 +42,6 @@ export interface FileThread { thread: CardID repliesCount: number lastReply: Date - replied: SocialID[] } export interface ParsedFile { diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index b1165699912..f9c3a7a7606 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -46,7 +46,7 @@ export interface MessagesGroup { fromDate: Date toDate: Date count: number - patches: Patch[] + patches?: Patch[] } export interface Patch { diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 3c46b4d2d7c..474ef6b2863 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -61,8 +61,9 @@ export interface FindNotificationContextParams extends FindParams { } export interface FindMessagesGroupsParams extends FindParams { - card?: CardID + card: CardID blobId?: BlobID + withPatches?: boolean fromDate?: Partial> | Date toDate?: Partial> | Date orderBy?: 'fromDate' | 'toDate' diff --git a/scripts/build.sh b/scripts/bundle.sh similarity index 67% rename from scripts/build.sh rename to scripts/bundle.sh index 24777cd63b5..6d1ec2b3b25 100644 --- a/scripts/build.sh +++ b/scripts/bundle.sh @@ -5,25 +5,28 @@ PACKAGES_DIR="./packages" for package in "$PACKAGES_DIR"/*; do if [ -d "$package" ]; then PACKAGE_NAME=$(basename "$package") - ENTRY_POINT="$package/src/index.ts" - OUT_DIR="$package/dist" - TYPES_OUT_DIR="$package/types" + PACKAGE_JSON="$package/package.json" TSCONFIG="$package/tsconfig.json" + TYPES_OUT_DIR="$package/types" echo "Building package: $PACKAGE_NAME" - if [ -f "$ENTRY_POINT" ]; then + if [ -f "$PACKAGE_JSON" ]; then tsc --project "$TSCONFIG" --emitDeclarationOnly --declarationDir "$TYPES_OUT_DIR" - bun build "$ENTRY_POINT" --outdir "$OUT_DIR" --target bun - if [ $? -eq 0 ]; then + pushd "$package" > /dev/null + bun run bundle + RESULT=$? 
+ popd > /dev/null + + if [ $RESULT -eq 0 ]; then echo "Package $PACKAGE_NAME built successfully" else echo "Error building package $PACKAGE_NAME" continue fi else - echo "Entry point $ENTRY_POINT not found for package $PACKAGE_NAME" + echo "package.json not found for package $PACKAGE_NAME" continue fi fi From b13b61c6c0cc1bf2ea716969bfed9ea031491a98 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 3 Mar 2025 19:15:25 +0400 Subject: [PATCH 050/636] Fix connection info (#34) Signed-off-by: Kristina Fefelova --- bun.lock | 22 +++++++++++----------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/serverApi.ts | 5 +++-- packages/server/package.json | 2 +- packages/server/src/eventProcessor.ts | 13 +++++++------ packages/shared/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 2 +- 14 files changed, 32 insertions(+), 30 deletions(-) diff --git a/bun.lock b/bun.lock index 29ea0a0d256..8a3f4283d7b 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -64,7 +64,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -81,7 +81,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-client-ws": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -95,7 +95,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -112,7 +112,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -125,7 +125,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -145,7 +145,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-types": 
"workspace:*", "js-yaml": "^4.1.0", @@ -160,7 +160,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -177,7 +177,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.59", + "version": "0.1.60", "dependencies": { "@hcengineering/card": "0.7.2", "@hcengineering/core": "0.7.2", diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 8e4c9b04561..94862e65988 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 37cedb24ab5..91a0c2547f0 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index b6e08be8b06..20ccf10ff24 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 888cf8ec652..b886d133bef 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index 51a0de928af..b5b53f7266d 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index 642afe860b9..9e82b22b7db 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 4af7d6f0bc6..ae99611870a 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.59", + "version": "0.1.60", "main": "./dist/index.cjs", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 7ddf2e0f9b9..2b91a0fc16d 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -28,7 +28,8 @@ export interface ConnectionInfo { sessionId: string 
personalWorkspace: WorkspaceID socialIds: SocialID[] - isSystem: boolean + //TODO: AccountUUID + account: string } export interface ServerApi { @@ -37,8 +38,8 @@ export interface ServerApi { event(info: ConnectionInfo, event: RequestEvent): Promise - closeSession(sessionId: string): Promise unsubscribeQuery(info: ConnectionInfo, id: number): Promise + closeSession(sessionId: string): Promise close(): Promise } diff --git a/packages/server/package.json b/packages/server/package.json index 3bd53817579..1336aac2844 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 08451608dba..689921a56c4 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -61,6 +61,7 @@ import { type RemoveMessagesGroupEvent, type MessagesGroupCreatedEvent } from '@hcengineering/communication-sdk-types' +import { systemAccountUuid } from '@hcengineering/core' export interface Result { responseEvent?: ResponseEvent @@ -160,7 +161,7 @@ export class EventProcessor { } private async removeMessage(event: RemoveMessageEvent, info: ConnectionInfo): Promise { - const socialIds = info.isSystem ? undefined : info.socialIds + const socialIds = systemAccountUuid === info.account ? undefined : info.socialIds await this.db.removeMessage(event.card, event.message, socialIds) const responseEvent: MessageRemovedEvent = { @@ -176,7 +177,7 @@ export class EventProcessor { } private async removeMessages(event: RemoveMessagesEvent, info: ConnectionInfo): Promise { - if (!info.isSystem) { + if (systemAccountUuid !== info.account) { throw new Error('Forbidden') } await this.db.removeMessages(event.card, event.fromId, event.toId) @@ -187,7 +188,7 @@ export class EventProcessor { } private async removePatches(event: RemovePatchesEvent, info: ConnectionInfo): Promise { - if (!info.isSystem) { + if (systemAccountUuid !== info.account) { throw new Error('Forbidden') } await this.db.removePatches(event.card, event.fromId, event.toId) @@ -351,7 +352,7 @@ export class EventProcessor { } async createMessagesGroup(event: CreateMessagesGroupEvent, info: ConnectionInfo): Promise { - if (!info.isSystem) { + if (systemAccountUuid !== info.account) { throw new Error('Forbidden') } const { fromDate, toDate, count, fromId, toId, card, blobId } = event.group @@ -376,7 +377,7 @@ export class EventProcessor { } async removeMessagesGroup(event: RemoveMessagesGroupEvent, info: ConnectionInfo): Promise { - if (!info.isSystem) { + if (systemAccountUuid !== info.account) { throw new Error('Forbidden') } await this.db.removeMessagesGroup(event.card, event.blobId) @@ -408,7 +409,7 @@ export class EventProcessor { } private checkCreator(info: ConnectionInfo, creator: SocialID): void { - if (!info.socialIds.includes(creator) && !info.isSystem) { + if (!info.socialIds.includes(creator) && systemAccountUuid !== info.account) { throw new Error('Forbidden') } } diff --git a/packages/shared/package.json b/packages/shared/package.json index 6936a9129bd..7995d73c2b9 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-shared", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.cjs", "module": "dist/index.js", "types": 
"./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index 11d2e493684..d542d5ff1f0 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 14e12b16337..19b7cbb8812 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.59", + "version": "0.1.60", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", From c891025321d3c21fadd709bcb533e54d9fca5824 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 3 Mar 2025 19:45:10 +0400 Subject: [PATCH 051/636] Fix connection info (#35) Signed-off-by: Kristina Fefelova --- bun.lock | 49 ++++++++++++++------------- packages/client-query/package.json | 2 +- packages/client-sqlite/package.json | 2 +- packages/client-ws/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/examples/package.json | 2 +- packages/query/package.json | 2 +- packages/sdk-types/package.json | 5 +-- packages/sdk-types/src/serverApi.ts | 10 ++---- packages/server/package.json | 6 ++-- packages/server/src/eventProcessor.ts | 48 ++++++++++++++++---------- packages/server/src/manager.ts | 31 +++++++++-------- packages/server/src/triggers.ts | 2 +- packages/shared/package.json | 2 +- packages/sqlite-wasm/package.json | 2 +- packages/types/package.json | 6 ++-- 16 files changed, 94 insertions(+), 79 deletions(-) diff --git a/bun.lock b/bun.lock index 8a3f4283d7b..e341aec6155 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/client-sqlite": { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-sqlite-wasm": "workspace:*", @@ -48,7 +48,7 @@ }, "packages/client-ws": { "name": "@hcengineering/communication-client-ws", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-client-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -64,7 +64,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -81,7 +81,7 @@ }, "packages/examples": { "name": "@hcengineering/communication-examples", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-client-ws": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -95,7 +95,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -112,9 +112,10 @@ }, "packages/sdk-types": { "name": 
"@hcengineering/communication-sdk-types", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-types": "workspace:*", + "@hcengineering/core": "0.7.15", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -125,13 +126,13 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.7.2", - "@hcengineering/server-token": "0.7.2", + "@hcengineering/core": "0.7.15", + "@hcengineering/server-token": "0.7.15", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -145,7 +146,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-types": "workspace:*", "js-yaml": "^4.1.0", @@ -160,7 +161,7 @@ }, "packages/sqlite-wasm": { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -177,10 +178,10 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.60", + "version": "0.1.61", "dependencies": { - "@hcengineering/card": "0.7.2", - "@hcengineering/core": "0.7.2", + "@hcengineering/card": "0.7.15", + "@hcengineering/core": "0.7.15", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -221,9 +222,9 @@ "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.6.0", "", { "dependencies": { "tslib": "2" } }, "sha512-4rB4g+3hESy1bHSBG3tDFaMY2CH67iT7yne1e+0CLTsGLDcmoEWWpJjjpWVaYgYfYuohIRuo0E+N536gd2ZHZA=="], - "@hcengineering/analytics": ["@hcengineering/analytics@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.2/6169c108c11d7903a30eca0e09dfc79facdc1170", { "dependencies": { "@hcengineering/platform": "^0.7.2" } }, "sha512-2+6O2f3yrrtho0SXVSTd44t2RFOdeR6v9IXimYhyvy+vQaj6i5W2i5rGNyxjS7wZj7u/ft/LwcusbL+yvR7rQA=="], + "@hcengineering/analytics": ["@hcengineering/analytics@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.15/46f0006d8f4ba8ec04105770e6418251def15296", { "dependencies": { "@hcengineering/platform": "^0.7.15" } }, "sha512-U+tswirTFX0z7Sr51zfOEXbtrtfnf2xLzWTIrPip929g+oULJjCKMqoh+3HHVCF4HFm97x0P3qaRtW2hLgxyAQ=="], - "@hcengineering/card": ["@hcengineering/card@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.2/f4f615f288a91cc875a6c20d75f8383010328003", { "dependencies": { "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "@hcengineering/ui": "^0.7.2" } }, "sha512-e1H1Q6ncH+wyXXD9nLNjADd+6TFaPBuWywuP2gLPYubHPlJyeB7KVCIt81H+3axKTTDLCMjKqWpBXKHawcK5pQ=="], + "@hcengineering/card": ["@hcengineering/card@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.15/70ba947306289e9ec1719ec4670abb5abadd9a74", { "dependencies": { "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "@hcengineering/ui": "^0.7.15" } }, "sha512-xIM9UwFqhyjD9EJs0egn4kf3Q9Br8+spYHkzNl+5IPdL5d5itDIuy5HS9ng6XZ1BXApauo8MTl1DcaqJMneNbw=="], "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], @@ -247,15 +248,15 @@ "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], - 
"@hcengineering/core": ["@hcengineering/core@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.2/4c8d66ef6468b2ed9d4d43857b11a78f6e71d48f", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/platform": "^0.7.2", "fast-equals": "^5.2.2" } }, "sha512-K8wg4c3hkn98wo/C+lS9z3QD35lIgUc0APpzOE0La6XFeV06H4Iztgvd4xR/IgVGRX3tFbyycsJcSgQXj/Z/Kw=="], + "@hcengineering/core": ["@hcengineering/core@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.15/785cb20f9adc4a3a3c2179a0ae38316f4b4f933f", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/platform": "^0.7.15", "fast-equals": "^5.2.2" } }, "sha512-YhRRdtcSuNSQUdh4TO5u3u/w/vE7fcildc51wnL7Erw7VZ1rVANEx9a3mScnViVH89xw0oAcfDAm/AbH5+EZcQ=="], - "@hcengineering/platform": ["@hcengineering/platform@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.2/854c94ef4607aba2306ec92a32f41cce30139de5", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-RtUWhC0MPjFABojyOSGoOCLZGPVKiZkfZB9n+7Mwbc7tBznVQKeXBBOVUq6QpC50cLyx7ce8WVJBNVIpggwaeQ=="], + "@hcengineering/platform": ["@hcengineering/platform@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.15/3e6c41c4abfd6425be49d9b31afef627dcafa4cb", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-zv6afaQVu/T0MXrWch8JaxdVjLSUHDpIx3jgY1JE5xSy+FN8bjC30FR/RGavuMtm4z25rFsZQRPfr4qmZYZirQ=="], - "@hcengineering/server-token": ["@hcengineering/server-token@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.2/2bf94b717348f9ce7256fd3a3fec39869c044e4e", { "dependencies": { "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "jwt-simple": "^0.5.6" } }, "sha512-WcGkUOhJJvRZZbPYSIuG0S57r5f1sgrQEm+kz8D2Ki4C+RMEwWYjRCYyZhWiE9QxbetBJtcwneKI+jfpDItvag=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.15/5443096926f96919ed8ed748e90e3474137519ec", { "dependencies": { "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "jwt-simple": "^0.5.6" } }, "sha512-XEh0nDSSxMn34a9AHeXIgRh6iO025hKmGNXLZlXnm4kRIYODzMLhigds2H/RvmUjznfUTm9Bpy3EFLq9WFmzWg=="], - "@hcengineering/theme": ["@hcengineering/theme@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.2/a6f832fd9ddbb3e5683f361e54d8bb4f3e096be8", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/platform": "^0.7.2", "svelte": "^4.2.19" } }, "sha512-Sqrt6ETmBKGk8SiSD/gvfitqP1GUaSpfiGy4I7xaAgXuyeWENni+YJ9x4C+9yTWT7caRnKu5WKbv/zN1PpWuqg=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.15/f02e70ea8421f0cd3e5b52c34ee69488d361b3e6", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/platform": "^0.7.15", "svelte": "^4.2.19" } }, "sha512-d+rX4Jp4J4YELBi763M5Zfe+3x5gyQjV1NcuP1GIvSCe9tXNboKEp5X2SPdqFlP1sBfE5sgVECIAtmZkhMAqcQ=="], - "@hcengineering/ui": ["@hcengineering/ui@0.7.2", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.2/40ec34d2d251af4ad2b8a26aa89c12894cbe5eda", { "dependencies": { "@hcengineering/analytics": "^0.7.2", "@hcengineering/core": "^0.7.2", "@hcengineering/platform": "^0.7.2", "@hcengineering/theme": "^0.7.2", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emoji-regex": "^10.1.0", "fast-equals": "^5.2.2", "svelte": "^4.2.19" } }, 
"sha512-5t6JFO65Tm6iZqveEvBjfTM8DDZ9ca9uBXj7GbtfTD/1K8WC1Yf3REsNDW2I9DFYzxKPBayx9M4TRrJBseHlWg=="], + "@hcengineering/ui": ["@hcengineering/ui@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.15/8462766246888da93d3f51e19e0b11d7c40312f6", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "@hcengineering/theme": "^0.7.15", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "svelte": "^4.2.19" } }, "sha512-OkllPX5wH3eJ3WSEzr+PJ0H8H2ZyXWGNHdUmwQSPW6UTZqAYia9WtCVO3sxR7SMtZtjAnKPitRycSG4Q/z8GVA=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], @@ -393,7 +394,7 @@ "dompurify": ["dompurify@3.2.4", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg=="], - "emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], + "emojibase": ["emojibase@16.0.0", "", {}, "sha512-Nw2m7JLIO4Ou2X/yZPRNscHQXVbbr6SErjkJ7EooG7MbR3yDZszCv9KTizsXFc7yZl0n3WF+qUKIC/Lw6H9xaQ=="], "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], @@ -451,6 +452,8 @@ "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], + "hls.js": ["hls.js@1.5.20", "", {}, "sha512-uu0VXUK52JhihhnN/MVVo1lvqNNuhoxkonqgO3IpjvQiGpJBdIXMGkofjQb/j9zvV7a1SW8U9g1FslWx/1HOiQ=="], + "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 94862e65988..853d4cd8a91 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json index 91a0c2547f0..fb27de5dd5f 100644 --- a/packages/client-sqlite/package.json +++ b/packages/client-sqlite/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-ws/package.json b/packages/client-ws/package.json index 20ccf10ff24..542e6b52fe3 100644 --- a/packages/client-ws/package.json +++ b/packages/client-ws/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-ws", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index b886d133bef..095bdf4c0d1 100644 --- a/packages/cockroach/package.json 
+++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/examples/package.json b/packages/examples/package.json index b5b53f7266d..c83e4869f4c 100644 --- a/packages/examples/package.json +++ b/packages/examples/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-examples", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/query/package.json b/packages/query/package.json index 9e82b22b7db..d2f5783f9c2 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index ae99611870a..df292a920bd 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.60", + "version": "0.1.61", "main": "./dist/index.cjs", "module": "./dist/index.js", "types": "./types/index.d.ts", @@ -18,7 +18,8 @@ "@types/bun": "^1.1.14" }, "dependencies": { - "@hcengineering/communication-types": "workspace:*" + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/core": "0.7.15" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 2b91a0fc16d..a62d38c3bd1 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -17,19 +17,15 @@ import type { FindMessagesGroupsParams, FindMessagesParams, Message, - MessagesGroup, - SocialID, - WorkspaceID + MessagesGroup } from '@hcengineering/communication-types' +import type { Account } from '@hcengineering/core' import type { EventResult, RequestEvent } from './requestEvent.ts' export interface ConnectionInfo { sessionId: string - personalWorkspace: WorkspaceID - socialIds: SocialID[] - //TODO: AccountUUID - account: string + account: Account } export interface ServerApi { diff --git a/packages/server/package.json b/packages/server/package.json index 1336aac2844..d7196acfc9d 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -20,8 +20,8 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/core": "0.7.2", - "@hcengineering/server-token": "0.7.2", + "@hcengineering/core": "0.7.15", + "@hcengineering/server-token": "0.7.15", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*" diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 689921a56c4..13ed0e71498 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -61,7 +61,7 @@ import { type RemoveMessagesGroupEvent, type MessagesGroupCreatedEvent } from '@hcengineering/communication-sdk-types' -import { systemAccountUuid } from '@hcengineering/core' +import { systemAccountUuid, 
type Account } from '@hcengineering/core' export interface Result { responseEvent?: ResponseEvent @@ -114,7 +114,7 @@ export class EventProcessor { } private async createMessage(event: CreateMessageEvent, info: ConnectionInfo): Promise { - this.checkCreator(info, event.creator) + this.checkCreator(info.account, event.creator) const created = new Date() const id = await this.db.createMessage(event.card, event.content, event.creator, created) @@ -138,7 +138,7 @@ export class EventProcessor { } private async createPatch(event: CreatePatchEvent, info: ConnectionInfo): Promise { - this.checkCreator(info, event.creator) + this.checkCreator(info.account, event.creator) const created = new Date() await this.db.createPatch(event.card, event.message, PatchType.update, event.content, event.creator, created) @@ -161,7 +161,7 @@ export class EventProcessor { } private async removeMessage(event: RemoveMessageEvent, info: ConnectionInfo): Promise { - const socialIds = systemAccountUuid === info.account ? undefined : info.socialIds + const socialIds = systemAccountUuid === info.account.uuid ? undefined : info.account.socialIds await this.db.removeMessage(event.card, event.message, socialIds) const responseEvent: MessageRemovedEvent = { @@ -177,7 +177,7 @@ export class EventProcessor { } private async removeMessages(event: RemoveMessagesEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account) { + if (systemAccountUuid !== info.account.uuid) { throw new Error('Forbidden') } await this.db.removeMessages(event.card, event.fromId, event.toId) @@ -188,7 +188,7 @@ export class EventProcessor { } private async removePatches(event: RemovePatchesEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account) { + if (systemAccountUuid !== info.account.uuid) { throw new Error('Forbidden') } await this.db.removePatches(event.card, event.fromId, event.toId) @@ -199,7 +199,7 @@ export class EventProcessor { } private async createReaction(event: CreateReactionEvent, info: ConnectionInfo): Promise { - this.checkCreator(info, event.creator) + this.checkCreator(info.account, event.creator) const created = new Date() await this.db.createReaction(event.card, event.message, event.reaction, event.creator, created) @@ -221,7 +221,7 @@ export class EventProcessor { } private async removeReaction(event: RemoveReactionEvent, info: ConnectionInfo): Promise { - this.checkCreator(info, event.creator) + this.checkCreator(info.account, event.creator) await this.db.removeReaction(event.card, event.message, event.reaction, event.creator) const responseEvent: ReactionRemovedEvent = { type: ResponseEventType.ReactionRemoved, @@ -237,7 +237,7 @@ export class EventProcessor { } private async createAttachment(event: CreateAttachmentEvent, info: ConnectionInfo): Promise { - this.checkCreator(info, event.creator) + this.checkCreator(info.account, event.creator) const created = new Date() await this.db.createAttachment(event.message, event.card, event.creator, created) @@ -283,12 +283,15 @@ export class EventProcessor { } } + //eslint-disable-next-line @typescript-eslint/no-unused-vars private async removeNotification(event: RemoveNotificationEvent, info: ConnectionInfo): Promise { await this.db.removeNotification(event.message, event.context) const responseEvent: NotificationRemovedEvent = { type: ResponseEventType.NotificationRemoved, - personalWorkspace: info.personalWorkspace, + // personalWorkspace: info.personalWorkspace, + // TODO: add personal workspace + personalWorkspace: '' as WorkspaceID, 
message: event.message, context: event.context } @@ -300,15 +303,18 @@ export class EventProcessor { private async createNotificationContext( event: CreateNotificationContextEvent, + //eslint-disable-next-line @typescript-eslint/no-unused-vars info: ConnectionInfo ): Promise { - const id = await this.db.createContext(info.personalWorkspace, event.card, event.lastView, event.lastUpdate) + // TODO: add personal workspace + const personalWorkspace = '' as WorkspaceID + const id = await this.db.createContext(personalWorkspace, event.card, event.lastView, event.lastUpdate) const responseEvent: NotificationContextCreatedEvent = { type: ResponseEventType.NotificationContextCreated, context: { id, workspace: this.workspace, - personalWorkspace: info.personalWorkspace, + personalWorkspace, card: event.card, lastView: event.lastView, lastUpdate: event.lastUpdate @@ -322,12 +328,15 @@ export class EventProcessor { private async removeNotificationContext( event: RemoveNotificationContextEvent, + // eslint-disable-next-line @typescript-eslint/no-unused-vars info: ConnectionInfo ): Promise { await this.db.removeContext(event.context) const responseEvent: NotificationContextRemovedEvent = { type: ResponseEventType.NotificationContextRemoved, - personalWorkspace: info.personalWorkspace, + // personalWorkspace: info.personalWorkspace, + // TODO: add personal workspace + personalWorkspace: '' as WorkspaceID, context: event.context } return { @@ -336,12 +345,15 @@ export class EventProcessor { } } + // eslint-disable-next-line @typescript-eslint/no-unused-vars async updateNotificationContext(event: UpdateNotificationContextEvent, info: ConnectionInfo): Promise { await this.db.updateContext(event.context, event.update) const responseEvent: NotificationContextUpdatedEvent = { type: ResponseEventType.NotificationContextUpdated, - personalWorkspace: info.personalWorkspace, + // personalWorkspace: info.personalWorkspace, + // TODO: add personal workspace + personalWorkspace: '' as WorkspaceID, context: event.context, update: event.update } @@ -352,7 +364,7 @@ export class EventProcessor { } async createMessagesGroup(event: CreateMessagesGroupEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account) { + if (systemAccountUuid !== info.account.uuid) { throw new Error('Forbidden') } const { fromDate, toDate, count, fromId, toId, card, blobId } = event.group @@ -377,7 +389,7 @@ export class EventProcessor { } async removeMessagesGroup(event: RemoveMessagesGroupEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account) { + if (systemAccountUuid !== info.account.uuid) { throw new Error('Forbidden') } await this.db.removeMessagesGroup(event.card, event.blobId) @@ -408,8 +420,8 @@ export class EventProcessor { } } - private checkCreator(info: ConnectionInfo, creator: SocialID): void { - if (!info.socialIds.includes(creator) && systemAccountUuid !== info.account) { + private checkCreator(account: Account, creator: SocialID): void { + if (!account.socialIds.includes(creator) && systemAccountUuid !== account.uuid) { throw new Error('Forbidden') } } diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index a9936d93aa0..d1248f72ee8 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -45,7 +45,6 @@ type QueryType = 'message' | 'notification' | 'context' export type BroadcastSessionsFunc = (ctx: MeasureContext, sessionIds: string[], result: any) => void interface SessionInfo { - personalWorkspace: string messageQueries: Map 
notificationQueries: Map contextQueries: Map @@ -90,9 +89,8 @@ export class Manager { } subscribeQuery(info: ConnectionInfo, type: QueryType, queryId: number, params: Record): void { - const { sessionId, personalWorkspace } = info + const { sessionId } = info const data = this.dataBySessionId.get(sessionId) ?? { - personalWorkspace, messageQueries: new Map(), notificationQueries: new Map(), contextQueries: new Map() @@ -189,21 +187,26 @@ export class Manager { Array.from(info.messageQueries.values()) ) case ResponseEventType.NotificationCreated: - return ( - info.personalWorkspace === event.personalWorkspace && - this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) - ) + // return ( + // info.personalWorkspace === event.personalWorkspace && + // this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) + // ) + return false case ResponseEventType.NotificationRemoved: - return info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 + // return info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 + return false case ResponseEventType.NotificationContextCreated: - return ( - info.personalWorkspace === event.context.personalWorkspace && - this.matchContextQuery(event, Array.from(info.contextQueries.values())) - ) + // return ( + // info.personalWorkspace === event.context.personalWorkspace && + // this.matchContextQuery(event, Array.from(info.contextQueries.values())) + // ) + return false case ResponseEventType.NotificationContextRemoved: - return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + // return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + return false case ResponseEventType.NotificationContextUpdated: - return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + // return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 + return false case ResponseEventType.MessagesGroupCreated: return false } diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index 680cf8a054a..c82947a272c 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -69,7 +69,7 @@ export class Triggers { if (thread === undefined) return [] const date = new Date() - const socialId = info.socialIds[0] + const socialId = info.account.primarySocialId const patch: Patch = { message: thread.message, diff --git a/packages/shared/package.json b/packages/shared/package.json index 7995d73c2b9..7f96a8d7c31 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-shared", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json index d542d5ff1f0..d938150b235 100644 --- a/packages/sqlite-wasm/package.json +++ b/packages/sqlite-wasm/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.60", + "version": "0.1.61", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 19b7cbb8812..cac7fc44713 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": 
"0.1.60", + "version": "0.1.61", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -21,8 +21,8 @@ "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/core": "0.7.2", - "@hcengineering/card": "0.7.2" + "@hcengineering/core": "0.7.15", + "@hcengineering/card": "0.7.15" }, "repository": { "type": "git", From 5342edacf638d28ebb19259594268db1af535396 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Sun, 16 Mar 2025 01:00:35 +0300 Subject: [PATCH 052/636] add possible to schedule transcoding tasks Signed-off-by: denis-tingaikin --- .github/workflows/docker-push.yaml | 4 +- .golangci.yaml | 4 +- Dockerfile | 18 +- README.md | 100 +++-- cmd/huly-stream/main.go | 104 ----- cmd/stream/main.go | 71 ++++ go.mod | 12 +- go.sum | 18 +- internal/pkg/api/v1/recording/handler.go | 82 ++++ internal/pkg/api/v1/transcoding/handler.go | 66 ++++ internal/pkg/config/config.go | 28 +- internal/pkg/log/zap.go | 4 +- internal/pkg/manifest/hls.go | 2 +- internal/pkg/manifest/hls_test.go | 2 +- internal/pkg/pprof/pprof.go | 3 +- internal/pkg/resconv/resconv_test.go | 2 +- .../pkg/sharedpipe/shared_pipe_bench_test.go | 18 +- internal/pkg/storage/datalake.go | 254 ++++++++++++ internal/pkg/{uploader => storage}/s3.go | 89 +++-- internal/pkg/storage/storage.go | 55 +++ internal/pkg/token/token.go | 77 ++++ .../postpone_test.go => token/token_test.go} | 37 +- .../{transcoding => transcoder}/command.go | 14 +- .../command_test.go | 16 +- internal/pkg/transcoder/coordinator.go | 190 +++++++++ internal/pkg/transcoder/queue.go | 234 +++++++++++ .../worker.go => transcoder/stream.go} | 63 +-- internal/pkg/transcoding/limiter.go | 78 ---- internal/pkg/transcoding/limiter_test.go | 88 ----- internal/pkg/transcoding/scheduler.go | 167 -------- internal/pkg/uploader/datalake.go | 130 ------ internal/pkg/uploader/options.go | 14 +- internal/pkg/uploader/postpone.go | 48 --- internal/pkg/uploader/uploader.go | 371 +++++++++++------- 34 files changed, 1518 insertions(+), 945 deletions(-) delete mode 100644 cmd/huly-stream/main.go create mode 100644 cmd/stream/main.go create mode 100644 internal/pkg/api/v1/recording/handler.go create mode 100644 internal/pkg/api/v1/transcoding/handler.go create mode 100644 internal/pkg/storage/datalake.go rename internal/pkg/{uploader => storage}/s3.go (61%) create mode 100644 internal/pkg/storage/storage.go create mode 100644 internal/pkg/token/token.go rename internal/pkg/{uploader/postpone_test.go => token/token_test.go} (50%) rename internal/pkg/{transcoding => transcoder}/command.go (93%) rename internal/pkg/{transcoding => transcoder}/command_test.go (56%) create mode 100644 internal/pkg/transcoder/coordinator.go create mode 100644 internal/pkg/transcoder/queue.go rename internal/pkg/{transcoding/worker.go => transcoder/stream.go} (72%) delete mode 100644 internal/pkg/transcoding/limiter.go delete mode 100644 internal/pkg/transcoding/limiter_test.go delete mode 100644 internal/pkg/transcoding/scheduler.go delete mode 100644 internal/pkg/uploader/datalake.go delete mode 100644 internal/pkg/uploader/postpone.go diff --git a/.github/workflows/docker-push.yaml b/.github/workflows/docker-push.yaml index ad67bcee7cc..47baea039f8 100644 --- a/.github/workflows/docker-push.yaml +++ b/.github/workflows/docker-push.yaml @@ -29,7 +29,7 @@ jobs: id: metaci uses: docker/metadata-action@v3 with: - images: hardcoreeng/huly-stream + images: hardcoreeng/stream:${{ inputs.version }} tags: | type=ref,event=pr type=sha,prefix= @@ -41,4 +41,4 @@ jobs: context: . 
platforms: linux/amd64,linux/arm64 push: true - tags: hardcoreeng/huly-stream:${{ inputs.version }} + tags: hardcoreeng/stream:${{ inputs.version }} diff --git a/.golangci.yaml b/.golangci.yaml index 0099487e6f5..a5335cf2089 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -61,8 +61,8 @@ linters-settings: dupl: threshold: 150 funlen: - lines: 120 - statements: 60 + lines: 140 + statements: 80 goconst: min-len: 2 min-occurrences: 2 diff --git a/Dockerfile b/Dockerfile index 0b4dca5d82c..4304349055c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM --platform=$BUILDPLATFORM golang:1.23.5 AS builder +FROM --platform=linux/amd64 golang:1.24.1 AS builder ENV GO111MODULE=on ENV CGO_ENABLED=0 ENV GOBIN=/bin @@ -19,18 +19,18 @@ ARG BUILDARCH=amd64 COPY . ./ -RUN set -xe && GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /go/bin/huly-stream ./cmd/huly-stream +RUN set -xe && GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /go/bin/stream ./cmd/stream -FROM alpine +FROM --platform=linux/amd64 alpine RUN set -xe && apk add --no-cache ffmpeg RUN apk add --no-cache ca-certificates jq bash \ - && addgroup -g 1000 huly-stream \ - && adduser -u 1000 -G huly-stream -s /bin/sh -D huly-stream \ - && chown huly-stream:huly-stream /. -COPY --from=builder /go/bin/huly-stream /huly-stream + && addgroup -g 1000 stream \ + && adduser -u 1000 -G stream -s /bin/sh -D stream \ + && chown stream:stream /. +COPY --from=builder /go/bin/stream /stream EXPOSE 1080 -USER huly-stream +USER stream -ENTRYPOINT ["/huly-stream"] \ No newline at end of file +ENTRYPOINT ["/stream"] \ No newline at end of file diff --git a/README.md b/README.md index f977167dba4..529b764313d 100644 --- a/README.md +++ b/README.md @@ -1,91 +1,96 @@ -# Huly Stream +# Stream [![X (formerly Twitter) Follow](https://img.shields.io/twitter/follow/huly_io?style=for-the-badge)](https://x.com/huly_io) ![GitHub License](https://img.shields.io/github/license/hcengineering/platform?style=for-the-badge) ## About -The Huly Stream high-performance HTTP-based transcoding service. Huly-stream is built around the **TUS protocol**, enabling reliable, resumable file uploads and downloads. Designed for seamless and consistent media processing,it supports advanced transcoding features with robust integration options. +The Stream is high-performance HTTP-based transcoding service. *Stream* supports **TUS protocol**, enabling reliable, +resumable transcodings. Designed for seamless and consistent media processing,it supports advanced transcoding features +with robust integration options. --- ## Features ### TUS Protocol Support -- **Resumable transcoding**: Leveraging the TUS protocol, Huly-stream ensures reliable and efficient stream processing. -### Input Support +- **Resumable transcoding**: Leveraging the TUS protocol, *Stream* ensures reliable and efficient transcoding bucket + processing. + +#### Input Support + - **Supported Input Formats**: - - `mp4` - - `webm` + - `mp4` + - `webm` + +#### Output Options -### Output Options - **Supported Output Formats**: - - `aac` - - `hls` + - `hls` + +#### Upload options -### Upload options -- **TUS Upload**: Resumable file uploads via TUS protocol. - **s3 Upload**: Direct upload to S3 storage. - **datalake Upload**: Upload to datalake storage. -### Key Functionalities +#### Key Functionalities + - **Live transcoing with minimal upload time**: Transcoding results are going to be avaible after stream completion. 
 - **Transcoding Cancellation**: Cancel or pause ongoing transcoding in real-time.
 - **Transcoding Resumption**: Resume incomplete transcoding tasks efficiently.

----
+### Transcoding scheduling

 ## Installation

 ### Prerequisites
+
 - [Go](https://golang.org/dl/) (v1.23+ recommended)
 - [ffmpeg](https://www.ffmpeg.org/download.html) (ensure it’s installed and available in your system's PATH)

 ### Steps

 1. Install dependencies:
-   ```bash
-   go mod tidy
-   ```
+
+```bash
+go mod tidy
+```

 2. Build the service:
-   ```bash
-   docker build . -t hcengineering/huly-stream:latest
-   ```
+```bash
+docker build . -t hcengineering/stream:latest
+```

 ---

 ## Configuration

 ### App env configuration
+
 The following environment variables can be used:
+
 ```
-KEY                               TYPE            DEFAULT           REQUIRED   DESCRIPTION
-STREAM_SECRET_TOKEN               String                                       secret token for authorize requests
-STREAM_LOG_LEVEL                  String          debug                        sets log level for the application
-STREAM_PPROF_ENABLED              True or False   false                        starts profile server on localhost:6060 if true
-STREAM_INSECURE                   True or False   false                        ignores authorization check if true
-STREAM_SERVE_URL                  String          0.0.0.0:1080                 app listen url
-STREAM_ENDPOINT_URL               URL                                          S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address
-STREAM_MAX_CAPACITY               Integer         6220800                      represents the amount of maximum possible capacity for the transcoding. The default value is 1920 * 1080 * 3.
-STREAM_MAX_THREADS                Integer         4                            means upper bound for the transcoing provider.
-STREAM_OUTPUT_DIR                 String          /tmp/transcoing/             path to the directory with transcoding result.
-STREAM_REMOVE_CONTENT_ON_UPLOAD   True or False   true                         deletes all content when content delivered if true
-STREAM_UPLOAD_RAW_CONTENT         True or False   false                        uploads content in raw quality to the endpoint if true
+KEY                                 TYPE            DEFAULT
+STREAM_LOG_LEVEL                    String          debug                 sets log level for the application
+STREAM_SERVER_SECRET                String                                server secret required to generate and verify tokens
+STREAM_PPROF_ENABLED                True or False   true                  starts profile server on localhost:6060 if true
+STREAM_INSECURE                     True or False   false                 ignores authorization check if true
+STREAM_SERVE_URL                    String          0.0.0.0:1080          listen on url
+STREAM_ENDPOINT_URL                 URL             s3://127.0.0.1:9000   S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address
+STREAM_MAX_PARALLEL_SCALING_COUNT   Integer         2                     how much parallel scaling can be processed
+STREAM_MAX_THREAD_COUNT             Integer         4                     max number of threads for transcoder
+STREAM_OUTPUT_DIR                   String          /tmp/transcoing/      path to the directory with tra
 ```

 ### Metadata

-**resolutions:** if passed, set the resolution for the output, for example, 'resolutions: 1920:1080, 1280:720.'
+**resolution:** if passed, set the resolution for the output, for example, 'resolution: 1920:1080'.

 **token:** must be provided to be authorized in Huly's datalake service.

 **workspace:** required for uploading content to the datalake storage.
-
-

 #### S3 Env configuration

 If you're working with the S3 storage type, these envs must be provided:
@@ -94,16 +99,35 @@

 ## Usage

-The service exposes an HTTP API. Below are some examples of how to interact with it.
+The service exposes an HTTP API.
+
+Below are some examples of how to interact with it.
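The `token` metadata field and the `STREAM_SERVER_SECRET` setting above pair with the `internal/pkg/token` package added later in this patch. A minimal sketch of minting a compatible bearer token (hypothetical program; the secret, workspace, issuer and audience values are placeholders):

```go
package main

import (
	"fmt"

	"github.com/hcengineering/stream/internal/pkg/token"
)

func main() {
	// The secret must match STREAM_SERVER_SECRET on the server side;
	// workspace, issuer and audience are illustrative values only.
	tok, err := token.NewToken("my-server-secret", "test", "stream-docs", "stream")
	if err != nil {
		panic(err)
	}
	fmt.Println("Authorization: Bearer " + tok)
}
```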
+ +### Trnascode via TUS -### Upload a File for Transcoding via TUS ```bash -curl -X POST http://localhost:1080/transcoing \ +curl -X POST http://localhost:1080/recording \ -H "Tus-Resumable: 1.0.0" \ -H "Upload-Length: " \ --data-binary @path/to/your/file.mp4 ``` +Note: tus client is required, to play with a service locally you can use tus-js-client example +with [video](https://github.com/tus/tus-js-client/blob/main/demos/browser/video.html) + +### Schedule a transcoding + +```bash +curl -X POST http://localhost:1080/transcoding \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{ + "source": "", + "format": "hls", + "workspace": "test" + }' +``` + ## Contributing We welcome contributions! To get started: @@ -120,4 +144,4 @@ This project is licensed under the [MIT License](LICENSE). --- -Enjoy seamless transcoding with huly-stream! 🚀 \ No newline at end of file +Enjoy seamless transcoding with *Stream*! 🚀 \ No newline at end of file diff --git a/cmd/huly-stream/main.go b/cmd/huly-stream/main.go deleted file mode 100644 index d7c5d9690bf..00000000000 --- a/cmd/huly-stream/main.go +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package main provides huly-stream entry point function -package main - -import ( - "context" - "net/http" - - "os" - "os/signal" - "syscall" - - "go.uber.org/zap" - "golang.org/x/exp/slog" - - "github.com/huly-stream/internal/pkg/config" - "github.com/huly-stream/internal/pkg/log" - "github.com/huly-stream/internal/pkg/pprof" - "github.com/huly-stream/internal/pkg/transcoding" - tusd "github.com/tus/tusd/v2/pkg/handler" -) - -const basePath = "/recording" - -func main() { - var ctx, cancel = signal.NotifyContext( - context.Background(), - os.Interrupt, - syscall.SIGHUP, - syscall.SIGTERM, - syscall.SIGQUIT, - ) - defer cancel() - ctx = log.WithLoggerFields(ctx) - - var logger = log.FromContext(ctx) - var conf = must(config.FromEnv()) - logger.Sugar().Debugf("provided config is %v", conf) - - logger.Sugar().Info(conf.Endpoint()) - - mustNoError(os.MkdirAll(conf.OutputDir, os.ModePerm)) - if conf.PprofEnabled { - go pprof.ListenAndServe(ctx, "localhost:6060") - } - scheduler := transcoding.NewScheduler(ctx, conf) - - tusComposer := tusd.NewStoreComposer() - tusComposer.UseCore(scheduler) - tusComposer.UseTerminater(scheduler) - tusComposer.UseConcater(scheduler) - tusComposer.UseLengthDeferrer(scheduler) - - var handler = must(tusd.NewHandler(tusd.Config{ - BasePath: basePath, - StoreComposer: tusComposer, - Logger: slog.New(slog.NewTextHandler(discardTextHandler{}, nil)), - })) - - http.Handle("/recording/", http.StripPrefix("/recording/", handler)) - http.Handle("/recording", http.StripPrefix("/recording", handler)) - - go func() { - logger.Info("started to listen") - defer logger.Info("server has finished") - // #nosec - var err = http.ListenAndServe(conf.ServeURL, nil) - if err != nil { - cancel() - logger.Debug("unable to listen", 
zap.Error(err)) - } - }() - - <-ctx.Done() -} - -type discardTextHandler struct{} - -func (discardTextHandler) Write([]byte) (int, error) { - return 0, nil -} - -func mustNoError(err error) { - if err != nil { - panic(err.Error()) - } -} - -func must[T any](val T, err error) T { - mustNoError(err) - return val -} diff --git a/cmd/stream/main.go b/cmd/stream/main.go new file mode 100644 index 00000000000..9b34801c02f --- /dev/null +++ b/cmd/stream/main.go @@ -0,0 +1,71 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package main provides huly-stream entry point function +package main + +import ( + "context" + "net/http" + + "os" + "os/signal" + "syscall" + + "go.uber.org/zap" + + "github.com/hcengineering/stream/internal/pkg/api/v1/recording" + "github.com/hcengineering/stream/internal/pkg/api/v1/transcoding" + "github.com/hcengineering/stream/internal/pkg/config" + "github.com/hcengineering/stream/internal/pkg/log" +) + +func main() { + var ctx, cancel = signal.NotifyContext( + context.Background(), + os.Interrupt, + syscall.SIGHUP, + syscall.SIGTERM, + syscall.SIGQUIT, + ) + defer cancel() + ctx = log.WithFields(ctx) + + var logger = log.FromContext(ctx) + var cfg, err = config.FromEnv() + if err != nil { + panic(err.Error()) + } + logger.Sugar().Debugf("parsed config is %v", cfg) + + var recordingHandler = recording.NewHandler(ctx, cfg) + var transcodingHandler = transcoding.NewHandler(ctx, cfg) + + http.Handle("/recording/", http.StripPrefix("/recording/", recordingHandler)) + http.Handle("/recording", http.StripPrefix("/recording", recordingHandler)) + http.Handle("/transcoding", http.StripPrefix("/transcoding", transcodingHandler)) + + go func() { + logger.Info("server started serving", zap.String("ServeURL", cfg.ServeURL)) + defer logger.Info("server finished") + + // #nosec + var err = http.ListenAndServe(cfg.ServeURL, nil) + if err != nil { + cancel() + logger.Debug("unable to listen", zap.Error(err)) + } + }() + + <-ctx.Done() +} diff --git a/go.mod b/go.mod index 3775572bbff..0671d43070f 100644 --- a/go.mod +++ b/go.mod @@ -1,14 +1,13 @@ -module github.com/huly-stream +module github.com/hcengineering/stream -go 1.23.2 +go 1.24.1 require ( github.com/aws/aws-sdk-go-v2 v1.36.1 github.com/aws/aws-sdk-go-v2/config v1.29.6 github.com/aws/aws-sdk-go-v2/credentials v1.17.59 github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0 - github.com/aws/smithy-go v1.22.3 - github.com/fsnotify/fsnotify v1.8.0 + github.com/golang-jwt/jwt/v5 v5.2.1 github.com/google/uuid v1.6.0 github.com/kelseyhightower/envconfig v1.4.0 github.com/pkg/errors v0.9.1 @@ -17,6 +16,8 @@ require ( github.com/valyala/fasthttp v1.59.0 go.uber.org/zap v1.27.0 golang.org/x/exp v0.0.0-20250215185904-eff6e970281f + gopkg.in/vansante/go-ffprobe.v2 v2.2.1 + k8s.io/utils v0.0.0-20241210054802-24370beab758 ) require ( @@ -34,11 +35,12 @@ require ( github.com/aws/aws-sdk-go-v2/service/sso v1.24.15 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.14 // indirect 
github.com/aws/aws-sdk-go-v2/service/sts v1.33.14 // indirect + github.com/aws/smithy-go v1.22.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/sys v0.30.0 // indirect + golang.org/x/net v0.37.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 1ffa95ca7c2..6ef7c10d828 100644 --- a/go.sum +++ b/go.sum @@ -40,8 +40,8 @@ github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= @@ -72,13 +72,15 @@ go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= golang.org/x/exp v0.0.0-20250215185904-eff6e970281f h1:oFMYAjX0867ZD2jcNiLBrI9BdpmEkvPyi5YrBGXbamg= golang.org/x/exp v0.0.0-20250215185904-eff6e970281f/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk= -golang.org/x/net v0.35.0 h1:T5GQRQb2y08kTAByq9L4/bz8cipCdA8FbRTXewonqY8= -golang.org/x/net v0.35.0/go.mod h1:EglIi67kWsHKlRzzVMUD93VMSWGFOMSZgxFjparz1Qk= -golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= -golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= -golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= +golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= +golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= +golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/vansante/go-ffprobe.v2 v2.2.1 h1:sFV08OT1eZ1yroLCZVClIVd9YySgCh9eGjBWO0oRayI= +gopkg.in/vansante/go-ffprobe.v2 v2.2.1/go.mod h1:qF0AlAjk7Nqzqf3y333Ly+KxN3cKF2JqA3JT5ZheUGE= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +k8s.io/utils v0.0.0-20241210054802-24370beab758 h1:sdbE21q2nlQtFh65saZY+rRM6x6aJJI8IUa1AmH/qa0= +k8s.io/utils v0.0.0-20241210054802-24370beab758/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= diff --git a/internal/pkg/api/v1/recording/handler.go b/internal/pkg/api/v1/recording/handler.go new file mode 100644 index 
00000000000..4282261f767 --- /dev/null +++ b/internal/pkg/api/v1/recording/handler.go @@ -0,0 +1,82 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package recording provides recording http handler. +package recording + +import ( + "context" + + "golang.org/x/exp/slog" + + "net/http" + "sync" + + "github.com/hcengineering/stream/internal/pkg/config" + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/transcoder" + "go.uber.org/zap" + + tusd "github.com/tus/tusd/v2/pkg/handler" +) + +type recordingHandler struct { + logger *zap.Logger + once sync.Once + cfg *config.Config + ctx context.Context + tusHandler http.Handler +} + +func (h *recordingHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if !h.cfg.Insecure { + r.Header.Set("X-Forwarded-Proto", "https") + } + h.once.Do(h.initialize) + h.tusHandler.ServeHTTP(w, r) +} + +// NewHandler creates a new recording http handler, requires context and config. +func NewHandler(ctx context.Context, cfg *config.Config) http.Handler { + return &recordingHandler{ + logger: log.FromContext(ctx).With(zap.String("handler", "recording")), + cfg: cfg, + ctx: ctx, + } +} + +func (h *recordingHandler) initialize() { + scheduler := transcoder.NewStreamCoordinator(h.ctx, h.cfg) + + tusComposer := tusd.NewStoreComposer() + tusComposer.UseCore(scheduler) + tusComposer.UseTerminater(scheduler) + tusComposer.UseConcater(scheduler) + tusComposer.UseLengthDeferrer(scheduler) + + var tusHandler, err = tusd.NewHandler(tusd.Config{ + BasePath: "/recording", + StoreComposer: tusComposer, + RespectForwardedHeaders: true, + DisableDownload: true, + Cors: &tusd.DefaultCorsConfig, + NetworkTimeout: h.cfg.Timeout, + Logger: slog.Default(), + }) + + if err != nil { + panic(err.Error()) + } + + h.tusHandler = tusHandler +} diff --git a/internal/pkg/api/v1/transcoding/handler.go b/internal/pkg/api/v1/transcoding/handler.go new file mode 100644 index 00000000000..0391ea74da6 --- /dev/null +++ b/internal/pkg/api/v1/transcoding/handler.go @@ -0,0 +1,66 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package transcoding provides transcoding http handler. 
+package transcoding + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + + "github.com/hcengineering/stream/internal/pkg/config" + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/transcoder" + "go.uber.org/zap" +) + +type trascodeHandler struct { + taskQueue *transcoder.Scheduler + logger *zap.Logger +} + +func (t *trascodeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "" { + w.WriteHeader(http.StatusBadRequest) + _, _ = fmt.Fprint(w, "uri is not allowed") + return + } + + if r.Header.Get("Authorization") == "" { + w.WriteHeader(http.StatusUnauthorized) + _, _ = fmt.Fprint(w, "missed Authorization header") + return + } + + var decoder = json.NewDecoder(r.Body) + var task transcoder.Task + + if err := decoder.Decode(&task); err != nil { + w.WriteHeader(http.StatusBadRequest) + _, _ = fmt.Fprint(w, "can not decode request body") + return + } + + t.taskQueue.Schedule(&task) + w.WriteHeader(http.StatusOK) +} + +// NewHandler creates a new trnascoding http handler, requires context and config. +func NewHandler(ctx context.Context, cfg *config.Config) http.Handler { + return &trascodeHandler{ + taskQueue: transcoder.NewScheduler(ctx, cfg), + logger: log.FromContext(ctx).With(zap.String("handler", "transcoding")), + } +} diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 6a748d5bae6..23fee3afc7c 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -18,22 +18,24 @@ import ( "net/url" "time" + "github.com/pkg/errors" + "github.com/kelseyhightower/envconfig" ) // Config represents configuration for the huly-stream application. type Config struct { - SecretToken string `split_words:"true" desc:"secret token for authorize requests"` - LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` - PprofEnabled bool `default:"false" split_words:"true" desc:"starts profile server on localhost:6060 if true"` - Insecure bool `default:"false" desc:"ignores authorization check if true"` - ServeURL string `split_words:"true" desc:"app listen url" default:"0.0.0.0:1080"` - EndpointURL *url.URL `split_words:"true" default:"s3://127.0.0.1:9000" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` - AuthURL *url.URL `split_words:"true" desc:"url to auth the upload"` - MaxCapacity int64 `split_words:"true" default:"6220800" desc:"represents the amount of maximum possible capacity for the transcoding. 
The default value is 1920 * 1080 * 3."` - MaxThreads int `split_words:"true" default:"4" desc:"means upper bound for the transcoing provider."` - OutputDir string `split_words:"true" default:"/tmp/transcoing/" desc:"path to the directory with transcoding result."` - Timeout time.Duration `default:"5m" desc:"timeout for the upload"` + LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` + ServerSecret string `split_words:"true" default:"" desc:"server secret required to generate and verify tokens"` + PprofEnabled bool `split_words:"true" default:"true" desc:"starts profile server on localhost:6060 if true"` + Insecure bool `split_words:"true" default:"false" desc:"ignores authorization check if true"` + ServeURL string `split_words:"true" desc:"listen on url" default:"0.0.0.0:1080"` + EndpointURL *url.URL `split_words:"true" default:"s3://127.0.0.1:9000" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` + MaxParallelScalingCount int `split_words:"true" default:"2" desc:"how much parallel scaling can be processed"` + MaxThreadCount int `split_words:"true" default:"4" desc:"max number of threads for transcoder"` + + OutputDir string `split_words:"true" default:"/tmp/transcoing/" desc:"path to the directory with transcoding result."` + Timeout time.Duration `default:"5m" desc:"timeout for the upload"` } // FromEnv creates new Config from env @@ -52,6 +54,10 @@ func FromEnv() (*Config, error) { result.EndpointURL = nil } + if !result.Insecure && result.ServerSecret == "" { + return nil, errors.New("server secret must be provided for secure confgiuration") + } + return &result, nil } diff --git a/internal/pkg/log/zap.go b/internal/pkg/log/zap.go index 72e03ac379a..b85fcfbe5ba 100644 --- a/internal/pkg/log/zap.go +++ b/internal/pkg/log/zap.go @@ -22,8 +22,8 @@ import ( type contextKey struct{} -// WithLoggerFields createsa new context with zap.Logger and passed fields -func WithLoggerFields(ctx context.Context, fields ...zap.Field) context.Context { +// WithFields createsa new context with zap.Logger and passed fields +func WithFields(ctx context.Context, fields ...zap.Field) context.Context { var logger = FromContext(ctx) if logger == nil { var err error diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index d8dd7a53613..dcb421fe50c 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -20,7 +20,7 @@ import ( "path/filepath" "strings" - "github.com/huly-stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/resconv" ) // GenerateHLSPlaylist generates master file for master files for resolution levels diff --git a/internal/pkg/manifest/hls_test.go b/internal/pkg/manifest/hls_test.go index 624e3a27a98..a37c63b8742 100644 --- a/internal/pkg/manifest/hls_test.go +++ b/internal/pkg/manifest/hls_test.go @@ -18,7 +18,7 @@ import ( "path/filepath" "testing" - "github.com/huly-stream/internal/pkg/manifest" + "github.com/hcengineering/stream/internal/pkg/manifest" "github.com/stretchr/testify/require" ) diff --git a/internal/pkg/pprof/pprof.go b/internal/pkg/pprof/pprof.go index e7a5eb03024..081e9603dfe 100644 --- a/internal/pkg/pprof/pprof.go +++ b/internal/pkg/pprof/pprof.go @@ -20,7 +20,7 @@ import ( "net/http/pprof" "time" - "github.com/huly-stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/log" "go.uber.org/zap" ) @@ -48,7 +48,6 @@ func ListenAndServe(ctx context.Context, listenOn string) { if err := server.ListenAndServe(); err 
!= nil { log.FromContext(ctx).Debug("Failed to start profiler", zap.Error(err)) } - <-ctx.Done() _ = server.Close() } diff --git a/internal/pkg/resconv/resconv_test.go b/internal/pkg/resconv/resconv_test.go index 547fab05821..48fd291d577 100644 --- a/internal/pkg/resconv/resconv_test.go +++ b/internal/pkg/resconv/resconv_test.go @@ -16,7 +16,7 @@ package resconv_test import ( "testing" - "github.com/huly-stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/resconv" "github.com/stretchr/testify/require" ) diff --git a/internal/pkg/sharedpipe/shared_pipe_bench_test.go b/internal/pkg/sharedpipe/shared_pipe_bench_test.go index 3633e1fdf4f..cafe282dc00 100644 --- a/internal/pkg/sharedpipe/shared_pipe_bench_test.go +++ b/internal/pkg/sharedpipe/shared_pipe_bench_test.go @@ -196,36 +196,35 @@ func Test_Consistent(t *testing.T) { } // Benchmark_DefaultPipe-8 (4 b) 61956 19177 ns/op 48 B/op 1 allocs/op -// Benchmark_DefaultPipe-8 (8 mb) 22 49187741 ns/op 118 B/op 1 allocs/op +// Benchmark_DefaultPipe-8 (8 mb) 24 48471316 ns/op 257 B/op 1 allocs/op func Benchmark_DefaultPipe(b *testing.B) { var data [sendMessageSize]byte var buffer = make([]byte, len(data)) var readers []io.Reader var writers []io.Writer - for i := 0; i < readerCount; i++ { + for range readerCount { r, w := io.Pipe() readers = append(readers, r) writers = append(writers, w) } b.ReportAllocs() - b.ResetTimer() - for range b.N { + for b.Loop() { go func() { - for i := 0; i < readerCount; i++ { + for i := range readerCount { _, _ = writers[i].Write(data[:]) } }() - for i := 0; i < readerCount; i++ { + for i := range readerCount { _, _ = readers[i].Read(buffer) } } } // Benchmark_SharedPipe-8 (4 b) 161847 8131 ns/op 160 B/op 2 allocs/op -// Benchmark_SharedPipe-8 (8 mb) 69 15880031 ns/op 160 B/op 2 allocs/op +// Benchmark_SharedPipe-8 (8 mb) 75 15710658 ns/op 161 B/op 2 allocs/op func Benchmark_SharedPipe(b *testing.B) { var data [sendMessageSize]byte var buffer = make([]byte, len(data)) @@ -237,11 +236,10 @@ func Benchmark_SharedPipe(b *testing.B) { } b.ReportAllocs() - b.ResetTimer() - for range b.N { + for b.Loop() { _, _ = writer.Write(data[:]) - for i := 0; i < readerCount; i++ { + for i := range readerCount { _, _ = readers[i].Read(buffer) } } diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go new file mode 100644 index 00000000000..abe2f69459f --- /dev/null +++ b/internal/pkg/storage/datalake.go @@ -0,0 +1,254 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
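The sharedpipe benchmark changes above move from the manual `b.ResetTimer()` / `for range b.N` idiom to Go 1.24's `testing.B.Loop`, which keeps setup done before the loop out of the measured region. A minimal, self-contained sketch of that pattern (hypothetical `BenchmarkCopy`, not taken from the patch):

```go
package sharedpipe_test

import "testing"

// BenchmarkCopy shows the b.Loop() style used in the updated benchmarks:
// allocations made before the loop are not part of the timed region.
func BenchmarkCopy(b *testing.B) {
	src := make([]byte, 1<<20) // setup, not timed
	dst := make([]byte, len(src))
	b.ReportAllocs()
	for b.Loop() {
		copy(dst, src)
	}
}
```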
+ +package storage + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "mime/multipart" + "os" + "path/filepath" + + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/pkg/errors" + "github.com/valyala/fasthttp" + "go.uber.org/zap" +) + +// DatalakeStorage represents datalake storage +type DatalakeStorage struct { + baseURL string + workspace string + token string + logger *zap.Logger + client fasthttp.Client +} + +// NewDatalakeStorage creates a new datalake client +func NewDatalakeStorage(ctx context.Context, baseURL, workspace, token string) Storage { + return &DatalakeStorage{ + baseURL: baseURL, + token: token, + workspace: workspace, + logger: log.FromContext(ctx).With(zap.String("storage", "datalake")), + } +} + +// PutFile uploads file to the datalake +func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { + // #nosec + file, err := os.Open(fileName) + if err != nil { + return err + } + defer func() { + _ = file.Close() + }() + + var objectKey = getObjectKey(fileName) + var logger = d.logger.With(zap.String("upload", d.workspace), zap.String("fileName", fileName)) + + logger.Debug("start") + + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + + part, err := writer.CreateFormFile("file", objectKey) + if err != nil { + return errors.Wrapf(err, "failed to create form file") + } + + _, err = io.Copy(part, file) + if err != nil { + return errors.Wrapf(err, "failed to copy file data") + } + + _ = writer.Close() + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + + res := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(res) + + req.SetRequestURI(d.baseURL + "/upload/form-data/" + d.workspace) + req.Header.SetMethod(fasthttp.MethodPost) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType(writer.FormDataContentType()) + req.SetBody(body.Bytes()) + + if err := d.client.Do(req, res); err != nil { + logger.Error("upload failed", zap.Error(err)) + return errors.Wrapf(err, "upload failed") + } + + logger.Debug("uploaded") + + return nil +} + +// DeleteFile deletes file from the datalake +func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error { + var logger = d.logger.With(zap.String("delete", d.workspace), zap.String("fileName", fileName)) + logger.Debug("start") + + var objectKey = getObjectKey(fileName) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + + res := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(res) + + req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey) + req.Header.SetMethod(fasthttp.MethodDelete) + req.Header.Add("Authorization", "Bearer "+d.token) + + if err := d.client.Do(req, res); err != nil { + logger.Error("delete failed", zap.Error(err)) + return errors.Wrapf(err, "delete failed") + } + + logger.Debug("deleted") + + return nil +} + +func getObjectKey(s string) string { + var _, objectKey = filepath.Split(s) + return objectKey +} + +// PatchMeta patches metadata for the object +func (d *DatalakeStorage) PatchMeta(ctx context.Context, filename string, md *Metadata) error { + var logger = d.logger.With(zap.String("patch meta", d.workspace), zap.String("fileName", filename)) + logger.Debug("start") + defer logger.Debug("finished") + + var objectKey = getObjectKey(filename) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(d.baseURL + "/meta/" + d.workspace + "/" + objectKey) + 
req.Header.SetMethod(fasthttp.MethodPatch) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType("application/json") + + b, err := json.Marshal(md) + + if err != nil { + return err + } + req.SetBody(b) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + return err + } + + if resp.StatusCode() != fasthttp.StatusOK { + var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) + logger.Debug("bad status code", zap.Error(err)) + return err + } + + fmt.Println(string(resp.Body())) + + return nil +} + +// GetMeta gets metadata related to the object +func (d *DatalakeStorage) GetMeta(ctx context.Context, filename string) (*Metadata, error) { + var logger = d.logger.With(zap.String("get meta", d.workspace), zap.String("fileName", filename)) + logger.Debug("start") + + var objectKey = getObjectKey(filename) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(d.baseURL + "/meta/" + d.workspace + "/" + objectKey) + req.Header.SetMethod(fasthttp.MethodGet) + req.Header.Add("Authorization", "Bearer "+d.token) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + return nil, err + } + + if resp.StatusCode() != fasthttp.StatusOK { + var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) + logger.Debug("bad status code", zap.Error(err)) + return nil, err + } + + var md Metadata + fmt.Println(string(resp.Body())) + var err = json.Unmarshal(resp.Body(), &md) + + return &md, err +} + +// GetFile gets file from the storage +func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination string) error { + var logger = d.logger.With(zap.String("get", d.workspace), zap.String("fileName", filename), zap.String("destination", destination)) + logger.Debug("start") + + var objectKey = getObjectKey(filename) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey) + req.Header.SetMethod(fasthttp.MethodGet) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + return err + } + + // Check the response status code + if resp.StatusCode() != fasthttp.StatusOK { + var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) + logger.Debug("bad status code", zap.Error(err)) + return err + } + + // #nosec + file, err := os.Create(destination) + if err != nil { + logger.Debug("can't create a file", zap.Error(err)) + return err + } + defer func() { + _ = file.Close() + }() + if err := resp.BodyWriteTo(file); err != nil { + logger.Debug("can't write to file", zap.Error(err)) + return err + } + + logger.Debug("file downloaded successfully") + return nil +} + +var _ Storage = (*DatalakeStorage)(nil) +var _ MetaProvider = (*DatalakeStorage)(nil) diff --git a/internal/pkg/uploader/s3.go b/internal/pkg/storage/s3.go similarity index 61% rename from internal/pkg/uploader/s3.go rename to internal/pkg/storage/s3.go index d211e0347f6..41da2eaaf19 100644 --- a/internal/pkg/uploader/s3.go +++ b/internal/pkg/storage/s3.go @@ -11,14 +11,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -package uploader +// Package storage provdies simple storage interface for the remote storages. 
+package storage import ( "context" "fmt" - "github.com/pkg/errors" - "os" "path/filepath" "strings" @@ -28,8 +27,7 @@ import ( "github.com/aws/aws-sdk-go-v2/config" "github.com/aws/aws-sdk-go-v2/credentials" "github.com/aws/aws-sdk-go-v2/service/s3" - "github.com/aws/smithy-go" - "github.com/huly-stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/log" "go.uber.org/zap" ) @@ -44,7 +42,6 @@ type S3Storage struct { func NewS3(ctx context.Context, endpoint, bucketName string) Storage { var accessKeyID = os.Getenv("AWS_ACCESS_KEY_ID") var accessKeySecret = os.Getenv("AWS_SECRET_ACCESS_KEY") - var logger = log.FromContext(ctx).With(zap.String("s3", "storage")) cfg, err := config.LoadDefaultConfig(ctx, config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(accessKeyID, accessKeySecret, "")), @@ -62,7 +59,7 @@ func NewS3(ctx context.Context, endpoint, bucketName string) Storage { return &S3Storage{ client: s3Client, bucketName: bucketName, - logger: logger, + logger: log.FromContext(ctx).With(zap.String("s3", "storage")), } } @@ -71,7 +68,7 @@ func getContentType(objectKey string) string { return "video/mp2t" } if strings.HasSuffix(objectKey, ".m3u8") { - return "application/x-mpegurl" + return "video/x-mpegurl" } return "application/octet-stream" } @@ -79,9 +76,10 @@ func getContentType(objectKey string) string { // DeleteFile deletes file from the s3 storage func (u *S3Storage) DeleteFile(ctx context.Context, fileName string) error { var _, objectKey = filepath.Split(fileName) - var logger = log.FromContext(ctx).With(zap.String("s3 delete", u.bucketName), zap.String("fileName", fileName)) + var logger = u.logger.With(zap.String("delete", u.bucketName), zap.String("fileName", fileName)) + + logger.Debug("start") - logger.Debug("start deleting") input := &s3.DeleteObjectInput{ Bucket: aws.String(u.bucketName), Key: aws.String(objectKey), @@ -91,15 +89,17 @@ func (u *S3Storage) DeleteFile(ctx context.Context, fileName string) error { if err != nil { return fmt.Errorf("failed to delete file from S3: %w", err) } - logger.Debug("file deleted") + + logger.Debug("deleted") return nil } -// UploadFile uploads file to the s3 storage -func (u *S3Storage) UploadFile(ctx context.Context, fileName string) error { +// PutFile uploads file to the s3 storage +func (u *S3Storage) PutFile(ctx context.Context, fileName string) error { var _, objectKey = filepath.Split(fileName) - var logger = log.FromContext(ctx).With(zap.String("s3 upload", u.bucketName), zap.String("fileName", fileName)) - logger.Debug("start upload file") + var logger = u.logger.With(zap.String("upload", u.bucketName), zap.String("fileName", fileName)) + + logger.Debug("start") // #nosec var file, err = os.Open(fileName) @@ -112,6 +112,7 @@ func (u *S3Storage) UploadFile(ctx context.Context, fileName string) error { defer func() { _ = file.Close() }() + _, err = u.client.PutObject(ctx, &s3.PutObjectInput{ Bucket: aws.String(u.bucketName), Key: aws.String(objectKey), @@ -120,23 +121,57 @@ func (u *S3Storage) UploadFile(ctx context.Context, fileName string) error { }) if err != nil { - var apiErr smithy.APIError - if errors.As(err, &apiErr) && apiErr.ErrorCode() == "EntityTooLarge" { - logger.Error("Error while uploading object. The object is too large." 
+ - "To upload objects larger than 5GB, use the S3 console (160GB max)" + - "or the multipart upload API (5TB max).") - } else { - logger.Error("Couldn't upload file", zap.Error(err)) - } - return apiErr + logger.Error("couldn't upload file", zap.Error(err)) + return err } err = s3.NewObjectExistsWaiter(u.client).Wait( ctx, &s3.HeadObjectInput{Bucket: aws.String(u.bucketName), Key: aws.String(objectKey)}, time.Minute) + if err != nil { - logger.Debug("Failed attempt to wait for object to exist.") + logger.Error("Failed attempt to wait for object to exist") + return err } - logger.Debug("file has uploaded") - return err + logger.Debug("uploaded") + return nil +} + +// GetFile gets file from the storage and stores it to destination +func (u *S3Storage) GetFile(ctx context.Context, filename, dest string) error { + var logger = u.logger.With(zap.String("get", u.bucketName), zap.String("fileName", filename), zap.String("destination", dest)) + + var result, err = u.client.GetObject(ctx, &s3.GetObjectInput{ + Bucket: &u.bucketName, + Key: &filename, + }) + + if err != nil { + logger.Error("failed to get object", zap.Error(err)) + return err + } + defer func() { + _ = result.Body.Close() + }() + + // Create a local file to save the downloaded content + // #nosec + file, err := os.Create(dest) + if err != nil { + logger.Error("failed to create file", zap.Error(err)) + return err + } + defer func() { + _ = file.Close() + }() + // Copy the S3 object content to the local file + _, err = file.ReadFrom(result.Body) + if err != nil { + logger.Error("failed to write to file", zap.Error(err)) + return err + } + + logger.Debug("file downloaded successfully") + + return nil } diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go new file mode 100644 index 00000000000..e45250284da --- /dev/null +++ b/internal/pkg/storage/storage.go @@ -0,0 +1,55 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package storage + +import ( + "context" + "net/url" + + "github.com/pkg/errors" +) + +// Metadata represents file's metadata +type Metadata map[string]any + +// MetaProvider provides simple api for working with files metadata +type MetaProvider interface { + GetMeta(ctx context.Context, filename string) (*Metadata, error) + PatchMeta(ctx context.Context, filename string, value *Metadata) error +} + +// Storage represents file-based storage +type Storage interface { + PutFile(ctx context.Context, fileName string) error + DeleteFile(ctx context.Context, fileName string) error + GetFile(ctx context.Context, fileName, destination string) error +} + +// NewStorageByURL creates a new storage based on the type from the url scheme, for example "datalake://my-datalake-endpoint" +func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, worksapce string) (Storage, error) { + if worksapce == "" { + return nil, errors.New("workspace is missed") + } + switch storageType { + case "datalake": + if token == "" { + return nil, errors.New("token is missed") + } + return NewDatalakeStorage(ctx, u.String(), worksapce, token), nil + case "s3": + return NewS3(ctx, u.String(), worksapce), nil + default: + return nil, errors.New("unknown scheme") + } +} diff --git a/internal/pkg/token/token.go b/internal/pkg/token/token.go new file mode 100644 index 00000000000..1f81a764843 --- /dev/null +++ b/internal/pkg/token/token.go @@ -0,0 +1,77 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
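`NewStorageByURL` above picks a backend from the endpoint scheme, mirroring how `STREAM_ENDPOINT_URL` is resolved elsewhere in this patch. A usage sketch (hypothetical helper; endpoint, token, workspace and file path are placeholders):

```go
package example

import (
	"context"
	"net/url"

	"github.com/hcengineering/stream/internal/pkg/storage"
)

// uploadSegment resolves a storage backend from the endpoint scheme
// (s3:// or datalake://) and pushes one finished HLS segment to it.
func uploadSegment(ctx context.Context, endpoint, token, workspace, segment string) error {
	u, err := url.Parse(endpoint) // e.g. "datalake://127.0.0.1:4030" or "s3://127.0.0.1:9000"
	if err != nil {
		return err
	}
	st, err := storage.NewStorageByURL(ctx, u, u.Scheme, token, workspace)
	if err != nil {
		return err
	}
	return st.PutFile(ctx, segment)
}
```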
+ +// Package token provides functions to work with platform tokens +package token + +import ( + "fmt" + "time" + + "github.com/golang-jwt/jwt/v5" + "github.com/google/uuid" +) + +// Token represents Claims for the platform token +type Token struct { + jwt.RegisteredClaims + Account string `json:"account"` + Workspace string `json:"workspace,omitempty"` + Extra map[string]interface{} `json:"extra,omitempty"` +} + +// NewToken creates a new platform token +func NewToken(serverSecret, workspace, issuer, audience string) (string, error) { + var res = Token{ + Account: uuid.NewString(), + Workspace: workspace, + RegisteredClaims: jwt.RegisteredClaims{ + Issuer: issuer, + Audience: jwt.ClaimStrings{audience}, + ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Hour * 12)), + }, + } + return res.Encode(serverSecret) +} + +// Encode signes a token object and converts it to string +func (t *Token) Encode(serverSecret string) (string, error) { + var token = jwt.NewWithClaims(jwt.SigningMethodHS256, t) + var tokenString, err = token.SignedString([]byte(serverSecret)) + + if err != nil { + return "", err + } + + return tokenString, nil +} + +// Decode decodes a token by a passed configuration +func Decode(secretKey, tokenString string) (*Token, error) { + token, err := jwt.ParseWithClaims(tokenString, &Token{}, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + return []byte(secretKey), nil + }) + + if err != nil { + return nil, fmt.Errorf("failed to parse token: %w", err) + } + + if claims, ok := token.Claims.(*Token); ok && token.Valid { + return claims, nil + } + + return nil, fmt.Errorf("invalid token: can't parse claims") +} diff --git a/internal/pkg/uploader/postpone_test.go b/internal/pkg/token/token_test.go similarity index 50% rename from internal/pkg/uploader/postpone_test.go rename to internal/pkg/token/token_test.go index 68758451e79..93d97bdb539 100644 --- a/internal/pkg/uploader/postpone_test.go +++ b/internal/pkg/token/token_test.go @@ -11,35 +11,30 @@ // See the License for the specific language governing permissions and // limitations under the License. 
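The `/transcoding` handler shown earlier only checks that an `Authorization` header is present; `token.Decode` above is what an actual verification could build on. A sketch of that check (an assumption, not something this patch wires up):

```go
package example

import (
	"net/http"
	"strings"

	"github.com/hcengineering/stream/internal/pkg/token"
)

// authorize is a hypothetical helper: it verifies the bearer token against
// the server secret and rejects the request if decoding fails.
func authorize(w http.ResponseWriter, r *http.Request, serverSecret string) (*token.Token, bool) {
	raw := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ")
	claims, err := token.Decode(serverSecret, raw)
	if err != nil {
		w.WriteHeader(http.StatusUnauthorized)
		return nil, false
	}
	// claims.Workspace could then be used to scope the scheduled task.
	return claims, true
}
```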
-package uploader +package token_test import ( - "context" - "sync/atomic" "testing" "time" + "github.com/golang-jwt/jwt/v5" + "github.com/hcengineering/stream/internal/pkg/token" "github.com/stretchr/testify/require" ) -func Test_Postpone(t *testing.T) { - var u = uploader{ - postponeDuration: time.Second / 4, - } - var counter atomic.Int32 - u.postpone("1", func(context.Context) { counter.Add(1) }) - time.Sleep(time.Second / 8) - u.postpone("1", func(context.Context) { counter.Add(1) }) - time.Sleep(time.Second / 2) - require.Equal(t, int32(1), counter.Load()) - time.Sleep(time.Second / 2) - require.Equal(t, int32(1), counter.Load()) +func Test_GenerateSimpleServiceToken(t *testing.T) { + var _, err = token.NewToken("secret", "ws", "issuer", "aud") + require.NoError(t, err) } -func Test_WithoutPostpone(t *testing.T) { - var counter atomic.Int32 - var u uploader - u.postpone("1", func(context.Context) { counter.Add(1) }) - time.Sleep(time.Second / 10) - require.Equal(t, int32(1), counter.Load()) +func Test_ParseSimpleServiceToken(t *testing.T) { + const secret = "secret" + tokenString, err := token.NewToken(secret, "ws", "issuer", "aud") + require.NoError(t, err) + tok, err := token.Decode(secret, tokenString) + require.NoError(t, err) + require.Equal(t, tok.Issuer, "issuer") + require.Equal(t, tok.Audience, jwt.ClaimStrings{"aud"}) + require.Equal(t, tok.Workspace, "ws") + require.True(t, tok.ExpiresAt.After(time.Now())) } diff --git a/internal/pkg/transcoding/command.go b/internal/pkg/transcoder/command.go similarity index 93% rename from internal/pkg/transcoding/command.go rename to internal/pkg/transcoder/command.go index 0065fa31969..9cc65d96379 100644 --- a/internal/pkg/transcoding/command.go +++ b/internal/pkg/transcoder/command.go @@ -13,7 +13,7 @@ // limitations under the License. 
// -package transcoding +package transcoder import ( "context" @@ -25,13 +25,13 @@ import ( "github.com/pkg/errors" - "github.com/huly-stream/internal/pkg/log" - "github.com/huly-stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/log" "go.uber.org/zap" ) // Options represents configuration for the ffmpeg command type Options struct { + Input string OuputDir string ScalingLevels []string Level string @@ -58,9 +58,8 @@ func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.C func buildCommonComamnd(opts *Options) []string { return []string{ - "-nostdin", "-threads", fmt.Sprint(opts.Threads), - "-i", "pipe:0", + "-i", opts.Input, } } @@ -79,8 +78,6 @@ func BuildRawVideoCommand(opts *Options) []string { return append(buildCommonComamnd(opts), "-c:v", "copy", - "-fps_mode", - "vfr", "-hls_time", "5", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), @@ -90,9 +87,10 @@ func BuildRawVideoCommand(opts *Options) []string { // BuildScalingVideoCommand returns flags for ffmpeg for video scaling func BuildScalingVideoCommand(opts *Options) []string { var result = buildCommonComamnd(opts) + for _, level := range opts.ScalingLevels { result = append(result, - "-vf", "scale="+resconv.Resolution(level), + "-vf", "scale=-2:"+level[:len(level)-1], "-c:v", "libx264", "-preset", "veryfast", diff --git a/internal/pkg/transcoding/command_test.go b/internal/pkg/transcoder/command_test.go similarity index 56% rename from internal/pkg/transcoding/command_test.go rename to internal/pkg/transcoder/command_test.go index 719c38467a7..db382a555e7 100644 --- a/internal/pkg/transcoding/command_test.go +++ b/internal/pkg/transcoder/command_test.go @@ -11,39 +11,41 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-package transcoding_test +package transcoder_test import ( "strings" "testing" - "github.com/huly-stream/internal/pkg/resconv" - "github.com/huly-stream/internal/pkg/transcoding" + "github.com/hcengineering/stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/transcoder" "github.com/stretchr/testify/require" ) func Test_BuildVideoCommand_Scaling(t *testing.T) { - var scaleCommand = transcoding.BuildScalingVideoCommand(&transcoding.Options{ + var scaleCommand = transcoder.BuildScalingVideoCommand(&transcoder.Options{ OuputDir: "test", + Input: "pipe:0", UploadID: "1", Threads: 4, ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-nostdin -threads 4 -i pipe:0 -vf scale=1280:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -vf scale=640:480 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-threads 4 -i pipe:0 -vf scale=-2:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -vf scale=-2:480 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } func Test_BuildVideoCommand_Raw(t *testing.T) { - var rawCommand = transcoding.BuildRawVideoCommand(&transcoding.Options{ + var rawCommand = transcoder.BuildRawVideoCommand(&transcoder.Options{ OuputDir: "test", + Input: "pipe:0", UploadID: "1", Threads: 4, Level: resconv.Level("651:490"), }) - const expected = `-nostdin -threads 4 -i pipe:0 -c:v copy -fps_mode vfr -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `"-threads 4 -i pipe:0 -c:v copy -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/transcoder/coordinator.go b/internal/pkg/transcoder/coordinator.go new file mode 100644 index 00000000000..d0705cb0ce0 --- /dev/null +++ b/internal/pkg/transcoder/coordinator.go @@ -0,0 +1,190 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
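The resconv package referenced by these tests is not included in this hunk; the raw-command test only implies that Level("651:490") maps onto the 480p rung. A purely illustrative guess at that kind of mapping (the real resconv helper may differ) could look like this:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// ladder is an assumed set of standard rungs; the real resconv may use another.
var ladder = []int{2160, 1440, 1080, 720, 480, 360, 240, 144}

// level snaps a "width:height" string onto the highest rung not exceeding its
// height, which would make level("651:490") == "480p" as the test expects.
func level(res string) string {
	parts := strings.Split(res, ":")
	h, _ := strconv.Atoi(parts[len(parts)-1])
	for _, rung := range ladder {
		if h >= rung {
			return strconv.Itoa(rung) + "p"
		}
	}
	return strconv.Itoa(ladder[len(ladder)-1]) + "p"
}

func main() {
	fmt.Println(level("651:490"))   // 480p
	fmt.Println(level("1920:1080")) // 1080p
}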
+// + +package transcoder + +import ( + "context" + "path/filepath" + "sync" + "sync/atomic" + "time" + + "github.com/pkg/errors" + + "github.com/google/uuid" + "github.com/hcengineering/stream/internal/pkg/config" + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/sharedpipe" + "github.com/hcengineering/stream/internal/pkg/storage" + "github.com/hcengineering/stream/internal/pkg/uploader" + "github.com/tus/tusd/v2/pkg/handler" + "go.uber.org/zap" +) + +// StreamCoordinator represents manager for streams. It creates a new stream for a client and manages it's life cycle. +type StreamCoordinator struct { + conf *config.Config + uploadOptions uploader.Options + + activeScalling int32 + + mainContext context.Context + logger *zap.Logger + + streams sync.Map + cancels sync.Map +} + +// NewStreamCoordinator creates a new scheduler for transcode operations. +func NewStreamCoordinator(ctx context.Context, c *config.Config) *StreamCoordinator { + return &StreamCoordinator{ + conf: c, + uploadOptions: uploader.Options{ + RetryDelay: time.Millisecond * 100, + Timeout: c.Timeout, + WorkerCount: uint32(c.MaxThreadCount), + RetryCount: 5, + BufferSize: 128, + Dir: c.OutputDir, + }, + mainContext: ctx, + logger: log.FromContext(ctx).With(zap.String("Scheduler", c.OutputDir)), + } +} + +// NewUpload creates a new worker with passed parameters +func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo) (handler.Upload, error) { + if info.ID == "" { + info.ID = uuid.NewString() + } + s.logger.Sugar().Debugf("stream: %v", info) + s.logger.Debug("NewUpload", zap.String("ID", info.ID)) + + var stream = &Stream{ + writer: sharedpipe.NewWriter(), + info: info, + logger: log.FromContext(s.mainContext).With(zap.String("worker", info.ID)), + done: make(chan struct{}), + } + + var scaling = resconv.SubLevels(info.MetaData["resolution"]) + var level = resconv.Level(info.MetaData["resolution"]) + var cost int64 + + for _, scale := range scaling { + cost += int64(resconv.Pixels(resconv.Resolution(scale))) + } + + if atomic.AddInt32(&s.activeScalling, 1) > int32(s.conf.MaxParallelScalingCount) { + atomic.AddInt32(&s.activeScalling, -1) + s.logger.Debug("run out of resources for scaling") + scaling = nil + } + + var commandOptions = Options{ + Input: "pipe:0", + OuputDir: s.conf.OutputDir, + Threads: s.conf.MaxThreadCount, + UploadID: info.ID, + Level: level, + ScalingLevels: scaling, + } + + if s.conf.EndpointURL != nil { + s.logger.Sugar().Debugf("initializing uploader for %v", info) + var opts = s.uploadOptions + opts.Dir = filepath.Join(opts.Dir, info.ID) + + var storage, err = storage.NewStorageByURL(s.mainContext, s.conf.Endpoint(), s.conf.EndpointURL.Scheme, info.MetaData["token"], info.MetaData["workspace"]) + if err != nil { + s.logger.Error("can not create storage by url") + return nil, err + } + var contentUploader = uploader.New(s.mainContext, storage, opts) + + stream.contentUploader = contentUploader + } + + s.streams.Store(stream.info.ID, stream) + if err := stream.start(s.mainContext, &commandOptions); err != nil { + return nil, err + } + + go func() { + stream.commandGroup.Wait() + if scaling != nil { + atomic.AddInt32(&s.activeScalling, -1) + } + s.logger.Debug("returned capacity", zap.Int64("capacity", cost)) + close(stream.done) + }() + + s.manageTimeout(stream) + + s.logger.Debug("NewUpload", zap.String("done", info.ID)) + return stream, nil +} + +// GetUpload returns 
current a worker based on upload id +func (s *StreamCoordinator) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { + if v, ok := s.streams.Load(id); ok { + s.logger.Debug("GetUpload: found worker by id", zap.String("id", id)) + var w = v.(*Stream) + s.manageTimeout(w) + return w, nil + } + s.logger.Debug("GetUpload: worker not found", zap.String("id", id)) + return nil, errors.New("bad id") +} + +// AsTerminatableUpload returns tusd handler.TerminatableUpload +func (s *StreamCoordinator) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload { + var worker = upload.(*Stream) + s.logger.Debug("AsTerminatableUpload") + return worker +} + +// AsLengthDeclarableUpload returns tusd handler.LengthDeclarableUpload +func (s *StreamCoordinator) AsLengthDeclarableUpload(upload handler.Upload) handler.LengthDeclarableUpload { + s.logger.Debug("AsLengthDeclarableUpload") + return upload.(*Stream) +} + +func (s *StreamCoordinator) manageTimeout(w *Stream) { + var cancelCtx, cancel = context.WithCancel(context.Background()) + if v, ok := s.cancels.Load(w.info.ID); ok { + v.(context.CancelFunc)() + } + s.cancels.Store(w.info.ID, cancel) + go func() { + select { + case <-w.done: + w.logger.Debug("stream has finished") + s.cancels.Delete(w.info.ID) + return + case <-cancelCtx.Done(): + w.logger.Debug("stream timeout has refreshed") + return + case <-time.After(s.conf.Timeout): + w.logger.Error("stream timeout") + s.cancels.Delete(w.info.ID) + var terminateCtx, terminateCancel = context.WithTimeout(context.Background(), s.conf.Timeout) + defer terminateCancel() + _ = w.Terminate(terminateCtx) + } + }() +} diff --git a/internal/pkg/transcoder/queue.go b/internal/pkg/transcoder/queue.go new file mode 100644 index 00000000000..4a82ae107b4 --- /dev/null +++ b/internal/pkg/transcoder/queue.go @@ -0,0 +1,234 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
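manageTimeout above implements a refreshable idle timeout: every NewUpload or GetUpload call arms a new watchdog for the stream and cancels the previous one, so Terminate only fires after a full quiet window with no client activity. A minimal standalone sketch of the same pattern (names are illustrative, not the coordinator's API):

package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

// watchdog keeps one cancel func per id; touching an id disarms the previous
// timer and starts a new race between completion, refresh and timeout.
type watchdog struct {
	timeout time.Duration
	cancels sync.Map // id -> context.CancelFunc
}

func (w *watchdog) touch(id string, done <-chan struct{}, onTimeout func()) {
	ctx, cancel := context.WithCancel(context.Background())
	if prev, ok := w.cancels.Load(id); ok {
		prev.(context.CancelFunc)() // refresh: disarm the previous watchdog
	}
	w.cancels.Store(id, cancel)

	go func() {
		select {
		case <-done: // the stream finished on its own
			w.cancels.Delete(id)
		case <-ctx.Done(): // a newer touch replaced this watchdog
		case <-time.After(w.timeout): // no activity for the whole window
			w.cancels.Delete(id)
			onTimeout()
		}
	}()
}

func main() {
	w := watchdog{timeout: 100 * time.Millisecond}
	done := make(chan struct{})
	onTimeout := func() { fmt.Println("terminate stream") }
	w.touch("1", done, onTimeout)
	time.Sleep(50 * time.Millisecond)
	w.touch("1", done, onTimeout) // refreshes the timer, first watchdog exits
	time.Sleep(200 * time.Millisecond)
}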
+// + +package transcoder + +import ( + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "time" + + "github.com/google/uuid" + "github.com/hcengineering/stream/internal/pkg/config" + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/manifest" + "github.com/hcengineering/stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/storage" + "github.com/hcengineering/stream/internal/pkg/token" + "github.com/hcengineering/stream/internal/pkg/uploader" + "go.uber.org/zap" + "gopkg.in/vansante/go-ffprobe.v2" +) + +// HLS represents metadata for transcoding result +type HLS struct { + Source string `json:"source"` +} + +// Task represents transcoding task +type Task struct { + ID string + Status string + Source string + Format string + Workspace string + Metadata map[string]string +} + +// Scheduler manages transcoding tasks by passed config +type Scheduler struct { + logger *zap.Logger + taskCh chan *Task + cfg *config.Config + ctx context.Context +} + +// Schedule schedules a task to transcode +func (p *Scheduler) Schedule(t *Task) { + t.ID = uuid.NewString() + t.Status = "planned" + + select { + case p.taskCh <- t: + p.logger.Sugar().Debugf("task %v is scheduled", t) + default: + p.logger.Error("task channel is full") + } +} + +// NewScheduler creates a new instance of transcoding task scheduler +func NewScheduler(ctx context.Context, cfg *config.Config) *Scheduler { + var p = &Scheduler{ + taskCh: make(chan *Task, 128), + cfg: cfg, + ctx: ctx, + logger: log.FromContext(ctx).With(zap.String("transcoding", "planner")), + } + + go p.start() + + return p +} + +func (p *Scheduler) start() { + go func() { + <-p.ctx.Done() + close(p.taskCh) + }() + + for range p.cfg.MaxParallelScalingCount { + go func() { + for task := range p.taskCh { + p.processTask(p.ctx, task) + } + }() + } +} + +func (p *Scheduler) processTask(ctx context.Context, task *Task) { + var logger = p.logger.With(zap.String("task-id", task.ID)) + + logger.Debug("start") + defer logger.Debug("finished") + + logger.Debug("phase 1: get a token") + var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream", "datalake") + if err != nil { + logger.Error("can not create token", zap.Error(err)) + return + } + + logger.Debug("phase 2: preparing fs") + var destinationFolder = filepath.Join(p.cfg.OutputDir, task.ID) + var _, filename = filepath.Split(task.Source) + var sourceFilePath = filepath.Join(destinationFolder, filename) + _ = os.MkdirAll(destinationFolder, os.ModePerm) + + logger.Debug("phase 3: get the remote file") + + remoteStorage, err := storage.NewStorageByURL(ctx, p.cfg.Endpoint(), p.cfg.EndpointURL.Scheme, tokenString, task.Workspace) + + if err != nil { + logger.Error("can not create storage by url", zap.Error(err)) + _ = os.RemoveAll(destinationFolder) + return + } + + if err = remoteStorage.GetFile(ctx, task.Source, sourceFilePath); err != nil { + logger.Error("can not download a file", zap.Error(err)) + _ = os.RemoveAll(destinationFolder) + // TODO: reschedule + return + } + + logger.Debug("phase 4: prepare to transcode") + probe, err := ffprobe.ProbeURL(ctx, sourceFilePath) + if err != nil { + logger.Error("can not get probe for a file", zap.Error(err), zap.String("filepath", sourceFilePath)) + _ = os.RemoveAll(destinationFolder) + return + } + + var res = fmt.Sprintf("%v:%v", probe.FirstVideoStream().Width, probe.FirstVideoStream().Height) + var level = resconv.Level(res) + var opts = Options{ + Input: sourceFilePath, + 
OuputDir: p.cfg.OutputDir, + Level: level, + ScalingLevels: append(resconv.SubLevels(res), level), + UploadID: task.ID, + Threads: p.cfg.MaxThreadCount, + } + + logger.Debug("phase 5: start async upload process") + var uploader = uploader.New(ctx, remoteStorage, uploader.Options{ + Dir: destinationFolder, + WorkerCount: uint32(opts.Threads), + BufferSize: 128, + RetryCount: 5, + RetryDelay: time.Millisecond * 100, + Timeout: p.cfg.Timeout, + SourceFile: sourceFilePath, + }) + + go uploader.Start() + + logger.Debug("phase 6: start async transcode process") + var rawCommand, scaleCommand *exec.Cmd + + rawCommand, err = newFfmpegCommand(ctx, nil, BuildRawVideoCommand(&opts)) + if err != nil { + logger.Error("can not create ffmpeg command", zap.Error(err)) + go uploader.Cancel() + return + } + + scaleCommand, err = newFfmpegCommand(ctx, nil, BuildScalingVideoCommand(&opts)) + if err != nil { + logger.Error("can not create ffmpeg command", zap.Error(err)) + go uploader.Cancel() + return + } + + _ = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) + + if err = rawCommand.Start(); err != nil { + logger.Error("can not run raw ffmpeg command", zap.Error(err)) + go uploader.Cancel() + return + } + + if err = scaleCommand.Start(); err != nil { + logger.Error("can not run scale ffmpeg command", zap.Error(err)) + go uploader.Cancel() + return + } + + logger.Debug("phase 7: wait for the result") + if err = scaleCommand.Wait(); err != nil { + logger.Error("can not scale ", zap.Error(err)) + go uploader.Cancel() + return + } + if err = rawCommand.Wait(); err != nil { + logger.Error("can not process raw", zap.Error(err)) + go uploader.Cancel() + return + } + + logger.Debug("phase 8: schedule cleanup") + go uploader.Stop() + + logger.Debug("phase 9: try to set metadata") + var resultURL = p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+"_master.m3u8") + + if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { + var hls = &HLS{Source: resultURL.String()} + logger.Debug("applying metadata", zap.Stringer("url", resultURL), zap.String("source", task.Source)) + err = metaProvider.PatchMeta( + ctx, + task.Source, + &storage.Metadata{ + "hls": hls, + }, + ) + if err != nil { + logger.Error("can not patch the source file", zap.Error(err)) + } + } +} diff --git a/internal/pkg/transcoding/worker.go b/internal/pkg/transcoder/stream.go similarity index 72% rename from internal/pkg/transcoding/worker.go rename to internal/pkg/transcoder/stream.go index c2aaca3d349..d59c12c09fd 100644 --- a/internal/pkg/transcoding/worker.go +++ b/internal/pkg/transcoder/stream.go @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
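Phase 4 of processTask above relies on go-ffprobe to discover the source resolution before choosing scaling levels. Run in isolation, with ffprobe on PATH and a placeholder input file, the same probe looks like this:

package main

import (
	"context"
	"fmt"
	"time"

	ffprobe "gopkg.in/vansante/go-ffprobe.v2"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// "input.mp4" is a placeholder path, not a file from this repository.
	probe, err := ffprobe.ProbeURL(ctx, "input.mp4")
	if err != nil {
		panic(err)
	}
	video := probe.FirstVideoStream()
	if video == nil {
		panic("no video stream in input")
	}
	// The "width:height" string is what processTask feeds into resconv.
	fmt.Printf("%d:%d\n", video.Width, video.Height)
}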
-// Package transcoding provides objects and functions for video trnascoding -package transcoding +// Package transcoder provides types and functions for video trnascoding +package transcoder import ( "context" @@ -21,27 +21,27 @@ import ( "github.com/pkg/errors" - "github.com/huly-stream/internal/pkg/manifest" - "github.com/huly-stream/internal/pkg/sharedpipe" - "github.com/huly-stream/internal/pkg/uploader" + "github.com/hcengineering/stream/internal/pkg/manifest" + "github.com/hcengineering/stream/internal/pkg/sharedpipe" + "github.com/hcengineering/stream/internal/pkg/uploader" "github.com/tus/tusd/v2/pkg/handler" "go.uber.org/zap" ) -// Worker manages client's input and transcodes it based on the passsed configuration -type Worker struct { +// Stream manages client's input and transcodes it based on the passsed configuration +type Stream struct { contentUploader uploader.Uploader logger *zap.Logger info handler.FileInfo writer *sharedpipe.Writer reader *sharedpipe.Reader - wg sync.WaitGroup - done chan struct{} + commandGroup sync.WaitGroup + done chan struct{} } // WriteChunk calls when client sends a chunk of raw data -func (w *Worker) WriteChunk(ctx context.Context, _ int64, src io.Reader) (int64, error) { +func (w *Stream) WriteChunk(ctx context.Context, _ int64, src io.Reader) (int64, error) { w.logger.Debug("Write Chunk start", zap.Int64("offset", w.info.Offset)) var bytes, err = io.ReadAll(src) _, _ = w.writer.Write(bytes) @@ -52,7 +52,7 @@ func (w *Worker) WriteChunk(ctx context.Context, _ int64, src io.Reader) (int64, } // DeclareLength sets length of the video input -func (w *Worker) DeclareLength(ctx context.Context, length int64) error { +func (w *Stream) DeclareLength(ctx context.Context, length int64) error { w.info.Size = length w.info.SizeIsDeferred = false w.logger.Debug("DeclareLength", zap.Int64("size", length), zap.Bool("SizeIsDeferred", w.info.SizeIsDeferred)) @@ -60,65 +60,67 @@ func (w *Worker) DeclareLength(ctx context.Context, length int64) error { } // GetInfo returns info about transcoing status -func (w *Worker) GetInfo(ctx context.Context) (handler.FileInfo, error) { +func (w *Stream) GetInfo(ctx context.Context) (handler.FileInfo, error) { w.logger.Debug("GetInfo is executed") return w.info, nil } -// GetReader returns worker's bytes stream -func (w *Worker) GetReader(ctx context.Context) (io.ReadCloser, error) { +// GetReader returns Stream's bytes stream +func (w *Stream) GetReader(ctx context.Context) (io.ReadCloser, error) { w.logger.Debug("GetReader is executed, creating current reader...") return w.reader, nil } // Terminate calls when upload has failed -func (w *Worker) Terminate(ctx context.Context) error { +func (w *Stream) Terminate(ctx context.Context) error { w.logger.Debug("Terminating...") if w.contentUploader != nil { go func() { - w.wg.Wait() - w.contentUploader.Rollback() + w.commandGroup.Wait() + w.contentUploader.Cancel() }() } return w.writer.Close() } // ConcatUploads calls when upload resumed after fail -func (w *Worker) ConcatUploads(ctx context.Context, partialUploads []handler.Upload) error { +func (w *Stream) ConcatUploads(ctx context.Context, partialUploads []handler.Upload) error { w.logger.Debug("ConcatUploads was executed, it's not implemented") // - // TODO: load raw source from the Buckup bucket, terminate all workers with same ID and start process again. + // TODO: load raw source from the Buckup bucket, terminate all Streams with same ID and start process again. 
// return errors.New("not implemented") } // FinishUpload calls when upload finished without errors on the client side -func (w *Worker) FinishUpload(ctx context.Context) error { +func (w *Stream) FinishUpload(ctx context.Context) error { w.logger.Debug("finishing upload...") + if w.contentUploader != nil { go func() { - w.wg.Wait() - w.contentUploader.Terminate() + w.commandGroup.Wait() + w.contentUploader.Stop() }() } + return w.writer.Close() } // AsConcatableUpload returns tusd handler.ConcatableUpload -func (s *Scheduler) AsConcatableUpload(upload handler.Upload) handler.ConcatableUpload { +func (s *StreamCoordinator) AsConcatableUpload(upload handler.Upload) handler.ConcatableUpload { s.logger.Debug("AsConcatableUpload is executed") - return upload.(*Worker) + return upload.(*Stream) } -func (w *Worker) start(ctx context.Context, options *Options) error { +func (w *Stream) start(ctx context.Context, options *Options) error { defer w.logger.Debug("start done") w.reader = w.writer.Transpile() if err := manifest.GenerateHLSPlaylist(append(options.ScalingLevels, options.Level), options.OuputDir, options.UploadID); err != nil { return err } - w.wg.Add(1) + w.commandGroup.Add(1) go func() { - defer w.wg.Done() + defer w.commandGroup.Done() var logger = w.logger.With(zap.String("command", "raw")) defer logger.Debug("done") @@ -134,10 +136,11 @@ func (w *Worker) start(ctx context.Context, options *Options) error { }() if len(options.ScalingLevels) > 0 { - w.wg.Add(1) + w.commandGroup.Add(1) var scalingCommandReader = w.writer.Transpile() + go func() { - defer w.wg.Done() + defer w.commandGroup.Done() var logger = w.logger.With(zap.String("command", "scaling")) defer logger.Debug("done") @@ -153,5 +156,7 @@ func (w *Worker) start(ctx context.Context, options *Options) error { }() } + go w.contentUploader.Start() + return nil } diff --git a/internal/pkg/transcoding/limiter.go b/internal/pkg/transcoding/limiter.go deleted file mode 100644 index f1fdf51d9fc..00000000000 --- a/internal/pkg/transcoding/limiter.go +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. - -package transcoding - -import "sync/atomic" - -// Limiter is a simple CAS data structure for managing resources. -type Limiter struct { - capacity int64 - maxCapacity int64 -} - -// NewLimiter creates a new limiter with the given initial capacity. -func NewLimiter(capacity int64) *Limiter { - return &Limiter{ - capacity: capacity, - maxCapacity: capacity, - } -} - -// TryConsume attempts to consume the specified amount of capacity. -// Returns true if successful, false otherwise. 
-func (l *Limiter) TryConsume(amount int64) bool { - if amount <= 0 { - return false - } - - for { - current := atomic.LoadInt64(&l.capacity) - if current < amount { - return false - } - updated := current - amount - if atomic.CompareAndSwapInt64(&l.capacity, current, updated) { - return true - } - } -} - -// ReturnCapacity adds the specified amount back to the limiter's capacity. -// Does not exceed the maximum capacity. -func (l *Limiter) ReturnCapacity(amount int64) { - if amount <= 0 { - return - } - - for { - current := atomic.LoadInt64(&l.capacity) - updated := current + amount - if updated > l.maxCapacity { - updated = l.maxCapacity - } - if atomic.CompareAndSwapInt64(&l.capacity, current, updated) { - break - } - } -} - -// GetCapacity retrieves the current capacity for debugging or monitoring purposes. -func (l *Limiter) GetCapacity() int64 { - return atomic.LoadInt64(&l.capacity) -} - -// GetMaxCapacity retrieves the maximum capacity. -func (l *Limiter) GetMaxCapacity() int64 { - return l.maxCapacity -} diff --git a/internal/pkg/transcoding/limiter_test.go b/internal/pkg/transcoding/limiter_test.go deleted file mode 100644 index df7db82ba01..00000000000 --- a/internal/pkg/transcoding/limiter_test.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
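The CAS-based Limiter deleted here is replaced in the coordinator by a plain atomic counter: optimistically increment, then give the slot back if the configured maximum was exceeded. The trade-off is that the new version counts concurrent scaling jobs rather than pixel capacity. The same add-then-undo pattern in isolation:

package main

import (
	"fmt"
	"sync/atomic"
)

// tryAcquire mirrors the activeScalling check in StreamCoordinator.NewUpload:
// add first, then roll back if the limit was crossed.
func tryAcquire(active *int32, max int32) bool {
	if atomic.AddInt32(active, 1) > max {
		atomic.AddInt32(active, -1)
		return false
	}
	return true
}

func release(active *int32) { atomic.AddInt32(active, -1) }

func main() {
	var active int32
	fmt.Println(tryAcquire(&active, 1)) // true: first scaling job is admitted
	fmt.Println(tryAcquire(&active, 1)) // false: over the parallel limit
	release(&active)
	fmt.Println(tryAcquire(&active, 1)) // true: capacity returned
}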
-package transcoding_test - -import ( - "sync" - "sync/atomic" - "testing" - - "github.com/huly-stream/internal/pkg/transcoding" - "github.com/stretchr/testify/require" -) - -func TestLimiter(t *testing.T) { - limiter := transcoding.NewLimiter(10) - - t.Run("Initial capacity", func(t *testing.T) { - require.Equal(t, int64(10), limiter.GetCapacity()) - }) - - t.Run("Successful consume", func(t *testing.T) { - success := limiter.TryConsume(5) - require.True(t, success) - require.Equal(t, int64(5), limiter.GetCapacity()) - }) - - t.Run("Failed consume", func(t *testing.T) { - success := limiter.TryConsume(10) - require.False(t, success) - require.Equal(t, int64(5), limiter.GetCapacity()) - }) - - t.Run("Return capacity", func(t *testing.T) { - limiter.ReturnCapacity(3) - require.Equal(t, int64(8), limiter.GetCapacity()) - }) - - t.Run("Exceeding max capacity", func(t *testing.T) { - limiter.ReturnCapacity(10) - require.Equal(t, int64(10), limiter.GetCapacity()) - }) -} - -func TestLimiterConcurrency(t *testing.T) { - limiter := transcoding.NewLimiter(10) - var wg sync.WaitGroup - - for i := 0; i < 10; i++ { - wg.Add(1) - go func() { - defer wg.Done() - limiter.TryConsume(2) - }() - } - - wg.Wait() - require.LessOrEqual(t, limiter.GetCapacity(), int64(0)) -} - -func TestLimiterCAS(t *testing.T) { - limiter := transcoding.NewLimiter(10) - var successful int64 - var wg sync.WaitGroup - - for i := 0; i < 1000; i++ { - wg.Add(1) - go func() { - defer wg.Done() - if limiter.TryConsume(1) { - atomic.AddInt64(&successful, 1) - } - }() - } - wg.Wait() - - require.Equal(t, int64(10), successful) - require.Equal(t, int64(0), limiter.GetCapacity()) -} diff --git a/internal/pkg/transcoding/scheduler.go b/internal/pkg/transcoding/scheduler.go deleted file mode 100644 index 080aeedbb58..00000000000 --- a/internal/pkg/transcoding/scheduler.go +++ /dev/null @@ -1,167 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -package transcoding - -import ( - "context" - "sync" - "time" - - "github.com/pkg/errors" - - "github.com/google/uuid" - "github.com/huly-stream/internal/pkg/config" - "github.com/huly-stream/internal/pkg/log" - "github.com/huly-stream/internal/pkg/resconv" - "github.com/huly-stream/internal/pkg/sharedpipe" - "github.com/huly-stream/internal/pkg/uploader" - "github.com/tus/tusd/v2/pkg/handler" - "go.uber.org/zap" -) - -// Scheduler represents manager for worker. It creates a new worker for clients and manages its life cycle. -type Scheduler struct { - conf *config.Config - - limiter *Limiter - - mainContext context.Context - logger *zap.Logger - workers sync.Map - cancels sync.Map -} - -// NewScheduler creates a new scheduler for transcode operations. 
-func NewScheduler(ctx context.Context, c *config.Config) *Scheduler { - return &Scheduler{ - conf: c, - limiter: NewLimiter(c.MaxCapacity), - mainContext: ctx, - logger: log.FromContext(ctx).With(zap.String("Scheduler", c.OutputDir)), - } -} - -// NewUpload creates a new worker with passed parameters -func (s *Scheduler) NewUpload(ctx context.Context, info handler.FileInfo) (handler.Upload, error) { - if info.ID == "" { - info.ID = uuid.NewString() - } - s.logger.Sugar().Debugf("upload: %v", info) - s.logger.Debug("NewUpload", zap.String("ID", info.ID)) - - var worker = &Worker{ - writer: sharedpipe.NewWriter(), - info: info, - logger: log.FromContext(s.mainContext).With(zap.String("worker", info.ID)), - done: make(chan struct{}), - } - - var scaling = resconv.SubLevels(info.MetaData["resolution"]) - var level = resconv.Level(info.MetaData["resolution"]) - var cost int64 - - for _, scale := range scaling { - cost += int64(resconv.Pixels(resconv.Resolution(scale))) - } - - if !s.limiter.TryConsume(cost) { - s.logger.Debug("run out of resources for scaling") - scaling = nil - } - - var commandOptions = Options{ - OuputDir: s.conf.OutputDir, - Threads: s.conf.MaxThreads, - UploadID: info.ID, - Level: level, - ScalingLevels: scaling, - } - - if s.conf.EndpointURL != nil { - s.logger.Sugar().Debugf("initializing uploader for %v", info) - var contentUploader, err = uploader.New(s.mainContext, s.conf.OutputDir, s.conf.EndpointURL, info) - if err != nil { - s.logger.Error("can not create uploader", zap.Error(err)) - return nil, err - } - - worker.contentUploader = contentUploader - go func() { - var serverErr = worker.contentUploader.Serve() - worker.logger.Debug("content uploader has finished", zap.Error(serverErr)) - }() - } - s.workers.Store(worker.info.ID, worker) - if err := worker.start(s.mainContext, &commandOptions); err != nil { - return nil, err - } - - go func() { - worker.wg.Wait() - s.limiter.ReturnCapacity(cost) - s.logger.Debug("returned capacity", zap.Int64("capacity", cost)) - close(worker.done) - }() - - s.logger.Debug("NewUpload", zap.String("done", info.ID)) - return worker, nil -} - -// GetUpload returns current a worker based on upload id -func (s *Scheduler) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { - if v, ok := s.workers.Load(id); ok { - s.logger.Debug("GetUpload: found worker by id", zap.String("id", id)) - var w = v.(*Worker) - var cancelCtx, cancel = context.WithCancel(context.Background()) - if v, ok := s.cancels.Load(id); ok { - v.(context.CancelFunc)() - } - s.cancels.Store(id, cancel) - go func() { - select { - case <-w.done: - w.logger.Debug("upload timeout just canceled") - s.cancels.Delete(id) - return - case <-cancelCtx.Done(): - w.logger.Debug("upload refreshed") - return - case <-time.After(s.conf.Timeout): - w.logger.Debug("upload timeout") - s.cancels.Delete(id) - var terminateCtx, terminateCancel = context.WithTimeout(context.Background(), s.conf.Timeout) - defer terminateCancel() - _ = w.Terminate(terminateCtx) - } - }() - return w, nil - } - s.logger.Debug("GetUpload: worker not found", zap.String("id", id)) - return nil, errors.New("bad id") -} - -// AsTerminatableUpload returns tusd handler.TerminatableUpload -func (s *Scheduler) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload { - var worker = upload.(*Worker) - s.logger.Debug("AsTerminatableUpload") - return worker -} - -// AsLengthDeclarableUpload returns tusd handler.LengthDeclarableUpload -func (s *Scheduler) 
AsLengthDeclarableUpload(upload handler.Upload) handler.LengthDeclarableUpload { - s.logger.Debug("AsLengthDeclarableUpload") - return upload.(*Worker) -} diff --git a/internal/pkg/uploader/datalake.go b/internal/pkg/uploader/datalake.go deleted file mode 100644 index 097bf0a6b5d..00000000000 --- a/internal/pkg/uploader/datalake.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. - -package uploader - -import ( - "bytes" - "context" - "io" - "mime/multipart" - "os" - "path/filepath" - - "github.com/huly-stream/internal/pkg/log" - "github.com/pkg/errors" - "github.com/valyala/fasthttp" - "go.uber.org/zap" -) - -// DatalakeStorage represents datalake storage -type DatalakeStorage struct { - baseURL string - workspace string - token string -} - -// NewDatalakeStorage creates a new datalake client -func NewDatalakeStorage(baseURL, workspace, token string) Storage { - return &DatalakeStorage{ - baseURL: baseURL, - token: token, - workspace: workspace, - } -} - -// UploadFile uploads file to the datalake -func (d *DatalakeStorage) UploadFile(ctx context.Context, fileName string) error { - // #nosec - file, err := os.Open(fileName) - if err != nil { - return err - } - defer func() { - _ = file.Close() - }() - - var objectKey = getObjectKey(fileName) - var logger = log.FromContext(ctx).With(zap.String("datalake upload", d.workspace), zap.String("fileName", fileName)) - - logger.Debug("start uploading") - - body := &bytes.Buffer{} - writer := multipart.NewWriter(body) - - part, err := writer.CreateFormFile("file", objectKey) - if err != nil { - return errors.Wrapf(err, "failed to create form file") - } - - _, err = io.Copy(part, file) - if err != nil { - return errors.Wrapf(err, "failed to copy file data") - } - - _ = writer.Close() - - req := fasthttp.AcquireRequest() - defer fasthttp.ReleaseRequest(req) - - res := fasthttp.AcquireResponse() - defer fasthttp.ReleaseResponse(res) - - req.SetRequestURI(d.baseURL + "/upload/form-data/" + d.workspace) - req.Header.SetMethod(fasthttp.MethodPost) - req.Header.Add("Authorization", "Bearer "+d.token) - req.Header.SetContentType(writer.FormDataContentType()) - req.SetBody(body.Bytes()) - - client := fasthttp.Client{} - if err := client.Do(req, res); err != nil { - return errors.Wrapf(err, "upload failed") - } - - logger.Debug("file uploaded") - - return nil -} - -// DeleteFile deletes file from the datalake -func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error { - var logger = log.FromContext(ctx).With(zap.String("datalake delete", d.workspace), zap.String("fileName", fileName)) - logger.Debug("start deleting") - - var objectKey = getObjectKey(fileName) - - req := fasthttp.AcquireRequest() - defer fasthttp.ReleaseRequest(req) - - res := fasthttp.AcquireResponse() - defer fasthttp.ReleaseResponse(res) - - req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey) - req.Header.SetMethod(fasthttp.MethodDelete) - 
req.Header.Add("Authorization", "Bearer "+d.token) - - client := fasthttp.Client{} - if err := client.Do(req, res); err != nil { - logger.Error("failed to del", zap.Error(err)) - return errors.Wrapf(err, "delete failed") - } - - logger.Debug("file deleted") - - return nil -} - -func getObjectKey(s string) string { - var _, objectKey = filepath.Split(s) - return objectKey -} diff --git a/internal/pkg/uploader/options.go b/internal/pkg/uploader/options.go index 076e2c70afb..97ba5cc5089 100644 --- a/internal/pkg/uploader/options.go +++ b/internal/pkg/uploader/options.go @@ -13,7 +13,15 @@ package uploader -type options struct{} +import "time" -// Option provides option for storages -type Option func(*options) +// Options reprents options for uploading files +type Options struct { + Dir string + WorkerCount uint32 + BufferSize int + RetryCount int + RetryDelay time.Duration + Timeout time.Duration + SourceFile string +} diff --git a/internal/pkg/uploader/postpone.go b/internal/pkg/uploader/postpone.go deleted file mode 100644 index 29b308dc5e8..00000000000 --- a/internal/pkg/uploader/postpone.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. - -package uploader - -import ( - "context" - "time" - - "github.com/huly-stream/internal/pkg/log" - "go.uber.org/zap" -) - -func (u *uploader) postpone(fileName string, action func(ctx context.Context)) { - u.waitJobs.Add(1) - var ctx, cancel = context.WithCancel(context.Background()) - ctx = log.WithLoggerFields(ctx, zap.String("pospone", "action")) - var startCh = time.After(u.postponeDuration) - - if v, ok := u.contexts.Load(fileName); ok { - (*v.(*context.CancelFunc))() - } - u.contexts.Store(fileName, &cancel) - - go func() { - u.waitJobs.Done() - defer cancel() - select { - case <-ctx.Done(): - return - case <-startCh: - action(ctx) - if ctx.Err() == nil { - u.contexts.CompareAndDelete(fileName, &cancel) - } - } - }() -} diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index 8d04618f574..d251d4b4da0 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -11,204 +11,289 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package uploader provides objects and functions to work with uploading and monitoring files +// Package uploader provides a mechanism for uploading files to a remote storage. 
package uploader import ( "context" - "net/url" + "hash/fnv" "os" "path/filepath" "strings" "sync" "time" + "unsafe" - "github.com/pkg/errors" - "github.com/tus/tusd/v2/pkg/handler" - - "github.com/fsnotify/fsnotify" - "github.com/huly-stream/internal/pkg/config" - "github.com/huly-stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/storage" "go.uber.org/zap" + "k8s.io/utils/inotify" ) -type uploader struct { - done chan struct{} - waitJobs sync.WaitGroup - ctx context.Context - cancel context.CancelFunc - baseDir string - uploadID string - postponeDuration time.Duration - sentFiles sync.Map - storage Storage - contexts sync.Map - retryCount int - eventBufferCount uint +// See at https://man7.org/linux/man-pages/man7/inotify.7.html +const inotifyCloseWrite uint32 = 0x8 // IN_CLOSE_WRITE +const inotifyMovedTo uint32 = 0x80 // IN_MOVED_TO + +// Uploader represents file uploader +type Uploader interface { + Start() + Stop() + Cancel() } -func (u *uploader) retry(action func() error) { - var delay = time.Millisecond * 50 - for range u.retryCount { - if err := action(); err == nil { - return - } - time.Sleep(delay) - delay *= 2 +type uploaderImpl struct { + logger *zap.Logger + options *Options + storage storage.Storage + + filesCh chan string + + sentFiles sync.Map // key: string, value: struct{} + + shouldDeleteOnStop func(string) bool + + workersCh []chan func() + + watcherStopCh chan struct{} + watcherDoneCh chan struct{} + + uploadCtx context.Context + uploadCancel context.CancelFunc + + workerWaitGroup sync.WaitGroup +} + +// New creates a new instance of uploader +func New(ctx context.Context, s storage.Storage, opts Options) Uploader { + if s == nil { + panic("storage should not be nil") } + + var res = &uploaderImpl{ + options: &opts, + storage: s, + logger: log.FromContext(ctx).With(zap.String("uploader", opts.Dir)), + shouldDeleteOnStop: func(s string) bool { + return strings.HasSuffix(s, "m3u8") + }, + filesCh: make(chan string, opts.BufferSize), + watcherStopCh: make(chan struct{}), + watcherDoneCh: make(chan struct{}), + } + + res.workersCh = make([]chan func(), opts.WorkerCount) + + for i := range opts.WorkerCount { + res.workersCh[i] = make(chan func(), opts.BufferSize) + } + + res.logger.Sugar().Debugf("uploader config is %v", opts) + + res.uploadCtx, res.uploadCancel = context.WithCancel(context.Background()) + + _ = os.MkdirAll(opts.Dir, os.ModePerm) + res.workerWaitGroup.Add(1) + + go func() { + defer res.workerWaitGroup.Done() + initFiles, _ := os.ReadDir(opts.Dir) + for _, f := range initFiles { + var filePath = filepath.Join(opts.Dir, f.Name()) + if filePath == opts.SourceFile { + continue + } + res.filesCh <- filePath + } + }() + + return res } -// Rollback deletes all delivered files and also deletes all local content by uploadID -func (u *uploader) Rollback() { - logger := log.FromContext(u.ctx).With(zap.String("uploader", "Rollback")) - logger.Debug("starting") - defer logger.Debug("done") +func (u *uploaderImpl) Stop() { + u.stop(false) +} + +func (u *uploaderImpl) Cancel() { + u.stop(true) +} - u.postpone("", func(ctx context.Context) { - u.sentFiles.Range(func(key, value any) bool { - logger.Debug("deleting remote file", zap.String("key", key.(string))) - u.retry(func() error { return u.storage.DeleteFile(ctx, key.(string)) }) +func (u *uploaderImpl) stop(rollback bool) { + close(u.watcherStopCh) + <-u.watcherDoneCh + u.logger.Debug("file watch stopped") + + if rollback { + 
u.logger.Debug("starting rollback...") + var i uint32 + u.sentFiles.Range(func(key, _ any) bool { + i++ + var filename = key.(string) + u.workersCh[i%u.options.WorkerCount] <- func() { + u.deleteRemoteFile(filename) + } return true }) - }) + u.logger.Debug("rollback done") + } + close(u.filesCh) + u.workerWaitGroup.Wait() + u.logger.Debug("workers done") - u.Terminate() + u.uploadCancel() + _ = os.RemoveAll(u.options.Dir) + u.sentFiles.Clear() + + u.logger.Debug("finish done", zap.Bool("cancel", rollback)) } -// Terminate deletes -func (u *uploader) Terminate() { - logger := log.FromContext(u.ctx).With(zap.String("uploader", "Terminate")) - logger.Debug("starting") +func (u *uploaderImpl) Start() { + u.startWorkers() + u.startWatch() +} +func (u *uploaderImpl) startWorkers() { go func() { - defer logger.Debug("done") - u.waitJobs.Wait() - u.cancel() + var logger = u.logger.With(zap.String("func", "startWorkers")) + logger.Debug("fanout goroutine started") + defer logger.Debug("fanout goroutine stopped") + + h := fnv.New32a() + for f := range u.filesCh { + // #nosec + bytes := unsafe.Slice(unsafe.StringData(f), len(f)) + _, _ = h.Write(bytes) + id := h.Sum32() % u.options.WorkerCount + u.workersCh[id] <- func() { + u.uploadAndDelete(f) + } + h.Reset() + } + for i := range u.options.WorkerCount { + close(u.workersCh[i]) + } }() + + for i := range u.options.WorkerCount { + var logger = u.logger.With(zap.Uint32("worker", i)) + + u.workerWaitGroup.Add(1) + go func(index uint32) { + logger.Debug("start") + defer logger.Debug("finished") + + defer u.workerWaitGroup.Done() + for work := range u.workersCh[index] { + work() + } + }(i) + } } -func (u *uploader) uploadAndDelte(fileName string) { - u.postpone(fileName+"-del", func(context.Context) {}) - u.postpone(fileName, func(ctx context.Context) { - u.retry(func() error { return u.storage.UploadFile(ctx, fileName) }) - u.postpone(fileName+"-del", func(context.Context) { - _ = os.Remove(fileName) - }) - }) +func (u *uploaderImpl) deleteRemoteFile(f string) { + var logger = u.logger.With(zap.String("delete remote", f)) + logger.Debug("deleting remote file") + + for range u.options.RetryCount { + var ctx, cancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) + var err = u.storage.DeleteFile(ctx, f) + cancel() - u.sentFiles.Store(fileName, struct{}{}) + if err != nil { + logger.Error("attempt failed", zap.Error(err)) + } else { + logger.Debug("file deleted in remote storage") + break + } + + time.Sleep(u.options.RetryDelay) + } + + u.logger.Error("can not delete remote file") } -func (u *uploader) Serve() error { - var logger = log.FromContext(u.ctx).With(zap.String("uploader", u.uploadID), zap.String("dir", u.baseDir)) - var watcher, err = fsnotify.NewBufferedWatcher(u.eventBufferCount) - defer close(u.done) +func (u *uploaderImpl) uploadAndDelete(f string) { + var logger = u.logger.With(zap.String("upload and delete", f)) + logger.Debug("uploading file") - if err != nil { - logger.Error("can not start watcher") - return err + var _, ok = u.sentFiles.Load(f) + + if ok && !u.shouldDeleteOnStop(f) { + logger.Debug("file already uploaded") + return } - _ = os.MkdirAll(u.baseDir, os.ModePerm) - initFiles, _ := os.ReadDir(u.baseDir) - for _, f := range initFiles { - var filePath = filepath.Join(u.baseDir, f.Name()) - u.uploadAndDelte(filePath) + for range u.options.RetryCount { + var ctx, cancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) + var err = u.storage.PutFile(ctx, f) + cancel() + + if err != nil { + logger.Error("attempt failed", 
zap.Error(err)) + } else { + if !u.shouldDeleteOnStop(f) { + _ = os.Remove(f) + logger.Debug("removed file locally") + } + u.sentFiles.Store(f, struct{}{}) + logger.Debug("file uploaded") + break + } + + time.Sleep(u.options.RetryDelay) } +} + +func (u *uploaderImpl) startWatch() { + var logger = u.logger.With(zap.String("func", "startWatch")) + var watcher, err = inotify.NewWatcher() - if err := watcher.Add(u.baseDir); err != nil { - return err + if err != nil { + logger.Error("can not start file watcher", zap.Error(err)) + return } + if err := watcher.AddWatch(u.options.Dir, inotifyCloseWrite); err != nil { + logger.Error("can not start watching for close write", zap.Error(err)) + return + } + if err := watcher.AddWatch(u.options.Dir, inotifyMovedTo); err != nil { + logger.Error("can not start watching for moved to", zap.Error(err)) + return + } defer func() { _ = watcher.Close() + close(u.watcherDoneCh) }() - logger.Debug("uploader has initialized and started watching") + logger.Debug("watching for file updates") defer logger.Debug("done") for { select { - case <-u.ctx.Done(): - return u.ctx.Err() - case event, ok := <-watcher.Events: + case <-u.watcherStopCh: + return + case event, ok := <-watcher.Event: if !ok { - return u.ctx.Err() + logger.Error("file channel was closed") + return } - if strings.HasSuffix(event.Name, "tmp") { + if !strings.Contains(event.Name, u.options.Dir) { continue } - if !strings.Contains(event.Name, u.uploadID) { + if event.Name == u.options.SourceFile { continue } - u.uploadAndDelte(event.Name) - case err, ok := <-watcher.Errors: - if !ok { - return u.ctx.Err() + if strings.HasSuffix(event.Name, "tmp") { + continue } - logger.Error("get an error", zap.Error(err)) - } - } -} - -// Uploader manages content delivering -type Uploader interface { - Terminate() - Rollback() - Serve() error -} - -// Storage represents file-based storage -type Storage interface { - UploadFile(ctx context.Context, fileName string) error - DeleteFile(ctx context.Context, fileName string) error -} + logger.Debug("received an event", zap.String("event", event.Name), zap.Uint32("mask", event.Mask)) -// New creates a new instance of Uplaoder -func New(ctx context.Context, baseDir string, endpointURL *url.URL, uploadInfo handler.FileInfo) (Uploader, error) { - var uploaderCtx, uploadCancel = context.WithCancel(context.Background()) - uploaderCtx = log.WithLoggerFields(uploaderCtx) - - var storage Storage - var err error - - storage, err = NewStorageByURL(uploaderCtx, endpointURL, uploadInfo.MetaData) - if err != nil { - uploadCancel() - return nil, err - } - - return &uploader{ - ctx: uploaderCtx, - cancel: uploadCancel, - done: make(chan struct{}), - uploadID: uploadInfo.ID, - postponeDuration: time.Second * 2, - storage: storage, - retryCount: 10, - baseDir: filepath.Join(baseDir, uploadInfo.ID), - eventBufferCount: 100, - }, nil -} - -// NewStorageByURL creates a new storage basd on the type from the url scheme, for example "datalake://my-datalake-endpoint" -func NewStorageByURL(ctx context.Context, u *url.URL, headers map[string]string) (Storage, error) { - var workspace = headers["workspace"] - if workspace == "" { - return nil, errors.New("missed workspace in the client's metadata") - } - c, _ := config.FromEnv() - switch u.Scheme { - case "tus": - return nil, errors.New("not imlemented yet") - case "datalake": - if headers["token"] == "" { - return nil, errors.New("missed auth token in the client's metadata") + u.filesCh <- event.Name + case err, ok := <-watcher.Error: + if !ok { + 
logger.Error("error channel was closed") + } + logger.Error("received an error", zap.Error(err)) } - return NewDatalakeStorage(c.Endpoint().String(), workspace, headers["token"]), nil - case "s3": - return NewS3(ctx, c.Endpoint().String(), workspace), nil - default: - return nil, errors.New("unknown scheme") } } From 9c82eed873e14bb1625bb2ecf137fca2cb7f15db Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Sun, 16 Mar 2025 01:09:22 +0300 Subject: [PATCH 053/636] update ci for go 1.24.1 Signed-off-by: denis-tingaikin --- .github/workflows/main.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index 635804cf495..35e5f787980 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -36,7 +36,7 @@ jobs: - name: Setup Go uses: actions/setup-go@v5 with: - go-version: 1.23.5 + go-version: 1.24.1 - name: Build run: go build -race ./... - name: Test @@ -47,7 +47,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-go@v5 with: - go-version: 1.23.5 + go-version: 1.24.1 - run: go mod tidy - name: Check for changes in go.mod or go.sum run: | @@ -63,9 +63,9 @@ jobs: - name: Setup Go uses: actions/setup-go@v5 with: - go-version: 1.23.5 + go-version: 1.24.1 - name: golangci-lint uses: golangci/golangci-lint-action@v4 with: - version: v1.60.3 + version: v1.64.7 args: --timeout 3m --verbose From c6a7bae060ca93dde948bb65a6f57d389d905366 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Sun, 16 Mar 2025 01:26:40 +0300 Subject: [PATCH 054/636] apply self code review Signed-off-by: denis-tingaikin --- Dockerfile | 4 ++-- internal/pkg/transcoder/{queue.go => scheduler.go} | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename internal/pkg/transcoder/{queue.go => scheduler.go} (100%) diff --git a/Dockerfile b/Dockerfile index 4304349055c..9fd38ffc7e7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM --platform=linux/amd64 golang:1.24.1 AS builder +FROM --platform=$BUILDPLATFORM golang:1.24.1 AS builder ENV GO111MODULE=on ENV CGO_ENABLED=0 ENV GOBIN=/bin @@ -21,7 +21,7 @@ COPY . 
./ RUN set -xe && GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /go/bin/stream ./cmd/stream -FROM --platform=linux/amd64 alpine +FROM --platform=$BUILDPLATFORM alpine RUN set -xe && apk add --no-cache ffmpeg RUN apk add --no-cache ca-certificates jq bash \ diff --git a/internal/pkg/transcoder/queue.go b/internal/pkg/transcoder/scheduler.go similarity index 100% rename from internal/pkg/transcoder/queue.go rename to internal/pkg/transcoder/scheduler.go From 528306f4b0fe711b745e542694617782c4587f55 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 18 Mar 2025 18:43:45 +0300 Subject: [PATCH 055/636] UBERF-9641: Add support for thumbnail Signed-off-by: denis-tingaikin --- Dockerfile | 4 +- internal/pkg/api/v1/recording/handler.go | 12 +-- internal/pkg/api/v1/transcoding/handler.go | 26 ++++-- .../{transcoder => mediaconvert}/command.go | 13 ++- .../command_test.go | 12 +-- .../coordinator.go | 10 ++- .../{transcoder => mediaconvert}/scheduler.go | 85 ++++++++++--------- .../{transcoder => mediaconvert}/stream.go | 4 +- 8 files changed, 98 insertions(+), 68 deletions(-) rename internal/pkg/{transcoder => mediaconvert}/command.go (89%) rename internal/pkg/{transcoder => mediaconvert}/command_test.go (65%) rename internal/pkg/{transcoder => mediaconvert}/coordinator.go (95%) rename internal/pkg/{transcoder => mediaconvert}/scheduler.go (76%) rename internal/pkg/{transcoder => mediaconvert}/stream.go (98%) diff --git a/Dockerfile b/Dockerfile index 9fd38ffc7e7..121a1b5efb4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM --platform=$BUILDPLATFORM golang:1.24.1 AS builder +FROM golang:1.24.1 AS builder ENV GO111MODULE=on ENV CGO_ENABLED=0 ENV GOBIN=/bin @@ -21,7 +21,7 @@ COPY . 
./ RUN set -xe && GOOS=$TARGETOS GOARCH=$TARGETARCH go build -o /go/bin/stream ./cmd/stream -FROM --platform=$BUILDPLATFORM alpine +FROM alpine RUN set -xe && apk add --no-cache ffmpeg RUN apk add --no-cache ca-certificates jq bash \ diff --git a/internal/pkg/api/v1/recording/handler.go b/internal/pkg/api/v1/recording/handler.go index 4282261f767..e65dc53578c 100644 --- a/internal/pkg/api/v1/recording/handler.go +++ b/internal/pkg/api/v1/recording/handler.go @@ -24,7 +24,7 @@ import ( "github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" - "github.com/hcengineering/stream/internal/pkg/transcoder" + "github.com/hcengineering/stream/internal/pkg/mediaconvert" "go.uber.org/zap" tusd "github.com/tus/tusd/v2/pkg/handler" @@ -56,13 +56,13 @@ func NewHandler(ctx context.Context, cfg *config.Config) http.Handler { } func (h *recordingHandler) initialize() { - scheduler := transcoder.NewStreamCoordinator(h.ctx, h.cfg) + coordinator := mediaconvert.NewStreamCoordinator(h.ctx, h.cfg) tusComposer := tusd.NewStoreComposer() - tusComposer.UseCore(scheduler) - tusComposer.UseTerminater(scheduler) - tusComposer.UseConcater(scheduler) - tusComposer.UseLengthDeferrer(scheduler) + tusComposer.UseCore(coordinator) + tusComposer.UseTerminater(coordinator) + tusComposer.UseConcater(coordinator) + tusComposer.UseLengthDeferrer(coordinator) var tusHandler, err = tusd.NewHandler(tusd.Config{ BasePath: "/recording", diff --git a/internal/pkg/api/v1/transcoding/handler.go b/internal/pkg/api/v1/transcoding/handler.go index 0391ea74da6..67c4ca1fd03 100644 --- a/internal/pkg/api/v1/transcoding/handler.go +++ b/internal/pkg/api/v1/transcoding/handler.go @@ -19,15 +19,16 @@ import ( "encoding/json" "fmt" "net/http" + "strings" "github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" - "github.com/hcengineering/stream/internal/pkg/transcoder" + "github.com/hcengineering/stream/internal/pkg/mediaconvert" "go.uber.org/zap" ) type trascodeHandler struct { - taskQueue *transcoder.Scheduler + scheduler *mediaconvert.Scheduler logger *zap.Logger } @@ -45,7 +46,7 @@ func (t *trascodeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { } var decoder = json.NewDecoder(r.Body) - var task transcoder.Task + var task mediaconvert.Task if err := decoder.Decode(&task); err != nil { w.WriteHeader(http.StatusBadRequest) @@ -53,14 +54,29 @@ func (t *trascodeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { return } - t.taskQueue.Schedule(&task) + if !isSupportedFormat(task.Format) { + w.WriteHeader(http.StatusUnsupportedMediaType) + _, _ = fmt.Fprint(w, "output media format is not supported") + return + } + + t.scheduler.Schedule(&task) w.WriteHeader(http.StatusOK) } // NewHandler creates a new trnascoding http handler, requires context and config. 
func NewHandler(ctx context.Context, cfg *config.Config) http.Handler { return &trascodeHandler{ - taskQueue: transcoder.NewScheduler(ctx, cfg), + scheduler: mediaconvert.NewScheduler(ctx, cfg), logger: log.FromContext(ctx).With(zap.String("handler", "transcoding")), } } + +func isSupportedFormat(s string) bool { + switch strings.ToLower(s) { + case "hls": + return true + default: + return false + } +} diff --git a/internal/pkg/transcoder/command.go b/internal/pkg/mediaconvert/command.go similarity index 89% rename from internal/pkg/transcoder/command.go rename to internal/pkg/mediaconvert/command.go index 9cc65d96379..2b68893404f 100644 --- a/internal/pkg/transcoder/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -13,7 +13,7 @@ // limitations under the License. // -package transcoder +package mediaconvert import ( "context" @@ -79,11 +79,21 @@ func BuildRawVideoCommand(opts *Options) []string { "-c:v", "copy", "-hls_time", "5", + "-hls_flags", "split_by_time", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) } +// BuildThumbnailCommand creates a command that creates a thumbnail for the input video +func BuildThumbnailCommand(opts *Options) []string { + return append([]string{}, + "-i", opts.Input, + "-vframes", "1", + filepath.Join(opts.OuputDir, opts.UploadID, opts.UploadID+".jpg"), + ) +} + // BuildScalingVideoCommand returns flags for ffmpeg for video scaling func BuildScalingVideoCommand(opts *Options) []string { var result = buildCommonComamnd(opts) @@ -97,6 +107,7 @@ func BuildScalingVideoCommand(opts *Options) []string { "-crf", "23", "-g", "60", "-hls_time", "5", + "-hls_flags", "split_by_time", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", level)), filepath.Join(opts.OuputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, level))) diff --git a/internal/pkg/transcoder/command_test.go b/internal/pkg/mediaconvert/command_test.go similarity index 65% rename from internal/pkg/transcoder/command_test.go rename to internal/pkg/mediaconvert/command_test.go index db382a555e7..42ddc4ea4d7 100644 --- a/internal/pkg/transcoder/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -11,19 +11,19 @@ // See the License for the specific language governing permissions and // limitations under the License. 
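BuildThumbnailCommand above only adds three arguments; restated outside the package so the resulting invocation is easy to eyeball, it is equivalent to running ffmpeg with -vframes 1 and writing the single extracted frame next to the HLS renditions as <uploadID>.jpg. The values below are placeholders.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// buildThumbnailArgs restates BuildThumbnailCommand in isolation.
func buildThumbnailArgs(input, outputDir, uploadID string) []string {
	return []string{
		"-i", input,
		"-vframes", "1",
		filepath.Join(outputDir, uploadID, uploadID+".jpg"),
	}
}

func main() {
	args := buildThumbnailArgs("source.mp4", "out", "abc") // placeholder values
	fmt.Println("ffmpeg " + strings.Join(args, " "))
	// ffmpeg -i source.mp4 -vframes 1 out/abc/abc.jpg
}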
-package transcoder_test +package mediaconvert_test import ( "strings" "testing" + "github.com/hcengineering/stream/internal/pkg/mediaconvert" "github.com/hcengineering/stream/internal/pkg/resconv" - "github.com/hcengineering/stream/internal/pkg/transcoder" "github.com/stretchr/testify/require" ) func Test_BuildVideoCommand_Scaling(t *testing.T) { - var scaleCommand = transcoder.BuildScalingVideoCommand(&transcoder.Options{ + var scaleCommand = mediaconvert.BuildScalingVideoCommand(&mediaconvert.Options{ OuputDir: "test", Input: "pipe:0", UploadID: "1", @@ -31,13 +31,13 @@ func Test_BuildVideoCommand_Scaling(t *testing.T) { ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -vf scale=-2:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -vf scale=-2:480 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-threads 4 -i pipe:0 -vf scale=-2:720 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -vf scale=-2:480 -c:v libx264 -preset veryfast -crf 23 -g 60 -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } func Test_BuildVideoCommand_Raw(t *testing.T) { - var rawCommand = transcoder.BuildRawVideoCommand(&transcoder.Options{ + var rawCommand = mediaconvert.BuildRawVideoCommand(&mediaconvert.Options{ OuputDir: "test", Input: "pipe:0", UploadID: "1", @@ -45,7 +45,7 @@ func Test_BuildVideoCommand_Raw(t *testing.T) { Level: resconv.Level("651:490"), }) - const expected = `"-threads 4 -i pipe:0 -c:v copy -hls_time 5 -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `"-threads 4 -i pipe:0 -c:v copy -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/transcoder/coordinator.go b/internal/pkg/mediaconvert/coordinator.go similarity index 95% rename from internal/pkg/transcoder/coordinator.go rename to internal/pkg/mediaconvert/coordinator.go index d0705cb0ce0..843716ce793 100644 --- a/internal/pkg/transcoder/coordinator.go +++ b/internal/pkg/mediaconvert/coordinator.go @@ -13,7 +13,7 @@ // limitations under the License. 
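
The expected strings in these tests spell out the full ffmpeg argument lists, including the newly added -hls_flags split_by_time. As a rough illustration of how such an argument slice becomes a process, the sketch below mirrors BuildThumbnailCommand with plain os/exec; the package's own newFfmpegCommand helper is not shown in this patch, and the binary name and invocation details are assumptions.

    package main

    import (
        "context"
        "os/exec"
        "path/filepath"
    )

    // runThumbnail mirrors BuildThumbnailCommand: extract a single frame from
    // the input and write it as <uploadID>.jpg next to the HLS segments.
    func runThumbnail(ctx context.Context, input, outputDir, uploadID string) error {
        args := []string{
            "-i", input,
            "-vframes", "1",
            filepath.Join(outputDir, uploadID, uploadID+".jpg"),
        }
        return exec.CommandContext(ctx, "ffmpeg", args...).Run()
    }

    func main() {
        _ = runThumbnail(context.Background(), "input.mp4", "out", "upload-1")
    }
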
// -package transcoder +package mediaconvert import ( "context" @@ -77,7 +77,7 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo var stream = &Stream{ writer: sharedpipe.NewWriter(), info: info, - logger: log.FromContext(s.mainContext).With(zap.String("worker", info.ID)), + logger: log.FromContext(s.mainContext).With(zap.String("stream", info.ID)), done: make(chan struct{}), } @@ -142,12 +142,12 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo // GetUpload returns current a worker based on upload id func (s *StreamCoordinator) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { if v, ok := s.streams.Load(id); ok { - s.logger.Debug("GetUpload: found worker by id", zap.String("id", id)) + s.logger.Debug("GetUpload: found stream by id", zap.String("id", id)) var w = v.(*Stream) s.manageTimeout(w) return w, nil } - s.logger.Debug("GetUpload: worker not found", zap.String("id", id)) + s.logger.Debug("GetUpload: stream not found", zap.String("id", id)) return nil, errors.New("bad id") } @@ -175,6 +175,7 @@ func (s *StreamCoordinator) manageTimeout(w *Stream) { case <-w.done: w.logger.Debug("stream has finished") s.cancels.Delete(w.info.ID) + s.streams.Delete(w.info.ID) return case <-cancelCtx.Done(): w.logger.Debug("stream timeout has refreshed") @@ -185,6 +186,7 @@ func (s *StreamCoordinator) manageTimeout(w *Stream) { var terminateCtx, terminateCancel = context.WithTimeout(context.Background(), s.conf.Timeout) defer terminateCancel() _ = w.Terminate(terminateCtx) + s.streams.Delete(w.info.ID) } }() } diff --git a/internal/pkg/transcoder/scheduler.go b/internal/pkg/mediaconvert/scheduler.go similarity index 76% rename from internal/pkg/transcoder/scheduler.go rename to internal/pkg/mediaconvert/scheduler.go index 4a82ae107b4..83894425dbd 100644 --- a/internal/pkg/transcoder/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -13,7 +13,7 @@ // limitations under the License. 
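
The two streams.Delete calls added above close a leak: streams that finished or timed out previously stayed in the coordinator's map, so GetUpload could hand back a dead stream. A simplified sketch of that watchdog pattern follows; the real manageTimeout also refreshes the timer on new activity and calls Terminate on timeout, both omitted here, and the names are illustrative rather than the package's actual types.

    package main

    import (
        "context"
        "sync"
        "time"
    )

    // watch removes a stream entry once it finishes or goes idle past the
    // timeout, so later lookups can no longer return a stale stream.
    func watch(streams *sync.Map, id string, done <-chan struct{}, timeout time.Duration) {
        ctx, cancel := context.WithTimeout(context.Background(), timeout)
        defer cancel()

        select {
        case <-done:
            // Stream finished normally.
        case <-ctx.Done():
            // Idle for too long; the real code terminates the upload here.
        }
        streams.Delete(id)
    }

    func main() {
        var streams sync.Map
        done := make(chan struct{})
        streams.Store("upload-1", struct{}{})
        go watch(&streams, "upload-1", done, 30*time.Second)
        close(done)
    }
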
// -package transcoder +package mediaconvert import ( "context" @@ -37,7 +37,10 @@ import ( // HLS represents metadata for transcoding result type HLS struct { - Source string `json:"source"` + Source string `json:"source"` + Thumbnail string `json:"thumbnail"` + Width int `json:"width"` + Height int `json:"height"` } // Task represents transcoding task @@ -77,7 +80,7 @@ func NewScheduler(ctx context.Context, cfg *config.Config) *Scheduler { taskCh: make(chan *Task, 128), cfg: cfg, ctx: ctx, - logger: log.FromContext(ctx).With(zap.String("transcoding", "planner")), + logger: log.FromContext(ctx).With(zap.String("transcoding", "scheduler")), } go p.start() @@ -100,6 +103,7 @@ func (p *Scheduler) start() { } } +// TODO: add a factory pattern to process tasks by different media type func (p *Scheduler) processTask(ctx context.Context, task *Task) { var logger = p.logger.With(zap.String("task-id", task.ID)) @@ -166,60 +170,57 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { SourceFile: sourceFilePath, }) - go uploader.Start() + _ = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) - logger.Debug("phase 6: start async transcode process") - var rawCommand, scaleCommand *exec.Cmd + go uploader.Start() - rawCommand, err = newFfmpegCommand(ctx, nil, BuildRawVideoCommand(&opts)) - if err != nil { - logger.Error("can not create ffmpeg command", zap.Error(err)) - go uploader.Cancel() - return - } + logger.Debug("phase 6: start async transcode processes") - scaleCommand, err = newFfmpegCommand(ctx, nil, BuildScalingVideoCommand(&opts)) - if err != nil { - logger.Error("can not create ffmpeg command", zap.Error(err)) - go uploader.Cancel() - return + var argsSlice = [][]string{ + BuildThumbnailCommand(&opts), + BuildRawVideoCommand(&opts), + BuildScalingVideoCommand(&opts), } - - _ = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) - - if err = rawCommand.Start(); err != nil { - logger.Error("can not run raw ffmpeg command", zap.Error(err)) - go uploader.Cancel() - return - } - - if err = scaleCommand.Start(); err != nil { - logger.Error("can not run scale ffmpeg command", zap.Error(err)) - go uploader.Cancel() - return + var cmds []*exec.Cmd + + for _, args := range argsSlice { + cmd, cmdErr := newFfmpegCommand(ctx, nil, args) + if cmdErr != nil { + logger.Error("can not create a new command", zap.Error(cmdErr), zap.Strings("args", args)) + go uploader.Cancel() + return + } + cmds = append(cmds, cmd) + if err = cmd.Start(); err != nil { + logger.Error("can not start a command", zap.Error(err), zap.Strings("args", args)) + go uploader.Cancel() + return + } } logger.Debug("phase 7: wait for the result") - if err = scaleCommand.Wait(); err != nil { - logger.Error("can not scale ", zap.Error(err)) - go uploader.Cancel() - return - } - if err = rawCommand.Wait(); err != nil { - logger.Error("can not process raw", zap.Error(err)) - go uploader.Cancel() - return + for _, cmd := range cmds { + if err = cmd.Wait(); err != nil { + logger.Error("can not wait for command end ", zap.Error(err)) + go uploader.Cancel() + return + } } logger.Debug("phase 8: schedule cleanup") go uploader.Stop() logger.Debug("phase 9: try to set metadata") - var resultURL = p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+"_master.m3u8") if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { - var hls = &HLS{Source: resultURL.String()} - logger.Debug("applying metadata", zap.Stringer("url", resultURL), zap.String("source", 
task.Source)) + var hls = HLS{ + Width: probe.FirstVideoStream().Width, + Height: probe.FirstVideoStream().Height, + Source: p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+"_master.m3u8").String(), + Thumbnail: p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+".jpg").String(), + } + + logger.Debug("applying metadata", zap.String("url", hls.Source), zap.String("thumbnail", hls.Thumbnail), zap.String("source", task.Source)) err = metaProvider.PatchMeta( ctx, task.Source, diff --git a/internal/pkg/transcoder/stream.go b/internal/pkg/mediaconvert/stream.go similarity index 98% rename from internal/pkg/transcoder/stream.go rename to internal/pkg/mediaconvert/stream.go index d59c12c09fd..ab47c8e771d 100644 --- a/internal/pkg/transcoder/stream.go +++ b/internal/pkg/mediaconvert/stream.go @@ -11,8 +11,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package transcoder provides types and functions for video trnascoding -package transcoder +// Package mediaconvert provides types and functions for video trnascoding +package mediaconvert import ( "context" From 04cf12cc449aaba4e7da1ee5e3fc38f2a615c794 Mon Sep 17 00:00:00 2001 From: denis-tingaikin Date: Tue, 18 Mar 2025 18:52:29 +0300 Subject: [PATCH 056/636] store blobid in meta Signed-off-by: denis-tingaikin --- internal/pkg/mediaconvert/scheduler.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 83894425dbd..aac40c14669 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -216,8 +216,8 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { var hls = HLS{ Width: probe.FirstVideoStream().Width, Height: probe.FirstVideoStream().Height, - Source: p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+"_master.m3u8").String(), - Thumbnail: p.cfg.Endpoint().JoinPath("blob", task.Workspace, task.ID+".jpg").String(), + Source: task.ID + "_master.m3u8", + Thumbnail: task.ID + ".jpg", } logger.Debug("applying metadata", zap.String("url", hls.Source), zap.String("thumbnail", hls.Thumbnail), zap.String("source", task.Source)) From bb2edae113b1e6ce0b8b72fd8bf0beb959b1c295 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 24 Mar 2025 10:03:47 +0400 Subject: [PATCH 057/636] Notifications, files, queries, rest-client (#36) Signed-off-by: Kristina Fefelova --- bun.lock | 219 ++--- package.json | 12 +- packages/client-query/package.json | 2 +- packages/client-query/src/index.ts | 29 +- packages/client-query/src/query.ts | 32 +- packages/client-sqlite/package.json | 31 - packages/client-sqlite/src/client.ts | 215 ----- packages/client-sqlite/tsconfig.json | 9 - packages/client-ws/src/client.ts | 240 ----- packages/client-ws/src/connection.ts | 156 ---- packages/cockroach/migrations/01_message.sql | 10 +- packages/cockroach/migrations/02_patch.sql | 17 +- .../cockroach/migrations/03_attachment.sql | 11 - packages/cockroach/migrations/03_files.sql | 20 + .../cockroach/migrations/06_notification.sql | 39 +- packages/cockroach/package.json | 2 +- packages/cockroach/src/adapter.ts | 186 ++-- packages/cockroach/src/connection.ts | 2 +- packages/cockroach/src/db/base.ts | 14 +- packages/cockroach/src/db/mapping.ts | 251 +++++ packages/cockroach/src/db/message.ts | 876 ++++++++++-------- packages/cockroach/src/db/notification.ts | 527 +++++++---- packages/cockroach/src/db/schema.ts | 141 +-- 
packages/cockroach/src/db/utils.ts | 48 +- packages/cockroach/src/types.ts | 6 +- packages/cockroach/src/utils.ts | 2 + packages/examples/package.json | 26 - packages/examples/src/httpServer.ts | 70 -- packages/examples/src/index.html | 51 - packages/examples/src/index.ts | 126 --- packages/query/package.json | 3 +- packages/query/src/index.ts | 3 +- packages/query/src/lq.ts | 133 +-- packages/query/src/messages/query.ts | 485 ++++++---- packages/query/src/messages/utils.ts | 69 -- .../query/src/notification-contexts/query.ts | 510 ++++++++++ packages/query/src/notifications/query.ts | 320 +++++-- packages/query/src/query.ts | 208 ----- packages/query/src/result.ts | 14 + packages/query/src/types.ts | 37 +- packages/query/src/utils.ts | 35 + packages/rest-client/package.json | 38 + .../src/index.ts | 4 +- packages/rest-client/src/rest.ts | 164 ++++ packages/rest-client/src/types.ts | 35 + packages/rest-client/src/utils.ts | 41 + .../{client-ws => rest-client}/tsconfig.json | 0 packages/sdk-types/package.json | 4 +- packages/sdk-types/src/client.ts | 19 +- packages/sdk-types/src/db.ts | 76 +- packages/sdk-types/src/query.ts | 13 - packages/sdk-types/src/requestEvent.ts | 148 +-- packages/sdk-types/src/responseEvent.ts | 118 ++- packages/sdk-types/src/serverApi.ts | 20 +- packages/server/package.json | 9 +- packages/server/src/eventProcessor.ts | 394 ++++---- packages/server/src/index.ts | 73 +- packages/server/src/manager.ts | 245 +++-- packages/server/src/metadata.ts | 19 +- .../server/src/notification/notification.ts | 174 ++++ packages/server/src/permissions.ts | 66 ++ packages/server/src/triggers.ts | 249 +++-- packages/server/src/types.ts | 31 + packages/server/src/utils.ts | 71 ++ packages/shared/package.json | 8 +- packages/shared/src/index.ts | 2 +- packages/shared/src/message.ts | 11 +- packages/shared/src/patch.ts | 135 +++ packages/sqlite-wasm/package.json | 29 - packages/sqlite-wasm/src/adapter.ts | 190 ---- packages/sqlite-wasm/src/connection.ts | 71 -- packages/sqlite-wasm/src/db/base.ts | 77 -- packages/sqlite-wasm/src/db/message.ts | 216 ----- packages/sqlite-wasm/src/db/notification.ts | 238 ----- packages/sqlite-wasm/src/db/types.ts | 72 -- packages/sqlite-wasm/src/migrations.ts | 130 --- packages/sqlite-wasm/tsconfig.json | 10 - packages/types/package.json | 6 +- .../src/index.ts => types/src/core.ts} | 13 +- packages/types/src/file.ts | 17 +- packages/types/src/index.ts | 3 +- packages/types/src/message.ts | 77 +- packages/types/src/notification.ts | 33 +- packages/types/src/query.ts | 51 +- packages/{client-ws => yaml}/package.json | 25 +- packages/yaml/src/deserialize.ts | 49 + packages/{sqlite-wasm => yaml}/src/index.ts | 3 +- .../src/files.ts => yaml/src/parse.ts} | 11 +- packages/{examples => yaml}/tsconfig.json | 0 89 files changed, 4547 insertions(+), 4128 deletions(-) delete mode 100644 packages/client-sqlite/package.json delete mode 100644 packages/client-sqlite/src/client.ts delete mode 100644 packages/client-sqlite/tsconfig.json delete mode 100644 packages/client-ws/src/client.ts delete mode 100644 packages/client-ws/src/connection.ts delete mode 100644 packages/cockroach/migrations/03_attachment.sql create mode 100644 packages/cockroach/migrations/03_files.sql create mode 100644 packages/cockroach/src/db/mapping.ts delete mode 100644 packages/examples/package.json delete mode 100644 packages/examples/src/httpServer.ts delete mode 100644 packages/examples/src/index.html delete mode 100644 packages/examples/src/index.ts delete mode 100644 
packages/query/src/messages/utils.ts create mode 100644 packages/query/src/notification-contexts/query.ts delete mode 100644 packages/query/src/query.ts create mode 100644 packages/query/src/utils.ts create mode 100644 packages/rest-client/package.json rename packages/{client-sqlite => rest-client}/src/index.ts (86%) create mode 100644 packages/rest-client/src/rest.ts create mode 100644 packages/rest-client/src/types.ts create mode 100644 packages/rest-client/src/utils.ts rename packages/{client-ws => rest-client}/tsconfig.json (100%) create mode 100644 packages/server/src/notification/notification.ts create mode 100644 packages/server/src/permissions.ts create mode 100644 packages/server/src/types.ts create mode 100644 packages/server/src/utils.ts create mode 100644 packages/shared/src/patch.ts delete mode 100644 packages/sqlite-wasm/package.json delete mode 100644 packages/sqlite-wasm/src/adapter.ts delete mode 100644 packages/sqlite-wasm/src/connection.ts delete mode 100644 packages/sqlite-wasm/src/db/base.ts delete mode 100644 packages/sqlite-wasm/src/db/message.ts delete mode 100644 packages/sqlite-wasm/src/db/notification.ts delete mode 100644 packages/sqlite-wasm/src/db/types.ts delete mode 100644 packages/sqlite-wasm/src/migrations.ts delete mode 100644 packages/sqlite-wasm/tsconfig.json rename packages/{client-ws/src/index.ts => types/src/core.ts} (62%) rename packages/{client-ws => yaml}/package.json (58%) create mode 100644 packages/yaml/src/deserialize.ts rename packages/{sqlite-wasm => yaml}/src/index.ts (91%) rename packages/{shared/src/files.ts => yaml/src/parse.ts} (90%) rename packages/{examples => yaml}/tsconfig.json (100%) diff --git a/bun.lock b/bun.lock index e341aec6155..fc721f86802 100644 --- a/bun.lock +++ b/bun.lock @@ -4,19 +4,19 @@ "": { "name": "@hcengineering/communication", "devDependencies": { - "@eslint/js": "^9.21.0", - "@types/bun": "^1.2.4", - "bun-types": "^1.2.4", - "eslint": "^9.21.0", + "@eslint/js": "^9.23.0", + "@types/bun": "^1.2.5", + "bun-types": "^1.2.5", + "eslint": "^9.23.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.4", "prettier": "^3.5.3", - "typescript-eslint": "^8.25.0", + "typescript-eslint": "^8.27.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.61", + "version": "0.1.147", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -30,41 +30,9 @@ "typescript": "^5.6.3", }, }, - "packages/client-sqlite": { - "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.61", - "dependencies": { - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-sqlite-wasm": "workspace:*", - "@hcengineering/communication-types": "workspace:*", - "fast-equals": "^5.0.1", - }, - "devDependencies": { - "@types/bun": "^1.1.14", - }, - "peerDependencies": { - "typescript": "^5.6.3", - }, - }, - "packages/client-ws": { - "name": "@hcengineering/communication-client-ws", - "version": "0.1.61", - "dependencies": { - "@hcengineering/communication-client-query": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-types": "workspace:*", - "@msgpack/msgpack": "^3.0.0-beta2", - }, - "devDependencies": { - "@types/bun": "^1.1.14", - }, - "peerDependencies": { - "typescript": "^5.6.3", - }, - }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - 
"version": "0.1.61", + "version": "0.1.147", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -79,32 +47,37 @@ "typescript": "^5.6.3", }, }, - "packages/examples": { - "name": "@hcengineering/communication-examples", - "version": "0.1.61", + "packages/query": { + "name": "@hcengineering/communication-query", + "version": "0.1.147", "dependencies": { - "@hcengineering/communication-client-ws": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-yaml": "workspace:*", + "fast-equals": "^5.0.1", }, "devDependencies": { "@types/bun": "^1.1.14", + "@types/crypto-js": "^4.2.2", }, "peerDependencies": { "typescript": "^5.6.3", }, }, - "packages/query": { - "name": "@hcengineering/communication-query", - "version": "0.1.61", + "packages/rest-client": { + "name": "@hcengineering/communication-rest-client", + "version": "0.1.147", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "fast-equals": "^5.0.1", + "@hcengineering/core": "^0.7.28", + "snappyjs": "^0.7.0", }, "devDependencies": { "@types/bun": "^1.1.14", - "@types/crypto-js": "^4.2.2", + "@types/snappyjs": "^0.7.1", }, "peerDependencies": { "typescript": "^5.6.3", @@ -112,10 +85,10 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.61", + "version": "0.1.147", "dependencies": { "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.7.15", + "@hcengineering/core": "^0.7.28", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -126,13 +99,14 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.61", + "version": "0.1.147", "dependencies": { + "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.7.15", - "@hcengineering/server-token": "0.7.15", + "@hcengineering/core": "^0.7.28", + "@hcengineering/server-token": "^0.7.28", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -146,28 +120,23 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.61", + "version": "0.1.147", "dependencies": { "@hcengineering/communication-types": "workspace:*", - "js-yaml": "^4.1.0", }, "devDependencies": { "@types/bun": "^1.1.14", - "@types/js-yaml": "^4.0.9", }, "peerDependencies": { "typescript": "^5.6.3", }, }, - "packages/sqlite-wasm": { - "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.61", + "packages/types": { + "name": "@hcengineering/communication-types", + "version": "0.1.147", "dependencies": { - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-types": "workspace:*", - "@sqlite.org/sqlite-wasm": "^3.47.1-build1", - "path": "^0.12.7", - "uuid": "^11.0.3", + "@hcengineering/card": "^0.7.28", + "@hcengineering/core": "^0.7.28", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -176,15 +145,17 @@ "typescript": "^5.6.3", }, }, - "packages/types": { - "name": "@hcengineering/communication-types", - "version": "0.1.61", + "packages/yaml": { + "name": "@hcengineering/communication-yaml", + 
"version": "0.1.147", "dependencies": { - "@hcengineering/card": "0.7.15", - "@hcengineering/core": "0.7.15", + "@hcengineering/communication-shared": "workspace:*", + "@hcengineering/communication-types": "workspace:*", + "js-yaml": "^4.1.0", }, "devDependencies": { "@types/bun": "^1.1.14", + "@types/js-yaml": "^4.0.9", }, "peerDependencies": { "typescript": "^5.6.3", @@ -202,11 +173,13 @@ "@eslint/config-array": ["@eslint/config-array@0.19.2", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w=="], + "@eslint/config-helpers": ["@eslint/config-helpers@0.2.0", "", {}, "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ=="], + "@eslint/core": ["@eslint/core@0.12.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg=="], - "@eslint/eslintrc": ["@eslint/eslintrc@3.3.0", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-yaVPAiNAalnCZedKLdR21GOGILMLKPyqSLWaAjQFvYA2i/ciDi8ArYVr69Anohb6cH2Ukhqti4aFnYyPm8wdwQ=="], + "@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="], - "@eslint/js": ["@eslint/js@9.21.0", "", {}, "sha512-BqStZ3HX8Yz6LvsF5ByXYrtigrV5AXADWLAGc7PH/1SxOb7/FIYYMszZZWiUou/GB9P2lXWk2SV4d+Z8h0nknw=="], + "@eslint/js": ["@eslint/js@9.23.0", "", {}, "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw=="], "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], @@ -222,41 +195,39 @@ "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.6.0", "", { "dependencies": { "tslib": "2" } }, "sha512-4rB4g+3hESy1bHSBG3tDFaMY2CH67iT7yne1e+0CLTsGLDcmoEWWpJjjpWVaYgYfYuohIRuo0E+N536gd2ZHZA=="], - "@hcengineering/analytics": ["@hcengineering/analytics@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.15/46f0006d8f4ba8ec04105770e6418251def15296", { "dependencies": { "@hcengineering/platform": "^0.7.15" } }, "sha512-U+tswirTFX0z7Sr51zfOEXbtrtfnf2xLzWTIrPip929g+oULJjCKMqoh+3HHVCF4HFm97x0P3qaRtW2hLgxyAQ=="], + "@hcengineering/account-client": ["@hcengineering/account-client@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/account-client/0.7.28/32c9bc0dd297f00f4d5461467f25e072d01b9c76", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28" } }, "sha512-k1dkioX+jYrLyAXLLLsBYVCdZKf3WjhnDHw/yeAQgLQQ/9W2ffLtDMZ8lYWV3XMQ3eWrUCodtTDOiAQgXd/LEQ=="], - "@hcengineering/card": ["@hcengineering/card@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.15/70ba947306289e9ec1719ec4670abb5abadd9a74", { "dependencies": { "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "@hcengineering/ui": "^0.7.15" } }, 
"sha512-xIM9UwFqhyjD9EJs0egn4kf3Q9Br8+spYHkzNl+5IPdL5d5itDIuy5HS9ng6XZ1BXApauo8MTl1DcaqJMneNbw=="], + "@hcengineering/analytics": ["@hcengineering/analytics@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.28/a8faf3d575bb28b9682b659495e738f8e0ca596e", { "dependencies": { "@hcengineering/platform": "^0.7.28" } }, "sha512-+CI/wJ8zgop+3mEcvhlB1UJBU6tIKq2Y7qTichZjGli8BckEIEwbD4j2beu2qXRSoTT2yUGWM9zpS9+fmH/DAw=="], - "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], - - "@hcengineering/communication-client-sqlite": ["@hcengineering/communication-client-sqlite@workspace:packages/client-sqlite"], + "@hcengineering/card": ["@hcengineering/card@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.28/5348996f5b7b4a34517780a5bf324122cbddfbad", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-1d3allyx9jTpE/gr9kk/g5qo5848i4FKsRiHgCpJqzNEdRW7TAednoHAr2hKRU5DZC/i3Br56axSkUQzV2Idhg=="], - "@hcengineering/communication-client-ws": ["@hcengineering/communication-client-ws@workspace:packages/client-ws"], + "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], "@hcengineering/communication-cockroach": ["@hcengineering/communication-cockroach@workspace:packages/cockroach"], - "@hcengineering/communication-examples": ["@hcengineering/communication-examples@workspace:packages/examples"], - "@hcengineering/communication-query": ["@hcengineering/communication-query@workspace:packages/query"], + "@hcengineering/communication-rest-client": ["@hcengineering/communication-rest-client@workspace:packages/rest-client"], + "@hcengineering/communication-sdk-types": ["@hcengineering/communication-sdk-types@workspace:packages/sdk-types"], "@hcengineering/communication-server": ["@hcengineering/communication-server@workspace:packages/server"], "@hcengineering/communication-shared": ["@hcengineering/communication-shared@workspace:packages/shared"], - "@hcengineering/communication-sqlite-wasm": ["@hcengineering/communication-sqlite-wasm@workspace:packages/sqlite-wasm"], - "@hcengineering/communication-types": ["@hcengineering/communication-types@workspace:packages/types"], - "@hcengineering/core": ["@hcengineering/core@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.15/785cb20f9adc4a3a3c2179a0ae38316f4b4f933f", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/platform": "^0.7.15", "fast-equals": "^5.2.2" } }, "sha512-YhRRdtcSuNSQUdh4TO5u3u/w/vE7fcildc51wnL7Erw7VZ1rVANEx9a3mScnViVH89xw0oAcfDAm/AbH5+EZcQ=="], + "@hcengineering/communication-yaml": ["@hcengineering/communication-yaml@workspace:packages/yaml"], - "@hcengineering/platform": ["@hcengineering/platform@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.15/3e6c41c4abfd6425be49d9b31afef627dcafa4cb", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-zv6afaQVu/T0MXrWch8JaxdVjLSUHDpIx3jgY1JE5xSy+FN8bjC30FR/RGavuMtm4z25rFsZQRPfr4qmZYZirQ=="], + "@hcengineering/core": ["@hcengineering/core@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.28/43b84c63262e2b8fe5554c1967812e2ce6bb56e1", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/platform": "^0.7.28", "fast-equals": "^5.2.2" } }, 
"sha512-qv4QE7mkect6JYlGMljW5CQ2uU80hFGq0E17lZ5rO79h1boWEKLJpINv+w3J2BdR/nNG7taIsdZg3yfl9fY4bg=="], - "@hcengineering/server-token": ["@hcengineering/server-token@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.15/5443096926f96919ed8ed748e90e3474137519ec", { "dependencies": { "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "jwt-simple": "^0.5.6" } }, "sha512-XEh0nDSSxMn34a9AHeXIgRh6iO025hKmGNXLZlXnm4kRIYODzMLhigds2H/RvmUjznfUTm9Bpy3EFLq9WFmzWg=="], + "@hcengineering/platform": ["@hcengineering/platform@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.28/2126ff97284b679ceab3c86f24febd73a3a28712", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-0zEifwsLdOs/M6TZ9xnPrQTTlhwW28sooiyL4ZfQsiC2AJUy05vLP5/W8V9eDLta+EiR5bjtlulGsg1GfPjUoQ=="], - "@hcengineering/theme": ["@hcengineering/theme@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.15/f02e70ea8421f0cd3e5b52c34ee69488d361b3e6", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/platform": "^0.7.15", "svelte": "^4.2.19" } }, "sha512-d+rX4Jp4J4YELBi763M5Zfe+3x5gyQjV1NcuP1GIvSCe9tXNboKEp5X2SPdqFlP1sBfE5sgVECIAtmZkhMAqcQ=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.28/b1cffcb628aa0c6ac73171ef340c8ccf0f85ab07", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "jwt-simple": "^0.5.6" } }, "sha512-TaHwzxuoFKN6jnWHIZk0UcRmB+UQ1QE1HZzUh5doPSFUYOAtTrSb32iJDA8e8oEADcJpOp9RGIUP/EP/VED7BQ=="], - "@hcengineering/ui": ["@hcengineering/ui@0.7.15", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.15/8462766246888da93d3f51e19e0b11d7c40312f6", { "dependencies": { "@hcengineering/analytics": "^0.7.15", "@hcengineering/core": "^0.7.15", "@hcengineering/platform": "^0.7.15", "@hcengineering/theme": "^0.7.15", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "svelte": "^4.2.19" } }, "sha512-OkllPX5wH3eJ3WSEzr+PJ0H8H2ZyXWGNHdUmwQSPW6UTZqAYia9WtCVO3sxR7SMtZtjAnKPitRycSG4Q/z8GVA=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.28/557640a7ba2ffaa4d6e5411a54577f60dc528300", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/platform": "^0.7.28", "svelte": "^4.2.19" } }, "sha512-Hv3wQKHr327TC7wMHCBiG4bMtg8vSlb7+UFwdKI8ztEs3359meqkXu+exqi7Nl5334voXIz5ZFxjwZwOMzNSFA=="], + + "@hcengineering/ui": ["@hcengineering/ui@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.28/82025fc775702ea8c8569611356662bfbf28eb74", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/theme": "^0.7.28", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "plyr": "^3.7.8", "svelte": "^4.2.19" } }, "sha512-QsUpO0+Idu1n2dC6KtorPJ6SSp30It8FHSuG3Y+Awv4td/ftVocOSFqhskbk+us9Yw9KqOYgkqhGYEHbfRfw4Q=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], @@ -276,21 +247,17 @@ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { 
"@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], - "@msgpack/msgpack": ["@msgpack/msgpack@3.0.1", "", {}, "sha512-9qysoVTITLcOFIIJeXbdtUgvvY25ojUp+WWfLc0O4H4KKWeamUNAqkjS5mej/PnVDnH70llWKNa7pzv5U4TqVQ=="], - "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], - "@pkgr/core": ["@pkgr/core@0.1.1", "", {}, "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA=="], - - "@sqlite.org/sqlite-wasm": ["@sqlite.org/sqlite-wasm@3.47.1-build1", "", { "bin": { "sqlite-wasm": "bin/index.js" } }, "sha512-3qnVGab3sjJ8ov0ce1rQGZrMkglYEQ/q5fnq1s1BpRWFMYsiJVaLD1zKepcI9E9r3qx7929+2G27Hfsasvrm6Q=="], + "@pkgr/core": ["@pkgr/core@0.2.0", "", {}, "sha512-vsJDAkYR6qCPu+ioGScGiMYR7LvZYIXh/dlQeviqoTWNCVfKTLYD/LkNWH4Mxsv2a5vpIRc77FN5DnmK1eBggQ=="], "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.4", "", { "dependencies": { "bun-types": "1.2.4" } }, "sha512-QtuV5OMR8/rdKJs213iwXDpfVvnskPXY/S0ZiFbsTjQZycuqPbMW8Gf/XhLfwE5njW8sxI2WjISURXPlHypMFA=="], + "@types/bun": ["@types/bun@1.2.5", "", { "dependencies": { "bun-types": "1.2.5" } }, "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -312,7 +279,7 @@ "@types/mime": ["@types/mime@1.3.5", "", {}, "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w=="], - "@types/node": ["@types/node@22.13.4", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-ywP2X0DYtX3y08eFVx5fNIw7/uIv8hYUKgXoK8oayJlLnKcRfEYCxWMVE1XagUdVtCJlZT1AU4LXEABW+L1Peg=="], + "@types/node": ["@types/node@22.13.9", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw=="], "@types/qs": ["@types/qs@6.9.18", "", {}, "sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA=="], @@ -322,27 +289,29 @@ "@types/serve-static": ["@types/serve-static@1.15.7", "", { "dependencies": { "@types/http-errors": "*", "@types/node": "*", "@types/send": "*" } }, "sha512-W8Ym+h8nhuRwaKPaDw34QUkwsGi6Rc4yYqvKFo5rm2FUEhCFbzVWrxXUxuKK8TASjWsysJY0nsmNCGhCOIsrOw=="], + "@types/snappyjs": ["@types/snappyjs@0.7.1", "", {}, "sha512-OxjzJ6cQZstysMh6PEwZWmK9qlKZyezHJKOkcUkZDooSFuog2votUEKkxMaTq51UQF3cJkXKQ+XGlj4FSl8JQQ=="], + "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw=="], - 
"@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + "@types/ws": ["@types/ws@8.18.0", "", { "dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.25.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/type-utils": "8.25.0", "@typescript-eslint/utils": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-VM7bpzAe7JO/BFf40pIT1lJqS/z1F8OaSsUB3rpFJucQA4cOSuH2RVVVkFULN+En0Djgr29/jb4EQnedUo95KA=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.27.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/type-utils": "8.27.0", "@typescript-eslint/utils": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-4henw4zkePi5p252c8ncBLzLce52SEUz2Ebj8faDnuUXz2UuHEONYcJ+G0oaCF+bYCWVZtrGzq3FD7YXetmnSA=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.25.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/types": "8.25.0", "@typescript-eslint/typescript-estree": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.27.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/types": "8.27.0", "@typescript-eslint/typescript-estree": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-XGwIabPallYipmcOk45DpsBSgLC64A0yvdAkrwEzwZ2viqGqRUJ8eEYoPz0CWnutgAFbNMPdsGGvzjSmcWVlEA=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0" } }, "sha512-6PPeiKIGbgStEyt4NNXa2ru5pMzQ8OYKO1hX1z53HMomrmiSB+R5FmChgQAP1ro8jMtNawz+TRQo/cSXrauTpg=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0" } }, "sha512-8oI9GwPMQmBryaaxG1tOZdxXVeMDte6NyJA4i7/TWa4fBwgnAXYlIQP+uYOeqAaLJ2JRxlG9CAyL+C+YE9Xknw=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.25.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.25.0", "@typescript-eslint/utils": "8.25.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 
<5.8.0" } }, "sha512-d77dHgHWnxmXOPJuDWO4FDWADmGQkN5+tt6SFRZz/RtCWl4pHgFl3+WdYCn16+3teG09DY6XtEpf3gGD0a186g=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.27.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.27.0", "@typescript-eslint/utils": "8.27.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-wVArTVcz1oJOIEJxui/nRhV0TXzD/zMSOYi/ggCfNq78EIszddXcJb7r4RCp/oBrjt8n9A0BSxRMKxHftpDxDA=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.25.0", "", {}, "sha512-+vUe0Zb4tkNgznQwicsvLUJgZIRs6ITeWSCclX1q85pR1iOiaj+4uZJIUp//Z27QWu5Cseiw3O3AR8hVpax7Aw=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.27.0", "", {}, "sha512-/6cp9yL72yUHAYq9g6DsAU+vVfvQmd1a8KyA81uvfDE21O2DwQ/qxlM4AR8TSdAu+kJLBDrEHKC5/W2/nxsY0A=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "@typescript-eslint/visitor-keys": "8.25.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.8.0" } }, "sha512-ZPaiAKEZ6Blt/TPAx5Ot0EIB/yGtLI2EsGoY6F7XKklfMxYQyvtL+gT/UCqkMzO0BVFHLDlzvFqQzurYahxv9Q=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-BnKq8cqPVoMw71O38a1tEb6iebEgGA80icSxW7g+kndx0o6ot6696HjG7NdgfuAVmVEtwXUr3L8R9ZuVjoQL6A=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.25.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.25.0", "@typescript-eslint/types": "8.25.0", "@typescript-eslint/typescript-estree": "8.25.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-syqRbrEv0J1wywiLsK60XzHnQe/kRViI3zwFALrNEgnntn1l24Ra2KvOAWwWbWZ1lBZxZljPDGOq967dsl6fkA=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.27.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/types": "8.27.0", "@typescript-eslint/typescript-estree": "8.27.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-njkodcwH1yvmo31YWgRHNb/x1Xhhq4/m81PhtvmRngD8iHPehxffz1SNCO+kwaePhATC+kOa/ggmvPoPza5i0Q=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.25.0", "", { "dependencies": { "@typescript-eslint/types": "8.25.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-kCYXKAum9CecGVHGij7muybDfTS2sD3t0L4bJsEZLkyrXUImiCTq1M3LG2SRtOhiHFwMR9wAFplpT6XHYjTkwQ=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-WsXQwMkILJvffP6z4U3FYJPlbf/j07HIxmDjZpbNvBJkMfvwXj5ACRkkHwBDvLBbDbtX5TdU64/rcvKJ/vuInQ=="], - "acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="], + "acorn": ["acorn@8.14.1", "", { "bin": { 
"acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], @@ -364,7 +333,7 @@ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.4", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-nDPymR207ZZEoWD4AavvEaa/KZe/qlrbMSchqpQwovPZCKc7pwMoENjEtHgMKaAjJhy+x6vfqSBA1QU3bJgs0Q=="], + "bun-types": ["bun-types@1.2.5", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -378,10 +347,14 @@ "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], + "core-js": ["core-js@3.41.0", "", {}, "sha512-SJ4/EHwS36QMJd6h/Rg+GyR4A5xE0FSI3eZ+iBVpfqf1x0eTSg1smWLHrA+2jQThZSh97fmSgFSU8B61nxosxA=="], + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], "css-tree": ["css-tree@2.3.1", "", { "dependencies": { "mdn-data": "2.0.30", "source-map-js": "^1.0.1" } }, "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw=="], + "custom-event-polyfill": ["custom-event-polyfill@1.0.7", "", {}, "sha512-TDDkd5DkaZxZFM8p+1I3yAlvM3rSr1wbrOliG4yJiwinMZN8z/iGL7BTlDkrJcYTmgUSb4ywVCc3ZaUtOtC76w=="], + "date-fns": ["date-fns@2.30.0", "", { "dependencies": { "@babel/runtime": "^7.21.0" } }, "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw=="], "date-fns-tz": ["date-fns-tz@2.0.1", "", { "peerDependencies": { "date-fns": "2.x" } }, "sha512-fJCG3Pwx8HUoLhkepdsP7Z5RsucUi+ZBOxyM5d0ZZ6c4SdYustq0VMmOu6Wf7bli+yS/Jwp91TOCqn9jMcVrUA=="], @@ -398,13 +371,13 @@ "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.21.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.2", "@eslint/core": "^0.12.0", "@eslint/eslintrc": "^3.3.0", "@eslint/js": "9.21.0", "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.2.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", 
"optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-KjeihdFqTPhOMXTt7StsDxriV4n66ueuF/jfPNC3j/lduHwr/ijDwJMsF+wyMJethgiKi5wniIE243vi07d3pg=="], + "eslint": ["eslint@9.23.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.2", "@eslint/config-helpers": "^0.2.0", "@eslint/core": "^0.12.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.23.0", "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw=="], "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], - "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.3", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.9.1" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw=="], + "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.4", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.10.2" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-SFtuYmnhwYCtuCDTKPoK+CEzCnEgKTU2qTLwoCxvrC0MFBTIXo1i6hDYOI4cwHaE5GZtlWmTN3YfucYi7KJwPw=="], - "eslint-scope": ["eslint-scope@8.2.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A=="], + "eslint-scope": ["eslint-scope@8.3.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ=="], "eslint-visitor-keys": ["eslint-visitor-keys@4.2.0", "", {}, "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw=="], @@ -432,7 +405,7 @@ "fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="], - "fastq": ["fastq@1.19.0", "", { "dependencies": { "reusify": "^1.0.4" } }, 
"sha512-7SFSRCNjBQIZH/xZR3iy5iQYR8aGBE0h3VG6/cwlbrpdciNYBMotQav8c1XI3HjHH+NikUpP53nPdlZSdWmFzA=="], + "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="], "file-entry-cache": ["file-entry-cache@8.0.0", "", { "dependencies": { "flat-cache": "^4.0.0" } }, "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="], @@ -460,8 +433,6 @@ "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - "inherits": ["inherits@2.0.3", "", {}, "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="], - "intl-messageformat": ["intl-messageformat@10.7.15", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.3", "@formatjs/fast-memoize": "2.2.6", "@formatjs/icu-messageformat-parser": "2.11.1", "tslib": "2" } }, "sha512-LRyExsEsefQSBjU2p47oAheoKz+EOJxSLDdjOaEjdriajfHsMXOmV/EhMvYSg9bAgCUHasuAC+mcUBe/95PfIg=="], "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], @@ -488,6 +459,8 @@ "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], + "loadjs": ["loadjs@4.3.0", "", {}, "sha512-vNX4ZZLJBeDEOBvdr2v/F+0aN5oMuPu7JTqrMwp+DtgK+AryOlpy6Xtm2/HpNr+azEa828oQjOtWsB6iDtSfSQ=="], + "locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="], "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], @@ -516,8 +489,6 @@ "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], - "path": ["path@0.12.7", "", { "dependencies": { "process": "^0.11.1", "util": "^0.10.3" } }, "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q=="], - "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], @@ -526,6 +497,8 @@ "picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + "plyr": ["plyr@3.7.8", "", { "dependencies": { "core-js": "^3.26.1", "custom-event-polyfill": "^1.0.7", "loadjs": "^4.2.0", "rangetouch": "^2.0.1", "url-polyfill": "^1.1.12" } }, "sha512-yG/EHDobwbB/uP+4Bm6eUpJ93f8xxHjjk2dYcD1Oqpe1EcuQl5tzzw9Oq+uVAzd2lkM11qZfydSiyIpiB8pgdA=="], + "postgres": ["postgres@3.4.5", "", {}, "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg=="], "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], @@ -534,17 +507,17 @@ "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, 
"sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], - "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], - "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + "rangetouch": ["rangetouch@2.0.1", "", {}, "sha512-sln+pNSc8NGaHoLzwNBssFSf/rSYkqeBXzX1AtJlkJiUaVSJSbRAWJk+4omsXkN+EJalzkZhWQ3th1m0FpR5xA=="], + "regenerator-runtime": ["regenerator-runtime@0.14.1", "", {}, "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="], "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], - "reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="], + "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], @@ -554,6 +527,8 @@ "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], + "snappyjs": ["snappyjs@0.7.0", "", {}, "sha512-u5iEEXkMe2EInQio6Wv9LWHOQYRDbD2O9hzS27GpT/lwfIQhTCnHCTqedqHIHe9ZcvQo+9au6vngQayipz1NYw=="], + "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], @@ -562,7 +537,7 @@ "svelte": ["svelte@4.2.19", "", { "dependencies": { "@ampproject/remapping": "^2.2.1", "@jridgewell/sourcemap-codec": "^1.4.15", "@jridgewell/trace-mapping": "^0.3.18", "@types/estree": "^1.0.1", "acorn": "^8.9.0", "aria-query": "^5.3.0", "axobject-query": "^4.0.0", "code-red": "^1.0.3", "css-tree": "^2.3.1", "estree-walker": "^3.0.3", "is-reference": "^3.0.1", "locate-character": "^3.0.0", "magic-string": "^0.30.4", "periscopic": "^3.1.0" } }, "sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw=="], - "synckit": ["synckit@0.9.2", "", { "dependencies": { "@pkgr/core": "^0.1.0", "tslib": "^2.6.2" } }, "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw=="], + "synckit": ["synckit@0.10.2", "", { "dependencies": { "@pkgr/core": "^0.2.0", "tslib": "^2.8.1" } }, "sha512-cSGiaCPhFzeFIQY8KKEacv46LclENY4d60jgkwCrKomvRkIjtMyss1dPkHLp/62c1leuOjEedB1+lWcwqTJSvA=="], "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], @@ -572,17 +547,17 @@ "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], - "typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": 
"bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="], + "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.25.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.25.0", "@typescript-eslint/parser": "8.25.0", "@typescript-eslint/utils": "8.25.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.8.0" } }, "sha512-TxRdQQLH4g7JkoFlYG3caW5v1S6kEkz8rqt80iQJZUYPq1zD1Ra7HfQBJJ88ABRaMvHAXnwRvRB4V+6sQ9xN5Q=="], + "typescript-eslint": ["typescript-eslint@8.27.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.27.0", "@typescript-eslint/parser": "8.27.0", "@typescript-eslint/utils": "8.27.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ZZ/8+Y0rRUMuW1gJaPtLWe4ryHbsPLzzibk5Sq+IFa2aOH1Vo0gPr1fbA6pOnzBke7zC2Da4w8AyCgxKXo3lqA=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], - "util": ["util@0.10.4", "", { "dependencies": { "inherits": "2.0.3" } }, "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A=="], + "url-polyfill": ["url-polyfill@1.1.13", "", {}, "sha512-tXzkojrv2SujumYthZ/WjF7jaSfNhSXlYMpE5AYdL2I3D7DCeo+mch8KtW2rUuKjDg+3VXODXHVgipt8yGY/eQ=="], - "uuid": ["uuid@11.0.5", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA=="], + "uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="], "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], @@ -596,6 +571,8 @@ "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + "bun-types/@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], diff --git a/package.json b/package.json index 7e3a9002f7a..5e298dc5c53 100644 --- a/package.json +++ b/package.json @@ -8,13 +8,13 @@ "format": "prettier --write packages/**/src/*.ts && bun run lint" }, "devDependencies": { - "@eslint/js": "^9.21.0", - "@types/bun": "^1.2.4", - "bun-types": "^1.2.4", - "eslint": "^9.21.0", + "@eslint/js": 
"^9.23.0", + "@types/bun": "^1.2.5", + "bun-types": "^1.2.5", + "eslint": "^9.23.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.4", "prettier": "^3.5.3", - "typescript-eslint": "^8.25.0" + "typescript-eslint": "^8.27.0" } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 853d4cd8a91..430773b9b15 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.61", + "version": "0.1.147", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index ee5d04972ed..5ebcd40b158 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -13,30 +13,39 @@ // limitations under the License. // -import { LiveQueries } from '@hcengineering/communication-query' -import type { QueryClient } from '@hcengineering/communication-sdk-types' +import { LiveQueries, type QueryClient } from '@hcengineering/communication-query' import type { WorkspaceID } from '@hcengineering/communication-types' -import { MessagesQuery, NotificationsQuery } from './query' +import { MessagesQuery, NotificationContextsQuery, NotificationsQuery } from './query' let lq: LiveQueries +let onDestroy: (fn: () => void) => void = () => {} export function createMessagesQuery(): MessagesQuery { - return new MessagesQuery(lq) + return new MessagesQuery(lq, onDestroy) } export function createNotificationsQuery(): NotificationsQuery { - return new NotificationsQuery(lq) + return new NotificationsQuery(lq, onDestroy) } -export function initLiveQueries(client: QueryClient, workspace: WorkspaceID, filesUrl: string): void { +export function createNotificationContextsQuery(): NotificationContextsQuery { + return new NotificationContextsQuery(lq, onDestroy) +} + +export function initLiveQueries( + client: QueryClient, + workspace: WorkspaceID, + filesUrl: string, + destroyFn?: (fn: () => void) => void +): void { if (lq != null) { lq.close() } - lq = new LiveQueries(client, workspace, filesUrl) - - client.onEvent = (event) => { - void lq.onEvent(event) + if (destroyFn != null) { + onDestroy = destroyFn } + + lq = new LiveQueries(client, workspace, filesUrl) } diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 76a7df614ee..45331d3155d 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -19,14 +19,26 @@ import type { NotificationsQueryCallback, QueryCallback } from '@hcengineering/communication-sdk-types' -import { type FindMessagesParams, type FindNotificationsParams } from '@hcengineering/communication-types' +import { + type FindMessagesParams, + type FindNotificationContextParams, + type FindNotificationsParams, + type NotificationContext +} from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' class BaseQuery

, C extends QueryCallback> { private oldQuery: P | undefined - private oldCallback: QueryCallback | undefined + private oldCallback: C | undefined - constructor(protected readonly lq: LiveQueries) {} + constructor( + protected readonly lq: LiveQueries, + onDestroy: (fn: () => void) => void + ) { + onDestroy(() => { + this.unsubscribe() + }) + } unsubscribe: () => void = () => {} @@ -82,3 +94,17 @@ export class NotificationsQuery extends BaseQuery +> { + override createQuery( + params: FindNotificationContextParams, + callback: QueryCallback + ): { + unsubscribe: () => void + } { + return this.lq.queryNotificationContexts(params, callback) + } +} diff --git a/packages/client-sqlite/package.json b/packages/client-sqlite/package.json deleted file mode 100644 index fb27de5dd5f..00000000000 --- a/packages/client-sqlite/package.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "name": "@hcengineering/communication-client-sqlite", - "version": "0.1.61", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "./types/index.d.ts", - "files": [ - "dist/index.js", - "dist/index.cjs", - "types/**/*.d.ts" - ], - "scripts": { - "bundle": "bun run bundle:browser", - "bundle:browser": "bun build src/index.ts --outdir dist --target browser" - }, - "devDependencies": { - "@types/bun": "^1.1.14" - }, - "dependencies": { - "@hcengineering/communication-types": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-sqlite-wasm": "workspace:*", - "fast-equals": "^5.0.1" - }, - "peerDependencies": { - "typescript": "^5.6.3" - }, - "publishConfig": { - "registry": "https://npm.pkg.github.com" - } -} diff --git a/packages/client-sqlite/src/client.ts b/packages/client-sqlite/src/client.ts deleted file mode 100644 index 7884b77a4d9..00000000000 --- a/packages/client-sqlite/src/client.ts +++ /dev/null @@ -1,215 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import { - type CardID, - type Message, - type FindMessagesParams, - type MessageID, - type RichText, - type SocialID, - type ContextID, - type NotificationContextUpdate, - type FindNotificationContextParams, - type NotificationContext, - type FindNotificationsParams, - type Notification, - type Attachment, - type Reaction, - type WorkspaceID, - type FindMessagesGroupsParams, - type MessagesGroup, - PatchType -} from '@hcengineering/communication-types' -import { - type Client, - type MessageCreatedEvent, - type DbAdapter, - type ResponseEvent, - ResponseEventType -} from '@hcengineering/communication-sdk-types' -import { createDbAdapter as createSqliteDbAdapter } from '@hcengineering/communication-sqlite-wasm' - -//TODO: FIXME -class DbClient { - onEvent: (event: ResponseEvent) => void = () => {} - - constructor( - private readonly db: DbAdapter, - private readonly workspace: WorkspaceID, - private readonly personalWorkspace: WorkspaceID - ) {} - - async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { - const created = new Date() - const id = await this.db.createMessage(card, content, creator, created) - - const event: MessageCreatedEvent = { - type: ResponseEventType.MessageCreated, - message: { - id, - card, - content, - creator, - created, - edited: created, - reactions: [], - attachments: [] - } - } - - this.onEvent(event) - - return id - } - - async removeMessage(card: CardID, message: MessageID) { - await this.db.removeMessage(card, message) - this.onEvent({ type: ResponseEventType.MessageRemoved, message, card }) - } - - async updateMessage(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { - const created = new Date() - await this.db.createPatch(card, message, PatchType.update, content, creator, created) - this.onEvent({ - type: ResponseEventType.PatchCreated, - card, - patch: { message, type: PatchType.update, content, creator, created } - }) - } - - async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - const created = new Date() - await this.db.createReaction(card, message, reaction, creator, created) - this.onEvent({ type: ResponseEventType.ReactionCreated, card, reaction: { message, reaction, creator, created } }) - } - - async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - await this.db.removeReaction(card, message, reaction, creator) - this.onEvent({ type: ResponseEventType.ReactionRemoved, card, message, reaction, creator }) - } - - async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { - const created = new Date() - await this.db.createAttachment(message, card, creator, created) - this.onEvent({ - type: ResponseEventType.AttachmentCreated, - card, - attachment: { message, card: attachment, creator, created } - }) - } - - async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { - await this.db.removeAttachment(message, card) - this.onEvent({ type: ResponseEventType.AttachmentRemoved, message, card, attachment }) - } - - async findMessages(params: FindMessagesParams): Promise { - const rawMessages = await this.db.findMessages(params) - return rawMessages.map((it) => this.toMessage(it)) - } - - async findMessagesGroups(params: FindMessagesGroupsParams): Promise { - return await this.db.findMessagesGroups(params) - } - - async findMessage(params: FindMessagesParams): Promise { - return (await this.findMessages({ ...params, 
limit: 1 }))[0] - } - - toMessage(raw: any): Message { - return { - id: raw.id, - card: raw.card, - content: raw.content, - creator: raw.creator, - created: new Date(raw.created), - edited: new Date(raw.edited), - reactions: raw.reactions.map((it: any) => this.toReaction(it)), - attachments: raw.attachments.map((it: any) => this.toAttachment(it)) - } - } - - toAttachment(raw: any): Attachment { - return { - message: raw.message, - card: raw.card, - creator: raw.creator, - created: new Date(raw.created) - } - } - - toReaction(raw: any): Reaction { - return { - message: raw.message, - reaction: raw.reaction, - creator: raw.creator, - created: new Date(raw.created) - } - } - - async createNotification(message: MessageID, context: ContextID): Promise { - await this.db.createNotification(message, context) - } - - async removeNotification(message: MessageID, context: ContextID): Promise { - await this.db.removeNotification(message, context) - } - - async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - return await this.db.createContext(this.personalWorkspace, card, lastView, lastUpdate) - } - - async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { - await this.db.updateContext(context, update) - } - - async removeNotificationContext(context: ContextID): Promise { - await this.db.removeContext(context) - } - - async findNotificationContexts(params: FindNotificationContextParams): Promise { - //TODO: should we filter by workspace? - return await this.db.findContexts(params, [this.personalWorkspace]) - } - - //eslint-disable-next-line @typescript-eslint/no-unused-vars - async findNotifications(params: FindNotificationsParams): Promise { - //TODO: should we filter by workspace? - return await this.db.findNotifications(params, this.personalWorkspace) - } - - //eslint-disable-next-line @typescript-eslint/no-unused-vars - async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { - //TODO: implement - } - - async unsubscribeQuery() { - //ignore - } - - close() { - this.db.close() - } -} - -export async function getSqliteClient( - workspace: WorkspaceID, - personalWorkspace: WorkspaceID, - dbUrl = 'file:communication.sqlite3?vfs=opfs' -): Promise { - const db = await createSqliteDbAdapter(dbUrl) - return new DbClient(db, workspace, personalWorkspace) as unknown as Client -} diff --git a/packages/client-sqlite/tsconfig.json b/packages/client-sqlite/tsconfig.json deleted file mode 100644 index 3ae07cd3fa2..00000000000 --- a/packages/client-sqlite/tsconfig.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "jsx": "react-jsx", - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src"] -} diff --git a/packages/client-ws/src/client.ts b/packages/client-ws/src/client.ts deleted file mode 100644 index 3fa8e0101db..00000000000 --- a/packages/client-ws/src/client.ts +++ /dev/null @@ -1,240 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import { - type CardID, - type ContextID, - type FindMessagesGroupsParams, - type FindMessagesParams, - type FindNotificationContextParams, - type FindNotificationsParams, - type Message, - type MessageID, - type MessagesGroup, - type Notification, - type NotificationContext, - type NotificationContextUpdate, - type RichText, - type SocialID, - type WorkspaceID -} from '@hcengineering/communication-types' -import { - RequestEventType, - type Client, - type CreateAttachmentEvent, - type CreateMessageEvent, - type CreateMessageResult, - type CreateNotificationContextEvent, - type CreateNotificationContextResult, - type CreateNotificationEvent, - type CreatePatchEvent, - type CreateReactionEvent, - type EventResult, - type RemoveAttachmentEvent, - type RemoveMessageEvent, - type RemoveNotificationContextEvent, - type RemoveNotificationEvent, - type RemoveReactionEvent, - type RequestEvent, - type ResponseEvent, - type UpdateNotificationContextEvent -} from '@hcengineering/communication-sdk-types' -import { initLiveQueries } from '@hcengineering/communication-client-query' - -import { WebSocketConnection } from './connection' - -class WsClient implements Client { - private readonly ws: WebSocketConnection - - onEvent: (event: ResponseEvent) => void = () => {} - - constructor( - private readonly url: string, - private readonly token: string, - private readonly binary: boolean = false - ) { - const connectionUrl = this.url + '?token=' + this.token - this.ws = new WebSocketConnection(connectionUrl, this.binary) - this.ws.onEvent = (event) => { - void this.onEvent(event) - } - } - - async createMessage(card: CardID, content: RichText, creator: SocialID): Promise { - const event: CreateMessageEvent = { - type: RequestEventType.CreateMessage, - card, - content, - creator - } - const result = await this.sendEvent(event) - return (result as CreateMessageResult).id - } - - async removeMessage(card: CardID, message: MessageID): Promise { - const event: RemoveMessageEvent = { - type: RequestEventType.RemoveMessage, - card, - message - } - await this.sendEvent(event) - } - - async updateMessage(card: CardID, message: MessageID, content: RichText, creator: SocialID): Promise { - const event: CreatePatchEvent = { - type: RequestEventType.CreatePatch, - card, - message, - content, - creator - } - await this.sendEvent(event) - } - - async createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - const event: CreateReactionEvent = { - type: RequestEventType.CreateReaction, - card, - message, - reaction, - creator - } - await this.sendEvent(event) - } - - async removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise { - const event: RemoveReactionEvent = { - type: RequestEventType.RemoveReaction, - card, - message, - reaction, - creator - } - await this.sendEvent(event) - } - - async createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise { - const event: CreateAttachmentEvent = { - type: RequestEventType.CreateAttachment, - card, - message, - attachment, - creator - } - await this.sendEvent(event) - } - - async removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise { - const event: RemoveAttachmentEvent = { - type: RequestEventType.RemoveAttachment, - card, - message, - attachment - } - await this.sendEvent(event) - } - - async findMessages(params: 
FindMessagesParams, queryId?: number): Promise { - return await this.ws.send('findMessages', [params, queryId]) - } - - async findMessagesGroups(params: FindMessagesGroupsParams): Promise { - return await this.ws.send('findMessagesGroups', [params]) - } - - async createNotification(message: MessageID, context: ContextID): Promise { - const event: CreateNotificationEvent = { - type: RequestEventType.CreateNotification, - message, - context - } - await this.sendEvent(event) - } - - async removeNotification(message: MessageID, context: ContextID): Promise { - const event: RemoveNotificationEvent = { - type: RequestEventType.RemoveNotification, - message, - context - } - await this.sendEvent(event) - } - - async createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise { - const event: CreateNotificationContextEvent = { - type: RequestEventType.CreateNotificationContext, - card, - lastView, - lastUpdate - } - const result = await this.sendEvent(event) - return (result as CreateNotificationContextResult).id - } - - async removeNotificationContext(context: ContextID): Promise { - const event: RemoveNotificationContextEvent = { - type: RequestEventType.RemoveNotificationContext, - context - } - await this.sendEvent(event) - } - - async updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise { - const event: UpdateNotificationContextEvent = { - type: RequestEventType.UpdateNotificationContext, - context, - update - } - await this.sendEvent(event) - } - - async findNotificationContexts( - params: FindNotificationContextParams, - queryId?: number - ): Promise { - return await this.ws.send('findNotificationContexts', [params, queryId]) - } - - async findNotifications(params: FindNotificationsParams, queryId?: number): Promise { - return await this.ws.send('findNotifications', [params, queryId]) - } - - async unsubscribeQuery(id: number): Promise { - await this.ws.send('unsubscribeQuery', [id]) - } - - private async sendEvent(event: RequestEvent): Promise { - return await this.ws.send('event', [event]) - } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { - //TODO: implement - } - - close() { - void this.ws.close() - } -} - -export async function getWebsocketClient( - url: string, - token: string, - workspace: WorkspaceID, - filesUrl: string -): Promise { - const client = new WsClient(url, token) - initLiveQueries(client, workspace, filesUrl) - return client -} diff --git a/packages/client-ws/src/connection.ts b/packages/client-ws/src/connection.ts deleted file mode 100644 index 9d0a8a80544..00000000000 --- a/packages/client-ws/src/connection.ts +++ /dev/null @@ -1,156 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import type { ResponseEvent } from '@hcengineering/communication-sdk-types' -import { encode, decode } from '@msgpack/msgpack' - -const PING_TIMEOUT = 10000 -const RECONNECT_TIMEOUT = 1000 - -export type RequestId = string - -export interface Response { - id?: RequestId - result?: any - error?: string -} - -export interface Request { - id?: RequestId - method: string - params: any[] -} - -export interface HelloRequest extends Request { - binary?: boolean -} - -export class WebSocketConnection { - private ws!: WebSocket | Promise - private requests: { [key: RequestId]: { resolve: (response: any) => void; reject: (reason: any) => void } } = {} - private lastId: number = 0 - - private pingInterval: any - private reconnectTimeout: any - - onEvent: (event: ResponseEvent) => void = () => {} - - constructor( - private url: string, - private readonly binary: boolean = false - ) { - this.connect() - } - - private connect(): void { - const ws = new WebSocket(this.url) - - ws.onmessage = (event: MessageEvent) => { - const response = deserializeResponse(event.data, this.binary) - if (response.id !== undefined) { - const handlers = this.requests[response.id] - if (handlers === undefined) return - delete this.requests[response.id] - if (response.error !== undefined) { - console.error('Websocket error', response.error) - handlers.reject(response.error) - } else { - handlers.resolve(response.result) - } - } else { - if (response.error !== undefined) { - console.error('Websocket error', response.error) - } else { - const event = response.result as ResponseEvent - this.onEvent(event) - } - } - } - - ws.onclose = () => { - clearInterval(this.pingInterval) - this.handleReconnect() - } - - this.ws = new Promise((resolve, reject) => { - ws.onopen = () => { - const request: HelloRequest = { id: 'hello', method: 'hello', params: [], binary: this.binary } - ws.send(serializeRequest(request, this.binary)) - clearInterval(this.pingInterval) - this.pingInterval = setInterval(() => { - void this.sendRequest({ method: 'ping', params: [] }) - }, PING_TIMEOUT) - resolve(ws) - } - ws.onerror = (event: any) => { - console.error('Websocket error', event) - reject(new Error('Websocket error')) - } - }) - } - - private handleReconnect() { - clearTimeout(this.reconnectTimeout) - this.reconnectTimeout = setTimeout(() => { - this.connect() - }, RECONNECT_TIMEOUT) - } - - async waitWs(): Promise { - if (this.ws instanceof Promise) { - this.ws = await this.ws - } - return this.ws - } - - async send(method: string, params: any[]): Promise { - const id = ++this.lastId - return await this.sendRequest({ id: id.toString(), method, params }) - } - - private async sendRequest(request: Request): Promise { - const ws = await this.waitWs() - - return new Promise((resolve, reject) => { - if (request.id !== undefined) { - this.requests[request.id] = { resolve, reject } - } - ws.send(serializeRequest(request, this.binary)) - }) - } - - async close(): Promise { - clearInterval(this.pingInterval) - clearTimeout(this.reconnectTimeout) - const ws = await this.waitWs() - ws.close() - } -} - -function serializeRequest(request: Request, binary: boolean): any { - if (binary) { - return encode(request) - } else { - return JSON.stringify(request) - } -} - -function deserializeResponse(data: any, binary: boolean): Response { - if (binary) { - return decode(data) as Response - } else { - return JSON.parse(data.toString()) - } -} diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql index 
d113fe2fd38..37f45dbef7a 100644 --- a/packages/cockroach/migrations/01_message.sql +++ b/packages/cockroach/migrations/01_message.sql @@ -8,6 +8,10 @@ CREATE TABLE IF NOT EXISTS communication.messages creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, + type VARCHAR(255) NOT NULL, + data JSONB NOT NULL DEFAULT '{}', + + PRIMARY KEY (workspace_id, card_id, id) ); @@ -20,10 +24,8 @@ CREATE TABLE IF NOT EXISTS communication.messages_groups card_id VARCHAR(255) NOT NULL, blob_id UUID NOT NULL, - from_date TIMESTAMPTZ NOT NULL, - to_date TIMESTAMPTZ NOT NULL, - from_id INT8 NOT NULL, - to_id INT8 NOT NULL, + from_sec TIMESTAMPTZ(0) NOT NULL, + to_sec TIMESTAMPTZ(0) NOT NULL, count INT NOT NULL, PRIMARY KEY (workspace_id, card_id, blob_id) diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql index 527df14bf03..1c2037b0467 100644 --- a/packages/cockroach/migrations/02_patch.sql +++ b/packages/cockroach/migrations/02_patch.sql @@ -1,13 +1,14 @@ CREATE TABLE IF NOT EXISTS communication.patch ( - id INT8 NOT NULL DEFAULT unique_rowid(), - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - type VARCHAR(255) NOT NULL, - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, + id INT8 NOT NULL DEFAULT unique_rowid(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + type VARCHAR(255) NOT NULL, + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, + message_created_sec TIMESTAMPTZ(0) NOT NULL, PRIMARY KEY (id) ); diff --git a/packages/cockroach/migrations/03_attachment.sql b/packages/cockroach/migrations/03_attachment.sql deleted file mode 100644 index 780002f3355..00000000000 --- a/packages/cockroach/migrations/03_attachment.sql +++ /dev/null @@ -1,11 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.attachments -( - message_id INT8 NOT NULL, - card_id VARCHAR(255) NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - PRIMARY KEY (card_id, message_id) -); - -CREATE INDEX IF NOT EXISTS attachment_message_idx ON communication.attachments (message_id); diff --git a/packages/cockroach/migrations/03_files.sql b/packages/cockroach/migrations/03_files.sql new file mode 100644 index 00000000000..06892450412 --- /dev/null +++ b/packages/cockroach/migrations/03_files.sql @@ -0,0 +1,20 @@ +CREATE TABLE IF NOT EXISTS communication.files +( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + + blob_id UUID NOT NULL, + filename VARCHAR(255) NOT NULL, + type VARCHAR(255) NOT NULL, + size INT8 NOT NULL, + + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), + + message_created_sec TIMESTAMPTZ(0) NOT NULL, + + PRIMARY KEY (workspace_id, card_id, message_id, blob_id) +); + +CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx ON communication.files (workspace_id, card_id, message_id); diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql index df565632a1d..88d60cd21f0 100644 --- a/packages/cockroach/migrations/06_notification.sql +++ b/packages/cockroach/migrations/06_notification.sql @@ -1,27 +1,38 @@ CREATE TABLE IF NOT EXISTS communication.notification_context ( - id UUID NOT NULL DEFAULT gen_random_uuid(), - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, + id INT8 NOT NULL DEFAULT unique_rowid(), - personal_workspace UUID NOT 
NULL, + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + account UUID NOT NULL, - archived_from TIMESTAMPTZ, - last_view TIMESTAMPTZ, - last_update TIMESTAMPTZ, + last_view TIMESTAMPTZ NOT NULL DEFAULT now(), + last_update TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, personal_workspace) + UNIQUE (workspace_id, card_id, account) ); - CREATE TABLE IF NOT EXISTS communication.notifications ( - message_id UUID NOT NULL, - context UUID NOT NULL, + id INT8 NOT NULL DEFAULT unique_rowid(), + context_id INT8 NOT NULL, + message_id INT8, + created TIMESTAMPTZ NOT NULL, + content JSONB NOT NULL DEFAULT '{}', - PRIMARY KEY (message_id, context), - FOREIGN KEY (context) REFERENCES communication.notification_context (id) ON DELETE CASCADE + PRIMARY KEY (id), + FOREIGN KEY (context_id) REFERENCES communication.notification_context (id) ON DELETE CASCADE ); -CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notifications (context); \ No newline at end of file +CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notifications (context_id); + +CREATE TABLE IF NOT EXISTS communication.collaborators +( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + account UUID NOT NULL, + date TIMESTAMPTZ NOT NULL DEFAULT now(), + + PRIMARY KEY (workspace_id, card_id, account) +); \ No newline at end of file diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 095bdf4c0d1..49719134759 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.61", + "version": "0.1.147", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index f5d9a846a67..c471d490931 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -13,11 +13,13 @@ // limitations under the License. 
// -import type { ParameterOrJSON, Row } from 'postgres' import type postgres from 'postgres' import { + type FindCollaboratorsParams, + type AccountID, type BlobID, type CardID, + type Collaborator, type ContextID, type FindMessagesGroupsParams, type FindMessagesParams, @@ -26,14 +28,15 @@ import { type Message, type MessageID, type MessagesGroup, + type MessageType, type Notification, type NotificationContext, - type NotificationContextUpdate, type PatchType, type RichText, type SocialID, + type Thread, type WorkspaceID, - type Thread + type NotificationID } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' import { retry } from '@hcengineering/communication-shared' @@ -41,8 +44,8 @@ import { retry } from '@hcengineering/communication-shared' import { MessagesDb } from './db/message' import { NotificationsDb } from './db/notification' import { connect, type PostgresClientReference } from './connection' -import { type Options, type Logger, type SqlClient } from './types' -import { injectVars } from './utils.ts' +import { type Logger, type Options, type SqlClient, type SqlParams, type SqlRow } from './types' +import { injectVars } from './utils' export class CockroachAdapter implements DbAdapter { private readonly message: MessagesDb @@ -58,16 +61,19 @@ export class CockroachAdapter implements DbAdapter { this.notification = new NotificationsDb(this.sql, this.workspace, logger, options) } - async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { - return await this.message.createMessage(card, content, creator, created) - } - - async removeMessage(card: CardID, message: MessageID, socialIds?: SocialID[]): Promise { - await this.message.removeMessage(card, message, socialIds) + async createMessage( + card: CardID, + type: MessageType, + content: RichText, + creator: SocialID, + created: Date, + data?: any + ): Promise { + return await this.message.createMessage(card, type, content, creator, created, data) } - async removeMessages(card: CardID, fromId: MessageID, toId: MessageID): Promise { - await this.message.removeMessages(card, fromId, toId) + async removeMessages(card: CardID, messages: MessageID[], socialIds?: SocialID[]): Promise { + return await this.message.removeMessages(card, messages, socialIds) } async createPatch( @@ -81,20 +87,8 @@ export class CockroachAdapter implements DbAdapter { await this.message.createPatch(card, message, type, content, creator, created) } - async removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise { - await this.message.removePatches(card, fromId, toId) - } - - async createMessagesGroup( - card: CardID, - blobId: BlobID, - fromDate: Date, - toDate: Date, - fromId: MessageID, - toId: MessageID, - count: number - ): Promise { - await this.message.createMessagesGroup(card, blobId, fromDate, toDate, fromId, toId, count) + async createMessagesGroup(card: CardID, blobId: BlobID, fromDate: Date, toDate: Date, count: number): Promise { + await this.message.createMessagesGroup(card, blobId, fromDate, toDate, count) } async removeMessagesGroup(card: CardID, blobId: BlobID): Promise { @@ -115,79 +109,90 @@ export class CockroachAdapter implements DbAdapter { await this.message.removeReaction(card, message, reaction, creator, new Date()) } - async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { - await this.message.createAttachment(message, attachment, creator, created) + async 
createFile(
+    card: CardID,
+    message: MessageID,
+    blobId: BlobID,
+    fileType: string,
+    filename: string,
+    size: number,
+    creator: SocialID,
+    created: Date
+  ): Promise<void> {
+    await this.message.createFile(card, message, blobId, fileType, filename, size, creator, created)
   }
 
-  async removeAttachment(message: MessageID, attachment: CardID): Promise<void> {
-    await this.message.removeAttachment(message, attachment)
+  async removeFile(card: CardID, message: MessageID, blobId: BlobID): Promise<void> {
+    await this.message.removeFile(card, message, blobId)
   }
 
   async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise<void> {
     await this.message.createThread(card, message, thread, created)
   }
 
-  async updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise<void> {
-    await this.message.updateThread(thread, lastReply, op)
+  async updateThread(thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise<void> {
+    await this.message.updateThread(thread, op, lastReply)
   }
 
-  async createNotification(message: MessageID, context: ContextID): Promise<void> {
-    await this.notification.createNotification(message, context)
+  async findMessages(params: FindMessagesParams): Promise<Message[]> {
+    return await this.message.find(params)
   }
 
-  async removeNotification(message: MessageID, context: ContextID): Promise<void> {
-    await this.notification.removeNotification(message, context)
+  async findMessagesGroups(params: FindMessagesGroupsParams): Promise<MessagesGroup[]> {
+    return await this.message.findMessagesGroups(params)
   }
 
-  async createContext(
-    personalWorkspace: WorkspaceID,
-    card: CardID,
-    lastView?: Date,
-    lastUpdate?: Date
-  ): Promise<ContextID> {
-    return await this.notification.createContext(personalWorkspace, card, lastView, lastUpdate)
+  async findThread(thread: CardID): Promise<Thread | undefined> {
+    return await this.message.findThread(thread)
   }
 
-  async updateContext(context: ContextID, update: NotificationContextUpdate): Promise<void> {
-    await this.notification.updateContext(context, update)
+  async addCollaborators(card: CardID, collaborators: AccountID[], date?: Date): Promise<void> {
+    await this.notification.addCollaborators(card, collaborators, date)
   }
 
-  async removeContext(context: ContextID): Promise<void> {
-    await this.notification.removeContext(context)
+  async removeCollaborators(card: CardID, collaborators: AccountID[]): Promise<void> {
+    await this.notification.removeCollaborators(card, collaborators)
   }
 
-  // Finds
-  async findMessages(params: FindMessagesParams): Promise<Message[]> {
-    return await this.message.find(params)
+  async createNotification(context: ContextID, message: MessageID, messageCreated: Date): Promise<NotificationID> {
+    return await this.notification.createNotification(context, message, messageCreated)
   }
 
-  async findMessagesGroups(params: FindMessagesGroupsParams): Promise<MessagesGroup[]> {
-    return await this.message.findMessagesGroups(params)
+  async removeNotification(context: ContextID, account: AccountID, untilDate: Date): Promise<void> {
+    await this.notification.removeNotifications(context, account, untilDate)
   }
 
-  async findThread(thread: CardID): Promise<Thread | undefined> {
-    return await this.message.findThread(thread)
+  async createContext(account: AccountID, card: CardID, lastUpdate: Date, lastView: Date): Promise<ContextID> {
+    return await this.notification.createContext(account, card, lastUpdate, lastView)
   }
 
-  async findContexts(
-    params: FindNotificationContextParams,
-    personalWorkspaces: WorkspaceID[],
-    workspace?: WorkspaceID
-  ): Promise<NotificationContext[]> {
-    return await this.notification.findContexts(params, personalWorkspaces, workspace)
+  async updateContext(context: 
ContextID, account: AccountID, lastUpdate?: Date, lastView?: Date): Promise { + await this.notification.updateContext(context, account, lastUpdate, lastView) } - async findNotifications( - params: FindNotificationsParams, - personalWorkspace: WorkspaceID, - workspace?: WorkspaceID - ): Promise { - return await this.notification.findNotifications(params, personalWorkspace, workspace) + async removeContext(context: ContextID, account: AccountID): Promise { + await this.notification.removeContext(context, account) + } + + async findContexts(params: FindNotificationContextParams): Promise { + return await this.notification.findContexts(params) + } + + async findNotifications(params: FindNotificationsParams): Promise { + return await this.notification.findNotifications(params) + } + + async findCollaborators(params: FindCollaboratorsParams): Promise { + return await this.notification.findCollaborators(params) } close(): void { this.sql.close() } + + getCollaboratorsCursor(card: CardID, date: Date, size?: number): AsyncIterable { + return this.notification.getCollaboratorsCursor(card, date, size) + } } export async function createDbAdapter( @@ -197,12 +202,13 @@ export async function createDbAdapter( options?: Options ): Promise { const greenUrl = process.env.GREEN_URL ?? '' + const connection = connect(connectionString) + const sql = await connection.getClient() + if (greenUrl !== '') { - const client = new GreenClient(greenUrl) + const client = new GreenClient(greenUrl, sql) return new CockroachAdapter(client, workspace, logger, options) } else { - const connection = connect(connectionString) - const sql = await connection.getClient() const client = new CockroachClient(connection, sql) return new CockroachAdapter(client, workspace, logger, options) @@ -212,8 +218,11 @@ export async function createDbAdapter( class GreenClient implements SqlClient { private readonly url: string private readonly token: string - constructor(endpoint: string) { - const url = new URL(endpoint) + constructor( + private readonly endpoint: string, + private readonly sql: postgres.Sql + ) { + const url = new URL(this.endpoint) this.token = url.searchParams.get('token') ?? 'secret' const compression = url.searchParams.get('compression') ?? '' @@ -229,14 +238,21 @@ class GreenClient implements SqlClient { this.url = `${url.protocol}//${newHost}${newPathname}${newSearchParams.size > 0 ? '?' + newSearchParams.toString() : ''}` } - async execute)[]>(query: string, params?: ParameterOrJSON[]): Promise { - return await retry(() => this.fetch(query, params), { retries: 5 }) + async execute(query: string, params?: SqlParams): Promise { + return await retry(() => this.fetch(query, params), { retries: 5 }) + } + + cursor(query: string, params?: SqlParams, size?: number): AsyncIterable[]> { + const sql = params !== undefined && params.length > 0 ? injectVars(query, params) : query + + return this.sql.unsafe(sql).cursor(size) } - private async fetch)[]>( - query: string, - params?: ParameterOrJSON[] - ): Promise { + close(): void { + // do nothing + } + + private async fetch(query: string, params?: SqlParams): Promise { const url = this.url.endsWith('/') ? 
this.url + 'api/v1/sql' : this.url + '/api/v1/sql' const response = await fetch(url, { @@ -255,10 +271,6 @@ class GreenClient implements SqlClient { return await response.json() } - - close(): void { - // do nothing - } } class CockroachClient implements SqlClient { @@ -267,9 +279,15 @@ class CockroachClient implements SqlClient { private readonly sql: postgres.Sql ) {} - async execute)[]>(query: string, params?: ParameterOrJSON[]): Promise { + async execute(query: string, params?: SqlParams): Promise { const sql = params !== undefined && params.length > 0 ? injectVars(query, params) : query - return await this.sql.unsafe(sql) + return await this.sql.unsafe(sql) + } + + cursor(query: string, params?: SqlParams, size?: number): AsyncIterable[]> { + const sql = params !== undefined && params.length > 0 ? injectVars(query, params) : query + + return this.sql.unsafe(sql).cursor(size) } close(): void { diff --git a/packages/cockroach/src/connection.ts b/packages/cockroach/src/connection.ts index f7223da5a75..7480097f54d 100644 --- a/packages/cockroach/src/connection.ts +++ b/packages/cockroach/src/connection.ts @@ -103,7 +103,7 @@ export function connect(connectionString: string, database?: string): PostgresCl database, max: 5, fetch_types: false, - prepare: true, + prepare: false, ...extraOptions }) diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index 572c5c3e434..5f63b6b0e7c 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -16,7 +16,7 @@ import { type ParameterOrJSON, type Row } from 'postgres' import type { WorkspaceID } from '@hcengineering/communication-types' -import { type SqlClient, type Logger, type Options } from '../types' +import { type Logger, type Options, type SqlClient } from '../types' export class BaseDb { constructor ( @@ -26,11 +26,7 @@ export class BaseDb { readonly options?: Options ) {} - async execute)[]>( - sql: string, - params?: ParameterOrJSON[], - name?: string - ): Promise { + async execute>(sql: string, params?: ParameterOrJSON[], name?: string): Promise { if (this.options?.withLogs === true && this.logger !== undefined) { return await this.executeWithLogs(name ?? 'execute sql', this.logger, sql, params) } @@ -38,16 +34,16 @@ export class BaseDb { return await this.client.execute(sql, params) } - private async executeWithLogs)[]>( + private async executeWithLogs>( name: string, logger: Logger, sql: string, params?: ParameterOrJSON[] - ): Promise { + ): Promise { const start = performance.now() try { - return await this.client.execute(sql, params) + return await this.client.execute(sql, params) } finally { const time = performance.now() - start logger.info(name, { time }) diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts new file mode 100644 index 00000000000..ee43477dc4d --- /dev/null +++ b/packages/cockroach/src/db/mapping.ts @@ -0,0 +1,251 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import { + type File, + type BlobID, + type CardID, + type Collaborator, + type ContextID, + type Message, + type MessageID, + type MessagesGroup, + type MessageType, + type Notification, + type NotificationContext, + type NotificationID, + type Patch, + PatchType, + type Reaction, + type RichText, + type SocialID, + type Thread, + type MessageData +} from '@hcengineering/communication-types' + +import { + type FileDb, + type CollaboratorDb, + type ContextDb, + type MessageDb, + type MessagesGroupDb, + type NotificationDb, + type PatchDb, + type ReactionDb, + type ThreadDb +} from './schema' + +interface RawMessage extends MessageDb { + thread_id?: CardID + replies_count?: number + last_reply?: Date + patches?: PatchDb[] + files?: FileDb[] + reactions?: ReactionDb[] +} + +interface RawNotification extends NotificationDb { + message_id: MessageID + message_type?: MessageType + message_content?: RichText + message_creator?: SocialID + message_data?: MessageData + message_created?: Date + message_group_blob_id?: BlobID + message_group_from_sec?: Date + message_group_to_sec?: Date + message_group_count?: number + message_patches?: { + patch_type: PatchType + patch_content: RichText + patch_creator: SocialID + patch_created: Date + }[] +} + +type RawContext = ContextDb & { id: ContextID } & { + notifications?: RawNotification[] +} + +export function toMessage (raw: RawMessage): Message { + const lastPatch = raw.patches?.[0] + + return { + id: String(raw.id) as MessageID, + type: raw.type, + card: raw.card_id, + content: lastPatch?.content ?? raw.content, + creator: raw.creator, + created: raw.created, + data: raw.data, + edited: lastPatch?.created ?? undefined, + thread: + raw.thread_id != null + ? { + card: raw.card_id, + message: String(raw.id) as MessageID, + thread: raw.thread_id, + repliesCount: raw.replies_count ?? 0, + lastReply: raw.last_reply ?? new Date() + } + : undefined, + reactions: (raw.reactions ?? []).map(toReaction), + files: (raw.files ?? []).map(toFile) + } +} + +export function toReaction (raw: ReactionDb): Reaction { + return { + message: String(raw.message_id) as MessageID, + reaction: raw.reaction, + creator: raw.creator, + created: raw.created + } +} + +export function toFile (raw: FileDb): File { + return { + card: raw.card_id, + message: String(raw.message_id) as MessageID, + blobId: raw.blob_id, + type: raw.type, + filename: raw.filename, + size: parseInt(raw.size as any), + creator: raw.creator, + created: raw.created + } +} + +export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { + return { + card: raw.card_id, + blobId: raw.blob_id, + fromSec: raw.from_sec, + toSec: raw.to_sec, + count: raw.count, + patches: raw.patches == null ? 
[] : raw.patches.filter((it: any) => it.message_id != null).map(toPatch) + } +} + +export function toPatch (raw: PatchDb): Patch { + return { + type: raw.type, + message: String(raw.message_id) as MessageID, + content: raw.content, + creator: raw.creator, + created: new Date(raw.created) + } +} + +export function toThread (raw: ThreadDb): Thread { + return { + card: raw.card_id, + message: String(raw.message_id) as MessageID, + thread: raw.thread_id, + repliesCount: raw.replies_count, + lastReply: raw.last_reply + } +} + +export function toNotificationContext (raw: RawContext): NotificationContext { + const lastView = new Date(raw.last_view) + return { + id: String(raw.id) as ContextID, + card: raw.card_id, + account: raw.account, + lastView, + lastUpdate: new Date(raw.last_update), + notifications: (raw.notifications ?? []) + .filter((it) => it.id != null) + .map((it) => toNotificationRaw(raw.id, raw.card_id, lastView, it)) + } +} + +function toNotificationRaw ( + id: ContextID, + card: CardID, + lastView: Date | undefined, + raw: RawNotification +): Notification { + const created = new Date(raw.created) + const read = lastView != null && lastView >= created + + let message: Message | undefined + + if ( + raw.message_content != null && + raw.message_creator != null && + raw.message_created != null && + raw.message_type != null + ) { + const lastPatch = (raw.message_patches ?? []).find((it) => it.patch_type === PatchType.update) + message = { + id: String(raw.message_id) as MessageID, + type: raw.message_type, + card, + content: lastPatch?.patch_content ?? raw.message_content, + data: raw.message_data, + creator: raw.message_creator, + created: new Date(raw.message_created), + edited: lastPatch?.patch_created != null ? new Date(lastPatch.patch_created) : undefined, + reactions: [], + files: [] + } + } + + if (message != null) { + return { + id: String(raw.id) as NotificationID, + read, + messageId: String(raw.message_id) as MessageID, + created, + context: String(id) as ContextID, + message + } + } + + let messageGroup: MessagesGroup | undefined + + if (raw.message_group_blob_id != null && raw.message_group_from_sec != null && raw.message_group_to_sec != null) { + messageGroup = { + card, + blobId: raw.message_group_blob_id, + fromSec: new Date(raw.message_group_from_sec), + toSec: new Date(raw.message_group_to_sec), + count: raw.message_group_count ?? 0 + } + } + + return { + id: String(raw.id) as NotificationID, + read, + messageId: raw.message_id, + created, + context: String(id) as ContextID, + messageGroup + } +} + +export function toNotification (raw: RawNotification & { card_id: CardID, last_view?: Date }): Notification { + const lastView = raw.last_view != null ? 
new Date(raw.last_view) : undefined + + return toNotificationRaw(raw.context_id, raw.card_id, lastView, raw) +} + +export function toCollaborator (raw: CollaboratorDb): Collaborator { + return { + account: raw.account + } +} diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index e94d089650c..5d020122b93 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -14,401 +14,487 @@ // import { - type Message, - type MessageID, - type CardID, - type FindMessagesParams, - type SocialID, - type RichText, - SortingOrder, - PatchType, - type Thread, - type BlobID, - type FindMessagesGroupsParams, - type MessagesGroup + type BlobID, + type CardID, + type FindMessagesGroupsParams, + type FindMessagesParams, + type Message, + type MessageID, + type MessagesGroup, + type MessageType, + PatchType, + type RichText, + type SocialID, + SortingOrder, + type Thread } from '@hcengineering/communication-types' -import {generateMessageId} from '@hcengineering/communication-shared' +import { generateMessageId, parseMessageId } from '@hcengineering/communication-shared' -import {BaseDb} from './base' -import { - TableName, - type MessageDb, - type AttachmentDb, - type ReactionDb, - type PatchDb, - toMessage, - type ThreadDb, - toThread, - type MessagesGroupDb, - toMessagesGroup -} from './schema' -import {getCondition} from './utils' +import { BaseDb } from './base' +import {type FileDb, type MessageDb, type MessagesGroupDb, type PatchDb, type ReactionDb, TableName, type ThreadDb } from './schema' +import { getCondition } from './utils' +import { toMessage, toMessagesGroup, toThread } from './mapping' export class MessagesDb extends BaseDb { - // Message - async createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise { - const id = generateMessageId() - const db: MessageDb = { - id, - workspace_id: this.workspace, - card_id: card, - content, - creator, - created - } - - const sql = `INSERT INTO ${TableName.Message} (workspace_id, card_id, id, content, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::text, $5::varchar, $6::timestamptz)` - - await this.execute(sql, [db.workspace_id, db.card_id, db.id, db.content, db.creator, db.created], 'insert message') - - return id + // Message + async createMessage ( + card: CardID, + type: MessageType, + content: RichText, + creator: SocialID, + created: Date, + data?: any + ): Promise { + const id = generateMessageId() + const db: MessageDb = { + id, + type, + workspace_id: this.workspace, + card_id: card, + content, + creator, + created, + data } - async removeMessage(card: CardID, message: MessageID, socialIds?: SocialID[]): Promise { - if (socialIds === undefined || socialIds.length === 0) { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id = $2::bigint;` - await this.execute(sql, [this.workspace, card, message], 'remove message') - } else if (socialIds.length === 1) { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id = $2::bigint - AND creator = $3::varchar;` - await this.execute(sql, [this.workspace, card, message, socialIds[0]], 'remove message') - } else { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id = $2::bigint - AND creator = ANY ($3::varchar[]);` + const sql = `INSERT INTO ${TableName.Message} (workspace_id, card_id, id, 
content, creator, created, type, data) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::text, $5::varchar, $6::timestamptz, $7::varchar, $8::jsonb)` - await this.execute(sql, [this.workspace, card, message, socialIds], 'remove message') - } - } + await this.execute( + sql, + [db.workspace_id, db.card_id, db.id, db.content, db.creator, db.created, db.type, db.data ?? {}], + 'insert message' + ) - async removeMessages(card: CardID, fromId: MessageID, toId: MessageID): Promise { - const sql = `DELETE - FROM ${TableName.Message} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND id >= $3::bigint - AND id <= $4::bigint;` + return id + } - await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove messages') - } + async removeMessages (card: CardID, messages: MessageID[], socialIds?: SocialID[]): Promise { + if (messages.length === 0) return [] - async createPatch( - card: CardID, - message: MessageID, - type: PatchType, - content: string, - creator: SocialID, - created: Date - ): Promise { - const db: PatchDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - type, - content, - creator, - created - } - - const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz)` - - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.type, db.content, db.creator, db.created], - 'insert patch' - ) - } + const where: string[] = ['workspace_id = $1::uuid', 'card_id = $2::varchar'] + const values: any[] = [this.workspace, card] - async removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise { - const sql = `DELETE - FROM ${TableName.Patch} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id >= $3::bigint - AND message_id <= $4::bigint;` + let index = values.length + 1 + + if (socialIds?.length === 1) { + where.push(`creator = $${index++}::varchar`) + values.push(socialIds[0]) + } - await this.execute(sql, [this.workspace, card, BigInt(fromId), BigInt(toId)], 'remove patches') + if (socialIds != null && socialIds.length > 1) { + where.push(`creator = ANY($${index++}::varchar[])`) + values.push(socialIds) } - // Attachment - async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - const db: AttachmentDb = { - message_id: message, - card_id: card, - creator, - created - } - const sql = `INSERT INTO ${TableName.Attachment} (message_id, card_id, creator, created) - VALUES ($1::bigint, $2::varchar, $3::varchar, $4::timestamptz)` - - await this.execute(sql, [db.message_id, db.card_id, db.creator, db.created], 'insert attachment') + if (messages.length === 1) { + where.push(`id = $${index++}::bigint`) + values.push(messages[0]) + } else { + where.push(`id = ANY($${index++}::bigint[])`) + values.push(messages) } - async removeAttachment(message: MessageID, card: CardID): Promise { - const sql = `DELETE - FROM ${TableName.Attachment} - WHERE message_id = $1::bigint - AND card_id = $2::varchar` - await this.execute(sql, [message, card], 'remove attachment') + const sql = `DELETE FROM ${TableName.Message} WHERE ${where.join(' AND ')} RETURNING id` + + const result = await this.execute(sql, values, 'remove messages') + + return result.map((row: any) => row.id) + } + + async createPatch ( + card: CardID, + message: MessageID, + type: PatchType, + content: string, + creator: SocialID, + created: Date 
+ ): Promise { + const db: PatchDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + type, + content, + creator, + created, + message_created_sec: parseMessageId(message) } - // Reaction - async createReaction( - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID, - created: Date - ): Promise { - const select = `SELECT m.id + const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created, message_created_sec) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz, $8::timestamptz)` + + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.type, db.content, db.creator, db.created, db.message_created_sec], + 'insert patch' + ) + } + + // File + async createFile (card: CardID, message: MessageID, blobId: BlobID, fileType: string, filename: string, size: number,creator: SocialID, created: Date): Promise { + const db: FileDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + blob_id: blobId, + type: fileType, + filename, + size, + creator, + created, + message_created_sec: parseMessageId(message) + } + const sql = `INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, type, filename, creator, created, message_created_sec, size) + VALUES ($1::uuid, $2::varchar, $3::int8, $4::uuid, $5::varchar, $6::varchar, $7::varchar, $8::timestamptz, $9::timestamptz, $10::int8)` + + await this.execute(sql, [db.workspace_id, db.card_id, db.message_id, db.blob_id, db.type, db.filename, db.creator, db.created, db.message_created_sec, db.size], 'insert file') + } + + async removeFile (card: CardID, message: MessageID, blobId: BlobID): Promise { + const sql = `DELETE + FROM ${TableName.File} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::bigint + AND blob_id = $4::uuid` + await this.execute(sql, [this.workspace, card, message, blobId], 'remove file') + } + + // Reaction + async createReaction ( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id FROM ${TableName.Message} m WHERE m.id = $1::bigint` - const messageDb = await this.execute(select, [message], 'select message') - - if (messageDb.length > 0) { - const db: ReactionDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - reaction, - creator, - created - } - const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) + const messageDb = await this.execute(select, [message], 'select message') + + if (messageDb.length > 0) { + const db: ReactionDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + reaction, + creator, + created + } + const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], - 'insert reaction' - ) - } else { - await this.createPatch(card, message, PatchType.addReaction, reaction, creator, created) - } + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], + 'insert reaction' + ) + } else { + await this.createPatch(card, message, PatchType.addReaction, reaction, creator, created) } - - async 
removeReaction( - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID, - created: Date - ): Promise { - const select = `SELECT m.id + } + + async removeReaction ( + card: CardID, + message: MessageID, + reaction: string, + creator: SocialID, + created: Date + ): Promise { + const select = `SELECT m.id FROM ${TableName.Message} m WHERE m.id = $1::bigint` - const messageDb = await this.execute(select, [message], 'select message') + const messageDb = await this.execute(select, [message], 'select message') - if (messageDb.length > 0) { - const sql = `DELETE + if (messageDb.length > 0) { + const sql = `DELETE FROM ${TableName.Reaction} WHERE workspace_id = $1::uuid AND card_id = $2::varchar AND message_id = $3::bigint AND reaction = $4::varchar AND creator = $5::varchar` - await this.execute(sql, [this.workspace, card, message, reaction, creator], 'remove reaction') - } else { - await this.createPatch(card, message, PatchType.removeReaction, reaction, creator, created) - } + await this.execute(sql, [this.workspace, card, message, reaction, creator], 'remove reaction') + } else { + await this.createPatch(card, message, PatchType.removeReaction, reaction, creator, created) } - - // Thread - async createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise { - const db: ThreadDb = { - workspace_id: this.workspace, - card_id: card, - message_id: message, - thread_id: thread, - replies_count: 0, - last_reply: created - } - const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, + } + + // Thread + async createThread (card: CardID, message: MessageID, thread: CardID, created: Date): Promise { + const db: ThreadDb = { + workspace_id: this.workspace, + card_id: card, + message_id: message, + thread_id: thread, + replies_count: 0, + last_reply: created + } + const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, last_reply) VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::int, $6::timestamptz)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply], - 'insert thread' - ) + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply], + 'insert thread' + ) + } + + async updateThread (thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise { + const set: string[] = [] + const values: any[] = [] + + if (lastReply != null) { + set.push('last_reply = $3::timestamptz') + values.push(lastReply) } - async updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise { - if (op === 'increment') { - const sql = `UPDATE ${TableName.Thread} - SET replies_count = replies_count + 1, - last_reply = $3::timestamptz - WHERE workspace_id = $1::uuid - AND thread_id = $2::varchar` - await this.execute(sql, [this.workspace, thread, lastReply], 'update thread') - } else if (op === 'decrement') { - const sql = `UPDATE ${TableName.Thread} - SET replies_count = GREATEST(replies_count - 1, 0) - WHERE workspace_id = $1::uuid - AND thread_id = $2::varchar` - await this.execute(sql, [this.workspace, thread], 'update thread') - } + if (op === 'increment') { + set.push('replies_count = replies_count + 1') + } else if (op === 'decrement') { + set.push('replies_count = GREATEST(replies_count - 1, 0)') } - // MessagesGroup - async createMessagesGroup( - card: CardID, - blobId: BlobID, - fromDate: Date, - 
toDate: Date, - fromId: MessageID, - toId: MessageID, - count: number - ): Promise { - const db: MessagesGroupDb = { - workspace_id: this.workspace, - card_id: card, - blob_id: blobId, - from_date: fromDate, - to_date: toDate, - from_id: fromId, - to_id: toId, - count - } - - const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, - from_id, - to_id, count) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::bigint, $7::bigint, - $8::int)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.from_id, db.to_id, db.count], - 'insert messages group' - ) + const update = `UPDATE ${TableName.Thread}` + const setSql = 'SET ' + set.join(', ') + const where = 'WHERE workspace_id = $1::uuid AND thread_id = $2::varchar' + const sql = [update, setSql, where].join(' ') + await this.execute(sql, [this.workspace, thread, ...values], 'update thread') + } + + // MessagesGroup + async createMessagesGroup (card: CardID, blobId: BlobID, fromSec: Date, toSec: Date, count: number): Promise { + const db: MessagesGroupDb = { + workspace_id: this.workspace, + card_id: card, + blob_id: blobId, + from_sec: fromSec, + to_sec: toSec, + count } - async removeMessagesGroup(card: CardID, blobId: BlobID): Promise { - const sql = `DELETE + const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_sec, to_sec, count) + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::int)` + await this.execute( + sql, + [db.workspace_id, db.card_id, db.blob_id, db.from_sec, db.to_sec, db.count], + 'insert messages group' + ) + } + + async removeMessagesGroup (card: CardID, blobId: BlobID): Promise { + const sql = `DELETE FROM ${TableName.MessagesGroup} WHERE workspace_id = $1::uuid AND card_id = $2::varchar AND blob_id = $3::uuid` - await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') - } - - // Find messages - async find(params: FindMessagesParams): Promise { - // TODO: experiment with select to improve performance - const select = `SELECT m.id, - m.card_id, - m.content, - m.creator, - m.created, - t.thread_id as thread_id, - t.replies_count as replies_count, - t.last_reply as last_reply, - ${this.subSelectPatches()}, - ${this.subSelectReactions()} - FROM ${TableName.Message} m - LEFT JOIN ${TableName.Thread} t - ON t.workspace_id = m.workspace_id AND t.card_id = m.card_id AND - t.message_id = m.id` - - const {where, values} = this.buildMessageWhere(params) - const orderBy = - params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' - const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') - - const result = await this.execute(sql, values, 'find messages') - - return result.map((it: any) => toMessage(it)) + await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') + } + + async find(params: FindMessagesParams): Promise { + const { where, values } = this.buildMessageWhere(params); + const orderBy = this.buildOrderBy(params); + const limit = this.buildLimit(params); + + const sql = ` + WITH + ${this.buildCteLimitedMessages(where, orderBy, limit)} + ${this.buildCteAggregatedFiles(params)} + ${this.buildCteAggregatedReactions(params)} + ${this.buildCteAggregatedPatches()} + ${this.buildMainSelect(params)} + `; + + const result = await this.execute(sql, values, 'find messages'); + return result.map((it: any) => toMessage(it)); + } + + private buildOrderBy(params: FindMessagesParams): string { + return params.order != null + ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + : ''; + } + + private buildLimit(params: FindMessagesParams): string { + return params.limit != null ? `LIMIT ${params.limit}` : ''; + } + + private buildCteLimitedMessages(where: string, orderBy: string, limit: string): string { + return ` + limited_messages AS ( + SELECT * + FROM ${TableName.Message} m + ${where} + ${orderBy} + ${limit} + ) + `; + } + + private buildCteAggregatedFiles(params: FindMessagesParams): string { + if (!params.files) return ''; + return `, + agg_files AS ( + SELECT + f.workspace_id, + f.card_id, + f.message_id, + jsonb_agg(jsonb_build_object( + 'card_id', f.card_id, + 'message_id', f.message_id, + 'blob_id', f.blob_id, + 'type', f.type, + 'filename', f.filename, + 'creator', f.creator, + 'created', f.created + )) AS files + FROM ${TableName.File} f + INNER JOIN limited_messages m + ON m.workspace_id = f.workspace_id + AND m.card_id = f.card_id + AND m.id = f.message_id + GROUP BY f.workspace_id, f.card_id, f.message_id + ) + `; + } + + private buildCteAggregatedReactions(params: FindMessagesParams): string { + if (!params.reactions) return ''; + return `, + agg_reactions AS ( + SELECT + r.workspace_id, + r.card_id, + r.message_id, + jsonb_agg(jsonb_build_object( + 'message_id', r.message_id, + 'reaction', r.reaction, + 'creator', r.creator, + 'created', r.created + )) AS reactions + FROM ${TableName.Reaction} r + INNER JOIN limited_messages m + ON m.workspace_id = r.workspace_id + AND m.card_id = r.card_id + AND m.id = r.message_id + GROUP BY r.workspace_id, r.card_id, r.message_id + ) + `; + } + + private buildCteAggregatedPatches(): string { + return `, + agg_patches AS ( + SELECT + p.workspace_id, + p.card_id, + p.message_id, + jsonb_agg( + jsonb_build_object( + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created DESC + ) AS patches + FROM ${TableName.Patch} p + INNER JOIN limited_messages m + ON m.workspace_id = p.workspace_id + AND m.card_id = p.card_id + AND m.id = p.message_id + WHERE p.type = 'update' + GROUP BY p.workspace_id, p.card_id, p.message_id + ) + `; + } + + private buildMainSelect(params: FindMessagesParams): string { + const orderBy = this.buildOrderBy(params); + const selectReplies = params.replies + ? `t.thread_id as thread_id, t.replies_count as replies_count, t.last_reply as last_reply,` + : ''; + + const selectFiles = params.files + ? `COALESCE(f.files, '[]'::jsonb) AS files,` + : `'[]'::jsonb AS files,`; + + const selectReactions = params.reactions + ? 
`COALESCE(r.reactions, '[]'::jsonb) AS reactions,` + : `'[]'::jsonb AS reactions,`; + + const joinFiles = params.files ? ` + LEFT JOIN agg_files f + ON f.workspace_id = m.workspace_id + AND f.card_id = m.card_id + AND f.message_id = m.id` : ''; + + const joinReactions = params.reactions ? ` + LEFT JOIN agg_reactions r + ON r.workspace_id = m.workspace_id + AND r.card_id = m.card_id + AND r.message_id = m.id` : ''; + + return ` + SELECT + m.id, + m.card_id, + m.type, + m.content, + m.creator, + m.created, + m.data, + ${selectReplies} + ${selectFiles} + ${selectReactions} + COALESCE(p.patches, '[]'::jsonb) AS patches + FROM limited_messages m + LEFT JOIN ${TableName.Thread} t + ON t.workspace_id = m.workspace_id + AND t.card_id = m.card_id + AND t.message_id = m.id + ${joinFiles} + ${joinReactions} + LEFT JOIN agg_patches p + ON p.workspace_id = m.workspace_id + AND p.card_id = m.card_id + AND p.message_id = m.id + ${orderBy} + `; + } + + buildMessageWhere (params: FindMessagesParams): { where: string, values: any[] } { + const where: string[] = ['m.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + + let index = 2 + + if (params.id != null) { + where.push(`m.id = $${index++}::bigint`) + values.push(params.id) } - buildMessageWhere(params: FindMessagesParams): { where: string, values: any[] } { - const where: string[] = ['m.workspace_id = $1::uuid'] - const values: any[] = [this.workspace] - - let index = 2 - - if (params.id != null) { - where.push(`m.id = $${index++}::bigint`) - values.push(params.id) - } - - if (params.card != null) { - where.push(`m.card_id = $${index++}::varchar`) - values.push(params.card) - } - - const createdCondition = getCondition('m', 'created', index, params.created, 'timestamptz') - - if (createdCondition != null) { - where.push(createdCondition.where) - values.push(createdCondition.value) - index++ - } - - return {where: `WHERE ${where.join(' AND ')}`, values} + if (params.card != null) { + where.push(`m.card_id = $${index++}::varchar`) + values.push(params.card) } - subSelectPatches(): string { - return `COALESCE( - (SELECT jsonb_agg(jsonb_build_object( - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) ORDER BY p.created DESC) - FROM ${TableName.Patch} p - WHERE p.message_id = m.id - AND p.workspace_id = m.workspace_id - AND p.card_id = m.card_id - AND p.type = 'update' - ), '[]'::jsonb) AS patches` - } + const createdCondition = getCondition('m', 'created', index, params.created, 'timestamptz') - subSelectAttachments(): string { - return `COALESCE( - (SELECT jsonb_agg(jsonb_build_object( - 'card_id', a.card_id, - 'message_id', a.message_id, - 'creator', a.creator, - 'created', a.created - )) - FROM ${TableName.Attachment} a - WHERE a.message_id = m.id - ), '[]'::jsonb) AS attachments` + if (createdCondition != null) { + where.push(createdCondition.where) + values.push(...createdCondition.values) + index = createdCondition.index } - subSelectReactions(): string { - return `COALESCE( - (SELECT jsonb_agg(jsonb_build_object( - 'message_id', r.message_id, - 'reaction', r.reaction, - 'creator', r.creator, - 'created', r.created - )) - FROM ${TableName.Reaction} r - WHERE r.workspace_id = m.workspace_id - AND r.card_id = m.card_id - AND r.message_id = m.id - ), '[]'::jsonb) AS reactions` - } + return { where: `WHERE ${where.join(' AND ')}`, values } + } - // Find thread - async findThread(thread: CardID): Promise { - const sql = `SELECT t.card_id, + // Find thread + async findThread (thread: CardID): Promise { + const sql = 
`SELECT t.card_id, t.message_id, t.thread_id, t.replies_count, @@ -418,19 +504,17 @@ export class MessagesDb extends BaseDb { AND t.thread_id = $2::varchar LIMIT 1;` - const result = await this.execute(sql, [this.workspace, thread], 'find thread') - return result.map((it: any) => toThread(it))[0] - } + const result = await this.execute(sql, [this.workspace, thread], 'find thread') + return result.map((it: any) => toThread(it))[0] + } - // Find messages groups - async findMessagesGroups(params: FindMessagesGroupsParams): Promise { - const select = ` + // Find messages groups + async findMessagesGroups (params: FindMessagesGroupsParams): Promise { + const select = ` SELECT mg.card_id, mg.blob_id, - mg.from_date, - mg.to_date, - mg.from_id, - mg.to_id, + mg.from_sec, + mg.to_sec, mg.count, patches FROM ${TableName.MessagesGroup} mg @@ -445,57 +529,57 @@ export class MessagesDb extends BaseDb { FROM ${TableName.Patch} p WHERE p.workspace_id = mg.workspace_id AND p.card_id = mg.card_id - AND p.message_id BETWEEN mg.from_id AND mg.to_id + AND p.message_created_sec BETWEEN mg.from_sec AND mg.to_sec ) sub` - const {where, values} = this.buildMessagesGroupWhere(params) - const orderBy = - params.orderBy === 'toDate' - ? `ORDER BY mg.to_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - : `ORDER BY mg.from_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - const limit = params.limit != null ? ` LIMIT ${params.limit}` : ''; + const { where, values } = this.buildMessagesGroupWhere(params) + const orderBy = + params.orderBy === 'toSec' + ? `ORDER BY mg.to_sec ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + : `ORDER BY mg.from_sec ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + const limit = params.limit != null ? 
` LIMIT ${params.limit}` : '' + + const sql = [select, where, orderBy, limit].join(' ') + const result = await this.execute(sql, values, 'find messages groups') + + return result.map((it: any) => toMessagesGroup(it)) + } + + buildMessagesGroupWhere (params: FindMessagesGroupsParams): { + where: string + values: any[] + } { + const where: string[] = ['mg.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + + let index = 2 - const sql = [select, where, orderBy, limit].join(' ') - const result = await this.execute(sql, values, 'find messages groups') + where.push(`mg.card_id = $${index++}::varchar`) + values.push(params.card) - return result.map((it: any) => toMessagesGroup(it)) + if (params.blobId != null) { + where.push(`mg.blob_id = $${index++}`) + values.push(params.blobId) } - buildMessagesGroupWhere(params: FindMessagesGroupsParams): { - where: string - values: any[] - } { - const where: string[] = ['mg.workspace_id = $1::uuid'] - const values: any[] = [this.workspace] - - let index = 2 - - where.push(`mg.card_id = $${index++}::varchar`) - values.push(params.card) - - if (params.blobId != null) { - where.push(`mg.blob_id = $${index++}`) - values.push(params.blobId) - } - - const fromDateCondition = getCondition('mg', 'from_date', index, params.fromDate, 'timestamptz') - if (fromDateCondition != null) { - where.push(fromDateCondition.where) - values.push(fromDateCondition.value) - index++ - } - - const toDateCondition = getCondition('mg', 'to_date', index, params.toDate, 'timestamptz') - if (toDateCondition != null) { - where.push(toDateCondition.where) - values.push(toDateCondition.value) - index++ - } - - if (params.withPatches === true) { - where.push(`sub.patches IS NOT NULL`) - } - - return {where: `WHERE ${where.join(' AND ')}`, values} + const fromDateCondition = getCondition('mg', 'from_sec', index, params.fromSec, 'timestamptz') + if (fromDateCondition != null) { + where.push(fromDateCondition.where) + values.push(...fromDateCondition.values) + index = fromDateCondition.index } + + const toDateCondition = getCondition('mg', 'to_sec', index, params.toSec, 'timestamptz') + if (toDateCondition != null) { + where.push(toDateCondition.where) + values.push(...toDateCondition.values) + index = toDateCondition.index + } + + if (params.patches === true) { + where.push('sub.patches IS NOT NULL') + } + + return { where: `WHERE ${where.join(' AND ')}`, values } + } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 5f6a0e2dc5c..94258d59274 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -14,82 +14,140 @@ // import { - type MessageID, - type ContextID, + type AccountID, type CardID, - type NotificationContext, + type Collaborator, + type ContextID, + type FindCollaboratorsParams, type FindNotificationContextParams, - SortingOrder, type FindNotificationsParams, + type MessageID, type Notification, - type NotificationContextUpdate, - type WorkspaceID + type NotificationContext, + SortingOrder, + type NotificationID } from '@hcengineering/communication-types' import { BaseDb } from './base' -import { TableName, type ContextDb, type NotificationDb } from './schema' +import { type ContextDb, type NotificationDb, TableName } from './schema' +import { getCondition } from './utils' +import { toCollaborator, toNotification, toNotificationContext } from './mapping' export class NotificationsDb extends BaseDb { - async createNotification (message: MessageID, context: 
ContextID): Promise { - const db: NotificationDb = { + async addCollaborators (card: CardID, collaborators: AccountID[], date?: Date): Promise { + if (collaborators.length === 0) return + const values: any[] = [] + + const sqlValues = collaborators + .map((account, index) => { + const i = index * 3 + values.push(this.workspace, card, account, date ?? new Date()) + return `($${i + 1}::uuid, $${i + 2}::varchar, $${i + 3}::uuid, $${i + 4}::timestamptz)` + }) + .join(', ') + + const sql = `INSERT INTO ${TableName.Collaborators} (workspace_id, card_id, account, date) VALUES ${sqlValues} ON CONFLICT DO NOTHING` + + await this.execute(sql, values, 'insert collaborators') + } + + async removeCollaborators (card: CardID, collaborators: AccountID[]): Promise { + if (collaborators.length === 0) return + + if (collaborators.length === 1) { + const sql = `DELETE + FROM ${TableName.Collaborators} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND account = $3::uuid` + await this.execute(sql, [this.workspace, card, collaborators[0]], 'remove collaborator') + } else { + const inValues = collaborators.map((_, index) => `$${index + 3}`).join(', ') + const sql = `DELETE + FROM ${TableName.Collaborators} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND account IN (${inValues})` + + await this.execute(sql, [this.workspace, card, ...collaborators], 'remove collaborators') + } + } + + getCollaboratorsCursor ( + card: CardID, + date: Date, + size?: number + ): AsyncIterable[]> { + const sql = ` + SELECT account + FROM ${TableName.Collaborators} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND date <= $3::timestamptz + ORDER BY date ASC ` + + return this.client.cursor(sql, [this.workspace, card, date], size) + } + + async createNotification (context: ContextID, message: MessageID, created: Date): Promise { + const db: Omit = { message_id: message, - context + context_id: context, + created } - const sql = `INSERT INTO ${TableName.Notification} (message_id, context_id) - VALUES ($1::bigint, $2::uuid)` - await this.execute(sql, [db.message_id, db.context], 'insert notification') + const sql = `INSERT INTO ${TableName.Notification} (message_id, context_id, created) + VALUES ($1::bigint, $2::int8, $3::timestamptz) + RETURNING id` + const result = await this.execute(sql, [db.message_id, db.context_id, db.created], 'insert notification') + return result[0].id as NotificationID } - async removeNotification (message: MessageID, context: ContextID): Promise { - // TODO: do we need to remove notifications? 
- const sql = `DELETE - FROM ${TableName.NotificationContext} - WHERE id = $1::uuid` - await this.execute(sql, [context], 'remove notification') + async removeNotifications (context: ContextID, account: AccountID, untilDate: Date): Promise { + const sql = ` + DELETE FROM ${TableName.Notification} n + USING ${TableName.NotificationContext} nc + WHERE n.context_id = $1::int8 + AND nc.id = n.context_id + AND nc.account = $2::uuid + AND n.created < $3::timestamptz + ` + await this.execute(sql, [context, account, untilDate], 'remove notification') } - async createContext ( - personalWorkspace: WorkspaceID, - card: CardID, - lastView?: Date, - lastUpdate?: Date - ): Promise { + async createContext (account: AccountID, card: CardID, lastUpdate: Date, lastView: Date): Promise { const db: ContextDb = { workspace_id: this.workspace, card_id: card, - personal_workspace: personalWorkspace, + account, last_view: lastView, last_update: lastUpdate } - const sql = `INSERT INTO ${TableName.NotificationContext} (workspace_id, card_id, personal_workspace, last_view, last_update) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz) - RETURNING id` + const sql = `INSERT INTO ${TableName.NotificationContext} (workspace_id, card_id, account, last_view, last_update) + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz) + RETURNING id` const result = await this.execute( sql, - [db.workspace_id, db.card_id, db.personal_workspace, db.last_view, db.last_update], + [db.workspace_id, db.card_id, db.account, db.last_view, db.last_update], 'insert notification context' ) return result[0].id as ContextID } - async removeContext (context: ContextID): Promise { + async removeContext (context: ContextID, account: AccountID): Promise { const sql = `DELETE - FROM ${TableName.Notification} - WHERE context = $1::uuid` - await this.execute(sql, [context], 'remove notification context') + FROM ${TableName.Notification} + WHERE context = $1::int8 AND account = $2::uuid` + await this.execute(sql, [context, account], 'remove notification context') } - async updateContext (context: ContextID, update: NotificationContextUpdate): Promise { + async updateContext (context: ContextID, account: AccountID, lastUpdate?: Date, lastView?: Date): Promise { const dbData: Partial = {} - if (update.archivedFrom != null) { - dbData.archived_from = update.archivedFrom - } - if (update.lastView != null) { - dbData.last_view = update.lastView + if (lastView != null) { + dbData.last_view = lastView } - if (update.lastUpdate != null) { - dbData.last_update = update.lastUpdate + if (lastUpdate != null) { + dbData.last_update = lastUpdate } if (Object.keys(dbData).length === 0) { @@ -100,91 +158,236 @@ export class NotificationsDb extends BaseDb { const values = Object.values(dbData) const sql = `UPDATE ${TableName.NotificationContext} - SET ${keys.map((k, idx) => `"${k}" = $${idx + 1}::timestamptz`).join(', ')} - WHERE id = $${keys.length + 1}::uuid;` - await this.execute(sql, [values, context], 'update notification context') + SET ${keys.map((k, idx) => `"${k}" = $${idx + 3}::timestamptz`).join(', ')} + WHERE id = $1::int8 AND account = $2::uuid;` + + await this.execute(sql, [context, account, ...values], 'update notification context') } - async findContexts ( - params: FindNotificationContextParams, - personalWorkspaces: WorkspaceID[], - workspace?: WorkspaceID - ): Promise { - const select = ` - SELECT nc.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update - FROM ${TableName.NotificationContext} 
nc` - const { where, values } = this.buildContextWhere(params, personalWorkspaces, workspace) - // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` - const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, limit].join(' ') + async findContexts (params: FindNotificationContextParams): Promise { + const withNotifications = params.notifications != null + const withMessages = params.notifications?.message === true - const result = await this.execute(sql, values, 'find notification contexts') + const { where, values, index } = this.buildContextWhere(params) + const limit = params.limit != null ? `LIMIT ${Number(params.limit)}` : '' + const orderBy = + params.order != null ? `ORDER BY nc.last_update ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + + let joinMessages = '' + let buildObject = ` + JSONB_BUILD_OBJECT( + 'id', n.id, + 'created', n.created, + 'message_id', n.message_id + )` + + if (withMessages) { + joinMessages = ` + LEFT JOIN ${TableName.Message} m + ON n.message_id = m.id + AND nc.workspace_id = m.workspace_id + AND nc.card_id = m.card_id + LEFT JOIN ${TableName.MessagesGroup} mg + ON nc.workspace_id = mg.workspace_id + AND nc.card_id = mg.card_id + AND n.created BETWEEN mg.from_sec AND mg.to_sec` + + buildObject = ` + JSONB_BUILD_OBJECT( + 'id', n.id, + 'created', n.created, + 'message_id', n.message_id, + 'message_type', m.type, + 'message_content', m.content, + 'message_data', m.data, + 'message_creator', m.creator, + 'message_created', m.created, + 'message_group_blob_id', mg.blob_id, + 'message_group_from_sec', mg.from_sec, + 'message_group_to_sec', mg.to_sec, + 'message_group_count', mg.count, + 'message_patches', ( + SELECT COALESCE( + JSON_AGG( + JSONB_BUILD_OBJECT( + 'patch_type', p.type, + 'patch_content', p.content, + 'patch_creator', p.creator, + 'patch_created', p.created + ) ORDER BY p.created DESC + ), + '[]'::JSONB + ) + FROM ${TableName.Patch} p + WHERE p.message_id = n.message_id + ) + )` + } + + let joinNotifications = '' + let notificationsSelect = '' + let groupBy = '' + + if (withNotifications) { + const { where: whereNotifications, values: valuesNotifications } = this.buildNotificationWhere( + { read: params.notifications?.read }, + index, + true + ) + + values.push(...valuesNotifications) + + joinNotifications = ` + LEFT JOIN LATERAL ( + SELECT + n.*, + ROW_NUMBER() OVER ( + PARTITION BY n.context_id + ORDER BY n.created ${params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC'} + ) AS rn + FROM ${TableName.Notification} n + ${whereNotifications} ${whereNotifications.length > 1 ? 'AND n.context_id = nc.id' : 'WHERE n.context_id = nc.id'} + ) n ON n.rn <= ${params.notifications?.limit ?? 1}` + + notificationsSelect = `, + COALESCE( + JSON_AGG( + ${buildObject} + ORDER BY n.created ${params.notifications?.order === SortingOrder.Ascending ? 
'ASC' : 'DESC'} + ), + '[]'::JSONB + ) AS notifications` + + groupBy = 'GROUP BY nc.id' + } - return result.map((it) => this.toNotificationContext(it)) + const sql = ` + SELECT nc.id, + nc.card_id, + nc.account, + nc.last_view, + nc.last_update + ${notificationsSelect} + FROM ${TableName.NotificationContext} nc + ${joinNotifications} + ${joinMessages} + ${where} + ${groupBy} + ${orderBy} + ${limit}; + ` + + const result = await this.execute(sql, values, 'find contexts') + + return result.map((it: any) => toNotificationContext(it)) } - async findNotifications ( - params: FindNotificationsParams, - personalWorkspace: WorkspaceID, - workspace?: WorkspaceID - ): Promise { - // TODO: experiment with select to improve performance, should join with attachments and reactions? - const select = ` - SELECT n.message_id, - n.context, - m.card_id AS message_card, - m.content AS message_content, - m.creator AS message_creator, - m.created AS message_created, - nc.card_id, - nc.archived_from, - nc.last_view, - nc.last_update, - (SELECT json_agg( - jsonb_build_object( - 'id', p.id, - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) - ) - FROM ${TableName.Patch} p - WHERE p.message_id = m.id) AS patches - FROM ${TableName.Notification} n - JOIN ${TableName.NotificationContext} nc ON n.context = nc.id - JOIN ${TableName.Message} m ON n.message_id = m.id - ` - const { where, values } = this.buildNotificationWhere(params, personalWorkspace, workspace) + async findNotifications (params: FindNotificationsParams): Promise { + const withMessage = params.message === true + + let select = 'SELECT n.id, n.created,n.message_id, n.context_id, nc.last_view ' + + let joinMessages = '' + + if (withMessage) { + select += `, + m.card_id AS card_id, + m.type AS message_type, + m.content AS message_content, + m.creator AS message_creator, + m.created AS message_created, + m.data AS message_data, + mg.blob_id AS message_group_blob_id, + mg.from_sec AS message_group_from_sec, + mg.to_sec AS message_group_to_sec, + mg.count AS message_group_count, + (SELECT json_agg( + jsonb_build_object( + 'id', p.id, + 'content', p.content, + 'creator', p.creator, + 'created', p.created + ) + ) + FROM ${TableName.Patch} p + WHERE p.message_id = m.id) AS message_patches + ` + + joinMessages = ` + LEFT JOIN ${TableName.Message} m + ON n.message_id = m.id + AND nc.workspace_id = m.workspace_id + AND nc.card_id = m.card_id + LEFT JOIN ${TableName.MessagesGroup} mg + ON nc.workspace_id = mg.workspace_id + AND nc.card_id = mg.card_id + AND n.created BETWEEN mg.from_sec AND mg.to_sec + ` + } + + select += ` FROM ${TableName.Notification} n + JOIN ${TableName.NotificationContext} nc ON n.context_id = nc.id` + + const { where, values } = this.buildNotificationWhere(params) const orderBy = - params.order != null ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' - const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, orderBy, limit].join(' ') + params.order != null ? `ORDER BY n.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const limit = params.limit != null ? 
`LIMIT ${params.limit}` : '' + + const sql = [select, joinMessages, where, orderBy, limit].join(' ') const result = await this.execute(sql, values, 'find notifications') - return result.map((it) => this.toNotification(it)) + return result.map((it: any) => toNotification(it)) } - buildContextWhere ( - params: FindNotificationContextParams, - personalWorkspaces: WorkspaceID[], - workspace?: WorkspaceID - ): { - where: string - values: any[] - } { - const where: string[] = [] - const values: any[] = [] - let index = 1 + async findCollaborators (params: FindCollaboratorsParams): Promise { + const { where, values } = this.buildCollaboratorsWhere(params) + const select = ` + SELECT c.account + FROM ${TableName.Collaborators} c + ` + + const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' + const orderBy = + params.order != null ? `ORDER BY c.date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + + const sql = [select, where, orderBy, limit].join(' ') + const result = await this.execute(sql, values, 'find collaborators') + + return result.map((it: any) => toCollaborator(it)) + } - if (workspace != null) { - where.push(`nc.workspace_id = $${index++}::uuid`) - values.push(workspace) + private buildCollaboratorsWhere (params: FindCollaboratorsParams): { where: string, values: any[] } { + const where: string[] = ['c.workspace_id = $1::uuid', 'c.card_id = $2::varchar'] + const values: any[] = [this.workspace, params.card] + let index = values.length + 1 + + if (params.account != null) { + const accounts = Array.isArray(params.account) ? params.account : [params.account] + if (accounts.length === 1) { + where.push(`c.account = $${index++}::uuid`) + values.push(accounts[0]) + } else if (accounts.length > 1) { + where.push(`c.account = ANY($${index++}::uuid[])`) + values.push(accounts) + } } - if (personalWorkspaces.length > 0) { - where.push(`nc.personal_workspace IN (${personalWorkspaces.map((it) => `$${index++}::uuid`).join(', ')})`) - values.push(...personalWorkspaces) + return { where: `WHERE ${where.join(' AND ')}`, values } + } + + private buildContextWhere (params: FindNotificationContextParams): { + where: string + values: any[] + index: number + } { + const where: string[] = ['nc.workspace_id = $1::uuid'] + const values: any[] = [this.workspace] + let index = 2 + + if (params.id != null) { + where.push(`nc.id = $${index++}::int8`) + values.push(params.id) } if (params.card != null) { @@ -192,82 +395,64 @@ export class NotificationsDb extends BaseDb { values.push(params.card) } - return { where: `WHERE ${where.join(' AND ')}`, values } + if (params.account != null) { + const accounts = Array.isArray(params.account) ? 
params.account : [params.account] + if (accounts.length === 1) { + where.push(`nc.account = $${index++}::uuid`) + values.push(accounts[0]) + } else if (accounts.length > 1) { + where.push(`nc.account IN (${accounts.map((it) => `$${index++}::uuid`).join(', ')})`) + values.push(...accounts) + } + } + + const lastUpdateCondition = getCondition('nc', 'last_update', index, params.lastUpdate, 'timestamptz') + + if (lastUpdateCondition != null) { + where.push(lastUpdateCondition.where) + values.push(...lastUpdateCondition.values) + index= lastUpdateCondition.index + } + + return { where: `WHERE ${where.join(' AND ')}`, values, index } } - buildNotificationWhere ( + private buildNotificationWhere ( params: FindNotificationsParams, - personalWorkspace: WorkspaceID, - workspace?: WorkspaceID + initialIndex?: number, + skipWorkspace?: boolean ): { where: string values: any[] } { - const where: string[] = ['nc.personal_workspace = $1::uuid'] - const values: any[] = [personalWorkspace] - let index = 2 - - if (workspace != null) { - where.push(`nc.workspace_id = $${index++}::uuid`) - values.push(workspace) - } + const where: string[] = skipWorkspace === true ? [] : ['nc.workspace_id = $1::uuid'] + const values: any[] = skipWorkspace === true ? [] : [this.workspace] + let index = (initialIndex ?? 0) + values.length + 1 if (params.context != null) { - where.push(`n.context = $${index++}::uuid`) + where.push(`n.context_id = $${index++}::int8`) values.push(params.context) } - if (params.read === true) { - where.push('nc.last_view IS NOT NULL AND nc.last_view >= m.created') - } - - if (params.read === false) { - where.push('(nc.last_view IS NULL OR nc.last_view > m.created)') - } - - if (params.archived === true) { - where.push('nc.archived_from IS NOT NULL AND nc.archived_from >= m.created') + if (params.account != null) { + const accounts = Array.isArray(params.account) ? params.account : [params.account] + if (accounts.length === 1) { + where.push(`nc.account = $${index++}::uuid`) + values.push(accounts[0]) + } else if (accounts.length > 1) { + where.push(`nc.account = ANY ($${index++}::uuid[])`) + values.push(accounts) + } } - if (params.archived === false) { - where.push('(nc.archived_from IS NULL OR nc.archived_from > m.created)') + if (params.read === true) { + where.push('nc.last_view IS NOT NULL AND nc.last_view >= n.created') } - return { where: `WHERE ${where.join(' AND ')}`, values } - } - - toNotificationContext (row: any): NotificationContext { - return { - id: row.id, - card: row.card_id, - workspace: row.workspace_id, - personalWorkspace: row.personal_workspace, - archivedFrom: row.archived_from != null ? new Date(row.archived_from) : undefined, - lastView: row.last_view != null ? new Date(row.last_view) : undefined, - lastUpdate: row.last_update != null ? new Date(row.last_update) : undefined + if (params.read === false) { + where.push('(nc.last_view IS NULL OR nc.last_view < n.created)') } - } - toNotification (row: any): Notification { - const lastPatch = row.patches?.[0] - const lastView = row.last_view != null ? new Date(row.last_view) : undefined - const archivedFrom = row.archived_from != null ? new Date(row.archived_from) : undefined - const created = new Date(row.message_created) - - return { - message: { - id: row.id, - card: row.message_card, - content: lastPatch?.content ?? row.message_content, - creator: row.message_creator, - created, - edited: new Date(lastPatch?.created ?? row.message_created), - reactions: row.reactions ?? [], - attachments: row.attachments ?? 
[] - }, - context: row.context, - read: lastView != null && lastView >= created, - archived: archivedFrom != null && archivedFrom >= created - } + return { where: where.length > 0 ? `WHERE ${where.join(' AND ')}` : '', values } } } diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts index 298a35ba9d6..b09e608c0eb 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/db/schema.ts @@ -14,53 +14,48 @@ // import { + type AccountID, + type BlobID, + type CardID, type ContextID, type MessageID, + type MessageType, + type PatchType, type RichText, type SocialID, - type CardID, - type BlobID, - type Message, - type Reaction, - type Attachment, - type MessagesGroup, type WorkspaceID, - type PatchType, - type Patch, - type Thread + type NotificationID } from '@hcengineering/communication-types' export enum TableName { - Attachment = 'communication.attachments', + File = 'communication.files', Message = 'communication.messages', MessagesGroup = 'communication.messages_groups', Notification = 'communication.notifications', NotificationContext = 'communication.notification_context', Patch = 'communication.patch', Reaction = 'communication.reactions', - Thread = 'communication.thread' + Thread = 'communication.thread', + Collaborators = 'communication.collaborators' } export interface MessageDb { id: MessageID + type: MessageType workspace_id: WorkspaceID card_id: CardID content: RichText creator: SocialID created: Date - thread_id?: CardID - replies_count?: number - last_reply?: Date + data?: any } export interface MessagesGroupDb { workspace_id: WorkspaceID card_id: CardID blob_id: BlobID - from_date: Date - to_date: Date - from_id: MessageID - to_id: MessageID + from_sec: Date + to_sec: Date count: number patches?: PatchDb[] } @@ -73,6 +68,7 @@ export interface PatchDb { content: RichText creator: SocialID created: Date + message_created_sec: Date } export interface ReactionDb { @@ -84,11 +80,17 @@ export interface ReactionDb { created: Date } -export interface AttachmentDb { - message_id: MessageID +export interface FileDb { + workspace_id: WorkspaceID card_id: CardID + message_id: MessageID + blob_id: BlobID + filename: string + size: number + type: string creator: SocialID created: Date + message_created_sec: Date } export interface ThreadDb { @@ -101,98 +103,23 @@ export interface ThreadDb { } export interface NotificationDb { - message_id: MessageID - context: ContextID + id: NotificationID + message_id: MessageID | null + context_id: ContextID + created: Date } export interface ContextDb { workspace_id: WorkspaceID card_id: CardID - personal_workspace: WorkspaceID - - archived_from?: Date - last_view?: Date - last_update?: Date + account: AccountID + last_update: Date + last_view: Date } -interface RawMessage extends MessageDb { - patches?: PatchDb[] - attachments?: AttachmentDb[] - reactions?: ReactionDb[] -} - -export function toMessage (raw: RawMessage): Message { - const lastPatch = raw.patches?.[0] - - return { - id: String(raw.id) as MessageID, - card: raw.card_id, - content: lastPatch?.content ?? raw.content, - creator: raw.creator, - created: raw.created, - edited: lastPatch?.created ?? undefined, - thread: - raw.thread_id != null - ? { - card: raw.card_id, - message: String(raw.id) as MessageID, - thread: raw.thread_id, - repliesCount: raw.replies_count ?? 0, - lastReply: raw.last_reply ?? new Date() - } - : undefined, - reactions: (raw.reactions ?? []).map(toReaction), - attachments: (raw.attachments ?? 
[]).map(toAttachment) - } -} - -export function toReaction (raw: ReactionDb): Reaction { - return { - message: String(raw.message_id) as MessageID, - reaction: raw.reaction, - creator: raw.creator, - created: raw.created - } -} - -export function toAttachment (raw: AttachmentDb): Attachment { - return { - message: String(raw.message_id) as MessageID, - card: raw.card_id, - creator: raw.creator, - created: raw.created - } -} - -export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { - return { - card: raw.card_id, - blobId: raw.blob_id, - fromDate: raw.from_date, - toDate: raw.to_date, - fromId: String(raw.from_id) as MessageID, - toId: String(raw.to_id) as MessageID, - count: raw.count, - patches: raw.patches == null ? [] : raw.patches.filter((it: any) => it.message_id != null).map(toPatch) - } -} - -export function toPatch (raw: PatchDb): Patch { - return { - type: raw.type, - message: String(raw.message_id) as MessageID, - content: raw.content, - creator: raw.creator, - created: new Date(raw.created) - } -} - -export function toThread (raw: ThreadDb): Thread { - return { - card: raw.card_id, - message: String(raw.message_id) as MessageID, - thread: raw.thread_id, - repliesCount: raw.replies_count, - lastReply: raw.last_reply - } +export interface CollaboratorDb { + workspace_id: WorkspaceID + card_id: CardID + account: AccountID + date: Date } diff --git a/packages/cockroach/src/db/utils.ts b/packages/cockroach/src/db/utils.ts index 950ef1bb5aa..17a6fb92650 100644 --- a/packages/cockroach/src/db/utils.ts +++ b/packages/cockroach/src/db/utils.ts @@ -13,31 +13,47 @@ // limitations under the License. // -export function getCondition ( +export function getCondition( table: string, dbField: string, - index: number, + startIndex: number, param: any, type: string -): { where: string, value: any } | undefined { - if (typeof param === 'object') { - if (param.less != null) { - return { where: `${table}.${dbField} < $${index}::${type}`, value: param.less } +): { where: string, values: any[], index: number } | undefined { + const conditions: string[] = []; + const values: any[] = []; + let index = startIndex; + + if (param !== null && typeof param === 'object') { + if (param.less !== undefined) { + conditions.push(`${table}.${dbField} < $${index}::${type}`); + values.push(param.less); + index++; } - if (param.lessOrEqual != null) { - return { where: `${table}.${dbField} <= $${index}::${type}`, value: param.lessOrEqual } + if (param.lessOrEqual !== undefined) { + conditions.push(`${table}.${dbField} <= $${index}::${type}`); + values.push(param.lessOrEqual); + index++; } - if (param.greater != null) { - return { where: `${table}.${dbField} > $${index}::${type}`, value: param.greater } + if (param.greater !== undefined) { + conditions.push(`${table}.${dbField} > $${index}::${type}`); + values.push(param.greater); + index++; } - if (param.greaterOrEqual != null) { - return { where: `${table}.${dbField} >= $${index}::${type}`, value: param.greaterOrEqual } + if (param.greaterOrEqual !== undefined) { + conditions.push(`${table}.${dbField} >= $${index}::${type}`); + values.push(param.greaterOrEqual); + index++; } } - if (param != null) { - return { where: `${table}.${dbField} = $${index}::${type}`, value: param } + if (param != null && conditions.length === 0) { + conditions.push(`${table}.${dbField} = $${index}::${type}`); + values.push(param); + index++; } - return undefined -} + if (conditions.length === 0) return undefined; + + return { where: conditions.join(' AND '), values, index }; 
+} \ No newline at end of file diff --git a/packages/cockroach/src/types.ts b/packages/cockroach/src/types.ts index f8545d41294..1e7a5280e90 100644 --- a/packages/cockroach/src/types.ts +++ b/packages/cockroach/src/types.ts @@ -15,6 +15,9 @@ import { type ParameterOrJSON, type Row } from 'postgres' +export type SqlRow = Row & Iterable +export type SqlParams = ParameterOrJSON[] + export interface Logger { info: (message: string, data?: Record) => void warn: (message: string, data?: Record) => void @@ -26,6 +29,7 @@ export interface Options { } export interface SqlClient { - execute: )[]>(query: string, params?: ParameterOrJSON[]) => Promise + execute: (query: string, params?: SqlParams) => Promise + cursor: (query: string, params?: SqlParams, size?: number) => AsyncIterable[]> close: () => void } diff --git a/packages/cockroach/src/utils.ts b/packages/cockroach/src/utils.ts index 6ffc522d3fe..213f6d7f3f8 100644 --- a/packages/cockroach/src/utils.ts +++ b/packages/cockroach/src/utils.ts @@ -44,6 +44,8 @@ function escape(value: any): string { return `'${value.replace(/'/g, "''")}'` case 'bigint': return value.toString() + case 'object': + return `'${JSON.stringify(value)}'` default: throw new Error(`Unsupported value type: ${typeof value}`) } diff --git a/packages/examples/package.json b/packages/examples/package.json deleted file mode 100644 index c83e4869f4c..00000000000 --- a/packages/examples/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "@hcengineering/communication-examples", - "version": "0.1.61", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "./types/index.d.ts", - "files": [ - "dist/index.js", - "dist/index.cjs", - "types/**/*.d.ts" - ], - "scripts": { - "bundle": "bun run bundle:browser", - "bundle:browser": "bun build src/index.ts --outdir dist --target browser" - }, - "devDependencies": { - "@types/bun": "^1.1.14" - }, - "dependencies": { - "@hcengineering/communication-client-ws": "workspace:*", - "@hcengineering/communication-types": "workspace:*" - }, - "peerDependencies": { - "typescript": "^5.6.3" - } -} diff --git a/packages/examples/src/httpServer.ts b/packages/examples/src/httpServer.ts deleted file mode 100644 index 4320997f2b5..00000000000 --- a/packages/examples/src/httpServer.ts +++ /dev/null @@ -1,70 +0,0 @@ -Bun.serve({ - port: 3003, - - fetch(req) { - const url = new URL(req.url) - const headers = { - 'Cross-Origin-Opener-Policy': 'same-origin', - 'Cross-Origin-Embedder-Policy': 'require-corp' - } - if (url.pathname === '/') { - return new Response(Bun.file('./index.html'), { headers }) - } - - if (url.pathname.endsWith('index.js')) { - return new Response(Bun.file('../dist/index.js'), { headers }) - } - - if (url.pathname.endsWith('sqlite3-opfs-async-proxy.js')) { - return new Response( - Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-opfs-async-proxy.js'), - { - headers - } - ) - } - - if (url.pathname.endsWith('index.mjs')) { - return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/index.mjs'), { - headers - }) - } - - if (url.pathname.endsWith('sqlite3-worker1-promiser.mjs')) { - return new Response( - Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-worker1-promiser.mjs'), - { - headers - } - ) - } - - if (url.pathname.endsWith('sqlite3-worker1-bundler-friendly.mjs')) { - return new Response( - Bun.file( - '../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3-worker1-bundler-friendly.mjs' - ), - { - headers - } - ) - } - 
- if (url.pathname.endsWith('sqlite3.js')) { - return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3.js'), { - headers - }) - } - - if (url.pathname.endsWith('.wasm')) { - return new Response(Bun.file('../../../node_modules/@sqlite.org/sqlite-wasm/sqlite-wasm/jswasm/sqlite3.wasm'), { - headers: { - ...headers, - ContentType: 'application/wasm' - } - }) - } - - return new Response('Not Found', { status: 404, headers }) - } -}) diff --git a/packages/examples/src/index.html b/packages/examples/src/index.html deleted file mode 100644 index 1eae9ec030c..00000000000 --- a/packages/examples/src/index.html +++ /dev/null @@ -1,51 +0,0 @@ - - - - - -

- - - - - - - \ No newline at end of file diff --git a/packages/examples/src/index.ts b/packages/examples/src/index.ts deleted file mode 100644 index d72db680f71..00000000000 --- a/packages/examples/src/index.ts +++ /dev/null @@ -1,126 +0,0 @@ -import { - type Message, - type SocialID, - SortingOrder, - type CardID, - type Window, - type WorkspaceID -} from '@hcengineering/communication-types' -import { getWebsocketClient, createMessagesQuery } from '@hcengineering/communication-client-ws' - -const card = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as CardID -const workspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f6' as WorkspaceID -// const personalWorkspace = 'cd0aba36-1c4f-4170-95f2-27a12a5415f5' as WorkspaceID -const creator1 = 'email:vasya@huly.com' as SocialID - -export async function example() { - const platformUrl = 'ws://localhost:8090' - const token = 'token' - const client = await getWebsocketClient( - platformUrl, - token, - workspace, - 'http://localhost:4022/blob/:workspace/:blobId/:filename' - ) - - const query1 = createMessagesQuery() - - let window: Window | undefined = undefined - - query1.query({ card, order: SortingOrder.Descending }, (res) => { - window = res - const r = window.getResult() - r.reverse() - showMessages(r) - }) - - document.getElementById('forward-button')?.addEventListener('click', async () => { - if (window == null) return - await window.loadNextPage() - }) - - document.getElementById('backward-button')?.addEventListener('click', async () => { - if (window == null) return - await window.loadPrevPage() - }) - - async function editMessage(message: Message) { - await client.updateMessage(card, message.id, message.content + '_1_', creator1) - } - - async function deleteMessage(message: Message) { - await client.removeMessage(card, message.id) - } - - async function addReaction(message: Message) { - await client.createReaction(card, message.id, '👍', creator1) - } - - async function removeReaction(message: Message) { - await client.removeReaction(card, message.id, '👍', creator1) - } - - function scrollToBottom() { - const el = document.getElementById('chat') - if (el == null) return - el.scrollTo(0, el.scrollHeight) - } - - async function showMessages(messages: ReadonlyArray) { - const el = document.getElementById('messages') - if (el == null) return - el.innerHTML = '' - for (const message of messages) { - const div = el.appendChild(document.createElement('div')) - div.className = 'message' - - const messageContent = document.createElement('span') - messageContent.textContent = message.content + ' ' + message.reactions.map((it) => it.reaction).join(' ') - // + ' ' + messages.created.getTime() - div.appendChild(messageContent) - - const buttonsDiv = document.createElement('div') - buttonsDiv.className = 'buttons' - - const editButton = document.createElement('button') - editButton.textContent = 'Edit' - editButton.className = 'edit-button' - editButton.addEventListener('click', () => editMessage(message)) - buttonsDiv.appendChild(editButton) - - const deleteButton = document.createElement('button') - deleteButton.textContent = 'Remove' - deleteButton.className = 'delete-button' - deleteButton.addEventListener('click', () => deleteMessage(message)) - buttonsDiv.appendChild(deleteButton) - - const addReactionButton = document.createElement('button') - addReactionButton.textContent = '+R' - addReactionButton.className = 'add-reaction-button' - addReactionButton.addEventListener('click', () => addReaction(message)) - buttonsDiv.appendChild(addReactionButton) - - const 
removeReactionButton = document.createElement('button') - removeReactionButton.textContent = '-R' - removeReactionButton.className = 'remove-reaction-button' - removeReactionButton.addEventListener('click', () => removeReaction(message)) - buttonsDiv.appendChild(removeReactionButton) - - div.appendChild(buttonsDiv) - } - scrollToBottom() - } - - document.getElementById('form')?.addEventListener('submit', async (event) => { - event.preventDefault() - // @ts-expect-error error - const el = event.target?.getElementsByTagName('input')[0] as HTMLInputElement - if (el.value == '' || el.value == null) return - - await client.createMessage(card, el.value, creator1) - - el.value = '' - }) -} - -void example() diff --git a/packages/query/package.json b/packages/query/package.json index d2f5783f9c2..0afa492501d 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.61", + "version": "0.1.147", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -21,6 +21,7 @@ "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-yaml": "workspace:*", "fast-equals": "^5.0.1" }, "peerDependencies": { diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts index 45ba5768222..d5362634f08 100644 --- a/packages/query/src/index.ts +++ b/packages/query/src/index.ts @@ -13,4 +13,5 @@ // limitations under the License. // -export * from './lq.ts' +export * from './lq' +export type { QueryClient } from './types' diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 5ace34515d4..e1b333a1cd4 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -15,26 +15,33 @@ import { type FindMessagesParams, + type FindNotificationContextParams, type FindNotificationsParams, - type WorkspaceID + type Message, + type WorkspaceID, + type Notification, + type NotificationContext } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' import type { MessagesQueryCallback, NotificationsQueryCallback, ResponseEvent, - QueryClient + QueryCallback, + RequestEvent, + EventResult } from '@hcengineering/communication-sdk-types' -import type { FindParams, PagedQuery, QueryId } from './types' +import type { PagedQuery, FindParams, QueryId, QueryClient } from './types' import { MessagesQuery } from './messages/query' import { NotificationQuery } from './notifications/query' +import { NotificationContextsQuery } from './notification-contexts/query' interface CreateQueryResult { unsubscribe: () => void } -const maxQueriesCache = 10 +const maxQueriesCache = 20 export class LiveQueries { private readonly queries = new Map() @@ -45,27 +52,61 @@ export class LiveQueries { private readonly client: QueryClient, private readonly workspace: WorkspaceID, private readonly filesUrl: string - ) {} + ) { + this.client.onEvent = (event) => { + void this.onEvent(event) + } + this.client.onRequest = (event, promise) => { + void this.onRequest(event, promise) + } + } async onEvent(event: ResponseEvent): Promise { for (const q of this.queries.values()) { - await q.onEvent(event) + void q.onEvent(event) } } - queryMessages(params: FindMessagesParams, callback: MessagesQueryCallback): CreateQueryResult { - const query = this.createMessagesQuery(params, callback) - this.queries.set(query.id, query) - - return { - unsubscribe: () => 
{ - this.unsubscribeQuery(query) - } + async onRequest(event: RequestEvent, promise: Promise): Promise { + for (const q of this.queries.values()) { + void q.onRequest(event, promise) } } + queryMessages(params: FindMessagesParams, callback: MessagesQueryCallback): CreateQueryResult { + return this.createAndStoreQuery( + params, + callback, + MessagesQuery, + (params) => this.findMessagesQuery(params) + ) + } + queryNotifications(params: FindNotificationsParams, callback: NotificationsQueryCallback): CreateQueryResult { - const query = this.createNotificationQuery(params, callback) + return this.createAndStoreQuery( + params, + callback, + NotificationQuery, + (params) => this.findNotificationQuery(params) + ) + } + + queryNotificationContexts(params: FindNotificationContextParams, callback: any): CreateQueryResult { + return this.createAndStoreQuery( + params, + callback, + NotificationContextsQuery, + (params) => this.findNotificationContextsQuery(params) + ) + } + + private createAndStoreQuery>( + params: P, + callback: QueryCallback, + QueryClass: new (...args: any[]) => Q, + finder: (params: P) => Q | undefined + ): CreateQueryResult { + const query = this.createQuery(params, callback, QueryClass, finder) this.queries.set(query.id, query) return { @@ -75,68 +116,51 @@ export class LiveQueries { } } - private createMessagesQuery(params: FindMessagesParams, callback: MessagesQueryCallback): MessagesQuery { + private createQuery>( + params: P, + callback: any, + QueryClass: new (...args: any[]) => Q, + finder: (params: P) => Q | undefined + ): Q { const id = ++this.counter - const exists = this.findMessagesQuery(params) + const exists = finder(params) if (exists !== undefined) { - if (this.unsubscribed.has(id)) { - this.unsubscribed.delete(id) + if (this.unsubscribed.has(exists.id)) { + this.unsubscribed.delete(exists.id) exists.setCallback(callback) return exists } else { const result = exists.copyResult() - return new MessagesQuery(this.client, this.workspace, this.filesUrl, id, params, callback, result) + return new QueryClass(this.client, this.workspace, this.filesUrl, id, params, callback, result) } } - return new MessagesQuery(this.client, this.workspace, this.filesUrl, id, params, callback) + return new QueryClass(this.client, this.workspace, this.filesUrl, id, params, callback) } - private createNotificationQuery( - params: FindNotificationsParams, - callback: NotificationsQueryCallback - ): NotificationQuery { - const id = ++this.counter - const exists = this.findNotificationQuery(params) - - if (exists !== undefined) { - if (this.unsubscribed.has(id)) { - this.unsubscribed.delete(id) - exists.setCallback(callback) - return exists - } else { - const result = exists.copyResult() - return new NotificationQuery(this.client, id, params, callback, result) + private findQuery(params: FindParams, QueryClass: new (...args: any[]) => T): T | undefined { + for (const query of this.queries.values()) { + if (query instanceof QueryClass && this.queryCompare(params, query.params)) { + return query as T } } - - return new NotificationQuery(this.client, id, params, callback) } private findMessagesQuery(params: FindMessagesParams): MessagesQuery | undefined { - for (const query of this.queries.values()) { - if (query instanceof MessagesQuery) { - if (!this.queryCompare(params, query.params)) continue - return query - } - } + return this.findQuery(params, MessagesQuery) } private findNotificationQuery(params: FindNotificationsParams): NotificationQuery | undefined { - for (const query of 
this.queries.values()) { - if (query instanceof NotificationQuery) { - if (!this.queryCompare(params, query.params)) continue - return query - } - } + return this.findQuery(params, NotificationQuery) + } + + private findNotificationContextsQuery(params: FindNotificationContextParams): NotificationContextsQuery | undefined { + return this.findQuery(params, NotificationContextsQuery) } private queryCompare(q1: FindParams, q2: FindParams): boolean { - if (Object.keys(q1).length !== Object.keys(q2).length) { - return false - } - return deepEqual(q1, q2) + return Object.keys(q1).length === Object.keys(q2).length && deepEqual(q1, q2) } private removeOldQueries(): void { @@ -166,5 +190,6 @@ export class LiveQueries { close(): void { this.queries.clear() + this.unsubscribed.clear() } } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 877fe3363e1..8097f79a34e 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -16,40 +16,48 @@ import { type FindMessagesParams, type Message, - type MessagesGroup, - type WorkspaceID, type MessageID, - SortingOrder, - type Patch, + type MessagesGroup, type ParsedFile, + type Patch, PatchType, - type CardID + type Reaction, + type SocialID, + SortingOrder, + type WorkspaceID, + type File, + type BlobID, + MessageType } from '@hcengineering/communication-types' import { - ResponseEventType, - type AttachmentCreatedEvent, - type AttachmentRemovedEvent, + type FileCreatedEvent, + type FileRemovedEvent, type MessageCreatedEvent, - type MessageRemovedEvent, + type MessagesRemovedEvent, type PatchCreatedEvent, type QueryCallback, - type QueryClient, type ReactionCreatedEvent, type ReactionRemovedEvent, + type CreateMessageEvent, + type RequestEvent, + RequestEventType, type ResponseEvent, - type ThreadCreatedEvent + ResponseEventType, + type ThreadCreatedEvent, + type EventResult, + type CreateMessageResult } from '@hcengineering/communication-sdk-types' -import { loadGroupFile, parseMessageId } from '@hcengineering/communication-shared' +import { applyPatch, applyPatches, generateMessageId, parseMessageId } from '@hcengineering/communication-shared' +import { loadGroupFile } from '@hcengineering/communication-yaml' import { QueryResult } from '../result' -import { defaultQueryParams, Direction, type QueryId, type PagedQuery } from '../types' +import { defaultQueryParams, Direction, type PagedQuery, type QueryId, type QueryClient } from '../types' import { WindowImpl } from '../window' -import { addReaction, addReply, removeReaction, removeReply } from './utils' const GROUPS_LIMIT = 20 export class MessagesQuery implements PagedQuery { - protected result: Promise> | QueryResult + private result: Promise> | QueryResult private messagesFromFiles: Message[] = [] @@ -70,8 +78,10 @@ export class MessagesQuery implements PagedQuery { hasGroups: true } + private tmpMessages: Map = new Map() + constructor ( - protected readonly client: QueryClient, + private readonly client: QueryClient, private readonly workspace: WorkspaceID, private readonly filesUrl: string, public readonly id: QueryId, @@ -107,22 +117,132 @@ export class MessagesQuery implements PagedQuery { this.result = new QueryResult([] as Message[], (x) => x.id) if (this.isInitLoadingForward()) { - this.result.setHead(true) + this.result.setHead(this.params.created == null) void this.requestLoadNextPage() } else { - this.result.setTail(true) + this.result.setTail(this.params.created == null) void this.requestLoadPrevPage() } } } - 
setCallback (callback: QueryCallback): void { - this.callback = callback - void this.notify() + async onEvent (event: ResponseEvent): Promise { + switch (event.type) { + case ResponseEventType.MessageCreated: { + await this.onMessageCreatedEvent(event) + break + } + case ResponseEventType.MessagesRemoved: { + await this.onMessagesRemovedEvent(event) + break + } + case ResponseEventType.PatchCreated: { + await this.onPatchCreatedEvent(event) + break + } + case ResponseEventType.ReactionCreated: { + await this.onReactionCreatedEvent(event) + break + } + case ResponseEventType.ReactionRemoved: { + await this.onReactionRemovedEvent(event) + break + } + case ResponseEventType.FileCreated: { + await this.onFileCreatedEvent(event) + break + } + case ResponseEventType.FileRemoved: { + await this.onFileRemovedEvent(event) + break + } + case ResponseEventType.ThreadCreated: { + await this.onThreadCreatedEvent(event) + } + } } - removeCallback (): void { - this.callback = () => {} + async onRequest (event: RequestEvent, promise: Promise): Promise { + switch (event.type) { + case RequestEventType.CreateMessage: { + await this.onCreateMessageRequest(event, promise as Promise) + break + } + } + } + + async onCreateMessageRequest( + event: CreateMessageEvent, + promise: Promise + ): Promise { + if (this.params.card !== event.card) return; + const eventId = event._id + if(eventId == null) return + + const tmpId = generateMessageId(); + let resultId: MessageID | undefined; + const tmpMessage: Message = { + id: tmpId, + type: MessageType.Message, + card: event.card, + content: event.content, + creator: event.creator, + created: new Date(), + data: event.data, + edited: undefined, + thread: undefined, + reactions: [], + files: [] + }; + + if (!this.match(tmpMessage)) return; + + promise + .then(async (result) => { + this.tmpMessages.delete(eventId) + resultId = result.id; + if (this.result instanceof Promise) this.result = await this.result + + if (this.result.get(resultId)) { + if (this.result.delete(tmpId)) { + await this.notify(); + } + } else { + const updatedMessage = { ...tmpMessage, id: resultId }; + this.result.delete(tmpId); + + this.insertMessage(this.result, updatedMessage); + + void this.notify(); + } + }) + .catch(async () => { + if (this.result instanceof Promise) this.result = await this.result + this.tmpMessages.delete(eventId) + if (this.result.delete(tmpId)) { + void this.notify(); + } + }); + + if (this.result instanceof Promise) this.result = await this.result; + + if (resultId === undefined && this.result.isTail()) { + this.tmpMessages.set(eventId, tmpId) + this.insertMessage(this.result, tmpMessage); + void this.notify(); + } + } + + private insertMessage(result: QueryResult, message: Message): void { + if (this.params.order === SortingOrder.Ascending) { + result.push(message); + } else { + result.unshift(message); + } + } + + async unsubscribe (): Promise { + await this.client.unsubscribeQuery(this.id) } async requestLoadNextPage (): Promise { @@ -152,22 +272,31 @@ export class MessagesQuery implements PagedQuery { } } + removeCallback (): void { + this.callback = () => {} + } + + setCallback (callback: QueryCallback): void { + this.callback = callback + void this.notify() + } + + copyResult (): QueryResult | undefined { + if (this.result instanceof Promise) { + return undefined + } + + return this.result.copy() + } + private isInitLoadingForward (): boolean { - const { order, created, id } = this.params + const { order, id } = this.params if (id != null) { - return false + return true 
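      // Illustrative note (not part of this patch): when a concrete message id is requested,
      // the initial load now goes forward — findGroupByMessage() below locates the group whose
      // fromSec/toSec range contains that id, and paging continues from there.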
} - if (created == null) return order === SortingOrder.Ascending - if (created instanceof Date) return order === SortingOrder.Ascending - // TODO: fix me - if (created.less != null) return order !== SortingOrder.Ascending - if (created.lessOrEqual != null) return order !== SortingOrder.Ascending - if (created.greater != null) return order === SortingOrder.Ascending - if (created.greaterOrEqual != null) return order === SortingOrder.Ascending - - return false + return order === SortingOrder.Ascending } private async loadPage (direction: Direction, result: QueryResult): Promise> { @@ -190,7 +319,11 @@ export class MessagesQuery implements PagedQuery { } } - result.append(messages) + if (this.params.order === SortingOrder.Ascending && direction === Direction.Backward) { + result.prepend(messages.reverse()) + } else { + result.append(messages) + } return result } @@ -229,9 +362,9 @@ export class MessagesQuery implements PagedQuery { created: last != null ? { - greater: last?.created - } - : undefined, + greater: last.created + } + : this.params.created, limit, order: SortingOrder.Ascending }) @@ -271,16 +404,16 @@ export class MessagesQuery implements PagedQuery { private async findPrevMessages (limit: number, result: QueryResult): Promise { if (!this.prev.hasMessages || result.isHead()) return [] - const first = result.getLast() + const first = this.params.order === SortingOrder.Ascending ? result.getFirst() : result.getLast() return await this.find({ ...this.params, created: first != null ? { - less: first?.created - } - : undefined, + less: first?.created + } + : this.params.created, limit, order: SortingOrder.Descending }) @@ -302,7 +435,7 @@ export class MessagesQuery implements PagedQuery { ? currentGroups : await this.findGroups( direction, - direction === Direction.Forward ? this.lastGroup?.fromDate : this.firstGroup?.fromDate + direction === Direction.Forward ? this.lastGroup?.fromSec : this.firstGroup?.fromSec ) if (currentGroups.length === 0) { @@ -380,7 +513,6 @@ export class MessagesQuery implements PagedQuery { private async loadMessagesFromFiles (group: MessagesGroup): Promise { const parsedFile = await loadGroupFile(this.workspace, this.filesUrl, group, { retries: 5 }) - const patches = group.patches ?? [] const patchesMap = new Map() @@ -392,36 +524,24 @@ export class MessagesQuery implements PagedQuery { metadata: parsedFile.metadata, messages: patches.length > 0 - ? parsedFile.messages.map((message) => this.applyPatches(message, patchesMap.get(message.id) ?? [])) + ? parsedFile.messages.map((message) => applyPatches(message, patchesMap.get(message.id) ?? 
[], this.allowedPatches())) : parsedFile.messages } } - private async findGroupByMessage (id: MessageID): Promise { - const date = parseMessageId(id) - const group1 = (await this.client.findMessagesGroups({ - card: this.params.card, - limit: 1, - toDate: { - greaterOrEqual: date - }, - order: SortingOrder.Descending, - orderBy: 'fromDate' - }))[0] + private async findGroupByMessage(id: MessageID): Promise { + const date = parseMessageId(id); - if (group1 !== undefined) { - return group1 - } - - return (await this.client.findMessagesGroups({ + const groups = await this.client.findMessagesGroups({ card: this.params.card, + fromSec: { lessOrEqual: date }, + toSec: { greaterOrEqual: date }, limit: 1, - fromDate: { - lessOrEqual: date - }, order: SortingOrder.Ascending, - orderBy: 'fromDate' - }))[0] + orderBy: 'fromSec' + }); + + return groups[0]; } private async findGroups (direction: Direction, fromDate?: Date): Promise { @@ -435,7 +555,7 @@ export class MessagesQuery implements PagedQuery { card: this.params.card, limit: GROUPS_LIMIT, order: direction === Direction.Forward ? SortingOrder.Ascending : SortingOrder.Descending, - orderBy: 'fromDate' + orderBy: 'fromSec' }) } @@ -443,15 +563,15 @@ export class MessagesQuery implements PagedQuery { card: this.params.card, limit: GROUPS_LIMIT, order: direction === Direction.Forward ? SortingOrder.Ascending : SortingOrder.Descending, - orderBy: 'fromDate', - fromDate: + orderBy: 'fromSec', + fromSec: direction === Direction.Forward ? { - greater: fromDate - } + greater: fromDate + } : { - less: fromDate - } + less: fromDate + } }) } @@ -466,47 +586,18 @@ export class MessagesQuery implements PagedQuery { this.callback(new WindowImpl(result, this.result.isTail(), this.result.isHead(), this)) } - async unsubscribe (): Promise { - await this.client.unsubscribeQuery(this.id) - } - - async onEvent (event: ResponseEvent): Promise { - switch (event.type) { - case ResponseEventType.MessageCreated: { - await this.onCreateMessageEvent(event) - return - } - case ResponseEventType.MessageRemoved: { - await this.onRemoveMessageEvent(event) - return - } - case ResponseEventType.PatchCreated: { - await this.onCreatePatchEvent(event) - return - } - case ResponseEventType.ReactionCreated: { - await this.onCreateReactionEvent(event) - return - } - case ResponseEventType.ReactionRemoved: { - await this.onRemoveReactionEvent(event) - return - } - case ResponseEventType.AttachmentCreated: { - await this.onCreateAttachmentEvent(event) - return - } - case ResponseEventType.AttachmentRemoved: { - await this.onRemoveAttachmentEvent(event) - return - } - case ResponseEventType.ThreadCreated: { - await this.onCreateThreadEvent(event) - } + private match (message: Message): boolean { + if (this.params.id != null && this.params.id !== message.id) { + return false + } + if (this.params.card !== message.card) { + return false } + return true } - async onCreateThreadEvent (event: ThreadCreatedEvent): Promise { + private async onThreadCreatedEvent (event: ThreadCreatedEvent): Promise { + if(this.params.replies !== true) return if (this.params.card !== event.thread.card) return if (this.result instanceof Promise) this.result = await this.result @@ -532,7 +623,7 @@ export class MessagesQuery implements PagedQuery { }) } - async onCreateMessageEvent (event: MessageCreatedEvent): Promise { + private async onMessageCreatedEvent (event: MessageCreatedEvent): Promise { if (this.result instanceof Promise) this.result = await this.result if (this.params.card !== event.message.card) return 
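    // Illustrative note (not part of this patch): if this event confirms a pending optimistic
    // create (see onCreateMessageRequest above), the temporary message stored in tmpMessages
    // under the request _id is deleted further down before the confirmed message is appended,
    // so subscribers never observe a duplicate.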
const { message } = event @@ -542,6 +633,12 @@ export class MessagesQuery implements PagedQuery { if (!this.match(message)) return if (this.result.isTail()) { + const eventId = event._id + if(eventId != null) { + const tmp = this.tmpMessages.get(eventId) + if (tmp) this.result.delete(tmp) + this.tmpMessages.delete(eventId) + } if (this.params.order === SortingOrder.Ascending) { this.result.push(message) } else { @@ -551,51 +648,50 @@ export class MessagesQuery implements PagedQuery { } } - private match (message: Message): boolean { - if (this.params.id != null && this.params.id !== message.id) { - return false - } - if (this.params.card !== message.card) { - return false - } - return true - } - - private async onCreatePatchEvent (event: PatchCreatedEvent): Promise { + private async onPatchCreatedEvent (event: PatchCreatedEvent): Promise { if (this.params.card !== event.card) return + if (!this.isAllowedPatch(event.patch.type)) return if (this.result instanceof Promise) this.result = await this.result const { patch } = event - const messageId = BigInt(patch.message) - const group = this.groupsBuffer.find((it) => BigInt(it.fromId) <= messageId && BigInt(it.toId) >= messageId) + const created = parseMessageId(patch.message) + const groups = this.groupsBuffer.filter((it) => it.fromSec <= created && it.toSec >= created) - if (group != null && group.patches != null) { - group.patches.push(patch) + for (const group of groups) { + if (group.patches != null) { + group.patches.push(patch) + } } const message = this.result.get(patch.message) if (message === undefined) return if (message.created < patch.created) { - this.result.update(this.applyPatch(message, patch)) + this.result.update(applyPatch(message, patch, this.allowedPatches())) await this.notify() } } - private async onRemoveMessageEvent (event: MessageRemovedEvent): Promise { + private async onMessagesRemovedEvent (event: MessagesRemovedEvent): Promise { if (this.params.card !== event.card) return if (this.result instanceof Promise) this.result = await this.result - const deleted = this.result.delete(event.message) + let isDeleted = false + + for (const message of event.messages) { + const deleted = this.result.delete(message) + isDeleted = isDeleted || deleted !== undefined + } - if (deleted !== undefined) { + if (isDeleted) { void this.notify() } - this.messagesFromFiles = this.messagesFromFiles.filter((it) => it.id !== event.message) + this.messagesFromFiles = this.messagesFromFiles.filter((it) => !event.messages.includes(it.id)) } - private async onCreateReactionEvent (event: ReactionCreatedEvent): Promise { + private async onReactionCreatedEvent (event: ReactionCreatedEvent): Promise { + if(this.params.reactions !== true) return if (this.result instanceof Promise) this.result = await this.result if (this.params.card !== event.card) return @@ -616,7 +712,8 @@ export class MessagesQuery implements PagedQuery { } } - private async onRemoveReactionEvent (event: ReactionRemovedEvent): Promise { + private async onReactionRemovedEvent (event: ReactionRemovedEvent): Promise { + if(this.params.reactions !== true) return if (this.result instanceof Promise) this.result = await this.result if (this.params.card !== event.card) return @@ -633,78 +730,96 @@ export class MessagesQuery implements PagedQuery { ) } - private async onCreateAttachmentEvent (event: AttachmentCreatedEvent): Promise { + private async onFileCreatedEvent (event: FileCreatedEvent): Promise { + if(this.params.files !== true) return if (this.result instanceof Promise) this.result 
= await this.result - const attachment = { - ...event.attachment, - created: event.attachment.created + const { file } = event + const message = this.result.get(file.message) + if (message !== undefined) { + message.files.push(file) + this.result.update(message) + await this.notify() } - const message = this.result.get(attachment.message) - if (message === undefined) return - message.attachments.push(attachment) - this.result.update(message) - await this.notify() + const fromBuffer = this.messagesFromFiles.find((it) => it.id === file.message) + if (fromBuffer !== undefined) { + addFile(fromBuffer, file) + } } - private async onRemoveAttachmentEvent (event: AttachmentRemovedEvent): Promise { + private async onFileRemovedEvent (event: FileRemovedEvent): Promise { + if(this.params.files !== true) return + if(this.params.card !== event.card) return if (this.result instanceof Promise) this.result = await this.result const message = this.result.get(event.message) - if (message === undefined) return + if (message !== undefined) { - const attachments = message.attachments.filter((it) => it.card !== event.card) - if (attachments.length === message.attachments.length) return + const files = message.files.filter((it) => it.blobId !== event.blobId) + if (files.length === message.files.length) return - const updated = { - ...message, - attachments + const updated = { + ...message, + files + } + this.result.update(updated) + await this.notify() } - this.result.update(updated) - await this.notify() + + this.messagesFromFiles = this.messagesFromFiles.map((it) => + it.id === event.message ? removeFile(it, event.blobId) : it + ) } - private applyPatch (message: Message, patch: Patch): Message { - switch (patch.type) { - case PatchType.update: - return { - ...message, - edited: patch.created, - content: patch.content - } - case PatchType.addReaction: - return addReaction(message, { - message: message.id, - reaction: patch.content, - creator: patch.creator, - created: patch.created - }) - case PatchType.removeReaction: - return removeReaction(message, patch.content, patch.creator) - case PatchType.addReply: - return addReply(message, patch.content as CardID, patch.created) - case PatchType.removeReply: - return removeReply(message, patch.content as CardID) + private allowedPatches (): PatchType[] { + const result = [PatchType.update] + + if(this.params.reactions === true) { + result.push(PatchType.addReaction, PatchType.removeReaction) + } + if(this.params.files === true) { + result.push(PatchType.addFile, PatchType.removeFile) + } + if(this.params.replies === true) { + result.push(PatchType.addReply, PatchType.removeReply) } + return result + } - return message + private isAllowedPatch (type: PatchType): boolean { + return this.allowedPatches().includes(type) } +} - private applyPatches (message: Message, patches: Patch[]): Message { - if (patches.length === 0) return message - for (const p of patches) { - message = this.applyPatch(message, p) - } - return message +function addFile (message: Message, file:File): Message { + message.files.push(file) + return message +} + +function removeFile (message: Message, blobId: BlobID): Message { + const files = message.files.filter((it) => it.blobId !== blobId) + if (files.length === message.files.length) return message + + return { + ...message, + files } +} - copyResult (): QueryResult | undefined { - if (this.result instanceof Promise) { - return undefined - } +function addReaction (message: Message, reaction: Reaction): Message { + message.reactions.push(reaction) 
+ return message +} - return this.result.copy() + +function removeReaction (message: Message, emoji: string, creator: SocialID): Message { + const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) + if (reactions.length === message.reactions.length) return message + + return { + ...message, + reactions } } diff --git a/packages/query/src/messages/utils.ts b/packages/query/src/messages/utils.ts deleted file mode 100644 index 41a9ce4c7ef..00000000000 --- a/packages/query/src/messages/utils.ts +++ /dev/null @@ -1,69 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import type { CardID, Message, Reaction, SocialID } from '@hcengineering/communication-types' - -export function addReaction (message: Message, reaction: Reaction): Message { - message.reactions.push(reaction) - return message -} - -export function removeReaction (message: Message, emoji: string, creator: SocialID): Message { - const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) - if (reactions.length === message.reactions.length) return message - - return { - ...message, - reactions - } -} - -export function addReply (message: Message, thread: CardID, created: Date): Message { - if (message.thread === undefined) { - return { - ...message, - thread: { - card: message.card, - message: message.id, - thread, - repliesCount: 1, - lastReply: created - } - } - } - - if (message.thread.thread !== thread) return message - - return { - ...message, - thread: { - ...message.thread, - repliesCount: message.thread.repliesCount + 1, - lastReply: created - } - } -} - -export function removeReply (message: Message, thread: CardID): Message { - if (message.thread === undefined || message.thread.thread !== thread) return message - - return { - ...message, - thread: { - ...message.thread, - repliesCount: message.thread.repliesCount - 1 - } - } -} diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts new file mode 100644 index 00000000000..a8dd93fb277 --- /dev/null +++ b/packages/query/src/notification-contexts/query.ts @@ -0,0 +1,510 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
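The NotificationContextsQuery defined below is consumed through the LiveQueries facade from lq.ts. A minimal usage sketch, not part of the patch — `client`, `workspace` and `filesUrl` are placeholders, and the exact field set of FindNotificationContextParams is assumed from how the params are read in this file:

import { LiveQueries } from '@hcengineering/communication-query'
import { SortingOrder } from '@hcengineering/communication-types'

// `client` implements the QueryClient interface declared in packages/query/src/types.ts.
const lq = new LiveQueries(client, workspace, filesUrl)

const { unsubscribe } = lq.queryNotificationContexts(
  {
    limit: 20,
    order: SortingOrder.Descending,
    // Hydrate each context with its latest notifications (and their messages).
    notifications: { limit: 10, order: SortingOrder.Descending, message: true }
  },
  (window) => {
    // `window` wraps the current page of NotificationContext objects; it is re-emitted
    // whenever a relevant ResponseEvent updates the query result.
  }
)

// Later, when the consuming view goes away:
unsubscribe()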
+// + +import { + type FindNotificationContextParams, + type Notification, + type NotificationContext, + PatchType, + SortingOrder, + type WorkspaceID +} from '@hcengineering/communication-types' +import { + type MessagesRemovedEvent, + type NotificationContextCreatedEvent, + type NotificationContextRemovedEvent, + type NotificationContextUpdatedEvent, + type NotificationCreatedEvent, + type NotificationsRemovedEvent, + type PatchCreatedEvent, + type QueryCallback, + type RequestEvent, + type ResponseEvent, + ResponseEventType +} from '@hcengineering/communication-sdk-types' +import { applyPatch } from '@hcengineering/communication-shared' + +import { defaultQueryParams, type PagedQuery, type QueryId, type QueryClient } from '../types' +import { QueryResult } from '../result' +import { WindowImpl } from '../window' +import { loadMessageFromGroup } from '../utils' + +const allowedPatchTypes = [PatchType.update, PatchType.addReaction, PatchType.removeReaction] + +export class NotificationContextsQuery implements PagedQuery { + private result: QueryResult | Promise> + private forward: Promise | NotificationContext[] = [] + private backward: Promise | NotificationContext[] = [] + + constructor ( + private readonly client: QueryClient, + private readonly workspace: WorkspaceID, + private readonly filesUrl: string, + public readonly id: QueryId, + public readonly params: FindNotificationContextParams, + private callback?: QueryCallback, + initialResult?: QueryResult + ) { + this.params = { + ...params, + limit: params.limit, + order: params.order ?? defaultQueryParams.order + } + const limit = params.limit != null ? params.limit + 1 : undefined + const findParams = { + ...this.params, + sort: this.params.order ?? defaultQueryParams.order, + limit + } + + if (initialResult !== undefined) { + this.result = initialResult + void this.notify() + } else { + const findPromise = this.find(findParams) + this.result = findPromise.then((res) => { + const allLoaded = limit == null || res.length <= limit + const isTail = allLoaded || params.lastUpdate == null + const isHead = allLoaded + if (limit != null && res.length > limit) { + res.pop() + } + const qResult = new QueryResult(res, (x) => x.id) + qResult.setTail(isTail) + qResult.setHead(isHead) + + return qResult + }) + this.result + .then(async () => { + await this.notify() + }) + .catch((err: any) => { + console.error('Failed to update Live query: ', err) + }) + } + } + + async onEvent (event: ResponseEvent): Promise { + switch (event.type) { + case ResponseEventType.PatchCreated: { + await this.onCreatePatchEvent(event) + break + } + case ResponseEventType.MessagesRemoved: { + await this.onMessagesRemovedEvent(event) + break + } + case ResponseEventType.NotificationCreated: { + await this.onCreateNotificationEvent(event) + break + } + case ResponseEventType.NotificationsRemoved: { + await this.onRemoveNotificationEvent(event) + break + } + case ResponseEventType.NotificationContextCreated: { + await this.onCreateNotificationContextEvent(event) + break + } + case ResponseEventType.NotificationContextUpdated: { + await this.onUpdateNotificationContextEvent(event) + break + } + case ResponseEventType.NotificationContextRemoved: { + await this.onRemoveNotificationContextEvent(event) + } + } + } + + async onRequest (event: RequestEvent): Promise {} + + async unsubscribe (): Promise { + await this.client.unsubscribeQuery(this.id) + } + + async requestLoadNextPage (): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + if 
(this.forward instanceof Promise) { + this.forward = await this.forward + } + + if (this.result.isTail()) return + + const last = this.result.getLast() + if (last === undefined) return + + const limit = this.params.limit ?? defaultQueryParams.limit + const findParams: FindNotificationContextParams = { + ...this.params, + lastUpdate: { + greater: last.lastUpdate + }, + limit: limit + 1, + order: SortingOrder.Ascending + } + + const forward = this.find(findParams) + + this.forward = forward.then(async (res) => { + if (this.result instanceof Promise) { + this.result = await this.result + } + const isTail = res.length <= limit + if (!isTail) { + res.pop() + } + this.result.append(res) + this.result.setTail(isTail) + await this.notify() + return res + }) + } + + async requestLoadPrevPage (): Promise { + if (this.result instanceof Promise) { + this.result = await this.result + } + if (this.backward instanceof Promise) { + this.backward = await this.backward + } + + if (this.result.isHead()) return + + const first = this.params.order === SortingOrder.Ascending ? this.result.getFirst() : this.result.getLast() + if (first === undefined) return + + const limit = this.params.limit ?? defaultQueryParams.limit + const findParams: FindNotificationContextParams = { + ...this.params, + lastUpdate: { + less: first.lastUpdate + }, + limit: limit + 1, + order: SortingOrder.Descending + } + + const backward = this.find(findParams) + this.backward = backward.then(async (res) => { + if (this.result instanceof Promise) { + this.result = await this.result + } + const isHead = res.length <= limit + if (!isHead) { + res.pop() + } + + if (this.params.order === SortingOrder.Ascending) { + const reversed = res.reverse() + this.result.prepend(reversed) + } else { + this.result.append(res) + } + this.result.setHead(isHead) + await this.notify() + return res + }) + } + + removeCallback (): void { + this.callback = () => {} + } + + setCallback (callback: QueryCallback): void { + this.callback = callback + void this.notify() + } + + copyResult (): QueryResult | undefined { + if (this.result instanceof Promise) { + return undefined + } + + return this.result.copy() + } + + private async find (params: FindNotificationContextParams): Promise { + const contexts = await this.client.findNotificationContexts(params, this.id) + if (params.notifications?.message !== true) return contexts + + await Promise.all( + contexts.map(async (context) => { + const notifications = context.notifications ?? 
[] + + context.notifications = await Promise.all( + notifications.map(async (notification) => { + if (notification.message != null || notification.messageId == null) return notification + + const message = await loadMessageFromGroup( + notification.messageId, + this.workspace, + this.filesUrl, + notification.messageGroup, + notification.patches + ) + if (message !== undefined) { + return { + ...notification, + message + } + } + + return notification + }) + ) + return context + }) + ) + + return contexts + } + + private async onCreateNotificationContextEvent (event: NotificationContextCreatedEvent): Promise { + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const context = event.context + + if (this.result.get(context.id) !== undefined) { + return + } + + await this.addContext(context) + void this.notify() + } + + private async onCreatePatchEvent (event: PatchCreatedEvent): Promise { + if (this.params.notifications == null) return + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const context = this.result.getResult().find((it) => it.card === event.card) + if (context === undefined || (context.notifications ?? []).length === 0) return + + const hasMessage = context.notifications?.some((it) => it.messageId === event.patch.message) ?? false + if (!hasMessage) return + + this.result.update({ + ...context, + notifications: context.notifications?.map((it) => ({ + ...it, + message: + it.messageId === event.patch.message && it.message != null + ? applyPatch(it.message, event.patch, allowedPatchTypes) + : it.message + })) + }) + + void this.notify() + } + + private async onMessagesRemovedEvent (event: MessagesRemovedEvent): Promise { + if (this.params.notifications == null) return + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const context = this.result.getResult().find((it) => it.card === event.card) + + if (context === undefined) return + const filtered = (context.notifications ?? []).filter( + (it) => it.messageId == null || !event.messages.includes(it.messageId) + ) + if (filtered.length === (context.notifications?.length ?? 0)) return + const contextUpdated = (await this.find({ id: context.id, limit: 1, notifications: this.params.notifications }))[0] + if (contextUpdated !== undefined) { + this.result.update(contextUpdated) + } else { + this.result.update({ + ...context, + notifications: filtered + }) + } + void this.notify() + } + + private async onRemoveNotificationEvent (event: NotificationsRemovedEvent): Promise { + if (this.params.notifications == null) return + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const context = this.result.get(event.context) + if (context === undefined) return + + const filtered = (context.notifications ?? []).filter((it) => it.created > event.untilDate) + if (filtered.length === (context.notifications?.length ?? 
0)) return + + const contextUpdated = (await this.find({ id: context.id, limit: 1, notifications: this.params.notifications }))[0] + if (contextUpdated !== undefined) { + this.result.update(contextUpdated) + } else { + this.result.update({ + ...context, + notifications: filtered + }) + } + void this.notify() + } + + private async onCreateNotificationEvent (event: NotificationCreatedEvent): Promise { + if (this.params.notifications == null) return + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const context = this.result.get(event.notification.context) + if (context !== undefined) { + const message = + this.params.notifications.message === true + ? ( + await this.client.findMessages({ + card: context.card, + id: event.notification.messageId + }) + )[0] + : undefined + + const notifications = [ + { + ...event.notification, + message + }, + ...(context.notifications ?? []) + ] + if (notifications.length > this.params.notifications.limit) { + notifications.pop() + } + this.result.update({ + ...context, + notifications + }) + void this.notify() + } else { + const newContext = ( + await this.find({ id: event.notification.context, notifications: this.params.notifications, limit: 1 }) + )[0] + if (newContext !== undefined) { + await this.addContext(newContext) + void this.notify() + } + } + } + + private async onRemoveNotificationContextEvent (event: NotificationContextRemovedEvent): Promise { + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + this.result.delete(event.context) + } + + private async onUpdateNotificationContextEvent (event: NotificationContextUpdatedEvent): Promise { + if (this.forward instanceof Promise) this.forward = await this.forward + if (this.backward instanceof Promise) this.backward = await this.backward + if (this.result instanceof Promise) this.result = await this.result + + const toUpdate = this.result.get(event.context) + + if (toUpdate !== undefined) { + const notifications = this.filterNotifications( + event.lastView != null + ? (toUpdate.notifications ?? []).map((it) => ({ + ...it, + read: event.lastView != null && event.lastView >= it.created + })) + : toUpdate.notifications ?? [] + ) + + if ( + notifications.length < (toUpdate.notifications?.length ?? 0) && + this.params.notifications?.order !== SortingOrder.Descending + ) { + const updated: NotificationContext = ( + await this.find({ id: event.context, limit: 1, notifications: this.params.notifications }) + )[0] + if (updated !== undefined) { + this.result.update(updated) + } else { + const updated: NotificationContext = { + ...toUpdate, + lastUpdate: event.lastUpdate ?? toUpdate.lastUpdate, + lastView: event.lastView ?? toUpdate.lastView, + notifications + } + this.result.update(updated) + } + } else { + const updated: NotificationContext = { + ...toUpdate, + lastUpdate: event.lastUpdate ?? toUpdate.lastUpdate, + lastView: event.lastView ?? toUpdate.lastView, + notifications + } + this.result.update(updated) + } + if (event.lastUpdate != null) { + this.result.sort((a, b) => + this.params.order === SortingOrder.Descending + ? 
b.lastUpdate.getTime() - a.lastUpdate.getTime() + : a.lastUpdate.getTime() - b.lastUpdate.getTime() + ) + } + void this.notify() + } + } + + private filterNotifications (notifications: Notification[]): Notification[] { + if (this.params.notifications == null) return notifications + const read = this.params.notifications.read + if (read == null) return notifications + + return notifications.filter((it) => it.read === read) + } + + private async addContext (context: NotificationContext): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (this.result.get(context.id) !== undefined) return + if (this.result.isTail()) { + if (this.params.order === SortingOrder.Ascending) { + this.result.push(context) + } else { + this.result.unshift(context) + } + } + + if (this.params.limit != null && this.result.length > this.params.limit) { + this.result.pop() + } + } + + private async notify (): Promise { + if (this.callback === undefined) return + if (this.result instanceof Promise) { + this.result = await this.result + } + + const result = this.result.getResult() + const isTail = this.result.isTail() + const isHead = this.result.isHead() + + const window = new WindowImpl(result, isTail, isHead, this) + this.callback(window) + } +} diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index 33a29525377..128ef28c04d 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -2,7 +2,7 @@ // Copyright © 2025 Hardcore Engineering Inc. // // Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may +// You may not use this file except in compliance with the License. You may // obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 // // Unless required by applicable law or agreed to in writing, software @@ -15,122 +15,239 @@ import { type FindNotificationsParams, - SortingOrder, type Notification, - type MessageID + SortingOrder, + type WorkspaceID } from '@hcengineering/communication-types' import { + type NotificationContextRemovedEvent, + type NotificationContextUpdatedEvent, type NotificationCreatedEvent, - ResponseEventType, + type NotificationsRemovedEvent, + type QueryCallback, + type RequestEvent, type ResponseEvent, - type NotificationContextRemovedEvent, - type NotificationRemovedEvent, - type NotificationContextUpdatedEvent + ResponseEventType } from '@hcengineering/communication-sdk-types' -import { BaseQuery } from '../query' +import { defaultQueryParams, type PagedQuery, type QueryId, type QueryClient } from '../types' +import { QueryResult } from '../result' +import { WindowImpl } from '../window' +import { loadMessageFromGroup } from '../utils' -export class NotificationQuery extends BaseQuery { - override async find (params: FindNotificationsParams): Promise { - return await this.client.findNotifications(params, this.id) - } +export class NotificationQuery implements PagedQuery { + private result: QueryResult | Promise> - override getObjectId (object: Notification): MessageID { - return object.message.id + constructor ( + private readonly client: QueryClient, + private readonly workspace: WorkspaceID, + private readonly filesUrl: string, + public readonly id: QueryId, + public readonly params: FindNotificationsParams, + private callback?: QueryCallback, + initialResult?: QueryResult + ) { + const limit = this.params.limit ?? 
defaultQueryParams.limit + const findParams = { ...this.params, sort: this.params.order ?? defaultQueryParams.order, limit: limit + 1 } + + if (initialResult !== undefined) { + this.result = initialResult + void this.notify() + } else { + this.result = this.initResult(findParams, limit) + } } - override getObjectDate (object: Notification): Date { - return object.message.created + private async initResult (findParams: FindNotificationsParams, limit: number): Promise> { + try { + const res = await this.find(findParams) + const isComplete = res.length <= limit + if (!isComplete) res.pop() + + const result = new QueryResult(res, (it) => it.id) + result.setTail(isComplete) + result.setHead(isComplete) + + void this.notify() + return result + } catch (error) { + console.error('Failed to initialize query:', error) + return new QueryResult([] as Notification[], (it) => it.id) + } } - override async onEvent (event: ResponseEvent): Promise { + async onEvent (event: ResponseEvent): Promise { switch (event.type) { - case ResponseEventType.NotificationCreated: { - await this.onCreateNotificationEvent(event) - return - } - case ResponseEventType.NotificationRemoved: { - await this.onRemoveNotificationEvent(event) - return - } - case ResponseEventType.NotificationContextUpdated: { - await this.onUpdateNotificationContextEvent(event) - return - } - case ResponseEventType.NotificationContextRemoved: { - await this.onRemoveNotificationContextEvent(event) - } + case ResponseEventType.NotificationCreated: + { await this.onCreateNotificationEvent(event); break } + case ResponseEventType.NotificationsRemoved: + { await this.onRemoveNotificationsEvent(event); break } + case ResponseEventType.NotificationContextUpdated: + { await this.onUpdateNotificationContextEvent(event); break } + case ResponseEventType.NotificationContextRemoved: + { await this.onRemoveNotificationContextEvent(event) } } } - async onCreateNotificationEvent (event: NotificationCreatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + async onRequest (event: RequestEvent): Promise {} + + async unsubscribe (): Promise { + await this.client.unsubscribeQuery(this.id) + } - const exists = this.result.get(event.notification.message.id) - if (exists !== undefined) return + async requestLoadNextPage (): Promise { + if (this.result instanceof Promise) this.result = await this.result - if (this.params.message != null && this.params.message !== event.notification.message.id) return - if (this.params.context != null && this.params.context !== event.notification.context) return + await this.loadPage(SortingOrder.Ascending, this.result.getLast()?.created) + } - if (this.result.isTail()) { - if (this.params.order === SortingOrder.Ascending) { - this.result.push(event.notification) + async requestLoadPrevPage (): Promise { + if (this.result instanceof Promise) this.result = await this.result + await this.loadPage(SortingOrder.Descending, this.result.getFirst()?.created) + } + + private async loadPage (order: SortingOrder, created?: Date): Promise { + if (!created) return + if (this.result instanceof Promise) this.result = await this.result + + const limit = this.getLimit() + const findParams: FindNotificationsParams = { + ...this.params, + created: order === SortingOrder.Ascending ? 
{ greater: created } : { less: created }, + limit: limit + 1, + order + } + + try { + const res = await this.find(findParams) + const isComplete = res.length <= limit + if (!isComplete) res.pop() + + if (order === SortingOrder.Ascending) { + this.result.append(res) + this.result.setTail(isComplete) } else { - this.result.unshift(event.notification) + this.result.prepend(res) + this.result.setHead(isComplete) } + await this.notify() + } catch (error) { + console.error(`Failed to load ${order === SortingOrder.Ascending ? 'next' : 'previous'} page:`, error) } } - private async onUpdateNotificationContextEvent (event: NotificationContextUpdatedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result + removeCallback (): void { + this.callback = () => {} + } + + setCallback (callback: QueryCallback): void { + this.callback = callback + void this.notify() + } + + copyResult (): QueryResult | undefined { + return this.result instanceof Promise ? undefined : this.result.copy() + } + + private async find (params: FindNotificationsParams): Promise { + const notifications = await this.client.findNotifications(params, this.id) + if (!params.message) return notifications + + return await Promise.all( + notifications.map(async (notification) => { + if (notification.message || !notification.messageId) return notification + const message = await loadMessageFromGroup( + notification.messageId, + this.workspace, + this.filesUrl, + notification.messageGroup, + notification.patches + ) + return message ? { ...notification, message } : notification + }) + ) + } + + private async onCreateNotificationEvent (event: NotificationCreatedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result + if (this.result.get(event.notification.id)) return + if (!this.result.isTail()) return + if (this.params.context && this.params.context !== event.notification.context) return + + const notifications = this.filterNotifications([event.notification]) + if (notifications.length === 0) return + + if (this.params.order === SortingOrder.Ascending) { + this.result.push(event.notification) + } else { + this.result.unshift(event.notification) } + await this.notify() + } + + private async onUpdateNotificationContextEvent (event: NotificationContextUpdatedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result if (this.params.context != null && this.params.context !== event.context) return - if (event.update.lastView === undefined && event.update.archivedFrom === undefined) return - const toUpdate = - this.params.context === event.context - ? this.result.getResult() - : this.result.getResult().filter((it) => it.context === event.context) + const lastView = event.lastView + if (lastView === undefined) return + + const toUpdate = this.result.getResult().filter((it) => it.context === event.context) if (toUpdate.length === 0) return - for (const notification of toUpdate) { - this.result.update({ - ...notification, - ...(event.update.lastView !== undefined - ? { - read: event.update.lastView < notification.message.created - } - : {}), - ...(event.update.archivedFrom !== undefined - ? 
{ - archived: event.update.archivedFrom < notification.message.created - } - : {}) - }) + const updated: Notification[] = toUpdate.map((it) => ({ + ...it, + read: lastView >= it.created + })) + const filtered = this.filterNotifications(updated) + + if (filtered.length < this.getLimit() && filtered.length < toUpdate.length) { + if (this.result.length < this.getLimit()) { + for (const notification of updated) { + const allowed = filtered.some((it) => it.messageId === notification.messageId) + if (allowed) { + this.result.update(notification) + } else { + this.result.delete(notification.id) + } + } + void this.notify() + } else { + await this.reinit(this.result.length) + } + } else { + for (const notification of filtered) { + this.result.update(notification) + } + void this.notify() } } - private async onRemoveNotificationEvent (event: NotificationRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + private async onRemoveNotificationsEvent (event: NotificationsRemovedEvent): Promise { + if (this.result instanceof Promise) this.result = await this.result - const deleted = this.result.delete(event.message) + const notifications = this.result.getResult() + const length = this.result.length + let isDeleted = false - if (deleted !== undefined) { - await this.notify() + for (const notification of notifications) { + if (notification.created <= event.untilDate) { + isDeleted = true + this.result.delete(notification.id) + } + } + + if (length >= this.getLimit() && this.result.length < this.getLimit()) { + void this.reinit(this.result.length) + } else if (isDeleted) { + void this.notify() } } private async onRemoveNotificationContextEvent (event: NotificationContextRemovedEvent): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } + if (this.result instanceof Promise) this.result = await this.result if (this.params.context != null && this.params.context !== event.context) return @@ -139,15 +256,60 @@ export class NotificationQuery extends BaseQuery it.context === event.context) if (toRemove.length === 0) return + const length = this.result.length for (const notification of toRemove) { - this.result.delete(notification.message.id) + this.result.delete(notification.id) + } + + if (length >= this.getLimit() && this.result.length < this.getLimit()) { + void this.reinit(this.result.length) + } else { + void this.notify() } - await this.notify() } } + + private async notify (): Promise { + if (!this.callback) return + if (this.result instanceof Promise) this.result = await this.result + + const window = new WindowImpl(this.result.getResult(), this.result.isTail(), this.result.isHead(), this) + this.callback(window) + } + + private getLimit (): number { + return this.params.limit ?? 
defaultQueryParams.limit + } + + private filterNotifications (notifications: Notification[]): Notification[] { + const read = this.params.read + if (read == null) return notifications + + return notifications.filter((it) => it.read === read) + } + + private async reinit (limit: number): Promise { + if (this.result instanceof Promise) this.result = await this.result + this.result = this.find({ ...this.params, limit: limit + 1 }).then((res) => { + const isTail = res.length <= limit + const isHead = res.length <= limit + if (res.length > limit) { + res.pop() + } + + const result = new QueryResult(res, (it) => it.id) + result.setHead(isHead) + result.setTail(isTail) + return result + }) + void this.result.then((res) => { + void this.notify() + return res + }) + } } diff --git a/packages/query/src/query.ts b/packages/query/src/query.ts deleted file mode 100644 index 084f2af846c..00000000000 --- a/packages/query/src/query.ts +++ /dev/null @@ -1,208 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import { SortingOrder, type ID } from '@hcengineering/communication-types' -import { type ResponseEvent, type QueryCallback, type QueryClient } from '@hcengineering/communication-sdk-types' - -import { QueryResult } from './result' -import { defaultQueryParams, type FindParams, type PagedQuery, type QueryId } from './types' -import { WindowImpl } from './window' - -export class BaseQuery implements PagedQuery { - protected result: QueryResult | Promise> - private forward: Promise | T[] = [] - private backward: Promise | T[] = [] - - constructor( - protected readonly client: QueryClient, - public readonly id: QueryId, - public readonly params: P, - private callback?: QueryCallback, - initialResult?: QueryResult - ) { - if (initialResult !== undefined) { - this.result = initialResult - void this.notify() - } else { - const limit = this.params.limit ?? defaultQueryParams.limit - const findParams = { - ...this.params, - sort: this.params.order ?? 
defaultQueryParams.order, - limit: limit + 1 - } - - const findPromise = this.find(findParams) - this.result = findPromise.then((res) => { - const isTail = false - const isHead = false - if (!isTail) { - res.pop() - } - const qResult = new QueryResult(res, (x) => this.getObjectId(x)) - qResult.setTail(isTail) - qResult.setHead(isHead) - - return qResult - }) - this.result - .then(async () => { - await this.notify() - }) - .catch((err: any) => { - console.error('Failed to update Live query: ', err) - }) - } - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - protected async find(params: FindParams): Promise { - /* Implement in subclass */ - return [] as T[] - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - protected getObjectId(object: T): ID { - /* Implement in subclass */ - return '' as ID - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - protected getObjectDate(object: T): Date { - /* Implement in subclass */ - return new Date(0) - } - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async onEvent(event: ResponseEvent): Promise { - /* Implement in subclass */ - } - - setCallback(callback: QueryCallback): void { - this.callback = callback - void this.notify() - } - - removeCallback(): void { - this.callback = () => {} - } - - protected async notify(): Promise { - if (this.callback === undefined) return - if (this.result instanceof Promise) { - this.result = await this.result - } - - const result = this.result.getResult() - const isTail = this.result.isTail() - const isHead = this.result.isHead() - - const window = new WindowImpl(result, isTail, isHead, this) - this.callback(window) - } - - async requestLoadNextPage(): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } - if (this.forward instanceof Promise) { - this.forward = await this.forward - } - - if (this.result.isTail()) return - - const last = this.result.getLast() - if (last === undefined) return - - const limit = this.params.limit ?? defaultQueryParams.limit - const findParams: FindParams = { - ...this.params, - from: this.getObjectDate(last), - excluded: true, - limit: limit + 1, - order: SortingOrder.Ascending - } - - const forward = this.find(findParams) - - this.forward = forward.then(async (res) => { - if (this.result instanceof Promise) { - this.result = await this.result - } - const isTail = res.length <= limit - if (!isTail) { - res.pop() - } - this.result.append(res) - this.result.setTail(isTail) - await this.notify() - return res - }) - } - - async requestLoadPrevPage(): Promise { - if (this.result instanceof Promise) { - this.result = await this.result - } - if (this.backward instanceof Promise) { - this.backward = await this.backward - } - - if (this.result.isHead()) return - - const first = this.params.order === SortingOrder.Ascending ? this.result.getFirst() : this.result.getLast() - if (first === undefined) return - - const limit = this.params.limit ?? 
defaultQueryParams.limit - const findParams: FindParams = { - ...this.params, - from: this.getObjectDate(first), - limit: limit + 1, - order: SortingOrder.Descending - } - - const backward = this.find(findParams) - this.backward = backward.then(async (res) => { - if (this.result instanceof Promise) { - this.result = await this.result - } - const isHead = res.length <= limit - if (!isHead) { - res.pop() - } - - if (this.params.order === SortingOrder.Ascending) { - const reversed = res.reverse() - this.result.prepend(reversed) - } else { - this.result.append(res) - } - this.result.setHead(isHead) - await this.notify() - return res - }) - } - - copyResult(): QueryResult | undefined { - if (this.result instanceof Promise) { - return undefined - } - - return this.result.copy() - } - - async unsubscribe(): Promise { - await this.client.unsubscribeQuery(this.id) - } -} diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts index 31a45ccb814..a896cfab3aa 100644 --- a/packages/query/src/result.ts +++ b/packages/query/src/result.ts @@ -74,6 +74,14 @@ export class QueryResult { this.objectById = new Map([[this.getId(object), object], ...this.objectById]) } + pop(): T | undefined { + const array = Array.from(this.objectById.values()) + const last = array[array.length - 1] + if (last === undefined) return + this.objectById.delete(this.getId(last)) + return last + } + update(object: T): void { this.objectById.set(this.getId(object), object) } @@ -97,6 +105,12 @@ export class QueryResult { } } + sort(compare: (a: T, b: T) => number): void { + const current = Array.from(this.objectById.values()) + const sorted = current.sort(compare) + this.objectById = new Map(sorted.map<[ID, T]>((object) => [this.getId(object), object])) + } + copy(): QueryResult { const copy = new QueryResult(Array.from(this.objectById.values()), this.getId) diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 95d32a17632..ca8d8dc2468 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -14,7 +14,19 @@ // import { type ResponseEvent } from '@hcengineering/communication-sdk-types' -import { SortingOrder, type Window } from '@hcengineering/communication-types' +import { + SortingOrder, + type FindMessagesGroupsParams, + type FindMessagesParams, + type FindNotificationContextParams, + type FindNotificationsParams, + type Message, + type MessagesGroup, + type NotificationContext, + type Window, + type Notification +} from '@hcengineering/communication-types' +import type { EventResult, RequestEvent } from '@hcengineering/communication-sdk-types' import { QueryResult } from './result' @@ -37,13 +49,30 @@ export interface PagedQuery { readonly params: P onEvent: (event: ResponseEvent) => Promise + onRequest: (event: RequestEvent, promise: Promise) => Promise + + unsubscribe: () => Promise requestLoadNextPage: () => Promise requestLoadPrevPage: () => Promise - unsubscribe: () => Promise - - setCallback: (callback: (window: Window) => void) => void removeCallback: () => void + setCallback: (callback: (window: Window) => void) => void copyResult: () => QueryResult | undefined } + +export interface QueryClient { + onEvent(event: ResponseEvent): void + + onRequest(event: RequestEvent, promise: Promise): void + + findMessages(params: FindMessagesParams, queryId?: number): Promise + + findMessagesGroups(params: FindMessagesGroupsParams): Promise + + findNotificationContexts(params: FindNotificationContextParams, queryId?: number): Promise + + findNotifications(params: 
FindNotificationsParams, queryId?: number): Promise + + unsubscribeQuery(id: number): Promise +} diff --git a/packages/query/src/utils.ts b/packages/query/src/utils.ts new file mode 100644 index 00000000000..42c5910dcbe --- /dev/null +++ b/packages/query/src/utils.ts @@ -0,0 +1,35 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { applyPatches } from '@hcengineering/communication-shared' +import type { Message, MessageID, MessagesGroup, Patch, WorkspaceID } from '@hcengineering/communication-types' +import { loadGroupFile } from '@hcengineering/communication-yaml' + +export async function loadMessageFromGroup( + id: MessageID, + workspace: WorkspaceID, + filesUrl: string, + group?: MessagesGroup, + patches: Patch[] = [] +): Promise { + if (group == null) return + + const parsedFile = await loadGroupFile(workspace, filesUrl, group, { retries: 5 }) + + const message = parsedFile.messages.find((it) => it.id === id) + if (message == null) return + + return applyPatches(message, patches) +} diff --git a/packages/rest-client/package.json b/packages/rest-client/package.json new file mode 100644 index 00000000000..0ece05e977d --- /dev/null +++ b/packages/rest-client/package.json @@ -0,0 +1,38 @@ +{ + "name": "@hcengineering/communication-rest-client", + "version": "0.1.147", + "main": "dist/index.cjs", + "module": "dist/index.js", + "types": "./types/index.d.ts", + "files": [ + "dist/index.js", + "dist/index.cjs", + "types/**/*.d.ts" + ], + "scripts": { + "bundle": "bun run bundle:browser && bun run bundle:node", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs" + }, + "devDependencies": { + "@types/bun": "^1.1.14", + "@types/snappyjs": "^0.7.1" + }, + "peerDependencies": { + "typescript": "^5.6.3" + }, + "dependencies": { + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", + "@hcengineering/communication-sdk-types": "workspace:*", + "@hcengineering/core": "^0.7.28", + "snappyjs": "^0.7.0" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/hcengineering/communication.git" + }, + "publishConfig": { + "registry": "https://npm.pkg.github.com" + } +} diff --git a/packages/client-sqlite/src/index.ts b/packages/rest-client/src/index.ts similarity index 86% rename from packages/client-sqlite/src/index.ts rename to packages/rest-client/src/index.ts index 9ad2a8e4bac..d371f8c07ec 100644 --- a/packages/client-sqlite/src/index.ts +++ b/packages/rest-client/src/index.ts @@ -13,5 +13,5 @@ // limitations under the License. 
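// --- Illustrative sketch (not part of the patch): one way the new
// --- loadMessageFromGroup helper from packages/query/src/utils.ts could be
// --- called. The wrapper name, the list of candidate groups and the empty
// --- patch list are assumptions for the example; the relative './utils'
// --- import assumes the sketch lives next to that file.
import type { Message, MessageID, MessagesGroup, Patch, WorkspaceID } from '@hcengineering/communication-types'
import { loadMessageFromGroup } from './utils'

async function resolveArchivedMessage (
  id: MessageID,
  workspace: WorkspaceID,
  filesUrl: string,
  groups: MessagesGroup[],
  patches: Patch[] = []
): Promise<Message | undefined> {
  // Try each candidate group until the message is found; loadMessageFromGroup
  // returns undefined when the group is missing or does not contain the id.
  for (const group of groups) {
    const message = await loadMessageFromGroup(id, workspace, filesUrl, group, patches)
    if (message !== undefined) return message
  }
  return undefined
}
// --- end of sketch ---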
// -export { type Client } from '@hcengineering/communication-sdk-types' -export * from './client' +export { createRestClient } from './rest' +export type { RestClient } from './types' diff --git a/packages/rest-client/src/rest.ts b/packages/rest-client/src/rest.ts new file mode 100644 index 00000000000..7d6e450d398 --- /dev/null +++ b/packages/rest-client/src/rest.ts @@ -0,0 +1,164 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { concatLink } from '@hcengineering/core' +import { PlatformError, unknownError } from '@hcengineering/platform' +import type { EventResult, RequestEvent } from '@hcengineering/communication-sdk-types' +import type { + FindMessagesGroupsParams, + FindMessagesParams, + FindNotificationContextParams, + Message, + MessagesGroup, + NotificationContext, + FindNotificationsParams, + Notification +} from '@hcengineering/communication-types' +import { retry } from '@hcengineering/communication-shared' + +import { extractJson } from './utils' +import type { RestClient } from './types' + +const retries = 3 + +export function createRestClient(endpoint: string, workspaceId: string, token: string): RestClient { + return new RestClientImpl(endpoint, workspaceId, token) +} + +class RestClientImpl implements RestClient { + endpoint: string + + constructor( + endpoint: string, + readonly workspace: string, + readonly token: string + ) { + this.endpoint = endpoint.replace('ws', 'http') + } + + private jsonHeaders(): Record { + return { + 'Content-Type': 'application/json', + Authorization: 'Bearer ' + this.token, + 'accept-encoding': 'snappy, gzip' + } + } + + private requestInit(): RequestInit { + return { + method: 'GET', + keepalive: true, + headers: this.jsonHeaders() + } + } + + async event(event: RequestEvent): Promise { + const response = await fetch(concatLink(this.endpoint, `/api/v1/event/${this.workspace}`), { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: 'Bearer ' + this.token + }, + keepalive: true, + body: JSON.stringify(event) + }) + if (!response.ok) { + throw new PlatformError(unknownError(response.statusText)) + } + return (await response.json()) as EventResult + } + + async findMessages(params: FindMessagesParams): Promise { + const searchParams = new URLSearchParams() + if (Object.keys(params).length > 0) { + searchParams.append('params', JSON.stringify(params)) + } + const requestUrl = concatLink(this.endpoint, `/api/v1/find-messages/${this.workspace}?${searchParams.toString()}`) + + return await retry( + async () => { + const response = await fetch(requestUrl, this.requestInit()) + if (!response.ok) { + throw new PlatformError(unknownError(response.statusText)) + } + return await extractJson(response) + }, + { retries } + ) + } + + async findMessagesGroups(params: FindMessagesGroupsParams): Promise { + const searchParams = new URLSearchParams() + if (Object.keys(params).length > 0) { + searchParams.append('params', JSON.stringify(params)) + } + 
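// --- Illustrative sketch (not part of the patch): the intended call pattern
// --- for the REST client exported above. The endpoint, workspace id and token
// --- values are placeholders, and the params object is supplied by the caller;
// --- only createRestClient and findMessages from this package are assumed.
import { createRestClient } from '@hcengineering/communication-rest-client'
import type { FindMessagesParams } from '@hcengineering/communication-types'

async function listMessages (params: FindMessagesParams): Promise<void> {
  // The constructor rewrites ws(s) endpoints to http(s), so a websocket-style
  // URL can be passed directly.
  const client = createRestClient('wss://communication.example.com', 'workspace-placeholder', 'token-placeholder')
  // findMessages retries transient failures and decodes snappy/gzip responses
  // via extractJson before returning the parsed result.
  const messages = await client.findMessages(params)
  console.log('found', messages.length, 'messages')
}
// --- end of sketch ---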
const requestUrl = concatLink( + this.endpoint, + `/api/v1/find-messages-groups/${this.workspace}?${searchParams.toString()}` + ) + return await retry( + async () => { + const response = await fetch(requestUrl, this.requestInit()) + if (!response.ok) { + throw new PlatformError(unknownError(response.statusText)) + } + return await extractJson(response) + }, + { retries } + ) + } + + async findNotificationContexts(params: FindNotificationContextParams): Promise { + const searchParams = new URLSearchParams() + if (Object.keys(params).length > 0) { + searchParams.append('params', JSON.stringify(params)) + } + const requestUrl = concatLink( + this.endpoint, + `/api/v1/find-notification-contexts/${this.workspace}?${searchParams.toString()}` + ) + return await retry( + async () => { + const response = await fetch(requestUrl, this.requestInit()) + if (!response.ok) { + throw new PlatformError(unknownError(response.statusText)) + } + return await extractJson(response) + }, + { retries } + ) + } + + async findNotifications(params: FindNotificationsParams): Promise { + const searchParams = new URLSearchParams() + if (Object.keys(params).length > 0) { + searchParams.append('params', JSON.stringify(params)) + } + const requestUrl = concatLink( + this.endpoint, + `/api/v1/find-notifications/${this.workspace}?${searchParams.toString()}` + ) + return await retry( + async () => { + const response = await fetch(requestUrl, this.requestInit()) + if (!response.ok) { + throw new PlatformError(unknownError(response.statusText)) + } + return await extractJson(response) + }, + { retries } + ) + } +} diff --git a/packages/rest-client/src/types.ts b/packages/rest-client/src/types.ts new file mode 100644 index 00000000000..c16cfe12e1c --- /dev/null +++ b/packages/rest-client/src/types.ts @@ -0,0 +1,35 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import type { EventResult, RequestEvent } from '@hcengineering/communication-sdk-types' +import type { + FindMessagesGroupsParams, + FindMessagesParams, + Message, + MessagesGroup, + FindNotificationsParams, + FindNotificationContextParams, + NotificationContext, + Notification +} from '@hcengineering/communication-types' + +export interface RestClient { + findMessages: (params: FindMessagesParams) => Promise + findMessagesGroups: (params: FindMessagesGroupsParams) => Promise + findNotificationContexts: (params: FindNotificationContextParams) => Promise + findNotifications: (params: FindNotificationsParams) => Promise + + event: (event: RequestEvent) => Promise +} diff --git a/packages/rest-client/src/utils.ts b/packages/rest-client/src/utils.ts new file mode 100644 index 00000000000..02c1f391680 --- /dev/null +++ b/packages/rest-client/src/utils.ts @@ -0,0 +1,41 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { uncompress } from 'snappyjs' + +function isDateString(value: any) { + if (typeof value !== 'string') return false + const dateStringRegex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z$/ + + return dateStringRegex.test(value) +} + +function reviver(key: string, value: any) { + if (isDateString(value)) return new Date(value) + return value +} + +export async function extractJson(response: Response): Promise { + const encoding = response.headers.get('content-encoding') + if (encoding === 'snappy') { + const buffer = await response.arrayBuffer() + const decompressed = uncompress(buffer) + const decoder = new TextDecoder() + const jsonString = decoder.decode(decompressed) + return JSON.parse(jsonString, reviver) as T + } + const jsonString = await response.text() + return JSON.parse(jsonString, reviver) as T +} diff --git a/packages/client-ws/tsconfig.json b/packages/rest-client/tsconfig.json similarity index 100% rename from packages/client-ws/tsconfig.json rename to packages/rest-client/tsconfig.json diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index df292a920bd..e43b1fd9634 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.61", + "version": "0.1.147", "main": "./dist/index.cjs", "module": "./dist/index.js", "types": "./types/index.d.ts", @@ -19,7 +19,7 @@ }, "dependencies": { "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "0.7.15" + "@hcengineering/core": "^0.7.28" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index 75de33b38ce..a5b1536afa8 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -21,12 +21,12 @@ import type { Message, MessageID, NotificationContext, - NotificationContextUpdate, RichText, SocialID, Notification, FindMessagesGroupsParams, - MessagesGroup + MessagesGroup, + BlobID } from '@hcengineering/communication-types' import type { FindMessagesParams } from '@hcengineering/communication-types' @@ -40,17 +40,24 @@ export interface Client { createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise - createAttachment(card: CardID, message: MessageID, attachment: CardID, creator: SocialID): Promise - removeAttachment(card: CardID, message: MessageID, attachment: CardID): Promise + createFile( + card: CardID, + message: MessageID, + blobId: BlobID, + fileType: string, + filename: string, + creator: SocialID + ): Promise + removeFile(card: CardID, message: MessageID, blobId: BlobID): Promise createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise createNotification(message: MessageID, context: ContextID): Promise removeNotification(message: MessageID, context: ContextID): Promise - createNotificationContext(card: CardID, lastView?: Date, lastUpdate?: Date): Promise + createNotificationContext(card: 
CardID, lastView: Date, lastUpdate: Date): Promise removeNotificationContext(context: ContextID): Promise - updateNotificationContext(context: ContextID, update: NotificationContextUpdate): Promise + updateNotificationContext(context: ContextID, lastView?: Date, lastUpdate?: Date): Promise onEvent(event: ResponseEvent): void diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index a19e0d0ec18..10fd390b377 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -22,22 +22,32 @@ import type { Message, MessageID, NotificationContext, - NotificationContextUpdate, RichText, SocialID, Notification, BlobID, FindMessagesGroupsParams, MessagesGroup, - WorkspaceID, PatchType, - Thread + Thread, + AccountID, + Collaborator, + MessageType, + FindCollaboratorsParams, + NotificationID, + MessageData } from '@hcengineering/communication-types' export interface DbAdapter { - createMessage(card: CardID, content: RichText, creator: SocialID, created: Date): Promise - removeMessage(card: CardID, id: MessageID, socialIds?: SocialID[]): Promise - removeMessages(card: CardID, fromId: MessageID, toId: MessageID): Promise + createMessage( + card: CardID, + type: MessageType, + content: RichText, + creator: SocialID, + created: Date, + data?: MessageData + ): Promise + removeMessages(card: CardID, ids: MessageID[], socialIds?: SocialID[]): Promise createPatch( card: CardID, @@ -47,49 +57,47 @@ export interface DbAdapter { creator: SocialID, created: Date ): Promise - removePatches(card: CardID, fromId: MessageID, toId: MessageID): Promise - createMessagesGroup( - card: CardID, - blobId: BlobID, - fromDate: Date, - toDate: Date, - fromID: MessageID, - toID: MessageID, - count: number - ): Promise + createMessagesGroup(card: CardID, blobId: BlobID, fromSec: Date, toSec: Date, count: number): Promise removeMessagesGroup(card: CardID, blobId: BlobID): Promise createReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID, created: Date): Promise removeReaction(card: CardID, message: MessageID, reaction: string, creator: SocialID): Promise - createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise - removeAttachment(message: MessageID, attachment: CardID): Promise + createFile( + card: CardID, + message: MessageID, + blobId: BlobID, + fileType: string, + filename: string, + size: number, + creator: SocialID, + created: Date + ): Promise + removeFile(card: CardID, message: MessageID, blobId: BlobID): Promise createThread(card: CardID, message: MessageID, thread: CardID, created: Date): Promise - updateThread(thread: CardID, lastReply: Date, op: 'increment' | 'decrement'): Promise + updateThread(thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise findMessages(params: FindMessagesParams): Promise findMessagesGroups(params: FindMessagesGroupsParams): Promise findThread(thread: CardID): Promise - createNotification(message: MessageID, context: ContextID): Promise - removeNotification(message: MessageID, context: ContextID): Promise + addCollaborators(card: CardID, collaborators: AccountID[], date?: Date): Promise + removeCollaborators(card: CardID, collaborators: AccountID[]): Promise + getCollaboratorsCursor(card: CardID, date: Date, size?: number): AsyncIterable + + findCollaborators(params: FindCollaboratorsParams): Promise + + createNotification(context: ContextID, message: MessageID, created: Date): Promise + removeNotification(context: ContextID, account: AccountID, untilDate: Date): 
Promise - createContext(personalWorkspace: WorkspaceID, card: CardID, lastView?: Date, lastUpdate?: Date): Promise - updateContext(context: ContextID, update: NotificationContextUpdate): Promise - removeContext(context: ContextID): Promise + createContext(account: AccountID, card: CardID, lastUpdate: Date, lastView: Date): Promise + updateContext(context: ContextID, account: AccountID, lastUpdate?: Date, lastView?: Date): Promise + removeContext(context: ContextID, account: AccountID): Promise - findContexts( - params: FindNotificationContextParams, - personalWorkspaces: WorkspaceID[], - workspace?: WorkspaceID - ): Promise - findNotifications( - params: FindNotificationsParams, - personalWorkspace: WorkspaceID, - workspace?: WorkspaceID - ): Promise + findContexts(params: FindNotificationContextParams): Promise + findNotifications(params: FindNotificationsParams): Promise close(): void } diff --git a/packages/sdk-types/src/query.ts b/packages/sdk-types/src/query.ts index 60c479a024c..1d8c11ec1c0 100644 --- a/packages/sdk-types/src/query.ts +++ b/packages/sdk-types/src/query.ts @@ -15,19 +15,6 @@ import type { Message, Window, Notification } from '@hcengineering/communication-types' -import type { Client } from './client' - -export type QueryClient = Pick< - Client, - | 'onEvent' - | 'findMessages' - | 'findMessagesGroups' - | 'findNotificationContexts' - | 'findNotifications' - | 'unsubscribeQuery' - | 'close' -> - export type QueryCallback = (window: Window) => void export type MessagesQueryCallback = QueryCallback diff --git a/packages/sdk-types/src/requestEvent.ts b/packages/sdk-types/src/requestEvent.ts index 9b8b9cc03b9..4642c8a24ba 100644 --- a/packages/sdk-types/src/requestEvent.ts +++ b/packages/sdk-types/src/requestEvent.ts @@ -17,95 +17,94 @@ import type { CardID, ContextID, MessageID, - NotificationContextUpdate, RichText, SocialID, MessagesGroup, - BlobID + BlobID, + AccountID, + PatchType, + MessageType, + MessageData } from '@hcengineering/communication-types' export enum RequestEventType { CreateMessage = 'createMessage', - RemoveMessage = 'removeMessage', RemoveMessages = 'removeMessages', CreatePatch = 'createPatch', - RemovePatches = 'removePatches', CreateReaction = 'createReaction', RemoveReaction = 'removeReaction', - CreateAttachment = 'createAttachment', - RemoveAttachment = 'removeAttachment', + CreateFile = 'createFile', + RemoveFile = 'removeFile', CreateThread = 'createThread', + UpdateThread = 'updateThread', CreateMessagesGroup = 'createMessagesGroup', RemoveMessagesGroup = 'removeMessagesGroup', + AddCollaborators = 'addCollaborators', + RemoveCollaborators = 'removeCollaborators', + CreateNotification = 'createNotification', - RemoveNotification = 'removeNotification', + RemoveNotifications = 'removeNotifications', CreateNotificationContext = 'createNotificationContext', RemoveNotificationContext = 'removeNotificationContext', UpdateNotificationContext = 'updateNotificationContext' } +type BaseRequestEvent = { + _id?: string +} + export type RequestEvent = + | AddCollaboratorsEvent + | CreateFileEvent | CreateMessageEvent - | RemoveMessageEvent - | RemoveMessagesEvent + | CreateMessagesGroupEvent + | CreateNotificationContextEvent + | CreateNotificationEvent | CreatePatchEvent - | RemovePatchesEvent | CreateReactionEvent - | RemoveReactionEvent - | CreateAttachmentEvent - | RemoveAttachmentEvent - | CreateNotificationEvent - | RemoveNotificationEvent - | CreateNotificationContextEvent + | CreateThreadEvent + | RemoveFileEvent + | RemoveCollaboratorsEvent + | 
RemoveMessagesEvent + | RemoveMessagesGroupEvent | RemoveNotificationContextEvent + | RemoveNotificationsEvent + | RemoveReactionEvent | UpdateNotificationContextEvent - | CreateMessagesGroupEvent - | RemoveMessagesGroupEvent - | CreateThreadEvent + | UpdateThreadEvent -export interface CreateMessageEvent { +export interface CreateMessageEvent extends BaseRequestEvent { type: RequestEventType.CreateMessage + messageType: MessageType card: CardID content: RichText creator: SocialID + data?: MessageData } -export interface RemoveMessageEvent { - type: RequestEventType.RemoveMessage - card: CardID - message: MessageID -} - -export interface RemoveMessagesEvent { +export interface RemoveMessagesEvent extends BaseRequestEvent { type: RequestEventType.RemoveMessages card: CardID - fromId: MessageID - toId: MessageID + messages: MessageID[] } -export interface CreatePatchEvent { +export interface CreatePatchEvent extends BaseRequestEvent { type: RequestEventType.CreatePatch + patchType: PatchType card: CardID message: MessageID content: RichText creator: SocialID } -export interface RemovePatchesEvent { - type: RequestEventType.RemovePatches - card: CardID - fromId: MessageID - toId: MessageID -} - -export interface CreateReactionEvent { +export interface CreateReactionEvent extends BaseRequestEvent { type: RequestEventType.CreateReaction card: CardID message: MessageID @@ -113,7 +112,7 @@ export interface CreateReactionEvent { creator: SocialID } -export interface RemoveReactionEvent { +export interface RemoveReactionEvent extends BaseRequestEvent { type: RequestEventType.RemoveReaction card: CardID message: MessageID @@ -121,69 +120,100 @@ export interface RemoveReactionEvent { creator: SocialID } -export interface CreateAttachmentEvent { - type: RequestEventType.CreateAttachment +export interface CreateFileEvent extends BaseRequestEvent { + type: RequestEventType.CreateFile card: CardID message: MessageID - attachment: CardID + blobId: BlobID + size: number + fileType: string + filename: string creator: SocialID } -export interface RemoveAttachmentEvent { - type: RequestEventType.RemoveAttachment +export interface RemoveFileEvent extends BaseRequestEvent { + type: RequestEventType.RemoveFile card: CardID message: MessageID - attachment: CardID + blobId: BlobID + creator: SocialID } -export interface CreateThreadEvent { +export interface CreateThreadEvent extends BaseRequestEvent { type: RequestEventType.CreateThread card: CardID message: MessageID thread: CardID } -export interface CreateNotificationEvent { +export interface UpdateThreadEvent extends BaseRequestEvent { + type: RequestEventType.UpdateThread + thread: CardID + replies: 'increment' | 'decrement' + lastReply?: Date +} + +export interface CreateNotificationEvent extends BaseRequestEvent { type: RequestEventType.CreateNotification - message: MessageID context: ContextID + message: MessageID + created: Date + account: AccountID } -export interface RemoveNotificationEvent { - type: RequestEventType.RemoveNotification - message: MessageID +export interface RemoveNotificationsEvent extends BaseRequestEvent { + type: RequestEventType.RemoveNotifications context: ContextID + account: AccountID + untilDate: Date } -export interface CreateNotificationContextEvent { +export interface CreateNotificationContextEvent extends BaseRequestEvent { type: RequestEventType.CreateNotificationContext card: CardID - lastView?: Date - lastUpdate?: Date + account: AccountID + lastView: Date + lastUpdate: Date } -export interface 
RemoveNotificationContextEvent { +export interface RemoveNotificationContextEvent extends BaseRequestEvent { type: RequestEventType.RemoveNotificationContext context: ContextID + account: AccountID } -export interface UpdateNotificationContextEvent { +export interface UpdateNotificationContextEvent extends BaseRequestEvent { type: RequestEventType.UpdateNotificationContext context: ContextID - update: NotificationContextUpdate + account: AccountID + lastView?: Date + lastUpdate?: Date } -export interface CreateMessagesGroupEvent { +export interface CreateMessagesGroupEvent extends BaseRequestEvent { type: RequestEventType.CreateMessagesGroup group: MessagesGroup } -export interface RemoveMessagesGroupEvent { +export interface RemoveMessagesGroupEvent extends BaseRequestEvent { type: RequestEventType.RemoveMessagesGroup card: CardID blobId: BlobID } +export interface AddCollaboratorsEvent extends BaseRequestEvent { + type: RequestEventType.AddCollaborators + card: CardID + collaborators: AccountID[] + date?: Date +} + +export interface RemoveCollaboratorsEvent extends BaseRequestEvent { + type: RequestEventType.RemoveCollaborators + card: CardID + collaborators: AccountID[] +} + export type EventResult = CreateMessageResult | CreateNotificationContextResult | {} export interface CreateMessageResult { diff --git a/packages/sdk-types/src/responseEvent.ts b/packages/sdk-types/src/responseEvent.ts index 32ebcf03926..1fdf3a5f4a7 100644 --- a/packages/sdk-types/src/responseEvent.ts +++ b/packages/sdk-types/src/responseEvent.ts @@ -14,78 +14,97 @@ // import type { - Attachment, + File, CardID, ContextID, Message, MessageID, NotificationContext, - NotificationContextUpdate, Patch, Reaction, SocialID, Notification, Thread, - MessagesGroup + MessagesGroup, + AccountID, + BlobID } from '@hcengineering/communication-types' export enum ResponseEventType { MessageCreated = 'messageCreated', - MessageRemoved = 'messageRemoved', + MessagesRemoved = 'messagesRemoved', + PatchCreated = 'patchCreated', + ReactionCreated = 'reactionCreated', ReactionRemoved = 'reactionRemoved', - AttachmentCreated = 'attachmentCreated', - AttachmentRemoved = 'attachmentRemoved', + + FileCreated = 'fileCreated', + FileRemoved = 'fileRemoved', + ThreadCreated = 'threadCreated', + ThreadUpdated = 'threadUpdated', + MessagesGroupCreated = 'messagesGroupCreated', + NotificationCreated = 'notificationCreated', - NotificationRemoved = 'notificationRemoved', + NotificationsRemoved = 'notificationsRemoved', + NotificationContextCreated = 'notificationContextCreated', NotificationContextRemoved = 'notificationContextRemoved', - NotificationContextUpdated = 'notificationContextUpdated' + NotificationContextUpdated = 'notificationContextUpdated', + + AddedCollaborators = 'addedCollaborators', + RemovedCollaborators = 'removedCollaborators' +} + +type BaseResponseEvent = { + _id?: string } export type ResponseEvent = | MessageCreatedEvent - | MessageRemovedEvent + | MessagesRemovedEvent | PatchCreatedEvent | ReactionCreatedEvent | ReactionRemovedEvent - | AttachmentCreatedEvent - | AttachmentRemovedEvent + | FileCreatedEvent + | FileRemovedEvent | NotificationCreatedEvent - | NotificationRemovedEvent + | NotificationsRemovedEvent | NotificationContextCreatedEvent | NotificationContextRemovedEvent | NotificationContextUpdatedEvent | ThreadCreatedEvent | MessagesGroupCreatedEvent + | AddedCollaboratorsEvent + | RemovedCollaboratorsEvent + | ThreadUpdatedEvent -export interface MessageCreatedEvent { +export interface MessageCreatedEvent 
extends BaseResponseEvent { type: ResponseEventType.MessageCreated message: Message } -export interface MessageRemovedEvent { - type: ResponseEventType.MessageRemoved +export interface MessagesRemovedEvent extends BaseResponseEvent { + type: ResponseEventType.MessagesRemoved card: CardID - message: MessageID + messages: MessageID[] } -export interface PatchCreatedEvent { +export interface PatchCreatedEvent extends BaseResponseEvent { type: ResponseEventType.PatchCreated card: CardID patch: Patch } -export interface ReactionCreatedEvent { +export interface ReactionCreatedEvent extends BaseResponseEvent { type: ResponseEventType.ReactionCreated card: CardID reaction: Reaction } -export interface ReactionRemovedEvent { +export interface ReactionRemovedEvent extends BaseResponseEvent { type: ResponseEventType.ReactionRemoved card: CardID message: MessageID @@ -93,56 +112,77 @@ export interface ReactionRemovedEvent { creator: SocialID } -export interface AttachmentCreatedEvent { - type: ResponseEventType.AttachmentCreated +export interface FileCreatedEvent extends BaseResponseEvent { + type: ResponseEventType.FileCreated card: CardID - attachment: Attachment + file: File } -export interface AttachmentRemovedEvent { - type: ResponseEventType.AttachmentRemoved +export interface FileRemovedEvent extends BaseResponseEvent { + type: ResponseEventType.FileRemoved card: CardID message: MessageID - attachment: CardID + blobId: BlobID + creator: SocialID } -export interface MessagesGroupCreatedEvent { +export interface MessagesGroupCreatedEvent extends BaseResponseEvent { type: ResponseEventType.MessagesGroupCreated group: MessagesGroup } -export interface ThreadCreatedEvent { +export interface ThreadCreatedEvent extends BaseResponseEvent { type: ResponseEventType.ThreadCreated thread: Thread } -export interface NotificationCreatedEvent { +export interface NotificationCreatedEvent extends BaseResponseEvent { type: ResponseEventType.NotificationCreated - personalWorkspace: string notification: Notification + account: AccountID } -export interface NotificationRemovedEvent { - type: ResponseEventType.NotificationRemoved - personalWorkspace: string - message: MessageID +export interface NotificationsRemovedEvent extends BaseResponseEvent { + type: ResponseEventType.NotificationsRemoved + untilDate: Date context: ContextID + account: AccountID } -export interface NotificationContextCreatedEvent { +export interface NotificationContextCreatedEvent extends BaseResponseEvent { type: ResponseEventType.NotificationContextCreated context: NotificationContext } -export interface NotificationContextRemovedEvent { +export interface NotificationContextRemovedEvent extends BaseResponseEvent { type: ResponseEventType.NotificationContextRemoved - personalWorkspace: string context: ContextID + account: AccountID } -export interface NotificationContextUpdatedEvent { +export interface NotificationContextUpdatedEvent extends BaseResponseEvent { type: ResponseEventType.NotificationContextUpdated - personalWorkspace: string context: ContextID - update: NotificationContextUpdate + account: AccountID + lastView?: Date + lastUpdate?: Date +} + +export interface AddedCollaboratorsEvent extends BaseResponseEvent { + type: ResponseEventType.AddedCollaborators + card: CardID + collaborators: AccountID[] +} + +export interface RemovedCollaboratorsEvent extends BaseResponseEvent { + type: ResponseEventType.RemovedCollaborators + card: CardID + collaborators: AccountID[] +} + +export interface ThreadUpdatedEvent extends BaseResponseEvent 
{ + type: ResponseEventType.ThreadUpdated + thread: CardID + replies: 'increment' | 'decrement' + lastReply?: Date } diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index a62d38c3bd1..05425d649ff 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -16,21 +16,35 @@ import type { FindMessagesGroupsParams, FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, Message, - MessagesGroup + MessagesGroup, + NotificationContext, + Notification } from '@hcengineering/communication-types' import type { Account } from '@hcengineering/core' import type { EventResult, RequestEvent } from './requestEvent.ts' export interface ConnectionInfo { - sessionId: string + sessionId?: string account: Account } export interface ServerApi { - findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise + findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number | string): Promise findMessagesGroups(info: ConnectionInfo, params: FindMessagesGroupsParams): Promise + findNotificationContexts( + info: ConnectionInfo, + params: FindNotificationContextParams, + queryId?: number | string + ): Promise + findNotifications( + info: ConnectionInfo, + params: FindNotificationsParams, + queryId?: number | string + ): Promise event(info: ConnectionInfo, event: RequestEvent): Promise diff --git a/packages/server/package.json b/packages/server/package.json index d7196acfc9d..684fb03b52f 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.61", + "version": "0.1.147", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -20,11 +20,12 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/core": "0.7.15", - "@hcengineering/server-token": "0.7.15", + "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/communication-types": "workspace:*" + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/core": "^0.7.28", + "@hcengineering/server-token": "^0.7.28" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/server/src/eventProcessor.ts b/packages/server/src/eventProcessor.ts index 13ed0e71498..681e646b6c6 100644 --- a/packages/server/src/eventProcessor.ts +++ b/packages/server/src/eventProcessor.ts @@ -13,73 +13,61 @@ // limitations under the License. 
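// --- Illustrative sketch (not part of the patch): because every response
// --- event carries a discriminating `type`, consumers can narrow the
// --- ResponseEvent union with a plain switch. The handler name and the
// --- logging are placeholders; the event shapes match the definitions above.
import { ResponseEventType, type ResponseEvent } from '@hcengineering/communication-sdk-types'

function handleResponseEvent (event: ResponseEvent): void {
  switch (event.type) {
    case ResponseEventType.MessageCreated:
      // `event.message` is fully typed here thanks to the discriminated union
      console.log('message created in card', event.message.card)
      break
    case ResponseEventType.MessagesRemoved:
      console.log('removed', event.messages.length, 'messages from', event.card)
      break
    case ResponseEventType.NotificationContextUpdated:
      console.log('context updated', event.context, 'lastView:', event.lastView)
      break
    default:
      // Other event types are ignored in this sketch
      break
  }
}
// --- end of sketch ---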
// +import { type Message, type Patch, type Reaction } from '@hcengineering/communication-types' import { - type Message, - type Patch, - type Reaction, - type Attachment, - type SocialID, - type WorkspaceID, - PatchType -} from '@hcengineering/communication-types' -import { - type CreateAttachmentEvent, - type AttachmentCreatedEvent, + type AddCollaboratorsEvent, + type ConnectionInfo, + type CreateFileEvent, type CreateMessageEvent, - type MessageCreatedEvent, + type CreateMessagesGroupEvent, + type CreateNotificationContextEvent, + type CreateNotificationEvent, type CreatePatchEvent, - type PatchCreatedEvent, type CreateReactionEvent, - type ReactionCreatedEvent, - type RemoveAttachmentEvent, - type AttachmentRemovedEvent, - type RemoveMessageEvent, - type MessageRemovedEvent, - type RemoveReactionEvent, - type ReactionRemovedEvent, - type EventResult, + type CreateThreadEvent, type DbAdapter, - type CreateNotificationEvent, - type RemoveNotificationEvent, - type CreateNotificationContextEvent, - type RemoveNotificationContextEvent, - type UpdateNotificationContextEvent, - type NotificationRemovedEvent, + type EventResult, + type FileCreatedEvent, + type FileRemovedEvent, + type MessageCreatedEvent, + type MessagesGroupCreatedEvent, + type MessagesRemovedEvent, type NotificationContextCreatedEvent, type NotificationContextRemovedEvent, type NotificationContextUpdatedEvent, - type ResponseEvent, - RequestEventType, + type NotificationsRemovedEvent, + type PatchCreatedEvent, + type ReactionCreatedEvent, + type ReactionRemovedEvent, + type RemoveCollaboratorsEvent, + type RemoveFileEvent, + type RemoveMessagesEvent, + type RemoveMessagesGroupEvent, + type RemoveNotificationContextEvent, + type RemoveNotificationsEvent, + type RemoveReactionEvent, type RequestEvent, + RequestEventType, + type ResponseEvent, ResponseEventType, - type CreateMessagesGroupEvent, - type RemoveMessagesEvent, type ThreadCreatedEvent, - type CreateThreadEvent, - type ConnectionInfo, - type RemovePatchesEvent, - type RemoveMessagesGroupEvent, - type MessagesGroupCreatedEvent + type UpdateNotificationContextEvent, + type UpdateThreadEvent } from '@hcengineering/communication-sdk-types' -import { systemAccountUuid, type Account } from '@hcengineering/core' +import { systemAccountUuid } from '@hcengineering/core' export interface Result { responseEvent?: ResponseEvent - result: EventResult + result?: EventResult } export class EventProcessor { - constructor( - private readonly db: DbAdapter, - private readonly workspace: WorkspaceID - ) {} + constructor(private readonly db: DbAdapter) {} async process(info: ConnectionInfo, event: RequestEvent): Promise { switch (event.type) { case RequestEventType.CreateMessage: return await this.createMessage(event, info) - case RequestEventType.RemoveMessage: - return await this.removeMessage(event, info) case RequestEventType.RemoveMessages: return await this.removeMessages(event, info) case RequestEventType.CreatePatch: @@ -88,14 +76,14 @@ export class EventProcessor { return await this.createReaction(event, info) case RequestEventType.RemoveReaction: return await this.removeReaction(event, info) - case RequestEventType.CreateAttachment: - return await this.createAttachment(event, info) - case RequestEventType.RemoveAttachment: - return await this.removeAttachment(event, info) + case RequestEventType.CreateFile: + return await this.createFile(event, info) + case RequestEventType.RemoveFile: + return await this.removeFile(event, info) case RequestEventType.CreateNotification: return 
await this.createNotification(event, info) - case RequestEventType.RemoveNotification: - return await this.removeNotification(event, info) + case RequestEventType.RemoveNotifications: + return await this.removeNotifications(event, info) case RequestEventType.CreateNotificationContext: return await this.createNotificationContext(event, info) case RequestEventType.RemoveNotificationContext: @@ -106,28 +94,82 @@ export class EventProcessor { return await this.createMessagesGroup(event, info) case RequestEventType.CreateThread: return await this.createThread(event, info) - case RequestEventType.RemovePatches: - return await this.removePatches(event, info) case RequestEventType.RemoveMessagesGroup: return await this.removeMessagesGroup(event, info) + case RequestEventType.AddCollaborators: + return await this.addCollaborators(event, info) + case RequestEventType.RemoveCollaborators: + return await this.removeCollaborators(event, info) + case RequestEventType.UpdateThread: + return await this.updateThread(event, info) + } + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async updateThread(event: UpdateThreadEvent, _: ConnectionInfo): Promise { + await this.db.updateThread(event.thread, event.replies, event.lastReply) + return { + responseEvent: { + _id: event._id, + type: ResponseEventType.ThreadUpdated, + thread: event.thread, + replies: event.replies, + lastReply: event.lastReply + } } } - private async createMessage(event: CreateMessageEvent, info: ConnectionInfo): Promise { - this.checkCreator(info.account, event.creator) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async addCollaborators(event: AddCollaboratorsEvent, _: ConnectionInfo): Promise { + await this.db.addCollaborators(event.card, event.collaborators, event.date) + return { + responseEvent: { + _id: event._id, + type: ResponseEventType.AddedCollaborators, + card: event.card, + collaborators: event.collaborators + } + } + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeCollaborators(event: RemoveCollaboratorsEvent, _: ConnectionInfo): Promise { + await this.db.removeCollaborators(event.card, event.collaborators) + + return { + responseEvent: { + _id: event._id, + type: ResponseEventType.RemovedCollaborators, + card: event.card, + collaborators: event.collaborators + } + } + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createMessage(event: CreateMessageEvent, _: ConnectionInfo): Promise { const created = new Date() - const id = await this.db.createMessage(event.card, event.content, event.creator, created) + const id = await this.db.createMessage( + event.card, + event.messageType, + event.content, + event.creator, + created, + event.data + ) const message: Message = { id, + type: event.messageType, card: event.card, content: event.content, creator: event.creator, created, + data: event.data, reactions: [], - attachments: [] + files: [] } const responseEvent: MessageCreatedEvent = { + _id: event._id, type: ResponseEventType.MessageCreated, message } @@ -137,69 +179,48 @@ export class EventProcessor { } } - private async createPatch(event: CreatePatchEvent, info: ConnectionInfo): Promise { - this.checkCreator(info.account, event.creator) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createPatch(event: CreatePatchEvent, _: ConnectionInfo): Promise { const created = new Date() - await this.db.createPatch(event.card, event.message, PatchType.update, event.content, 
event.creator, created) + await this.db.createPatch(event.card, event.message, event.patchType, event.content, event.creator, created) const patch: Patch = { - type: PatchType.update, + type: event.patchType, message: event.message, content: event.content, creator: event.creator, created } const responseEvent: PatchCreatedEvent = { + _id: event._id, type: ResponseEventType.PatchCreated, card: event.card, patch } return { - responseEvent, - result: {} + responseEvent } } - private async removeMessage(event: RemoveMessageEvent, info: ConnectionInfo): Promise { + private async removeMessages(event: RemoveMessagesEvent, info: ConnectionInfo): Promise { const socialIds = systemAccountUuid === info.account.uuid ? undefined : info.account.socialIds - await this.db.removeMessage(event.card, event.message, socialIds) + const deleted = await this.db.removeMessages(event.card, event.messages, socialIds) - const responseEvent: MessageRemovedEvent = { - type: ResponseEventType.MessageRemoved, + const responseEvent: MessagesRemovedEvent = { + _id: event._id, + type: ResponseEventType.MessagesRemoved, card: event.card, - message: event.message + messages: deleted } return { responseEvent, - result: {} - } - } - - private async removeMessages(event: RemoveMessagesEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account.uuid) { - throw new Error('Forbidden') - } - await this.db.removeMessages(event.card, event.fromId, event.toId) - - return { - result: {} - } - } - - private async removePatches(event: RemovePatchesEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account.uuid) { - throw new Error('Forbidden') - } - await this.db.removePatches(event.card, event.fromId, event.toId) - - return { - result: {} + result: { messages: deleted } } } - private async createReaction(event: CreateReactionEvent, info: ConnectionInfo): Promise { - this.checkCreator(info.account, event.creator) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createReaction(event: CreateReactionEvent, _: ConnectionInfo): Promise { const created = new Date() await this.db.createReaction(event.card, event.message, event.reaction, event.creator, created) @@ -210,20 +231,21 @@ export class EventProcessor { created } const responseEvent: ReactionCreatedEvent = { + _id: event._id, type: ResponseEventType.ReactionCreated, card: event.card, reaction } return { - responseEvent, - result: {} + responseEvent } } - private async removeReaction(event: RemoveReactionEvent, info: ConnectionInfo): Promise { - this.checkCreator(info.account, event.creator) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeReaction(event: RemoveReactionEvent, _: ConnectionInfo): Promise { await this.db.removeReaction(event.card, event.message, event.reaction, event.creator) const responseEvent: ReactionRemovedEvent = { + _id: event._id, type: ResponseEventType.ReactionRemoved, card: event.card, message: event.message, @@ -231,90 +253,104 @@ export class EventProcessor { creator: event.creator } return { - responseEvent, - result: {} + responseEvent } } - private async createAttachment(event: CreateAttachmentEvent, info: ConnectionInfo): Promise { - this.checkCreator(info.account, event.creator) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createFile(event: CreateFileEvent, _: ConnectionInfo): Promise { const created = new Date() - await this.db.createAttachment(event.message, event.card, event.creator, created) - - const 
attachment: Attachment = { - message: event.message, - card: event.card, - creator: event.creator, + await this.db.createFile( + event.card, + event.message, + event.blobId, + event.fileType, + event.filename, + event.size, + event.creator, created - } - const responseEvent: AttachmentCreatedEvent = { - type: ResponseEventType.AttachmentCreated, + ) + const responseEvent: FileCreatedEvent = { + _id: event._id, + type: ResponseEventType.FileCreated, card: event.card, - attachment + file: { + card: event.card, + message: event.message, + blobId: event.blobId, + type: event.fileType, + filename: event.filename, + size: event.size, + creator: event.creator, + created + } } - return { - responseEvent, - result: {} + responseEvent } } // eslint-disable-next-line @typescript-eslint/no-unused-vars - private async removeAttachment(event: RemoveAttachmentEvent, _: ConnectionInfo): Promise { - await this.db.removeAttachment(event.message, event.card) - const responseEvent: AttachmentRemovedEvent = { - type: ResponseEventType.AttachmentRemoved, + private async removeFile(event: RemoveFileEvent, _: ConnectionInfo): Promise { + await this.db.removeFile(event.card, event.message, event.blobId) + const responseEvent: FileRemovedEvent = { + _id: event._id, + type: ResponseEventType.FileRemoved, card: event.card, message: event.message, - attachment: event.attachment + blobId: event.blobId, + creator: event.creator } return { - responseEvent, - result: {} + responseEvent } } // eslint-disable-next-line @typescript-eslint/no-unused-vars private async createNotification(event: CreateNotificationEvent, _: ConnectionInfo): Promise { - await this.db.createNotification(event.message, event.context) + const id = await this.db.createNotification(event.context, event.message, event.created) return { - result: {} + responseEvent: { + _id: event._id, + type: ResponseEventType.NotificationCreated, + notification: { + id, + context: event.context, + messageId: event.message, + read: false, + created: event.created + }, + account: event.account + } } } - //eslint-disable-next-line @typescript-eslint/no-unused-vars - private async removeNotification(event: RemoveNotificationEvent, info: ConnectionInfo): Promise { - await this.db.removeNotification(event.message, event.context) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async removeNotifications(event: RemoveNotificationsEvent, _: ConnectionInfo): Promise { + await this.db.removeNotification(event.context, event.account, event.untilDate) - const responseEvent: NotificationRemovedEvent = { - type: ResponseEventType.NotificationRemoved, - // personalWorkspace: info.personalWorkspace, - // TODO: add personal workspace - personalWorkspace: '' as WorkspaceID, - message: event.message, - context: event.context + const responseEvent: NotificationsRemovedEvent = { + _id: event._id, + type: ResponseEventType.NotificationsRemoved, + context: event.context, + account: event.account, + untilDate: event.untilDate } return { - responseEvent, - result: {} + responseEvent } } - private async createNotificationContext( - event: CreateNotificationContextEvent, - //eslint-disable-next-line @typescript-eslint/no-unused-vars - info: ConnectionInfo - ): Promise { - // TODO: add personal workspace - const personalWorkspace = '' as WorkspaceID - const id = await this.db.createContext(personalWorkspace, event.card, event.lastView, event.lastUpdate) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + private async createNotificationContext(event: 
CreateNotificationContextEvent, _: ConnectionInfo): Promise { + const id = await this.db.createContext(event.account, event.card, event.lastUpdate, event.lastView) const responseEvent: NotificationContextCreatedEvent = { + _id: event._id, type: ResponseEventType.NotificationContextCreated, context: { id, - workspace: this.workspace, - personalWorkspace, + account: event.account, card: event.card, lastView: event.lastView, lastUpdate: event.lastUpdate @@ -329,75 +365,63 @@ export class EventProcessor { private async removeNotificationContext( event: RemoveNotificationContextEvent, // eslint-disable-next-line @typescript-eslint/no-unused-vars - info: ConnectionInfo + _: ConnectionInfo ): Promise { - await this.db.removeContext(event.context) + await this.db.removeContext(event.context, event.account) const responseEvent: NotificationContextRemovedEvent = { + _id: event._id, type: ResponseEventType.NotificationContextRemoved, - // personalWorkspace: info.personalWorkspace, - // TODO: add personal workspace - personalWorkspace: '' as WorkspaceID, - context: event.context + context: event.context, + account: event.account } return { - responseEvent, - result: {} + responseEvent } } // eslint-disable-next-line @typescript-eslint/no-unused-vars - async updateNotificationContext(event: UpdateNotificationContextEvent, info: ConnectionInfo): Promise { - await this.db.updateContext(event.context, event.update) + async updateNotificationContext(event: UpdateNotificationContextEvent, _: ConnectionInfo): Promise { + await this.db.updateContext(event.context, event.account, event.lastUpdate, event.lastView) const responseEvent: NotificationContextUpdatedEvent = { + _id: event._id, type: ResponseEventType.NotificationContextUpdated, - // personalWorkspace: info.personalWorkspace, - // TODO: add personal workspace - personalWorkspace: '' as WorkspaceID, context: event.context, - update: event.update + account: event.account, + lastView: event.lastView, + lastUpdate: event.lastUpdate } return { - responseEvent, - result: {} + responseEvent } } - async createMessagesGroup(event: CreateMessagesGroupEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account.uuid) { - throw new Error('Forbidden') - } - const { fromDate, toDate, count, fromId, toId, card, blobId } = event.group - await this.db.createMessagesGroup(card, blobId, fromDate, toDate, fromId, toId, count) + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async createMessagesGroup(event: CreateMessagesGroupEvent, _: ConnectionInfo): Promise { + const { fromSec, toSec, count, card, blobId } = event.group + await this.db.createMessagesGroup(card, blobId, fromSec, toSec, count) const responseEvent: MessagesGroupCreatedEvent = { + _id: event._id, type: ResponseEventType.MessagesGroupCreated, group: { card, blobId, - fromDate, - toDate, - fromId, - toId, + fromSec, + toSec, count } } return { - responseEvent, - result: {} + responseEvent } } - async removeMessagesGroup(event: RemoveMessagesGroupEvent, info: ConnectionInfo): Promise { - if (systemAccountUuid !== info.account.uuid) { - throw new Error('Forbidden') - } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async removeMessagesGroup(event: RemoveMessagesGroupEvent, _: ConnectionInfo): Promise { await this.db.removeMessagesGroup(event.card, event.blobId) - return { - responseEvent: undefined, - result: {} - } + return {} } // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -405,6 +429,7 @@ export class EventProcessor { const date = 
new Date() await this.db.createThread(event.card, event.message, event.thread, date) const responseEvent: ThreadCreatedEvent = { + _id: event._id, type: ResponseEventType.ThreadCreated, thread: { card: event.card, @@ -415,14 +440,7 @@ export class EventProcessor { } } return { - responseEvent, - result: {} - } - } - - private checkCreator(account: Account, creator: SocialID): void { - if (!account.socialIds.includes(creator) && systemAccountUuid !== account.uuid) { - throw new Error('Forbidden') + responseEvent } } } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 183580da18e..95fa81eb95e 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -13,13 +13,17 @@ // limitations under the License. // -import type { MeasureContext } from '@hcengineering/core' +import { type MeasureContext, systemAccountUuid } from '@hcengineering/core' import type { FindMessagesGroupsParams, FindMessagesParams, + FindNotificationContextParams, + FindNotificationsParams, Message, MessagesGroup, - WorkspaceID + NotificationContext, + WorkspaceID, + Notification } from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' import type { @@ -30,8 +34,9 @@ import type { ServerApi } from '@hcengineering/communication-sdk-types' -import { Manager, type BroadcastSessionsFunc } from './manager' +import { type BroadcastSessionsFunc, Manager } from './manager' import { getMetadata, type Metadata } from './metadata' +import type { QueryId } from './types' export class Api implements ServerApi { private readonly manager: Manager @@ -52,12 +57,14 @@ export class Api implements ServerApi { dbUrl: string, broadcast: BroadcastSessionsFunc ): Promise { - const db = await createDbAdapter(dbUrl, workspace, ctx, { withLogs: true }) + const db = await createDbAdapter(dbUrl, workspace, ctx, { + withLogs: process.env.COMMUNICATION_TIME_LOGGING_ENABLED === 'true' + }) const metadata = getMetadata() return new Api(ctx, metadata, workspace, db, broadcast) } - async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { + async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: QueryId): Promise { return await this.manager.findMessages(info, params, queryId) } @@ -65,6 +72,62 @@ export class Api implements ServerApi { return await this.manager.findMessagesGroups(info, params) } + async findNotificationContexts( + info: ConnectionInfo, + params: FindNotificationContextParams, + queryId?: QueryId + ): Promise { + const isSystem = info.account.uuid === systemAccountUuid + + if (isSystem) { + return await this.manager.findNotificationContexts(info, params, queryId) + } + + const accounts = params.account == null || Array.isArray(params.account) ? params.account : [params.account] + const withMe = accounts == null || accounts.includes(info.account.uuid) + + if (withMe) { + return await this.manager.findNotificationContexts( + info, + { + ...params, + account: info.account.uuid + }, + queryId + ) + } + + return [] + } + + async findNotifications( + info: ConnectionInfo, + params: FindNotificationsParams, + queryId?: QueryId + ): Promise { + const isSystem = info.account.uuid === systemAccountUuid + + if (isSystem) { + return await this.manager.findNotifications(info, params, queryId) + } + + const accounts = params.account == null || Array.isArray(params.account) ? 
params.account : [params.account] + const withMe = accounts == null || accounts.includes(info.account.uuid) + + if (withMe) { + return await this.manager.findNotifications( + info, + { + ...params, + account: info.account.uuid + }, + queryId + ) + } + + return [] + } + async unsubscribeQuery(info: ConnectionInfo, id: number): Promise { this.manager.unsubscribeQuery(info, id) } diff --git a/packages/server/src/manager.ts b/packages/server/src/manager.ts index d1248f72ee8..b62fb2df74d 100644 --- a/packages/server/src/manager.ts +++ b/packages/server/src/manager.ts @@ -17,13 +17,13 @@ import { type ConnectionInfo, type DbAdapter, type EventResult, - type NotificationContextCreatedEvent, - type NotificationCreatedEvent, type RequestEvent, type ResponseEvent, ResponseEventType } from '@hcengineering/communication-sdk-types' import type { + AccountID, + CardID, FindMessagesGroupsParams, FindMessagesParams, FindNotificationContextParams, @@ -31,29 +31,32 @@ import type { Message, MessageID, MessagesGroup, - WorkspaceID + NotificationContext, + WorkspaceID, + Notification } from '@hcengineering/communication-types' import type { MeasureContext } from '@hcengineering/core' import { Triggers } from './triggers' +import { Permissions } from './permissions' import { EventProcessor } from './eventProcessor' import type { Metadata } from './metadata.ts' type QueryId = number | string -type QueryType = 'message' | 'notification' | 'context' export type BroadcastSessionsFunc = (ctx: MeasureContext, sessionIds: string[], result: any) => void interface SessionInfo { + account: AccountID messageQueries: Map - notificationQueries: Map - contextQueries: Map + contextQueries: Map> } export class Manager { private readonly dataBySessionId = new Map() private readonly triggers: Triggers private readonly eventProcessor: EventProcessor + private readonly permissions: Permissions = new Permissions() constructor( private readonly ctx: MeasureContext, @@ -62,58 +65,98 @@ export class Manager { private readonly workspace: WorkspaceID, private readonly broadcast: BroadcastSessionsFunc ) { - this.eventProcessor = new EventProcessor(db, this.workspace) - this.triggers = new Triggers(this.metadata, db, this.workspace) + this.eventProcessor = new EventProcessor(db) + this.triggers = new Triggers(ctx.newChild('triggers', {}), this.metadata, db, this.workspace) } - async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: number): Promise { + async findMessages(info: ConnectionInfo, params: FindMessagesParams, queryId?: QueryId): Promise { + this.createSession(info) const result = await this.db.findMessages(params) if (queryId != null && info.sessionId != null && info.sessionId !== '') { - this.subscribeQuery(info, 'message', queryId, params) + this.subscribeMessageQuery(info, queryId, params) } return result } - async findMessagesGroups(_: ConnectionInfo, params: FindMessagesGroupsParams): Promise { + async findMessagesGroups(info: ConnectionInfo, params: FindMessagesGroupsParams): Promise { + this.createSession(info) return await this.db.findMessagesGroups(params) } + async findNotificationContexts( + info: ConnectionInfo, + params: FindNotificationContextParams, + queryId?: QueryId + ): Promise { + this.createSession(info) + + const result = await this.db.findContexts(params) + if (queryId != null && info.sessionId != null && info.sessionId !== '') { + this.subscribeContextQuery(info, queryId, params, result) + } + return result + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + 
async findNotifications(info: ConnectionInfo, params: FindNotificationsParams, _?: QueryId): Promise { + this.createSession(info) + return await this.db.findNotifications(params) + } + async event(info: ConnectionInfo, event: RequestEvent): Promise { + this.permissions.validate(info, event) + this.createSession(info) const eventResult = await this.eventProcessor.process(info, event) const { result, responseEvent } = eventResult if (responseEvent !== undefined) { void this.next(info, responseEvent) } - return result + return result ?? {} } - subscribeQuery(info: ConnectionInfo, type: QueryType, queryId: number, params: Record): void { - const { sessionId } = info - const data = this.dataBySessionId.get(sessionId) ?? { - messageQueries: new Map(), - notificationQueries: new Map(), - contextQueries: new Map() - } - if (!this.dataBySessionId.has(sessionId)) { - this.dataBySessionId.set(sessionId, data) - } + private subscribeMessageQuery(info: ConnectionInfo, queryId: QueryId, params: Record): void { + const data = this.createSession(info) + if (data == null) return - if (type === 'message') { - data.messageQueries.set(queryId, params as FindMessagesParams) - } else if (type === 'notification') { - data.notificationQueries.set(queryId, params) - } else if (type === 'context') { - data.contextQueries.set(queryId, params) + data.messageQueries.set(queryId, params as FindMessagesParams) + } + + private subscribeContextQuery( + info: ConnectionInfo, + queryId: QueryId, + params: FindNotificationContextParams, + result: NotificationContext[] + ): void { + if (params.notifications == null) return + const data = this.createSession(info) + if (data == null) return + + const cards = new Set(result.map((it) => it.card)) + const current = data.contextQueries.get(queryId) ?? new Set() + + data.contextQueries.set(queryId, new Set([...current, ...cards])) + } + + private createSession(info: ConnectionInfo): SessionInfo | undefined { + if (info.sessionId == null) return + if (!this.dataBySessionId.has(info.sessionId)) { + this.dataBySessionId.set(info.sessionId, { + account: info.account.uuid, + messageQueries: new Map(), + contextQueries: new Map() + }) } + + return this.dataBySessionId.get(info.sessionId) } unsubscribeQuery(info: ConnectionInfo, queryId: number): void { + if (info.sessionId == null) return const data = this.dataBySessionId.get(info.sessionId) if (data == null) return data.messageQueries.delete(queryId) - data.notificationQueries.delete(queryId) data.contextQueries.delete(queryId) } @@ -121,13 +164,35 @@ export class Manager { this.dataBySessionId.delete(sessionId) } - async next(info: ConnectionInfo, event: ResponseEvent): Promise { - await this.responseEvent(event) - const derived = await this.triggers.process(event, info) + close(): void { + this.db.close() + } + + private async apply(info: ConnectionInfo, derivedRequests: RequestEvent[]): Promise { + const result = await Promise.all(derivedRequests.map((it) => this.eventProcessor.process(info, it))) + const derived = result.flatMap((it) => it.responseEvent).filter((it): it is ResponseEvent => it !== undefined) await Promise.all(derived.map((it) => this.next(info, it))) } - private async responseEvent(event: ResponseEvent): Promise { + private async execute(info: ConnectionInfo, request: RequestEvent): Promise { + const result = await this.eventProcessor.process(info, request) + if (result.responseEvent != null) { + void this.next(info, result.responseEvent) + } + return result.result ?? 
{} + } + + private async next(info: ConnectionInfo, event: ResponseEvent): Promise { + await this.broadcastEvent(event) + await this.triggers.process( + event, + info, + (events) => this.apply(info, events), + (event) => this.execute(info, event) + ) + } + + private async broadcastEvent(event: ResponseEvent): Promise { const sessionIds: string[] = [] for (const [sessionId, session] of this.dataBySessionId.entries()) { if (this.match(event, session)) { @@ -148,109 +213,87 @@ export class Manager { switch (event.type) { case ResponseEventType.MessageCreated: return this.matchMessagesQuery( - { id: event.message.id, card: event.message.card }, - Array.from(info.messageQueries.values()) + { ids: [event.message.id], card: event.message.card }, + Array.from(info.messageQueries.values()), + new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) ) case ResponseEventType.PatchCreated: return this.matchMessagesQuery( - { card: event.card, id: event.patch.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: [event.patch.message] }, + Array.from(info.messageQueries.values()), + new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) ) - case ResponseEventType.MessageRemoved: + case ResponseEventType.MessagesRemoved: return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: event.messages }, + Array.from(info.messageQueries.values()), + new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) ) case ResponseEventType.ReactionCreated: return this.matchMessagesQuery( - { card: event.card, id: event.reaction.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: [event.reaction.message] }, + Array.from(info.messageQueries.values()), + new Set() ) case ResponseEventType.ReactionRemoved: return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: [event.message] }, + Array.from(info.messageQueries.values()), + new Set() ) - case ResponseEventType.AttachmentCreated: + case ResponseEventType.FileCreated: return this.matchMessagesQuery( - { card: event.card, id: event.attachment.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: [event.file.message] }, + Array.from(info.messageQueries.values()), + new Set() ) - case ResponseEventType.AttachmentRemoved: + case ResponseEventType.FileRemoved: return this.matchMessagesQuery( - { card: event.card, id: event.message }, - Array.from(info.messageQueries.values()) + { card: event.card, ids: [event.message] }, + Array.from(info.messageQueries.values()), + new Set() ) case ResponseEventType.ThreadCreated: return this.matchMessagesQuery( - { card: event.thread.card, id: event.thread.message }, - Array.from(info.messageQueries.values()) + { card: event.thread.card, ids: [event.thread.message] }, + Array.from(info.messageQueries.values()), + new Set() ) case ResponseEventType.NotificationCreated: - // return ( - // info.personalWorkspace === event.personalWorkspace && - // this.matchNotificationQuery(event, Array.from(info.notificationQueries.values())) - // ) - return false - case ResponseEventType.NotificationRemoved: - // return info.personalWorkspace === event.personalWorkspace && info.notificationQueries.size > 0 - return false + return info.account === event.account + case ResponseEventType.NotificationsRemoved: + return info.account === event.account case 
ResponseEventType.NotificationContextCreated: - // return ( - // info.personalWorkspace === event.context.personalWorkspace && - // this.matchContextQuery(event, Array.from(info.contextQueries.values())) - // ) - return false + return info.account === event.context.account case ResponseEventType.NotificationContextRemoved: - // return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 - return false + return info.account === event.account case ResponseEventType.NotificationContextUpdated: - // return info.personalWorkspace === event.personalWorkspace && info.contextQueries.size > 0 - return false + return info.account === event.account case ResponseEventType.MessagesGroupCreated: return false + case ResponseEventType.AddedCollaborators: + return true + case ResponseEventType.RemovedCollaborators: + return true + case ResponseEventType.ThreadUpdated: + return false } } - private matchMessagesQuery(params: { id?: MessageID; card?: string }, queries: FindMessagesParams[]): boolean { + private matchMessagesQuery( + params: { ids: MessageID[]; card: CardID }, + queries: FindMessagesParams[], + cards: Set + ): boolean { + if (cards.has(params.card)) return true if (queries.length === 0) return false for (const query of queries) { - if (query.id != null && query.id !== params.id) continue + if (query.id != null && !params.ids.includes(query.id)) continue if (query.card != null && query.card !== params.card) continue return true } return false } - - private matchNotificationQuery(event: NotificationCreatedEvent, queries: FindNotificationsParams[]): boolean { - if (queries.length === 0) return false - - for (const query of queries) { - if (query.context != null && query.context !== event.notification.context) continue - if (query.message != null && query.message !== event.notification.message.id) continue - if (query.read != null && query.read !== event.notification.read) continue - if (query.archived != null && query.archived !== event.notification.archived) continue - return true - } - - return false - } - - private matchContextQuery(event: NotificationContextCreatedEvent, queries: FindNotificationContextParams[]): boolean { - if (queries.length === 0) return false - - for (const query of queries) { - if (query.id != null && query.id !== event.context.id) continue - if (query.card != null && query.card !== event.context.card) continue - return true - } - - return false - } - - close(): void { - this.db.close() - } } diff --git a/packages/server/src/metadata.ts b/packages/server/src/metadata.ts index bb261412e28..7a4a0f3fe27 100644 --- a/packages/server/src/metadata.ts +++ b/packages/server/src/metadata.ts @@ -1,9 +1,26 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + export interface Metadata { msg2fileUrl: string + accountsUrl: string } export function getMetadata(): Metadata { return { - msg2fileUrl: process.env.MSG2FILE_URL ?? '' + msg2fileUrl: process.env.MSG2FILE_URL ?? 
'', + accountsUrl: process.env.ACCOUNTS_URL ?? '' } } diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts new file mode 100644 index 00000000000..8a8b5e3fa31 --- /dev/null +++ b/packages/server/src/notification/notification.ts @@ -0,0 +1,174 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { + type CreateNotificationContextResult, + type RequestEvent, + RequestEventType, + type ResponseEvent, + ResponseEventType +} from '@hcengineering/communication-sdk-types' +import {MessageType, type AccountID, type CardID, type ContextID, type Message, type NotificationContext } from '@hcengineering/communication-types' + +import type { TriggerCtx } from '../types' +import { findAccount } from '../utils' + +const BATCH_SIZE = 500 + +export async function notify (ctx: TriggerCtx, event: ResponseEvent): Promise { + switch (event.type) { + case ResponseEventType.MessageCreated: { + return await notifyMessage(ctx, event.message) + } + } + + return [] +} + +async function notifyMessage (ctx: TriggerCtx, message: Message): Promise { + const cursor = ctx.db.getCollaboratorsCursor(message.card, message.created, BATCH_SIZE) + const creatorAccount = await findAccount(ctx, message.creator) + const result: RequestEvent[] = [] + + let isFirstBatch = true + + for await (const dbCollaborators of cursor) { + const collaborators: AccountID[] = dbCollaborators.map((it) => it.account) + const contexts: NotificationContext[] = await ctx.db.findContexts({ + card: message.card, + account: isFirstBatch && collaborators.length < BATCH_SIZE ? 
undefined : collaborators
+    })
+
+    for (const collaborator of collaborators) {
+      try {
+        const context = contexts.find((it) => it.account === collaborator)
+        const res = await processCollaborator(ctx, message, collaborator, creatorAccount, context)
+        result.push(...res)
+      } catch (e) {
+        ctx.ctx.error('Error on create notification', { collaborator, error: e })
+      }
+    }
+
+    isFirstBatch = false
+  }
+
+  return result
+}
+
+async function processCollaborator (
+  ctx: TriggerCtx,
+  message: Message,
+  collaborator: AccountID,
+  creatorAccount?: AccountID,
+  context?: NotificationContext
+): Promise<RequestEvent[]> {
+  const result: RequestEvent[] = []
+  const isOwn = creatorAccount === collaborator
+  const { contextId, events } = await createOrUpdateContext(ctx, message, collaborator, isOwn, context)
+
+  result.push(...events)
+
+  if (contextId == null || isOwn) return result
+  if (message.type !== MessageType.Message) return result
+
+  result.push({
+    type: RequestEventType.CreateNotification,
+    account: collaborator,
+    context: contextId,
+    message: message.id,
+    created: message.created
+  })
+  return result
+}
+
+async function createOrUpdateContext (
+  ctx: TriggerCtx,
+  message: Message,
+  collaborator: AccountID,
+  isOwn: boolean,
+  context?: NotificationContext
+): Promise<{
+  contextId: ContextID | undefined
+  events: RequestEvent[]
+}> {
+  if (context == null) {
+    const contextId = await createContext(
+      ctx,
+      collaborator,
+      message.card,
+      message.created,
+      isOwn ? message.created : undefined
+    )
+
+    return {
+      contextId,
+      events: []
+    }
+  }
+
+  const lastUpdate =
+    context.lastUpdate == null || message.created > context.lastUpdate ? message.created : context.lastUpdate
+  const lastView = isOwn && isContextRead(context) ? message.created : undefined
+
+  return {
+    contextId: context.id,
+    events: [
+      {
+        type: RequestEventType.UpdateNotificationContext,
+        context: context.id,
+        account: collaborator,
+        lastView,
+        lastUpdate
+      }
+    ]
+  }
+}
+
+async function createContext (
+  ctx: TriggerCtx,
+  account: AccountID,
+  card: CardID,
+  lastUpdate: Date,
+  lastView?: Date
+): Promise<ContextID | undefined> {
+  try {
+    const result = (await ctx.execute({
+      type: RequestEventType.CreateNotificationContext,
+      account,
+      card,
+      lastUpdate,
+      lastView: lastView ?? new Date(lastUpdate.getTime() - 1)
+    })) as CreateNotificationContextResult
+
+    return result.id
+  } catch (e) {
+    return (
+      await ctx.db.findContexts({
+        account,
+        card
+      })
+    )[0]?.id
+  }
+}
+
+function isContextRead (context: NotificationContext): boolean {
+  const { lastView, lastUpdate } = context
+
+  if (lastView == null) {
+    return false
+  }
+
+  return lastView >= lastUpdate
+}
diff --git a/packages/server/src/permissions.ts b/packages/server/src/permissions.ts
new file mode 100644
index 00000000000..3a27ccce8cf
--- /dev/null
+++ b/packages/server/src/permissions.ts
@@ -0,0 +1,66 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// + +import { type AccountID, type SocialID } from '@hcengineering/communication-types' +import { type ConnectionInfo, type RequestEvent, RequestEventType } from '@hcengineering/communication-sdk-types' +import { type Account, systemAccountUuid } from '@hcengineering/core' + +export class Permissions { + validate(info: ConnectionInfo, event: RequestEvent): void { + switch (event.type) { + case RequestEventType.CreateMessage: + case RequestEventType.CreatePatch: + case RequestEventType.CreateReaction: + case RequestEventType.RemoveReaction: + case RequestEventType.RemoveFile: + case RequestEventType.CreateFile: { + this.checkSocialId(info.account, event.creator) + return + } + case RequestEventType.RemoveNotifications: + case RequestEventType.CreateNotificationContext: + case RequestEventType.UpdateNotificationContext: + case RequestEventType.RemoveNotificationContext: { + this.checkAccount(info.account, event.account) + return + } + case RequestEventType.CreateMessagesGroup: + case RequestEventType.RemoveMessagesGroup: { + this.onlySystemAccount(info.account) + break + } + default: + break + } + } + + private checkSocialId(account: Account, creator: SocialID): void { + if (!account.socialIds.includes(creator) && systemAccountUuid !== account.uuid) { + throw new Error('Forbidden') + } + } + + private checkAccount(account: Account, creator: AccountID): void { + if (account.uuid !== creator && systemAccountUuid !== account.uuid) { + throw new Error('Forbidden') + } + } + + private onlySystemAccount(account: Account): void { + if (systemAccountUuid !== account.uuid) { + throw new Error('Forbidden') + } + } +} diff --git a/packages/server/src/triggers.ts b/packages/server/src/triggers.ts index c82947a272c..d944ba2a70d 100644 --- a/packages/server/src/triggers.ts +++ b/packages/server/src/triggers.ts @@ -14,118 +14,247 @@ // import { - type MessageCreatedEvent, + type ConnectionInfo, + type CreatePatchEvent, type DbAdapter, + type EventResult, + type FileCreatedEvent, + type FileRemovedEvent, + type MessageCreatedEvent, + type MessagesGroupCreatedEvent, + type MessagesRemovedEvent, + type PatchCreatedEvent, + type RequestEvent, + RequestEventType, type ResponseEvent, ResponseEventType, - type MessageRemovedEvent, - type ConnectionInfo, - type PatchCreatedEvent, - type MessagesGroupCreatedEvent + type UpdateThreadEvent } from '@hcengineering/communication-sdk-types' -import { type WorkspaceID, PatchType, type Patch, type CardID } from '@hcengineering/communication-types' -import { concatLink, systemAccountUuid } from '@hcengineering/core' +import { type CardID, PatchType, type WorkspaceID, type File } from '@hcengineering/communication-types' +import { concatLink, type MeasureContext, systemAccountUuid } from '@hcengineering/core' import { generateToken } from '@hcengineering/server-token' import type { Metadata } from './metadata.ts' +import { notify } from './notification/notification' +import { type TriggerCtx } from './types.js' +import { findAccount } from './utils' export class Triggers { - private readonly registeredCards: Set = new Set() + private readonly registeredCards = new Set() constructor( + private readonly ctx: MeasureContext, private readonly metadata: Metadata, private readonly db: DbAdapter, private readonly workspace: WorkspaceID ) {} - async process(event: ResponseEvent, info: ConnectionInfo): Promise { + async process( + event: ResponseEvent, + info: ConnectionInfo, + apply: (events: RequestEvent[]) => Promise, + execute: (event: RequestEvent) => Promise + ): Promise { + 
const ctx: TriggerCtx = { + ctx: this.ctx, + metadata: this.metadata, + db: this.db, + workspace: this.workspace, + account: info.account, + execute + } + + await this.applySyncTriggers(ctx, event, apply) + + void this.createNotification(ctx, event).then((it) => { + void apply(it) + }) + } + + private async applySyncTriggers( + ctx: TriggerCtx, + event: ResponseEvent, + apply: (events: RequestEvent[]) => Promise + ): Promise { + let events: RequestEvent[] = [] + try { switch (event.type) { - case ResponseEventType.MessageCreated: - return await this.onMessageCreated(event) - case ResponseEventType.MessageRemoved: - return await this.onMessageRemoved(event, info) - case ResponseEventType.PatchCreated: - return await this.onPatchCreated(event, info) - case ResponseEventType.MessagesGroupCreated: - return await this.onMessagesGroupCreated(event) + case ResponseEventType.MessageCreated: { + events = await this.onMessageCreated(ctx, event) + break + } + case ResponseEventType.MessagesRemoved: { + events = await this.onMessagesRemoved(ctx, event) + break + } + case ResponseEventType.PatchCreated: { + events = await this.onPatchCreated(ctx, event) + break + } + case ResponseEventType.FileCreated: { + events = await this.onFileCreated(ctx, event) + break + } + case ResponseEventType.FileRemoved: { + events = await this.onFileRemoved(ctx, event) + break + } + case ResponseEventType.MessagesGroupCreated: { + events = await this.onMessagesGroupCreated(ctx, event) + break + } } + await apply(events) } catch (err: any) { console.error(err) - return [] } + } - return [] + private async createNotification(ctx: TriggerCtx, event: ResponseEvent): Promise { + return await notify(ctx, event) } - async onMessagesGroupCreated(event: MessagesGroupCreatedEvent): Promise { + private async onMessagesGroupCreated(_: TriggerCtx, event: MessagesGroupCreatedEvent): Promise { this.registeredCards.delete(event.group.card) return [] } - async onMessageRemoved(event: MessageRemovedEvent, info: ConnectionInfo): Promise { + private async onMessagesRemoved(ctx: TriggerCtx, event: MessagesRemovedEvent): Promise { const { card } = event const thread = await this.db.findThread(card) if (thread === undefined) return [] - const date = new Date() - const socialId = info.account.primarySocialId - - const patch: Patch = { - message: thread.message, - type: PatchType.removeReply, - content: thread.thread, - creator: socialId, - created: date - } - await this.db.updateThread(thread.thread, date, 'decrement') - await this.db.createPatch(thread.card, patch.message, patch.type, patch.content, patch.creator, patch.created) + const socialId = ctx.account.primarySocialId - return [ - { - type: ResponseEventType.PatchCreated, + return event.messages.flatMap(() => { + const patchEvent: CreatePatchEvent = { + type: RequestEventType.CreatePatch, + patchType: PatchType.removeReply, card: thread.card, - patch + message: thread.message, + content: thread.thread, + creator: socialId } - ] + const threadEvent: UpdateThreadEvent = { + type: RequestEventType.UpdateThread, + thread: thread.thread, + replies: 'decrement' + } + + return [patchEvent, threadEvent] + }) } - async onMessageCreated(event: MessageCreatedEvent): Promise { - return (await Promise.all([this.registerCard(event.message.card), this.updateThread(event)])).flat() + private async onMessageCreated(ctx: TriggerCtx, event: MessageCreatedEvent): Promise { + void this.registerCard(event.message.card) + + return [...(await this.addCollaborators(ctx, event)), ...(await 
this.addThreadReply(event))] } - // eslint-disable-next-line @typescript-eslint/no-unused-vars - async onPatchCreated(event: PatchCreatedEvent, info: ConnectionInfo): Promise { - return this.registerCard(event.card) + private async addCollaborators(ctx: TriggerCtx, event: MessageCreatedEvent): Promise { + const { creator } = event.message + const account = await findAccount(ctx, creator) + + if (account === undefined) { + return [] + } + + const collaborator = ( + await this.db.findCollaborators({ + card: event.message.card, + account, + limit: 1 + }) + )[0] + + return collaborator != null + ? [] + : [ + { + type: RequestEventType.AddCollaborators, + card: event.message.card, + collaborators: [account], + date: event.message.created + } + ] } - async updateThread(event: MessageCreatedEvent): Promise { + private async onPatchCreated(_: TriggerCtx, event: PatchCreatedEvent): Promise { + void this.registerCard(event.card) + return [] + } + + private async addThreadReply(event: MessageCreatedEvent): Promise { const { message } = event const thread = await this.db.findThread(message.card) - if (thread === undefined) return [] - - const date = new Date() - const patch: Patch = { - message: thread.message, - type: PatchType.addReply, - content: thread.thread, - creator: message.creator, - created: date + if (thread === undefined) { + return [] } - await this.db.updateThread(thread.thread, date, 'increment') - await this.db.createPatch(thread.card, patch.message, patch.type, patch.content, patch.creator, patch.created) return [ { - type: ResponseEventType.PatchCreated, + type: RequestEventType.CreatePatch, + patchType: PatchType.addReply, card: thread.card, - patch + message: thread.message, + content: thread.thread, + creator: message.creator + }, + { + type: RequestEventType.UpdateThread, + thread: thread.thread, + lastReply: event.message.created, + replies: 'increment' + } + ] + } + + private async onFileCreated(_: TriggerCtx, event: FileCreatedEvent): Promise { + const message = (await this.db.findMessages({ card: event.card, id: event.file.message, limit: 1 }))[0] + if (message !== undefined) return [] + + const { file } = event + const patchContent: Omit = { + blobId: file.blobId, + type: file.type, + filename: file.filename, + size: file.size + } + + return [ + { + type: RequestEventType.CreatePatch, + patchType: PatchType.addFile, + card: event.card, + message: file.message, + content: JSON.stringify(patchContent), + creator: file.creator } ] } - async registerCard(card: CardID): Promise { - if (this.registeredCards.has(card) || this.metadata.msg2fileUrl === '') return [] + private async onFileRemoved(_: TriggerCtx, event: FileRemovedEvent): Promise { + const message = (await this.db.findMessages({ card: event.card, id: event.message, limit: 1 }))[0] + if (message !== undefined) return [] + const { blobId } = event + + return [ + { + type: RequestEventType.CreatePatch, + patchType: PatchType.removeFile, + card: event.card, + message: event.message, + content: JSON.stringify({ blobId }), + creator: event.creator + } + ] + } + + private async registerCard(card: CardID): Promise { + if (this.registeredCards.has(card) || this.metadata.msg2fileUrl === '') { + return + } try { const token = generateToken(systemAccountUuid, this.workspace) @@ -140,7 +269,5 @@ export class Triggers { } catch (e) { console.error(e) } - - return [] } } diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts new file mode 100644 index 00000000000..e191c9220c4 --- /dev/null +++ 
b/packages/server/src/types.ts
@@ -0,0 +1,31 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import type { Account, MeasureContext } from '@hcengineering/core'
+import type { DbAdapter, EventResult, RequestEvent } from '@hcengineering/communication-sdk-types'
+import type { WorkspaceID } from '@hcengineering/communication-types'
+
+import type { Metadata } from './metadata'
+
+export interface TriggerCtx {
+  ctx: MeasureContext
+  metadata: Metadata
+  db: DbAdapter
+  workspace: WorkspaceID
+  account: Account
+  execute: (event: RequestEvent) => Promise<EventResult>
+}
+
+export type QueryId = string | number
diff --git a/packages/server/src/utils.ts b/packages/server/src/utils.ts
new file mode 100644
index 00000000000..def6f261408
--- /dev/null
+++ b/packages/server/src/utils.ts
@@ -0,0 +1,71 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import type { AccountID, SocialID } from '@hcengineering/communication-types'
+import { generateToken } from '@hcengineering/server-token'
+import { systemAccountUuid } from '@hcengineering/core'
+
+import type { TriggerCtx } from './types'
+
+export async function findAccount(ctx: TriggerCtx, socialString: SocialID): Promise<AccountID | undefined> {
+  if (ctx.account.socialIds.includes(socialString)) {
+    return ctx.account.uuid
+  }
+
+  const token = generateToken(systemAccountUuid)
+  // const account = getAccountClient(ctx.metadata.accountsUrl, token)
+
+  try {
+    //TODO: FIXME
+    return await fetchAccount(socialString, ctx.metadata.accountsUrl, token)
+  } catch (err: any) {
+    ctx.ctx.warn('Cannot find account', { socialString, err })
+  }
+}
+
+//TODO: replace with AccountClient
+async function fetchAccount(socialId: SocialID, url: string, token: string): Promise<AccountID | undefined> {
+  const body = {
+    method: 'findPersonBySocialId' as const,
+    params: { socialId, requireAccount: true }
+  }
+  const request: RequestInit = {
+    keepalive: true,
+    headers: {
+      ...(token === undefined
+        ? {}
+        : {
+            Authorization: 'Bearer ' + token
+          })
+    }
+  }
+
+  const response = await fetch(url, {
+    ...request,
+    headers: {
+      ...request.headers,
+      'Content-Type': 'application/json'
+    },
+    method: 'POST',
+    body: JSON.stringify(body)
+  })
+
+  const result = await response.json()
+  if (result.error != null) {
+    throw Error(result.error)
+  }
+
+  return result.result as AccountID | undefined
+}
diff --git a/packages/shared/package.json b/packages/shared/package.json
index 7f96a8d7c31..6ec1fa97bc8 100644
--- a/packages/shared/package.json
+++ b/packages/shared/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@hcengineering/communication-shared",
-  "version": "0.1.61",
+  "version": "0.1.147",
   "main": "dist/index.cjs",
   "module": "dist/index.js",
   "types": "./types/index.d.ts",
@@ -15,15 +15,13 @@
     "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs"
   },
   "devDependencies": {
-    "@types/bun": "^1.1.14",
-    "@types/js-yaml": "^4.0.9"
+    "@types/bun": "^1.1.14"
   },
   "peerDependencies": {
     "typescript": "^5.6.3"
   },
   "dependencies": {
-    "@hcengineering/communication-types": "workspace:*",
-    "js-yaml": "^4.1.0"
+    "@hcengineering/communication-types": "workspace:*"
   },
   "repository": {
     "type": "git",
diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts
index bd41801a46a..f9d4d66a364 100644
--- a/packages/shared/src/index.ts
+++ b/packages/shared/src/index.ts
@@ -15,4 +15,4 @@
 
 export * from './message.ts'
 export * from './retry.ts'
-export * from './files.ts'
+export * from './patch.ts'
diff --git a/packages/shared/src/message.ts b/packages/shared/src/message.ts
index 0dac007bc2f..8e5b28522c6 100644
--- a/packages/shared/src/message.ts
+++ b/packages/shared/src/message.ts
@@ -15,25 +15,24 @@
 
 import type { MessageID } from '@hcengineering/communication-types'
 
-let lastTimestamp = 0
+let lastTimestamp = 0n
 let counter = 0n
 
 export function generateMessageId(): MessageID {
-  const timestamp = Math.floor(Date.now() / 1000)
+  const timestamp = BigInt(Math.floor(Date.now() / 1000))
 
   if (timestamp !== lastTimestamp) {
     lastTimestamp = timestamp
     counter = 0n
   }
 
-  const id = (BigInt(timestamp) << 20n) | counter
   counter++
 
+  const id = (timestamp << 20n) | (counter << 10n) | BigInt(Math.floor(Math.random() * 1024))
+
   return id.toString() as MessageID
 }
 
 export function parseMessageId(messageId: MessageID): Date {
-  const timestamp = Number(BigInt(messageId) >> 20n)
-
-  return new Date(timestamp * 1000)
+  return new Date(Number(BigInt(messageId) >> 20n) * 1000)
 }
diff --git a/packages/shared/src/patch.ts b/packages/shared/src/patch.ts
new file mode 100644
index 00000000000..6c1897714b3
--- /dev/null
+++ b/packages/shared/src/patch.ts
@@ -0,0 +1,135 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// + +import { + PatchType, + type BlobID, + type CardID, + type Message, + type Patch, + type Reaction, + type SocialID, + type File +} from '@hcengineering/communication-types' + +type PatchFile = Pick + +export function applyPatches(message: Message, patches: Patch[], allowedPatchTypes: PatchType[] = []): Message { + if (patches.length === 0) return message + + for (const p of patches) { + message = applyPatch(message, p, allowedPatchTypes) + } + return message +} + +export function applyPatch(message: Message, patch: Patch, allowedPatchTypes: PatchType[] = []): Message { + if (allowedPatchTypes.length > 0 && !allowedPatchTypes.includes(patch.type)) return message + switch (patch.type) { + case PatchType.update: + return { + ...message, + edited: patch.created, + content: patch.content + } + case PatchType.addReaction: + return addReaction(message, { + message: message.id, + reaction: patch.content, + creator: patch.creator, + created: patch.created + }) + case PatchType.removeReaction: + return removeReaction(message, patch.content, patch.creator) + case PatchType.addReply: + return addReply(message, patch.content as CardID, patch.created) + case PatchType.removeReply: + return removeReply(message, patch.content as CardID) + case PatchType.addFile: + return addFile(message, JSON.parse(patch.content) as PatchFile, patch.created, patch.creator) + case PatchType.removeFile: + return removeFile(message, patch.content as BlobID) + } + + return message +} + +function addReaction(message: Message, reaction: Reaction): Message { + message.reactions.push(reaction) + return message +} + +function removeReaction(message: Message, emoji: string, creator: SocialID): Message { + const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) + if (reactions.length === message.reactions.length) return message + + return { + ...message, + reactions + } +} + +function addReply(message: Message, thread: CardID, created: Date): Message { + if (message.thread === undefined) { + return { + ...message, + thread: { + card: message.card, + message: message.id, + thread, + repliesCount: 1, + lastReply: created + } + } + } + + if (message.thread.thread !== thread) return message + + return { + ...message, + thread: { + ...message.thread, + repliesCount: message.thread.repliesCount + 1, + lastReply: created + } + } +} + +function addFile(message: Message, file: PatchFile, created: Date, creator: SocialID): Message { + message.files.push({ ...file, card: message.card, message: message.id, created, creator }) + return message +} + +function removeFile(message: Message, blobId: BlobID): Message { + const files = message.files.filter((it) => it.blobId !== blobId) + if (files.length === message.files.length) return message + + return { + ...message, + files + } +} + +function removeReply(message: Message, thread: CardID): Message { + if (message.thread === undefined || message.thread.thread !== thread) return message + + return { + ...message, + thread: { + ...message.thread, + repliesCount: message.thread.repliesCount - 1 + } + } +} diff --git a/packages/sqlite-wasm/package.json b/packages/sqlite-wasm/package.json deleted file mode 100644 index d938150b235..00000000000 --- a/packages/sqlite-wasm/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "@hcengineering/communication-sqlite-wasm", - "version": "0.1.61", - "main": "dist/index.js", - "module": "dist/index.js", - "types": "./types/index.d.ts", - "files": [ - "dist/index.js", - "dist/index.cjs", - "types/**/*.d.ts" - ], 
- "scripts": { - "bundle": "bun run bundle:browser", - "bundle:browser": "bun build src/index.ts --outdir dist --target browser" - }, - "devDependencies": { - "@types/bun": "^1.1.14" - }, - "dependencies": { - "@hcengineering/communication-types": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@sqlite.org/sqlite-wasm": "^3.47.1-build1", - "path": "^0.12.7", - "uuid": "^11.0.3" - }, - "peerDependencies": { - "typescript": "^5.6.3" - } -} diff --git a/packages/sqlite-wasm/src/adapter.ts b/packages/sqlite-wasm/src/adapter.ts deleted file mode 100644 index 628a435443d..00000000000 --- a/packages/sqlite-wasm/src/adapter.ts +++ /dev/null @@ -1,190 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import { - type Message, - type FindMessagesParams, - type CardID, - type RichText, - type SocialID, - type MessageID, - type ContextID, - type NotificationContextUpdate, - type FindNotificationsParams, - type FindNotificationContextParams, - type NotificationContext, - type Notification, - type BlobID, - type MessagesGroup, - type FindMessagesGroupsParams -} from '@hcengineering/communication-types' -import type { DbAdapter } from '@hcengineering/communication-sdk-types' - -import { initializeSQLite, type Sqlite3Worker1Promiser } from './connection' -import { applyMigrations } from './migrations.ts' -import { MessagesDb } from './db/message.ts' -import { NotificationsDb } from './db/notification.ts' - -//TODO: FIXME -//export class SqliteAdapter implements DbAdapter -export class SqliteAdapter { - private readonly message: MessagesDb - private readonly notification: NotificationsDb - - constructor( - private readonly worker: Sqlite3Worker1Promiser, - private readonly dbId: string - ) { - this.message = new MessagesDb(worker, dbId) - this.notification = new NotificationsDb(worker, dbId) - } - - async createMessage( - workspace: string, - card: CardID, - content: RichText, - creator: SocialID, - created: Date - ): Promise { - return await this.message.createMessage(workspace, card, content, creator, created) - } - - async createPatch( - workspace: string, - card: CardID, - message: MessageID, - content: RichText, - creator: SocialID, - created: Date - ): Promise { - //TODO: FIXME - return await this.message.createPatch(message, content, creator, created) - } - - async removeMessage(workspace: string, card: CardID, id: MessageID): Promise { - await this.message.removeMessage(id) - return id - } - - async removeMessages(workspace: string, card: CardID, ids: MessageID[]): Promise { - //TODO: implement - return ids - } - - /* eslint-disable @typescript-eslint/no-unused-vars */ - async createMessagesGroup( - workspace: string, - card: CardID, - blobId: BlobID, - from_date: Date, - to_date: Date, - count: number - ): Promise { - //TODO: implement - } - - async findMessagesGroups(workspace: string, params: FindMessagesGroupsParams): Promise { - //TODO: implement - return [] - } - - async createReaction( - 
workspace: string, - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID, - created: Date - ): Promise { - //TODO: FIXME - return await this.message.createReaction(message, reaction, creator, created) - } - - async removeReaction( - workspace: string, - card: CardID, - message: MessageID, - reaction: string, - creator: SocialID - ): Promise { - //TODO: FIXME - return await this.message.removeReaction(message, reaction, creator) - } - - async createAttachment(message: MessageID, attachment: CardID, creator: SocialID, created: Date): Promise { - return await this.message.createAttachment(message, attachment, creator, created) - } - - async removeAttachment(message: MessageID, attachment: CardID): Promise { - return await this.message.removeAttachment(message, attachment) - } - - async findMessages(workspace: string, params: FindMessagesParams): Promise { - return await this.message.find(workspace, params) - } - - async createNotification(message: MessageID, context: ContextID): Promise { - return await this.notification.createNotification(message, context) - } - async removeNotification(message: MessageID, context: ContextID): Promise { - return await this.notification.removeNotification(message, context) - } - - async createContext( - workspace: string, - card: CardID, - personalWorkspace: string, - lastView?: Date, - lastUpdate?: Date - ): Promise { - return await this.notification.createContext(workspace, card, personalWorkspace, lastView, lastUpdate) - } - - async removeContext(context: ContextID): Promise { - return await this.notification.removeContext(context) - } - - async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { - return await this.notification.updateContext(context, update) - } - - async findContexts( - params: FindNotificationContextParams, - personalWorkspaces: string[], - workspace?: string - ): Promise { - return await this.notification.findContexts(params, personalWorkspaces, workspace) - } - - async findNotifications( - params: FindNotificationsParams, - personalWorkspace: string, - workspace?: string - ): Promise { - return await this.notification.findNotifications(params, personalWorkspace, workspace) - } - - close(): void { - void this.worker('close') - } -} - -export async function createDbAdapter(connectionString: string): Promise { - const { worker, dbId } = await initializeSQLite(connectionString) - - await applyMigrations(worker, dbId) - //TODO: FIXME - return new SqliteAdapter(worker, dbId) as unknown as DbAdapter -} diff --git a/packages/sqlite-wasm/src/connection.ts b/packages/sqlite-wasm/src/connection.ts deleted file mode 100644 index 281b7f9c783..00000000000 --- a/packages/sqlite-wasm/src/connection.ts +++ /dev/null @@ -1,71 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -// @ts-expect-error error -import { sqlite3Worker1Promiser } from '@sqlite.org/sqlite-wasm' - -export type Sqlite3Worker1Promiser = { - ( - command: 'config-get', - params: object - ): Promise<{ - result: { - version: { - libVersion: string - } - } - }> - - ( - command: 'open', - params: { filename: string } - ): Promise<{ - dbId: string - }> - - ( - command: 'exec', - params: { - dbId: string - sql: string - callback?: (row: SqlResult | null | undefined) => void - } - ): Promise - - (command: 'close'): Promise -} - -type SqlResult = { - columnNames: string[] - row: any[] | null | undefined - rowNumber: number | null | undefined -} - -export async function initializeSQLite( - connectionString: string -): Promise<{ worker: Sqlite3Worker1Promiser; dbId: string }> { - const promiser: Sqlite3Worker1Promiser = await new Promise((resolve) => { - const _promiser = sqlite3Worker1Promiser({ - onready: () => resolve(_promiser) - }) - }) - - const configResponse = await promiser('config-get', {}) - console.log('SQLite3 config', configResponse.result) - - const { dbId } = await promiser('open', { filename: connectionString }) - - return { worker: promiser, dbId } -} diff --git a/packages/sqlite-wasm/src/db/base.ts b/packages/sqlite-wasm/src/db/base.ts deleted file mode 100644 index be348e544bf..00000000000 --- a/packages/sqlite-wasm/src/db/base.ts +++ /dev/null @@ -1,77 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import type {Sqlite3Worker1Promiser} from "../connection"; - -export class BaseDb { - constructor(protected readonly worker: Sqlite3Worker1Promiser, protected readonly dbId: string) { - } - - async insert( table: string, data: Record): Promise { - const keys = Object.keys(data) - const values = Object.values(data) - const sql = ` - INSERT INTO ${table} (${keys.map((k) => `${k}`).join(', ')}) - VALUES (${values.map((value) => `'${value}'`).join(', ')}); - ` - await this.worker('exec', { - dbId: this.dbId, - sql - }); - } - - async remove(table: string, where: Record): Promise { - const keys = Object.keys(where) - const values = Object.values(where) - - const sql = ` - DELETE - FROM ${table} - WHERE ${keys.map((k, idx) => `${k} = '${values[idx]}'`).join(' AND ')};` - - await this.worker('exec', { - dbId: this.dbId, - sql - }); - } - - - async select(sql: string): Promise[]> { - return new Promise(async (resolve) => { - const rows: Record[] = []; - - await this.worker('exec', { - dbId: this.dbId, - sql, - callback: (res) => { - if (res == null) { - resolve(rows); - return; - } - if (res.row == null || res.rowNumber == null) { - resolve(rows); - } else { - const rowObject: Record = {}; - res.columnNames.forEach((columnName, index) => { - rowObject[columnName] = res.row?.[index] ?? 
undefined - }); - - rows.push(rowObject); - } - }, - }); - }); - } -} \ No newline at end of file diff --git a/packages/sqlite-wasm/src/db/message.ts b/packages/sqlite-wasm/src/db/message.ts deleted file mode 100644 index 44c820224ba..00000000000 --- a/packages/sqlite-wasm/src/db/message.ts +++ /dev/null @@ -1,216 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import { - type Message, - type MessageID, - type CardID, - type FindMessagesParams, - SortingOrder, - type SocialID, - type RichText, - type Reaction, type Attachment -} from '@hcengineering/communication-types' - -import {BaseDb} from './base.ts' -import { - TableName, - type MessageDb, - type AttachmentDb, - type ReactionDb, - type PatchDb -} from './types.ts' - -export class MessagesDb extends BaseDb { - //Message - async createMessage(workspace: string, card: CardID, content: RichText, creator: SocialID, created: Date): Promise { - const dbData: MessageDb = { - id: self.crypto.randomUUID(), - workspace_id: workspace, - card_id: card, - content: content, - creator: creator, - created: created, - } - await this.insert(TableName.Message, dbData) - //TODO: FIX ME - return dbData.id as any as MessageID - } - - async removeMessage(message: MessageID): Promise { - await this.remove(TableName.Message, {id: message}) - } - - async createPatch(message: MessageID, content: RichText, creator: SocialID, created: Date): Promise { - const dbData: PatchDb = { - id: self.crypto.randomUUID(), - message_id: message, - content: content, - creator: creator, - created: created - } - - await this.insert(TableName.Patch, dbData) - } - - //Attachment - async createAttachment(message: MessageID, card: CardID, creator: SocialID, created: Date): Promise { - const dbData: AttachmentDb = { - message_id: message, - card_id: card, - creator: creator, - created: created - } - await this.insert(TableName.Attachment, dbData) - } - - async removeAttachment(message: MessageID, card: CardID): Promise { - await this.remove(TableName.Attachment, { - message_id: message, - card_id: card - }) - } - - //Reaction - async createReaction(message: MessageID, reaction: string, creator: SocialID, created: Date): Promise { - const dbData: ReactionDb = { - message_id: message, - reaction: reaction, - creator: creator, - created: created - } - await this.insert(TableName.Reaction, dbData) - } - - async removeReaction(message: MessageID, reaction: string, creator: SocialID): Promise { - await this.remove(TableName.Reaction, { - message_id: message, - reaction: reaction, - creator: creator - }) - } - - //Find messages - async find(workspace: string, params: FindMessagesParams): Promise { - const select = `SELECT m.id, - m.card_id, - m.content, - m.creator, - m.created, - json_group_array( - json_object( - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) - ) AS patches, - json_group_array( - json_object( - 'card_id', a.card_id, - 'message_id', a.message_id, - 'creator', a.creator, - 
'created', a.created - ) - ) AS attachments, - json_group_array( - json_object( - 'message_id', r.message_id, - 'reaction', r.reaction, - 'creator', r.creator, - 'created', r.created - ) - ) AS reactions - FROM ${TableName.Message} m - LEFT JOIN ${TableName.Patch} p ON p.message_id = m.id - LEFT JOIN ${TableName.Attachment} a ON a.message_id = m.id - LEFT JOIN ${TableName.Reaction} r ON r.message_id = m.id` - - const where = this.buildMessageWhere(workspace, params) - const groupBy = `GROUP BY m.id` - const orderBy = params.order ? `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' - const limit = params.limit ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, groupBy, orderBy, limit].join(' ') - - const result = await this.select(sql) - - return result.map(it => this.toMessage(it)) - } - - buildMessageWhere(workspace: string, params: FindMessagesParams): string { - const where: string[] = [`m.workspace_id = '${workspace}'`] - - if (params.card != null) { - where.push(`m.card_id = '${params.card}'`) - } - if (params.id != null) { - where.push(`m.id = '${params.id}'`) - } - - //TODO: FIX ME - // if (params.from != null) { - // const exclude = params.excluded ?? false - // const direction = params.direction ?? Direction.Forward - // const getOperator = () => { - // if (exclude) { - // return direction === Direction.Forward ? '>' : '<' - // } else { - // return direction === Direction.Forward ? '>=' : '<=' - // } - // } - // - // where.push(`m.created ${getOperator()} ${params.from}`) - // } - - return `WHERE ${where.join(' AND ')}` - } - - private toMessage(row: any): Message { - const patches = JSON.parse(row.patches).filter((it: any) => it.created != null) - const attachments = JSON.parse(row.attachments).filter((it: any) => it.created != null) - const reactions = JSON.parse(row.reactions).filter((it: any) => it.created != null) - - const lastPatch = patches?.[0] - - return { - id: row.id, - card: row.card_id, - content: lastPatch?.content ?? row.content, - creator: row.creator, - created: new Date(row.created), - edited: (lastPatch?.created ?? row.created), - reactions: (reactions ?? []).map((it: any) => this.toReaction(it)), - attachments: (attachments ?? []).map((it: any) => this.toAttachment(it)) - } - } - - private toReaction(row: any): Reaction { - return { - message: row.message_id, - reaction: row.reaction, - creator: row.creator, - created: new Date(row.created) - } - } - - private toAttachment(row: any): Attachment { - return { - message: row.message_id, - card: row.card, - creator: row.creator, - created: new Date(row.created) - } - } -} - diff --git a/packages/sqlite-wasm/src/db/notification.ts b/packages/sqlite-wasm/src/db/notification.ts deleted file mode 100644 index b3215a2e5af..00000000000 --- a/packages/sqlite-wasm/src/db/notification.ts +++ /dev/null @@ -1,238 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import { - type MessageID, - type ContextID, - type CardID, - type NotificationContext, - type FindNotificationContextParams, SortingOrder, - type FindNotificationsParams, type Notification, - type NotificationContextUpdate -} from '@hcengineering/communication-types' - -import {BaseDb} from './base.ts' -import {TableName, type ContextDb, type NotificationDb} from './types.ts' - -export class NotificationsDb extends BaseDb { - async createNotification(message: MessageID, context: ContextID): Promise { - const dbData: NotificationDb = { - message_id: message, - context_id: context - } - await this.insert(TableName.Notification, dbData) - } - - async removeNotification(message: MessageID, context: ContextID): Promise { - await this.remove(TableName.Notification, { - message_id: message, - context - }) - } - - async createContext(workspace: string, card: CardID, personalWorkspace: string, lastView?: Date, lastUpdate?: Date): Promise { - const dbData: ContextDb = { - id: self.crypto.randomUUID(), - workspace_id: workspace, - card_id: card, - personal_workspace: personalWorkspace, - last_view: lastView, - last_update: lastUpdate - } - await this.insert(TableName.NotificationContext, dbData) - return dbData.id as ContextID - } - - async removeContext(context: ContextID): Promise { - await this.remove(TableName.NotificationContext, { - id: context - }) - } - - async updateContext(context: ContextID, update: NotificationContextUpdate): Promise { - const dbData: Partial = {} - - if (update.archivedFrom != null) { - dbData.archived_from = update.archivedFrom - } - if (update.lastView != null) { - dbData.last_view = update.lastView - } - if (update.lastUpdate != null) { - dbData.last_update = update.lastUpdate - } - - if (Object.keys(dbData).length === 0) { - return - } - - const keys = Object.keys(dbData) - const values = Object.values(dbData) - - const sql = ` - UPDATE ${TableName.NotificationContext} - SET ${keys.map((k, idx) => `"${k}" = '${values[idx]}'`).join(', ')} - WHERE id = '${context}' - `; - - await this.worker('exec', { - dbId: this.dbId, - sql - }); - } - - async findContexts(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): Promise { - const select = ` - SELECT nc.id, - nc.card_id, - nc.archived_from, - nc.last_view, - nc.last_update, - nc.workspace_id, - nc.personal_workspace - FROM ${TableName.NotificationContext} nc`; - const where = this.buildContextWhere(params, personalWorkspaces, workspace); - // const orderSql = `ORDER BY nc.created ${params.sort === SortOrder.Asc ? 'ASC' : 'DESC'}` - const limit = params.limit ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, limit].join(' ') - - const result = await this.select(sql) - - return result.map(it => this.toNotificationContext(it)); - } - - - async findNotifications(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): Promise { - //TODO: should join with attachments and reactions? 
- const select = ` - SELECT n.message_id, - n.context_id, - m.card_id AS message_card, - m.content AS message_content, - m.creator AS message_creator, - m.created AS message_created, - nc.card_id, - nc.archived_from, - nc.last_view, - nc.last_update, - json_group_array( - json_object( - 'id', p.id, - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) - ) AS patches - FROM ${TableName.Notification} n - JOIN - ${TableName.NotificationContext} nc ON n.context_id = nc.id - JOIN - ${TableName.Message} m ON n.message_id = m.id - LEFT JOIN - ${TableName.Patch} p ON p.message_id = m.id - `; - const where = this.buildNotificationWhere(params, personalWorkspace, workspace) - const groupBy = `GROUP BY n.message_id, n.context_id, m.id, nc.card_id, nc.archived_from, nc.last_view, nc.last_update`; - const orderBy = `ORDER BY m.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - const limit = params.limit ? ` LIMIT ${params.limit}` : '' - const sql = [select, where, groupBy, orderBy, limit].join(' ') - - const result = await this.select(sql) - - return result.map(it => this.toNotification(it)); - } - - buildContextWhere(params: FindNotificationContextParams, personalWorkspaces: string[], workspace?: string,): string { - const where: string[] = [] - - if (workspace != null) { - where.push(`nc.workspace_id = '${workspace}'`) - } - if (personalWorkspaces.length > 0) { - where.push(`nc.personal_workspace IN (${personalWorkspaces.map(it => `'${it}'`).join(', ')})`) - } - - if (params.card != null) { - where.push(`nc.card_id = '${params.card}'`) - } - - return `WHERE ${where.join(' AND ')}` - } - - buildNotificationWhere(params: FindNotificationsParams, personalWorkspace: string, workspace?: string): string { - const where: string[] = [`nc.personal_workspace = '${personalWorkspace}'`] - if (workspace != null) { - where.push(`nc.workspace_id = '${workspace}'`) - } - - if (params.context != null) { - where.push(`n.context_id = '${params.context}'`) - } - - if (params.read === true) { - where.push(`nc.last_view IS NOT NULL AND nc.last_view >= m.created`) - } - - if (params.read === false) { - where.push(`(nc.last_view IS NULL OR nc.last_view > m.created)`) - } - - if (params.archived === true) { - where.push(`nc.archived_from IS NOT NULL AND nc.archived_from >= m.created`) - } - - if (params.archived === false) { - where.push(`(nc.archived_from IS NULL OR nc.archived_from > m.created)`) - } - - return `WHERE ${where.join(' AND ')}` - } - - toNotificationContext(row: any): NotificationContext { - return { - id: row.id, - card: row.card_id, - archivedFrom: row.archived_from ? new Date(row.archived_from) : undefined, - lastView: row.last_view ? new Date(row.last_view) : undefined, - lastUpdate: row.last_update ? new Date(row.last_update) : undefined, - workspace: row.workspace, - personalWorkspace: row.personal_workspace - } - } - - toNotification(row: any): Notification { - const patches = JSON.parse(row.patches).filter((p: any) => p.created != null) - const lastPatch = patches[patches.length - 1] - const lastView = row.last_view ? new Date(row.last_view) : undefined - const archivedFrom = row.archived_from ? new Date(row.archived_from) : undefined - const created = new Date(row.message_created) - return { - message: { - id: row.message_id, - card: row.message_card, - content: lastPatch?.content ?? row.message_content, - creator: row.message_creator, - created, - edited: new Date(lastPatch?.created ?? 
row.message_created), - reactions: [], - attachments: [] - }, - context: row.context_id, - read: lastView != null && lastView >= created, - archived: archivedFrom != null && archivedFrom >= created - } - } -} - diff --git a/packages/sqlite-wasm/src/db/types.ts b/packages/sqlite-wasm/src/db/types.ts deleted file mode 100644 index edde495119d..00000000000 --- a/packages/sqlite-wasm/src/db/types.ts +++ /dev/null @@ -1,72 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import type {CardID, ContextID, MessageID, RichText, SocialID } from "@hcengineering/communication-types" - -export enum TableName { - Message = 'message', - Patch = 'patch', - Attachment = 'attachment', - Reaction = 'reaction', - Notification = 'notification', - NotificationContext = 'notification_context' -} - -export interface MessageDb { - id: string - workspace_id: string, - card_id: string, - content: RichText, - creator: SocialID, - created: Date, -} - -export interface PatchDb { - id: string, - message_id: MessageID, - content: RichText, - creator: SocialID, - created: Date, -} - -export interface ReactionDb { - message_id: MessageID, - reaction: string, - creator: SocialID - created: Date -} - -export interface AttachmentDb { - message_id: MessageID, - card_id: CardID, - creator: SocialID - created: Date -} - -export interface NotificationDb { - message_id: MessageID, - context_id: ContextID -} - -export interface ContextDb { - id: string - workspace_id: string - card_id: CardID - personal_workspace: string - - archived_from?: Date - last_view?: Date - last_update?: Date -} \ No newline at end of file diff --git a/packages/sqlite-wasm/src/migrations.ts b/packages/sqlite-wasm/src/migrations.ts deleted file mode 100644 index 20db224d33c..00000000000 --- a/packages/sqlite-wasm/src/migrations.ts +++ /dev/null @@ -1,130 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import type { Sqlite3Worker1Promiser } from './connection' - -export async function applyMigrations(worker: Sqlite3Worker1Promiser, dbId: string): Promise { - await migrationV1(worker, dbId) -} - -async function migrationV1(worker: Sqlite3Worker1Promiser, dbId: string): Promise { - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS message - ( - id TEXT NOT NULL, - workspace_id TEXT NOT NULL, - card_id TEXT NOT NULL, - content TEXT NOT NULL, - creator TEXT NOT NULL, - created DATETIME NOT NULL, - PRIMARY KEY (id) - ) - ` - }) - - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS patch - ( - id TEXT NOT NULL, - message_id TEXT NOT NULL, - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created DATETIME NOT NULL, - - PRIMARY KEY (id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE - ) - ` - }) - await worker('exec', { - dbId, - sql: `CREATE INDEX IF NOT EXISTS idx_patch_message_id ON patch (message_id)` - }) - - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS attachment - ( - message_id TEXT NOT NULL, - card_id TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created DATETIME NOT NULL, - - PRIMARY KEY (message_id, card_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE - ) - ` - }) - - await worker('exec', { - dbId, - sql: `CREATE INDEX IF NOT EXISTS attachment_message_idx ON attachment (message_id)` - }) - - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS reaction - ( - message_id TEXT NOT NULL, - reaction TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created DATETIME NOT NULL, - - PRIMARY KEY (message_id, creator, reaction), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE - ) - ` - }) - - await worker('exec', { - dbId, - sql: `CREATE INDEX IF NOT EXISTS reaction_message_idx ON reaction (message_id)` - }) - - await worker('exec', { - dbId, - sql: ` - CREATE TABLE IF NOT EXISTS notification_context - ( - id TEXT NOT NULL, - workspace_id TEXT NOT NULL, - card_id TEXT NOT NULL, - personal_workspace TEXT NOT NULL, - archived_from DATETIME, - last_view DATETIME, - last_update DATETIME, - - PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, personal_workspace) - ); - - CREATE TABLE IF NOT EXISTS notification - ( - message_id TEXT NOT NULL, - context_id TEXT NOT NULL, - - PRIMARY KEY (message_id, context_id), - FOREIGN KEY (message_id) REFERENCES message (id) ON DELETE CASCADE, - FOREIGN KEY (context_id) REFERENCES notification_context (id) ON DELETE CASCADE - ); - ` - }) -} diff --git a/packages/sqlite-wasm/tsconfig.json b/packages/sqlite-wasm/tsconfig.json deleted file mode 100644 index e7a6cb178dd..00000000000 --- a/packages/sqlite-wasm/tsconfig.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "declarationDir": "./types", - "emitDeclarationOnly": true, - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src"] -} diff --git a/packages/types/package.json b/packages/types/package.json index cac7fc44713..1501b2565db 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.61", + "version": "0.1.147", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -21,8 +21,8 @@ "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/core": "0.7.15", - "@hcengineering/card": "0.7.15" + "@hcengineering/core": "^0.7.28", + "@hcengineering/card": 
"^0.7.28" }, "repository": { "type": "git", diff --git a/packages/client-ws/src/index.ts b/packages/types/src/core.ts similarity index 62% rename from packages/client-ws/src/index.ts rename to packages/types/src/core.ts index a4b18e3dca9..4d5175102e0 100644 --- a/packages/client-ws/src/index.ts +++ b/packages/types/src/core.ts @@ -13,7 +13,14 @@ // limitations under the License. // -import { createMessagesQuery, createNotificationsQuery } from '@hcengineering/communication-client-query' +import type { Ref, Blob, AccountUuid, WorkspaceUuid, PersonId } from '@hcengineering/core' +import type { Card } from '@hcengineering/card' -export * from './client' -export { createMessagesQuery, createNotificationsQuery } +export type BlobID = Ref +export type CardID = Ref +export type SocialID = PersonId +export type WorkspaceID = WorkspaceUuid +export type RichText = string +export type AccountID = AccountUuid + +export type ID = string diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts index 50688055884..7b8da10cf37 100644 --- a/packages/types/src/file.ts +++ b/packages/types/src/file.ts @@ -13,7 +13,8 @@ // limitations under the License. // -import type { CardID, Message, MessageID, RichText, SocialID } from './message' +import type { BlobID, CardID, RichText, SocialID } from './core' +import type { Message, MessageID, MessageType, MessageData } from './message' export interface FileMetadata { card: CardID @@ -24,14 +25,26 @@ export interface FileMetadata { export interface FileMessage { id: MessageID + type: MessageType content: RichText - edited?: Date creator: SocialID + data?: MessageData created: Date + edited?: Date reactions: FileReaction[] + files: FileBlob[] thread?: FileThread } +export interface FileBlob { + blobId: BlobID + type: string + filename: string + size: number + creator: SocialID + created: Date +} + export interface FileReaction { reaction: string creator: SocialID diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index b95c2f6b615..3cd71d88293 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -13,7 +13,8 @@ // limitations under the License. // +export * from './core.ts' +export * from './file.ts' export * from './message.ts' export * from './notification.ts' export * from './query.ts' -export * from './file.ts' diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index f9c3a7a7606..514f23eff14 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -13,38 +13,73 @@ // limitations under the License. 
// -import type { Ref, Blob, PersonId, WorkspaceUuid } from '@hcengineering/core' -import type { Card } from '@hcengineering/card' +import type { Attribute, Class, Mixin, Ref } from '@hcengineering/core' +import type { BlobID, CardID, ID, RichText, SocialID } from './core' +import type { Card, Tag } from '@hcengineering/card' -export type BlobID = Ref -export type CardID = Ref -export type SocialID = PersonId -export type WorkspaceID = WorkspaceUuid -export type RichText = string - -export type ID = string -export type MessageID = string & { message: true } +export type MessageID = ID & { message: true } export interface Message { id: MessageID card: CardID + type: MessageType content: RichText creator: SocialID created: Date + data?: MessageData edited?: Date thread?: Thread reactions: Reaction[] - attachments: Attachment[] + files: File[] +} + +export enum MessageType { + Message = 'message', + Activity = 'activity' +} + +export type MessageData = ActivityMessageData | any + +export interface ActivityMessage extends Message { + type: MessageType.Activity + data: ActivityMessageData +} + +export interface ActivityMessageData { + action: 'create' | 'remove' | 'update' + update?: ActivityUpdate +} + +export type ActivityUpdate = ActivityAttributeUpdate | ActivityTagUpdate +export enum ActivityUpdateType { + Attribute = 'attribute', + Tag = 'tag' +} + +export interface ActivityTagUpdate { + type: ActivityUpdateType.Tag + tag: Ref + action: 'add' | 'remove' +} + +type AttributeValue = string | number | null + +export interface ActivityAttributeUpdate { + type: ActivityUpdateType.Attribute + attrKey: string + attrClass: Ref>> + mixin?: Ref> + set?: AttributeValue | AttributeValue[] + added?: AttributeValue[] + removed?: AttributeValue[] } export interface MessagesGroup { card: CardID blobId: BlobID - fromId: MessageID - toId: MessageID - fromDate: Date - toDate: Date + fromSec: Date + toSec: Date count: number patches?: Patch[] } @@ -62,7 +97,9 @@ export enum PatchType { addReaction = 'addReaction', removeReaction = 'removeReaction', addReply = 'addReply', - removeReply = 'removeReply' + removeReply = 'removeReply', + addFile = 'addFile', + removeFile = 'removeFile' } export interface Reaction { @@ -72,9 +109,13 @@ export interface Reaction { created: Date } -export interface Attachment { - message: MessageID +export interface File { card: CardID + message: MessageID + blobId: BlobID + type: string + filename: string + size: number creator: SocialID created: Date } diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index c860d2e24bb..be4ee0fa5e1 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -13,29 +13,32 @@ // limitations under the License. 
// -import type { Message, CardID, WorkspaceID } from './message' +import type { AccountID, CardID, ID } from './core' +import type { Message, MessageID, MessagesGroup, Patch } from './message' -export type ContextID = string & { context: true } +export type ContextID = ID & { context: true } +export type NotificationID = ID & { notification: true } + +export interface Collaborator { + account: AccountID +} export interface Notification { - message: Message + id: NotificationID context: ContextID read: boolean - archived: boolean + created: Date + messageId?: MessageID + message?: Message + messageGroup?: MessagesGroup + patches?: Patch[] } export interface NotificationContext { id: ContextID card: CardID - workspace: WorkspaceID - personalWorkspace: WorkspaceID - archivedFrom?: Date - lastView?: Date - lastUpdate?: Date -} - -export interface NotificationContextUpdate { - archivedFrom?: Date - lastView?: Date - lastUpdate?: Date + account: AccountID + lastUpdate: Date + lastView: Date + notifications?: Notification[] } diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 474ef6b2863..3ea86cb1f05 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -15,24 +15,23 @@ import { SortingOrder } from '@hcengineering/core' -import type { BlobID, CardID, MessageID } from './message' +import type { MessageID } from './message' import type { ContextID } from './notification' +import type { AccountID, BlobID, CardID } from './core' export { SortingOrder } export type ComparisonOperator = 'less' | 'lessOrEqual' | 'greater' | 'greaterOrEqual' | 'notEqual' -type Exclusive = { - [K in keyof T]: Record & Partial, never>> -}[keyof T] - export interface Window { getResult(): T[] loadNextPage(): Promise + loadPrevPage(): Promise hasNextPage(): boolean + hasPrevPage(): boolean } @@ -44,27 +43,43 @@ interface FindParams { export interface FindMessagesParams extends FindParams { id?: MessageID card: CardID - created?: Exclusive> | Date + files?: boolean + reactions?: boolean + replies?: boolean + created?: Partial> | Date } -export interface FindNotificationsParams extends FindParams { - context?: ContextID - message?: MessageID - read?: boolean - archived?: boolean - created?: Exclusive> | Date +export interface FindMessagesGroupsParams extends FindParams { + card: CardID + blobId?: BlobID + patches?: boolean + fromSec?: Partial> | Date + toSec?: Partial> | Date + orderBy?: 'fromSec' | 'toSec' } export interface FindNotificationContextParams extends FindParams { id?: ContextID card?: CardID + lastUpdate?: Partial> | Date + account?: AccountID | AccountID[] + notifications?: { + message?: boolean + limit: number + order: SortingOrder + read?: boolean + } } -export interface FindMessagesGroupsParams extends FindParams { +export interface FindNotificationsParams extends FindParams { + context?: ContextID + read?: boolean + created?: Partial> | Date + account?: AccountID | AccountID[] + message?: boolean +} + +export interface FindCollaboratorsParams extends FindParams { card: CardID - blobId?: BlobID - withPatches?: boolean - fromDate?: Partial> | Date - toDate?: Partial> | Date - orderBy?: 'fromDate' | 'toDate' + account?: AccountID | AccountID[] } diff --git a/packages/client-ws/package.json b/packages/yaml/package.json similarity index 58% rename from packages/client-ws/package.json rename to packages/yaml/package.json index 542e6b52fe3..c97729c12c1 100644 --- a/packages/client-ws/package.json +++ b/packages/yaml/package.json @@ -1,7 +1,7 @@ { - "name": 
"@hcengineering/communication-client-ws", - "version": "0.1.61", - "main": "dist/index.js", + "name": "@hcengineering/communication-yaml", + "version": "0.1.147", + "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", "files": [ @@ -10,21 +10,22 @@ "types/**/*.d.ts" ], "scripts": { - "bundle": "bun run bundle:browser", - "bundle:browser": "bun build src/index.ts --outdir dist --target browser" + "bundle": "bun run bundle:browser && bun run bundle:node", + "bundle:browser": "bun build src/index.ts --outdir dist --target browser", + "bundle:node": "bun build src/index.ts --target node --format cjs --outfile dist/index.cjs" }, "devDependencies": { - "@types/bun": "^1.1.14" - }, - "dependencies": { - "@hcengineering/communication-types": "workspace:*", - "@hcengineering/communication-client-query": "workspace:*", - "@hcengineering/communication-sdk-types": "workspace:*", - "@msgpack/msgpack": "^3.0.0-beta2" + "@types/bun": "^1.1.14", + "@types/js-yaml": "^4.0.9" }, "peerDependencies": { "typescript": "^5.6.3" }, + "dependencies": { + "@hcengineering/communication-types": "workspace:*", + "@hcengineering/communication-shared": "workspace:*", + "js-yaml": "^4.1.0" + }, "repository": { "type": "git", "url": "git+https://github.com/hcengineering/communication.git" diff --git a/packages/yaml/src/deserialize.ts b/packages/yaml/src/deserialize.ts new file mode 100644 index 00000000000..93171636f81 --- /dev/null +++ b/packages/yaml/src/deserialize.ts @@ -0,0 +1,49 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import type { FileMessage, Message } from '@hcengineering/communication-types' + +export function deserializeMessage(message: Message): FileMessage { + return { + id: message.id, + type: message.type, + content: message.content, + edited: message.edited, + creator: message.creator, + created: message.created, + data: message.data, + thread: + message.thread != null + ? { + thread: message.thread.thread, + repliesCount: message.thread.repliesCount, + lastReply: message.thread.lastReply + } + : undefined, + files: message.files.map((file) => ({ + blobId: file.blobId, + type: file.type, + filename: file.filename, + size: file.size, + created: file.created, + creator: file.creator + })), + reactions: message.reactions.map((reaction) => ({ + reaction: reaction.reaction, + creator: reaction.creator, + created: reaction.created + })) + } +} diff --git a/packages/sqlite-wasm/src/index.ts b/packages/yaml/src/index.ts similarity index 91% rename from packages/sqlite-wasm/src/index.ts rename to packages/yaml/src/index.ts index 566be6a0660..86f8e444afa 100644 --- a/packages/sqlite-wasm/src/index.ts +++ b/packages/yaml/src/index.ts @@ -13,4 +13,5 @@ // limitations under the License. 
// -export * from './adapter.ts' +export * from './parse.ts' +export * from './deserialize.ts' diff --git a/packages/shared/src/files.ts b/packages/yaml/src/parse.ts similarity index 90% rename from packages/shared/src/files.ts rename to packages/yaml/src/parse.ts index 8159556901a..a08fd688bae 100644 --- a/packages/shared/src/files.ts +++ b/packages/yaml/src/parse.ts @@ -13,6 +13,7 @@ // limitations under the License. // +import { retry, type RetryOptions } from '@hcengineering/communication-shared' import type { FileMessage, FileMetadata, @@ -22,8 +23,6 @@ import type { } from '@hcengineering/communication-types' import yaml from 'js-yaml' -import { retry, type RetryOptions } from './retry' - export async function loadGroupFile( workspace: WorkspaceID, filesUrl: string, @@ -64,11 +63,13 @@ export function parseYaml(data: string): ParsedFile { metadata, messages: messages.map((message) => ({ id: message.id, + type: message.type, card: metadata.card, content: message.content, edited: message.edited, creator: message.creator, created: message.created, + data: message.data, thread: message.thread ? { card: metadata.card, @@ -78,7 +79,11 @@ export function parseYaml(data: string): ParsedFile { lastReply: message.thread.lastReply } : undefined, - attachments: [], + files: message.files.map((file) => ({ + ...file, + message: message.id, + card: metadata.card + })), reactions: message.reactions.map((reaction) => ({ message: message.id, reaction: reaction.reaction, diff --git a/packages/examples/tsconfig.json b/packages/yaml/tsconfig.json similarity index 100% rename from packages/examples/tsconfig.json rename to packages/yaml/tsconfig.json From 920a81fcd9effcd183e4f4dc99086a6f9a5c3097 Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 1 Apr 2025 18:52:52 +0400 Subject: [PATCH 058/636] Inline migrations (#37) Signed-off-by: Kristina Fefelova --- README.md | 28 ++- bun.lock | 54 ++--- package.json | 8 +- packages/client-query/package.json | 2 +- packages/cockroach/migrations/00_schema.sql | 1 - packages/cockroach/migrations/01_message.sql | 35 ---- packages/cockroach/migrations/02_patch.sql | 16 -- packages/cockroach/migrations/03_files.sql | 20 -- packages/cockroach/migrations/04_reaction.sql | 14 -- packages/cockroach/migrations/05_thread.sql | 15 -- .../cockroach/migrations/06_notification.sql | 38 ---- packages/cockroach/package.json | 2 +- packages/cockroach/src/adapter.ts | 2 + packages/cockroach/src/init.ts | 194 ++++++++++++++++++ packages/query/package.json | 2 +- packages/rest-client/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/server/package.json | 2 +- packages/shared/package.json | 2 +- packages/types/package.json | 2 +- packages/yaml/package.json | 2 +- 21 files changed, 263 insertions(+), 180 deletions(-) delete mode 100644 packages/cockroach/migrations/00_schema.sql delete mode 100644 packages/cockroach/migrations/01_message.sql delete mode 100644 packages/cockroach/migrations/02_patch.sql delete mode 100644 packages/cockroach/migrations/03_files.sql delete mode 100644 packages/cockroach/migrations/04_reaction.sql delete mode 100644 packages/cockroach/migrations/05_thread.sql delete mode 100644 packages/cockroach/migrations/06_notification.sql create mode 100644 packages/cockroach/src/init.ts diff --git a/README.md b/README.md index 4c327ea3b30..f08e34d66cc 100644 --- a/README.md +++ b/README.md @@ -1 +1,27 @@ -# Platform communication package \ No newline at end of file +## 📡 Platform Communication Package + +A modular communication package for 
platform-level messaging — +built with [Bun](https://bun.sh). + +--- + +### 📦 Installation + +Install dependencies using Bun: + +```bash + bun install +``` + +### Build packages + +```bash + bun run bundle +``` + +### Lint & format + +```bash + bun run lint + bun run format +``` \ No newline at end of file diff --git a/bun.lock b/bun.lock index fc721f86802..8c3ba135216 100644 --- a/bun.lock +++ b/bun.lock @@ -5,18 +5,18 @@ "name": "@hcengineering/communication", "devDependencies": { "@eslint/js": "^9.23.0", - "@types/bun": "^1.2.5", - "bun-types": "^1.2.5", + "@types/bun": "^1.2.8", + "bun-types": "^1.2.8", "eslint": "^9.23.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.4", + "eslint-plugin-prettier": "^5.2.5", "prettier": "^3.5.3", - "typescript-eslint": "^8.27.0", + "typescript-eslint": "^8.29.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -49,7 +49,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -67,7 +67,7 @@ }, "packages/rest-client": { "name": "@hcengineering/communication-rest-client", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -85,7 +85,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", @@ -99,7 +99,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", @@ -120,7 +120,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -133,7 +133,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/card": "^0.7.28", "@hcengineering/core": "^0.7.28", @@ -147,7 +147,7 @@ }, "packages/yaml": { "name": "@hcengineering/communication-yaml", - "version": "0.1.147", + "version": "0.1.150", "dependencies": { "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -257,7 +257,7 @@ "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.5", "", { "dependencies": { "bun-types": "1.2.5" } }, "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg=="], + 
"@types/bun": ["@types/bun@1.2.8", "", { "dependencies": { "bun-types": "1.2.7" } }, "sha512-t8L1RvJVUghW5V+M/fL3Thbxcs0HwNsXsnTEBEfEVqGteiJToOlZ/fyOEaR1kZsNqnu+3XA4RI/qmnX4w6+S+w=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -295,21 +295,21 @@ "@types/ws": ["@types/ws@8.18.0", "", { "dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.27.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/type-utils": "8.27.0", "@typescript-eslint/utils": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-4henw4zkePi5p252c8ncBLzLce52SEUz2Ebj8faDnuUXz2UuHEONYcJ+G0oaCF+bYCWVZtrGzq3FD7YXetmnSA=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.29.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/type-utils": "8.29.0", "@typescript-eslint/utils": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-PAIpk/U7NIS6H7TEtN45SPGLQaHNgB7wSjsQV/8+KYokAb2T/gloOA/Bee2yd4/yKVhPKe5LlaUGhAZk5zmSaQ=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.27.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/types": "8.27.0", "@typescript-eslint/typescript-estree": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-XGwIabPallYipmcOk45DpsBSgLC64A0yvdAkrwEzwZ2viqGqRUJ8eEYoPz0CWnutgAFbNMPdsGGvzjSmcWVlEA=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.29.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/types": "8.29.0", "@typescript-eslint/typescript-estree": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8C0+jlNJOwQso2GapCVWWfW/rzaq7Lbme+vGUFKE31djwNncIpgXD7Cd4weEsDdkoZDjH0lwwr3QDQFuyrMg9g=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0" } }, "sha512-8oI9GwPMQmBryaaxG1tOZdxXVeMDte6NyJA4i7/TWa4fBwgnAXYlIQP+uYOeqAaLJ2JRxlG9CAyL+C+YE9Xknw=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0" } }, "sha512-aO1PVsq7Gm+tcghabUpzEnVSFMCU4/nYIgC2GOatJcllvWfnhrgW0ZEbnTxm36QsikmCN1K/6ZgM7fok2I7xNw=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.27.0", "", { "dependencies": 
{ "@typescript-eslint/typescript-estree": "8.27.0", "@typescript-eslint/utils": "8.27.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-wVArTVcz1oJOIEJxui/nRhV0TXzD/zMSOYi/ggCfNq78EIszddXcJb7r4RCp/oBrjt8n9A0BSxRMKxHftpDxDA=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.29.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.29.0", "@typescript-eslint/utils": "8.29.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ahaWQ42JAOx+NKEf5++WC/ua17q5l+j1GFrbbpVKzFL/tKVc0aYY8rVSYUpUvt2hUP1YBr7mwXzx+E/DfUWI9Q=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.27.0", "", {}, "sha512-/6cp9yL72yUHAYq9g6DsAU+vVfvQmd1a8KyA81uvfDE21O2DwQ/qxlM4AR8TSdAu+kJLBDrEHKC5/W2/nxsY0A=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.29.0", "", {}, "sha512-wcJL/+cOXV+RE3gjCyl/V2G877+2faqvlgtso/ZRbTCnZazh0gXhe+7gbAnfubzN2bNsBtZjDvlh7ero8uIbzg=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "@typescript-eslint/visitor-keys": "8.27.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-BnKq8cqPVoMw71O38a1tEb6iebEgGA80icSxW7g+kndx0o6ot6696HjG7NdgfuAVmVEtwXUr3L8R9ZuVjoQL6A=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-yOfen3jE9ISZR/hHpU/bmNvTtBW1NjRbkSFdZOksL1N+ybPEE7UVGMwqvS6CP022Rp00Sb0tdiIkhSCe6NI8ow=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.27.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.27.0", "@typescript-eslint/types": "8.27.0", "@typescript-eslint/typescript-estree": "8.27.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-njkodcwH1yvmo31YWgRHNb/x1Xhhq4/m81PhtvmRngD8iHPehxffz1SNCO+kwaePhATC+kOa/ggmvPoPza5i0Q=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.29.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/types": "8.29.0", "@typescript-eslint/typescript-estree": "8.29.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-gX/A0Mz9Bskm8avSWFcK0gP7cZpbY4AIo6B0hWYFCaIsz750oaiWR4Jr2CI+PQhfW1CpcQr9OlfPS+kMFegjXA=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.27.0", "", { "dependencies": { "@typescript-eslint/types": "8.27.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-WsXQwMkILJvffP6z4U3FYJPlbf/j07HIxmDjZpbNvBJkMfvwXj5ACRkkHwBDvLBbDbtX5TdU64/rcvKJ/vuInQ=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-Sne/pVz8ryR03NFK21VpN88dZ2FdQXOlq3VIklbrTYEt8yXtRFr9tvUhqvCeKjqYk5FSim37sHbooT6vzBTZcg=="], 
"acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], @@ -333,7 +333,7 @@ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.5", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg=="], + "bun-types": ["bun-types@1.2.8", "", { "dependencies": { "@types/node": "*", "@types/ws": "*" } }, "sha512-D5npfxKIGuYe9dTHLK1hi4XFmbMdKYoLrgyd25rrUyCrnyU4ljmQW7vDdonvibKeyU72mZuixIhQ2J+q6uM0Mg=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -375,7 +375,7 @@ "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], - "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.4", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.10.2" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-SFtuYmnhwYCtuCDTKPoK+CEzCnEgKTU2qTLwoCxvrC0MFBTIXo1i6hDYOI4cwHaE5GZtlWmTN3YfucYi7KJwPw=="], + "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.5", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.10.2" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-IKKP8R87pJyMl7WWamLgPkloB16dagPIdd2FjBDbyRYPKo93wS/NbCOPh6gH+ieNLC+XZrhJt/kWj0PS/DFdmg=="], "eslint-scope": ["eslint-scope@8.3.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ=="], @@ -549,7 +549,7 @@ "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.27.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.27.0", "@typescript-eslint/parser": "8.27.0", "@typescript-eslint/utils": "8.27.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ZZ/8+Y0rRUMuW1gJaPtLWe4ryHbsPLzzibk5Sq+IFa2aOH1Vo0gPr1fbA6pOnzBke7zC2Da4w8AyCgxKXo3lqA=="], + "typescript-eslint": ["typescript-eslint@8.29.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.29.0", "@typescript-eslint/parser": "8.29.0", "@typescript-eslint/utils": "8.29.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ep9rVd9B4kQsZ7ZnWCVxUE/xDLUUUsRzE0poAeNu+4CkFErLfuvPt/qtm2EpnSyfvsR0S6QzDFSrPCFBwf64fg=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], @@ -569,9 +569,9 @@ "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", 
{}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], - "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + "@types/bun/bun-types": ["bun-types@1.2.7", "", { "dependencies": { "@types/node": "*", "@types/ws": "*" } }, "sha512-P4hHhk7kjF99acXqKvltyuMQ2kf/rzIw3ylEDpCxDS9Xa0X0Yp/gJu/vDCucmWpiur5qJ0lwB2bWzOXa2GlHqA=="], - "bun-types/@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="], + "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], diff --git a/package.json b/package.json index 5e298dc5c53..6a74f35ee7a 100644 --- a/package.json +++ b/package.json @@ -9,12 +9,12 @@ }, "devDependencies": { "@eslint/js": "^9.23.0", - "@types/bun": "^1.2.5", - "bun-types": "^1.2.5", + "@types/bun": "^1.2.8", + "bun-types": "^1.2.8", "eslint": "^9.23.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.4", + "eslint-plugin-prettier": "^5.2.5", "prettier": "^3.5.3", - "typescript-eslint": "^8.27.0" + "typescript-eslint": "^8.29.0" } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 430773b9b15..6a96b56d79a 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/migrations/00_schema.sql b/packages/cockroach/migrations/00_schema.sql deleted file mode 100644 index ed4ea1dc0c5..00000000000 --- a/packages/cockroach/migrations/00_schema.sql +++ /dev/null @@ -1 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS communication; diff --git a/packages/cockroach/migrations/01_message.sql b/packages/cockroach/migrations/01_message.sql deleted file mode 100644 index 37f45dbef7a..00000000000 --- a/packages/cockroach/migrations/01_message.sql +++ /dev/null @@ -1,35 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.messages -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - id INT8 NOT NULL, - - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, - - type VARCHAR(255) NOT NULL, - data JSONB NOT NULL DEFAULT '{}', - - - PRIMARY KEY (workspace_id, card_id, id) -); - -CREATE INDEX IF NOT EXISTS idx_messages_workspace_card ON communication.messages (workspace_id, card_id); -CREATE INDEX IF NOT EXISTS idx_messages_workspace_card_id ON communication.messages (workspace_id, card_id, id); - -CREATE TABLE IF NOT EXISTS communication.messages_groups -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - blob_id UUID NOT NULL, - - from_sec TIMESTAMPTZ(0) NOT NULL, - to_sec TIMESTAMPTZ(0) NOT NULL, - count INT NOT NULL, - - PRIMARY KEY (workspace_id, card_id, blob_id) -); - - -CREATE INDEX IF NOT EXISTS idx_messages_groups_workspace_card ON 
communication.messages_groups (workspace_id, card_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/02_patch.sql b/packages/cockroach/migrations/02_patch.sql deleted file mode 100644 index 1c2037b0467..00000000000 --- a/packages/cockroach/migrations/02_patch.sql +++ /dev/null @@ -1,16 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.patch -( - id INT8 NOT NULL DEFAULT unique_rowid(), - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - type VARCHAR(255) NOT NULL, - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, - message_created_sec TIMESTAMPTZ(0) NOT NULL, - - PRIMARY KEY (id) -); - -CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message ON communication.patch (workspace_id, card_id, message_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/03_files.sql b/packages/cockroach/migrations/03_files.sql deleted file mode 100644 index 06892450412..00000000000 --- a/packages/cockroach/migrations/03_files.sql +++ /dev/null @@ -1,20 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.files -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - - blob_id UUID NOT NULL, - filename VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - size INT8 NOT NULL, - - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - message_created_sec TIMESTAMPTZ(0) NOT NULL, - - PRIMARY KEY (workspace_id, card_id, message_id, blob_id) -); - -CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx ON communication.files (workspace_id, card_id, message_id); diff --git a/packages/cockroach/migrations/04_reaction.sql b/packages/cockroach/migrations/04_reaction.sql deleted file mode 100644 index 30e8ce02a70..00000000000 --- a/packages/cockroach/migrations/04_reaction.sql +++ /dev/null @@ -1,14 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.reactions -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - reaction VARCHAR(100) NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - FOREIGN KEY (workspace_id, card_id, message_id) REFERENCES communication.messages (workspace_id, card_id, id) ON DELETE CASCADE, - PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction) -); - -CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message ON communication.reactions (workspace_id, card_id, message_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/05_thread.sql b/packages/cockroach/migrations/05_thread.sql deleted file mode 100644 index 73d6e50120b..00000000000 --- a/packages/cockroach/migrations/05_thread.sql +++ /dev/null @@ -1,15 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.thread -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - thread_id VARCHAR(255) NOT NULL, - replies_count INT NOT NULL, - last_reply TIMESTAMPTZ NOT NULL, - - PRIMARY KEY (workspace_id, thread_id), - UNIQUE (workspace_id, card_id, message_id) -); - -CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, thread_id); -CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, card_id, message_id); \ No newline at end of file diff --git a/packages/cockroach/migrations/06_notification.sql b/packages/cockroach/migrations/06_notification.sql deleted file mode 100644 index 88d60cd21f0..00000000000 --- 
a/packages/cockroach/migrations/06_notification.sql +++ /dev/null @@ -1,38 +0,0 @@ -CREATE TABLE IF NOT EXISTS communication.notification_context -( - id INT8 NOT NULL DEFAULT unique_rowid(), - - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - account UUID NOT NULL, - - last_view TIMESTAMPTZ NOT NULL DEFAULT now(), - last_update TIMESTAMPTZ NOT NULL DEFAULT now(), - - PRIMARY KEY (id), - UNIQUE (workspace_id, card_id, account) -); - -CREATE TABLE IF NOT EXISTS communication.notifications -( - id INT8 NOT NULL DEFAULT unique_rowid(), - context_id INT8 NOT NULL, - message_id INT8, - created TIMESTAMPTZ NOT NULL, - content JSONB NOT NULL DEFAULT '{}', - - PRIMARY KEY (id), - FOREIGN KEY (context_id) REFERENCES communication.notification_context (id) ON DELETE CASCADE -); - -CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notifications (context_id); - -CREATE TABLE IF NOT EXISTS communication.collaborators -( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - account UUID NOT NULL, - date TIMESTAMPTZ NOT NULL DEFAULT now(), - - PRIMARY KEY (workspace_id, card_id, account) -); \ No newline at end of file diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 49719134759..0c0fa78a48d 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index c471d490931..a596d9c81df 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -46,6 +46,7 @@ import { NotificationsDb } from './db/notification' import { connect, type PostgresClientReference } from './connection' import { type Logger, type Options, type SqlClient, type SqlParams, type SqlRow } from './types' import { injectVars } from './utils' +import { initSchema } from './init' export class CockroachAdapter implements DbAdapter { private readonly message: MessagesDb @@ -204,6 +205,7 @@ export async function createDbAdapter( const greenUrl = process.env.GREEN_URL ?? '' const connection = connect(connectionString) const sql = await connection.getClient() + await initSchema(sql) if (greenUrl !== '') { const client = new GreenClient(greenUrl, sql) diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts new file mode 100644 index 00000000000..528aac3859c --- /dev/null +++ b/packages/cockroach/src/init.ts @@ -0,0 +1,194 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import type postgres from 'postgres' + +const migrationsTableName = 'communication._migrations' + +export async function initSchema(sql: postgres.Sql) { + console.log('🗃️ Initializing schema...') + await sql.unsafe('CREATE SCHEMA IF NOT EXISTS communication;') + await sql.unsafe(`CREATE TABLE IF NOT EXISTS ${migrationsTableName} + ( + name VARCHAR(255) NOT NULL, + created_on TIMESTAMPTZ NOT NULL DEFAULT now() + )`) + + const appliedMigrations = await sql.unsafe(`SELECT name + FROM ${migrationsTableName}`) + const appliedNames = appliedMigrations.map((it) => it.name) + + const migrations = [migrationO1()] + for (const [name, sqlString] of migrations) { + if (appliedNames.includes(name)) continue + try { + await sql.unsafe(sqlString) + await sql.unsafe( + `INSERT INTO ${migrationsTableName}(name) + VALUES ($1::varchar);`, + [name] + ) + console.log(`✅ Migration ${name} applied`) + } catch (err) { + console.error(`❌ Failed on ${name}:`, err) + throw err + } + } + console.log('🎉 All migrations complete') +} + +function migrationO1(): [string, string] { + const sql = ` + CREATE TABLE IF NOT EXISTS communication.messages + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + id INT8 NOT NULL, + + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, + + type VARCHAR(255) NOT NULL, + data JSONB NOT NULL DEFAULT '{}', + + + PRIMARY KEY (workspace_id, card_id, id) + ); + + CREATE INDEX IF NOT EXISTS idx_messages_workspace_card ON communication.messages (workspace_id, card_id); + CREATE INDEX IF NOT EXISTS idx_messages_workspace_card_id ON communication.messages (workspace_id, card_id, id); + + CREATE TABLE IF NOT EXISTS communication.messages_groups + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + blob_id UUID NOT NULL, + + from_sec TIMESTAMPTZ(0) NOT NULL, + to_sec TIMESTAMPTZ(0) NOT NULL, + count INT NOT NULL, + + PRIMARY KEY (workspace_id, card_id, blob_id) + ); + + + CREATE INDEX IF NOT EXISTS idx_messages_groups_workspace_card ON communication.messages_groups (workspace_id, card_id); + CREATE TABLE IF NOT EXISTS communication.patch + ( + id INT8 NOT NULL DEFAULT unique_rowid(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + type VARCHAR(255) NOT NULL, + content TEXT NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, + message_created_sec TIMESTAMPTZ(0) NOT NULL, + + PRIMARY KEY (id) + ); + + CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message ON communication.patch (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.files + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + + blob_id UUID NOT NULL, + filename VARCHAR(255) NOT NULL, + type VARCHAR(255) NOT NULL, + size INT8 NOT NULL, + + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), + + message_created_sec TIMESTAMPTZ(0) NOT NULL, + + PRIMARY KEY (workspace_id, card_id, message_id, blob_id) + ); + + CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx ON communication.files (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.reactions + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + reaction VARCHAR(100) NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), + + FOREIGN KEY (workspace_id, card_id, message_id) REFERENCES communication.messages (workspace_id, card_id, id) ON 
DELETE CASCADE, + PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction) + ); + + CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message ON communication.reactions (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.thread + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + thread_id VARCHAR(255) NOT NULL, + replies_count INT NOT NULL, + last_reply TIMESTAMPTZ NOT NULL, + + PRIMARY KEY (workspace_id, thread_id), + UNIQUE (workspace_id, card_id, message_id) + ); + + CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, thread_id); + CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.notification_context + ( + id INT8 NOT NULL DEFAULT unique_rowid(), + + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + account UUID NOT NULL, + + last_view TIMESTAMPTZ NOT NULL DEFAULT now(), + last_update TIMESTAMPTZ NOT NULL DEFAULT now(), + + PRIMARY KEY (id), + UNIQUE (workspace_id, card_id, account) + ); + + CREATE TABLE IF NOT EXISTS communication.notifications + ( + id INT8 NOT NULL DEFAULT unique_rowid(), + context_id INT8 NOT NULL, + message_id INT8, + created TIMESTAMPTZ NOT NULL, + content JSONB NOT NULL DEFAULT '{}', + + PRIMARY KEY (id), + FOREIGN KEY (context_id) REFERENCES communication.notification_context (id) ON DELETE CASCADE + ); + + CREATE INDEX IF NOT EXISTS notification_context_idx ON communication.notifications (context_id); + + CREATE TABLE IF NOT EXISTS communication.collaborators + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + account UUID NOT NULL, + date TIMESTAMPTZ NOT NULL DEFAULT now(), + + PRIMARY KEY (workspace_id, card_id, account) + ); + ` + return ['init_tables_01', sql] +} diff --git a/packages/query/package.json b/packages/query/package.json index 0afa492501d..7a29f25abb0 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/rest-client/package.json b/packages/rest-client/package.json index 0ece05e977d..8e83956e506 100644 --- a/packages/rest-client/package.json +++ b/packages/rest-client/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-rest-client", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index e43b1fd9634..5b49f95b686 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.147", + "version": "0.1.150", "main": "./dist/index.cjs", "module": "./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server/package.json b/packages/server/package.json index 684fb03b52f..083cbb46eea 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/shared/package.json b/packages/shared/package.json index 
6ec1fa97bc8..4b4af0853f4 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-shared", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 1501b2565db..2ed88f59524 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/yaml/package.json b/packages/yaml/package.json index c97729c12c1..fbeb357649e 100644 --- a/packages/yaml/package.json +++ b/packages/yaml/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-yaml", - "version": "0.1.147", + "version": "0.1.150", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", From f64956485630d5789676a5bf5b99be66c2444d14 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 2 Apr 2025 15:32:30 +0400 Subject: [PATCH 059/636] Fix token generation (#38) Signed-off-by: Kristina Fefelova --- bun.lock | 19 ++++++++++--------- packages/client-query/package.json | 2 +- packages/cockroach/package.json | 2 +- packages/cockroach/src/init.ts | 4 ++++ packages/query/package.json | 2 +- packages/rest-client/package.json | 2 +- packages/sdk-types/package.json | 2 +- packages/server/package.json | 5 +++-- packages/server/src/index.ts | 7 +++++++ packages/server/src/metadata.ts | 4 +++- packages/shared/package.json | 2 +- packages/types/package.json | 2 +- packages/yaml/package.json | 2 +- 13 files changed, 35 insertions(+), 20 deletions(-) diff --git a/bun.lock b/bun.lock index 8c3ba135216..57a74f61e42 100644 --- a/bun.lock +++ b/bun.lock @@ -16,7 +16,7 @@ }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -49,7 +49,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -67,7 +67,7 @@ }, "packages/rest-client": { "name": "@hcengineering/communication-rest-client", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -85,7 +85,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", @@ -99,13 +99,14 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", 
"@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", + "@hcengineering/platform": "^0.7.28", "@hcengineering/server-token": "^0.7.28", }, "devDependencies": { @@ -120,7 +121,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -133,7 +134,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/card": "^0.7.28", "@hcengineering/core": "^0.7.28", @@ -147,7 +148,7 @@ }, "packages/yaml": { "name": "@hcengineering/communication-yaml", - "version": "0.1.150", + "version": "0.1.155", "dependencies": { "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 6a96b56d79a..0211cfd81d9 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/package.json b/packages/cockroach/package.json index 0c0fa78a48d..a995c6460ad 100644 --- a/packages/cockroach/package.json +++ b/packages/cockroach/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-cockroach", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index 528aac3859c..1470fa7765f 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -17,7 +17,10 @@ import type postgres from 'postgres' const migrationsTableName = 'communication._migrations' +let isInitialized = false + export async function initSchema(sql: postgres.Sql) { + if(isInitialized) return console.log('🗃️ Initializing schema...') await sql.unsafe('CREATE SCHEMA IF NOT EXISTS communication;') await sql.unsafe(`CREATE TABLE IF NOT EXISTS ${migrationsTableName} @@ -46,6 +49,7 @@ export async function initSchema(sql: postgres.Sql) { throw err } } + isInitialized = true console.log('🎉 All migrations complete') } diff --git a/packages/query/package.json b/packages/query/package.json index 7a29f25abb0..47230de7cf3 100644 --- a/packages/query/package.json +++ b/packages/query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-query", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/rest-client/package.json b/packages/rest-client/package.json index 8e83956e506..08a376cd1bf 100644 --- a/packages/rest-client/package.json +++ b/packages/rest-client/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-rest-client", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 5b49f95b686..ccd0cd96735 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.150", + "version": "0.1.155", "main": "./dist/index.cjs", "module": 
"./dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/server/package.json b/packages/server/package.json index 083cbb46eea..a851822c0e8 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-server", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", @@ -25,7 +25,8 @@ "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", - "@hcengineering/server-token": "^0.7.28" + "@hcengineering/server-token": "^0.7.28", + "@hcengineering/platform": "^0.7.28" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 95fa81eb95e..b96e89c8b1a 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -33,6 +33,9 @@ import type { RequestEvent, ServerApi } from '@hcengineering/communication-sdk-types' +import {setMetadata} from "@hcengineering/platform"; +import serverToken from "@hcengineering/server-token"; + import { type BroadcastSessionsFunc, Manager } from './manager' import { getMetadata, type Metadata } from './metadata' @@ -60,7 +63,11 @@ export class Api implements ServerApi { const db = await createDbAdapter(dbUrl, workspace, ctx, { withLogs: process.env.COMMUNICATION_TIME_LOGGING_ENABLED === 'true' }) + const metadata = getMetadata() + + setMetadata(serverToken.metadata.Secret, metadata.secret) + return new Api(ctx, metadata, workspace, db, broadcast) } diff --git a/packages/server/src/metadata.ts b/packages/server/src/metadata.ts index 7a4a0f3fe27..b09fb681afd 100644 --- a/packages/server/src/metadata.ts +++ b/packages/server/src/metadata.ts @@ -16,11 +16,13 @@ export interface Metadata { msg2fileUrl: string accountsUrl: string + secret?: string } export function getMetadata(): Metadata { return { msg2fileUrl: process.env.MSG2FILE_URL ?? '', - accountsUrl: process.env.ACCOUNTS_URL ?? '' + accountsUrl: process.env.ACCOUNTS_URL ?? 
'', + secret: process.env.SERVER_SECRET } } diff --git a/packages/shared/package.json b/packages/shared/package.json index 4b4af0853f4..79928a2d7bc 100644 --- a/packages/shared/package.json +++ b/packages/shared/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-shared", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/types/package.json b/packages/types/package.json index 2ed88f59524..4846ee4e744 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-types", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/yaml/package.json b/packages/yaml/package.json index fbeb357649e..3ece1a663b5 100644 --- a/packages/yaml/package.json +++ b/packages/yaml/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-yaml", - "version": "0.1.150", + "version": "0.1.155", "main": "dist/index.cjs", "module": "dist/index.js", "types": "./types/index.d.ts", From 16c78f4bd4dc2624a686f045c01992411f3e841f Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 11 Apr 2025 17:35:27 +0400 Subject: [PATCH 060/636] Init personal labels for cards (#39) Signed-off-by: Kristina Fefelova --- bun.lock | 73 ++++---- package.json | 12 +- packages/client-query/package.json | 2 +- packages/client-query/src/index.ts | 6 +- packages/client-query/src/query.ts | 37 ++-- packages/cockroach/package.json | 2 +- packages/cockroach/src/adapter.ts | 25 ++- packages/cockroach/src/db/label.ts | 102 +++++++++++ packages/cockroach/src/db/mapping.ts | 20 ++- packages/cockroach/src/db/notification.ts | 15 +- packages/cockroach/src/db/schema.ts | 17 +- packages/cockroach/src/init.ts | 34 +++- packages/query/package.json | 2 +- packages/query/src/label/query.ts | 163 ++++++++++++++++++ packages/query/src/lq.ts | 40 +++-- packages/query/src/messages/query.ts | 29 ++-- .../query/src/notification-contexts/query.ts | 26 +-- packages/query/src/notifications/query.ts | 19 +- packages/query/src/types.ts | 25 ++- packages/rest-client/package.json | 2 +- packages/rest-client/src/rest.ts | 111 ++++++++++-- packages/rest-client/src/types.ts | 33 +++- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/client.ts | 2 +- packages/sdk-types/src/db.ts | 12 +- packages/sdk-types/src/event.ts | 39 +++++ packages/sdk-types/src/index.ts | 3 +- packages/sdk-types/src/query.ts | 8 +- .../sdk-types/src/requestEvents/common.ts | 18 ++ packages/sdk-types/src/requestEvents/label.ts | 44 +++++ .../message.ts} | 111 +++--------- .../src/requestEvents/notification.ts | 101 +++++++++++ .../sdk-types/src/responseEvents/common.ts | 18 ++ .../sdk-types/src/responseEvents/label.ts | 45 +++++ .../message.ts} | 111 +++--------- .../src/responseEvents/notification.ts | 90 ++++++++++ packages/sdk-types/src/serverApi.ts | 8 +- packages/server/package.json | 2 +- packages/server/src/eventProcessor.ts | 129 +++++++++----- packages/server/src/index.ts | 69 +++----- packages/server/src/manager.ts | 55 +++--- .../server/src/notification/notification.ts | 47 +++-- packages/server/src/permissions.ts | 34 ++-- packages/server/src/triggers.ts | 116 ++++++++++--- packages/shared/package.json | 2 +- packages/types/package.json | 5 +- packages/types/src/core.ts | 3 +- packages/types/src/index.ts | 11 +- packages/types/src/label.ts | 33 ++++ packages/types/src/notification.ts | 4 
+- packages/types/src/query.ts | 10 +- packages/yaml/package.json | 2 +- 52 files changed, 1432 insertions(+), 497 deletions(-) create mode 100644 packages/cockroach/src/db/label.ts create mode 100644 packages/query/src/label/query.ts create mode 100644 packages/sdk-types/src/event.ts create mode 100644 packages/sdk-types/src/requestEvents/common.ts create mode 100644 packages/sdk-types/src/requestEvents/label.ts rename packages/sdk-types/src/{requestEvent.ts => requestEvents/message.ts} (53%) create mode 100644 packages/sdk-types/src/requestEvents/notification.ts create mode 100644 packages/sdk-types/src/responseEvents/common.ts create mode 100644 packages/sdk-types/src/responseEvents/label.ts rename packages/sdk-types/src/{responseEvent.ts => responseEvents/message.ts} (51%) create mode 100644 packages/sdk-types/src/responseEvents/notification.ts create mode 100644 packages/types/src/label.ts diff --git a/bun.lock b/bun.lock index 57a74f61e42..8ad9f987c61 100644 --- a/bun.lock +++ b/bun.lock @@ -4,19 +4,19 @@ "": { "name": "@hcengineering/communication", "devDependencies": { - "@eslint/js": "^9.23.0", - "@types/bun": "^1.2.8", - "bun-types": "^1.2.8", - "eslint": "^9.23.0", + "@eslint/js": "^9.24.0", + "@types/bun": "^1.2.9", + "bun-types": "^1.2.9", + "eslint": "^9.24.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.5", + "eslint-plugin-prettier": "^5.2.6", "prettier": "^3.5.3", - "typescript-eslint": "^8.29.0", + "typescript-eslint": "^8.29.1", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -32,7 +32,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -49,7 +49,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -67,7 +67,7 @@ }, "packages/rest-client": { "name": "@hcengineering/communication-rest-client", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -85,7 +85,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", @@ -99,7 +99,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", @@ -121,7 +121,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-types": "workspace:*", }, @@ -134,10 +134,11 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/card": "^0.7.28", "@hcengineering/core": "^0.7.28", + "@hcengineering/tags": "^0.7.28", }, 
"devDependencies": { "@types/bun": "^1.1.14", @@ -148,7 +149,7 @@ }, "packages/yaml": { "name": "@hcengineering/communication-yaml", - "version": "0.1.155", + "version": "0.1.166", "dependencies": { "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -172,7 +173,7 @@ "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], - "@eslint/config-array": ["@eslint/config-array@0.19.2", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w=="], + "@eslint/config-array": ["@eslint/config-array@0.20.0", "", { "dependencies": { "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" } }, "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ=="], "@eslint/config-helpers": ["@eslint/config-helpers@0.2.0", "", {}, "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ=="], @@ -180,7 +181,7 @@ "@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="], - "@eslint/js": ["@eslint/js@9.23.0", "", {}, "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw=="], + "@eslint/js": ["@eslint/js@9.24.0", "", {}, "sha512-uIY/y3z0uvOGX8cp1C2fiC4+ZmBhp6yZWkojtHL1YEMnRt1Y63HB9TM17proGEmeG7HeUY+UP36F0aknKYTpYA=="], "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], @@ -224,12 +225,18 @@ "@hcengineering/platform": ["@hcengineering/platform@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.28/2126ff97284b679ceab3c86f24febd73a3a28712", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-0zEifwsLdOs/M6TZ9xnPrQTTlhwW28sooiyL4ZfQsiC2AJUy05vLP5/W8V9eDLta+EiR5bjtlulGsg1GfPjUoQ=="], + "@hcengineering/preference": ["@hcengineering/preference@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/preference/0.7.28/f1739991cc012d194fa18f99565ae5bd64f0ad07", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-WwwOfjZ1TLdTfPhQGvSjo8dS63HyuE7BDVhfstxP8D+H4BI5UKYoev/6vlwQf/3d+jI1S4f2ivO//IDJAD7sGg=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.28/b1cffcb628aa0c6ac73171ef340c8ccf0f85ab07", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "jwt-simple": "^0.5.6" } }, "sha512-TaHwzxuoFKN6jnWHIZk0UcRmB+UQ1QE1HZzUh5doPSFUYOAtTrSb32iJDA8e8oEADcJpOp9RGIUP/EP/VED7BQ=="], + "@hcengineering/tags": ["@hcengineering/tags@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/tags/0.7.28/42545cc43e3c61e32ce96be87c245fa929f5cbb6", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28", "@hcengineering/view": "^0.7.28" } }, 
"sha512-2KCVUu5N/GAkDsDRbdUi7gnBq4ze1Z3eCLoc8nz2G/zeAjAga8gmWkZEbXqAC8rNVqBQv72EKeJFGJyqnev6Xw=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.28/557640a7ba2ffaa4d6e5411a54577f60dc528300", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/platform": "^0.7.28", "svelte": "^4.2.19" } }, "sha512-Hv3wQKHr327TC7wMHCBiG4bMtg8vSlb7+UFwdKI8ztEs3359meqkXu+exqi7Nl5334voXIz5ZFxjwZwOMzNSFA=="], "@hcengineering/ui": ["@hcengineering/ui@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.28/82025fc775702ea8c8569611356662bfbf28eb74", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/theme": "^0.7.28", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "plyr": "^3.7.8", "svelte": "^4.2.19" } }, "sha512-QsUpO0+Idu1n2dC6KtorPJ6SSp30It8FHSuG3Y+Awv4td/ftVocOSFqhskbk+us9Yw9KqOYgkqhGYEHbfRfw4Q=="], + "@hcengineering/view": ["@hcengineering/view@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/view/0.7.28/8aad5b07d3750463a78aae1b9482361f2ff9f45e", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/preference": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-cG0isakqIxuiAG7T2KyRnztfOp3falr+oG77Qod915EeBKNEDqXfNrD29Dp04Wwf5TowifDT9smLzAyH8l6kgg=="], + "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], "@humanfs/node": ["@humanfs/node@0.16.6", "", { "dependencies": { "@humanfs/core": "^0.19.1", "@humanwhocodes/retry": "^0.3.0" } }, "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw=="], @@ -254,11 +261,11 @@ "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], - "@pkgr/core": ["@pkgr/core@0.2.0", "", {}, "sha512-vsJDAkYR6qCPu+ioGScGiMYR7LvZYIXh/dlQeviqoTWNCVfKTLYD/LkNWH4Mxsv2a5vpIRc77FN5DnmK1eBggQ=="], + "@pkgr/core": ["@pkgr/core@0.2.1", "", {}, "sha512-VzgHzGblFmUeBmmrk55zPyrQIArQN4vujc9shWytaPdB3P7qhi0cpaiKIr7tlCmFv2lYUwnLospIqjL9ZSAhhg=="], "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.8", "", { "dependencies": { "bun-types": "1.2.7" } }, "sha512-t8L1RvJVUghW5V+M/fL3Thbxcs0HwNsXsnTEBEfEVqGteiJToOlZ/fyOEaR1kZsNqnu+3XA4RI/qmnX4w6+S+w=="], + "@types/bun": ["@types/bun@1.2.9", "", { "dependencies": { "bun-types": "1.2.9" } }, "sha512-epShhLGQYc4Bv/aceHbmBhOz1XgUnuTZgcxjxk+WXwNyDXavv5QHD1QEFV0FwbTSQtNq6g4ZcV6y0vZakTjswg=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -296,21 +303,21 @@ "@types/ws": ["@types/ws@8.18.0", "", { "dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.29.0", "", { 
"dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/type-utils": "8.29.0", "@typescript-eslint/utils": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-PAIpk/U7NIS6H7TEtN45SPGLQaHNgB7wSjsQV/8+KYokAb2T/gloOA/Bee2yd4/yKVhPKe5LlaUGhAZk5zmSaQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.29.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.29.1", "@typescript-eslint/type-utils": "8.29.1", "@typescript-eslint/utils": "8.29.1", "@typescript-eslint/visitor-keys": "8.29.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ba0rr4Wfvg23vERs3eB+P3lfj2E+2g3lhWcCVukUuhtcdUx5lSIFZlGFEBHKr+3zizDa/TvZTptdNHVZWAkSBg=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.29.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/types": "8.29.0", "@typescript-eslint/typescript-estree": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8C0+jlNJOwQso2GapCVWWfW/rzaq7Lbme+vGUFKE31djwNncIpgXD7Cd4weEsDdkoZDjH0lwwr3QDQFuyrMg9g=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.29.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.29.1", "@typescript-eslint/types": "8.29.1", "@typescript-eslint/typescript-estree": "8.29.1", "@typescript-eslint/visitor-keys": "8.29.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-zczrHVEqEaTwh12gWBIJWj8nx+ayDcCJs06yoNMY0kwjMWDM6+kppljY+BxWI06d2Ja+h4+WdufDcwMnnMEWmg=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0" } }, "sha512-aO1PVsq7Gm+tcghabUpzEnVSFMCU4/nYIgC2GOatJcllvWfnhrgW0ZEbnTxm36QsikmCN1K/6ZgM7fok2I7xNw=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.29.1", "", { "dependencies": { "@typescript-eslint/types": "8.29.1", "@typescript-eslint/visitor-keys": "8.29.1" } }, "sha512-2nggXGX5F3YrsGN08pw4XpMLO1Rgtnn4AzTegC2MDesv6q3QaTU5yU7IbS1tf1IwCR0Hv/1EFygLn9ms6LIpDA=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.29.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.29.0", "@typescript-eslint/utils": "8.29.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ahaWQ42JAOx+NKEf5++WC/ua17q5l+j1GFrbbpVKzFL/tKVc0aYY8rVSYUpUvt2hUP1YBr7mwXzx+E/DfUWI9Q=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.29.1", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.29.1", "@typescript-eslint/utils": "8.29.1", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, 
"sha512-DkDUSDwZVCYN71xA4wzySqqcZsHKic53A4BLqmrWFFpOpNSoxX233lwGu/2135ymTCR04PoKiEEEvN1gFYg4Tw=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.29.0", "", {}, "sha512-wcJL/+cOXV+RE3gjCyl/V2G877+2faqvlgtso/ZRbTCnZazh0gXhe+7gbAnfubzN2bNsBtZjDvlh7ero8uIbzg=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.29.1", "", {}, "sha512-VT7T1PuJF1hpYC3AGm2rCgJBjHL3nc+A/bhOp9sGMKfi5v0WufsX/sHCFBfNTx2F+zA6qBc/PD0/kLRLjdt8mQ=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "@typescript-eslint/visitor-keys": "8.29.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-yOfen3jE9ISZR/hHpU/bmNvTtBW1NjRbkSFdZOksL1N+ybPEE7UVGMwqvS6CP022Rp00Sb0tdiIkhSCe6NI8ow=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.29.1", "", { "dependencies": { "@typescript-eslint/types": "8.29.1", "@typescript-eslint/visitor-keys": "8.29.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-l1enRoSaUkQxOQnbi0KPUtqeZkSiFlqrx9/3ns2rEDhGKfTa+88RmXqedC1zmVTOWrLc2e6DEJrTA51C9iLH5g=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.29.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.29.0", "@typescript-eslint/types": "8.29.0", "@typescript-eslint/typescript-estree": "8.29.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-gX/A0Mz9Bskm8avSWFcK0gP7cZpbY4AIo6B0hWYFCaIsz750oaiWR4Jr2CI+PQhfW1CpcQr9OlfPS+kMFegjXA=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.29.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.29.1", "@typescript-eslint/types": "8.29.1", "@typescript-eslint/typescript-estree": "8.29.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-QAkFEbytSaB8wnmB+DflhUPz6CLbFWE2SnSCrRMEa+KnXIzDYbpsn++1HGvnfAsUY44doDXmvRkO5shlM/3UfA=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.29.0", "", { "dependencies": { "@typescript-eslint/types": "8.29.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-Sne/pVz8ryR03NFK21VpN88dZ2FdQXOlq3VIklbrTYEt8yXtRFr9tvUhqvCeKjqYk5FSim37sHbooT6vzBTZcg=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.29.1", "", { "dependencies": { "@typescript-eslint/types": "8.29.1", "eslint-visitor-keys": "^4.2.0" } }, "sha512-RGLh5CRaUEf02viP5c1Vh1cMGffQscyHe7HPAzGpfmfflFg1wUz2rYxd+OZqwpeypYvZ8UxSxuIpF++fmOzEcg=="], "acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], @@ -334,7 +341,7 @@ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.8", "", { "dependencies": { "@types/node": "*", "@types/ws": "*" } }, "sha512-D5npfxKIGuYe9dTHLK1hi4XFmbMdKYoLrgyd25rrUyCrnyU4ljmQW7vDdonvibKeyU72mZuixIhQ2J+q6uM0Mg=="], + "bun-types": ["bun-types@1.2.9", "", { "dependencies": { "@types/node": "*", 
"@types/ws": "*" } }, "sha512-dk/kOEfQbajENN/D6FyiSgOKEuUi9PWfqKQJEgwKrCMWbjS/S6tEXp178mWvWAcUSYm9ArDlWHZKO3T/4cLXiw=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -372,11 +379,11 @@ "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.23.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.19.2", "@eslint/config-helpers": "^0.2.0", "@eslint/core": "^0.12.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.23.0", "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw=="], + "eslint": ["eslint@9.24.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.20.0", "@eslint/config-helpers": "^0.2.0", "@eslint/core": "^0.12.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.24.0", "@eslint/plugin-kit": "^0.2.7", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-eh/jxIEJyZrvbWRe4XuVclLPDYSYYYgLy5zXGGxD6j8zjSAxFEzI2fL/8xNq6O2yKqVt+eF2YhV+hxjV6UKXwQ=="], "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], - "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.5", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.10.2" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "optionalPeers": 
["@types/eslint", "eslint-config-prettier"] }, "sha512-IKKP8R87pJyMl7WWamLgPkloB16dagPIdd2FjBDbyRYPKo93wS/NbCOPh6gH+ieNLC+XZrhJt/kWj0PS/DFdmg=="], + "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.6", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.11.0" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ=="], "eslint-scope": ["eslint-scope@8.3.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ=="], @@ -538,7 +545,7 @@ "svelte": ["svelte@4.2.19", "", { "dependencies": { "@ampproject/remapping": "^2.2.1", "@jridgewell/sourcemap-codec": "^1.4.15", "@jridgewell/trace-mapping": "^0.3.18", "@types/estree": "^1.0.1", "acorn": "^8.9.0", "aria-query": "^5.3.0", "axobject-query": "^4.0.0", "code-red": "^1.0.3", "css-tree": "^2.3.1", "estree-walker": "^3.0.3", "is-reference": "^3.0.1", "locate-character": "^3.0.0", "magic-string": "^0.30.4", "periscopic": "^3.1.0" } }, "sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw=="], - "synckit": ["synckit@0.10.2", "", { "dependencies": { "@pkgr/core": "^0.2.0", "tslib": "^2.8.1" } }, "sha512-cSGiaCPhFzeFIQY8KKEacv46LclENY4d60jgkwCrKomvRkIjtMyss1dPkHLp/62c1leuOjEedB1+lWcwqTJSvA=="], + "synckit": ["synckit@0.11.3", "", { "dependencies": { "@pkgr/core": "^0.2.1", "tslib": "^2.8.1" } }, "sha512-szhWDqNNI9etJUvbZ1/cx1StnZx8yMmFxme48SwR4dty4ioSY50KEZlpv0qAfgc1fpRzuh9hBXEzoCpJ779dLg=="], "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], @@ -550,7 +557,7 @@ "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.29.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.29.0", "@typescript-eslint/parser": "8.29.0", "@typescript-eslint/utils": "8.29.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-ep9rVd9B4kQsZ7ZnWCVxUE/xDLUUUsRzE0poAeNu+4CkFErLfuvPt/qtm2EpnSyfvsR0S6QzDFSrPCFBwf64fg=="], + "typescript-eslint": ["typescript-eslint@8.29.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.29.1", "@typescript-eslint/parser": "8.29.1", "@typescript-eslint/utils": "8.29.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-f8cDkvndhbQMPcysk6CUSGBWV+g1utqdn71P5YKwMumVMOG/5k7cHq0KyG4O52nB0oKS4aN2Tp5+wB4APJGC+w=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], @@ -570,8 +577,6 @@ "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], - "@types/bun/bun-types": ["bun-types@1.2.7", "", { "dependencies": { "@types/node": "*", "@types/ws": "*" } }, "sha512-P4hHhk7kjF99acXqKvltyuMQ2kf/rzIw3ylEDpCxDS9Xa0X0Yp/gJu/vDCucmWpiur5qJ0lwB2bWzOXa2GlHqA=="], - 
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], diff --git a/package.json b/package.json index 6a74f35ee7a..9d809825de3 100644 --- a/package.json +++ b/package.json @@ -8,13 +8,13 @@ "format": "prettier --write packages/**/src/*.ts && bun run lint" }, "devDependencies": { - "@eslint/js": "^9.23.0", - "@types/bun": "^1.2.8", - "bun-types": "^1.2.8", - "eslint": "^9.23.0", + "@eslint/js": "^9.24.0", + "@types/bun": "^1.2.9", + "bun-types": "^1.2.9", + "eslint": "^9.24.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.5", + "eslint-plugin-prettier": "^5.2.6", "prettier": "^3.5.3", - "typescript-eslint": "^8.29.0" + "typescript-eslint": "^8.29.1" } } diff --git a/packages/client-query/package.json b/packages/client-query/package.json index 0211cfd81d9..eb97bcedcf1 100644 --- a/packages/client-query/package.json +++ b/packages/client-query/package.json @@ -1,6 +1,6 @@ { "name": "@hcengineering/communication-client-query", - "version": "0.1.155", + "version": "0.1.166", "main": "dist/index.js", "module": "dist/index.js", "types": "./types/index.d.ts", diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index 5ebcd40b158..8054edea478 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -16,7 +16,7 @@ import { LiveQueries, type QueryClient } from '@hcengineering/communication-query' import type { WorkspaceID } from '@hcengineering/communication-types' -import { MessagesQuery, NotificationContextsQuery, NotificationsQuery } from './query' +import { LabelsQuery, MessagesQuery, NotificationContextsQuery, NotificationsQuery } from './query' let lq: LiveQueries let onDestroy: (fn: () => void) => void = () => {} @@ -33,6 +33,10 @@ export function createNotificationContextsQuery(): NotificationContextsQuery { return new NotificationContextsQuery(lq, onDestroy) } +export function createLabelsQuery(): LabelsQuery { + return new LabelsQuery(lq, onDestroy) +} + export function initLiveQueries( client: QueryClient, workspace: WorkspaceID, diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 45331d3155d..bc4585cd8a8 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -14,20 +14,20 @@ // import { type LiveQueries } from '@hcengineering/communication-query' -import type { - MessagesQueryCallback, - NotificationsQueryCallback, - QueryCallback -} from '@hcengineering/communication-sdk-types' +import type { PagedQueryCallback, QueryCallback } from '@hcengineering/communication-sdk-types' import { + type FindLabelsParams, type FindMessagesParams, type FindNotificationContextParams, type FindNotificationsParams, - type NotificationContext + type Label, + type Message, + type NotificationContext, + type Notification } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' -class BaseQuery

, C extends QueryCallback> {
+class BaseQuery
, C extends (r: any) => void> {
   private oldQuery: P | undefined
   private oldCallback: C | undefined
@@ -78,16 +78,16 @@ class BaseQuery

, C extends QueryCallback> { } } -export class MessagesQuery extends BaseQuery { - override createQuery(params: FindMessagesParams, callback: MessagesQueryCallback): { unsubscribe: () => void } { +export class MessagesQuery extends BaseQuery> { + override createQuery(params: FindMessagesParams, callback: PagedQueryCallback): { unsubscribe: () => void } { return this.lq.queryMessages(params, callback) } } -export class NotificationsQuery extends BaseQuery { +export class NotificationsQuery extends BaseQuery> { override createQuery( params: FindNotificationsParams, - callback: NotificationsQueryCallback + callback: PagedQueryCallback ): { unsubscribe: () => void } { @@ -97,14 +97,25 @@ export class NotificationsQuery extends BaseQuery + PagedQueryCallback > { override createQuery( params: FindNotificationContextParams, - callback: QueryCallback + callback: PagedQueryCallback ): { unsubscribe: () => void } { return this.lq.queryNotificationContexts(params, callback) } } + +export class LabelsQuery extends BaseQuery> { + override createQuery( + params: FindLabelsParams, + callback: QueryCallback

, C extends (r: any) => void> {
   private oldQuery: P | undefined
@@ -78,8 +78,8 @@ class BaseQuery

, C extends (r: any) => void> { } } -export class MessagesQuery extends BaseQuery> { - override createQuery(params: FindMessagesParams, callback: PagedQueryCallback): { unsubscribe: () => void } { +export class MessagesQuery extends BaseQuery> { + override createQuery(params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: () => void } { return this.lq.queryMessages(params, callback) } } diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts index 580b18d7b6e..b377c23485b 100644 --- a/packages/query/src/index.ts +++ b/packages/query/src/index.ts @@ -14,3 +14,5 @@ // export * from './lq' + +export type { MessageQueryParams } from './types' diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 8cbbedb8b73..37ac9519f5e 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -34,7 +34,7 @@ import type { FindClient } from '@hcengineering/communication-sdk-types' -import type { FindParams, QueryId, AnyQuery } from './types' +import type { FindParams, QueryId, AnyQuery, MessageQueryParams } from './types' import { MessagesQuery } from './messages/query' import { NotificationQuery } from './notifications/query' import { NotificationContextsQuery } from './notification-contexts/query' @@ -76,7 +76,7 @@ export class LiveQueries { } } - queryMessages(params: FindMessagesParams, callback: PagedQueryCallback): CreateQueryResult { + queryMessages(params: MessageQueryParams, callback: PagedQueryCallback): CreateQueryResult { return this.createAndStoreQuery( params, callback, diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 5b3d1a1556e..7296330323a 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -652,11 +652,29 @@ export class MessagesQuery implements PagedQuery { if (tmp) this.result.delete(tmp) this.tmpMessages.delete(eventId) } + const lastMessage = this.result.getLast() + const firstMessage = this.result.getFirst() + + function shouldResort(order: SortingOrder): boolean { + if (firstMessage == null || lastMessage == null) return false + if (order === SortingOrder.Ascending) { + return lastMessage.created > message.created + } + return firstMessage.created > message.created + } if (this.params.order === SortingOrder.Ascending) { this.result.push(message) } else { this.result.unshift(message) } + + if (shouldResort(this.params.order ?? SortingOrder.Ascending)) { + this.result.sort((a, b) => + this.params.order === SortingOrder.Ascending + ? 
a.created.getTime() - b.created.getTime() + : b.created.getTime() - a.created.getTime() + ) + } await this.notify() } } From d0aaaa312ad18d9f633febaad6cff73631842d43 Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 22 Apr 2025 15:20:17 +0400 Subject: [PATCH 068/636] Fix extran id constraint (#47) Signed-off-by: Kristina Fefelova --- .version | 2 +- packages/cockroach/src/init.ts | 20 +++++++++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/.version b/.version index d26f7fe8bb8..d3bfa591c2f 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.172 +0.1.173 diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index 29a598f78b9..d23925e3795 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -68,7 +68,9 @@ function getMigrations(): [string, string][] { migrationV5_3(), migrationV5_4(), migrationV5_5(), - migrationV5_6() + migrationV5_6(), + migrationV6_1(), + migrationV6_2() ] } @@ -339,3 +341,19 @@ function migrationV5_6(): [string, string] { ` return ['migrate-constraints_v5_6', sql] } + +function migrationV6_1(): [string, string] { + const sql = ` + DROP INDEX IF EXISTS communication.idx_messages_unique_external_id CASCADE; + ` + return ['message_drop_external_id_unique_index', sql] +} + +function migrationV6_2(): [string, string] { + const sql = ` + CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_unique_workspace_card_external_id + ON communication.messages (workspace_id, card_id, external_id) + WHERE external_id IS NOT NULL; + ` + return ['idx_messages_unique_workspace_card_external_id', sql] +} From 4f1876a928eadb687067af1bf6a5e0c646bcf613 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 1 May 2025 09:11:42 +0400 Subject: [PATCH 069/636] Update version (#48) Signed-off-by: Kristina Fefelova --- .version | 2 +- bun.lock | 80 +++++++++++++++++++++++++++++++++++++--------------- package.json | 8 +++--- 3 files changed, 63 insertions(+), 27 deletions(-) diff --git a/.version b/.version index d3bfa591c2f..c3170be411d 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.173 +0.1.174 diff --git a/bun.lock b/bun.lock index 0f77f81df5b..16ef1832cc7 100644 --- a/bun.lock +++ b/bun.lock @@ -5,14 +5,14 @@ "name": "@hcengineering/communication", "devDependencies": { "@eslint/js": "^9.25.1", - "@types/bun": "^1.2.10", - "bun-types": "^1.2.10", + "@types/bun": "^1.2.11", + "bun-types": "^1.2.11", "eslint": "^9.25.1", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.6", "prettier": "^3.5.3", - "turbo": "^2.5.0", - "typescript-eslint": "^8.31.0", + "turbo": "^2.5.2", + "typescript-eslint": "^8.31.1", }, }, "packages/client-query": { @@ -269,7 +269,7 @@ "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + "@types/bun": ["@types/bun@1.2.11", "", { "dependencies": { "bun-types": "1.2.11" } }, "sha512-ZLbbI91EmmGwlWTRWuV6J19IUiUC5YQ3TCEuSHI3usIP75kuoA8/0PVF+LTrbEnVc8JIhpElWOxv1ocI1fJBbw=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -307,21 +307,21 @@ "@types/ws": ["@types/ws@8.18.0", "", { 
"dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.31.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.31.0", "@typescript-eslint/type-utils": "8.31.0", "@typescript-eslint/utils": "8.31.0", "@typescript-eslint/visitor-keys": "8.31.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-evaQJZ/J/S4wisevDvC1KFZkPzRetH8kYZbkgcTRyql3mcKsf+ZFDV1BVWUGTCAW5pQHoqn5gK5b8kn7ou9aFQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.31.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/type-utils": "8.31.1", "@typescript-eslint/utils": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-oUlH4h1ABavI4F0Xnl8/fOtML/eu8nI2A1nYd+f+55XI0BLu+RIqKoCiZKNo6DtqZBEQm5aNKA20G3Z5w3R6GQ=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.31.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.31.0", "@typescript-eslint/types": "8.31.0", "@typescript-eslint/typescript-estree": "8.31.0", "@typescript-eslint/visitor-keys": "8.31.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-67kYYShjBR0jNI5vsf/c3WG4u+zDnCTHTPqVMQguffaWWFs7artgwKmfwdifl+r6XyM5LYLas/dInj2T0SgJyw=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.31.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/types": "8.31.1", "@typescript-eslint/typescript-estree": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-oU/OtYVydhXnumd0BobL9rkJg7wFJ9bFFPmSmB/bf/XWN85hlViji59ko6bSKBXyseT9V8l+CN1nwmlbiN0G7Q=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.31.0", "", { "dependencies": { "@typescript-eslint/types": "8.31.0", "@typescript-eslint/visitor-keys": "8.31.0" } }, "sha512-knO8UyF78Nt8O/B64i7TlGXod69ko7z6vJD9uhSlm0qkAbGeRUSudcm0+K/4CrRjrpiHfBCjMWlc08Vav1xwcw=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1" } }, "sha512-BMNLOElPxrtNQMIsFHE+3P0Yf1z0dJqV9zLdDxN/xLlWMlXK/ApEsVEKzpizg9oal8bAT5Sc7+ocal7AC1HCVw=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.31.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.31.0", "@typescript-eslint/utils": "8.31.0", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-DJ1N1GdjI7IS7uRlzJuEDCgDQix3ZVYVtgeWEyhyn4iaoitpMBX6Ndd488mXSx0xah/cONAkEaYyylDyAeHMHg=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.31.1", "", { "dependencies": { 
"@typescript-eslint/typescript-estree": "8.31.1", "@typescript-eslint/utils": "8.31.1", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-fNaT/m9n0+dpSp8G/iOQ05GoHYXbxw81x+yvr7TArTuZuCA6VVKbqWYVZrV5dVagpDTtj/O8k5HBEE/p/HM5LA=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.31.0", "", {}, "sha512-Ch8oSjVyYyJxPQk8pMiP2FFGYatqXQfQIaMp+TpuuLlDachRWpUAeEu1u9B/v/8LToehUIWyiKcA/w5hUFRKuQ=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.31.1", "", {}, "sha512-SfepaEFUDQYRoA70DD9GtytljBePSj17qPxFHA/h3eg6lPTqGJ5mWOtbXCk1YrVU1cTJRd14nhaXWFu0l2troQ=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.31.0", "", { "dependencies": { "@typescript-eslint/types": "8.31.0", "@typescript-eslint/visitor-keys": "8.31.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-xLmgn4Yl46xi6aDSZ9KkyfhhtnYI15/CvHbpOy/eR5NWhK/BK8wc709KKwhAR0m4ZKRP7h07bm4BWUYOCuRpQQ=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-kaA0ueLe2v7KunYOyWYtlf/QhhZb7+qh4Yw6Ni5kgukMIG+iP773tjgBiLWIXYumWCwEq3nLW+TUywEp8uEeag=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.31.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.31.0", "@typescript-eslint/types": "8.31.0", "@typescript-eslint/typescript-estree": "8.31.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-qi6uPLt9cjTFxAb1zGNgTob4x9ur7xC6mHQJ8GwEzGMGE9tYniublmJaowOJ9V2jUzxrltTPfdG2nKlWsq0+Ww=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.31.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/types": "8.31.1", "@typescript-eslint/typescript-estree": "8.31.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-2DSI4SNfF5T4oRveQ4nUrSjUqjMND0nLq9rEkz0gfGr3tg0S5KB6DhwR+WZPCjzkZl3cH+4x2ce3EsL50FubjQ=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.31.0", "", { "dependencies": { "@typescript-eslint/types": "8.31.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-QcGHmlRHWOl93o64ZUMNewCdwKGU6WItOU52H0djgNmn1EOrhVudrDzXz4OycCRSCPwFCDrE2iIt5vmuUdHxuQ=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "eslint-visitor-keys": "^4.2.0" } }, "sha512-I+/rgqOVBn6f0o7NDTmAPWWC6NuqhV174lfYvAm9fUaWeiefLdux9/YI3/nLugEn9L8fcSi0XmpKi/r5u0nmpw=="], "acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], @@ -345,7 +345,7 @@ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.10", "", { "dependencies": { 
"@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + "bun-types": ["bun-types@1.2.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-dbkp5Lo8HDrXkLrONm6bk+yiiYQSntvFUzQp0v3pzTAsXk6FtgVMjdQ+lzFNVAmQFUkPQZ3WMZqH5tTo+Dp/IA=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -557,25 +557,25 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "turbo": ["turbo@2.5.0", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.0", "turbo-darwin-arm64": "2.5.0", "turbo-linux-64": "2.5.0", "turbo-linux-arm64": "2.5.0", "turbo-windows-64": "2.5.0", "turbo-windows-arm64": "2.5.0" }, "bin": { "turbo": "bin/turbo" } }, "sha512-PvSRruOsitjy6qdqwIIyolv99+fEn57gP6gn4zhsHTEcCYgXPhv6BAxzAjleS8XKpo+Y582vTTA9nuqYDmbRuA=="], + "turbo": ["turbo@2.5.2", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.2", "turbo-darwin-arm64": "2.5.2", "turbo-linux-64": "2.5.2", "turbo-linux-arm64": "2.5.2", "turbo-windows-64": "2.5.2", "turbo-windows-arm64": "2.5.2" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Qo5lfuStr6LQh3sPQl7kIi243bGU4aHGDQJUf6ylAdGwks30jJFloc9NYHP7Y373+gGU9OS0faA4Mb5Sy8X9Xw=="], - "turbo-darwin-64": ["turbo-darwin-64@2.5.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-fP1hhI9zY8hv0idym3hAaXdPi80TLovmGmgZFocVAykFtOxF+GlfIgM/l4iLAV9ObIO4SUXPVWHeBZQQ+Hpjag=="], + "turbo-darwin-64": ["turbo-darwin-64@2.5.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-2aIl0Sx230nLk+Cg2qSVxvPOBWCZpwKNuAMKoROTvWKif6VMpkWWiR9XEPoz7sHeLmCOed4GYGMjL1bqAiIS/g=="], - "turbo-darwin-arm64": ["turbo-darwin-arm64@2.5.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-p9sYq7kXH7qeJwIQE86cOWv/xNqvow846l6c/qWc26Ib1ci5W7V0sI5thsrP3eH+VA0d+SHalTKg5SQXgNQBWA=="], + "turbo-darwin-arm64": ["turbo-darwin-arm64@2.5.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-MrFYhK/jYu8N6QlqZtqSHi3e4QVxlzqU3ANHTKn3/tThuwTLbNHEvzBPWSj5W7nZcM58dCqi6gYrfRz6bJZyAA=="], - "turbo-linux-64": ["turbo-linux-64@2.5.0", "", { "os": "linux", "cpu": "x64" }, "sha512-1iEln2GWiF3iPPPS1HQJT6ZCFXynJPd89gs9SkggH2EJsj3eRUSVMmMC8y6d7bBbhBFsiGGazwFIYrI12zs6uQ=="], + "turbo-linux-64": ["turbo-linux-64@2.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-LxNqUE2HmAJQ/8deoLgMUDzKxd5bKxqH0UBogWa+DF+JcXhtze3UTMr6lEr0dEofdsEUYK1zg8FRjglmwlN5YA=="], - "turbo-linux-arm64": ["turbo-linux-arm64@2.5.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-bKBcbvuQHmsX116KcxHJuAcppiiBOfivOObh2O5aXNER6mce7YDDQJy00xQQNp1DhEfcSV2uOsvb3O3nN2cbcA=="], + "turbo-linux-arm64": ["turbo-linux-arm64@2.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-0MI1Ao1q8zhd+UUbIEsrM+yLq1BsrcJQRGZkxIsHFlGp7WQQH1oR3laBgfnUCNdCotCMD6w4moc9pUbXdOR3bg=="], - "turbo-windows-64": ["turbo-windows-64@2.5.0", "", { "os": "win32", "cpu": "x64" }, "sha512-9BCo8oQ7BO7J0K913Czbc3tw8QwLqn2nTe4E47k6aVYkM12ASTScweXPTuaPFP5iYXAT6z5Dsniw704Ixa5eGg=="], + "turbo-windows-64": ["turbo-windows-64@2.5.2", "", { "os": "win32", "cpu": "x64" }, "sha512-hOLcbgZzE5ttACHHyc1ajmWYq4zKT42IC3G6XqgiXxMbS+4eyVYTL+7UvCZBd3Kca1u4TLQdLQjeO76zyDJc2A=="], - "turbo-windows-arm64": ["turbo-windows-arm64@2.5.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-OUHCV+ueXa3UzfZ4co/ueIHgeq9B2K48pZwIxKSm5VaLVuv8M13MhM7unukW09g++dpdrrE1w4IOVgxKZ0/exg=="], + "turbo-windows-arm64": ["turbo-windows-arm64@2.5.2", "", { "os": "win32", "cpu": "arm64" }, 
"sha512-fMU41ABhSLa18H8V3Z7BMCGynQ8x+wj9WyBMvWm1jeyRKgkvUYJsO2vkIsy8m0vrwnIeVXKOIn6eSe1ddlBVqw=="], "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.31.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.31.0", "@typescript-eslint/parser": "8.31.0", "@typescript-eslint/utils": "8.31.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-u+93F0sB0An8WEAPtwxVhFby573E8ckdjwUUQUj9QA4v8JAvgtoDdIyYR3XFwFHq2W1KJ1AurwJCO+w+Y1ixyQ=="], + "typescript-eslint": ["typescript-eslint@8.31.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.31.1", "@typescript-eslint/parser": "8.31.1", "@typescript-eslint/utils": "8.31.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-j6DsEotD/fH39qKzXTQRwYYWlt7D+0HmfpOK+DVhwJOFLcdmn92hq3mBb7HlKJHbjjI/gTOqEcc9d6JfpFf/VA=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], @@ -595,12 +595,48 @@ "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], + "@hcengineering/communication-client-query/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-cockroach/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-query/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-rest-client/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-sdk-types/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-server/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-shared/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-types/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + + "@hcengineering/communication-yaml/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": 
{ "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], + "@hcengineering/communication-client-query/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-cockroach/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-query/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-rest-client/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-sdk-types/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-server/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-shared/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-types/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + + "@hcengineering/communication-yaml/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], } } diff --git a/package.json b/package.json index d83c6736efe..76c9429aa4b 100644 --- a/package.json +++ b/package.json @@ -13,14 +13,14 @@ }, "devDependencies": { "@eslint/js": "^9.25.1", - "@types/bun": "^1.2.10", - "bun-types": "^1.2.10", + "@types/bun": "^1.2.11", + "bun-types": "^1.2.11", "eslint": "^9.25.1", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.2.6", "prettier": "^3.5.3", - "typescript-eslint": "^8.31.0", - "turbo": "^2.5.0" 
+ "typescript-eslint": "^8.31.1", + "turbo": "^2.5.2" }, "packageManager": "bun@1.2.9" } From a3fb7d0c58d9398630dca402fb16a78fe7cff25f Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 5 May 2025 23:42:04 +0400 Subject: [PATCH 070/636] Fix messages query (#49) Signed-off-by: Kristina Fefelova --- .version | 2 +- bun.lock | 244 ++++++++++++++++------ package.json | 12 +- packages/client-query/src/index.ts | 2 + packages/query/src/lq.ts | 2 +- packages/query/src/messages/query.ts | 299 ++++++++++++++++++++------- packages/query/src/types.ts | 20 +- 7 files changed, 425 insertions(+), 156 deletions(-) diff --git a/.version b/.version index c3170be411d..1e17a1ebab0 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.174 +0.1.175 diff --git a/bun.lock b/bun.lock index 16ef1832cc7..b988aadafef 100644 --- a/bun.lock +++ b/bun.lock @@ -4,20 +4,20 @@ "": { "name": "@hcengineering/communication", "devDependencies": { - "@eslint/js": "^9.25.1", - "@types/bun": "^1.2.11", - "bun-types": "^1.2.11", - "eslint": "^9.25.1", + "@eslint/js": "^9.26.0", + "@types/bun": "^1.2.12", + "bun-types": "^1.2.12", + "eslint": "^9.26.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.6", + "eslint-plugin-prettier": "^5.4.0", "prettier": "^3.5.3", "turbo": "^2.5.2", - "typescript-eslint": "^8.31.1", + "typescript-eslint": "^8.32.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-query": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", @@ -33,7 +33,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -50,7 +50,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -69,7 +69,7 @@ }, "packages/rest-client": { "name": "@hcengineering/communication-rest-client", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -87,7 +87,7 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-types": "workspace:*", "@hcengineering/core": "^0.7.28", @@ -101,7 +101,7 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/account-client": "^0.7.28", "@hcengineering/communication-cockroach": "workspace:*", @@ -124,7 +124,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -138,7 +138,7 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/card": "^0.7.28", "@hcengineering/core": "^0.7.28", @@ -153,7 +153,7 @@ }, "packages/yaml": { "name": "@hcengineering/communication-yaml", - 
"version": "0.1.0", + "version": "0.1.174-beta.2", "dependencies": { "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -185,7 +185,7 @@ "@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="], - "@eslint/js": ["@eslint/js@9.25.1", "", {}, "sha512-dEIwmjntEx8u3Uvv+kr3PDeeArL8Hw07H9kyYxCjnM9pBjfEhk6uLXSchxxzgiwtRhhzVzqmUSDFBOi1TuZ7qg=="], + "@eslint/js": ["@eslint/js@9.26.0", "", {}, "sha512-I9XlJawFdSMvWjDt6wksMCrgns5ggLNfFwFvnShsleWruvXM514Qxk8V246efTw+eo9JABvVz+u3q2RiAowKxQ=="], "@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="], @@ -259,6 +259,8 @@ "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], + "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.11.0", "", { "dependencies": { "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.3", "eventsource": "^3.0.2", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.23.8", "zod-to-json-schema": "^3.24.1" } }, "sha512-k/1pb70eD638anoi0e8wUGAlbMJXyvdV4p62Ko+EZ7eBe1xMx8Uhak1R5DgfoofsK5IBBnRwsYGTaLZl+6/+RQ=="], + "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], @@ -269,7 +271,7 @@ "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.11", "", { "dependencies": { "bun-types": "1.2.11" } }, "sha512-ZLbbI91EmmGwlWTRWuV6J19IUiUC5YQ3TCEuSHI3usIP75kuoA8/0PVF+LTrbEnVc8JIhpElWOxv1ocI1fJBbw=="], + "@types/bun": ["@types/bun@1.2.12", "", { "dependencies": { "bun-types": "1.2.12" } }, "sha512-lY/GQTXDGsolT/TiH72p1tuyUORuRrdV7VwOTOjDOt8uTBJQOJc5zz3ufwwDl0VBaoxotSk4LdP0hhjLJ6ypIQ=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -307,21 +309,23 @@ "@types/ws": ["@types/ws@8.18.0", "", { "dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.31.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/type-utils": "8.31.1", "@typescript-eslint/utils": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "graphemer": "^1.4.0", "ignore": "^5.3.1", 
"natural-compare": "^1.4.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-oUlH4h1ABavI4F0Xnl8/fOtML/eu8nI2A1nYd+f+55XI0BLu+RIqKoCiZKNo6DtqZBEQm5aNKA20G3Z5w3R6GQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.32.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/type-utils": "8.32.0", "@typescript-eslint/utils": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-/jU9ettcntkBFmWUzzGgsClEi2ZFiikMX5eEQsmxIAWMOn4H3D4rvHssstmAHGVvrYnaMqdWWWg0b5M6IN/MTQ=="], + + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.32.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-B2MdzyWxCE2+SqiZHAjPphft+/2x2FlO9YBx7eKE1BCb+rqBlQdhtAEhzIEdozHd55DXPmxBdpMygFJjfjjA9A=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.31.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/types": "8.31.1", "@typescript-eslint/typescript-estree": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-oU/OtYVydhXnumd0BobL9rkJg7wFJ9bFFPmSmB/bf/XWN85hlViji59ko6bSKBXyseT9V8l+CN1nwmlbiN0G7Q=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1" } }, "sha512-BMNLOElPxrtNQMIsFHE+3P0Yf1z0dJqV9zLdDxN/xLlWMlXK/ApEsVEKzpizg9oal8bAT5Sc7+ocal7AC1HCVw=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.32.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/utils": "8.32.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-t2vouuYQKEKSLtJaa5bB4jHeha2HJczQ6E5IXPDPgIty9EqcJxpr1QHQ86YyIPwDwxvUmLfP2YADQ5ZY4qddZg=="], - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.31.1", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.31.1", "@typescript-eslint/utils": "8.31.1", "debug": "^4.3.4", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-fNaT/m9n0+dpSp8G/iOQ05GoHYXbxw81x+yvr7TArTuZuCA6VVKbqWYVZrV5dVagpDTtj/O8k5HBEE/p/HM5LA=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], - 
"@typescript-eslint/types": ["@typescript-eslint/types@8.31.1", "", {}, "sha512-SfepaEFUDQYRoA70DD9GtytljBePSj17qPxFHA/h3eg6lPTqGJ5mWOtbXCk1YrVU1cTJRd14nhaXWFu0l2troQ=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "@typescript-eslint/visitor-keys": "8.31.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.0.1" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-kaA0ueLe2v7KunYOyWYtlf/QhhZb7+qh4Yw6Ni5kgukMIG+iP773tjgBiLWIXYumWCwEq3nLW+TUywEp8uEeag=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.31.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "8.31.1", "@typescript-eslint/types": "8.31.1", "@typescript-eslint/typescript-estree": "8.31.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-2DSI4SNfF5T4oRveQ4nUrSjUqjMND0nLq9rEkz0gfGr3tg0S5KB6DhwR+WZPCjzkZl3cH+4x2ce3EsL50FubjQ=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-1rYQTCLFFzOI5Nl0c8LUpJT8HxpwVRn9E4CkMsYfuN6ctmQqExjSTzzSk0Tz2apmXy7WU6/6fyaZVVA/thPN+w=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.31.1", "", { "dependencies": { "@typescript-eslint/types": "8.31.1", "eslint-visitor-keys": "^4.2.0" } }, "sha512-I+/rgqOVBn6f0o7NDTmAPWWC6NuqhV174lfYvAm9fUaWeiefLdux9/YI3/nLugEn9L8fcSi0XmpKi/r5u0nmpw=="], + "accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], "acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="], @@ -341,11 +345,19 @@ "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + "body-parser": ["body-parser@2.2.0", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.0", "http-errors": "^2.0.0", "iconv-lite": "^0.6.3", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.0", "type-is": "^2.0.0" } }, "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg=="], + 
"brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.11", "", { "dependencies": { "@types/node": "*" } }, "sha512-dbkp5Lo8HDrXkLrONm6bk+yiiYQSntvFUzQp0v3pzTAsXk6FtgVMjdQ+lzFNVAmQFUkPQZ3WMZqH5tTo+Dp/IA=="], + "bun-types": ["bun-types@1.2.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-tvWMx5vPqbRXgE8WUZI94iS1xAYs8bkqESR9cxBB1Wi+urvfTrF1uzuDgBHFAdO0+d2lmsbG3HmeKMvUyj6pWA=="], + + "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], + + "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], + + "call-bound": ["call-bound@1.0.4", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" } }, "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg=="], "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], @@ -359,8 +371,18 @@ "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], + "content-disposition": ["content-disposition@1.0.0", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg=="], + + "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], + + "cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], + + "cookie-signature": ["cookie-signature@1.2.2", "", {}, "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg=="], + "core-js": ["core-js@3.41.0", "", {}, "sha512-SJ4/EHwS36QMJd6h/Rg+GyR4A5xE0FSI3eZ+iBVpfqf1x0eTSg1smWLHrA+2jQThZSh97fmSgFSU8B61nxosxA=="], + "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], "css-tree": ["css-tree@2.3.1", "", { "dependencies": { "mdn-data": "2.0.30", "source-map-js": "^1.0.1" } }, "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw=="], @@ -377,17 +399,33 @@ "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], + "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + "dompurify": ["dompurify@3.2.4", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, 
"sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg=="], + "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + "emojibase": ["emojibase@16.0.0", "", {}, "sha512-Nw2m7JLIO4Ou2X/yZPRNscHQXVbbr6SErjkJ7EooG7MbR3yDZszCv9KTizsXFc7yZl0n3WF+qUKIC/Lw6H9xaQ=="], + "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], + + "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], + + "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + + "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], + + "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - "eslint": ["eslint@9.25.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.20.0", "@eslint/config-helpers": "^0.2.1", "@eslint/core": "^0.13.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.25.1", "@eslint/plugin-kit": "^0.2.8", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-E6Mtz9oGQWDCpV12319d59n4tx9zOTXSTmc8BLVxBx+G/0RdM5MvEEJLU9c0+aleoePYYgVTOsRblx433qmhWQ=="], + "eslint": ["eslint@9.26.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.20.0", "@eslint/config-helpers": "^0.2.1", "@eslint/core": "^0.13.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.26.0", "@eslint/plugin-kit": "^0.2.8", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@modelcontextprotocol/sdk": "^1.8.0", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", 
"espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "zod": "^3.24.2" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-Hx0MOjPh6uK9oq9nVsATZKE/Wlbai7KFjfCuw9UHaguDW3x+HF0O5nIi3ud39TWgrTjTO5nHxmL3R1eANinWHQ=="], "eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="], - "eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.6", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.11.0" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ=="], + "eslint-plugin-prettier": ["eslint-plugin-prettier@5.4.0", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.11.0" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-BvQOvUhkVQM1i63iMETK9Hjud9QhqBnbtT1Zc642p9ynzBuCe5pybkOnvqZIBypXmMlsGcnU4HZ8sCTPfpAexA=="], "eslint-scope": ["eslint-scope@8.3.0", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-pUNxi75F8MJ/GdeKtVLSbYg4ZI34J6C0C7sbL4YOp2exGwen7ZsuBqKzUhXd0qMQ362yET3z+uPwKeg/0C2XCQ=="], @@ -405,6 +443,16 @@ "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], + "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="], + + "eventsource": ["eventsource@3.0.6", "", { "dependencies": { "eventsource-parser": "^3.0.1" } }, "sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA=="], + + "eventsource-parser": ["eventsource-parser@3.0.1", "", {}, "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA=="], + + "express": ["express@5.1.0", "", { "dependencies": { "accepts": "^2.0.0", "body-parser": "^2.2.0", "content-disposition": "^1.0.0", "content-type": "^1.0.5", "cookie": "^0.7.1", "cookie-signature": "^1.2.1", "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "finalhandler": "^2.1.0", "fresh": "^2.0.0", "http-errors": "^2.0.0", "merge-descriptors": "^2.0.0", "mime-types": "^3.0.0", "on-finished": "^2.4.1", "once": "^1.4.0", "parseurl": "^1.3.3", "proxy-addr": "^2.0.7", "qs": "^6.14.0", "range-parser": "^1.2.1", "router": "^2.2.0", "send": "^1.1.0", "serve-static": "^2.2.0", "statuses": "^2.0.1", "type-is": "^2.0.1", "vary": "^1.1.2" } }, "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA=="], + + "express-rate-limit": ["express-rate-limit@7.5.0", "", { "peerDependencies": { "express": "^4.11 
|| 5 || ^5.0.0-beta.1" } }, "sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg=="], + "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], "fast-diff": ["fast-diff@1.3.0", "", {}, "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw=="], @@ -423,36 +471,64 @@ "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], + "finalhandler": ["finalhandler@2.1.0", "", { "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "on-finished": "^2.4.1", "parseurl": "^1.3.3", "statuses": "^2.0.1" } }, "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q=="], + "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], "flat-cache": ["flat-cache@4.0.1", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.4" } }, "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="], "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], + "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="], + + "fresh": ["fresh@2.0.0", "", {}, "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A=="], + + "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], + + "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], + + "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], "globals": ["globals@14.0.0", "", {}, "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="], + "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], + "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], + + "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, 
"sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + "hls.js": ["hls.js@1.5.20", "", {}, "sha512-uu0VXUK52JhihhnN/MVVo1lvqNNuhoxkonqgO3IpjvQiGpJBdIXMGkofjQb/j9zvV7a1SW8U9g1FslWx/1HOiQ=="], + "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], + + "iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + "intl-messageformat": ["intl-messageformat@10.7.15", "", { "dependencies": { "@formatjs/ecma402-abstract": "2.3.3", "@formatjs/fast-memoize": "2.2.6", "@formatjs/icu-messageformat-parser": "2.11.1", "tslib": "2" } }, "sha512-LRyExsEsefQSBjU2p47oAheoKz+EOJxSLDdjOaEjdriajfHsMXOmV/EhMvYSg9bAgCUHasuAC+mcUBe/95PfIg=="], + "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="], + "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], + "is-promise": ["is-promise@4.0.0", "", {}, "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ=="], + "is-reference": ["is-reference@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.6" } }, "sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw=="], "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], @@ -481,18 +557,38 @@ "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], + "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + "mdn-data": ["mdn-data@2.0.30", "", {}, "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="], + "media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="], + + "merge-descriptors": ["merge-descriptors@2.0.0", "", {}, 
"sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="], + "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], + "mime-db": ["mime-db@1.54.0", "", {}, "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ=="], + + "mime-types": ["mime-types@3.0.1", "", { "dependencies": { "mime-db": "^1.54.0" } }, "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA=="], + "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], + "negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], + + "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], + + "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], + + "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], + + "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], + "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], "p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], @@ -501,14 +597,20 @@ "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], + "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], + "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], + "path-to-regexp": ["path-to-regexp@8.2.0", "", {}, "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ=="], + "periscopic": ["periscopic@3.1.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^3.0.0", "is-reference": "^3.0.0" } }, 
"sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw=="], "picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + "pkce-challenge": ["pkce-challenge@5.0.0", "", {}, "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ=="], + "plyr": ["plyr@3.7.8", "", { "dependencies": { "core-js": "^3.26.1", "custom-event-polyfill": "^1.0.7", "loadjs": "^4.2.0", "rangetouch": "^2.0.1", "url-polyfill": "^1.1.12" } }, "sha512-yG/EHDobwbB/uP+4Bm6eUpJ93f8xxHjjk2dYcD1Oqpe1EcuQl5tzzw9Oq+uVAzd2lkM11qZfydSiyIpiB8pgdA=="], "postgres": ["postgres@3.4.5", "", {}, "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg=="], @@ -519,30 +621,60 @@ "prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="], + "proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="], + "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + "qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="], + "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], + "rangetouch": ["rangetouch@2.0.1", "", {}, "sha512-sln+pNSc8NGaHoLzwNBssFSf/rSYkqeBXzX1AtJlkJiUaVSJSbRAWJk+4omsXkN+EJalzkZhWQ3th1m0FpR5xA=="], + "raw-body": ["raw-body@3.0.0", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.6.3", "unpipe": "1.0.0" } }, "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g=="], + "regenerator-runtime": ["regenerator-runtime@0.14.1", "", {}, "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw=="], "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], + "router": ["router@2.2.0", "", { "dependencies": { "debug": "^4.4.0", "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" } }, "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ=="], + "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], + "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + + "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], + "semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, 
"sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="], + "send": ["send@1.2.0", "", { "dependencies": { "debug": "^4.3.5", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "etag": "^1.8.1", "fresh": "^2.0.0", "http-errors": "^2.0.0", "mime-types": "^3.0.1", "ms": "^2.1.3", "on-finished": "^2.4.1", "range-parser": "^1.2.1", "statuses": "^2.0.1" } }, "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw=="], + + "serve-static": ["serve-static@2.2.0", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ=="], + + "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], + "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], + "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], + + "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], + + "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], + + "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + "snappyjs": ["snappyjs@0.7.0", "", {}, "sha512-u5iEEXkMe2EInQio6Wv9LWHOQYRDbD2O9hzS27GpT/lwfIQhTCnHCTqedqHIHe9ZcvQo+9au6vngQayipz1NYw=="], "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], + "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], @@ -553,7 +685,9 @@ "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], - "ts-api-utils": ["ts-api-utils@2.0.1", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, 
"sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w=="], + "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], + + "ts-api-utils": ["ts-api-utils@2.1.0", "", { "peerDependencies": { "typescript": ">=4.8.4" } }, "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="], "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], @@ -573,70 +707,48 @@ "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], + "type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="], + "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.31.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.31.1", "@typescript-eslint/parser": "8.31.1", "@typescript-eslint/utils": "8.31.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-j6DsEotD/fH39qKzXTQRwYYWlt7D+0HmfpOK+DVhwJOFLcdmn92hq3mBb7HlKJHbjjI/gTOqEcc9d6JfpFf/VA=="], + "typescript-eslint": ["typescript-eslint@8.32.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.32.0", "@typescript-eslint/parser": "8.32.0", "@typescript-eslint/utils": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-UMq2kxdXCzinFFPsXc9o2ozIpYCCOiEC46MG3yEh5Vipq6BO27otTtEBZA1fQ66DulEUgE97ucQ/3YY66CPg0A=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], + "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], + "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], "url-polyfill": ["url-polyfill@1.1.13", "", {}, "sha512-tXzkojrv2SujumYthZ/WjF7jaSfNhSXlYMpE5AYdL2I3D7DCeo+mch8KtW2rUuKjDg+3VXODXHVgipt8yGY/eQ=="], "uuid": ["uuid@11.1.0", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A=="], + "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], + "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], + "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], + "yocto-queue": ["yocto-queue@0.1.0", "", {}, 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], "zod": ["zod@3.24.2", "", {}, "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ=="], - "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], - - "@hcengineering/communication-client-query/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-cockroach/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-query/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + "zod-to-json-schema": ["zod-to-json-schema@3.24.5", "", { "peerDependencies": { "zod": "^3.24.1" } }, "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g=="], - "@hcengineering/communication-rest-client/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-sdk-types/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-server/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-shared/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-types/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], - - "@hcengineering/communication-yaml/@types/bun": ["@types/bun@1.2.10", "", { "dependencies": { "bun-types": "1.2.10" } }, "sha512-eilv6WFM3M0c9ztJt7/g80BDusK98z/FrFwseZgT4bXCq2vPhXD4z8R3oddmAn+R/Nmz9vBn4kweJKmGTZj+lg=="], + "@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - - "@hcengineering/communication-client-query/@types/bun/bun-types": 
["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-cockroach/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-query/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-rest-client/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-sdk-types/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + "@typescript-eslint/utils/@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.7.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw=="], - "@hcengineering/communication-server/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-shared/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-types/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], - - "@hcengineering/communication-yaml/@types/bun/bun-types": ["bun-types@1.2.10", "", { "dependencies": { "@types/node": "*" } }, "sha512-b5ITZMnVdf3m1gMvJHG+gIfeJHiQPJak0f7925Hxu6ZN5VKA8AGy4GZ4lM+Xkn6jtWxg5S3ldWvfmXdvnkp3GQ=="], + "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "@typescript-eslint/utils/@eslint-community/eslint-utils/eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], } } diff --git a/package.json b/package.json index 76c9429aa4b..bd859afaee3 100644 --- a/package.json +++ b/package.json @@ -12,14 +12,14 @@ "clean": "turbo run clean" }, "devDependencies": { - "@eslint/js": "^9.25.1", - "@types/bun": "^1.2.11", - "bun-types": "^1.2.11", - "eslint": "^9.25.1", + "@eslint/js": "^9.26.0", + "@types/bun": "^1.2.12", + "bun-types": "^1.2.12", + "eslint": "^9.26.0", "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.2.6", + "eslint-plugin-prettier": "^5.4.0", "prettier": 
"^3.5.3", - "typescript-eslint": "^8.31.1", + "typescript-eslint": "^8.32.0", "turbo": "^2.5.2" }, "packageManager": "bun@1.2.9" diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index 4e4c75ce1fe..17909460ae7 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -19,6 +19,8 @@ import type { FindClient } from '@hcengineering/communication-sdk-types' import { LabelsQuery, MessagesQuery, NotificationContextsQuery, NotificationsQuery } from './query' +export type { MessageQueryParams } from '@hcengineering/communication-query' + let lq: LiveQueries let onDestroy: (fn: () => void) => void = () => {} diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 37ac9519f5e..a91829be8bb 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -44,7 +44,7 @@ interface CreateQueryResult { unsubscribe: () => void } -const maxQueriesCache = 20 +const maxQueriesCache = 40 export class LiveQueries { private readonly queries = new Map() diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 7296330323a..5f9dd20744a 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -14,39 +14,40 @@ // import { + type BlobID, + type File, + type FindMessagesGroupsParams, type FindMessagesParams, type Message, type MessageID, type MessagesGroup, + MessageType, type ParsedFile, type Patch, PatchType, type Reaction, type SocialID, SortingOrder, - type WorkspaceID, - type File, - type BlobID, - MessageType + type WorkspaceID } from '@hcengineering/communication-types' import { + type CreateMessageEvent, + type CreateMessageResult, + type EventResult, type FileCreatedEvent, type FileRemovedEvent, + type FindClient, type MessageCreatedEvent, + MessageRequestEventType, + MessageResponseEventType, type MessagesRemovedEvent, + type PagedQueryCallback, type PatchCreatedEvent, type ReactionCreatedEvent, type ReactionRemovedEvent, - type CreateMessageEvent, type RequestEvent, type ResponseEvent, - type ThreadCreatedEvent, - type EventResult, - type CreateMessageResult, - MessageResponseEventType, - MessageRequestEventType, - type PagedQueryCallback, - type FindClient + type ThreadCreatedEvent } from '@hcengineering/communication-sdk-types' import { applyPatch, applyPatches } from '@hcengineering/communication-shared' import { loadGroupFile } from '@hcengineering/communication-yaml' @@ -56,38 +57,43 @@ import { QueryResult } from '../result' import { defaultQueryParams, Direction, - type PagedQuery, - type QueryId, type MessageQueryParams, - type OneMessageQueryParams + type OneMessageQueryParams, + type PagedQuery, + type QueryId } from '../types' import { WindowImpl } from '../window' -const GROUPS_LIMIT = 20 +const GROUPS_LIMIT = 4 export class MessagesQuery implements PagedQuery { private result: Promise> | QueryResult - private messagesFromFiles: Message[] = [] - private readonly groupsBuffer: MessagesGroup[] = [] private firstGroup?: MessagesGroup private lastGroup?: MessagesGroup + private firstLoadedGroup?: MessagesGroup + private lastLoadedGroup?: MessagesGroup + + private lastGroupsDirection?: Direction private readonly limit: number + private initialized = false private readonly next = { hasMessages: true, - hasGroups: true + hasGroups: true, + buffer: [] as Message[] } private readonly prev = { hasMessages: true, - hasGroups: true + hasGroups: true, + buffer: [] as Message[] } - private tmpMessages: Map = new Map() + private readonly 
tmpMessages = new Map() constructor( private readonly client: FindClient, @@ -110,6 +116,7 @@ export class MessagesQuery implements PagedQuery { if (count < this.limit) { this.result = initialResult + this.initialized = true } else { if (this.params.order === SortingOrder.Ascending) { this.result = new QueryResult(messages.slice(0, baseLimit), (x) => x.id) @@ -121,15 +128,16 @@ export class MessagesQuery implements PagedQuery { this.result.setTail(true) } } + this.initialized = true void this.notify() } else { this.result = new QueryResult([] as Message[], (x) => x.id) if (this.isInitLoadingForward()) { - this.result.setHead(this.params.created == null) + this.result.setHead(this.params.from == null) void this.requestLoadNextPage() } else { - this.result.setTail(this.params.created == null) + this.result.setTail(this.params.from == null) void this.requestLoadPrevPage() } } @@ -209,8 +217,8 @@ export class MessagesQuery implements PagedQuery { resultId = result.id if (this.result instanceof Promise) this.result = await this.result - if (this.result.get(resultId)) { - if (this.result.delete(tmpId)) { + if (this.result.get(resultId) != null) { + if (this.result.delete(tmpId) != null) { await this.notify() } } else { @@ -225,7 +233,7 @@ export class MessagesQuery implements PagedQuery { .catch(async () => { if (this.result instanceof Promise) this.result = await this.result this.tmpMessages.delete(eventId) - if (this.result.delete(tmpId)) { + if (this.result.delete(tmpId) != null) { void this.notify() } }) @@ -319,14 +327,18 @@ export class MessagesQuery implements PagedQuery { if (messages.length === this.limit && this.limit > 1) { const lastMessage = messages.pop() if (lastMessage != null && !fromDb) { - direction === Direction.Forward - ? this.messagesFromFiles.unshift(lastMessage) - : this.messagesFromFiles.push(lastMessage) + if (direction === Direction.Forward) { + this.next.buffer.unshift(lastMessage) + } else { + this.prev.buffer.push(lastMessage) + } } } if (this.params.order === SortingOrder.Ascending && direction === Direction.Backward) { result.prepend(messages.reverse()) + } else if (this.params.order === SortingOrder.Descending && direction === Direction.Forward) { + result.prepend(messages.reverse()) } else { result.append(messages) } @@ -336,14 +348,13 @@ export class MessagesQuery implements PagedQuery { // Load next private async loadNextMessages(result: QueryResult): Promise<{ messages: Message[]; fromDb: boolean }> { - const messages: Message[] = this.messagesFromFiles.splice(0, this.limit) - + const messages: Message[] = this.next.buffer.splice(0, this.limit) if (messages.length >= this.limit) return { messages, fromDb: false } while (this.next.hasGroups || this.groupsBuffer.length > 0) { await this.loadGroups(Direction.Forward, result) - messages.push(...this.messagesFromFiles.splice(0, this.limit - messages.length)) + messages.push(...this.next.buffer.splice(0, this.limit - messages.length)) if (messages.length >= this.limit) return { messages, fromDb: false } } @@ -361,7 +372,7 @@ export class MessagesQuery implements PagedQuery { if (result.isTail()) return [] - const last = result.getLast() + const last = this.params.order === SortingOrder.Ascending ? result.getLast() : result.getFirst() return await this.find({ ...this.params, @@ -370,35 +381,53 @@ export class MessagesQuery implements PagedQuery { ? { greater: last.created } - : this.params.created, + : this.params.from != null + ? 
{ greaterOrEqual: this.params.from } + : undefined, limit, order: SortingOrder.Ascending }) } // Load prev - private async loadPrevMessages(result: QueryResult): Promise<{ messages: Message[]; fromDb: boolean }> { + private async loadPrevMessages( + result: QueryResult + ): Promise<{ messages: Message[]; fromDb: boolean; hasNext?: boolean }> { const messages: Message[] = [] + const prevBuffer = this.prev.buffer + const last = prevBuffer[prevBuffer.length - 1] + + let fromDb = false if (this.prev.hasMessages) { const prevMessages = await this.findPrevMessages(this.limit, result) + const first = prevMessages[0] this.prev.hasMessages = prevMessages.length > 0 - messages.push(...prevMessages) + + if (last == null) { + messages.push(...prevMessages) + fromDb = true + } else if (first != null && first.created < last.created) { + messages.push(...prevMessages) + fromDb = true + } else { + const toPush = this.prev.buffer.splice(-this.limit).reverse() + messages.push(...toPush) + } } - if (messages.length >= this.limit) return { messages, fromDb: true } + if (messages.length >= this.limit) return { messages, fromDb } const restLimit = this.limit - messages.length - const fromBuffer = this.messagesFromFiles.splice(-restLimit, restLimit).reverse() + const fromBuffer = this.prev.buffer.splice(-restLimit).reverse() messages.push(...fromBuffer) if (messages.length >= this.limit) return { messages, fromDb: false } - while (this.prev.hasGroups || this.groupsBuffer.length > 0) { await this.loadGroups(Direction.Backward, result) const rest = this.limit - messages.length - const fromBuffer2 = this.messagesFromFiles.splice(-rest, rest).reverse() + const fromBuffer2 = this.prev.buffer.splice(-rest).reverse() messages.push(...fromBuffer2) if (messages.length >= this.limit) return { messages, fromDb: false } @@ -419,12 +448,54 @@ export class MessagesQuery implements PagedQuery { ? { less: first?.created } - : this.params.created, + : this.params.from != null + ? { lessOrEqual: this.params.from } + : undefined, limit, order: SortingOrder.Descending }) } + getLoadGroupsParams(direction: Direction): Pick | undefined { + if (direction === Direction.Forward) { + if (this.lastGroup != null) { + return { + fromDate: { + greater: this.lastGroup.fromDate + } + } + } + + if (this.params.from instanceof Date) { + return { + toDate: { + greaterOrEqual: this.params.from + } + } + } + } + + if (direction === Direction.Backward) { + if (this.firstGroup != null) { + return { + fromDate: { + less: this.firstGroup.fromDate + } + } + } + + if (this.params.from instanceof Date) { + return { + fromDate: { + lessOrEqual: this.params.from + } + } + } + } + + return undefined + } + private async loadGroups(direction: Direction, result: QueryResult): Promise { let messagesCount = 0 const lastResult = result.getLast() @@ -432,17 +503,24 @@ export class MessagesQuery implements PagedQuery { const toBuffer: MessagesGroup[] = [] while (messagesCount < this.limit) { + if (this.lastGroupsDirection !== direction && this.groupsBuffer.length > 0) { + this.groupsBuffer.length = 0 + this.lastGroup = this.lastLoadedGroup + this.firstGroup = this.firstLoadedGroup + + if (this.lastGroupsDirection === Direction.Backward) { + this.prev.hasGroups = true + } else if (this.lastGroupsDirection === Direction.Forward) { + this.next.hasGroups = true + } + } + this.lastGroupsDirection = direction const currentGroups = this.groupsBuffer.splice(direction === Direction.Forward ? 0 : -GROUPS_LIMIT, GROUPS_LIMIT) const hasGroups = direction === Direction.Forward ? 
this.next.hasGroups : this.prev.hasGroups if (currentGroups.length === 0 && !hasGroups) break const groups = - currentGroups.length > 0 - ? currentGroups - : await this.findGroups( - direction, - direction === Direction.Forward ? this.lastGroup?.fromDate : this.firstGroup?.fromDate - ) + currentGroups.length > 0 ? currentGroups : await this.findGroups(direction, this.getLoadGroupsParams(direction)) if (currentGroups.length === 0) { this.firstGroup = direction === Direction.Forward ? (this.firstGroup ?? groups[0]) : groups[groups.length - 1] @@ -463,6 +541,7 @@ export class MessagesQuery implements PagedQuery { } const orderedGroups = direction === Direction.Forward ? groups : groups.reverse() + while (messagesCount < this.limit && orderedGroups.length > 0) { const group = direction === Direction.Forward ? orderedGroups.shift() : orderedGroups.pop() if (group == null) break @@ -470,6 +549,13 @@ export class MessagesQuery implements PagedQuery { messagesCount += group.count } + this.firstLoadedGroup = + direction === Direction.Forward ? (this.firstLoadedGroup ?? toLoad[0]) : toLoad[toLoad.length - 1] + this.lastLoadedGroup = + direction === Direction.Forward + ? (toLoad[toLoad.length - 1] ?? this.lastLoadedGroup) + : (this.lastLoadedGroup ?? toLoad[0]) + while (orderedGroups.length > 0) { const group = direction === Direction.Forward ? orderedGroups.shift() : orderedGroups.pop() if (group == null) break @@ -492,17 +578,26 @@ export class MessagesQuery implements PagedQuery { const firstInFile = file.messages[0] const queryDate = lastResult != null && firstInFile.created < lastResult?.created ? lastResult?.created : undefined - this.messagesFromFiles.push(...this.matchFileMessages(file, queryDate)) + const { next, prev } = this.matchFileMessages(file, direction, result, queryDate) + this.next.buffer.push(...next) + this.prev.buffer.push(...prev) } else { const lastInFile = file.messages[file.messages.length - 1] const queryDate = lastResult != null && lastInFile.created > lastResult?.created ? lastResult?.created : undefined - this.messagesFromFiles.unshift(...this.matchFileMessages(file, queryDate)) + const matched = this.matchFileMessages(file, direction, result, queryDate) + this.prev.buffer.unshift(...matched.prev) + this.next.buffer.push(...matched.next) } } } - private matchFileMessages(file: ParsedFile, created?: Date): Message[] { + private matchFileMessages( + file: ParsedFile, + direction: Direction, + queryResult: QueryResult, + filterDate?: Date + ): { next: Message[]; prev: Message[] } { let result: Message[] = file.messages const params = this.params if (this.isOneMessageQuery(params)) { @@ -510,14 +605,40 @@ export class MessagesQuery implements PagedQuery { result = msg != null ? [msg] : [] } - if (created != null) { + if (filterDate != null) { result = this.params.order === SortingOrder.Ascending - ? result.filter((it) => it.created > created) - : result.filter((it) => it.created < created) + ? result.filter((it) => it.created > filterDate) + : result.filter((it) => it.created < filterDate) } - return result + let prevResult: Message[] = [] + let nextResult: Message[] = [] + const from = this.initialized ? undefined : this.params.from + + const firstFromQueryResult = + params.order === SortingOrder.Ascending ? queryResult.getFirst() : queryResult.getLast() + const lastFromFile = result[result.length - 1] + if (from instanceof Date) { + for (const message of result) { + const isNext = params.order === SortingOrder.Ascending ? 
message.created >= from : message.created > from + + if (isNext) { + nextResult.push(message) + } else { + prevResult.push(message) + } + } + } else if ( + direction === Direction.Backward && + (firstFromQueryResult == null || lastFromFile.created < firstFromQueryResult.created) + ) { + prevResult = result + } else { + nextResult = result + } + + return { next: nextResult, prev: prevResult } } private async loadMessagesFromFiles(group: MessagesGroup): Promise { @@ -540,11 +661,11 @@ export class MessagesQuery implements PagedQuery { } } - private async findGroupByDate(created: Date): Promise { + private async findGroupByDate(params: Date): Promise { const groups = await this.client.findMessagesGroups({ card: this.params.card, - fromDate: { lessOrEqual: created }, - toDate: { greaterOrEqual: created }, + fromDate: { lessOrEqual: params }, + toDate: { greaterOrEqual: params }, limit: 1, order: SortingOrder.Ascending, orderBy: 'fromDate' @@ -553,13 +674,16 @@ export class MessagesQuery implements PagedQuery { return groups[0] } - private async findGroups(direction: Direction, fromDate?: Date): Promise { + private async findGroups( + direction: Direction, + date?: Pick + ): Promise { if (this.isOneMessageQuery(this.params)) { const group = await this.findGroupByDate(this.params.created) return group !== undefined ? [group] : [] } - if (fromDate == null) { + if (date == null) { return await this.client.findMessagesGroups({ card: this.params.card, limit: GROUPS_LIMIT, @@ -573,18 +697,12 @@ export class MessagesQuery implements PagedQuery { limit: GROUPS_LIMIT, order: direction === Direction.Forward ? SortingOrder.Ascending : SortingOrder.Descending, orderBy: 'fromDate', - fromDate: - direction === Direction.Forward - ? { - greater: fromDate - } - : { - less: fromDate - } + ...date }) } private async find(params: FindMessagesParams): Promise { + delete (params as any).from return await this.client.findMessages(params, this.id) } @@ -593,6 +711,7 @@ export class MessagesQuery implements PagedQuery { } private async notify(): Promise { + this.initialized = true if (this.callback == null) return if (this.result instanceof Promise) this.result = await this.result const result = this.result.getResult() @@ -625,7 +744,16 @@ export class MessagesQuery implements PagedQuery { void this.notify() } - this.messagesFromFiles = this.messagesFromFiles.map((it) => { + this.next.buffer = this.next.buffer.map((it) => { + if (it.id === event.thread.message) { + return { + ...it, + thread: event.thread + } + } + return it + }) + this.prev.buffer = this.next.buffer.map((it) => { if (it.id === event.thread.message) { return { ...it, @@ -649,7 +777,7 @@ export class MessagesQuery implements PagedQuery { const eventId = event._id if (eventId != null) { const tmp = this.tmpMessages.get(eventId) - if (tmp) this.result.delete(tmp) + if (tmp != null) this.result.delete(tmp) this.tmpMessages.delete(eventId) } const lastMessage = this.result.getLast() @@ -718,7 +846,8 @@ export class MessagesQuery implements PagedQuery { void this.notify() } - this.messagesFromFiles = this.messagesFromFiles.filter((it) => !event.messages.includes(it.id)) + this.next.buffer = this.next.buffer.filter((it) => !event.messages.includes(it.id)) + this.prev.buffer = this.prev.buffer.filter((it) => !event.messages.includes(it.id)) } private async onReactionCreatedEvent(event: ReactionCreatedEvent): Promise { @@ -737,9 +866,13 @@ export class MessagesQuery implements PagedQuery { void this.notify() } - const fromBuffer = 
this.messagesFromFiles.find((it) => it.id === reaction.message) - if (fromBuffer !== undefined) { - addReaction(fromBuffer, reaction) + const fromNextBuffer = this.next.buffer.find((it) => it.id === reaction.message) + if (fromNextBuffer !== undefined) { + addReaction(fromNextBuffer, reaction) + } + const fromPrevBuffer = this.prev.buffer.find((it) => it.id === reaction.message) + if (fromPrevBuffer !== undefined) { + addReaction(fromPrevBuffer, reaction) } } @@ -756,7 +889,10 @@ export class MessagesQuery implements PagedQuery { void this.notify() } } - this.messagesFromFiles = this.messagesFromFiles.map((it) => + this.next.buffer = this.next.buffer.map((it) => + it.id === event.message ? removeReaction(it, event.reaction, event.creator) : it + ) + this.prev.buffer = this.prev.buffer.map((it) => it.id === event.message ? removeReaction(it, event.reaction, event.creator) : it ) } @@ -773,9 +909,13 @@ export class MessagesQuery implements PagedQuery { await this.notify() } - const fromBuffer = this.messagesFromFiles.find((it) => it.id === file.message) - if (fromBuffer !== undefined) { - addFile(fromBuffer, file) + const fromNextBuffer = this.next.buffer.find((it) => it.id === file.message) + if (fromNextBuffer !== undefined) { + addFile(fromNextBuffer, file) + } + const fromPrevBuffer = this.prev.buffer.find((it) => it.id === file.message) + if (fromPrevBuffer !== undefined) { + addFile(fromPrevBuffer, file) } } @@ -797,9 +937,8 @@ export class MessagesQuery implements PagedQuery { await this.notify() } - this.messagesFromFiles = this.messagesFromFiles.map((it) => - it.id === event.message ? removeFile(it, event.blobId) : it - ) + this.next.buffer = this.next.buffer.map((it) => (it.id === event.message ? removeFile(it, event.blobId) : it)) + this.prev.buffer = this.prev.buffer.map((it) => (it.id === event.message ? 
removeFile(it, event.blobId) : it)) } private allowedPatches(): PatchType[] { diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 085a2428e8a..9e554125177 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -79,9 +79,25 @@ export interface DefaultMessageQueryParams { created?: Partial> | Date } -export interface OneMessageQueryParams extends DefaultMessageQueryParams { +interface BaseMessageQueryParams { + card: CardID + + limit?: number + order?: SortingOrder + + files?: boolean + reactions?: boolean + replies?: boolean +} + +export interface ManyMessagesQueryParams extends BaseMessageQueryParams { + from?: Date +} + +export interface OneMessageQueryParams extends BaseMessageQueryParams { id: MessageID created: Date + from?: never } -export type MessageQueryParams = OneMessageQueryParams | DefaultMessageQueryParams +export type MessageQueryParams = OneMessageQueryParams | ManyMessagesQueryParams From b4dcc00ccf31e369da0d8df6166928bb8656c550 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 7 May 2025 07:23:50 +0400 Subject: [PATCH 071/636] Fix threads and patches, recreate db schema (#50) Signed-off-by: Kristina Fefelova --- .version | 2 +- bun.lock | 75 ++-- package.json | 2 +- packages/cockroach/src/adapter.ts | 84 +---- packages/cockroach/src/db/mapping.ts | 61 +++- packages/cockroach/src/db/message.ts | 240 +++++++------ packages/cockroach/src/db/notification.ts | 2 +- packages/cockroach/src/db/schema.ts | 15 +- packages/cockroach/src/init.ts | 321 +++++++----------- packages/query/src/messages/query.ts | 2 +- .../query/src/notification-contexts/query.ts | 7 +- packages/rest-client/package.json | 2 +- packages/rest-client/src/rest.ts | 12 +- packages/sdk-types/package.json | 2 +- packages/sdk-types/src/db.ts | 17 +- .../sdk-types/src/requestEvents/message.ts | 7 +- packages/sdk-types/src/serverApi.ts | 5 +- packages/server/package.json | 9 +- packages/server/src/index.ts | 8 +- packages/server/src/metadata.ts | 3 +- packages/server/src/middleware/base.ts | 26 +- packages/server/src/middleware/broadcast.ts | 4 +- packages/server/src/middleware/db.ts | 50 +-- packages/server/src/middleware/triggers.ts | 3 +- packages/server/src/middleware/validate.ts | 20 +- packages/server/src/middlewares.ts | 7 + packages/server/src/triggers/message.ts | 106 +++++- packages/server/src/triggers/utils.ts | 76 +++++ packages/server/src/types.ts | 7 +- packages/server/src/utils.ts | 41 +-- packages/shared/src/index.ts | 1 + packages/shared/src/patch.ts | 79 ++--- packages/shared/src/utils.ts | 37 ++ packages/types/package.json | 6 +- packages/types/src/file.ts | 3 +- packages/types/src/message.ts | 88 ++++- packages/yaml/src/deserialize.ts | 1 + packages/yaml/src/parse.ts | 1 + 38 files changed, 829 insertions(+), 603 deletions(-) create mode 100644 packages/server/src/triggers/utils.ts create mode 100644 packages/shared/src/utils.ts diff --git a/.version b/.version index 1e17a1ebab0..e8be64cc0f6 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.175 +0.1.176 diff --git a/bun.lock b/bun.lock index b988aadafef..0d70c2ac6be 100644 --- a/bun.lock +++ b/bun.lock @@ -11,13 +11,13 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.4.0", "prettier": "^3.5.3", - "turbo": "^2.5.2", + "turbo": "^2.5.3", "typescript-eslint": "^8.32.0", }, }, "packages/client-query": { "name": "@hcengineering/communication-client-query", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-query": "workspace:*", 
"@hcengineering/communication-sdk-types": "workspace:*", @@ -33,7 +33,7 @@ }, "packages/cockroach": { "name": "@hcengineering/communication-cockroach", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -50,7 +50,7 @@ }, "packages/query": { "name": "@hcengineering/communication-query", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", @@ -69,12 +69,12 @@ }, "packages/rest-client": { "name": "@hcengineering/communication-rest-client", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "^0.7.28", + "@hcengineering/core": "^0.7.88", "snappyjs": "^0.7.0", }, "devDependencies": { @@ -87,10 +87,10 @@ }, "packages/sdk-types": { "name": "@hcengineering/communication-sdk-types", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "^0.7.28", + "@hcengineering/core": "^0.7.88", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -101,15 +101,16 @@ }, "packages/server": { "name": "@hcengineering/communication-server", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { - "@hcengineering/account-client": "^0.7.28", + "@hcengineering/account-client": "^0.7.88", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "^0.7.28", - "@hcengineering/platform": "^0.7.28", - "@hcengineering/server-token": "^0.7.28", + "@hcengineering/communication-yaml": "workspace:*", + "@hcengineering/core": "^0.7.88", + "@hcengineering/platform": "^0.7.88", + "@hcengineering/server-token": "^0.7.88", "zod": "^3.24.2", }, "devDependencies": { @@ -124,7 +125,7 @@ }, "packages/shared": { "name": "@hcengineering/communication-shared", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -138,11 +139,11 @@ }, "packages/types": { "name": "@hcengineering/communication-types", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { - "@hcengineering/card": "^0.7.28", - "@hcengineering/core": "^0.7.28", - "@hcengineering/tags": "^0.7.28", + "@hcengineering/card": "^0.7.88", + "@hcengineering/core": "^0.7.88", + "@hcengineering/tags": "^0.7.88", }, "devDependencies": { "@types/bun": "^1.1.14", @@ -153,7 +154,7 @@ }, "packages/yaml": { "name": "@hcengineering/communication-yaml", - "version": "0.1.174-beta.2", + "version": "0.1.0", "dependencies": { "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-types": "workspace:*", @@ -201,11 +202,11 @@ "@formatjs/intl-localematcher": ["@formatjs/intl-localematcher@0.6.0", "", { "dependencies": { "tslib": "2" } }, "sha512-4rB4g+3hESy1bHSBG3tDFaMY2CH67iT7yne1e+0CLTsGLDcmoEWWpJjjpWVaYgYfYuohIRuo0E+N536gd2ZHZA=="], - "@hcengineering/account-client": ["@hcengineering/account-client@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/account-client/0.7.28/32c9bc0dd297f00f4d5461467f25e072d01b9c76", { 
"dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28" } }, "sha512-k1dkioX+jYrLyAXLLLsBYVCdZKf3WjhnDHw/yeAQgLQQ/9W2ffLtDMZ8lYWV3XMQ3eWrUCodtTDOiAQgXd/LEQ=="], + "@hcengineering/account-client": ["@hcengineering/account-client@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/account-client/0.7.88/57caa95e62fecc4ca22a32c76bcdc900b0384f04", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88" } }, "sha512-CZWOJT/JZAUy+FuYuX0QcJQYDnVDNAB0QHU4RxKc7gSlYAYleibL7qJC0vRGL4rNkvDVZfJMKDPUQW4AauGqtA=="], - "@hcengineering/analytics": ["@hcengineering/analytics@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.28/a8faf3d575bb28b9682b659495e738f8e0ca596e", { "dependencies": { "@hcengineering/platform": "^0.7.28" } }, "sha512-+CI/wJ8zgop+3mEcvhlB1UJBU6tIKq2Y7qTichZjGli8BckEIEwbD4j2beu2qXRSoTT2yUGWM9zpS9+fmH/DAw=="], + "@hcengineering/analytics": ["@hcengineering/analytics@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/analytics/0.7.88/385c2ae512bbcae39189d63e53ea527aa21a662f", { "dependencies": { "@hcengineering/platform": "^0.7.88" } }, "sha512-3jT3h2Y85awrgG0JWZSrpWOXaX1J3dpq8zouo3QMw8OVO1RgHCwPEqrBPOALnB+gc9cZL02p6Aq4J+fnTZ2fww=="], - "@hcengineering/card": ["@hcengineering/card@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.28/5348996f5b7b4a34517780a5bf324122cbddfbad", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-1d3allyx9jTpE/gr9kk/g5qo5848i4FKsRiHgCpJqzNEdRW7TAednoHAr2hKRU5DZC/i3Br56axSkUQzV2Idhg=="], + "@hcengineering/card": ["@hcengineering/card@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/card/0.7.88/e39b9e182325945494f162f2dd6f16ef9bea0f3e", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/ui": "^0.7.88" } }, "sha512-3JDgocHFSGmQuKsrtQ5CTTm4rpd/PHahIx6oNX8sshDGpS2hnvNtrHNkJf39seIKiq0NNKmJ//Etl4I21h4c6w=="], "@hcengineering/communication-client-query": ["@hcengineering/communication-client-query@workspace:packages/client-query"], @@ -225,21 +226,21 @@ "@hcengineering/communication-yaml": ["@hcengineering/communication-yaml@workspace:packages/yaml"], - "@hcengineering/core": ["@hcengineering/core@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.28/43b84c63262e2b8fe5554c1967812e2ce6bb56e1", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/platform": "^0.7.28", "fast-equals": "^5.2.2" } }, "sha512-qv4QE7mkect6JYlGMljW5CQ2uU80hFGq0E17lZ5rO79h1boWEKLJpINv+w3J2BdR/nNG7taIsdZg3yfl9fY4bg=="], + "@hcengineering/core": ["@hcengineering/core@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/core/0.7.88/ca01c3ccd43c85ea44b189d995b0693c5465956c", { "dependencies": { "@hcengineering/analytics": "^0.7.88", "@hcengineering/platform": "^0.7.88", "fast-equals": "^5.2.2" } }, "sha512-c3GRTGAZYARu1kMt4TqkugmnuprYesZ4XZUeakReRtZdYei9tAF+i9xPpDKb4J3hvMC3u82LlEiV8iK26iXpTA=="], - "@hcengineering/platform": ["@hcengineering/platform@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/platform/0.7.28/2126ff97284b679ceab3c86f24febd73a3a28712", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-0zEifwsLdOs/M6TZ9xnPrQTTlhwW28sooiyL4ZfQsiC2AJUy05vLP5/W8V9eDLta+EiR5bjtlulGsg1GfPjUoQ=="], + "@hcengineering/platform": ["@hcengineering/platform@0.7.88", 
"https://npm.pkg.github.com/download/@hcengineering/platform/0.7.88/b3fbe09d6b849f7eb3641792682bf133103037f0", { "dependencies": { "intl-messageformat": "^10.7.14" } }, "sha512-iFAwYQZjGox1NqEgUh8hiT1mRHdSJqYmYux0lkeIFyt8Nn/BAZOVXe0br0Dx7wXiueFPWhSZSeWVhxvBb78zoA=="], - "@hcengineering/preference": ["@hcengineering/preference@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/preference/0.7.28/f1739991cc012d194fa18f99565ae5bd64f0ad07", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-WwwOfjZ1TLdTfPhQGvSjo8dS63HyuE7BDVhfstxP8D+H4BI5UKYoev/6vlwQf/3d+jI1S4f2ivO//IDJAD7sGg=="], + "@hcengineering/preference": ["@hcengineering/preference@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/preference/0.7.88/e1181dae3cb2eaf53fed66ddd3d5c4854717ccf0", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/ui": "^0.7.88" } }, "sha512-idzAlphM/HyNJfFMFwbpyKg6KvFTJ6rTaqgduNNJqkO4DsUCEF5/1WLNQMmiAeoFDY9y5+/mHJrx+1Ztq1XIuw=="], - "@hcengineering/server-token": ["@hcengineering/server-token@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.28/b1cffcb628aa0c6ac73171ef340c8ccf0f85ab07", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "jwt-simple": "^0.5.6" } }, "sha512-TaHwzxuoFKN6jnWHIZk0UcRmB+UQ1QE1HZzUh5doPSFUYOAtTrSb32iJDA8e8oEADcJpOp9RGIUP/EP/VED7BQ=="], + "@hcengineering/server-token": ["@hcengineering/server-token@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/server-token/0.7.88/11b3a8247869a50037fcbb13572e16545116ff93", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "jwt-simple": "^0.5.6" } }, "sha512-UQfW0E+v+PTwR7ICR+kSTQqAokpiV008ukqsxRSQJkCM4vmNYciUgd/7Y/ifDMFhKDvNyODb+wgAQs3QxkSbkA=="], - "@hcengineering/tags": ["@hcengineering/tags@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/tags/0.7.28/42545cc43e3c61e32ce96be87c245fa929f5cbb6", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/ui": "^0.7.28", "@hcengineering/view": "^0.7.28" } }, "sha512-2KCVUu5N/GAkDsDRbdUi7gnBq4ze1Z3eCLoc8nz2G/zeAjAga8gmWkZEbXqAC8rNVqBQv72EKeJFGJyqnev6Xw=="], + "@hcengineering/tags": ["@hcengineering/tags@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/tags/0.7.88/f3f0de35b1be71d1f3f6711b8e4f161e0cf857eb", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/ui": "^0.7.88", "@hcengineering/view": "^0.7.88" } }, "sha512-wpnVX/1Y8E4YPYnUcw+ZyVOpZvXYpboZC9D1btMqFs34CoMGc4tW1XT3YfsS8KxlkH6tsQjsAUvtWOPy2J46dg=="], - "@hcengineering/theme": ["@hcengineering/theme@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.28/557640a7ba2ffaa4d6e5411a54577f60dc528300", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/platform": "^0.7.28", "svelte": "^4.2.19" } }, "sha512-Hv3wQKHr327TC7wMHCBiG4bMtg8vSlb7+UFwdKI8ztEs3359meqkXu+exqi7Nl5334voXIz5ZFxjwZwOMzNSFA=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.88/d939d1e2a9047def2795dc5124653e49ca522cfc", { "dependencies": { "@hcengineering/analytics": "^0.7.88", "@hcengineering/platform": "^0.7.88", "svelte": "^4.2.19" } }, "sha512-PCt9bFuGPYOf1kQgjzxPSZgGXU4G14Z1XZJfgGu71u/zHxG6fu6nls35aSrEjy2Ks6oR+OCqqvpDh6RTf6utrg=="], - "@hcengineering/ui": 
["@hcengineering/ui@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.28/82025fc775702ea8c8569611356662bfbf28eb74", { "dependencies": { "@hcengineering/analytics": "^0.7.28", "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/theme": "^0.7.28", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "plyr": "^3.7.8", "svelte": "^4.2.19" } }, "sha512-QsUpO0+Idu1n2dC6KtorPJ6SSp30It8FHSuG3Y+Awv4td/ftVocOSFqhskbk+us9Yw9KqOYgkqhGYEHbfRfw4Q=="], + "@hcengineering/ui": ["@hcengineering/ui@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.88/fce3392a59f706c248ba4b17714163e5725d5f01", { "dependencies": { "@hcengineering/analytics": "^0.7.88", "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/theme": "^0.7.88", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "plyr": "^3.7.8", "svelte": "^4.2.19" } }, "sha512-mzHXcX3aBC/OXUaV9F2hnB6+irwMfEYM5J1CnfqiHqgv3Hza20BZR6BrYe/nQCLb44DuHkXojbwL94AKK3QW6w=="], - "@hcengineering/view": ["@hcengineering/view@0.7.28", "https://npm.pkg.github.com/download/@hcengineering/view/0.7.28/8aad5b07d3750463a78aae1b9482361f2ff9f45e", { "dependencies": { "@hcengineering/core": "^0.7.28", "@hcengineering/platform": "^0.7.28", "@hcengineering/preference": "^0.7.28", "@hcengineering/ui": "^0.7.28" } }, "sha512-cG0isakqIxuiAG7T2KyRnztfOp3falr+oG77Qod915EeBKNEDqXfNrD29Dp04Wwf5TowifDT9smLzAyH8l6kgg=="], + "@hcengineering/view": ["@hcengineering/view@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/view/0.7.88/8a13ddff0178ac3b705ebed683d0c1cc328c8e1c", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/preference": "^0.7.88", "@hcengineering/ui": "^0.7.88" } }, "sha512-W3/OgCu9W+OGZ8v8hoT0bjoCNflvIcWFwFafk40FtWFxvqzObm41e5ZWNj3yGWFdlVsyN/Nqdjkyz0txWUXQAA=="], "@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="], @@ -691,19 +692,19 @@ "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - "turbo": ["turbo@2.5.2", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.2", "turbo-darwin-arm64": "2.5.2", "turbo-linux-64": "2.5.2", "turbo-linux-arm64": "2.5.2", "turbo-windows-64": "2.5.2", "turbo-windows-arm64": "2.5.2" }, "bin": { "turbo": "bin/turbo" } }, "sha512-Qo5lfuStr6LQh3sPQl7kIi243bGU4aHGDQJUf6ylAdGwks30jJFloc9NYHP7Y373+gGU9OS0faA4Mb5Sy8X9Xw=="], + "turbo": ["turbo@2.5.3", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.3", "turbo-darwin-arm64": "2.5.3", "turbo-linux-64": "2.5.3", "turbo-linux-arm64": "2.5.3", "turbo-windows-64": "2.5.3", "turbo-windows-arm64": "2.5.3" }, "bin": { "turbo": "bin/turbo" } }, "sha512-iHuaNcq5GZZnr3XDZNuu2LSyCzAOPwDuo5Qt+q64DfsTP1i3T2bKfxJhni2ZQxsvAoxRbuUK5QetJki4qc5aYA=="], - "turbo-darwin-64": ["turbo-darwin-64@2.5.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-2aIl0Sx230nLk+Cg2qSVxvPOBWCZpwKNuAMKoROTvWKif6VMpkWWiR9XEPoz7sHeLmCOed4GYGMjL1bqAiIS/g=="], + "turbo-darwin-64": ["turbo-darwin-64@2.5.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-YSItEVBUIvAGPUDpAB9etEmSqZI3T6BHrkBkeSErvICXn3dfqXUfeLx35LfptLDEbrzFUdwYFNmt8QXOwe9yaw=="], - "turbo-darwin-arm64": 
["turbo-darwin-arm64@2.5.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-MrFYhK/jYu8N6QlqZtqSHi3e4QVxlzqU3ANHTKn3/tThuwTLbNHEvzBPWSj5W7nZcM58dCqi6gYrfRz6bJZyAA=="], + "turbo-darwin-arm64": ["turbo-darwin-arm64@2.5.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-5PefrwHd42UiZX7YA9m1LPW6x9YJBDErXmsegCkVp+GjmWrADfEOxpFrGQNonH3ZMj77WZB2PVE5Aw3gA+IOhg=="], - "turbo-linux-64": ["turbo-linux-64@2.5.2", "", { "os": "linux", "cpu": "x64" }, "sha512-LxNqUE2HmAJQ/8deoLgMUDzKxd5bKxqH0UBogWa+DF+JcXhtze3UTMr6lEr0dEofdsEUYK1zg8FRjglmwlN5YA=="], + "turbo-linux-64": ["turbo-linux-64@2.5.3", "", { "os": "linux", "cpu": "x64" }, "sha512-M9xigFgawn5ofTmRzvjjLj3Lqc05O8VHKuOlWNUlnHPUltFquyEeSkpQNkE/vpPdOR14AzxqHbhhxtfS4qvb1w=="], - "turbo-linux-arm64": ["turbo-linux-arm64@2.5.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-0MI1Ao1q8zhd+UUbIEsrM+yLq1BsrcJQRGZkxIsHFlGp7WQQH1oR3laBgfnUCNdCotCMD6w4moc9pUbXdOR3bg=="], + "turbo-linux-arm64": ["turbo-linux-arm64@2.5.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-auJRbYZ8SGJVqvzTikpg1bsRAsiI9Tk0/SDkA5Xgg0GdiHDH/BOzv1ZjDE2mjmlrO/obr19Dw+39OlMhwLffrw=="], - "turbo-windows-64": ["turbo-windows-64@2.5.2", "", { "os": "win32", "cpu": "x64" }, "sha512-hOLcbgZzE5ttACHHyc1ajmWYq4zKT42IC3G6XqgiXxMbS+4eyVYTL+7UvCZBd3Kca1u4TLQdLQjeO76zyDJc2A=="], + "turbo-windows-64": ["turbo-windows-64@2.5.3", "", { "os": "win32", "cpu": "x64" }, "sha512-arLQYohuHtIEKkmQSCU9vtrKUg+/1TTstWB9VYRSsz+khvg81eX6LYHtXJfH/dK7Ho6ck+JaEh5G+QrE1jEmCQ=="], - "turbo-windows-arm64": ["turbo-windows-arm64@2.5.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-fMU41ABhSLa18H8V3Z7BMCGynQ8x+wj9WyBMvWm1jeyRKgkvUYJsO2vkIsy8m0vrwnIeVXKOIn6eSe1ddlBVqw=="], + "turbo-windows-arm64": ["turbo-windows-arm64@2.5.3", "", { "os": "win32", "cpu": "arm64" }, "sha512-3JPn66HAynJ0gtr6H+hjY4VHpu1RPKcEwGATvGUTmLmYSYBQieVlnGDRMMoYN066YfyPqnNGCfhYbXfH92Cm0g=="], "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], diff --git a/package.json b/package.json index bd859afaee3..f950e32b634 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "eslint-plugin-prettier": "^5.4.0", "prettier": "^3.5.3", "typescript-eslint": "^8.32.0", - "turbo": "^2.5.2" + "turbo": "^2.5.3" }, "packageManager": "bun@1.2.9" } diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index f9493b47351..b42e1ceeb19 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -41,10 +41,10 @@ import { type FindLabelsParams, type LabelID, type CardType, - type MessageData + type MessageData, + type PatchData } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' -import { retry } from '@hcengineering/communication-shared' import { MessagesDb } from './db/message' import { NotificationsDb } from './db/notification' @@ -77,9 +77,10 @@ export class CockroachAdapter implements DbAdapter { creator: SocialID, created: Date, data?: MessageData, - externalId?: string + externalId?: string, + id?: MessageID ): Promise { - return await this.message.createMessage(card, type, content, creator, created, data, externalId) + return await this.message.createMessage(card, type, content, creator, created, data, externalId, id) } async removeMessages(card: CardID, messages: MessageID[], socialIds?: SocialID[]): Promise { @@ -91,11 +92,11 @@ export class CockroachAdapter implements DbAdapter { message: MessageID, 
messageCreated: Date, type: PatchType, - content: RichText, + data: PatchData, creator: SocialID, created: Date ): Promise { - await this.message.createPatch(card, message, messageCreated, type, content, creator, created) + await this.message.createPatch(card, message, messageCreated, type, data, creator, created) } async createMessagesGroup(card: CardID, blobId: BlobID, fromDate: Date, toDate: Date, count: number): Promise { @@ -150,9 +151,10 @@ export class CockroachAdapter implements DbAdapter { message: MessageID, messageCreated: Date, thread: CardID, + threadType: CardType, created: Date ): Promise { - await this.message.createThread(card, message, messageCreated, thread, created) + await this.message.createThread(card, message, messageCreated, thread, threadType, created) } async updateThread(thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise { @@ -243,77 +245,13 @@ export async function createDbAdapter( logger?: Logger, options?: Options ): Promise { - const greenUrl = process.env.GREEN_URL ?? '' const connection = connect(connectionString) const sql = await connection.getClient() await initSchema(sql) - if (greenUrl !== '') { - const client = new GreenClient(greenUrl, sql) - return new CockroachAdapter(client, workspace, logger, options) - } else { - const client = new CockroachClient(connection, sql) - - return new CockroachAdapter(client, workspace, logger, options) - } -} - -class GreenClient implements SqlClient { - private readonly url: string - private readonly token: string - constructor( - private readonly endpoint: string, - private readonly sql: postgres.Sql - ) { - const url = new URL(this.endpoint) - this.token = url.searchParams.get('token') ?? 'secret' + const client = new CockroachClient(connection, sql) - const compression = url.searchParams.get('compression') ?? '' - - const newHost = url.host - const newPathname = url.pathname - const newSearchParams = new URLSearchParams() - - if (compression !== '') { - newSearchParams.set('compression', compression) - } - - this.url = `${url.protocol}//${newHost}${newPathname}${newSearchParams.size > 0 ? '?' + newSearchParams.toString() : ''}` - } - - async execute(query: string, params?: SqlParams): Promise { - return await retry(() => this.fetch(query, params), { retries: 5 }) - } - - cursor(query: string, params?: SqlParams, size?: number): AsyncIterable[]> { - const sql = params !== undefined && params.length > 0 ? injectVars(query, params) : query - - return this.sql.unsafe(sql).cursor(size) - } - - close(): void { - // do nothing - } - - private async fetch(query: string, params?: SqlParams): Promise { - const url = this.url.endsWith('/') ? this.url + 'api/v1/sql' : this.url + '/api/v1/sql' - - const response = await fetch(url, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - Authorization: 'Bearer ' + this.token, - Connection: 'keep-alive' - }, - body: JSON.stringify({ query, params }, (_, value) => (typeof value === 'bigint' ? 
value.toString() : value)) - }) - - if (!response.ok) { - throw new Error(`Failed to execute sql: ${response.status} ${response.statusText}`) - } - - return await response.json() - } + return new CockroachAdapter(client, workspace, logger, options) } class CockroachClient implements SqlClient { diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 3eb6012bf74..2e766fb2083 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -33,8 +33,10 @@ import { type SocialID, type Thread, type MessageData, - type Label + type Label, + type CardType } from '@hcengineering/communication-types' +import { applyPatches } from '@hcengineering/communication-shared' import { type FileDb, @@ -51,6 +53,7 @@ import { interface RawMessage extends MessageDb { thread_id?: CardID + thread_type?: CardType replies_count?: number last_reply?: Date patches?: PatchDb[] @@ -72,7 +75,7 @@ interface RawNotification extends NotificationDb { message_group_count?: number message_patches?: { patch_type: PatchType - patch_content: RichText + patch_data: Record patch_creator: SocialID patch_created: Date }[] @@ -83,32 +86,38 @@ type RawContext = ContextDb & { id: ContextID } & { } export function toMessage(raw: RawMessage): Message { - const lastPatch = raw.patches?.[0] + const patches = (raw.patches ?? []).map((it) => toPatch(it)) - return { + const rawMessage: Message = { id: String(raw.id) as MessageID, type: raw.type, card: raw.card_id, - content: lastPatch?.content ?? raw.content, + content: raw.content, creator: raw.creator, - created: raw.created, + created: new Date(raw.created), data: raw.data, externalId: raw.external_id, - edited: lastPatch?.created ?? undefined, thread: - raw.thread_id != null + raw.thread_id != null && raw.thread_type != null ? { card: raw.card_id, message: String(raw.id) as MessageID, messageCreated: new Date(raw.created), thread: raw.thread_id, - repliesCount: raw.replies_count ?? 0, + threadType: raw.thread_type, + repliesCount: raw.replies_count ? parseInt(raw.replies_count as any) : 0, lastReply: raw.last_reply ?? new Date() } : undefined, reactions: (raw.reactions ?? []).map(toReaction), files: (raw.files ?? 
[]).map(toFile) } + + if (patches.length === 0) { + return rawMessage + } + + return applyPatches(rawMessage, patches, [PatchType.update]) } export function toReaction(raw: ReactionDb): Reaction { @@ -116,7 +125,7 @@ export function toReaction(raw: ReactionDb): Reaction { message: String(raw.message_id) as MessageID, reaction: raw.reaction, creator: raw.creator, - created: raw.created + created: new Date(raw.created) } } @@ -130,7 +139,7 @@ export function toFile(raw: FileDb): File { filename: raw.filename, size: parseInt(raw.size as any), creator: raw.creator, - created: raw.created + created: new Date(raw.created) } } @@ -145,12 +154,12 @@ export function toMessagesGroup(raw: MessagesGroupDb): MessagesGroup { } } -export function toPatch(raw: PatchDb): Patch { +export function toPatch(raw: Omit): Patch { return { type: raw.type, messageCreated: new Date(raw.message_created), message: String(raw.message_id) as MessageID, - content: raw.content, + data: raw.data as any, creator: raw.creator, created: new Date(raw.created) } @@ -162,8 +171,9 @@ export function toThread(raw: ThreadDb): Thread { message: String(raw.message_id) as MessageID, messageCreated: new Date(raw.message_created), thread: raw.thread_id, - repliesCount: raw.replies_count, - lastReply: raw.last_reply + threadType: raw.thread_type, + repliesCount: parseInt(raw.replies_count as any), + lastReply: new Date(raw.last_reply) } } @@ -198,20 +208,35 @@ function toNotificationRaw( raw.message_created != null && raw.message_type != null ) { - const lastPatch = (raw.message_patches ?? []).find((it) => it.patch_type === PatchType.update) + const patches = (raw.message_patches ?? []).map((it) => + toPatch({ + card_id: card, + message_id: raw.message_id, + type: it.patch_type, + data: it.patch_data, + creator: it.patch_creator, + created: new Date(it.patch_created), + message_created: raw.message_created ? new Date(raw.message_created) : created + }) + ) + message = { id: String(raw.message_id) as MessageID, type: raw.message_type, card, - content: lastPatch?.patch_content ?? raw.message_content, + content: raw.message_content, data: raw.message_data, externalId: raw.message_external_id, creator: raw.message_creator, created: new Date(raw.message_created), - edited: lastPatch?.patch_created != null ? 
new Date(lastPatch.patch_created) : undefined, + edited: undefined, reactions: [], files: [] } + + if (patches.length > 0) { + message = applyPatches(message, patches, [PatchType.update]) + } } if (message != null) { diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 2046e2fb213..69d98b9159d 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -16,6 +16,7 @@ import { type BlobID, type CardID, + type CardType, type FindMessagesGroupsParams, type FindMessagesParams, type Message, @@ -23,6 +24,7 @@ import { type MessageID, type MessagesGroup, type MessageType, + type PatchData, PatchType, type RichText, type SocialID, @@ -34,6 +36,7 @@ import { BaseDb } from './base' import { type FileDb, type MessageDb, + messageSchema, type MessagesGroupDb, type PatchDb, type ReactionDb, @@ -52,9 +55,10 @@ export class MessagesDb extends BaseDb { creator: SocialID, created: Date, data?: MessageData, - externalId?: string + externalId?: string, + id?: MessageID ): Promise { - const db: Omit = { + const db: Omit & { id?: MessageID } = { type, workspace_id: this.workspace, card_id: card, @@ -62,18 +66,26 @@ export class MessagesDb extends BaseDb { creator, created, data, - external_id: externalId + external_id: externalId, + id } - const sql = `INSERT INTO ${TableName.Message} (workspace_id, card_id, content, creator, created, type, data, external_id) - VALUES ($1::uuid, $2::varchar, $3::text, $4::varchar, $5::timestamptz, $6::varchar, $7::jsonb, $8::varchar) - RETURNING id::text` + const values: any[] = [] + const keys: string[] = [] - const result = await this.execute( - sql, - [db.workspace_id, db.card_id, db.content, db.creator, db.created, db.type, db.data ?? {}, externalId ?? null], - 'insert message' - ) + for (const key in db) { + const value: any = (db as any)[key] + if (value == null) continue + keys.push(key) + values.push(value) + } + + const placeholders = keys.map((key, i) => `$${i + 1}::${(messageSchema as any)[key]}`) + const sql = `INSERT INTO ${TableName.Message} (${keys.join(', ')}) + VALUES (${placeholders.join(', ')}) + RETURNING id::text` + + const result = await this.execute(sql, values, 'insert message') return result.map((it: any) => it.id)[0] } @@ -104,7 +116,10 @@ export class MessagesDb extends BaseDb { values.push(messages) } - const sql = `DELETE FROM ${TableName.Message} WHERE ${where.join(' AND ')} RETURNING id::text` + const sql = `DELETE + FROM ${TableName.Message} + WHERE ${where.join(' AND ')} + RETURNING id::text` const result = await this.execute(sql, values, 'remove messages') @@ -116,7 +131,7 @@ export class MessagesDb extends BaseDb { message: MessageID, messageCreated: Date, type: PatchType, - content: string, + data: PatchData, creator: SocialID, created: Date ): Promise { @@ -125,18 +140,20 @@ export class MessagesDb extends BaseDb { card_id: card, message_id: message, type, - content, + data, creator, created, message_created: messageCreated } - const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, content, creator, created, message_created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::text, $6::varchar, $7::timestamptz, $8::timestamptz)` + const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, data, creator, created, + message_created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::jsonb, $6::varchar, $7::timestamptz, + $8::timestamptz)` await this.execute( sql, - [db.workspace_id, 
db.card_id, db.message_id, db.type, db.content, db.creator, db.created, db.message_created], + [db.workspace_id, db.card_id, db.message_id, db.type, db.data, db.creator, db.created, db.message_created], 'insert patch' ) } @@ -165,8 +182,10 @@ export class MessagesDb extends BaseDb { created, message_created: messageCreated } - const sql = `INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, type, filename, creator, created, message_created, size) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::uuid, $5::varchar, $6::varchar, $7::varchar, $8::timestamptz, $9::timestamptz, $10::int8)` + const sql = `INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, type, filename, creator, + created, message_created, size) + VALUES ($1::uuid, $2::varchar, $3::int8, $4::uuid, $5::varchar, $6::varchar, $7::varchar, + $8::timestamptz, $9::timestamptz, $10::int8)` await this.execute( sql, @@ -206,10 +225,12 @@ export class MessagesDb extends BaseDb { created: Date ): Promise { const select = `SELECT m.id - FROM ${TableName.Message} m - WHERE m.id = $1::bigint` + FROM ${TableName.Message} m + WHERE m.workspace_id = $1::uuid + AND m.card_id = $2::varchar + AND m.id = $3::bigint` - const messageDb = await this.execute(select, [message], 'select message') + const messageDb = await this.execute(select, [this.workspace, card, message], 'select message') if (messageDb.length > 0) { const db: ReactionDb = { @@ -221,7 +242,7 @@ export class MessagesDb extends BaseDb { created } const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::timestamptz)` await this.execute( sql, @@ -229,7 +250,7 @@ export class MessagesDb extends BaseDb { 'insert reaction' ) } else { - await this.createPatch(card, message, messageCreated, PatchType.addReaction, reaction, creator, created) + await this.createPatch(card, message, messageCreated, PatchType.addReaction, { reaction }, creator, created) } } @@ -242,22 +263,24 @@ export class MessagesDb extends BaseDb { created: Date ): Promise { const select = `SELECT m.id - FROM ${TableName.Message} m - WHERE m.id = $1::bigint` + FROM ${TableName.Message} m + WHERE m.workspace_id = $1::uuid + AND m.card_id = $2::varchar + AND m.id = $3::bigint` - const messageDb = await this.execute(select, [message], 'select message') + const messageDb = await this.execute(select, [this.workspace, card, message], 'select message') if (messageDb.length > 0) { const sql = `DELETE - FROM ${TableName.Reaction} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::bigint - AND reaction = $4::varchar - AND creator = $5::varchar` + FROM ${TableName.Reaction} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::bigint + AND reaction = $4::varchar + AND creator = $5::varchar` await this.execute(sql, [this.workspace, card, message, reaction, creator], 'remove reaction') } else { - await this.createPatch(card, message, messageCreated, PatchType.removeReaction, reaction, creator, created) + await this.createPatch(card, message, messageCreated, PatchType.removeReaction, { reaction }, creator, created) } } @@ -267,6 +290,7 @@ export class MessagesDb extends BaseDb { message: MessageID, messageCreated: Date, thread: CardID, + threadType: CardType, created: Date ): Promise { const db: ThreadDb = { @@ -275,15 
+299,25 @@ export class MessagesDb extends BaseDb { message_id: message, message_created: messageCreated, thread_id: thread, + thread_type: threadType, replies_count: 0, last_reply: created } - const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, replies_count, - last_reply, message_created) - VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::int, $6::timestamptz, $7::timestamptz)` + const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, thread_type, replies_count, + last_reply, message_created) + VALUES ($1::uuid, $2::varchar, $3::bigint, $4::varchar, $5::varchar, $6::int, $7::timestamptz, $8::timestamptz)` await this.execute( sql, - [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.replies_count, db.last_reply, db.message_created], + [ + db.workspace_id, + db.card_id, + db.message_id, + db.thread_id, + db.thread_type, + db.replies_count, + db.last_reply, + db.message_created + ], 'insert thread' ) } @@ -322,7 +356,7 @@ export class MessagesDb extends BaseDb { } const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, count) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::int)` + VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::int)` await this.execute( sql, [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.count], @@ -332,10 +366,10 @@ export class MessagesDb extends BaseDb { async removeMessagesGroup(card: CardID, blobId: BlobID): Promise { const sql = `DELETE - FROM ${TableName.MessagesGroup} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND blob_id = $3::uuid` + FROM ${TableName.MessagesGroup} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND blob_id = $3::uuid` await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') } @@ -391,6 +425,7 @@ export class MessagesDb extends BaseDb { 'message_created', f.message_created, 'blob_id', f.blob_id, 'type', f.type, + 'size', f.size, 'filename', f.filename, 'creator', f.creator, 'created', f.created @@ -438,10 +473,11 @@ export class MessagesDb extends BaseDb { p.message_id, jsonb_agg( jsonb_build_object( - 'content', p.content, + 'type', p.type, + 'data', p.data, 'creator', p.creator, 'created', p.created - ) ORDER BY p.created DESC + ) ORDER BY p.created ASC ) AS patches FROM ${TableName.Patch} p INNER JOIN limited_messages m @@ -457,7 +493,7 @@ export class MessagesDb extends BaseDb { private buildMainSelect(params: FindMessagesParams): string { const orderBy = this.buildOrderBy(params) const selectReplies = params.replies - ? `t.thread_id as thread_id, t.replies_count as replies_count, t.last_reply as last_reply,` + ? `t.thread_id as thread_id, t.thread_type as thread_type, t.replies_count::int as replies_count, t.last_reply as last_reply,` : '' const selectFiles = params.files ? 
`COALESCE(f.files, '[]'::jsonb) AS files,` : `'[]'::jsonb AS files,` @@ -483,32 +519,31 @@ export class MessagesDb extends BaseDb { : '' return ` - SELECT - m.id::text, - m.card_id, - m.type, - m.content, - m.creator, - m.created, - m.data, - m.external_id, - ${selectReplies} - ${selectFiles} - ${selectReactions} - COALESCE(p.patches, '[]'::jsonb) AS patches - FROM limited_messages m - LEFT JOIN ${TableName.Thread} t - ON t.workspace_id = m.workspace_id - AND t.card_id = m.card_id - AND t.message_id = m.id - ${joinFiles} - ${joinReactions} - LEFT JOIN agg_patches p - ON p.workspace_id = m.workspace_id - AND p.card_id = m.card_id - AND p.message_id = m.id - ${orderBy} - ` + SELECT m.id::text, + m.card_id, + m.type, + m.content, + m.creator, + m.created, + m.data, + m.external_id, + ${selectReplies} + ${selectFiles} + ${selectReactions} + COALESCE(p.patches, '[]'::jsonb) AS patches + FROM limited_messages m + LEFT JOIN ${TableName.Thread} t + ON t.workspace_id = m.workspace_id + AND t.card_id = m.card_id + AND t.message_id = m.id + ${joinFiles} + ${joinReactions} + LEFT JOIN agg_patches p + ON p.workspace_id = m.workspace_id + AND p.card_id = m.card_id + AND p.message_id = m.id + ${orderBy} + ` } buildMessageWhere(params: FindMessagesParams): { where: string; values: any[] } { @@ -546,15 +581,16 @@ export class MessagesDb extends BaseDb { // Find thread async findThread(thread: CardID): Promise { const sql = `SELECT t.card_id, - t.message_id::text, - t.message_created, - t.thread_id, - t.replies_count, - t.last_reply - FROM ${TableName.Thread} t - WHERE t.workspace_id = $1::uuid - AND t.thread_id = $2::varchar - LIMIT 1;` + t.message_id::text, + t.message_created, + t.thread_id, + t.thread_type, + t.replies_count::int, + t.last_reply + FROM ${TableName.Thread} t + WHERE t.workspace_id = $1::uuid + AND t.thread_id = $2::varchar + LIMIT 1;` const result = await this.execute(sql, [this.workspace, thread], 'find thread') return result.map((it: any) => toThread(it))[0] @@ -563,27 +599,27 @@ export class MessagesDb extends BaseDb { // Find messages groups async findMessagesGroups(params: FindMessagesGroupsParams): Promise { const select = ` - SELECT mg.card_id, - mg.blob_id, - mg.from_date, - mg.to_date, - mg.count, - patches - FROM ${TableName.MessagesGroup} mg - CROSS JOIN LATERAL ( - SELECT jsonb_agg(jsonb_build_object( - 'message_id', p.message_id::text, - 'message_created', p.message_created, - 'type', p.type, - 'content', p.content, - 'creator', p.creator, - 'created', p.created - ) ORDER BY p.created) AS patches - FROM ${TableName.Patch} p - WHERE p.workspace_id = mg.workspace_id - AND p.card_id = mg.card_id - AND p.message_created BETWEEN mg.from_date AND mg.to_date - ) sub` + SELECT mg.card_id, + mg.blob_id, + mg.from_date, + mg.to_date, + mg.count, + patches + FROM ${TableName.MessagesGroup} mg + CROSS JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object( + 'message_id', p.message_id::text, + 'message_created', p.message_created, + 'type', p.type, + 'data', p.data, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created) AS patches + FROM ${TableName.Patch} p + WHERE p.workspace_id = mg.workspace_id + AND p.card_id = mg.card_id + AND p.message_created BETWEEN mg.from_date AND mg.to_date + ) sub` const { where, values } = this.buildMessagesGroupWhere(params) const orderBy = diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index d8e22344b60..e2d6137df6f 100644 --- a/packages/cockroach/src/db/notification.ts +++ 
b/packages/cockroach/src/db/notification.ts @@ -219,7 +219,7 @@ export class NotificationsDb extends BaseDb { JSON_AGG( JSONB_BUILD_OBJECT( 'patch_type', p.type, - 'patch_content', p.content, + 'patch_data', p.data, 'patch_creator', p.creator, 'patch_created', p.created ) ORDER BY p.created DESC diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/db/schema.ts index 69f2c6d07ea..3e9663024b7 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/db/schema.ts @@ -54,6 +54,18 @@ export interface MessageDb { external_id?: string } +export const messageSchema: Record = { + id: 'int8', + workspace_id: 'uuid', + card_id: 'varchar', + content: 'text', + creator: 'varchar', + created: 'timestamptz', + type: 'varchar', + data: 'jsonb', + external_id: 'varchar' +} + export interface MessagesGroupDb { workspace_id: WorkspaceID card_id: CardID @@ -69,7 +81,7 @@ export interface PatchDb { card_id: CardID message_id: MessageID type: PatchType - content: RichText + data: Record creator: SocialID created: Date message_created: Date @@ -103,6 +115,7 @@ export interface ThreadDb { message_id: MessageID message_created: Date thread_id: CardID + thread_type: CardType replies_count: number last_reply: Date } diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index d23925e3795..2c461d15222 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -17,17 +17,60 @@ import type postgres from 'postgres' const migrationsTableName = 'communication._migrations' -let isInitialized = false +let isSchemaInitialized = false +let initPromise: Promise | null = null -export async function initSchema(sql: postgres.Sql) { - if (isInitialized) return +export function isInitialized(): boolean { + return isSchemaInitialized +} + +export async function initSchema(sql: postgres.Sql): Promise { + if (isInitialized()) return + + if (!initPromise) { + initPromise = (async () => { + const maxAttempts = 3 + const retryDelay = 3000 + + for (let attempt = 1; attempt <= maxAttempts; attempt++) { + try { + await init(sql) + isSchemaInitialized = true + return + } catch (err) { + if (attempt === maxAttempts) { + throw err + } + console.warn(`InitSchema attempt ${attempt} failed, retrying in ${retryDelay}ms…`, err) + await delay(retryDelay) + } + } + })() + .catch((err) => { + throw err + }) + .finally(() => { + initPromise = null + }) + } + + await initPromise +} + +function delay(ms: number): Promise { + return new Promise((res) => setTimeout(res, ms)) +} + +async function init(sql: postgres.Sql) { + if (isSchemaInitialized) return const start = performance.now() console.log('🗃️ Initializing schema...') await sql.unsafe('CREATE SCHEMA IF NOT EXISTS communication;') await sql.unsafe(`CREATE TABLE IF NOT EXISTS ${migrationsTableName} ( name VARCHAR(255) NOT NULL, - created_on TIMESTAMPTZ NOT NULL DEFAULT now() + created_on TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY (name) )`) const appliedMigrations = await sql.unsafe(`SELECT name @@ -50,103 +93,100 @@ export async function initSchema(sql: postgres.Sql) { throw err } } - isInitialized = true + isSchemaInitialized = true const end = performance.now() const resTime = (end - start) / 1000 console.log(`🎉 All migrations complete in ${resTime.toFixed(2)} sec`) } function getMigrations(): [string, string][] { - return [ - migrationV1_1(), - migrationV2_1(), - migrationV3_1(), - migrationV4_1(), - migrationV4_2(), - migrationV5_1(), - migrationV5_2(), - migrationV5_3(), - migrationV5_4(), - 
migrationV5_5(), - migrationV5_6(), - migrationV6_1(), - migrationV6_2() - ] + return [migrationV1_1(), migrationV1_2()] } function migrationV1_1(): [string, string] { + const sql = ` + DROP SCHEMA IF EXISTS communication CASCADE; + CREATE SCHEMA IF NOT EXISTS communication; + CREATE TABLE IF NOT EXISTS ${migrationsTableName} + ( + name VARCHAR(255) NOT NULL, + created_on TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY (name) + ) + ` + + return ['recreate_schema-v1_1', sql] +} + +function migrationV1_2(): [string, string] { const sql = ` CREATE TABLE IF NOT EXISTS communication.messages ( workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, - id INT8 NOT NULL, - + id INT8 NOT NULL DEFAULT unique_rowid(), content TEXT NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL, - type VARCHAR(255) NOT NULL, data JSONB NOT NULL DEFAULT '{}', - - + external_id VARCHAR(255), PRIMARY KEY (workspace_id, card_id, id) ); CREATE INDEX IF NOT EXISTS idx_messages_workspace_card ON communication.messages (workspace_id, card_id); CREATE INDEX IF NOT EXISTS idx_messages_workspace_card_id ON communication.messages (workspace_id, card_id, id); + CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_unique_workspace_card_external_id + ON communication.messages (workspace_id, card_id, external_id) + WHERE external_id IS NOT NULL; CREATE TABLE IF NOT EXISTS communication.messages_groups ( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - blob_id UUID NOT NULL, - - from_sec TIMESTAMPTZ(0) NOT NULL, - to_sec TIMESTAMPTZ(0) NOT NULL, - count INT NOT NULL, - + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + blob_id UUID NOT NULL, + from_date TIMESTAMPTZ NOT NULL, + to_date TIMESTAMPTZ NOT NULL, + count INT NOT NULL, PRIMARY KEY (workspace_id, card_id, blob_id) ); - CREATE INDEX IF NOT EXISTS idx_messages_groups_workspace_card ON communication.messages_groups (workspace_id, card_id); + CREATE TABLE IF NOT EXISTS communication.patch ( - id INT8 NOT NULL DEFAULT unique_rowid(), - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - type VARCHAR(255) NOT NULL, - content TEXT NOT NULL, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL, - message_created_sec TIMESTAMPTZ(0) NOT NULL, - + id INT8 NOT NULL DEFAULT unique_rowid(), + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + type VARCHAR(255) NOT NULL, + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL, + message_created TIMESTAMPTZ NOT NULL, + data JSONB NOT NULL DEFAULT '{}', PRIMARY KEY (id) ); CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message ON communication.patch (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.files ( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - - blob_id UUID NOT NULL, - filename VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - size INT8 NOT NULL, - - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - - message_created_sec TIMESTAMPTZ(0) NOT NULL, - + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + blob_id UUID NOT NULL, + filename VARCHAR(255) NOT NULL, + type VARCHAR(255) NOT NULL, + size INT8 NOT NULL, + meta JSONB NOT NULL DEFAULT '{}', + creator VARCHAR(255) NOT NULL, + created TIMESTAMPTZ NOT NULL DEFAULT now(), + message_created TIMESTAMPTZ NOT NULL, PRIMARY KEY (workspace_id, card_id, message_id, blob_id) ); CREATE INDEX IF 
NOT EXISTS files_workspace_card_message_idx ON communication.files (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.reactions ( workspace_id UUID NOT NULL, @@ -155,38 +195,37 @@ function migrationV1_1(): [string, string] { reaction VARCHAR(100) NOT NULL, creator VARCHAR(255) NOT NULL, created TIMESTAMPTZ NOT NULL DEFAULT now(), - - FOREIGN KEY (workspace_id, card_id, message_id) REFERENCES communication.messages (workspace_id, card_id, id) ON DELETE CASCADE, - PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction) + PRIMARY KEY (workspace_id, card_id, message_id, creator, reaction), + FOREIGN KEY (workspace_id, card_id, message_id) REFERENCES communication.messages (workspace_id, card_id, id) ON DELETE CASCADE ); CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message ON communication.reactions (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.thread ( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - thread_id VARCHAR(255) NOT NULL, - replies_count INT NOT NULL, - last_reply TIMESTAMPTZ NOT NULL, - + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + message_id INT8 NOT NULL, + thread_id VARCHAR(255) NOT NULL, + thread_type VARCHAR(255) NOT NULL, + replies_count INT NOT NULL, + last_reply TIMESTAMPTZ NOT NULL, + message_created TIMESTAMPTZ NOT NULL DEFAULT now(), PRIMARY KEY (workspace_id, thread_id), - UNIQUE (workspace_id, card_id, message_id) + CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id) ); - CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, thread_id); + CREATE INDEX IF NOT EXISTS idx_thread_workspace_thread_message ON communication.thread (workspace_id, thread_id); CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, card_id, message_id); + CREATE TABLE IF NOT EXISTS communication.notification_context ( id INT8 NOT NULL DEFAULT unique_rowid(), - workspace_id UUID NOT NULL, card_id VARCHAR(255) NOT NULL, account UUID NOT NULL, - last_view TIMESTAMPTZ NOT NULL DEFAULT now(), last_update TIMESTAMPTZ NOT NULL DEFAULT now(), - PRIMARY KEY (id), UNIQUE (workspace_id, card_id, account) ); @@ -195,10 +234,9 @@ function migrationV1_1(): [string, string] { ( id INT8 NOT NULL DEFAULT unique_rowid(), context_id INT8 NOT NULL, - message_id INT8, + message_id INT8 NOT NULL, created TIMESTAMPTZ NOT NULL, content JSONB NOT NULL DEFAULT '{}', - PRIMARY KEY (id), FOREIGN KEY (context_id) REFERENCES communication.notification_context (id) ON DELETE CASCADE ); @@ -211,15 +249,10 @@ function migrationV1_1(): [string, string] { card_id VARCHAR(255) NOT NULL, account UUID NOT NULL, date TIMESTAMPTZ NOT NULL DEFAULT now(), - + card_type VARCHAR(255) NOT NULL, PRIMARY KEY (workspace_id, card_id, account) ); - ` - return ['init_tables_01', sql] -} -function migrationV2_1(): [string, string] { - const sql = ` CREATE TABLE IF NOT EXISTS communication.label ( workspace_id UUID NOT NULL, @@ -231,129 +264,5 @@ function migrationV2_1(): [string, string] { PRIMARY KEY (workspace_id, card_id, label_id, account) ); ` - return ['init_labels_02', sql] -} - -function migrationV3_1(): [string, string] { - const sql = ` - ALTER TABLE communication.collaborators - ADD COLUMN IF NOT EXISTS card_type VARCHAR(255) NOT NULL DEFAULT 'card:class:Card'; - ` - return ['add_card_type_to_collaborators_03', sql] -} - -function migrationV4_1(): [string, string] { - const 
sql = ` - ALTER TABLE communication.messages - ADD COLUMN IF NOT EXISTS external_id VARCHAR(255); - ` - return ['message_add_external_id_column', sql] -} - -function migrationV4_2(): [string, string] { - const sql = ` - CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_unique_external_id - ON communication.messages (external_id) - WHERE external_id IS NOT NULL; - ` - return ['message_add_external_id_column_unique_index', sql] -} - -function migrationV5_1(): [string, string] { - const sql = ` - ALTER TABLE communication.reactions - DROP CONSTRAINT IF EXISTS reactions_workspace_id_card_id_message_id_fkey; - ` - return ['remove-reactions-fk_v5_1', sql] -} - -function migrationV5_2(): [string, string] { - const sql = ` - ALTER TABLE communication.messages - ALTER COLUMN id SET DEFAULT unique_rowid(); - - ALTER TABLE communication.messages - DROP CONSTRAINT IF EXISTS messages_pkey, - ADD CONSTRAINT messages_pkey PRIMARY KEY (id);` - return ['migrate-message-id_v5_2', sql] -} - -function migrationV5_3(): [string, string] { - const sql = ` - ALTER TABLE communication.messages_groups ADD COLUMN from_date TIMESTAMPTZ; - ALTER TABLE communication.messages_groups ADD COLUMN to_date TIMESTAMPTZ; - ALTER TABLE communication.patch ADD COLUMN message_created TIMESTAMPTZ; - ALTER TABLE communication.files ADD COLUMN message_created TIMESTAMPTZ; - ALTER TABLE communication.thread ADD COLUMN IF NOT EXISTS message_created TIMESTAMPTZ NOT NULL DEFAULT now(); - DROP INDEX IF EXISTS communication.thread_workspace_id_card_id_message_id_key CASCADE; - ALTER TABLE communication.thread ADD CONSTRAINT thread_unique_constraint UNIQUE (message_id); - ` - return ['add-date-columns_v5_3', sql] -} - -function migrationV5_4(): [string, string] { - const sql = ` - UPDATE communication.messages_groups - SET from_date = from_sec::TIMESTAMPTZ, - to_date = to_sec::TIMESTAMPTZ; - - ALTER TABLE communication.messages_groups ALTER COLUMN from_date SET NOT NULL; - ALTER TABLE communication.messages_groups ALTER COLUMN to_date SET NOT NULL; - - ALTER TABLE communication.messages_groups DROP COLUMN from_sec; - ALTER TABLE communication.messages_groups DROP COLUMN to_sec; - - UPDATE communication.patch - SET message_created = message_created_sec::TIMESTAMPTZ; - - ALTER TABLE communication.patch ALTER COLUMN message_created SET NOT NULL; - ALTER TABLE communication.patch DROP COLUMN message_created_sec; - - UPDATE communication.files - SET message_created = message_created_sec::TIMESTAMPTZ; - - ALTER TABLE communication.files ALTER COLUMN message_created SET NOT NULL; - ALTER TABLE communication.files DROP COLUMN message_created_sec; - ` - return ['migrate-date-values_v5_4', sql] -} - -function migrationV5_5(): [string, string] { - const sql = ` - ALTER TABLE communication.reactions - DROP CONSTRAINT IF EXISTS reactions_pkey; - ALTER TABLE communication.reactions - ADD CONSTRAINT reactions_pkey PRIMARY KEY (message_id, creator, reaction); - ` - return ['migrate-reactions-pk_v5_5', sql] -} - -function migrationV5_6(): [string, string] { - const sql = ` - ALTER TABLE communication.files - DROP CONSTRAINT IF EXISTS files_pkey; - ALTER TABLE communication.files - ADD CONSTRAINT files_pkey PRIMARY KEY (message_id, blob_id); - - ALTER TABLE communication.reactions - ADD CONSTRAINT reactions_message_fkey FOREIGN KEY (message_id) - REFERENCES communication.messages (id) ON DELETE CASCADE; - ` - return ['migrate-constraints_v5_6', sql] -} - -function migrationV6_1(): [string, string] { - const sql = ` - DROP INDEX IF EXISTS 
communication.idx_messages_unique_external_id CASCADE; - ` - return ['message_drop_external_id_unique_index', sql] -} - -function migrationV6_2(): [string, string] { - const sql = ` - CREATE UNIQUE INDEX IF NOT EXISTS idx_messages_unique_workspace_card_external_id - ON communication.messages (workspace_id, card_id, external_id) - WHERE external_id IS NOT NULL; - ` - return ['idx_messages_unique_workspace_card_external_id', sql] + return ['reinit_tables-v1_2', sql] } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 5f9dd20744a..90ad2119488 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -951,7 +951,7 @@ export class MessagesQuery implements PagedQuery { result.push(PatchType.addFile, PatchType.removeFile) } if (this.params.replies === true) { - result.push(PatchType.addReply, PatchType.removeReply) + result.push(PatchType.updateThread) } return result } diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index e22f2b1cab5..824543769cd 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -17,7 +17,6 @@ import { type FindNotificationContextParams, type Notification, type NotificationContext, - PatchType, SortingOrder, type WorkspaceID } from '@hcengineering/communication-types' @@ -43,8 +42,6 @@ import { QueryResult } from '../result' import { WindowImpl } from '../window' import { loadMessageFromGroup } from '../utils' -const allowedPatchTypes = [PatchType.update, PatchType.addReaction, PatchType.removeReaction] - export class NotificationContextsQuery implements PagedQuery { private result: QueryResult | Promise> private forward: Promise | NotificationContext[] = [] @@ -311,9 +308,7 @@ export class NotificationContextsQuery implements PagedQuery ({ ...it, message: - it.messageId === event.patch.message && it.message != null - ? applyPatch(it.message, event.patch, allowedPatchTypes) - : it.message + it.messageId === event.patch.message && it.message != null ? 
applyPatch(it.message, event.patch) : it.message })) }) diff --git a/packages/rest-client/package.json b/packages/rest-client/package.json index cdba5818af6..5351b109fe3 100644 --- a/packages/rest-client/package.json +++ b/packages/rest-client/package.json @@ -29,7 +29,7 @@ "@hcengineering/communication-types": "workspace:*", "@hcengineering/communication-shared": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", - "@hcengineering/core": "^0.7.28", + "@hcengineering/core": "^0.7.88", "snappyjs": "^0.7.0" }, "repository": { diff --git a/packages/rest-client/src/rest.ts b/packages/rest-client/src/rest.ts index 7d5de851fc7..51002d836d1 100644 --- a/packages/rest-client/src/rest.ts +++ b/packages/rest-client/src/rest.ts @@ -102,7 +102,9 @@ class RestClientImpl implements RestClient { creator: SocialID, type: MessageType, data?: MessageData, - created?: Date + created?: Date, + externalId?: string, + id?: MessageID ): Promise { const result = await this.event({ type: MessageRequestEventType.CreateMessage, @@ -112,7 +114,9 @@ class RestClientImpl implements RestClient { content, creator, data, - created + created, + externalId, + id }) return result as CreateMessageResult } @@ -130,8 +134,8 @@ class RestClientImpl implements RestClient { messageCreated, card, message, - content, - creator + creator, + data: { content } }) } diff --git a/packages/sdk-types/package.json b/packages/sdk-types/package.json index 732e4b3cfcc..9038e0f791f 100644 --- a/packages/sdk-types/package.json +++ b/packages/sdk-types/package.json @@ -23,7 +23,7 @@ }, "dependencies": { "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "^0.7.28" + "@hcengineering/core": "^0.7.88" }, "peerDependencies": { "typescript": "^5.6.3" diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index e5eabaa3c37..0efa11e26c2 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -39,7 +39,8 @@ import type { Label, FindLabelsParams, LabelID, - CardType + CardType, + PatchData } from '@hcengineering/communication-types' export interface DbAdapter { @@ -50,7 +51,8 @@ export interface DbAdapter { creator: SocialID, created: Date, data?: MessageData, - externalId?: string + externalId?: string, + id?: MessageID ): Promise removeMessages(card: CardID, ids: MessageID[], socialIds?: SocialID[]): Promise @@ -59,7 +61,7 @@ export interface DbAdapter { message: MessageID, messageCreated: Date, type: PatchType, - content: RichText, + data: PatchData, creator: SocialID, created: Date ): Promise @@ -96,7 +98,14 @@ export interface DbAdapter { ): Promise removeFile(card: CardID, message: MessageID, blobId: BlobID): Promise - createThread(card: CardID, message: MessageID, messageCreated: Date, thread: CardID, created: Date): Promise + createThread( + card: CardID, + message: MessageID, + messageCreated: Date, + thread: CardID, + threadType: CardType, + created: Date + ): Promise updateThread(thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise findMessages(params: FindMessagesParams): Promise diff --git a/packages/sdk-types/src/requestEvents/message.ts b/packages/sdk-types/src/requestEvents/message.ts index 066c99d3a87..d357958154e 100644 --- a/packages/sdk-types/src/requestEvents/message.ts +++ b/packages/sdk-types/src/requestEvents/message.ts @@ -23,7 +23,8 @@ import type { PatchType, MessageType, MessageData, - CardType + CardType, + PatchData } from '@hcengineering/communication-types' import type { BaseRequestEvent } from './common' @@ -70,6 
+71,7 @@ export interface CreateMessageEvent extends BaseRequestEvent { data?: MessageData externalId?: string created?: Date + id?: MessageID } export interface RemoveMessagesEvent extends BaseRequestEvent { @@ -84,7 +86,7 @@ export interface CreatePatchEvent extends BaseRequestEvent { card: CardID message: MessageID messageCreated: Date - content: RichText + data: PatchData creator: SocialID } @@ -133,6 +135,7 @@ export interface CreateThreadEvent extends BaseRequestEvent { message: MessageID messageCreated: Date thread: CardID + threadType: CardType } export interface UpdateThreadEvent extends BaseRequestEvent { diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 48145c529b5..d502a63b1dd 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -23,7 +23,9 @@ import type { NotificationContext, Notification, FindLabelsParams, - Label + Label, + FindCollaboratorsParams, + Collaborator } from '@hcengineering/communication-types' import type { Account } from '@hcengineering/core' @@ -49,6 +51,7 @@ export interface ServerApi { ): Promise findLabels(session: SessionData, params: FindLabelsParams): Promise + findCollaborators(session: SessionData, params: FindCollaboratorsParams): Promise event(session: SessionData, event: RequestEvent): Promise diff --git a/packages/server/package.json b/packages/server/package.json index 0b30822ee43..b1ea23a8cd6 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -24,13 +24,14 @@ "@types/ws": "^8.5.13" }, "dependencies": { - "@hcengineering/account-client": "^0.7.28", + "@hcengineering/account-client": "^0.7.88", "@hcengineering/communication-cockroach": "workspace:*", "@hcengineering/communication-sdk-types": "workspace:*", "@hcengineering/communication-types": "workspace:*", - "@hcengineering/core": "^0.7.28", - "@hcengineering/server-token": "^0.7.28", - "@hcengineering/platform": "^0.7.28", + "@hcengineering/communication-yaml": "workspace:*", + "@hcengineering/core": "^0.7.88", + "@hcengineering/server-token": "^0.7.88", + "@hcengineering/platform": "^0.7.88", "zod": "^3.24.2" }, "peerDependencies": { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 9a5dbe2a02a..336ab639cc4 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -25,7 +25,9 @@ import type { WorkspaceID, Notification, FindLabelsParams, - Label + Label, + FindCollaboratorsParams, + Collaborator } from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' import type { EventResult, RequestEvent, ServerApi, SessionData } from '@hcengineering/communication-sdk-types' @@ -89,6 +91,10 @@ export class Api implements ServerApi { return await this.middlewares.findLabels(session, params) } + async findCollaborators(session: SessionData, params: FindCollaboratorsParams): Promise { + return await this.middlewares.findCollaborators(session, params) + } + async unsubscribeQuery(session: SessionData, id: number): Promise { await this.middlewares.unsubscribeQuery(session, id) } diff --git a/packages/server/src/metadata.ts b/packages/server/src/metadata.ts index 04951b10602..3348fe61e0c 100644 --- a/packages/server/src/metadata.ts +++ b/packages/server/src/metadata.ts @@ -19,6 +19,7 @@ export function getMetadata(): Metadata { return { msg2fileUrl: process.env.MSG2FILE_URL ?? '', accountsUrl: process.env.ACCOUNTS_URL ?? 
'', - secret: process.env.SERVER_SECRET + secret: process.env.SERVER_SECRET, + filesUrl: process.env.FILES_URL ?? '' } } diff --git a/packages/server/src/middleware/base.ts b/packages/server/src/middleware/base.ts index 0dfcd6f6742..1491b2f61ad 100644 --- a/packages/server/src/middleware/base.ts +++ b/packages/server/src/middleware/base.ts @@ -29,7 +29,9 @@ import type { NotificationContext, Notification, FindLabelsParams, - Label + Label, + FindCollaboratorsParams, + Collaborator } from '@hcengineering/communication-types' import type { Middleware, MiddlewareContext, QueryId } from '../types' @@ -72,12 +74,16 @@ export class BaseMiddleware implements Middleware { return await this.provideFindLabels(session, params, queryId) } + async findCollaborators(session: SessionData, params: FindCollaboratorsParams): Promise { + return await this.provideFindCollaborators(session, params) + } + async event(session: SessionData, event: RequestEvent, derived: boolean): Promise { return await this.provideEvent(session, event, derived) } - async response(session: SessionData, event: ResponseEvent): Promise { - return await this.provideResponse(session, event) + async response(session: SessionData, event: ResponseEvent, derived: boolean): Promise { + return await this.provideResponse(session, event, derived) } unsubscribeQuery(session: SessionData, queryId: number): void { @@ -151,9 +157,19 @@ export class BaseMiddleware implements Middleware { return [] } - protected async provideResponse(session: SessionData, event: ResponseEvent): Promise { + protected async provideFindCollaborators( + session: SessionData, + params: FindCollaboratorsParams + ): Promise { + if (this.next !== undefined) { + return this.next.findCollaborators(session, params) + } + return [] + } + + protected async provideResponse(session: SessionData, event: ResponseEvent, derived: boolean): Promise { if (this.next !== undefined) { - return this.next.response(session, event) + return this.next.response(session, event, derived) } } } diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index 33bc8d08ed2..7e11f6597ec 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -118,7 +118,7 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { data.contextQueries.delete(queryId) } - async response(session: SessionData, event: ResponseEvent): Promise { + async response(session: SessionData, event: ResponseEvent, derived: boolean): Promise { const sessionIds: string[] = [] for (const [sessionId, session] of this.dataBySessionId.entries()) { if (this.match(event, session)) { @@ -133,7 +133,7 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { this.context.ctx.error('Failed to broadcast event', { error: e }) } } - await this.provideResponse(session, event) + await this.provideResponse(session, event, derived) } closeSession(sessionId: string): void { diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index a06a206149e..ba538123284 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -13,19 +13,21 @@ // limitations under the License. 
// -import type { - FindLabelsParams, - FindMessagesGroupsParams, - FindMessagesParams, - FindNotificationContextParams, - FindNotificationsParams, - Label, - Message, - MessagesGroup, - Notification, - NotificationContext, - Patch, - Reaction +import { + type Collaborator, + type FindCollaboratorsParams, + type FindLabelsParams, + type FindMessagesGroupsParams, + type FindMessagesParams, + type FindNotificationContextParams, + type FindNotificationsParams, + type Label, + type Message, + type MessagesGroup, + type Notification, + type NotificationContext, + type Patch, + type Reaction } from '@hcengineering/communication-types' import { type AddCollaboratorsEvent, @@ -117,10 +119,14 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return await this.db.findLabels(params) } - async event(session: SessionData, event: RequestEvent): Promise { + async findCollaborators(_: SessionData, params: FindCollaboratorsParams): Promise { + return await this.db.findCollaborators(params) + } + + async event(session: SessionData, event: RequestEvent, derived: boolean): Promise { const result = await this.processEvent(session, event) if (result.responseEvent) { - void this.context.head?.response(session, result.responseEvent) + void this.context.head?.response(session, result.responseEvent, derived) } return result.result ?? {} @@ -220,7 +226,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { event.creator, created, event.data, - event.externalId + event.externalId, + event.id ) const message: Message = { id, @@ -257,19 +264,19 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { event.message, event.messageCreated, event.patchType, - event.content, + event.data, event.creator, created ) - const patch: Patch = { + const patch = { type: event.patchType, messageCreated: event.messageCreated, message: event.message, - content: event.content, + data: event.data, creator: event.creator, created - } + } as Patch const responseEvent: PatchCreatedEvent = { _id: event._id, type: MessageResponseEventType.PatchCreated, @@ -505,13 +512,14 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { private async createThread(event: CreateThreadEvent): Promise { const date = new Date() - await this.db.createThread(event.card, event.message, event.messageCreated, event.thread, date) + await this.db.createThread(event.card, event.message, event.messageCreated, event.thread, event.threadType, date) const responseEvent: ThreadCreatedEvent = { _id: event._id, type: MessageResponseEventType.ThreadCreated, thread: { card: event.card, thread: event.thread, + threadType: event.threadType, message: event.message, messageCreated: event.messageCreated, repliesCount: 0, diff --git a/packages/server/src/middleware/triggers.ts b/packages/server/src/middleware/triggers.ts index 5d1f696c286..b7d1461b556 100644 --- a/packages/server/src/middleware/triggers.ts +++ b/packages/server/src/middleware/triggers.ts @@ -33,13 +33,14 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { this.ctx = context.ctx.newChild('triggers', {}) } - async response(session: SessionData, event: ResponseEvent): Promise { + async response(session: SessionData, event: ResponseEvent, derived: boolean): Promise { const ctx: Omit = { metadata: this.context.metadata, db: this.db, workspace: this.context.workspace, account: session.account, registeredCards: this.context.registeredCards, + derived, execute: async (event: 
RequestEvent) => { return (await this.context.head?.event(session, event, true)) ?? {} } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 1dbadefc1ed..4542e1a2cf1 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -22,6 +22,8 @@ import { NotificationRequestEventType } from '@hcengineering/communication-sdk-types' import type { + Collaborator, + FindCollaboratorsParams, FindLabelsParams, FindMessagesGroupsParams, FindMessagesParams, @@ -90,6 +92,11 @@ export class ValidateMiddleware extends BaseMiddleware implements Middleware { return await this.provideFindLabels(session, params, queryId) } + async findCollaborators(session: SessionData, params: FindCollaboratorsParams): Promise { + this.validate(params, FindCollaboratorsParamsSchema) + return await this.provideFindCollaborators(session, params) + } + async event(session: SessionData, event: RequestEvent, derived: boolean): Promise { if (derived) return await this.provideEvent(session, event, derived) switch (event.type) { @@ -165,6 +172,7 @@ const CardType = z.string() const ContextID = z.string() const LabelID = z.string() const MessageData = z.any() +const PatchData = z.any() const MessageID = z.string() const MessageType = z.string() const MessagesGroup = z.any() @@ -233,6 +241,10 @@ const FindLabelsParamsSchema = FindParamsSchema.extend({ account: AccountID.optional() }).strict() +const FindCollaboratorsParamsSchema = FindParamsSchema.extend({ + card: CardID.optional(), + account: z.union([AccountID, z.array(AccountID)]).optional() +}).strict() //Events const BaseRequestEventSchema = z @@ -267,7 +279,8 @@ const CreateMessageEventSchema = BaseRequestEventSchema.extend({ creator: SocialID, data: MessageData.optional(), externalId: z.string().optional(), - created: Date.optional() + created: Date.optional(), + id: MessageID.optional() }).strict() const RemoveMessagesEventSchema = BaseRequestEventSchema.extend({ @@ -282,7 +295,7 @@ const CreatePatchEventSchema = BaseRequestEventSchema.extend({ card: CardID, message: MessageID, messageCreated: Date, - content: RichText, + data: PatchData, creator: SocialID }).strict() @@ -329,7 +342,8 @@ const CreateThreadEventSchema = BaseRequestEventSchema.extend({ card: CardID, message: MessageID, messageCreated: Date, - thread: CardID + thread: CardID, + threadType: CardType }).strict() const UpdateThreadEventSchema = BaseRequestEventSchema.extend({ diff --git a/packages/server/src/middlewares.ts b/packages/server/src/middlewares.ts index c8c3d8030b6..76dce9c121c 100644 --- a/packages/server/src/middlewares.ts +++ b/packages/server/src/middlewares.ts @@ -16,6 +16,8 @@ import type { MeasureContext } from '@hcengineering/core' import type { DbAdapter, EventResult, RequestEvent, SessionData } from '@hcengineering/communication-sdk-types' import type { + Collaborator, + FindCollaboratorsParams, FindLabelsParams, FindMessagesGroupsParams, FindMessagesParams, @@ -149,6 +151,11 @@ export class Middlewares { return await this.head.findLabels(session, params) } + async findCollaborators(session: SessionData, params: FindCollaboratorsParams): Promise { + if (this.head === undefined) return [] + return await this.head.findCollaborators(session, params) + } + async unsubscribeQuery(session: SessionData, id: number): Promise { if (this.head === undefined) return this.head?.unsubscribeQuery(session, id) diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index 
11444a8dbf5..3f6bcb4b49c 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -25,14 +25,23 @@ import { NotificationRequestEventType, type PatchCreatedEvent, type RequestEvent, + type ThreadCreatedEvent, type UpdateThreadEvent } from '@hcengineering/communication-sdk-types' -import { type CardID, PatchType, type File } from '@hcengineering/communication-types' +import { + type AddFilePatchData, + type CardID, + type Message, + MessageType, + PatchType +} from '@hcengineering/communication-types' import { generateToken } from '@hcengineering/server-token' import { concatLink, systemAccountUuid } from '@hcengineering/core' +import { generateMessageId } from '@hcengineering/communication-shared' import type { TriggerCtx, TriggerFn, Triggers } from '../types' import { findAccount } from '../utils' +import { findMessageInFiles } from './utils' async function onMessagesGroupCreated(ctx: TriggerCtx, event: MessagesGroupCreatedEvent): Promise { ctx.registeredCards.delete(event.group.card) @@ -49,11 +58,11 @@ async function onMessagesRemoved(ctx: TriggerCtx, event: MessagesRemovedEvent): return event.messages.flatMap(() => { const patchEvent: CreatePatchEvent = { type: MessageRequestEventType.CreatePatch, - patchType: PatchType.removeReply, + patchType: PatchType.updateThread, card: thread.card, message: thread.message, messageCreated: thread.messageCreated, - content: thread.thread, + data: { thread: thread.thread, threadType: thread.threadType, replies: 'decrement' }, creator: socialId } const threadEvent: UpdateThreadEvent = { @@ -62,7 +71,7 @@ async function onMessagesRemoved(ctx: TriggerCtx, event: MessagesRemovedEvent): replies: 'decrement' } - return [patchEvent, threadEvent] + return [threadEvent] }) } @@ -71,7 +80,7 @@ async function onFileCreated(ctx: TriggerCtx, event: FileCreatedEvent): Promise< if (message !== undefined) return [] const { file } = event - const patchContent: Omit = { + const patchData: AddFilePatchData = { blobId: file.blobId, type: file.type, filename: file.filename, @@ -85,7 +94,7 @@ async function onFileCreated(ctx: TriggerCtx, event: FileCreatedEvent): Promise< card: event.card, message: file.message, messageCreated: file.messageCreated, - content: JSON.stringify(patchContent), + data: patchData, creator: file.creator } ] @@ -103,7 +112,7 @@ async function onFileRemoved(ctx: TriggerCtx, event: FileRemovedEvent): Promise< card: event.card, message: event.message, messageCreated: event.messageCreated, - content: JSON.stringify({ blobId }), + data: { blobId }, creator: event.creator } ] @@ -157,6 +166,7 @@ async function addCollaborators(ctx: TriggerCtx, event: MessageCreatedEvent): Pr } async function addThreadReply(ctx: TriggerCtx, event: MessageCreatedEvent): Promise { + if (event.message.type !== MessageType.Message || ctx.derived) return [] const { message } = event const thread = await ctx.db.findThread(message.card) if (thread === undefined) return [] @@ -164,11 +174,11 @@ async function addThreadReply(ctx: TriggerCtx, event: MessageCreatedEvent): Prom return [ { type: MessageRequestEventType.CreatePatch, - patchType: PatchType.addReply, + patchType: PatchType.updateThread, card: thread.card, message: thread.message, messageCreated: thread.messageCreated, - content: thread.thread, + data: { thread: thread.thread, threadType: thread.threadType, replies: 'increment' }, creator: message.creator }, { @@ -180,6 +190,81 @@ async function addThreadReply(ctx: TriggerCtx, event: MessageCreatedEvent): Prom ] } +async 
function onThreadCreated(ctx: TriggerCtx, event: ThreadCreatedEvent): Promise { + let message: Message | undefined = ( + await ctx.db.findMessages({ + card: event.thread.card, + id: event.thread.message, + limit: 1, + files: true, + reactions: true + }) + )[0] + + const result: RequestEvent[] = [] + + if (message === undefined) { + message = await findMessageInFiles(ctx, event.thread.card, event.thread.message, event.thread.messageCreated) + + if (message !== undefined) { + result.push({ + type: MessageRequestEventType.CreatePatch, + patchType: PatchType.updateThread, + card: event.thread.card, + message: event.thread.message, + messageCreated: event.thread.messageCreated, + data: { thread: event.thread.thread, threadType: event.thread.threadType }, + creator: message.creator + }) + } + } + + if (message === undefined) { + return [] + } + + const messageId = generateMessageId() + result.push({ + type: MessageRequestEventType.CreateMessage, + messageType: message.type, + card: event.thread.thread, + cardType: event.thread.threadType, + content: message.content, + creator: message.creator, + data: message.data, + externalId: message.externalId, + created: message.created, + id: messageId + }) + + for (const file of message.files) { + result.push({ + type: MessageRequestEventType.CreateFile, + card: event.thread.thread, + message: messageId, + messageCreated: message.created, + blobId: file.blobId, + fileType: file.type, + filename: file.filename, + size: file.size, + creator: file.creator + }) + } + + for (const reaction of message.reactions) { + result.push({ + type: MessageRequestEventType.CreateReaction, + card: event.thread.thread, + message: messageId, + messageCreated: message.created, + reaction: reaction.reaction, + creator: reaction.creator + }) + } + + return result +} + const triggers: Triggers = [ ['add_collaborators_on_message_created', MessageResponseEventType.MessageCreated, addCollaborators as TriggerFn], ['add_thread_reply_on_message_created', MessageResponseEventType.MessageCreated, addThreadReply as TriggerFn], @@ -188,7 +273,8 @@ const triggers: Triggers = [ ['on_messages_group_created', MessageResponseEventType.MessagesGroupCreated, onMessagesGroupCreated as TriggerFn], ['remove_reply_on_messages_removed', MessageResponseEventType.MessagesRemoved, onMessagesRemoved as TriggerFn], ['on_file_created', MessageResponseEventType.FileCreated, onFileCreated as TriggerFn], - ['on_file_removed', MessageResponseEventType.FileRemoved, onFileRemoved as TriggerFn] + ['on_file_removed', MessageResponseEventType.FileRemoved, onFileRemoved as TriggerFn], + ['on_thread_created', MessageResponseEventType.ThreadCreated, onThreadCreated as TriggerFn] ] export default triggers diff --git a/packages/server/src/triggers/utils.ts b/packages/server/src/triggers/utils.ts new file mode 100644 index 00000000000..93b3dbc7552 --- /dev/null +++ b/packages/server/src/triggers/utils.ts @@ -0,0 +1,76 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { type CardID, type Message, type MessageID, SortingOrder } from '@hcengineering/communication-types' +import { loadGroupFile } from '@hcengineering/communication-yaml' +import { applyPatches } from '@hcengineering/communication-shared' + +import type { TriggerCtx } from '../types' + +export async function findMessage( + ctx: TriggerCtx, + card: CardID, + id: MessageID, + created: Date +): Promise { + const message = (await ctx.db.findMessages({ card, id, limit: 1, files: true }))[0] + if (message !== undefined) { + return message + } + return await findMessageInFiles(ctx, card, id, created) +} + +export async function findMessageInFiles( + ctx: TriggerCtx, + card: CardID, + id: MessageID, + created: Date +): Promise { + const filesUrl = ctx.metadata.filesUrl + if (filesUrl === '') { + ctx.ctx.error('FILES_URL is missing', { filesUrl }) + return undefined + } + + const group = ( + await ctx.db.findMessagesGroups({ + card, + fromDate: { lessOrEqual: created }, + toDate: { greaterOrEqual: created }, + limit: 1, + order: SortingOrder.Ascending, + orderBy: 'fromDate' + }) + )[0] + + if (group === undefined) { + return undefined + } + + try { + const parsedFile = await loadGroupFile(ctx.workspace, filesUrl, group, { retries: 3 }) + const messageFromFile = parsedFile.messages.find((it) => it.id === id) + if (messageFromFile === undefined) { + return undefined + } + + const patches = (group.patches ?? []).filter((it) => it.message === id) + + return patches.length > 0 ? applyPatches(messageFromFile, patches) : messageFromFile + } catch (e) { + ctx.ctx.error('Failed to find message in files', { card, id, created }) + ctx.ctx.error('Error:', { error: e }) + } +} diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 6592d8344e5..bfa5a892eb1 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -24,6 +24,8 @@ import type { } from '@hcengineering/communication-sdk-types' import type { CardID, + Collaborator, + FindCollaboratorsParams, FindLabelsParams, FindMessagesGroupsParams, FindMessagesParams, @@ -40,6 +42,7 @@ import type { export interface Metadata { msg2fileUrl: string accountsUrl: string + filesUrl: string secret?: string } @@ -65,12 +68,13 @@ export interface Middleware { ) => Promise findLabels: (session: SessionData, params: FindLabelsParams, queryId?: QueryId) => Promise + findCollaborators: (session: SessionData, params: FindCollaboratorsParams) => Promise event: (session: SessionData, event: RequestEvent, derived: boolean) => Promise unsubscribeQuery: (session: SessionData, queryId: number) => void - response: (session: SessionData, event: ResponseEvent) => Promise + response: (session: SessionData, event: ResponseEvent, derived: boolean) => Promise closeSession: (sessionId: string) => void close: () => void @@ -109,6 +113,7 @@ export interface TriggerCtx { workspace: WorkspaceID account: Account registeredCards: Set + derived: boolean execute: (event: RequestEvent) => Promise } diff --git a/packages/server/src/utils.ts b/packages/server/src/utils.ts index 3255368a551..6bc186b7997 100644 --- a/packages/server/src/utils.ts +++ b/packages/server/src/utils.ts @@ -16,6 +16,7 @@ import type { AccountID, SocialID } from '@hcengineering/communication-types' import { generateToken } from '@hcengineering/server-token' import { systemAccountUuid } from '@hcengineering/core' +import { getClient as getAccountClient } from 
'@hcengineering/account-client' import type { TriggerCtx } from './types' @@ -28,47 +29,11 @@ export async function findAccount(ctx: TriggerCtx, socialString: SocialID): Prom if (url === '') return undefined const token = generateToken(systemAccountUuid) - // const account = getAccountClient(ctx.metadata.accountsUrl, token) + const account = getAccountClient(ctx.metadata.accountsUrl, token) try { - //TODO: FIXME - return await fetchAccount(socialString, url, token) + return (await account.findPersonBySocialId(socialString, true)) as AccountID | undefined } catch (err: any) { ctx.ctx.warn('Cannot find account', { socialString, err }) } } - -//TODO: replace with AccountClient -async function fetchAccount(socialId: SocialID, url: string, token: string): Promise { - const body = { - method: 'findPersonBySocialId' as const, - params: { socialId, requireAccount: true } - } - const request: RequestInit = { - keepalive: true, - headers: { - ...(token === undefined - ? {} - : { - Authorization: 'Bearer ' + token - }) - } - } - - const response = await fetch(url, { - ...request, - headers: { - ...request.headers, - 'Content-Type': 'application/json' - }, - method: 'POST', - body: JSON.stringify(body) - }) - - const result = await response.json() - if (result.error != null) { - throw Error(result.error) - } - - return result.result as AccountID | undefined -} diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index f1ad93e6ab5..d871556b299 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -15,3 +15,4 @@ export * from './retry' export * from './patch' +export * from './utils' diff --git a/packages/shared/src/patch.ts b/packages/shared/src/patch.ts index 04ec5cc5f20..a34645c3c46 100644 --- a/packages/shared/src/patch.ts +++ b/packages/shared/src/patch.ts @@ -16,16 +16,14 @@ import { PatchType, type BlobID, - type CardID, type Message, type Patch, type Reaction, type SocialID, - type File + type AddFilePatchData, + type UpdateThreadPatchData } from '@hcengineering/communication-types' -type PatchFile = Pick - export function applyPatches(message: Message, patches: Patch[], allowedPatchTypes: PatchType[] = []): Message { if (patches.length === 0) return message @@ -42,25 +40,24 @@ export function applyPatch(message: Message, patch: Patch, allowedPatchTypes: Pa return { ...message, edited: patch.created, - content: patch.content + content: patch.data.content ?? message.content, + data: patch.data.data ?? 
message.data } case PatchType.addReaction: return addReaction(message, { message: message.id, - reaction: patch.content, + reaction: patch.data.reaction, creator: patch.creator, created: patch.created }) case PatchType.removeReaction: - return removeReaction(message, patch.content, patch.creator) - case PatchType.addReply: - return addReply(message, patch.content as CardID, patch.created) - case PatchType.removeReply: - return removeReply(message, patch.content as CardID) + return removeReaction(message, patch.data.reaction, patch.creator) case PatchType.addFile: - return addFile(message, JSON.parse(patch.content) as PatchFile, patch.created, patch.creator) + return addFile(message, patch.data, patch.created, patch.creator) case PatchType.removeFile: - return removeFile(message, patch.content as BlobID) + return removeFile(message, patch.data.blobId) + case PatchType.updateThread: + return updateThread(message, patch.data, patch.created) } return message @@ -81,36 +78,38 @@ function removeReaction(message: Message, emoji: string, creator: SocialID): Mes } } -function addReply(message: Message, thread: CardID, created: Date): Message { - if (message.thread === undefined) { - return { - ...message, - thread: { - card: message.card, - message: message.id, - messageCreated: message.created, - thread, - repliesCount: 1, - lastReply: created - } - } +function updateThread(message: Message, data: UpdateThreadPatchData, created: Date): Message { + const thread = message.thread ?? { + card: message.card, + message: message.id, + messageCreated: message.created, + thread: data.thread, + threadType: data.threadType, + repliesCount: 0, + lastReply: created } - if (message.thread.thread !== thread) return message + thread.thread = data.thread + thread.threadType = data.threadType + + if (data.replies === 'increment') { + thread.repliesCount = thread.repliesCount + 1 + thread.lastReply = created + } + + if (data.replies === 'decrement') { + thread.repliesCount = Math.max(thread.repliesCount - 1, 0) + } return { ...message, - thread: { - ...message.thread, - repliesCount: message.thread.repliesCount + 1, - lastReply: created - } + thread } } -function addFile(message: Message, file: PatchFile, created: Date, creator: SocialID): Message { +function addFile(message: Message, data: AddFilePatchData, created: Date, creator: SocialID): Message { message.files.push({ - ...file, + ...data, card: message.card, message: message.id, created, @@ -129,15 +128,3 @@ function removeFile(message: Message, blobId: BlobID): Message { files } } - -function removeReply(message: Message, thread: CardID): Message { - if (message.thread === undefined || message.thread.thread !== thread) return message - - return { - ...message, - thread: { - ...message.thread, - repliesCount: message.thread.repliesCount - 1 - } - } -} diff --git a/packages/shared/src/utils.ts b/packages/shared/src/utils.ts new file mode 100644 index 00000000000..d9a490c4f29 --- /dev/null +++ b/packages/shared/src/utils.ts @@ -0,0 +1,37 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import type { MessageID } from '@hcengineering/communication-types' + +const COUNTER_BITS = 12n +const RANDOM_BITS = 10n +const MAX_SEQUENCE = (1n << COUNTER_BITS) - 1n +const MAX_RANDOM = (1n << RANDOM_BITS) - 1n + +let counter = 0n + +/** + * Generate 64-bit MessageID and return it as string. + */ +export function generateMessageId(): MessageID { + const ts = BigInt(Date.now()) + counter = counter < MAX_SEQUENCE ? counter + 1n : 0n + + const random = BigInt(Math.floor(Math.random() * Number(MAX_RANDOM + 1n))) + + const id = (ts << (COUNTER_BITS + RANDOM_BITS)) | (counter << RANDOM_BITS) | random + + return id.toString() as MessageID +} diff --git a/packages/types/package.json b/packages/types/package.json index 202cc6728b4..3110327364c 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -25,9 +25,9 @@ "typescript": "^5.6.3" }, "dependencies": { - "@hcengineering/core": "^0.7.28", - "@hcengineering/card": "^0.7.28", - "@hcengineering/tags": "^0.7.28" + "@hcengineering/core": "^0.7.88", + "@hcengineering/card": "^0.7.88", + "@hcengineering/tags": "^0.7.88" }, "repository": { "type": "git", diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts index c807088c0b5..1740fecc0a1 100644 --- a/packages/types/src/file.ts +++ b/packages/types/src/file.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import type { BlobID, CardID, RichText, SocialID } from './core' +import type { BlobID, CardID, CardType, RichText, SocialID } from './core' import type { Message, MessageID, MessageType, MessageData } from './message' export interface FileMetadata { @@ -54,6 +54,7 @@ export interface FileReaction { export interface FileThread { thread: CardID + threadType: CardType repliesCount: number lastReply: Date } diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index af912a3fbe4..4832d9df815 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -14,7 +14,7 @@ // import type { Attribute, Class, Mixin, Ref } from '@hcengineering/core' -import type { BlobID, CardID, ID, RichText, SocialID } from './core' +import type { BlobID, CardID, CardType, ID, RichText, SocialID } from './core' import type { Card, Tag } from '@hcengineering/card' export type MessageID = ID & { message: true } @@ -85,23 +85,98 @@ export interface MessagesGroup { patches?: Patch[] } -export interface Patch { +interface BasePatch { message: MessageID messageCreated: Date type: PatchType - content: string creator: SocialID created: Date + + data: Record +} + +export interface UpdatePatch extends BasePatch { + type: PatchType.update + data: UpdatePatchData +} + +export interface AddReactionPatch extends BasePatch { + type: PatchType.addReaction + data: AddReactionPatchData +} + +export interface RemoveReactionPatch extends BasePatch { + type: PatchType.removeReaction + data: RemoveReactionPatchData +} +export interface UpdateThreadPatch extends BasePatch { + type: PatchType.updateThread + data: UpdateThreadPatchData +} + +export interface AddFilePatch extends BasePatch { + type: PatchType.addFile + data: AddFilePatchData +} + +export interface RemoveFilePatch extends BasePatch { + type: PatchType.removeFile + data: RemoveFilePatchData +} + +export type Patch = + | UpdatePatch + | AddReactionPatch + | RemoveReactionPatch + | AddFilePatch + | RemoveFilePatch + | UpdateThreadPatch + +export type PatchData = + | UpdatePatchData + | 
AddReactionPatchData + | RemoveReactionPatchData + | AddFilePatchData + | RemoveFilePatchData + | UpdateThreadPatchData + +export interface UpdateThreadPatchData { + thread: CardID + threadType: CardType + replies?: 'increment' | 'decrement' +} + +export interface UpdatePatchData { + content?: RichText + data?: MessageData +} + +export interface AddReactionPatchData { + reaction: string +} + +export interface RemoveReactionPatchData { + reaction: string +} + +export interface AddFilePatchData { + blobId: BlobID + type: string + filename: string + size: number +} + +export interface RemoveFilePatchData { + blobId: BlobID } export enum PatchType { update = 'update', addReaction = 'addReaction', removeReaction = 'removeReaction', - addReply = 'addReply', - removeReply = 'removeReply', addFile = 'addFile', - removeFile = 'removeFile' + removeFile = 'removeFile', + updateThread = 'updateThread' } export interface Reaction { @@ -128,6 +203,7 @@ export interface Thread { message: MessageID messageCreated: Date thread: CardID + threadType: CardType repliesCount: number lastReply: Date } diff --git a/packages/yaml/src/deserialize.ts b/packages/yaml/src/deserialize.ts index 6455b535ea7..8e8af4dd5b2 100644 --- a/packages/yaml/src/deserialize.ts +++ b/packages/yaml/src/deserialize.ts @@ -29,6 +29,7 @@ export function deserializeMessage(message: Message): FileMessage { message.thread != null ? { thread: message.thread.thread, + threadType: message.thread.threadType, repliesCount: message.thread.repliesCount, lastReply: message.thread.lastReply } diff --git a/packages/yaml/src/parse.ts b/packages/yaml/src/parse.ts index 3137c5e1b31..de06ede61b9 100644 --- a/packages/yaml/src/parse.ts +++ b/packages/yaml/src/parse.ts @@ -77,6 +77,7 @@ export function parseYaml(data: string): ParsedFile { message: message.id, messageCreated: message.created, thread: message.thread.thread, + threadType: message.thread.threadType, repliesCount: message.thread.repliesCount, lastReply: message.thread.lastReply } From 42cd51f9664be4a3e86cd1d436bb4b2533e3cc71 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Wed, 7 May 2025 14:52:37 +0700 Subject: [PATCH 072/636] fix: writer proper content type to datalake Signed-off-by: Alexander Onnikov --- internal/pkg/storage/datalake.go | 32 +++++++++++++++++++++++++------- internal/pkg/storage/s3.go | 11 ----------- internal/pkg/storage/storage.go | 11 +++++++++++ 3 files changed, 36 insertions(+), 18 deletions(-) diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index abe2f69459f..e366c98d70f 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -20,8 +20,10 @@ import ( "fmt" "io" "mime/multipart" + "net/textproto" "os" "path/filepath" + "strings" "github.com/hcengineering/stream/internal/pkg/log" "github.com/pkg/errors" @@ -48,6 +50,24 @@ func NewDatalakeStorage(ctx context.Context, baseURL, workspace, token string) S } } +var quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"") + +func escapeQuotes(s string) string { + return quoteEscaper.Replace(s) +} + +func createFormFile(writer *multipart.Writer, fieldname, filename, contentType string) (io.Writer, error) { + h := make(textproto.MIMEHeader) + h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes(fieldname), escapeQuotes(filename))) + h.Set("Content-Type", contentType) + return writer.CreatePart(h) +} + +func getObjectKey(s string) string { + var _, objectKey = filepath.Split(s) + return objectKey +} + // PutFile 
uploads file to the datalake func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { // #nosec @@ -67,7 +87,7 @@ func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { body := &bytes.Buffer{} writer := multipart.NewWriter(body) - part, err := writer.CreateFormFile("file", objectKey) + part, err := createFormFile(writer, "file", objectKey, getContentType(objectKey)) if err != nil { return errors.Wrapf(err, "failed to create form file") } @@ -77,7 +97,10 @@ func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { return errors.Wrapf(err, "failed to copy file data") } - _ = writer.Close() + err = writer.Close() + if err != nil { + return errors.Wrapf(err, "failed to close multipart writer") + } req := fasthttp.AcquireRequest() defer fasthttp.ReleaseRequest(req) @@ -128,11 +151,6 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error return nil } -func getObjectKey(s string) string { - var _, objectKey = filepath.Split(s) - return objectKey -} - // PatchMeta patches metadata for the object func (d *DatalakeStorage) PatchMeta(ctx context.Context, filename string, md *Metadata) error { var logger = d.logger.With(zap.String("patch meta", d.workspace), zap.String("fileName", filename)) diff --git a/internal/pkg/storage/s3.go b/internal/pkg/storage/s3.go index 41da2eaaf19..3864b4fa183 100644 --- a/internal/pkg/storage/s3.go +++ b/internal/pkg/storage/s3.go @@ -20,7 +20,6 @@ import ( "os" "path/filepath" - "strings" "time" "github.com/aws/aws-sdk-go-v2/aws" @@ -63,16 +62,6 @@ func NewS3(ctx context.Context, endpoint, bucketName string) Storage { } } -func getContentType(objectKey string) string { - if strings.HasSuffix(objectKey, ".ts") { - return "video/mp2t" - } - if strings.HasSuffix(objectKey, ".m3u8") { - return "video/x-mpegurl" - } - return "application/octet-stream" -} - // DeleteFile deletes file from the s3 storage func (u *S3Storage) DeleteFile(ctx context.Context, fileName string) error { var _, objectKey = filepath.Split(fileName) diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index e45250284da..1914b26ac75 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -16,6 +16,7 @@ package storage import ( "context" "net/url" + "strings" "github.com/pkg/errors" ) @@ -53,3 +54,13 @@ func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, worksa return nil, errors.New("unknown scheme") } } + +func getContentType(objectKey string) string { + if strings.HasSuffix(objectKey, ".ts") { + return "video/mp2t" + } + if strings.HasSuffix(objectKey, ".m3u8") { + return "video/x-mpegurl" + } + return "application/octet-stream" +} From 78e9ec18eda063b0ecba21c35858881485c5d2af Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Wed, 7 May 2025 15:19:08 +0700 Subject: [PATCH 073/636] Add sentry do stream service Signed-off-by: Alexander Onnikov --- README.md | 4 +++- cmd/stream/main.go | 35 +++++++++++++++++++++++++++++++---- go.mod | 3 +++ go.sum | 19 ++++++++++++++++++- internal/pkg/config/config.go | 1 + 5 files changed, 56 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 529b764313d..efedffe08c1 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,8 @@ docker build . 
-t hcengineering/sream:latest The following environment variables can be used: ``` -KEY TYPE DEFAULT STREAM_LOG_LEVEL String debug sets log level for the application +KEY TYPE DEFAULT DESCRIPTION +STREAM_LOG_LEVEL String debug sets log level for the application STREAM_SERVER_SECRET String server secret required to generate and verify tokens STREAM_PPROF_ENABLED True or False true starts profile server on localhost:6060 if true STREAM_INSECURE True or False false ignores authorization check if true @@ -81,6 +82,7 @@ STREAM_ENDPOINT_URL URL s3://127.0.0.1:9000 STREAM_MAX_PARALLEL_SCALING_COUNT Integer 2 how much parallel scaling can be processed STREAM_MAX_THREAD_COUNT Integer 4 max number of threads for transcoder STREAM_OUTPUT_DIR String /tmp/transcoing/ path to the directory with tra +STREAM_SENTRY_DSN String "" sentry dsn for error tracking ``` ### Metadata diff --git a/cmd/stream/main.go b/cmd/stream/main.go index 9b34801c02f..ea63605dac7 100644 --- a/cmd/stream/main.go +++ b/cmd/stream/main.go @@ -21,7 +21,10 @@ import ( "os" "os/signal" "syscall" + "time" + "github.com/getsentry/sentry-go" + sentryhttp "github.com/getsentry/sentry-go/http" "go.uber.org/zap" "github.com/hcengineering/stream/internal/pkg/api/v1/recording" @@ -48,19 +51,43 @@ func main() { } logger.Sugar().Debugf("parsed config is %v", cfg) + if cfg.SentryDsn != "" { + if err := sentry.Init(sentry.ClientOptions{ + Dsn: cfg.SentryDsn, + Tags: map[string]string{"application": "stream"}, + }); err != nil { + logger.Sugar().Fatalf("sentry.Init: %s", err) + } + // ensure buffered events are sent before exit + defer sentry.Flush(2 * time.Second) + } + var recordingHandler = recording.NewHandler(ctx, cfg) var transcodingHandler = transcoding.NewHandler(ctx, cfg) - http.Handle("/recording/", http.StripPrefix("/recording/", recordingHandler)) - http.Handle("/recording", http.StripPrefix("/recording", recordingHandler)) - http.Handle("/transcoding", http.StripPrefix("/transcoding", transcodingHandler)) + // setup HTTP routes + mux := http.NewServeMux() + mux.Handle("/recording/", http.StripPrefix("/recording/", recordingHandler)) + mux.Handle("/recording", http.StripPrefix("/recording", recordingHandler)) + mux.Handle("/transcoding", http.StripPrefix("/transcoding", transcodingHandler)) + + // wrap with Sentry HTTP handler if enabled + var handler http.Handler = mux + if cfg.SentryDsn != "" { + sentryHandler := sentryhttp.New(sentryhttp.Options{ + Repanic: true, + WaitForDelivery: true, + Timeout: 2 * time.Second, + }) + handler = sentryHandler.Handle(mux) + } go func() { logger.Info("server started serving", zap.String("ServeURL", cfg.ServeURL)) defer logger.Info("server finished") // #nosec - var err = http.ListenAndServe(cfg.ServeURL, nil) + var err = http.ListenAndServe(cfg.ServeURL, handler) if err != nil { cancel() logger.Debug("unable to listen", zap.Error(err)) diff --git a/go.mod b/go.mod index 0671d43070f..30d0f9ea7c1 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,7 @@ require ( github.com/aws/aws-sdk-go-v2/config v1.29.6 github.com/aws/aws-sdk-go-v2/credentials v1.17.59 github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0 + github.com/getsentry/sentry-go v0.31.1 github.com/golang-jwt/jwt/v5 v5.2.1 github.com/google/uuid v1.6.0 github.com/kelseyhightower/envconfig v1.4.0 @@ -42,5 +43,7 @@ require ( github.com/valyala/bytebufferpool v1.0.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/net v0.37.0 // indirect + golang.org/x/sys v0.31.0 // indirect + golang.org/x/text v0.23.0 // indirect gopkg.in/yaml.v3 v3.0.1 
// indirect ) diff --git a/go.sum b/go.sum index 6ef7c10d828..ca1613a2fad 100644 --- a/go.sum +++ b/go.sum @@ -40,20 +40,34 @@ github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k= github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++eyE0KJ4= +github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY= +github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= +github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= +github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tus/tusd/v2 v2.6.0 h1:Je243QDKnFTvm/WkLH2bd1oQ+7trolrflRWyuI0PdWI= @@ -74,10 +88,13 @@ golang.org/x/exp v0.0.0-20250215185904-eff6e970281f h1:oFMYAjX0867ZD2jcNiLBrI9Bd golang.org/x/exp v0.0.0-20250215185904-eff6e970281f/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk= golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/sys v0.31.0 
h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= +golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/vansante/go-ffprobe.v2 v2.2.1 h1:sFV08OT1eZ1yroLCZVClIVd9YySgCh9eGjBWO0oRayI= gopkg.in/vansante/go-ffprobe.v2 v2.2.1/go.mod h1:qF0AlAjk7Nqzqf3y333Ly+KxN3cKF2JqA3JT5ZheUGE= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index 23fee3afc7c..0b0badfcf0e 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -25,6 +25,7 @@ import ( // Config represents configuration for the huly-stream application. type Config struct { + SentryDsn string `split_words:"true" default:"" desc:"sentry dsn value"` LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` ServerSecret string `split_words:"true" default:"" desc:"server secret required to generate and verify tokens"` PprofEnabled bool `split_words:"true" default:"true" desc:"starts profile server on localhost:6060 if true"` From ba149a18c2b99c1afbb40bde69419a1c685d549d Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Wed, 7 May 2025 15:19:28 +0700 Subject: [PATCH 074/636] fix: null pointer dereference in scheduler Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/scheduler.go | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index aac40c14669..24717499064 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -148,7 +148,14 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { return } - var res = fmt.Sprintf("%v:%v", probe.FirstVideoStream().Width, probe.FirstVideoStream().Height) + videoStream := probe.FirstVideoStream() + if videoStream == nil { + logger.Error("no video stream found in the file", zap.String("filepath", sourceFilePath)) + _ = os.RemoveAll(destinationFolder) + return + } + + var res = fmt.Sprintf("%v:%v", videoStream.Width, videoStream.Height) var level = resconv.Level(res) var opts = Options{ Input: sourceFilePath, @@ -214,8 +221,8 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { var hls = HLS{ - Width: probe.FirstVideoStream().Width, - Height: probe.FirstVideoStream().Height, + Width: videoStream.Width, + Height: videoStream.Height, Source: task.ID + "_master.m3u8", Thumbnail: task.ID + ".jpg", } From 3d2a250df57e0da8fecc3af07258a3606b43e103 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Wed, 7 May 2025 16:37:41 +0700 Subject: [PATCH 075/636] fix: typos in README Signed-off-by: Alexander Onnikov --- README.md | 26 +++++++++++++------------- internal/pkg/storage/storage.go | 8 ++++---- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 
efedffe08c1..a1e12967c0f 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,9 @@ ## About -The Stream is high-performance HTTP-based transcoding service. *Stream* supports **TUS protocol**, enabling reliable, -resumable transcodings. Designed for seamless and consistent media processing,it supports advanced transcoding features -with robust integration options. +The Stream is a high-performance HTTP-based transcoding service. *Stream* supports the **TUS protocol**, enabling +reliable, resumable transcoding. Designed for seamless and consistent media processing, it supports advanced transcoding +features with robust integration options. --- @@ -36,8 +36,8 @@ with robust integration options. #### Key Functionalities -- **Live transcoing with minimal upload time**: Transcoding results are going to be avaible after stream completion. -- **Transcoding Cancelation**: Cancel or pause ongoing transcoding in real-time. +- **Live transcoding with minimal upload time**: Transcoding results are available after stream completion. +- **Transcoding Cancellation**: Cancel or pause ongoing transcoding in real-time. - **Transcoding Resumption**: Resume incomplete transcoding tasks efficiently. ### Transcoding scheduling @@ -60,14 +60,14 @@ go mod tidy 2. Build the service: ```bash -docker build . -t hcengineering/sream:latest +docker build . -t hcengineering/stream:latest ``` --- -## Configuraiton +## Configuration -### App env configuraiton +### App Env Configuration The following environment variables can be used: @@ -77,11 +77,11 @@ STREAM_LOG_LEVEL String debug STREAM_SERVER_SECRET String server secret required to generate and verify tokens STREAM_PPROF_ENABLED True or False true starts profile server on localhost:6060 if true STREAM_INSECURE True or False false ignores authorization check if true -STREAM_SERVE_URL String 0.0.0.0:1080 listen on url +STREAM_SERVE_URL String 0.0.0.0:1080 listens on URL STREAM_ENDPOINT_URL URL s3://127.0.0.1:9000 S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address STREAM_MAX_PARALLEL_SCALING_COUNT Integer 2 how much parallel scaling can be processed STREAM_MAX_THREAD_COUNT Integer 4 max number of threads for transcoder -STREAM_OUTPUT_DIR String /tmp/transcoing/ path to the directory with tra +STREAM_OUTPUT_DIR String /tmp/transcoding/ path to the directory where transcoded files are stored STREAM_SENTRY_DSN String "" sentry dsn for error tracking ``` @@ -105,7 +105,7 @@ The service exposes an HTTP API. Below are some examples of how to interact with it. -### Trnascode via TUS +### Transcode via TUS ```bash curl -X POST http://localhost:1080/recording \ @@ -114,8 +114,8 @@ curl -X POST http://localhost:1080/recording \ --data-binary @path/to/your/file.mp4 ``` -Note: tus client is required, to play with a service locally you can use tus-js-client example -with [video](https://github.com/tus/tus-js-client/blob/main/demos/browser/video.html) +Note: A TUS client is required to play with the service locally. +You can use the tus-js-client example with [video](https://github.com/tus/tus-js-client/blob/main/demos/browser/video.html). 
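For a browser client, a minimal tus-js-client sketch along the lines of the demo linked above might look like this. It is only an illustration: the endpoint mirrors the curl example earlier in this section, while the `Authorization` header and the metadata keys are assumptions rather than a documented contract (the header can be dropped entirely when `STREAM_INSECURE=true`).

```ts
import * as tus from 'tus-js-client'

// file is a File object, e.g. taken from an <input type="file"> element
function uploadRecording (file: File, token: string): void {
  const upload = new tus.Upload(file, {
    // matches the curl example above; adjust host/port to your deployment
    endpoint: 'http://localhost:1080/recording',
    // assumed auth scheme; omit when STREAM_INSECURE=true
    headers: { Authorization: 'Bearer ' + token },
    // illustrative metadata keys, not a documented contract
    metadata: { filename: file.name, filetype: file.type },
    retryDelays: [0, 1000, 3000, 5000],
    onError: (err) => console.error('upload failed', err),
    onProgress: (sent, total) => console.log(`uploaded ${sent} of ${total} bytes`),
    onSuccess: () => console.log('upload finished')
  })
  upload.start()
}
```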
### Schedule a transcoding diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index 1914b26ac75..79b65ec9c29 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -38,8 +38,8 @@ type Storage interface { } // NewStorageByURL creates a new storage based on the type from the url scheme, for example "datalake://my-datalake-endpoint" -func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, worksapce string) (Storage, error) { - if worksapce == "" { +func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, workspace string) (Storage, error) { + if workspace == "" { return nil, errors.New("workspace is missed") } switch storageType { @@ -47,9 +47,9 @@ func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, worksa if token == "" { return nil, errors.New("token is missed") } - return NewDatalakeStorage(ctx, u.String(), worksapce, token), nil + return NewDatalakeStorage(ctx, u.String(), workspace, token), nil case "s3": - return NewS3(ctx, u.String(), worksapce), nil + return NewS3(ctx, u.String(), workspace), nil default: return nil, errors.New("unknown scheme") } From 0b70965641822555a098cc2646e109064ed009df Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Wed, 7 May 2025 16:41:46 +0700 Subject: [PATCH 076/636] add comment about CreateFormFile function Signed-off-by: Alexander Onnikov --- internal/pkg/storage/datalake.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index e366c98d70f..068c22e8cd5 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -56,6 +56,9 @@ func escapeQuotes(s string) string { return quoteEscaper.Replace(s) } +// multipart writer CreateFormFile function does not support custom content type +// here we have to have a modified copy that uses actual type instead of application/octet-stream +// see https://github.com/golang/go/issues/49329 func createFormFile(writer *multipart.Writer, fieldname, filename, contentType string) (io.Writer, error) { h := make(textproto.MIMEHeader) h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, escapeQuotes(fieldname), escapeQuotes(filename))) From f349f8aad9905ca1247f6aa5c4b7f81111a6bf03 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 7 May 2025 19:29:44 +0400 Subject: [PATCH 077/636] Fix context create (#51) Signed-off-by: Kristina Fefelova --- .version | 2 +- .../sdk-types/src/responseEvents/notification.ts | 1 + packages/server/src/middleware/db.ts | 6 ++++-- packages/server/src/triggers/message.ts | 2 +- packages/server/src/triggers/notification.ts | 13 +++++++++++-- 5 files changed, 18 insertions(+), 6 deletions(-) diff --git a/.version b/.version index e8be64cc0f6..e5c7f62e4e7 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.176 +0.1.178 diff --git a/packages/sdk-types/src/responseEvents/notification.ts b/packages/sdk-types/src/responseEvents/notification.ts index 20958ad0bfa..fc826a1833f 100644 --- a/packages/sdk-types/src/responseEvents/notification.ts +++ b/packages/sdk-types/src/responseEvents/notification.ts @@ -81,6 +81,7 @@ export interface AddedCollaboratorsEvent extends BaseResponseEvent { card: CardID cardType: CardType collaborators: AccountID[] + date: Date } export interface RemovedCollaboratorsEvent extends BaseResponseEvent { diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index ba538123284..c6d543edb65 100644 --- 
a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -191,7 +191,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } private async addCollaborators(event: AddCollaboratorsEvent): Promise { - const added = await this.db.addCollaborators(event.card, event.cardType, event.collaborators, event.date) + const date = event.date ?? new Date() + const added = await this.db.addCollaborators(event.card, event.cardType, event.collaborators, date) if (added.length === 0) return {} return { responseEvent: { @@ -199,7 +200,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { type: NotificationResponseEventType.AddedCollaborators, card: event.card, cardType: event.cardType, - collaborators: added + collaborators: added, + date } } } diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index 3f6bcb4b49c..6afebacaddc 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -71,7 +71,7 @@ async function onMessagesRemoved(ctx: TriggerCtx, event: MessagesRemovedEvent): replies: 'decrement' } - return [threadEvent] + return [patchEvent, threadEvent] }) } diff --git a/packages/server/src/triggers/notification.ts b/packages/server/src/triggers/notification.ts index a5e1c06ab20..b8af2d8ce62 100644 --- a/packages/server/src/triggers/notification.ts +++ b/packages/server/src/triggers/notification.ts @@ -14,10 +14,11 @@ // import { - LabelRequestEventType, - NotificationResponseEventType, type AddedCollaboratorsEvent, + LabelRequestEventType, type NotificationContextUpdatedEvent, + NotificationRequestEventType, + NotificationResponseEventType, type RemovedCollaboratorsEvent, type RequestEvent } from '@hcengineering/communication-sdk-types' @@ -36,6 +37,14 @@ async function onAddedCollaborators(ctx: TriggerCtx, event: AddedCollaboratorsEv account: collaborator, label: SubscriptionLabelID }) + + result.push({ + type: NotificationRequestEventType.CreateNotificationContext, + account: collaborator, + card, + lastUpdate: event.date, + lastView: event.date + }) } return result } From f7de73eca783f4c25338434450cd9b7c060df038 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 12 May 2025 09:49:13 +0400 Subject: [PATCH 078/636] Fix undefined error (#52) Signed-off-by: Kristina Fefelova --- .version | 2 +- bun.lock | 8 ++++---- package.json | 4 ++-- packages/server/src/middleware/broadcast.ts | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.version b/.version index e5c7f62e4e7..2005625e83d 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.178 +0.1.180 diff --git a/bun.lock b/bun.lock index 0d70c2ac6be..268d9a923ea 100644 --- a/bun.lock +++ b/bun.lock @@ -5,8 +5,8 @@ "name": "@hcengineering/communication", "devDependencies": { "@eslint/js": "^9.26.0", - "@types/bun": "^1.2.12", - "bun-types": "^1.2.12", + "@types/bun": "^1.2.13", + "bun-types": "^1.2.13", "eslint": "^9.26.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.4.0", @@ -272,7 +272,7 @@ "@types/body-parser": ["@types/body-parser@1.19.5", "", { "dependencies": { "@types/connect": "*", "@types/node": "*" } }, "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg=="], - "@types/bun": ["@types/bun@1.2.12", "", { "dependencies": { "bun-types": "1.2.12" } }, "sha512-lY/GQTXDGsolT/TiH72p1tuyUORuRrdV7VwOTOjDOt8uTBJQOJc5zz3ufwwDl0VBaoxotSk4LdP0hhjLJ6ypIQ=="], + "@types/bun": ["@types/bun@1.2.13", "", { 
"dependencies": { "bun-types": "1.2.13" } }, "sha512-u6vXep/i9VBxoJl3GjZsl/BFIsvML8DfVDO0RYLEwtSZSp981kEO1V5NwRcO1CPJ7AmvpbnDCiMKo3JvbDEjAg=="], "@types/connect": ["@types/connect@3.4.38", "", { "dependencies": { "@types/node": "*" } }, "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug=="], @@ -352,7 +352,7 @@ "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - "bun-types": ["bun-types@1.2.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-tvWMx5vPqbRXgE8WUZI94iS1xAYs8bkqESR9cxBB1Wi+urvfTrF1uzuDgBHFAdO0+d2lmsbG3HmeKMvUyj6pWA=="], + "bun-types": ["bun-types@1.2.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-rRjA1T6n7wto4gxhAO/ErZEtOXyEZEmnIHQfl0Dt1QQSB4QV0iP6BZ9/YB5fZaHFQ2dwHFrmPaRQ9GGMX01k9Q=="], "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], diff --git a/package.json b/package.json index f950e32b634..d65ebf45b3c 100644 --- a/package.json +++ b/package.json @@ -13,8 +13,8 @@ }, "devDependencies": { "@eslint/js": "^9.26.0", - "@types/bun": "^1.2.12", - "bun-types": "^1.2.12", + "@types/bun": "^1.2.13", + "bun-types": "^1.2.13", "eslint": "^9.26.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.4.0", diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index 7e11f6597ec..9327ebb7066 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -181,19 +181,19 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { return this.matchMessagesQuery( { ids: [event.message.id], card: event.message.card }, Array.from(info.messageQueries.values()), - new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) + new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) case MessageResponseEventType.PatchCreated: return this.matchMessagesQuery( { card: event.card, ids: [event.patch.message] }, Array.from(info.messageQueries.values()), - new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) + new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) case MessageResponseEventType.MessagesRemoved: return this.matchMessagesQuery( { card: event.card, ids: event.messages }, Array.from(info.messageQueries.values()), - new Set(info.contextQueries.values().flatMap((it) => Array.from(it))) + new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) case MessageResponseEventType.ReactionCreated: return this.matchMessagesQuery( From f6d3e11fecaec6df080f91f10a44d211ddd16b28 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 13 May 2025 00:35:27 +0700 Subject: [PATCH 079/636] fix: enable conversion for mp4 files only Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/scheduler.go | 42 +++++++++++++++++++++++--- internal/pkg/storage/datalake.go | 32 ++++++++++++++++++++ internal/pkg/storage/s3.go | 21 +++++++++++++ internal/pkg/storage/storage.go | 7 +++++ 4 files changed, 98 insertions(+), 4 deletions(-) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 24717499064..24fc394425f 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -121,20 +121,37 @@ func (p *Scheduler) 
processTask(ctx context.Context, task *Task) { var destinationFolder = filepath.Join(p.cfg.OutputDir, task.ID) var _, filename = filepath.Split(task.Source) var sourceFilePath = filepath.Join(destinationFolder, filename) - _ = os.MkdirAll(destinationFolder, os.ModePerm) + err = os.MkdirAll(destinationFolder, os.ModePerm) + if err != nil { + logger.Error("can not create temporary folder", zap.Error(err)) + return + } logger.Debug("phase 3: get the remote file") remoteStorage, err := storage.NewStorageByURL(ctx, p.cfg.Endpoint(), p.cfg.EndpointURL.Scheme, tokenString, task.Workspace) if err != nil { - logger.Error("can not create storage by url", zap.Error(err)) + logger.Error("can not create storage by url", zap.Error(err), zap.String("url", p.cfg.EndpointURL.String())) + _ = os.RemoveAll(destinationFolder) + return + } + + stat, err := remoteStorage.StatFile(ctx, task.Source) + if err != nil { + logger.Error("can not stat a file", zap.Error(err), zap.String("filepath", task.Source)) + _ = os.RemoveAll(destinationFolder) + return + } + + if !IsSupportedMediaType(stat.Type) { + logger.Info("unsupported media type", zap.String("type", stat.Type)) _ = os.RemoveAll(destinationFolder) return } if err = remoteStorage.GetFile(ctx, task.Source, sourceFilePath); err != nil { - logger.Error("can not download a file", zap.Error(err)) + logger.Error("can not download a file", zap.Error(err), zap.String("filepath", task.Source)) _ = os.RemoveAll(destinationFolder) // TODO: reschedule return @@ -177,7 +194,12 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { SourceFile: sourceFilePath, }) - _ = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) + err = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) + if err != nil { + logger.Error("can not generate hls playlist", zap.String("out", p.cfg.OutputDir), zap.String("uploadID", opts.UploadID)) + _ = os.RemoveAll(destinationFolder) + return + } go uploader.Start() @@ -240,3 +262,15 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { } } } + +func IsSupportedMediaType(mediaType string) bool { + // Explicitly disable conversion for video/mp2t and video/x-mpegurl + switch mediaType { + case "video/mp2t", "video/x-mpegurl": + return false + case "video/mp4": + return true + default: + return false + } +} diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 068c22e8cd5..25d07268fc5 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -271,5 +271,37 @@ func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination str return nil } +func (d *DatalakeStorage) StatFile(ctx context.Context, filename string) (*BlobInfo, error) { + var logger = d.logger.With(zap.String("head", d.workspace), zap.String("fileName", filename)) + logger.Debug("start") + + var objectKey = getObjectKey(filename) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey) + req.Header.SetMethod(fasthttp.MethodHead) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + return nil, err + } + + // Check the response status code + if resp.StatusCode() != fasthttp.StatusOK { + var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) + logger.Debug("bad status code", zap.Error(err)) + return nil, err + } + + var info BlobInfo + info.Size = 
int64(resp.Header.ContentLength()) + info.Type = string(resp.Header.ContentType()) + info.ETag = string(resp.Header.Peek("ETag")) + return &info, nil +} + var _ Storage = (*DatalakeStorage)(nil) var _ MetaProvider = (*DatalakeStorage)(nil) diff --git a/internal/pkg/storage/s3.go b/internal/pkg/storage/s3.go index 3864b4fa183..3a4baca9529 100644 --- a/internal/pkg/storage/s3.go +++ b/internal/pkg/storage/s3.go @@ -164,3 +164,24 @@ func (u *S3Storage) GetFile(ctx context.Context, filename, dest string) error { return nil } + +func (u *S3Storage) StatFile(ctx context.Context, filename string) (*BlobInfo, error) { + var logger = u.logger.With(zap.String("head", u.bucketName), zap.String("fileName", filename)) + + var head, err = u.client.HeadObject(ctx, &s3.HeadObjectInput{ + Bucket: &u.bucketName, + Key: &filename, + }) + + if err != nil { + logger.Error("failed to head object", zap.Error(err)) + return nil, err + } + + var info BlobInfo + info.Size = *head.ContentLength + info.Type = *head.ContentType + info.ETag = *head.ETag + + return &info, nil +} diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index 79b65ec9c29..8eb726a3370 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -30,11 +30,18 @@ type MetaProvider interface { PatchMeta(ctx context.Context, filename string, value *Metadata) error } +type BlobInfo struct { + Size int64 + Type string + ETag string +} + // Storage represents file-based storage type Storage interface { PutFile(ctx context.Context, fileName string) error DeleteFile(ctx context.Context, fileName string) error GetFile(ctx context.Context, fileName, destination string) error + StatFile(ctx context.Context, fileName string) (*BlobInfo, error) } // NewStorageByURL creates a new storage based on the type from the url scheme, for example "datalake://my-datalake-endpoint" From 3b5cfed3205e0a3ca2f916ec46a2a0dbbac950c1 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 13 May 2025 01:05:01 +0700 Subject: [PATCH 080/636] fix: increase min complexity Signed-off-by: Alexander Onnikov --- .golangci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.golangci.yaml b/.golangci.yaml index a5335cf2089..57e1b81c28a 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -57,7 +57,7 @@ linters-settings: goimports: local-prefixes: github.com/networkservicemesh/sdk gocyclo: - min-complexity: 15 + min-complexity: 20 dupl: threshold: 150 funlen: From d82387f8473528bc744d0ea0500a681946a19e27 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 13 May 2025 01:08:52 +0700 Subject: [PATCH 081/636] fix: asjust linter settings Signed-off-by: Alexander Onnikov --- .golangci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.golangci.yaml b/.golangci.yaml index 57e1b81c28a..3c796cc8c1f 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -62,7 +62,7 @@ linters-settings: threshold: 150 funlen: lines: 140 - statements: 80 + statements: 100 goconst: min-len: 2 min-occurrences: 2 From ce265f9518f1ed495acca530c36db45f1e4dd909 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 13 May 2025 01:13:53 +0700 Subject: [PATCH 082/636] fix: more fixes Signed-off-by: Alexander Onnikov --- .golangci.yaml | 2 +- internal/pkg/mediaconvert/scheduler.go | 1 + internal/pkg/storage/datalake.go | 1 + internal/pkg/storage/s3.go | 1 + internal/pkg/storage/storage.go | 1 + 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.golangci.yaml b/.golangci.yaml index 3c796cc8c1f..16f9ae9d7bf 
100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -61,7 +61,7 @@ linters-settings: dupl: threshold: 150 funlen: - lines: 140 + lines: 160 statements: 100 goconst: min-len: 2 diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 24fc394425f..5c686629eea 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -263,6 +263,7 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { } } +// IsSupportedMediaType checks whether transcoding is supported for given media type func IsSupportedMediaType(mediaType string) bool { // Explicitly disable conversion for video/mp2t and video/x-mpegurl switch mediaType { diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 25d07268fc5..b2f1f64bb8f 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -271,6 +271,7 @@ func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination str return nil } +// StatFile gets file stat from the storage func (d *DatalakeStorage) StatFile(ctx context.Context, filename string) (*BlobInfo, error) { var logger = d.logger.With(zap.String("head", d.workspace), zap.String("fileName", filename)) logger.Debug("start") diff --git a/internal/pkg/storage/s3.go b/internal/pkg/storage/s3.go index 3a4baca9529..96c2aa9bf17 100644 --- a/internal/pkg/storage/s3.go +++ b/internal/pkg/storage/s3.go @@ -165,6 +165,7 @@ func (u *S3Storage) GetFile(ctx context.Context, filename, dest string) error { return nil } +// StatFile gets file stat from the storage func (u *S3Storage) StatFile(ctx context.Context, filename string) (*BlobInfo, error) { var logger = u.logger.With(zap.String("head", u.bucketName), zap.String("fileName", filename)) diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index 8eb726a3370..db619e8fdca 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -30,6 +30,7 @@ type MetaProvider interface { PatchMeta(ctx context.Context, filename string, value *Metadata) error } +// BlobInfo contains blob stat information type BlobInfo struct { Size int64 Type string From cb3ad660fbc94fe19a62a5ac29da2e29fb06a15e Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 14 May 2025 10:21:09 +0400 Subject: [PATCH 083/636] Add card events and files meta (#53) Signed-off-by: Kristina Fefelova --- .version | 2 +- bun.lock | 52 ++++++-- package.json | 2 +- packages/cockroach/src/adapter.ts | 72 +++++++++-- packages/cockroach/src/db/label.ts | 107 +++++++++++----- packages/cockroach/src/db/mapping.ts | 3 +- packages/cockroach/src/db/message.ts | 116 ++++++++++++++---- packages/cockroach/src/db/notification.ts | 76 +++++++++--- packages/cockroach/src/db/schema.ts | 4 +- packages/query/src/label/query.ts | 57 +++++++++ packages/query/src/messages/query.ts | 40 ++++-- .../query/src/notification-contexts/query.ts | 28 ++++- packages/rest-client/src/rest.ts | 6 +- packages/sdk-types/src/db.ts | 31 +++-- packages/sdk-types/src/event.ts | 24 +++- packages/sdk-types/src/requestEvents/card.ts | 36 ++++++ .../sdk-types/src/requestEvents/message.ts | 4 +- packages/sdk-types/src/responseEvents/card.ts | 36 ++++++ packages/server/package.json | 2 + packages/server/src/middleware/broadcast.ts | 5 + packages/server/src/middleware/db.ts | 56 ++++++++- packages/server/src/middleware/triggers.ts | 17 +-- packages/server/src/middleware/validate.ts | 111 ++++++++++++++--- packages/server/src/middlewares.ts | 4 +- 
.../server/src/notification/notification.ts | 1 - packages/server/src/triggers/all.ts | 3 +- packages/server/src/triggers/card.ts | 96 +++++++++++++++ packages/server/src/triggers/message.ts | 28 ++++- packages/server/src/triggers/notification.ts | 37 ++++-- packages/server/src/triggers/utils.ts | 1 - packages/server/src/types.ts | 14 +-- packages/types/src/core.ts | 4 +- packages/types/src/message.ts | 3 +- 33 files changed, 887 insertions(+), 191 deletions(-) create mode 100644 packages/sdk-types/src/requestEvents/card.ts create mode 100644 packages/sdk-types/src/responseEvents/card.ts create mode 100644 packages/server/src/triggers/card.ts diff --git a/.version b/.version index 2005625e83d..7fcea722aa2 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -0.1.180 +0.1.181 diff --git a/bun.lock b/bun.lock index 268d9a923ea..ef9c9cd4e7b 100644 --- a/bun.lock +++ b/bun.lock @@ -12,7 +12,7 @@ "eslint-plugin-prettier": "^5.4.0", "prettier": "^3.5.3", "turbo": "^2.5.3", - "typescript-eslint": "^8.32.0", + "typescript-eslint": "^8.32.1", }, }, "packages/client-query": { @@ -111,6 +111,8 @@ "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/server-token": "^0.7.88", + "@hcengineering/text-core": "^0.7.88", + "@hcengineering/text-markdown": "^0.7.88", "zod": "^3.24.2", }, "devDependencies": { @@ -236,6 +238,12 @@ "@hcengineering/tags": ["@hcengineering/tags@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/tags/0.7.88/f3f0de35b1be71d1f3f6711b8e4f161e0cf857eb", { "dependencies": { "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/ui": "^0.7.88", "@hcengineering/view": "^0.7.88" } }, "sha512-wpnVX/1Y8E4YPYnUcw+ZyVOpZvXYpboZC9D1btMqFs34CoMGc4tW1XT3YfsS8KxlkH6tsQjsAUvtWOPy2J46dg=="], + "@hcengineering/text-core": ["@hcengineering/text-core@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/text-core/0.7.88/7e53b0da9622709b2d1071049ead3843e0c45764", { "dependencies": { "@hcengineering/core": "^0.7.88", "fast-equals": "^5.2.2" } }, "sha512-mFri9SMzD99uR3/H0gZ4KEekYf3lEmJ2bqEQrjIVmi9z7+CzDTxmKnXeHKzcFpvQaLt5kmbPIoXM4MyxpK7ZXA=="], + + "@hcengineering/text-html": ["@hcengineering/text-html@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/text-html/0.7.88/e48beced20921d9e1e9c84548c3caf5c9770bac1", { "dependencies": { "@hcengineering/text-core": "^0.7.88", "htmlparser2": "^9.0.0" } }, "sha512-Wj0gYv0JEUIKpuaDFmRn7/60/SXGg0CZVmzbux0XlUzayly+C4Pkfj5U3G/ZEUyDBtAtyeBdWtnkzgQgN6qOuQ=="], + + "@hcengineering/text-markdown": ["@hcengineering/text-markdown@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/text-markdown/0.7.88/86938b13b0bdd81941a22378e3b2e92998f71d80", { "dependencies": { "@hcengineering/text-core": "^0.7.88", "@hcengineering/text-html": "^0.7.88", "fast-equals": "^5.2.2", "markdown-it": "^14.0.0" } }, "sha512-066WeD/6EI5VmFQZ/Cv6eEOFXBd+fA+YPxX5fb0ytlZr5bLpQs6/AifpyqJoZ5gqQzfAf/HmtDVxDl5QU95FGw=="], + "@hcengineering/theme": ["@hcengineering/theme@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/theme/0.7.88/d939d1e2a9047def2795dc5124653e49ca522cfc", { "dependencies": { "@hcengineering/analytics": "^0.7.88", "@hcengineering/platform": "^0.7.88", "svelte": "^4.2.19" } }, "sha512-PCt9bFuGPYOf1kQgjzxPSZgGXU4G14Z1XZJfgGu71u/zHxG6fu6nls35aSrEjy2Ks6oR+OCqqvpDh6RTf6utrg=="], "@hcengineering/ui": ["@hcengineering/ui@0.7.88", "https://npm.pkg.github.com/download/@hcengineering/ui/0.7.88/fce3392a59f706c248ba4b17714163e5725d5f01", { "dependencies": { "@hcengineering/analytics": 
"^0.7.88", "@hcengineering/core": "^0.7.88", "@hcengineering/platform": "^0.7.88", "@hcengineering/theme": "^0.7.88", "autolinker": "4.0.0", "date-fns": "^2.30.0", "date-fns-tz": "^2.0.0", "dompurify": "^3.1.6", "emojibase": "^16.0.0", "fast-equals": "^5.2.2", "hls.js": "^1.5.20", "plyr": "^3.7.8", "svelte": "^4.2.19" } }, "sha512-mzHXcX3aBC/OXUaV9F2hnB6+irwMfEYM5J1CnfqiHqgv3Hza20BZR6BrYe/nQCLb44DuHkXojbwL94AKK3QW6w=="], @@ -310,21 +318,21 @@ "@types/ws": ["@types/ws@8.18.0", "", { "dependencies": { "@types/node": "*" } }, "sha512-8svvI3hMyvN0kKCJMvTJP/x6Y/EoQbepff882wL+Sn5QsXb3etnamgrJq4isrBxSJj5L2AuXcI0+bgkoAXGUJw=="], - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.32.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/type-utils": "8.32.0", "@typescript-eslint/utils": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-/jU9ettcntkBFmWUzzGgsClEi2ZFiikMX5eEQsmxIAWMOn4H3D4rvHssstmAHGVvrYnaMqdWWWg0b5M6IN/MTQ=="], + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@8.32.1", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "8.32.1", "@typescript-eslint/type-utils": "8.32.1", "@typescript-eslint/utils": "8.32.1", "@typescript-eslint/visitor-keys": "8.32.1", "graphemer": "^1.4.0", "ignore": "^7.0.0", "natural-compare": "^1.4.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-6u6Plg9nP/J1GRpe/vcjjabo6Uc5YQPAMxsgQyGC/I0RuukiG1wIe3+Vtg3IrSCVJDmqK3j8adrtzXSENRtFgg=="], - "@typescript-eslint/parser": ["@typescript-eslint/parser@8.32.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-B2MdzyWxCE2+SqiZHAjPphft+/2x2FlO9YBx7eKE1BCb+rqBlQdhtAEhzIEdozHd55DXPmxBdpMygFJjfjjA9A=="], + "@typescript-eslint/parser": ["@typescript-eslint/parser@8.32.1", "", { "dependencies": { "@typescript-eslint/scope-manager": "8.32.1", "@typescript-eslint/types": "8.32.1", "@typescript-eslint/typescript-estree": "8.32.1", "@typescript-eslint/visitor-keys": "8.32.1", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-LKMrmwCPoLhM45Z00O1ulb6jwyVr2kr3XJp+G+tSEZcbauNnScewcQwtJqXDhXeYPDEjZ8C1SjXm015CirEmGg=="], - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0" } }, "sha512-jc/4IxGNedXkmG4mx4nJTILb6TMjL66D41vyeaPWvDUmeYQzF3lKtN15WsAeTr65ce4mPxwopPSo1yUUAWw0hQ=="], + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@8.32.1", "", { "dependencies": { "@typescript-eslint/types": "8.32.1", "@typescript-eslint/visitor-keys": "8.32.1" } }, "sha512-7IsIaIDeZn7kffk7qXC3o6Z4UblZJKV3UBpkvRNpr5NSyLji7tvTcvmnMNYuYLyh26mN8W723xpo3i4MlD33vA=="], - "@typescript-eslint/type-utils": 
["@typescript-eslint/type-utils@8.32.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.32.0", "@typescript-eslint/utils": "8.32.0", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-t2vouuYQKEKSLtJaa5bB4jHeha2HJczQ6E5IXPDPgIty9EqcJxpr1QHQ86YyIPwDwxvUmLfP2YADQ5ZY4qddZg=="], + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@8.32.1", "", { "dependencies": { "@typescript-eslint/typescript-estree": "8.32.1", "@typescript-eslint/utils": "8.32.1", "debug": "^4.3.4", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-mv9YpQGA8iIsl5KyUPi+FGLm7+bA4fgXaeRcFKRDRwDMu4iwrSHeDPipwueNXhdIIZltwCJv+NkxftECbIZWfA=="], - "@typescript-eslint/types": ["@typescript-eslint/types@8.32.0", "", {}, "sha512-O5Id6tGadAZEMThM6L9HmVf5hQUXNSxLVKeGJYWNhhVseps/0LddMkp7//VDkzwJ69lPL0UmZdcZwggj9akJaA=="], + "@typescript-eslint/types": ["@typescript-eslint/types@8.32.1", "", {}, "sha512-YmybwXUJcgGqgAp6bEsgpPXEg6dcCyPyCSr0CAAueacR/CCBi25G3V8gGQ2kRzQRBNol7VQknxMs9HvVa9Rvfg=="], - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "@typescript-eslint/visitor-keys": "8.32.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-pU9VD7anSCOIoBFnhTGfOzlVFQIA1XXiQpH/CezqOBaDppRwTglJzCC6fUQGpfwey4T183NKhF1/mfatYmjRqQ=="], + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@8.32.1", "", { "dependencies": { "@typescript-eslint/types": "8.32.1", "@typescript-eslint/visitor-keys": "8.32.1", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^2.1.0" }, "peerDependencies": { "typescript": ">=4.8.4 <5.9.0" } }, "sha512-Y3AP9EIfYwBb4kWGb+simvPaqQoT5oJuzzj9m0i6FCY6SPvlomY2Ei4UEMm7+FXtlNJbor80ximyslzaQF6xhg=="], - "@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.0", "@typescript-eslint/types": "8.32.0", "@typescript-eslint/typescript-estree": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-8S9hXau6nQ/sYVtC3D6ISIDoJzS1NsCK+gluVhLN2YkBPX+/1wkwyUiDKnxRh15579WoOIyVWnoyIf3yGI9REw=="], + "@typescript-eslint/utils": ["@typescript-eslint/utils@8.32.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.7.0", "@typescript-eslint/scope-manager": "8.32.1", "@typescript-eslint/types": "8.32.1", "@typescript-eslint/typescript-estree": "8.32.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-DsSFNIgLSrc89gpq1LJB7Hm1YpuhK086DRDJSNrewcGvYloWW1vZLHBTIvarKZDcAORIy/uWNx8Gad+4oMpkSA=="], - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.32.0", "", { "dependencies": { "@typescript-eslint/types": "8.32.0", "eslint-visitor-keys": "^4.2.0" } }, "sha512-1rYQTCLFFzOI5Nl0c8LUpJT8HxpwVRn9E4CkMsYfuN6ctmQqExjSTzzSk0Tz2apmXy7WU6/6fyaZVVA/thPN+w=="], + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@8.32.1", "", { "dependencies": { "@typescript-eslint/types": "8.32.1", "eslint-visitor-keys": "^4.2.0" } }, 
"sha512-ar0tjQfObzhSaW3C3QNmTc5ofj0hDoNQ5XWrCy6zDyabdr0TWhCkClp+rywGNj/odAFBVzzJrK4tEq5M4Hmu4w=="], "accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="], @@ -402,8 +410,16 @@ "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + "dom-serializer": ["dom-serializer@2.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg=="], + + "domelementtype": ["domelementtype@2.3.0", "", {}, "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="], + + "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="], + "dompurify": ["dompurify@3.2.4", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, "sha512-ysFSFEDVduQpyhzAob/kkuJjf5zWkZD8/A9ywSp1byueyuCfHamrCBa14/Oc2iiB0e51B+NpxSl5gmzn+Ms/mg=="], + "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="], + "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], @@ -412,6 +428,8 @@ "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="], + "entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], + "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], @@ -506,6 +524,8 @@ "hls.js": ["hls.js@1.5.20", "", {}, "sha512-uu0VXUK52JhihhnN/MVVo1lvqNNuhoxkonqgO3IpjvQiGpJBdIXMGkofjQb/j9zvV7a1SW8U9g1FslWx/1HOiQ=="], + "htmlparser2": ["htmlparser2@9.1.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.1.0", "entities": "^4.5.0" } }, "sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ=="], + "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], "iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], @@ -548,6 +568,8 @@ "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, 
"sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], + "linkify-it": ["linkify-it@5.0.0", "", { "dependencies": { "uc.micro": "^2.0.0" } }, "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ=="], + "loadjs": ["loadjs@4.3.0", "", {}, "sha512-vNX4ZZLJBeDEOBvdr2v/F+0aN5oMuPu7JTqrMwp+DtgK+AryOlpy6Xtm2/HpNr+azEa828oQjOtWsB6iDtSfSQ=="], "locate-character": ["locate-character@3.0.0", "", {}, "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA=="], @@ -558,10 +580,14 @@ "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], + "markdown-it": ["markdown-it@14.1.0", "", { "dependencies": { "argparse": "^2.0.1", "entities": "^4.4.0", "linkify-it": "^5.0.0", "mdurl": "^2.0.0", "punycode.js": "^2.3.1", "uc.micro": "^2.1.0" }, "bin": { "markdown-it": "bin/markdown-it.mjs" } }, "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg=="], + "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], "mdn-data": ["mdn-data@2.0.30", "", {}, "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA=="], + "mdurl": ["mdurl@2.0.0", "", {}, "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w=="], + "media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="], "merge-descriptors": ["merge-descriptors@2.0.0", "", {}, "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="], @@ -626,6 +652,8 @@ "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + "punycode.js": ["punycode.js@2.3.1", "", {}, "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA=="], + "qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="], "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], @@ -712,7 +740,9 @@ "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="], - "typescript-eslint": ["typescript-eslint@8.32.0", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.32.0", "@typescript-eslint/parser": "8.32.0", "@typescript-eslint/utils": "8.32.0" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-UMq2kxdXCzinFFPsXc9o2ozIpYCCOiEC46MG3yEh5Vipq6BO27otTtEBZA1fQ66DulEUgE97ucQ/3YY66CPg0A=="], + "typescript-eslint": ["typescript-eslint@8.32.1", "", { "dependencies": { "@typescript-eslint/eslint-plugin": "8.32.1", "@typescript-eslint/parser": "8.32.1", "@typescript-eslint/utils": "8.32.1" }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", "typescript": ">=4.8.4 <5.9.0" } }, "sha512-D7el+eaDHAmXvrZBy1zpzSNIRqnCOrkwTgZxTu3MUqRWk8k0q9m9Ho4+vPf7iHtgUfrK/o8IZaEApsxPlHTFCg=="], + + "uc.micro": 
["uc.micro@2.1.0", "", {}, "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A=="], "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="], @@ -742,6 +772,8 @@ "@humanfs/node/@humanwhocodes/retry": ["@humanwhocodes/retry@0.3.1", "", {}, "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA=="], + "@typescript-eslint/eslint-plugin/ignore": ["ignore@7.0.4", "", {}, "sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A=="], + "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], "@typescript-eslint/utils/@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.7.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw=="], diff --git a/package.json b/package.json index d65ebf45b3c..d1e0f7a45aa 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.4.0", "prettier": "^3.5.3", - "typescript-eslint": "^8.32.0", + "typescript-eslint": "^8.32.1", "turbo": "^2.5.3" }, "packageManager": "bun@1.2.9" diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index b42e1ceeb19..48e0d857807 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -42,7 +42,9 @@ import { type LabelID, type CardType, type MessageData, - type PatchData + type PatchData, + type File, + type BlobMetadata } from '@hcengineering/communication-types' import type { DbAdapter } from '@hcengineering/communication-sdk-types' @@ -83,7 +85,7 @@ export class CockroachAdapter implements DbAdapter { return await this.message.createMessage(card, type, content, creator, created, data, externalId, id) } - async removeMessages(card: CardID, messages: MessageID[], socialIds?: SocialID[]): Promise { + async removeMessages(card: CardID, messages?: MessageID[], socialIds?: SocialID[]): Promise { return await this.message.removeMessages(card, messages, socialIds) } @@ -99,6 +101,10 @@ export class CockroachAdapter implements DbAdapter { await this.message.createPatch(card, message, messageCreated, type, data, creator, created) } + async removePatches(card: CardID): Promise { + await this.message.removePatches(card) + } + async createMessagesGroup(card: CardID, blobId: BlobID, fromDate: Date, toDate: Date, count: number): Promise { await this.message.createMessagesGroup(card, blobId, fromDate, toDate, count) } @@ -136,14 +142,26 @@ export class CockroachAdapter implements DbAdapter { fileType: string, filename: string, size: number, + meta: BlobMetadata | undefined, creator: SocialID, created: Date ): Promise { - await this.message.createFile(card, message, messageCreated, blobId, fileType, filename, size, creator, created) + await this.message.createFile( + card, + message, + messageCreated, + blobId, + fileType, + filename, + size, + meta, + creator, + created + ) } - async removeFile(card: CardID, message: MessageID, blobId: BlobID): Promise { - await this.message.removeFile(card, message, blobId) + async removeFiles(query: Partial): Promise { + await 
this.message.removeFiles(query) } async createThread( @@ -157,8 +175,19 @@ export class CockroachAdapter implements DbAdapter { await this.message.createThread(card, message, messageCreated, thread, threadType, created) } - async updateThread(thread: CardID, op: 'increment' | 'decrement', lastReply?: Date): Promise { - await this.message.updateThread(thread, op, lastReply) + async removeThreads(query: Partial): Promise { + await this.message.removeThreads(query) + } + + async updateThread( + thread: CardID, + update: { + threadType?: CardType + op?: 'increment' | 'decrement' + lastReply?: Date + } + ): Promise { + await this.message.updateThread(thread, update) } async findMessages(params: FindMessagesParams): Promise { @@ -182,10 +211,14 @@ export class CockroachAdapter implements DbAdapter { return await this.notification.addCollaborators(card, cardType, collaborators, date) } - async removeCollaborators(card: CardID, collaborators: AccountID[]): Promise { + async removeCollaborators(card: CardID, collaborators?: AccountID[]): Promise { await this.notification.removeCollaborators(card, collaborators) } + async updateCollaborators(params: FindCollaboratorsParams, data: Partial): Promise { + await this.notification.updateCollaborators(params, data) + } + async createNotification(context: ContextID, message: MessageID, messageCreated: Date): Promise { return await this.notification.createNotification(context, message, messageCreated) } @@ -202,8 +235,8 @@ export class CockroachAdapter implements DbAdapter { await this.notification.updateContext(context, account, lastUpdate, lastView) } - async removeContext(context: ContextID, account: AccountID): Promise { - await this.notification.removeContext(context, account) + async removeContexts(query: Partial): Promise { + await this.notification.removeContexts(query) } async findNotificationContexts(params: FindNotificationContextParams): Promise { @@ -230,13 +263,28 @@ export class CockroachAdapter implements DbAdapter { return this.label.createLabel(label, card, cardType, account, created) } - removeLabel(label: LabelID, card: CardID, account: AccountID): Promise { - return this.label.removeLabel(label, card, account) + removeLabels(query: Partial

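// A minimal sketch of the Partial-filter pattern the remove* methods above rely on: only the
// defined keys of the query object become WHERE conditions, with the workspace id always
// prepended. This mirrors what removeThreads does later in this series; the helper name and
// standalone shape are assumptions for illustration, not part of the patch itself.
function buildRemoveWhere (workspaceId: string, filter: Record<string, unknown>): { where: string, values: unknown[] } {
  const entries = Object.entries(filter).filter(([_, value]) => value !== undefined)
  entries.unshift(['workspace_id', workspaceId])
  const clauses = entries.map(([key], index) => `${key} = $${index + 1}`)
  return {
    where: `WHERE ${clauses.join(' AND ')}`,
    values: entries.map(([_, value]) => value)
  }
}
// Example: buildRemoveWhere('ws-1', { card_id: 'c1', message_id: undefined })
// yields "WHERE workspace_id = $1 AND card_id = $2" with values ['ws-1', 'c1'].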
, C extends (r: any) => void> { private oldQuery: P | undefined private oldCallback: C | undefined - constructor( + constructor ( protected readonly lq: LiveQueries, onDestroy: (fn: () => void) => void ) { @@ -42,7 +41,7 @@ class BaseQuery

, C extends (r: any) => void> { unsubscribe: () => void = () => {} - query(params: P, callback: C): boolean { + query (params: P, callback: C): boolean { if (!this.needUpdate(params, callback)) { return false } @@ -50,7 +49,7 @@ class BaseQuery

, C extends (r: any) => void> { return true } - private doQuery(query: P, callback: C): void { + private doQuery (query: P, callback: C): void { this.unsubscribe() this.oldCallback = callback this.oldQuery = query @@ -65,13 +64,13 @@ class BaseQuery

, C extends (r: any) => void> { } // eslint-disable-next-line @typescript-eslint/no-unused-vars - createQuery(params: P, callback: C): { unsubscribe: () => void } { + createQuery (params: P, callback: C): { unsubscribe: () => void } { return { unsubscribe: () => {} } } - private needUpdate(params: P, callback: C): boolean { + private needUpdate (params: P, callback: C): boolean { if (!deepEqual(params, this.oldQuery)) return true if (!deepEqual(callback.toString(), this.oldCallback?.toString())) return true return false @@ -79,43 +78,43 @@ class BaseQuery

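// The query wrappers below only re-subscribe when needUpdate above reports a change: params are
// compared structurally with deepEqual and callbacks by their source text. A standalone
// illustration of that check under the same assumptions (not the class itself):
import { deepEqual } from 'fast-equals'

function needsResubscribe<P> (
  params: P,
  callback: (r: any) => void,
  oldParams?: P,
  oldCallback?: (r: any) => void
): boolean {
  if (!deepEqual(params, oldParams)) return true
  if (!deepEqual(callback.toString(), oldCallback?.toString())) return true
  return false
}

// Identical params and the same callback source count as "no change", so the existing
// subscription is kept; a different params object triggers a new subscription.
const onResult = (r: any): void => { console.log(r) }
needsResubscribe({ card: 'c1' }, onResult, { card: 'c1' }, onResult) // false
needsResubscribe({ card: 'c2' }, onResult, { card: 'c1' }, onResult) // true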
, C extends (r: any) => void> { } export class MessagesQuery extends BaseQuery> { - override createQuery(params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: () => void } { + override createQuery (params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: () => void } { return this.lq.queryMessages(params, callback) } } export class NotificationsQuery extends BaseQuery> { - override createQuery( + override createQuery ( params: FindNotificationsParams, callback: PagedQueryCallback ): { - unsubscribe: () => void - } { + unsubscribe: () => void + } { return this.lq.queryNotifications(params, callback) } } export class NotificationContextsQuery extends BaseQuery< - FindNotificationContextParams, - PagedQueryCallback +FindNotificationContextParams, +PagedQueryCallback > { - override createQuery( + override createQuery ( params: FindNotificationContextParams, callback: PagedQueryCallback ): { - unsubscribe: () => void - } { + unsubscribe: () => void + } { return this.lq.queryNotificationContexts(params, callback) } } export class LabelsQuery extends BaseQuery> { - override createQuery( + override createQuery ( params: FindLabelsParams, callback: QueryCallback

, C extends (r: any) => void> { private oldQuery: P | undefined private oldCallback: C | undefined - constructor ( - protected readonly lq: LiveQueries, - onDestroy: (fn: () => void) => void - ) { - onDestroy(() => { - this.unsubscribe() - }) + constructor (dontDestroy?: boolean) { + if (dontDestroy !== true) { + const destroyFn = getOnDestroy() + destroyFn(() => { + this.unsubscribe() + }) + } } unsubscribe: () => void = () => {} @@ -81,7 +82,7 @@ class BaseQuery

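// The constructors above stop taking LiveQueries and an onDestroy hook as arguments and instead
// resolve them through getLiveQueries()/getOnDestroy() when needed. A rough sketch of what such
// module-level accessors could look like; the implementation below is an assumption for
// illustration only, not the one shipped in this patch.
type DestroyHook = (fn: () => void) => void

let currentLiveQueries: unknown
let currentOnDestroy: DestroyHook = () => {}

function initQueryContextSketch (lq: unknown, onDestroy: DestroyHook): void {
  currentLiveQueries = lq
  currentOnDestroy = onDestroy
}

function getLiveQueriesSketch (): unknown {
  if (currentLiveQueries === undefined) {
    throw new Error('LiveQueries is not initialized')
  }
  return currentLiveQueries
}

function getOnDestroySketch (): DestroyHook {
  return currentOnDestroy
}

// Trade-off of this refactor: query wrappers can be constructed anywhere without plumbing the
// LiveQueries instance through constructors, at the cost of implicit module-level state that
// must be initialized before the first query is created.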
, C extends (r: any) => void> { export class MessagesQuery extends BaseQuery> { override createQuery (params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: () => void } { - return this.lq.queryMessages(params, callback) + return getLiveQueries().queryMessages(params, callback) } } @@ -92,7 +93,7 @@ export class NotificationsQuery extends BaseQuery void } { - return this.lq.queryNotifications(params, callback) + return getLiveQueries().queryNotifications(params, callback) } } @@ -106,7 +107,7 @@ PagedQueryCallback ): { unsubscribe: () => void } { - return this.lq.queryNotificationContexts(params, callback) + return getLiveQueries().queryNotificationContexts(params, callback) } } @@ -117,7 +118,7 @@ export class LabelsQuery extends BaseQuery void } { - return this.lq.queryLabels(params, callback) + return getLiveQueries().queryLabels(params, callback) } } @@ -128,6 +129,6 @@ export class CollaboratorsQuery extends BaseQuery void } { - return this.lq.queryCollaborators(params, callback) + return getLiveQueries().queryCollaborators(params, callback) } } diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index a4cc8a07ce3..29e71b54d3f 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -40,7 +40,6 @@ import { type FindLabelsParams, type LabelID, type CardType, - type PatchData, NotificationType, type NotificationContent, type LinkPreviewData, @@ -52,7 +51,7 @@ import type { DbAdapter, LabelUpdates, NotificationContextUpdates, - NotificationUpdates, RemoveLabelQuery, RemoveThreadQuery, + NotificationUpdates, RemoveLabelQuery, ThreadQuery, ThreadUpdates, UpdateNotificationQuery } from '@hcengineering/communication-sdk-types' @@ -97,13 +96,12 @@ export class CockroachAdapter implements DbAdapter { async createPatch ( cardId: CardID, messageId: MessageID, - messageCreated: Date, type: PatchType, - data: PatchData, + data: Record, creator: SocialID, created: Date ): Promise { - await this.message.createPatch(cardId, messageId, messageCreated, type, data, creator, created) + await this.message.createPatch(cardId, messageId, type, data, creator, created) } async createMessagesGroup ( @@ -120,14 +118,14 @@ export class CockroachAdapter implements DbAdapter { await this.message.removeMessagesGroup(card, blobId) } - async setReaction ( + async addReaction ( cardId: CardID, message: MessageID, reaction: string, socialId: SocialID, date: Date ): Promise { - await this.message.setReaction(cardId, message, reaction, socialId, date) + await this.message.addReaction(cardId, message, reaction, socialId, date) } async removeReaction ( @@ -140,32 +138,52 @@ export class CockroachAdapter implements DbAdapter { await this.message.removeReaction(cardId, messageId, reaction, socialId, date) } - async attachBlob ( + async attachBlobs ( cardId: CardID, messageId: MessageID, - data: BlobData, + blobs: BlobData[], socialId: SocialID, date: Date ): Promise { - await this.message.attachBlob(cardId, messageId, data, socialId, date) + await this.message.attachBlobs(cardId, messageId, blobs, socialId, date) } - async detachBlob (cardId: CardID, messageId: MessageID, blobId: BlobID, socialId: SocialID, date: Date): Promise { - await this.message.detachBlob(cardId, messageId, blobId, socialId, date) + async detachBlobs (cardId: CardID, messageId: MessageID, blobIds: BlobID[], socialId: SocialID, date: Date): Promise { + await this.message.detachBlobs(cardId, messageId, blobIds, socialId, date) } - async createLinkPreview ( + async setBlobs 
( cardId: CardID, messageId: MessageID, - data: LinkPreviewData, + blobs: BlobData[], socialId: SocialID, date: Date - ): Promise { - return await this.message.createLinkPreview(cardId, messageId, data, socialId, date) + ): Promise { + await this.message.setBlobs(cardId, messageId, blobs, socialId, date) } - async removeLinkPreview (cardId: CardID, messageId: MessageID, previewId: LinkPreviewID): Promise { - await this.message.removeLinkPreview(cardId, messageId, previewId) + async attachLinkPreviews ( + cardId: CardID, + messageId: MessageID, + data: (LinkPreviewData & { previewId: LinkPreviewID })[], + socialId: SocialID, + date: Date + ): Promise { + await this.message.attachLinkPreviews(cardId, messageId, data, socialId, date) + } + + async setLinkPreviews ( + cardId: CardID, + messageId: MessageID, + data: (LinkPreviewData & { previewId: LinkPreviewID })[], + socialId: SocialID, + date: Date + ): Promise { + await this.message.setLinkPreviews(cardId, messageId, data, socialId, date) + } + + async detachLinkPreviews (cardId: CardID, messageId: MessageID, previewIds: LinkPreviewID[], socialId: SocialID, date: Date): Promise { + await this.message.detachLinkPreviews(cardId, messageId, previewIds, socialId, date) } async attachThread ( @@ -173,17 +191,18 @@ export class CockroachAdapter implements DbAdapter { messageId: MessageID, threadId: CardID, threadType: CardType, + socialId: SocialID, date: Date ): Promise { - await this.message.attachThread(cardId, messageId, threadId, threadType, date) + await this.message.attachThread(cardId, messageId, threadId, threadType, socialId, date) } - async removeThreads (query: RemoveThreadQuery): Promise { - await this.message.removeThreads(query) + async updateThread (cardId: CardID, messageId: MessageID, threadId: CardID, update: ThreadUpdates, socialId: SocialID, date: Date): Promise { + await this.message.updateThread(cardId, messageId, threadId, update, socialId, date) } - async updateThread (threadId: CardID, update: ThreadUpdates): Promise { - await this.message.updateThread(threadId, update) + async removeThreads (query: ThreadQuery): Promise { + await this.message.removeThreads(query) } async findMessages (params: FindMessagesParams): Promise { @@ -235,8 +254,8 @@ export class CockroachAdapter implements DbAdapter { ) } - async updateNotification (query: UpdateNotificationQuery, updates: NotificationUpdates): Promise { - await this.notification.updateNotification(query, updates) + async updateNotification (contextId: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates): Promise { + await this.notification.updateNotification(contextId, account, query, updates) } async removeNotifications ( @@ -261,8 +280,8 @@ export class CockroachAdapter implements DbAdapter { await this.notification.updateContext(context, account, updates) } - async removeContext (contextId: ContextID, account: AccountID): Promise { - await this.notification.removeContext(contextId, account) + async removeContext (contextId: ContextID, account: AccountID): Promise { + return await this.notification.removeContext(contextId, account) } async findNotificationContexts (params: FindNotificationContextParams): Promise { diff --git a/packages/cockroach/src/client.ts b/packages/cockroach/src/client.ts index 04cea483aed..987b0bb2319 100644 --- a/packages/cockroach/src/client.ts +++ b/packages/cockroach/src/client.ts @@ -27,9 +27,9 @@ export class SqlClient { return this.sql } - async execute(query: string, params?: SqlParams): Promise { + 
async execute(query: string, params?: SqlParams, client?: postgres.TransactionSql): Promise { const convertedParams = convertArrayParams(params) - return await this.sql.unsafe(query, convertedParams) + return await (client ?? this.sql).unsafe(query, convertedParams) } cursor(query: string, params?: SqlParams, size?: number): AsyncIterable[]> { diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index 0fda7f6aaa3..d99528c09ef 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -31,27 +31,33 @@ export class BaseDb { return this.client.getRawClient() } - async execute>(sql: string, params?: ParameterOrJSON[], name?: string): Promise { + async execute>( + sql: string, + params?: ParameterOrJSON[], + name?: string, + client?: postgres.TransactionSql + ): Promise { if (this.options?.withLogs === true && this.logger !== undefined) { - return await this.executeWithLogs(name, this.logger, sql, params) + return await this.executeWithLogs(name, this.logger, sql, params, client) } - return await this.client.execute(sql, params) + return await this.client.execute(sql, params, client) } private async executeWithLogs>( name: string | undefined, logger: Logger, sql: string, - params?: ParameterOrJSON[] + params?: ParameterOrJSON[], + client?: postgres.TransactionSql ): Promise { if (name === undefined) { - return await this.client.execute(sql, params) + return await this.client.execute(sql, params, client) } const start = performance.now() try { - return await this.client.execute(sql, params) + return await this.client.execute(sql, params, client) } finally { const time = performance.now() - start logger.info(name, { time }) diff --git a/packages/cockroach/src/db/label.ts b/packages/cockroach/src/db/label.ts index 6ef365a0eeb..ebf5d643c37 100644 --- a/packages/cockroach/src/db/label.ts +++ b/packages/cockroach/src/db/label.ts @@ -24,7 +24,7 @@ import { } from '@hcengineering/communication-types' import { BaseDb } from './base' -import { type LabelDb, TableName } from './schema' +import { type LabelDb, TableName } from '../schema' import { toLabel } from './mapping' import type { LabelUpdates, RemoveLabelQuery } from '@hcengineering/communication-sdk-types' diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index a71727bfddb..2d3b59c178a 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -54,7 +54,7 @@ import { type ThreadDb, type LabelDb, type LinkPreviewDb -} from './schema' +} from '../schema' interface RawMessage extends MessageDb { thread_id?: CardID @@ -69,7 +69,6 @@ interface RawMessage extends MessageDb { interface RawNotification extends NotificationDb { account: AccountID - card_id: CardID message_id: MessageID message_type?: MessageType message_content?: Markdown @@ -149,7 +148,7 @@ export function toReaction (raw: ReactionDb): Reaction { export function toBlob (raw: Omit): AttachedBlob { return { blobId: raw.blob_id, - contentType: raw.type, + mimeType: raw.type, fileName: raw.filename, size: Number(raw.size), metadata: raw.meta, @@ -313,7 +312,7 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): return { id: String(raw.id) as NotificationID, - cardId: raw.card_id, + cardId: card, account: raw.account, type: raw.type, read: Boolean(raw.read), diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 70193a60b99..bf23da34ca8 100644 --- a/packages/cockroach/src/db/message.ts +++ 
b/packages/cockroach/src/db/message.ts @@ -14,10 +14,16 @@ // import { + AddReactionPatchData, + AttachBlobsPatchData, + AttachLinkPreviewsPatchData, + AttachThreadPatchData, type BlobData, type BlobID, type CardID, type CardType, + DetachBlobsPatchData, + DetachLinkPreviewsPatchData, type FindMessagesGroupsParams, type FindMessagesParams, type LinkPreviewData, @@ -28,18 +34,19 @@ import { type MessageID, type MessagesGroup, type MessageType, - type PatchData, PatchType, + RemoveReactionPatchData, + SetLinkPreviewsPatchData, type SocialID, SortingOrder, - type Thread + type Thread, + UpdateThreadPatchData } from '@hcengineering/communication-types' -import type { RemoveThreadQuery, ThreadUpdates } from '@hcengineering/communication-sdk-types' +import type { ThreadUpdates, ThreadQuery } from '@hcengineering/communication-sdk-types' +import postgres from 'postgres' import { BaseDb } from './base' import { - type FileDb, - type LinkPreviewDb, type MessageDb, messageSchema, type MessagesGroupDb, @@ -47,10 +54,9 @@ import { type ReactionDb, TableName, type ThreadDb -} from './schema' +} from '../schema' import { getCondition } from './utils' import { toMessage, toMessagesGroup, toThread } from './mapping' -import { isExternalMessageId, messageIdToDate } from '../messageId' export class MessagesDb extends BaseDb { // Message @@ -90,198 +96,417 @@ export class MessagesDb extends BaseDb { VALUES (${placeholders.join(', ')}) RETURNING id::text, created` - if (isExternalMessageId(db.id)) { - return await this.getRowClient().begin(async (s) => { - const sql = `INSERT INTO ${TableName.MessageCreated} (workspace_id, card_id, message_id, created) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::timestamptz) + return await this.getRowClient().begin(async (s) => { + const sql = `INSERT INTO ${TableName.MessageCreated} (workspace_id, card_id, message_id, created) + VALUES ($1::uuid, $2::varchar, $3::varchar, $4::timestamptz) ON CONFLICT (workspace_id, card_id, message_id) DO NOTHING` - const result = await s.unsafe(sql, [this.workspace, cardId, db.id, created]) - if (result.count === 0) { - return false - } + const result = await s.unsafe(sql, [this.workspace, cardId, db.id, created]) + if (result.count === 0) { + return false + } - await s.unsafe(insertSql, values) - return true - }) - } else { - await this.execute(insertSql, values, 'insert message') + await s.unsafe(insertSql, values) return true - } + }) } async createPatch ( cardId: CardID, messageId: MessageID, - messageCreated: Date, type: PatchType, - data: PatchData, + data: Record, creator: SocialID, - created: Date + created: Date, + client?: postgres.TransactionSql ): Promise { - const db: PatchDb = { + const db: Omit = { workspace_id: this.workspace, card_id: cardId, message_id: messageId, type, data, creator, - created, - message_created: messageCreated + created } - const sql = `INSERT INTO ${TableName.Patch} (workspace_id, card_id, message_id, type, data, creator, created, message_created) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::varchar, $5::jsonb, $6::varchar, $7::timestamptz, $8::timestamptz)` + const sql = ` + INSERT INTO ${TableName.Patch} ( + workspace_id, card_id, message_id, + type, data, creator, created, message_created + ) + SELECT + $1::uuid, $2::varchar, $3::varchar, + $4::varchar, $5::jsonb, $6::varchar, $7::timestamptz, + mc.created + FROM ${TableName.MessageCreated} mc + WHERE mc.workspace_id = $1::uuid + AND mc.card_id = $2::varchar + AND mc.message_id = $3::varchar + ` await this.execute( sql, - [db.workspace_id, 
db.card_id, db.message_id, db.type, db.data, db.creator, db.created, db.message_created], - 'insert patch' + [this.workspace, db.card_id, db.message_id, db.type, db.data, db.creator, db.created], + 'insert patch', + client ) } // Blob - async attachBlob ( + async attachBlobs ( cardId: CardID, messageId: MessageID, - data: BlobData, + blobs: BlobData[], socialId: SocialID, date: Date ): Promise { - const db: FileDb = { - workspace_id: this.workspace, - card_id: cardId, - message_id: messageId, - blob_id: data.blobId, - type: data.contentType, - filename: data.fileName, - size: data.size, - creator: socialId, - created: date, - meta: data.metadata + if (blobs.length === 0) return + + const values: any[] = [] + const placeholders: string[] = [] + + blobs.forEach((blob, i) => { + const baseIndex = i * 10 + placeholders.push(`($${baseIndex + 1}::uuid, $${baseIndex + 2}::varchar, $${baseIndex + 3}::varchar, $${baseIndex + 4}::uuid, + $${baseIndex + 5}::varchar, $${baseIndex + 6}::varchar, $${baseIndex + 7}::varchar, + $${baseIndex + 8}::timestamptz, $${baseIndex + 9}::int8, $${baseIndex + 10}::jsonb)`) + + values.push( + this.workspace, + cardId, + messageId, + blob.blobId, + blob.mimeType, + blob.fileName, + socialId, + date, + blob.size, + blob.metadata ?? {} + ) + }) + + const insertSql = ` + INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, + type, filename, creator, created, size, meta) + VALUES ${placeholders.join(', ')}` + + const inDb = await this.isMessageInDb(cardId, messageId) + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute(insertSql, values, 'insert files', s) + + const data: AttachBlobsPatchData = { + operation: 'attach', + blobs + } + await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) + return true + }) + } else { + await this.execute(insertSql, values, 'insert files') } - const sql = `INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, type, filename, creator, - created, size, meta) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::uuid, $5::varchar, $6::varchar, $7::varchar, - $8::timestamptz, $9::int8, $10::jsonb)` + } - await this.execute( - sql, - [ - db.workspace_id, - db.card_id, - db.message_id, - db.blob_id, - db.type, - db.filename, - db.creator, - db.created, - db.size, - db.meta ?? 
{} - ], - 'insert file' - ) + async detachBlobs ( + cardId: CardID, + messageId: MessageID, + blobIds: BlobID[], + socialId: SocialID, + date: Date + ): Promise { + if (blobIds.length === 0) return + + const sql = ` + DELETE FROM ${TableName.File} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::varchar + AND blob_id = ANY($4::uuid[]) + ` + + const inDb = await this.isMessageInDb(cardId, messageId) + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute(sql, [this.workspace, cardId, messageId, blobIds], 'remove files', s) + + const data: DetachBlobsPatchData = { + operation: 'detach', + blobIds + } + await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) + return true + }) + } else { + await this.execute(sql, [this.workspace, cardId, messageId, blobIds], 'remove files') + } } - async detachBlob ( + async setBlobs ( cardId: CardID, messageId: MessageID, - blobId: BlobID, + blobs: BlobData[], socialId: SocialID, date: Date ): Promise { - const sql = `DELETE - FROM ${TableName.File} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::int8 - AND blob_id = $4::uuid` + if (blobs.length === 0) return + + const values: any[] = [] + const placeholders: string[] = [] + + blobs.forEach((blob, i) => { + const baseIndex = i * 10 + placeholders.push(`($${baseIndex + 1}::uuid, $${baseIndex + 2}::varchar, $${baseIndex + 3}::varchar, $${baseIndex + 4}::uuid, + $${baseIndex + 5}::varchar, $${baseIndex + 6}::varchar, $${baseIndex + 7}::varchar, + $${baseIndex + 8}::timestamptz, $${baseIndex + 9}::int8, $${baseIndex + 10}::jsonb)`) + + values.push( + this.workspace, + cardId, + messageId, + blob.blobId, + blob.mimeType, + blob.fileName, + socialId, + date, + blob.size, + blob.metadata ?? {} + ) + }) + + const insertSql = ` + INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, + type, filename, creator, created, size, meta) + VALUES ${placeholders.join(', ')}` + const deleteSql = ` + DELETE FROM ${TableName.File} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::varchar + ` + await this.getRowClient().begin(async (s) => { + await this.execute(deleteSql, [this.workspace, cardId, messageId], 'delete blobs', s) + await this.execute(insertSql, values, 'insert blobs', s) - await this.execute(sql, [this.workspace, cardId, messageId, blobId], 'remove files') + const data: AttachBlobsPatchData = { + operation: 'attach', + blobs + } + + await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) + + return true + }) } - async createLinkPreview ( + async attachLinkPreviews ( cardId: CardID, messageId: MessageID, - data: LinkPreviewData, + previews: (LinkPreviewData & { previewId: LinkPreviewID })[], socialId: SocialID, date: Date - ): Promise { - const db: Omit = { - workspace_id: this.workspace, - card_id: cardId, - message_id: messageId, - url: data.url, - host: data.host, - title: data.title ?? null, - description: data.description ?? null, - favicon: data.iconUrl ?? null, - hostname: data.siteName ?? null, - image: data.previewImage ?? 
null, - creator: socialId, - created: date + ): Promise { + if (previews.length === 0) return + + const values: any[] = [] + const placeholders: string[] = [] + + previews.forEach((preview, i) => { + const base = i * 12 + placeholders.push(`($${base + 1}::uuid, $${base + 2}::varchar, $${base + 3}::varchar, $${base + 4}::varchar, + $${base + 5}::varchar, $${base + 6}::varchar, $${base + 7}::varchar, + $${base + 8}::varchar, $${base + 9}::varchar, $${base + 10}::jsonb, + $${base + 11}::varchar, $${base + 12}::timestamptz, $${base + 13}::int8)`) + + values.push( + this.workspace, + cardId, + messageId, + preview.url, + preview.host, + preview.title ?? null, + preview.description ?? null, + preview.iconUrl ?? null, + preview.siteName ?? null, + preview.previewImage ?? null, + socialId, + date, + preview.previewId + ) + }) + + const insertSql = ` + INSERT INTO ${TableName.LinkPreview} ( + workspace_id, card_id, message_id, url, host, title, description, + favicon, hostname, image, creator, created, id + ) VALUES ${placeholders.join(', ')}` + + const inDb = await this.isMessageInDb(cardId, messageId) + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute(insertSql, values, 'insert link previews', s) + + const data: AttachLinkPreviewsPatchData = { + operation: 'attach', + previews + } + await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) + }) + } else { + await this.execute(insertSql, values, 'insert link previews') } - const sql = `INSERT INTO ${TableName.LinkPreview} (workspace_id, card_id, message_id, url, host, title, description, - favicon, hostname, image, creator, created) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::varchar, $5::varchar, $6::varchar, $7::varchar, - $8::varchar, $9::varchar, $10::jsonb, $11::varchar, $12::timestamptz) - RETURNING id::text` - const result = await this.execute( - sql, - [ - db.workspace_id, - db.card_id, - db.message_id, - db.url, - db.host, - db.title, - db.description, - db.favicon, - db.hostname, - db.image, - db.creator, - db.created - ], - 'insert link preview' - ) + } - return result[0].id as LinkPreviewID + async detachLinkPreviews ( + cardId: CardID, + messageId: MessageID, + previewIds: LinkPreviewID[], + socialId: SocialID, + date: Date + ): Promise { + if (previewIds.length === 0) return + + const sql = + previewIds.length > 1 + ? ` + DELETE FROM ${TableName.LinkPreview} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::varchar + AND id = ANY($4::int8[]) + ` + : ` + DELETE FROM ${TableName.LinkPreview} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::varchar + AND id = $4::int8 + ` + + const inDb = await this.isMessageInDb(cardId, messageId) + + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute( + sql, + [this.workspace, cardId, messageId, previewIds.length === 1 ? previewIds[0] : previewIds], + 'remove link previews', + s + ) + + const data: DetachLinkPreviewsPatchData = { + operation: 'detach', + previewIds + } + + await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) + + return true + }) + } else { + await this.execute( + sql, + [this.workspace, cardId, messageId, previewIds.length === 1 ? 
previewIds[0] : previewIds], + 'remove link previews' + ) + } } - async removeLinkPreview (cardId: CardID, messageId: MessageID, previewId: LinkPreviewID): Promise { - const sql = `DELETE - FROM ${TableName.LinkPreview} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::int8 - AND id = $4::int8` - await this.execute(sql, [this.workspace, cardId, messageId, previewId], 'remove link preview') + public async setLinkPreviews ( + cardId: CardID, + messageId: MessageID, + previews: (LinkPreviewData & { previewId: LinkPreviewID })[], + socialId: SocialID, + date: Date + ): Promise { + if (previews.length === 0) return + const deleteSql = ` + DELETE FROM ${TableName.LinkPreview} + WHERE workspace_id = $1::uuid + AND card_id = $2::varchar + AND message_id = $3::varchar + ` + + const values: any[] = [] + const placeholders: string[] = [] + + previews.forEach((preview, i) => { + const base = i * 12 + placeholders.push(`($${base + 1}::uuid, $${base + 2}::varchar, $${base + 3}::varchar, $${base + 4}::varchar, + $${base + 5}::varchar, $${base + 6}::varchar, $${base + 7}::varchar, + $${base + 8}::varchar, $${base + 9}::varchar, $${base + 10}::jsonb, + $${base + 11}::varchar, $${base + 12}::timestamptz, $${base + 13}::int8)`) + + values.push( + this.workspace, + cardId, + messageId, + preview.url, + preview.host, + preview.title ?? null, + preview.description ?? null, + preview.iconUrl ?? null, + preview.siteName ?? null, + preview.previewImage ?? null, + socialId, + date, + preview.previewId + ) + }) + + const insertSql = `INSERT INTO ${TableName.LinkPreview} ( + workspace_id, card_id, message_id, url, host, title, description, + favicon, hostname, image, creator, created, id + ) VALUES ${placeholders.join(', ')} ` + + await this.getRowClient().begin(async (s) => { + await this.execute(deleteSql, [this.workspace, cardId, messageId], 'delete link previews', s) + await this.execute(insertSql, values, 'insert new link previews', s) + + const data: SetLinkPreviewsPatchData = { + operation: 'set', + previews + } + + await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) + + return true + }) } // Reaction - async setReaction ( + async addReaction ( cardId: CardID, messageId: MessageID, reaction: string, creator: SocialID, created: Date ): Promise { - const db: ReactionDb = { - workspace_id: this.workspace, - card_id: cardId, - message_id: messageId, - reaction, - creator, - created - } - const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::varchar, $5::varchar, $6::timestamptz) + const inDb = await this.isMessageInDb(cardId, messageId) + if (inDb) { + const db: ReactionDb = { + workspace_id: this.workspace, + card_id: cardId, + message_id: messageId, + reaction, + creator, + created + } + const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) + VALUES ($1::uuid, $2::varchar, $3::varchar, $4::varchar, $5::varchar, $6::timestamptz) ON CONFLICT DO NOTHING` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], - 'insert reaction' - ) + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], + 'insert reaction' + ) + } else { + const data: AddReactionPatchData = { + operation: 'add', + reaction + } + await this.createPatch(cardId, messageId, PatchType.reaction, data, creator, 
created) + } } async removeReaction ( @@ -291,14 +516,23 @@ export class MessagesDb extends BaseDb { socialId: SocialID, date: Date ): Promise { - const sql = `DELETE + const inDb = await this.isMessageInDb(cardId, messageId) + if (inDb) { + const sql = `DELETE FROM ${TableName.Reaction} WHERE workspace_id = $1::uuid AND card_id = $2::varchar - AND message_id = $3::int8 + AND message_id = $3::varchar AND reaction = $4::varchar AND creator = $5::varchar` - await this.execute(sql, [this.workspace, cardId, messageId, reaction, socialId], 'remove reaction') + await this.execute(sql, [this.workspace, cardId, messageId, reaction, socialId], 'remove reaction') + } else { + const data: RemoveReactionPatchData = { + operation: 'remove', + reaction + } + await this.createPatch(cardId, messageId, PatchType.reaction, data, socialId, date) + } } // Thread @@ -307,6 +541,7 @@ export class MessagesDb extends BaseDb { messageId: MessageID, threadId: CardID, threadType: CardType, + socialId: SocialID, date: Date ): Promise { const db: ThreadDb = { @@ -321,38 +556,44 @@ export class MessagesDb extends BaseDb { const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, thread_type, replies_count, last_reply) - VALUES ($1::uuid, $2::varchar, $3::int8, $4::varchar, $5::varchar, $6::int, $7::timestamptz)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.thread_type, db.replies_count, db.last_reply], - 'insert thread' - ) - } + VALUES ($1::uuid, $2::varchar, $3::varchar, $4::varchar, $5::varchar, $6::int, $7::timestamptz)` + + const inDb = await this.isMessageInDb(cardId, messageId) + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.thread_type, db.replies_count, db.last_reply], + 'insert thread', + s + ) + + const data: AttachThreadPatchData = { + operation: 'attach', + threadId, + threadType + } + await this.createPatch(cardId, messageId, PatchType.thread, data, socialId, date, s) - async removeThreads (query: RemoveThreadQuery): Promise { - const db: Partial = { - card_id: query.cardId, - message_id: query.messageId, - thread_id: query.threadId + return true + }) + } else { + await this.execute( + sql, + [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.thread_type, db.replies_count, db.last_reply], + 'insert thread' + ) } - - const entries = Object.entries(db).filter(([_, value]) => value !== undefined) - - if (entries.length === 0) return - - entries.unshift(['workspace_id', this.workspace]) - - const whereClauses = entries.map(([key], index) => `${key} = $${index + 1}`) - const whereValues = entries.map(([_, value]) => value) - - const sql = `DELETE - FROM ${TableName.Thread} - WHERE ${whereClauses.join(' AND ')}` - - await this.execute(sql, whereValues, 'remove threads') } - async updateThread (threadId: CardID, update: ThreadUpdates): Promise { + async updateThread ( + cardId: CardID, + messageId: MessageID, + threadId: CardID, + update: ThreadUpdates, + socialId: SocialID, + date: Date + ): Promise { const set: string[] = [] const values: any[] = [] @@ -377,9 +618,52 @@ export class MessagesDb extends BaseDb { const updateSql = `UPDATE ${TableName.Thread}` const setSql = 'SET ' + set.join(', ') - const where = `WHERE workspace_id = $${index++}::uuid AND thread_id = $${index++}::varchar` + const where = `WHERE workspace_id = $${index++}::uuid AND thread_id = $${index++}::varchar AND card_id = $${index++}::varchar 
AND message_id = $${index++}::varchar` const sql = [updateSql, setSql, where].join(' ') - await this.execute(sql, [...values, this.workspace, threadId], 'update thread') + + const inDb = await this.isMessageInDb(cardId, messageId) + + if (!inDb) { + await this.getRowClient().begin(async (s) => { + await this.execute(sql, [...values, this.workspace, threadId, cardId, messageId], 'update thread', s) + + const data: UpdateThreadPatchData = { + operation: 'update', + threadId, + threadType: update.threadType, + repliesCountOp: update.repliesCountOp, + lastReply: update.lastReply + } + await this.createPatch(cardId, messageId, PatchType.thread, data, socialId, date, s) + + return true + }) + } else { + await this.execute(sql, [...values, this.workspace, threadId, cardId, messageId], 'update thread') + } + } + + async removeThreads (query: ThreadQuery): Promise { + const db: Partial = { + card_id: query.cardId, + message_id: query.messageId, + thread_id: query.threadId + } + + const entries = Object.entries(db).filter(([_, value]) => value !== undefined) + + if (entries.length === 0) return + + entries.unshift(['workspace_id', this.workspace]) + + const whereClauses = entries.map(([key], index) => `${key} = $${index + 1}`) + const whereValues = entries.map(([_, value]) => value) + + const sql = `DELETE + FROM ${TableName.Thread} + WHERE ${whereClauses.join(' AND ')}` + + await this.execute(sql, whereValues, 'remove threads') } // MessagesGroup @@ -634,7 +918,7 @@ export class MessagesDb extends BaseDb { let index = 2 if (params.id != null) { - where.push(`m.id = $${index++}::int8`) + where.push(`m.id = $${index++}::varchar`) values.push(params.id) } @@ -747,29 +1031,28 @@ export class MessagesDb extends BaseDb { } public async isMessageInDb (cardId: CardID, messageId: MessageID): Promise { - const select = `SELECT m.id - FROM ${TableName.Message} m - WHERE m.workspace_id = $1::uuid - AND m.card_id = $2::varchar - AND m.id = $3::int8 - LIMIT 1` - - return (await this.execute(select, [this.workspace, cardId, messageId])).length > 0 + const sql = ` + SELECT 1 + FROM ${TableName.Message} m + WHERE m.workspace_id = $1::uuid + AND m.card_id = $2::varchar + AND m.id = $3::varchar + LIMIT 1 + ` + + const result = await this.execute(sql, [this.workspace, cardId, messageId]) + return result.length > 0 } public async getMessageCreated (cardId: CardID, messageId: MessageID): Promise { - if (isExternalMessageId(messageId)) { - const select = `SELECT mc.created + const select = `SELECT mc.created FROM ${TableName.MessageCreated} mc WHERE mc.workspace_id = $1::uuid AND mc.card_id = $2::varchar - AND mc.id = $3::int8 + AND mc.message_id = $3::varchar LIMIT 1` - const result = await this.execute(select, [this.workspace, cardId, messageId]) - const created = result[0].created - return created != null ? new Date(created) : undefined - } - - return messageIdToDate(messageId) ?? undefined + const result = await this.execute(select, [this.workspace, cardId, messageId]) + const created = result[0].created + return created != null ? 
new Date(created) : undefined } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index c87adce4f98..f777524aba8 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -32,7 +32,7 @@ import { } from '@hcengineering/communication-types' import { BaseDb } from './base' -import { type CollaboratorDb, type ContextDb, type NotificationDb, TableName } from './schema' +import { type CollaboratorDb, type ContextDb, type NotificationDb, TableName } from '../schema' import { getCondition } from './utils' import { toCollaborator, toNotification, toNotificationContext } from './mapping' import type { @@ -66,7 +66,7 @@ export class NotificationsDb extends BaseDb { } async removeCollaborators (card: CardID, accounts: AccountID[], unsafe = false): Promise { - if (accounts === undefined && unsafe) { + if (accounts.length === 0 && unsafe) { const sql = `DELETE FROM ${TableName.Collaborators} WHERE workspace_id = $1::uuid AND card_id = $2::varchar` await this.execute(sql, [this.workspace, card], 'remove collaborators') } else if (accounts.length === 1) { @@ -123,7 +123,7 @@ export class NotificationsDb extends BaseDb { content: content ?? {} } const sql = `INSERT INTO ${TableName.Notification} (message_id, message_created, context_id, read, created, type, content) - VALUES ($1::int8, $2::timestamptz, $3::int8, $4::boolean, $5::timestamptz, $6::varchar, $7::jsonb) + VALUES ($1::varchar, $2::timestamptz, $3::int8, $4::boolean, $5::timestamptz, $6::varchar, $7::jsonb) RETURNING id::text` const result = await this.execute( sql, @@ -133,14 +133,19 @@ export class NotificationsDb extends BaseDb { return result[0].id as NotificationID } - async updateNotification (query: UpdateNotificationQuery, updates: NotificationUpdates): Promise { + async updateNotification ( + contextId: ContextID, + account: AccountID, + query: UpdateNotificationQuery, + updates: NotificationUpdates + ): Promise { const where: string[] = [ 'nc.workspace_id = $1::uuid', 'nc.id = $2::int8', 'nc.account = $3::uuid', 'nc.id = n.context_id' ] - const values: any[] = [this.workspace, query.context, query.account] + const values: any[] = [this.workspace, contextId, account] let index = values.length + 1 if (query.id != null) { @@ -153,12 +158,13 @@ export class NotificationsDb extends BaseDb { values.push(query.type) } - const createdCondition = getCondition('n', 'created', index, query.created, 'timestamptz') - - if (createdCondition != null) { - where.push(createdCondition.where) - values.push(...createdCondition.values) - index = createdCondition.index + if (query.untilDate != null) { + const createdCondition = getCondition('n', 'created', index, { lessOrEqual: query.untilDate }, 'timestamptz') + if (createdCondition != null) { + where.push(createdCondition.where) + values.push(...createdCondition.values) + index = createdCondition.index + } } const whereClause = `WHERE ${where.join(' AND ')}` @@ -230,14 +236,17 @@ export class NotificationsDb extends BaseDb { return result[0].id as ContextID } - async removeContext (contextId: ContextID, account: AccountID): Promise { + async removeContext (contextId: ContextID, account: AccountID): Promise { const sql = `DELETE FROM ${TableName.NotificationContext} WHERE workspace_id = $1::uuid AND id = $2::int8 - AND account = $3::uuid` + AND account = $3::uuid + RETURNING id::text` + + const result = await this.execute(sql, [this.workspace, contextId, account], 'remove notification context') - await 
this.execute(sql, [this.workspace, contextId, account], 'remove notification context') + return result[0]?.id as ContextID | undefined } async updateContext (context: ContextID, account: AccountID, updates: NotificationContextUpdates): Promise { @@ -424,7 +433,7 @@ export class NotificationsDb extends BaseDb { const withMessage = params.message === true let select = - 'SELECT n.id, n.created, n.read, n.message_id, n.message_created, n.type, n.content, n.context_id, nc.card_id, nc.account, nc.card_id, nc.last_view ' + 'SELECT n.id, n.created, n.read, n.message_id, n.message_created, n.type, n.content, n.context_id, nc.card_id, nc.account, nc.last_view ' let joinMessages = '' @@ -647,7 +656,7 @@ export class NotificationsDb extends BaseDb { } if (params.messageId != null) { - where.push(`n.message_id = $${index++}::int8`) + where.push(`n.message_id = $${index++}::varchar`) values.push(params.messageId) } diff --git a/packages/cockroach/src/index.ts b/packages/cockroach/src/index.ts index adbe465ba09..ad167c694a7 100644 --- a/packages/cockroach/src/index.ts +++ b/packages/cockroach/src/index.ts @@ -14,4 +14,3 @@ // export * from './adapter' -export * from './messageId' diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index add166b53e6..287a7bb7452 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -14,6 +14,7 @@ // import type postgres from 'postgres' +import { TableName } from './schema' /* eslint-disable @typescript-eslint/naming-convention */ @@ -115,7 +116,15 @@ function getMigrations (): [string, string][] { migrationV3_1(), migrationV4_1(), migrationV5_1(), - migrationV5_2() + migrationV5_2(), + migrationV6_1(), + migrationV6_2(), + migrationV6_3(), + migrationV6_4(), + migrationV6_5(), + migrationV6_6(), + migrationV6_7(), + migrationV6_8() ] } @@ -399,3 +408,199 @@ function migrationV5_2 (): [string, string] { ` return ['init_message_created_table-v5_2', sql] } + +function migrationV6_1 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Message} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.Patch} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.File} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.Reaction} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.Thread} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.LinkPreview} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.Notification} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ALTER TABLE ${TableName.MessageCreated} + ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); + ` + return ['add_message_id_str_columns-v6_1', sql] +} + +function migrationV6_2 (): [string, string] { + const sql = ` + UPDATE ${TableName.Message} + SET message_id_str = id::text; + UPDATE ${TableName.Patch} + SET message_id_str = message_id::text; + UPDATE ${TableName.File} + SET message_id_str = message_id::text; + UPDATE ${TableName.Reaction} + SET message_id_str = message_id::text; + UPDATE ${TableName.Thread} + SET message_id_str = message_id::text; + UPDATE ${TableName.LinkPreview} + SET message_id_str = message_id::text; + UPDATE ${TableName.Notification} + SET message_id_str = message_id::text; + UPDATE ${TableName.MessageCreated} + SET message_id_str = message_id::text; + ` + return ['copy_int8_ids_to_str_columns-v6_2', sql] +} + +function migrationV6_3 (): [string, string] 
{ + const sql = ` + ALTER TABLE ${TableName.Reaction} + DROP CONSTRAINT IF EXISTS reactions_workspace_id_card_id_message_id_fkey; + + DROP INDEX IF EXISTS communication.thread_unique_constraint CASCADE; + + DROP INDEX IF EXISTS communication.idx_patch_workspace_card_message; + DROP INDEX IF EXISTS communication.files_workspace_card_message_idx; + DROP INDEX IF EXISTS communication.idx_reactions_workspace_card_message; + DROP INDEX IF EXISTS communication.idx_thread_workspace_card_message; + DROP INDEX IF EXISTS communication.workspace_id_card_id_message_id_idx; + DROP INDEX IF EXISTS communication.notifications_context_id_read_created_desc_idx; + DROP INDEX IF EXISTS communication.notifications_type_storing_rec_idx; + ` + return ['drop_constraints_and_indexes_for_rename-v6_3', sql] +} + +function migrationV6_4 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Message} + RENAME COLUMN id TO message_id_old; + ALTER TABLE ${TableName.Message} + RENAME COLUMN message_id_str TO id; + + ALTER TABLE ${TableName.Patch} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.Patch} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.File} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.File} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.Reaction} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.Reaction} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.Thread} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.Thread} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.LinkPreview} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.LinkPreview} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.Notification} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.Notification} + RENAME COLUMN message_id_str TO message_id; + + ALTER TABLE ${TableName.MessageCreated} + RENAME COLUMN message_id TO message_id_old; + ALTER TABLE ${TableName.MessageCreated} + RENAME COLUMN message_id_str TO message_id; + ` + return ['rename_message_id_columns-v6_4', sql] +} + +function migrationV6_5 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Message} + ALTER COLUMN id SET NOT NULL; + + ALTER TABLE ${TableName.MessageCreated} + ALTER COLUMN message_id SET NOT NULL; + ALTER TABLE ${TableName.File} + ALTER COLUMN message_id SET NOT NULL; + ALTER TABLE ${TableName.Reaction} + ALTER COLUMN message_id SET NOT NULL; + ALTER TABLE ${TableName.Thread} + ALTER COLUMN message_id SET NOT NULL; + ALTER TABLE ${TableName.LinkPreview} + ALTER COLUMN message_id SET NOT NULL; + ALTER TABLE ${TableName.Notification} + ALTER COLUMN message_id SET NOT NULL; + + ` + return ['make_message_id_not_null-v6_5', sql] +} + +function migrationV6_6 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Message} + ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, id); + ALTER TABLE ${TableName.MessageCreated} + ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id); + ALTER TABLE ${TableName.File} + ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id, blob_id); + ALTER TABLE ${TableName.Reaction} + ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id, creator, reaction); + ` + return ['recrate_primary_keys-v6_6', sql] +} + +function migrationV6_7 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Reaction} + 
ADD CONSTRAINT fk_reactions_message + FOREIGN KEY (workspace_id, card_id, message_id) + REFERENCES ${TableName.Message} (workspace_id, card_id, id) + ON DELETE CASCADE; + + CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message + ON ${TableName.Patch} (workspace_id, card_id, message_id); + + CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx + ON ${TableName.File} (workspace_id, card_id, message_id); + + CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message + ON ${TableName.Reaction} (workspace_id, card_id, message_id); + + ALTER TABLE ${TableName.Thread} ADD CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id); + + CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message + ON ${TableName.Thread} (workspace_id, card_id, message_id); + + CREATE INDEX IF NOT EXISTS workspace_id_card_id_message_id_idx + ON ${TableName.LinkPreview} (workspace_id, card_id, message_id); + + CREATE INDEX IF NOT EXISTS notifications_context_id_read_created_desc_idx + ON ${TableName.Notification} (context_id, read, created DESC); + ` + return ['recreate_constraints_and_indexes-v6_7', sql] +} + +function migrationV6_8 (): [string, string] { + const sql = ` + ALTER TABLE ${TableName.Message} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.Patch} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.File} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.Reaction} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.Thread} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.LinkPreview} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.Notification} + DROP COLUMN IF EXISTS message_id_old; + ALTER TABLE ${TableName.MessageCreated} + DROP COLUMN IF EXISTS message_id_old; + ` + return ['drop_old_message_id_columns-v6_8', sql] +} diff --git a/packages/cockroach/src/messageId.ts b/packages/cockroach/src/messageId.ts deleted file mode 100644 index cf7e2c0ad97..00000000000 --- a/packages/cockroach/src/messageId.ts +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. - -import { MessageID } from '@hcengineering/communication-types' - -const MSB_MASK = 1n << 63n -const EXTERNAL_FLAG = 1n << 62n -const TICK_MASK = (1n << 62n) - 1n - -/* - Epoch: 2022-01-01T00:00:00Z - 64-bit layout: - bit 63 = 0 - bit 62 = external flag - bits 61..0 = monotonic tick -*/ - -const EPOCH_OFFSET_US = BigInt(Date.UTC(2022, 0, 1)) * 1000n -const monoStartNs = process.hrtime.bigint() -const realStartUs = BigInt(Date.now()) * 1000n - -let lastTick = 0n - -/** - * Generate the next monotonic tick (unit = 10µs) since epoch2022 - * strictly monotonic: bumps +1 on conflict or clock rollback - */ -function getMonotonicTick10us (): bigint { - const nowNs = process.hrtime.bigint() - const deltaUs = (nowNs - monoStartNs) / 1000n - const absUs = realStartUs + deltaUs - const relUs = absUs > EPOCH_OFFSET_US ? 
absUs - EPOCH_OFFSET_US : 0n - const candidate = relUs / 10n - const tick = candidate <= lastTick ? lastTick + 1n : candidate - lastTick = tick - return tick -} - -function tick10usToDate (tick: bigint): { date: Date, us: number } { - const totalUs = tick * 10n - const absUs = EPOCH_OFFSET_US + totalUs - const ms = Number(absUs / 1000n) - const us = Number(absUs % 1000n) - return { date: new Date(ms), us } -} - -export function generateMessageId (external = false): MessageID { - const tick = getMonotonicTick10us() - const id = external ? (tick | EXTERNAL_FLAG) : tick - return id.toString() as MessageID -} - -export function messageIdToDate (id: string): Date | undefined { - let n: bigint - try { - n = BigInt(id) - } catch { - throw new Error(`Invalid bigint string: ${id}`) - } - const tick = n & TICK_MASK - try { - const date = tick10usToDate(tick).date - if (isNaN(date.getTime())) { - return undefined - } - return date - } catch (err: any) { - console.error('Failed to parse message id', id, err) - return undefined - } -} - -export function isExternalMessageId (id: string): boolean { - let n: bigint - try { - n = BigInt(id) - } catch { - throw new Error(`Invalid bigint string: ${id}`) - } - - if ((n & MSB_MASK) !== 0n) { - throw new Error(`Invalid MessageID: MSB (bit63) must be 0, got ${id}`) - } - - return (n & EXTERNAL_FLAG) !== 0n -} - -export function isInternalMessageId (messageId: string): boolean { - return !isExternalMessageId(messageId) -} diff --git a/packages/cockroach/src/db/schema.ts b/packages/cockroach/src/schema.ts similarity index 99% rename from packages/cockroach/src/db/schema.ts rename to packages/cockroach/src/schema.ts index 9de97081bb4..9cc01a6a88f 100644 --- a/packages/cockroach/src/db/schema.ts +++ b/packages/cockroach/src/schema.ts @@ -63,7 +63,7 @@ export interface MessageDb { export const messageSchema: Record = { workspace_id: 'uuid', card_id: 'varchar', - id: 'int8', + id: 'varchar', created: 'timestamptz', content: 'text', creator: 'varchar', diff --git a/packages/query/src/collaborators/query.ts b/packages/query/src/collaborators/query.ts index 533133373ac..7942521fac7 100644 --- a/packages/query/src/collaborators/query.ts +++ b/packages/query/src/collaborators/query.ts @@ -15,16 +15,15 @@ import type { AccountID, Collaborator, FindCollaboratorsParams, WorkspaceID } from '@hcengineering/communication-types' import { - type CardRemovedEvent, - CardResponseEventType, type EventResult, type FindClient, - NotificationResponseEventType, type QueryCallback, - type RequestEvent, - type ResponseEvent, - AddedCollaboratorsEvent, - RemovedCollaboratorsEvent + type Event, + NotificationEventType, + CardEventType, + AddCollaboratorsEvent, + RemoveCollaboratorsEvent, + RemoveCardEvent } from '@hcengineering/communication-sdk-types' import { QueryResult } from '../result' @@ -51,22 +50,22 @@ export class CollaboratorsQuery implements Query { + async onEvent (event: Event): Promise { if (this.isCardRemoved) return switch (event.type) { - case NotificationResponseEventType.AddedCollaborators: + case NotificationEventType.AddCollaborators: await this.onCollaboratorsAdded(event) break - case NotificationResponseEventType.RemovedCollaborators: + case NotificationEventType.RemoveCollaborators: await this.onCollaboratorsRemoved(event) break - case CardResponseEventType.CardRemoved: + case CardEventType.RemoveCard: await this.onCardRemoved(event) break } } - async onCollaboratorsAdded (event: AddedCollaboratorsEvent): Promise { + async onCollaboratorsAdded (event: 
AddCollaboratorsEvent): Promise { if (event.cardId !== this.params.card || event.collaborators.length === 0) return if (this.result instanceof Promise) this.result = await this.result @@ -90,7 +89,7 @@ export class CollaboratorsQuery implements Query { + async onCollaboratorsRemoved (event: RemoveCollaboratorsEvent): Promise { if (event.cardId !== this.params.card || event.collaborators.length === 0) return if (this.result instanceof Promise) this.result = await this.result @@ -110,7 +109,7 @@ export class CollaboratorsQuery implements Query { + async onCardRemoved (event: RemoveCardEvent): Promise { if (this.params.card !== event.cardId) return if (this.result instanceof Promise) this.result = await this.result @@ -119,7 +118,7 @@ export class CollaboratorsQuery implements Query): Promise {} + async onRequest (event: Event, promise: Promise): Promise {} private async initResult (): Promise> { try { diff --git a/packages/query/src/label/query.ts b/packages/query/src/label/query.ts index a927ecd8ff0..02c6aaec05f 100644 --- a/packages/query/src/label/query.ts +++ b/packages/query/src/label/query.ts @@ -15,17 +15,16 @@ import type { FindLabelsParams, Label, WorkspaceID } from '@hcengineering/communication-types' import { - type CardRemovedEvent, - CardResponseEventType, - type CardTypeUpdatedEvent, type EventResult, type FindClient, - type LabelCreatedEvent, - type LabelRemovedEvent, - LabelResponseEventType, type QueryCallback, - type RequestEvent, - type ResponseEvent + type Event, + LabelEventType, + CardEventType, + CreateLabelEvent, + RemoveLabelEvent, + UpdateCardTypeEvent, + RemoveCardEvent } from '@hcengineering/communication-sdk-types' import { QueryResult } from '../result' @@ -56,38 +55,45 @@ export class LabelsQuery implements Query { } } - async onEvent (event: ResponseEvent): Promise { + async onEvent (event: Event): Promise { if (this.isCardRemoved) return switch (event.type) { - case LabelResponseEventType.LabelCreated: + case LabelEventType.CreateLabel: await this.onLabelCreated(event) break - case LabelResponseEventType.LabelRemoved: + case LabelEventType.RemoveLabel: await this.onLabelRemoved(event) break - case CardResponseEventType.CardTypeUpdated: + case CardEventType.UpdateCardType: await this.onCardTypeUpdated(event) break - case CardResponseEventType.CardRemoved: + case CardEventType.RemoveCard: await this.onCardRemoved(event) break } } - async onLabelCreated (event: LabelCreatedEvent): Promise { + async onLabelCreated (event: CreateLabelEvent): Promise { if (this.result instanceof Promise) this.result = await this.result if (this.params.limit != null && this.result.length >= this.params.limit) return + const label: Label = { + labelId: event.labelId, + cardId: event.cardId, + cardType: event.cardType, + account: event.account, + created: event.date ?? 
new Date() + } - const match = this.match(event.label) + const match = this.match(label) if (!match) return - const existing = this.result.get(getId(event.label)) + const existing = this.result.get(getId(label)) if (existing != null) return - this.result.push(event.label) + this.result.push(label) void this.notify() } - async onLabelRemoved (event: LabelRemovedEvent): Promise { + async onLabelRemoved (event: RemoveLabelEvent): Promise { if (this.result instanceof Promise) this.result = await this.result const existing = this.result @@ -105,7 +111,7 @@ export class LabelsQuery implements Query { void this.notify() } - async onCardTypeUpdated (event: CardTypeUpdatedEvent): Promise { + async onCardTypeUpdated (event: UpdateCardTypeEvent): Promise { if (this.result instanceof Promise) this.result = await this.result const result = this.result.getResult() @@ -146,7 +152,7 @@ export class LabelsQuery implements Query { } } - async onCardRemoved (event: CardRemovedEvent): Promise { + async onCardRemoved (event: RemoveCardEvent): Promise { if (this.result instanceof Promise) this.result = await this.result if (this.params.card === event.cardId) { @@ -175,7 +181,7 @@ export class LabelsQuery implements Query { } } - async onRequest (event: RequestEvent, promise: Promise): Promise {} + async onRequest (event: Event, promise: Promise): Promise {} private async initResult (): Promise> { try { diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index d48e2c38359..78e07fee31e 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -28,9 +28,8 @@ import { } from '@hcengineering/communication-types' import { deepEqual } from 'fast-equals' import type { - ResponseEvent, QueryCallback, - RequestEvent, + Event, EventResult, PagedQueryCallback, FindClient @@ -67,13 +66,13 @@ export class LiveQueries { } } - async onEvent (event: ResponseEvent): Promise { + async onEvent (event: Event): Promise { for (const q of this.queries.values()) { void q.onEvent(event) } } - async onRequest (event: RequestEvent, promise: Promise): Promise { + async onRequest (event: Event, promise: Promise): Promise { for (const q of this.queries.values()) { void q.onRequest(event, promise) } @@ -124,7 +123,7 @@ export class LiveQueries { QueryClass: new (...args: any[]) => Q, finder: (params: P) => Q | undefined ): CreateQueryResult { - const query = this.createQuery(params, callback, QueryClass, finder) + const query = this.findOrCreateQuery(params, callback, QueryClass, finder) this.queries.set(query.id, query) return { @@ -134,7 +133,7 @@ export class LiveQueries { } } - private createQuery( + private findOrCreateQuery( params: P, callback: any, QueryClass: new (...args: any[]) => Q, @@ -150,6 +149,7 @@ export class LiveQueries { return exists } else { const result = exists.copyResult() + return new QueryClass(this.client, this.workspace, this.filesUrl, id, params, callback, result) } } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 4be5e5340f2..45fcbb98a7b 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -16,43 +16,28 @@ import { type FindMessagesGroupsParams, type FindMessagesParams, - type LinkPreview, type Message, type MessageID, type MessagesGroup, - MessageType, type ParsedFile, type Patch, - PatchType, - type Reaction, SortingOrder, type WorkspaceID, - type AttachedBlob + PatchType } from '@hcengineering/communication-types' import { - type CardRemovedEvent, - CardResponseEventType, type 
CreateMessageEvent, type CreateMessageResult, type EventResult, - type BlobAttachedEvent, - type BlobDetachedEvent, type FindClient, - type LinkPreviewCreatedEvent, - type LinkPreviewRemovedEvent, - type MessageCreatedEvent, - MessageRequestEventType, - MessageResponseEventType, + MessageEventType, type PagedQueryCallback, - type ReactionSetEvent, - type ReactionRemovedEvent, - type RequestEvent, - type ResponseEvent, - type ThreadAttachedEvent, - PatchCreatedEvent, - ThreadUpdatedEvent + type Event, + PatchEvent, + CardEventType, + RemoveCardEvent } from '@hcengineering/communication-sdk-types' -import { applyPatch, applyPatches } from '@hcengineering/communication-shared' +import { applyPatches, MessageProcessor } from '@hcengineering/communication-shared' import { loadGroupFile } from '@hcengineering/communication-yaml' import { v4 as uuid } from 'uuid' @@ -66,7 +51,6 @@ import { type QueryId } from '../types' import { WindowImpl } from '../window' -import { attachBlob, addLinkPreview, addReaction, detachBlob, removeLinkPreview, removeReaction } from '../utils' const GROUPS_LIMIT = 4 @@ -97,9 +81,6 @@ export class MessagesQuery implements PagedQuery { buffer: [] as Message[] } - private readonly attachedBlobs = new Map() - private readonly createdReactions = new Map() - private readonly createdLinkPreviews = new Map() private readonly createdPatches = new Map() private readonly tmpMessages = new Map() @@ -153,54 +134,29 @@ export class MessagesQuery implements PagedQuery { } } - async onEvent (event: ResponseEvent): Promise { + async onEvent (event: Event): Promise { if (this.isCardRemoved) return switch (event.type) { - case MessageResponseEventType.MessageCreated: { + case MessageEventType.CreateMessage: { await this.onMessageCreatedEvent(event) break } - case MessageResponseEventType.PatchCreated: { - await this.onPatchCreatedEvent(event) + case MessageEventType.UpdatePatch: + case MessageEventType.RemovePatch: + case MessageEventType.ThreadPatch: + case MessageEventType.LinkPreviewPatch: + case MessageEventType.BlobPatch: + case MessageEventType.ReactionPatch: { + await this.onPatchEvent(event) break } - case MessageResponseEventType.ReactionSet: { - await this.onReactionSetEvent(event) - break - } - case MessageResponseEventType.ReactionRemoved: { - await this.onReactionRemovedEvent(event) - break - } - case MessageResponseEventType.BlobAttached: { - await this.onBlobAttachedEvent(event) - break - } - case MessageResponseEventType.BlobDetached: { - await this.onBlobDetachedEvent(event) - break - } - case MessageResponseEventType.LinkPreviewCreated: { - await this.onLinkPreviewCreatedEvent(event) - break - } - case MessageResponseEventType.LinkPreviewRemoved: { - await this.onLinkPreviewRemovedEvent(event) - break - } - case MessageResponseEventType.ThreadAttached: - await this.onThreadAttachedEvent(event) - break - case MessageResponseEventType.ThreadUpdated: - await this.onThreadUpdatedEvent(event) - break - case CardResponseEventType.CardRemoved: + case CardEventType.RemoveCard: await this.onCardRemoved(event) break } } - async onCardRemoved (event: CardRemovedEvent): Promise { + async onCardRemoved (event: RemoveCardEvent): Promise { if (this.result instanceof Promise) this.result = await this.result if (this.params.card === event.cardId) { this.isCardRemoved = true @@ -224,10 +180,10 @@ export class MessagesQuery implements PagedQuery { } } - async onRequest (event: RequestEvent, promise: Promise): Promise { + async onRequest (event: Event, promise: Promise): Promise { if 
(this.isCardRemoved) return switch (event.type) { - case MessageRequestEventType.CreateMessage: { + case MessageEventType.CreateMessage: { await this.onCreateMessageRequest(event, promise as Promise) break } @@ -239,28 +195,15 @@ export class MessagesQuery implements PagedQuery { const eventId = event._id if (eventId == null || event.socialId == null) return - const tmpId = uuid() as MessageID + const tmpId = event.messageId ?? (uuid() as MessageID) let resultId: MessageID | undefined - const tmpMessage: Message = { - id: tmpId, - type: MessageType.Message, - removed: false, - cardId: event.cardId, - content: event.content, - creator: event.socialId, - created: new Date(), - extra: event.extra, - edited: undefined, - thread: undefined, - reactions: [], - blobs: [], - linkPreviews: [] - } + const tmpMessage = MessageProcessor.createFromEvent(event, tmpId) if (!this.match(tmpMessage)) return promise .then(async (result) => { + if (tmpId === result.messageId) return this.tmpMessages.delete(eventId) resultId = result.messageId if (this.result instanceof Promise) this.result = await this.result @@ -270,7 +213,7 @@ export class MessagesQuery implements PagedQuery { await this.notify() } } else { - const updatedMessage = this.fillMessage({ ...tmpMessage, id: resultId }) + const updatedMessage = this.patchMessage({ ...tmpMessage, id: resultId }) this.result.delete(tmpId) this.insertMessage(this.result, updatedMessage) @@ -776,105 +719,17 @@ export class MessagesQuery implements PagedQuery { return true } - private async onThreadAttachedEvent (event: ThreadAttachedEvent): Promise { - if (this.params.replies !== true) return - if (this.params.card !== event.thread.cardId) return - if (this.result instanceof Promise) this.result = await this.result - - const message = this.result.get(event.thread.messageId) - if (message !== undefined) { - const updated: Message = { - ...message, - thread: event.thread - } - - this.result.update(updated) - void this.notify() - } - - this.next.buffer = this.next.buffer.map((it) => { - if (it.id === event.thread.messageId) { - return { - ...it, - thread: event.thread - } - } - return it - }) - this.prev.buffer = this.next.buffer.map((it) => { - if (it.id === event.thread.messageId) { - return { - ...it, - thread: event.thread - } - } - return it - }) - } - - private updateThread (message: Message, repliesCountOp?: 'increment' | 'decrement', lastReply?: Date): Message { - if (message.thread === undefined) return message - let count = message.thread.repliesCount - - if (repliesCountOp === 'increment') { - count = count + 1 - } else if (repliesCountOp === 'decrement') { - count = Math.max(count - 1, 0) - } - - return { - ...message, - thread: { ...message.thread, repliesCount: count, lastReply: lastReply ?? 
message.thread.lastReply } - } - } - - private async onThreadUpdatedEvent (event: ThreadUpdatedEvent): Promise { - if (this.params.replies !== true) return - if (this.params.card !== event.cardId) return - if (this.result instanceof Promise) this.result = await this.result - - const message = this.result.get(event.messageId) - if (message !== undefined) { - const updated: Message = this.updateThread(message, event.updates.repliesCountOp, event.updates.lastReply) - - this.result.update(updated) - void this.notify() - } - - this.next.buffer = this.next.buffer.map((it) => { - if (it.id === event.messageId) { - return this.updateThread(it, event.updates.repliesCountOp, event.updates.lastReply) - } - return it - }) - this.prev.buffer = this.next.buffer.map((it) => { - if (it.id === event.messageId) { - return this.updateThread(it, event.updates.repliesCountOp, event.updates.lastReply) - } - return it - }) - } - - private fillMessage (origin: Message): Message { + private patchMessage (origin: Message): Message { let message = origin - if (this.params.files === true) { - message.blobs = this.attachedBlobs.get(message.id) ?? [] - } - if (this.params.reactions === true) { - message.reactions = this.createdReactions.get(message.id) ?? [] - } - if (this.params.links === true) { - message.linkPreviews = this.createdLinkPreviews.get(message.id) ?? [] - } const patches = this.createdPatches.get(message.id) ?? [] - message = applyPatches(message, patches) + message = applyPatches(message, patches, this.allowedPatches()) return message } - private async onMessageCreatedEvent (event: MessageCreatedEvent): Promise { + private async onMessageCreatedEvent (event: CreateMessageEvent): Promise { + if (this.params.card !== event.cardId || event.messageId == null) return if (this.result instanceof Promise) this.result = await this.result - if (this.params.card !== event.message.cardId) return - let message = event.message + let message = MessageProcessor.createFromEvent(event) const exists = this.result.get(message.id) if (exists !== undefined) { @@ -883,7 +738,7 @@ export class MessagesQuery implements PagedQuery { } if (!this.match(message)) return - message = this.fillMessage(message) + message = this.patchMessage(message) if (this.result.isTail()) { const eventId = event._id @@ -929,221 +784,65 @@ export class MessagesQuery implements PagedQuery { } private cleanCache (message: MessageID): void { - this.attachedBlobs.delete(message) - this.createdReactions.delete(message) - this.createdLinkPreviews.delete(message) this.createdPatches.delete(message) } - private async onPatchCreatedEvent (event: PatchCreatedEvent): Promise { + private async onPatchEvent (event: PatchEvent): Promise { if (this.params.card !== event.cardId) return - if (this.result instanceof Promise) this.result = await this.result + const allowedPatches = this.allowedPatches() + const eventPatches = MessageProcessor.eventToPatches(event).filter((it) => allowedPatches.includes(it.type)) - const { patch, messageId, messageCreated } = event - const groups = this.groupsBuffer.filter( - (it) => it.fromDate.getTime() <= messageCreated.getTime() && it.toDate.getTime() >= messageCreated.getTime() - ) - - for (const group of groups) { - if (group.patches != null) { - group.patches.push(patch) - } - } - - const message = this.result.get(messageId) - if (message === undefined) return - - if (message.created < patch.created) { - this.result.update(applyPatch(message, patch, [PatchType.update, PatchType.remove])) - await this.notify() - } - } - - private 
async onReactionSetEvent (event: ReactionSetEvent): Promise { - if (this.params.reactions !== true || this.params.card !== event.cardId) return - const current = this.createdReactions.get(event.messageId) ?? [] - this.createdReactions.set(event.messageId, [...current, event.reaction]) - if (this.result instanceof Promise) this.result = await this.result - - const reaction = { - ...event.reaction, - created: event.reaction.created - } - - const message = this.result.get(event.messageId) - if (message !== undefined) { - this.result.update(addReaction(message, reaction)) - void this.notify() - } - - const fromNextBuffer = this.next.buffer.find((it) => it.id === event.messageId) - if (fromNextBuffer !== undefined) { - addReaction(fromNextBuffer, reaction) - } - const fromPrevBuffer = this.prev.buffer.find((it) => it.id === event.messageId) - if (fromPrevBuffer !== undefined) { - addReaction(fromPrevBuffer, reaction) - } - } - - private async onReactionRemovedEvent (event: ReactionRemovedEvent): Promise { - if (this.params.reactions !== true || this.params.card !== event.cardId) return - const current = this.createdReactions.get(event.messageId) ?? [] - - const reactions = current.filter((it) => it.reaction !== event.reaction || it.creator !== event.socialId) - this.createdReactions.set(event.messageId, reactions) - if (this.result instanceof Promise) this.result = await this.result - - const message = this.result.get(event.messageId) - if (message !== undefined) { - const updated = removeReaction(message, event.reaction, event.socialId) - if (updated.reactions.length !== message.reactions.length) { - this.result.update(updated) - void this.notify() - } - } - this.next.buffer = this.next.buffer.map((it) => - it.id === event.messageId ? removeReaction(it, event.reaction, event.socialId) : it - ) - this.prev.buffer = this.prev.buffer.map((it) => - it.id === event.messageId ? removeReaction(it, event.reaction, event.socialId) : it - ) - } - - private updateFilesCache (message: MessageID, blobs: AttachedBlob[]): void { - const blobsCache = this.attachedBlobs.get(message) ?? [] - this.attachedBlobs.set(message, blobsCache) - for (const blob of blobs) { - const current = blobsCache.find((it) => it.blobId === blob.blobId) - if (current === undefined) { - blobsCache.push(blob) - } - } - } - - private async onBlobAttachedEvent (event: BlobAttachedEvent): Promise { - if (this.params.files !== true || event.cardId !== this.params.card) return - console.log('onFileCreatedEvent', event) - this.updateFilesCache(event.messageId, [event.blob]) - if (this.result instanceof Promise) this.result = await this.result - - const { blob } = event - const message = this.result.get(event.messageId) - if (message !== undefined) { - if (!message.blobs.some((it) => it.blobId === blob.blobId)) { - message.blobs.push(blob) - this.result.update(message) - await this.notify() - } - } else { - console.log('no message for file', event) - } - - const fromNextBuffer = this.next.buffer.find((it) => it.id === event.messageId) - if (fromNextBuffer !== undefined) { - attachBlob(fromNextBuffer, blob) - } - const fromPrevBuffer = this.prev.buffer.find((it) => it.id === event.messageId) - if (fromPrevBuffer !== undefined) { - attachBlob(fromPrevBuffer, blob) - } - } - - private async onLinkPreviewCreatedEvent (event: LinkPreviewCreatedEvent): Promise { - if (this.params.links !== true || this.params.card !== event.cardId) return - const current = this.createdLinkPreviews.get(event.messageId) ?? 
[] - this.createdLinkPreviews.set(event.messageId, [...current, event.linkPreview]) + if (eventPatches.length === 0) return if (this.result instanceof Promise) this.result = await this.result - const message = this.result.get(event.messageId) - const { linkPreview } = event - if (message !== undefined) { - if (!message.linkPreviews.some((it) => it.id === linkPreview.id)) { - message.linkPreviews.push(linkPreview) - this.result.update(message) - await this.notify() - } - } - - const fromNextBuffer = this.next.buffer.find((it) => it.id === event.messageId) - if (fromNextBuffer !== undefined) { - addLinkPreview(fromNextBuffer, linkPreview) - } - const fromPrevBuffer = this.prev.buffer.find((it) => it.id === event.messageId) - if (fromPrevBuffer !== undefined) { - addLinkPreview(fromPrevBuffer, linkPreview) - } - } - - private async onLinkPreviewRemovedEvent (event: LinkPreviewRemovedEvent): Promise { - if (this.params.links !== true || this.params.card !== event.cardId) return - const current = this.createdLinkPreviews.get(event.messageId) ?? [] - const linkPreviews = current.filter((it) => it.id !== event.previewId) - this.createdLinkPreviews.set(event.messageId, linkPreviews) - - if (this.result instanceof Promise) this.result = await this.result - const message = this.result.get(event.messageId) - if (message !== undefined) { - const links = message.linkPreviews.filter((it) => it.id !== event.previewId) - if (links.length === message.linkPreviews.length) return - - const updated = { - ...message, - links - } - this.result.update(updated) - await this.notify() - } - this.next.buffer = this.next.buffer.map((it) => - it.id === event.messageId ? removeLinkPreview(it, event.previewId) : it - ) - this.prev.buffer = this.prev.buffer.map((it) => - it.id === event.messageId ? removeLinkPreview(it, event.previewId) : it - ) - } + const { messageId } = event - private async onBlobDetachedEvent (event: BlobDetachedEvent): Promise { - if (this.params.files !== true) return - if (this.params.card !== event.cardId) return - const current = this.attachedBlobs.get(event.messageId) ?? [] - const files = current.filter((it) => it.blobId !== event.blobId) - this.attachedBlobs.set(event.messageId, files) - if (this.result instanceof Promise) this.result = await this.result + const message = this.result.get(messageId) - const message = this.result.get(event.messageId) if (message !== undefined) { - const files = message.blobs.filter((it) => it.blobId !== event.blobId) - if (files.length === message.blobs.length) return + const updatedMessage = applyPatches(message, eventPatches, this.allowedPatches()) - const updated = { - ...message, - files - } - this.result.update(updated) + this.result.update(updatedMessage) await this.notify() + } else { + const currentPatches = this.createdPatches.get(messageId) ?? [] + const patches = currentPatches.concat(eventPatches) + this.createdPatches.set(messageId, patches) + + this.next.buffer = this.next.buffer.map((it) => { + if (it.id === event.messageId) { + this.createdPatches.delete(messageId) + return applyPatches(it, eventPatches, this.allowedPatches()) + } + return it + }) + this.prev.buffer = this.next.buffer.map((it) => { + if (it.id === event.messageId) { + this.createdPatches.delete(messageId) + return applyPatches(it, eventPatches, this.allowedPatches()) + } + return it + }) } - - this.next.buffer = this.next.buffer.map((it) => (it.id === event.messageId ? 
detachBlob(it, event.blobId) : it)) - this.prev.buffer = this.prev.buffer.map((it) => (it.id === event.messageId ? detachBlob(it, event.blobId) : it)) } private allowedPatches (): PatchType[] { const result = [PatchType.update, PatchType.remove] if (this.params.reactions === true) { - result.push(PatchType.setReaction, PatchType.removeReaction) + result.push(PatchType.reaction) } if (this.params.files === true) { - result.push(PatchType.attachBlob, PatchType.detachBlob) + result.push(PatchType.blob) } if (this.params.replies === true) { - result.push(PatchType.updateThread) + result.push(PatchType.thread) + } + if (this.params.links === true) { + result.push(PatchType.linkPreview) } - return result - } - private isAllowedPatch (type: PatchType): boolean { - return this.allowedPatches().includes(type) + return result } } diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 5604358a2e4..cad726d2ef4 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -16,58 +16,45 @@ import { type CardID, type FindNotificationContextParams, + FindNotificationsParams, type Message, type MessageID, type Notification, type NotificationContext, NotificationType, PatchType, + SortingOrder, type WorkspaceID } from '@hcengineering/communication-types' import { - type CardRemovedEvent, - CardResponseEventType, - type BlobAttachedEvent, - type BlobDetachedEvent, + CardEventType, + CreateNotificationContextEvent, + CreateNotificationEvent, + type Event, type FindClient, - MessageResponseEventType, - type NotificationContextCreatedEvent, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent, - type NotificationCreatedEvent, - NotificationResponseEventType, - type NotificationsRemovedEvent, - type NotificationUpdatedEvent, + MessageEventType, + NotificationEventType, type PagedQueryCallback, - type PatchCreatedEvent, - type RequestEvent, - type ResponseEvent, - type ThreadAttachedEvent, - type ThreadUpdatedEvent + PatchEvent, + RemoveCardEvent, + RemoveNotificationContextEvent, + RemoveNotificationsEvent, + UpdateNotificationContextEvent, + UpdateNotificationEvent } from '@hcengineering/communication-sdk-types' -import { applyPatch } from '@hcengineering/communication-shared' +import { + applyPatches, + MessageProcessor, + NotificationContextProcessor, + NotificationProcessor +} from '@hcengineering/communication-shared' import { defaultQueryParams, type PagedQuery, type QueryId } from '../types' import { QueryResult } from '../result' import { WindowImpl } from '../window' -import { - attachBlob, - attachThread, - findMessage, - loadMessageFromGroup, - matchNotification, - detachBlob, - updateThread -} from '../utils' -import { SortingOrder } from '@hcengineering/communication-types' - -const allowedPatchTypes = [ - PatchType.update, - PatchType.remove, - PatchType.attachBlob, - PatchType.detachBlob, - PatchType.updateThread -] +import { findMessage, loadMessageFromGroup, matchNotification } from '../utils' + +const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.blob] export class NotificationContextsQuery implements PagedQuery { private result: QueryResult | Promise> private forward: Promise | NotificationContext[] = [] @@ -123,54 +110,45 @@ export class NotificationContextsQuery implements PagedQuery { + async onEvent (event: Event): Promise { switch (event.type) { - case MessageResponseEventType.PatchCreated: { + case 
MessageEventType.BlobPatch: + case MessageEventType.RemovePatch: + case MessageEventType.UpdatePatch: { await this.onCreatePatchEvent(event) break } - case NotificationResponseEventType.NotificationCreated: { - await this.onCreateNotificationEvent(event) + case NotificationEventType.CreateNotificationContext: { + await this.onCreateNotificationContextEvent(event) break } - case NotificationResponseEventType.NotificationsRemoved: { - await this.onRemoveNotificationEvent(event) + case NotificationEventType.UpdateNotificationContext: { + await this.onUpdateNotificationContextEvent(event) break } - case NotificationResponseEventType.NotificationUpdated: { - await this.onUpdateNotificationEvent(event) + case NotificationEventType.RemoveNotificationContext: { + await this.onRemoveNotificationContextEvent(event) break } - case NotificationResponseEventType.NotificationContextCreated: { - await this.onCreateNotificationContextEvent(event) + case NotificationEventType.CreateNotification: { + await this.onCreateNotificationEvent(event) break } - case NotificationResponseEventType.NotificationContextUpdated: { - await this.onUpdateNotificationContextEvent(event) + case NotificationEventType.RemoveNotifications: { + await this.onRemoveNotificationEvent(event) break } - case NotificationResponseEventType.NotificationContextRemoved: { - await this.onRemoveNotificationContextEvent(event) + case NotificationEventType.UpdateNotification: { + await this.onUpdateNotificationEvent(event) break } - case CardResponseEventType.CardRemoved: + case CardEventType.RemoveCard: await this.onCardRemoved(event) break - case MessageResponseEventType.BlobAttached: - await this.onBlobAttached(event) - break - case MessageResponseEventType.BlobDetached: - await this.onBlobDetached(event) - break - case MessageResponseEventType.ThreadAttached: - await this.onThreadAttached(event) - break - case MessageResponseEventType.ThreadUpdated: - await this.onThreadUpdated(event) } } - async onRequest (event: RequestEvent): Promise {} + async onRequest (event: Event): Promise {} async unsubscribe (): Promise { await this.client.unsubscribeQuery(this.id) @@ -314,17 +292,18 @@ export class NotificationContextsQuery implements PagedQuery { + private async onCreateNotificationContextEvent (event: CreateNotificationContextEvent): Promise { + if (event.contextId === undefined) return if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward if (this.result instanceof Promise) this.result = await this.result - const context = event.context - - if (this.result.get(context.id) !== undefined) { + if (this.result.get(event.contextId) !== undefined) { return } + const context = NotificationContextProcessor.createFromEvent(event) + if (!this.match(context)) { return } @@ -333,58 +312,18 @@ export class NotificationContextsQuery implements PagedQuery { - const isUpdated = await this.updateMessage(event.cardId, event.patch.messageId, (message) => - applyPatch(message, event.patch, allowedPatchTypes) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onBlobAttached (event: BlobAttachedEvent): Promise { + private async onCreatePatchEvent (event: PatchEvent): Promise { + const patches = MessageProcessor.eventToPatches(event).filter((it) => allowedPatchTypes.includes(it.type)) + if (patches.length === 0) return const isUpdated = await this.updateMessage(event.cardId, event.messageId, (message) => - attachBlob(message, event.blob) + 
applyPatches(message, patches, allowedPatchTypes) ) if (isUpdated) { void this.notify() } } - private async onBlobDetached (event: BlobDetachedEvent): Promise { - const isUpdated = await this.updateMessage(event.cardId, event.messageId, (message) => - detachBlob(message, event.blobId) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onThreadAttached (event: ThreadAttachedEvent): Promise { - const isUpdated = await this.updateMessage(event.thread.cardId, event.thread.messageId, (message) => - attachThread( - message, - event.thread.threadId, - event.thread.threadType, - event.thread.repliesCount, - event.thread.lastReply - ) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onThreadUpdated (event: ThreadUpdatedEvent): Promise { - const isUpdated = await this.updateMessage(event.cardId, event.messageId, (message) => - updateThread(message, event.threadId, event.updates.repliesCountOp, event.updates.lastReply) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onRemoveNotificationEvent (event: NotificationsRemovedEvent): Promise { + private async onRemoveNotificationEvent (event: RemoveNotificationsEvent): Promise { if (this.params.notifications == null) return if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward @@ -415,17 +354,21 @@ export class NotificationContextsQuery implements PagedQuery { + private async onUpdateNotificationEvent (event: UpdateNotificationEvent): Promise { if (this.params.notifications == null) return if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward if (this.result instanceof Promise) this.result = await this.result - const context = this.result.get(event.query.context) + const context = this.result.get(event.contextId) if (context?.notifications === undefined) return + let matchQuery: FindNotificationsParams = { ...event.query, context: event.contextId, account: event.account } + if (event.query.untilDate != null) { + matchQuery = { ...matchQuery, created: { lessOrEqual: event.query.untilDate } } + } const toUpdate = context.notifications.filter( - (it) => matchNotification(it, event.query) && it.read !== event.updates.read + (it) => matchNotification(it, matchQuery) && it.read !== event.updates.read ) if (toUpdate === undefined || (toUpdate?.length ?? 
0) === 0) return const toUpdateMap = new Map(toUpdate.map((it) => [it.id, it])) @@ -453,19 +396,20 @@ export class NotificationContextsQuery implements PagedQuery { - if (this.params.notifications == null) return + private async onCreateNotificationEvent (event: CreateNotificationEvent): Promise { + if (this.params.notifications == null || event.notificationId == null) return if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward if (this.result instanceof Promise) this.result = await this.result - const match = matchNotification(event.notification, { + const notification = NotificationProcessor.createFromEvent(event) + const match = matchNotification(notification, { type: this.params.notifications.type, read: this.params.notifications.read }) if (!match) return - const context = this.result.get(event.notification.contextId) + const context = this.result.get(notification.contextId) if (context !== undefined) { const message = this.params.notifications.message === true @@ -474,8 +418,8 @@ export class NotificationContextsQuery implements PagedQuery { + private async onRemoveNotificationContextEvent (event: RemoveNotificationContextEvent): Promise { if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward if (this.result instanceof Promise) this.result = await this.result const length = this.result.length - const deleted = this.result.delete(event.context.id) + const deleted = this.result.delete(event.contextId) if (deleted != null) { if (this.params.limit != null && length >= this.params.limit && this.result.length < this.params.limit) { @@ -549,7 +493,7 @@ export class NotificationContextsQuery implements PagedQuery { + private async onUpdateNotificationContextEvent (event: UpdateNotificationContextEvent): Promise { if (this.forward instanceof Promise) this.forward = await this.forward if (this.backward instanceof Promise) this.backward = await this.backward if (this.result instanceof Promise) this.result = await this.result @@ -559,12 +503,14 @@ export class NotificationContextsQuery implements PagedQuery ({ ...it, read: - it.type === NotificationType.Message ? event.lastView != null && event.lastView >= it.created : it.read + it.type === NotificationType.Message + ? event.updates.lastView != null && event.updates.lastView >= it.created + : it.read })) ) : currentNotifications @@ -586,14 +532,14 @@ export class NotificationContextsQuery implements PagedQuery this.params.order === SortingOrder.Descending ? (b.lastNotify?.getTime() ?? 0) - (a.lastNotify?.getTime() ?? 
0) @@ -603,7 +549,7 @@ export class NotificationContextsQuery implements PagedQuery { + async onCardRemoved (event: RemoveCardEvent): Promise { if (this.result instanceof Promise) this.result = await this.result let updated = false const result = this.result.getResult() diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index c57539b8783..6b5fac33c68 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -25,39 +25,28 @@ import { type WorkspaceID } from '@hcengineering/communication-types' import { - type CardRemovedEvent, - CardResponseEventType, - type BlobAttachedEvent, - type BlobDetachedEvent, type FindClient, - MessageResponseEventType, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent, - type NotificationCreatedEvent, - NotificationResponseEventType, - type NotificationsRemovedEvent, - type NotificationUpdatedEvent, type PagedQueryCallback, - type PatchCreatedEvent, - type RequestEvent, - type ResponseEvent, - type ThreadAttachedEvent, - type ThreadUpdatedEvent + type Event, + NotificationEventType, + MessageEventType, + CardEventType, + CreateNotificationEvent, + UpdateNotificationContextEvent, + UpdateNotificationEvent, + RemoveNotificationsEvent, + RemoveNotificationContextEvent, + RemoveCardEvent, + PatchEvent } from '@hcengineering/communication-sdk-types' -import { applyPatch } from '@hcengineering/communication-shared' +import { applyPatches, MessageProcessor, NotificationProcessor } from '@hcengineering/communication-shared' import { defaultQueryParams, type PagedQuery, type QueryId } from '../types' import { QueryResult } from '../result' import { WindowImpl } from '../window' -import { attachBlob, attachThread, loadMessageFromGroup, matchNotification, detachBlob, updateThread } from '../utils' +import { loadMessageFromGroup, matchNotification } from '../utils' -const allowedPatchTypes = [ - PatchType.update, - PatchType.remove, - PatchType.attachBlob, - PatchType.detachBlob, - PatchType.updateThread -] +const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.blob] export class NotificationQuery implements PagedQuery { private result: QueryResult | Promise> @@ -104,49 +93,39 @@ export class NotificationQuery implements PagedQuery { + async onEvent (event: Event): Promise { switch (event.type) { - case NotificationResponseEventType.NotificationCreated: { + case NotificationEventType.CreateNotification: { await this.onCreateNotificationEvent(event) break } - case NotificationResponseEventType.NotificationsRemoved: { + case NotificationEventType.RemoveNotifications: { await this.onRemoveNotificationsEvent(event) break } - case NotificationResponseEventType.NotificationUpdated: { + case NotificationEventType.UpdateNotification: { await this.onUpdateNotificationEvent(event) break } - case NotificationResponseEventType.NotificationContextUpdated: { + case NotificationEventType.UpdateNotificationContext: { await this.onUpdateNotificationContextEvent(event) break } - case NotificationResponseEventType.NotificationContextRemoved: + case NotificationEventType.RemoveNotificationContext: await this.onRemoveNotificationContextEvent(event) break - case MessageResponseEventType.PatchCreated: + case MessageEventType.UpdatePatch: + case MessageEventType.RemovePatch: + case MessageEventType.BlobPatch: await this.onCreatePatchEvent(event) break - case MessageResponseEventType.BlobAttached: - await this.onBlobAttached(event) - break - case 
MessageResponseEventType.BlobDetached: - await this.onBlobDetached(event) - break - case MessageResponseEventType.ThreadAttached: - await this.onThreadAttached(event) - break - case MessageResponseEventType.ThreadUpdated: - await this.onThreadUpdated(event) - break - case CardResponseEventType.CardRemoved: + case CardEventType.RemoveCard: await this.onCardRemoved(event) break } } - async onRequest (event: RequestEvent): Promise {} + async onRequest (event: Event): Promise {} async unsubscribe (): Promise { await this.client.unsubscribeQuery(this.id) @@ -226,28 +205,30 @@ export class NotificationQuery implements PagedQuery { + private async onCreateNotificationEvent (event: CreateNotificationEvent): Promise { + if (event.notificationId == null) return if (this.result instanceof Promise) this.result = await this.result - if (this.result.get(event.notification.id) != null) return + if (this.result.get(event.notificationId) != null) return if (!this.result.isTail()) return - const match = matchNotification(event.notification, { ...this.params, created: undefined }) + const notification = NotificationProcessor.createFromEvent(event) + const match = matchNotification(notification, { ...this.params, created: undefined }) if (!match) return if (this.params.order === SortingOrder.Ascending) { - this.result.push(event.notification) + this.result.push(notification) } else { - this.result.unshift(event.notification) + this.result.unshift(notification) } await this.notify() } - private async onUpdateNotificationContextEvent (event: NotificationContextUpdatedEvent): Promise { + private async onUpdateNotificationContextEvent (event: UpdateNotificationContextEvent): Promise { if (this.result instanceof Promise) this.result = await this.result if (this.params.context != null && this.params.context !== event.contextId) return - const lastView = event.lastView + const lastView = event.updates.lastView if (lastView === undefined) return const toUpdate = this.result.getResult().filter((it) => it.contextId === event.contextId) @@ -284,7 +265,7 @@ export class NotificationQuery implements PagedQuery { + private async onUpdateNotificationEvent (event: UpdateNotificationEvent): Promise { if (this.result instanceof Promise) this.result = await this.result const toUpdate = ( @@ -297,7 +278,7 @@ export class NotificationQuery implements PagedQuery { + private async onRemoveNotificationsEvent (event: RemoveNotificationsEvent): Promise { if (this.params.context !== undefined && this.params.context !== event.contextId) return if (this.result instanceof Promise) this.result = await this.result @@ -318,19 +299,18 @@ export class NotificationQuery implements PagedQuery { + private async onRemoveNotificationContextEvent (event: RemoveNotificationContextEvent): Promise { + if (this.params.context != null && this.params.context !== event.contextId) return if (this.result instanceof Promise) this.result = await this.result - if (this.params.context != null && this.params.context !== event.context.id) return - - if (event.context.id === this.params.context) { + if (event.contextId === this.params.context) { if (this.result.length === 0) return this.result.deleteAll() this.result.setHead(true) this.result.setTail(true) void this.notify() } else { - const toRemove = this.result.getResult().filter((it) => it.contextId === event.context.id) + const toRemove = this.result.getResult().filter((it) => it.contextId === event.contextId) if (toRemove.length === 0) return const length = this.result.length @@ -346,69 +326,20 @@ export 
class NotificationQuery implements PagedQuery { - if (this.params.message !== true) return - const isUpdated = await this.updateMessage( - (it) => this.matchNotificationByMessage(it, event.cardId, event.patch.messageId), - (message) => applyPatch(message, event.patch, allowedPatchTypes) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onBlobAttached (event: BlobAttachedEvent): Promise { - if (this.params.message !== true) return - const isUpdated = await this.updateMessage( - (it) => this.matchNotificationByMessage(it, event.cardId, event.messageId), - (message) => attachBlob(message, event.blob) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onBlobDetached (event: BlobDetachedEvent): Promise { - if (this.params.message !== true) return - const isUpdated = await this.updateMessage( - (it) => this.matchNotificationByMessage(it, event.cardId, event.messageId), - (message) => detachBlob(message, event.blobId) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onThreadAttached (event: ThreadAttachedEvent): Promise { - if (this.params.message !== true) return - const isUpdated = await this.updateMessage( - (it) => this.matchNotificationByMessage(it, event.thread.cardId, event.thread.messageId), - (message) => - attachThread( - message, - event.thread.threadId, - event.thread.threadType, - event.thread.repliesCount, - event.thread.lastReply - ) - ) - if (isUpdated) { - void this.notify() - } - } - - private async onThreadUpdated (event: ThreadUpdatedEvent): Promise { + private async onCreatePatchEvent (event: PatchEvent): Promise { if (this.params.message !== true) return + const patches = MessageProcessor.eventToPatches(event).filter((it) => allowedPatchTypes.includes(it.type)) + if (patches.length === 0) return const isUpdated = await this.updateMessage( (it) => this.matchNotificationByMessage(it, event.cardId, event.messageId), - (message) => updateThread(message, event.threadId, event.updates.repliesCountOp, event.updates.lastReply) + (message) => applyPatches(message, patches, allowedPatchTypes) ) if (isUpdated) { void this.notify() } } - private async onCardRemoved (event: CardRemovedEvent): Promise { + private async onCardRemoved (event: RemoveCardEvent): Promise { if (this.params.message !== true) return if (this.result instanceof Promise) this.result = await this.result const isUpdated = await this.updateMessage( diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index d9471e7e008..e6f9d12a7bc 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -13,7 +13,7 @@ // limitations under the License. 
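The query handlers above now funnel every message mutation through one shared flow: convert the incoming PatchEvent into generic patches with MessageProcessor.eventToPatches, keep only the patch types the query subscribes to, and fold them into the cached message with applyPatches. A minimal sketch of that flow follows, assuming the signatures used elsewhere in this patch (eventToPatches returning an array of patches with a type field, applyPatches returning a new Message); the helper name applyPatchEvent is illustrative, not part of the SDK.

import { applyPatches, MessageProcessor } from '@hcengineering/communication-shared'
import { type Message, PatchType } from '@hcengineering/communication-types'
import type { PatchEvent } from '@hcengineering/communication-sdk-types'

// Patch kinds this consumer cares about, mirroring the allowedPatchTypes lists above
const allowedTypes = [PatchType.update, PatchType.remove, PatchType.blob]

function applyPatchEvent (message: Message, event: PatchEvent): Message {
  // Translate the wire-level event into generic patches
  const patches = MessageProcessor.eventToPatches(event).filter((it) => allowedTypes.includes(it.type))
  // Apply only the allowed patch kinds; return the original message untouched when nothing matches
  return patches.length === 0 ? message : applyPatches(message, patches, allowedTypes)
}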
// -import { type ResponseEvent, EventResult, RequestEvent } from '@hcengineering/communication-sdk-types' +import { EventResult, Event } from '@hcengineering/communication-sdk-types' import { SortingOrder, type Window, @@ -42,8 +42,8 @@ interface BaseQuery { readonly id: QueryId readonly params: P - onEvent: (event: ResponseEvent) => Promise - onRequest: (event: RequestEvent, promise: Promise) => Promise + onEvent: (event: Event) => Promise + onRequest: (event: Event, promise: Promise) => Promise unsubscribe: () => Promise diff --git a/packages/query/src/utils.ts b/packages/query/src/utils.ts index 3d6cb1c35a8..2da02b159a1 100644 --- a/packages/query/src/utils.ts +++ b/packages/query/src/utils.ts @@ -15,20 +15,13 @@ import { applyPatches } from '@hcengineering/communication-shared' import { - type BlobID, type CardID, - type CardType, - type AttachedBlob, type FindNotificationsParams, - type LinkPreview, - type LinkPreviewID, type Message, type MessageID, type MessagesGroup, type Notification, type Patch, - type Reaction, - type SocialID, SortingOrder, type WorkspaceID } from '@hcengineering/communication-types' @@ -113,96 +106,6 @@ export async function loadMessageFromGroup ( return applyPatches(message, patches) } -export function attachBlob (message: Message, blob: AttachedBlob): Message { - if (!message.blobs.some((it) => it.blobId === blob.blobId)) { - message.blobs.push(blob) - } - return message -} - -export function detachBlob (message: Message, blobId: BlobID): Message { - const blobs = message.blobs.filter((it) => it.blobId !== blobId) - if (blobs.length === message.blobs.length) return message - - return { - ...message, - blobs - } -} - -export function addLinkPreview (message: Message, linkPreview: LinkPreview): Message { - const current = message.linkPreviews.find((it) => it.id === linkPreview.id) - if (current === undefined) { - message.linkPreviews.push(linkPreview) - } - return message -} - -export function removeLinkPreview (message: Message, id: LinkPreviewID): Message { - const linkPreviews = message.linkPreviews.filter((it) => it.id !== id) - if (linkPreviews.length === message.linkPreviews.length) return message - return { - ...message, - linkPreviews - } -} - -export function addReaction (message: Message, reaction: Reaction): Message { - const current = message.reactions.find((it) => it.reaction === reaction.reaction && it.creator === reaction.creator) - if (current === undefined) { - message.reactions.push(reaction) - } - return message -} - -export function removeReaction (message: Message, emoji: string, creator: SocialID): Message { - const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) - if (reactions.length === message.reactions.length) return message - - return { - ...message, - reactions - } -} - -export function attachThread ( - message: Message, - threadId: CardID, - threadType: CardType, - repliesCount: number, - lastReply: Date -): Message { - if (message.thread !== undefined) { - return message - } - - message.thread = { - cardId: message.cardId, - messageId: message.id, - threadId, - threadType, - repliesCount, - lastReply - } - return message -} - -export function updateThread ( - message: Message, - threadId: CardID, - repliesCountOp: 'increment' | 'decrement', - lastReply?: Date -): Message { - if (message.thread === undefined || message.thread.threadId !== threadId) { - return message - } - - message.thread.repliesCount = - repliesCountOp === 'increment' ? 
message.thread.repliesCount + 1 : Math.max(message.thread.repliesCount - 1, 0) - message.thread.lastReply = lastReply ?? message.thread.lastReply - return message -} - export function matchNotification (notification: Notification, params: FindNotificationsParams): boolean { if (params.type !== undefined && params.type !== notification.type) return false if (params.read !== undefined && params.read !== notification.read) return false diff --git a/packages/rest-client/src/rest.ts b/packages/rest-client/src/rest.ts index 2a04572166f..62e0f822974 100644 --- a/packages/rest-client/src/rest.ts +++ b/packages/rest-client/src/rest.ts @@ -15,12 +15,12 @@ import { concatLink } from '@hcengineering/core' import { - MessageRequestEventType, type EventResult, - type RequestEvent, + type Event, type CreateMessageResult, type CreateMessageOptions, - PatchMessageOptions + UpdatePatchOptions, + MessageEventType } from '@hcengineering/communication-sdk-types' import { type FindMessagesGroupsParams, @@ -39,8 +39,7 @@ import { type MessageType, type BlobID, type MessageExtra, - type BlobData, - PatchType + type BlobData } from '@hcengineering/communication-types' import { retry } from '@hcengineering/communication-shared' @@ -80,7 +79,7 @@ class RestClientImpl implements RestClient { } } - async event (event: RequestEvent): Promise { + async event (event: Event): Promise { const response = await fetch(concatLink(this.endpoint, `/api/v1/event/${this.workspace}`), { method: 'POST', headers: { @@ -101,14 +100,14 @@ class RestClientImpl implements RestClient { cardType: CardType, content: Markdown, type: MessageType, - extra?: MessageExtra, - socialId?: SocialID, + extra: MessageExtra | undefined, + socialId: SocialID, date?: Date, messageId?: MessageID, options?: CreateMessageOptions ): Promise { const result = await this.event({ - type: MessageRequestEventType.CreateMessage, + type: MessageEventType.CreateMessage, messageType: type, cardId, cardType, @@ -125,64 +124,95 @@ class RestClientImpl implements RestClient { async updateMessage ( cardId: CardID, messageId: MessageID, - content?: Markdown, - extra?: MessageExtra, - socialId?: SocialID, + content: Markdown | undefined, + extra: MessageExtra | undefined, + socialId: SocialID, date?: Date, - options?: PatchMessageOptions + options?: UpdatePatchOptions ): Promise { await this.event({ - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.update, + type: MessageEventType.UpdatePatch, cardId, messageId, - data: { content, extra }, + content, + extra, socialId, date, options }) } - async removeMessage (cardId: CardID, messageId: MessageID, socialId?: SocialID): Promise { + async removeMessage (cardId: CardID, messageId: MessageID, socialId: SocialID, date?: Date): Promise { await this.event({ - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.remove, + type: MessageEventType.RemovePatch, cardId, messageId, - data: {}, - socialId + socialId, + date }) } - async attachBlob ( + async attachBlobs ( cardId: CardID, messageId: MessageID, - blobData: BlobData, - socialId?: SocialID, + blobs: BlobData[], + socialId: SocialID, date?: Date ): Promise { await this.event({ - type: MessageRequestEventType.AttachBlob, + type: MessageEventType.BlobPatch, cardId, messageId, - blobData, + operations: [ + { + opcode: 'attach', + blobs + } + ], socialId, date }) } - async detachBlob ( + async detachBlobs ( cardId: CardID, messageId: MessageID, - blobId: BlobID, - socialId?: SocialID, + blobIds: BlobID[], + socialId: SocialID, date?: Date ): Promise { 
await this.event({ - type: MessageRequestEventType.DetachBlob, + type: MessageEventType.BlobPatch, cardId, messageId, - blobId, + operations: [ + { + opcode: 'detach', + blobIds + } + ], + socialId, + date + }) + } + + async setBlobs ( + cardId: CardID, + messageId: MessageID, + blobs: BlobData[], + socialId: SocialID, + date?: Date + ): Promise { + await this.event({ + type: MessageEventType.BlobPatch, + cardId, + messageId, + operations: [ + { + opcode: 'set', + blobs + } + ], socialId, date }) diff --git a/packages/rest-client/src/types.ts b/packages/rest-client/src/types.ts index 71873865b27..3b5c1c675be 100644 --- a/packages/rest-client/src/types.ts +++ b/packages/rest-client/src/types.ts @@ -17,8 +17,8 @@ import type { CreateMessageOptions, CreateMessageResult, EventResult, - PatchMessageOptions, - RequestEvent + UpdatePatchOptions, + Event } from '@hcengineering/communication-sdk-types' import type { FindMessagesGroupsParams, @@ -46,15 +46,15 @@ export interface RestClient { findNotificationContexts: (params: FindNotificationContextParams) => Promise findNotifications: (params: FindNotificationsParams) => Promise - event: (event: RequestEvent) => Promise + event: (event: Event) => Promise createMessage: ( cardId: CardID, cardType: CardType, content: Markdown, type: MessageType, - extra?: MessageExtra, - socialId?: SocialID, + extra: MessageExtra | undefined, + socialId: SocialID, date?: Date, messageId?: MessageID, options?: CreateMessageOptions @@ -62,14 +62,27 @@ export interface RestClient { updateMessage: ( cardId: CardID, messageId: MessageID, - content?: Markdown, - extra?: MessageExtra, - socialId?: SocialID, + content: Markdown | undefined, + extra: MessageExtra | undefined, + socialId: SocialID, date?: Date, - options?: PatchMessageOptions + options?: UpdatePatchOptions ) => Promise - removeMessage: (cardId: CardID, messageId: MessageID, socialId?: SocialID) => Promise + removeMessage: (cardId: CardID, messageId: MessageID, socialId: SocialID, date?: Date) => Promise - attachBlob: (cardId: CardID, messageId: MessageID, data: BlobData, socialId?: SocialID, date?: Date) => Promise - detachBlob: (cardId: CardID, messageId: MessageID, blobId: BlobID, socialId?: SocialID, date?: Date) => Promise + attachBlobs: ( + cardId: CardID, + messageId: MessageID, + blobs: BlobData[], + socialId: SocialID, + date?: Date + ) => Promise + detachBlobs: ( + cardId: CardID, + messageId: MessageID, + blobIds: BlobID[], + socialId: SocialID, + date?: Date + ) => Promise + setBlobs: (cardId: CardID, messageId: MessageID, blobs: BlobData[], socialId: SocialID, date?: Date) => Promise } diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index d30108adcce..9b2f3fa8241 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -27,10 +27,11 @@ import type { NotificationContext } from '@hcengineering/communication-types' -import type { EventResult, RequestEvent, ResponseEvent } from './event' +import type { EventResult, Event } from './events/event' export interface FindClient { - onEvent: (event: ResponseEvent) => void + onEvent: (event: Event) => void + onRequest: (event: Event, promise: Promise) => void findMessages: (params: FindMessagesParams, queryId?: number) => Promise @@ -44,7 +45,5 @@ export interface FindClient { findCollaborators: (params: FindCollaboratorsParams, queryId?: number) => Promise - onRequest: (event: RequestEvent, promise: Promise) => void - unsubscribeQuery: (id: number) => Promise } diff --git 
a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 5d862a6c1d3..ccab09ff196 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -40,10 +40,8 @@ import { FindLabelsParams, LabelID, CardType, - PatchData, NotificationContent, NotificationType, - ComparisonOperator, BlobData, LinkPreviewData, LinkPreviewID @@ -63,9 +61,8 @@ export interface DbAdapter { createPatch: ( cardId: CardID, messageId: MessageID, - messageCreated: Date, type: PatchType, - data: PatchData, + data: Record, creator: SocialID, created: Date ) => Promise @@ -73,24 +70,26 @@ export interface DbAdapter { createMessagesGroup: (cardId: CardID, blobId: BlobID, fromDate: Date, toDate: Date, count: number) => Promise removeMessagesGroup: (cardId: CardID, blobId: BlobID) => Promise - setReaction: (cardId: CardID, messageId: MessageID, reaction: string, socialId: SocialID, date: Date) => Promise + addReaction: (cardId: CardID, messageId: MessageID, reaction: string, socialId: SocialID, date: Date) => Promise removeReaction: (cardId: CardID, message: MessageID, reaction: string, socialId: SocialID, date: Date) => Promise - attachBlob: (cardId: CardID, messageId: MessageID, data: BlobData, socialId: SocialID, date: Date) => Promise - detachBlob: (card: CardID, messageId: MessageID, blobId: BlobID, socialId: SocialID, date: Date) => Promise + attachBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise + detachBlobs: (card: CardID, messageId: MessageID, blobId: BlobID[], socialId: SocialID, date: Date) => Promise + setBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise - createLinkPreview: ( + attachLinkPreviews: ( cardId: CardID, messageId: MessageID, - data: LinkPreviewData, + data: (LinkPreviewData & { previewId: LinkPreviewID })[], socialId: SocialID, date: Date - ) => Promise - removeLinkPreview: (cardId: CardID, messageId: MessageID, id: LinkPreviewID) => Promise + ) => Promise + detachLinkPreviews: (cardId: CardID, messageId: MessageID, ids: LinkPreviewID[], socialId: SocialID, date: Date) => Promise + setLinkPreviews: (cardId: CardID, messageId: MessageID, data: (LinkPreviewData & { previewId: LinkPreviewID })[], socialId: SocialID, date: Date) => Promise - attachThread: (cardId: CardID, messageId: MessageID, threadId: CardID, threadType: CardType, date: Date) => Promise - removeThreads: (query: RemoveThreadQuery) => Promise - updateThread: (thread: CardID, update: ThreadUpdates) => Promise + attachThread: (cardId: CardID, messageId: MessageID, threadId: CardID, threadType: CardType, socialId: SocialID, date: Date) => Promise + removeThreads: (query: ThreadQuery) => Promise + updateThread: (cardId: CardID, messageId: MessageID, thread: CardID, update: ThreadUpdates, socialId: SocialID, date: Date) => Promise findMessages: (params: FindMessagesParams) => Promise findMessagesGroups: (params: FindMessagesGroupsParams) => Promise @@ -112,7 +111,7 @@ export interface DbAdapter { content: NotificationContent | undefined, created: Date ) => Promise - updateNotification: (query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise + updateNotification: (context: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise removeNotifications: (contextId: ContextID, account: AccountID, ids: NotificationID[]) => Promise createContext: ( @@ -123,7 +122,7 @@ export interface DbAdapter { lastNotify?: Date ) => Promise updateContext: 
(contextId: ContextID, account: AccountID, updates: NotificationContextUpdates) => Promise - removeContext: (id: ContextID, account: AccountID) => Promise + removeContext: (id: ContextID, account: AccountID) => Promise findNotificationContexts: (params: FindNotificationContextParams) => Promise findNotifications: (params: FindNotificationsParams) => Promise @@ -141,21 +140,20 @@ export interface DbAdapter { close: () => void } -export type RemoveThreadQuery = Partial> +export type ThreadQuery = Partial> export type RemoveLabelQuery = Partial> export interface UpdateNotificationQuery { - context: ContextID - account: AccountID type?: NotificationType id?: NotificationID - created?: Partial> | Date + untilDate?: Date } export type NotificationUpdates = Partial> export type NotificationContextUpdates = Partial> export interface ThreadUpdates { + messageId?: MessageID threadType?: CardType lastReply?: Date repliesCountOp?: 'increment' | 'decrement' diff --git a/packages/sdk-types/src/event.ts b/packages/sdk-types/src/event.ts deleted file mode 100644 index 58907fa122b..00000000000 --- a/packages/sdk-types/src/event.ts +++ /dev/null @@ -1,52 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
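As an aside on the reworked notification API above: the context and account are now explicit arguments of updateNotification, and the query filters by untilDate instead of the old created comparison. A minimal, hypothetical usage sketch (the db instance, the IDs, and the assumption that NotificationUpdates carries a read flag are illustrative only, not part of this patch):

import type { DbAdapter, NotificationUpdates, UpdateNotificationQuery } from '@hcengineering/communication-sdk-types'
import type { AccountID, ContextID } from '@hcengineering/communication-types'

// Sketch: mark every notification in a context as read up to "now".
async function markContextRead (db: DbAdapter, contextId: ContextID, account: AccountID): Promise<void> {
  const query: UpdateNotificationQuery = { untilDate: new Date() } // filter by date instead of a created comparison
  const updates: NotificationUpdates = { read: true } // assumes NotificationUpdates exposes the read flag
  await db.updateNotification(contextId, account, query, updates)
}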
-// - -import type { LabelRequestEvent, LabelRequestEventType } from './requestEvents/label' -import type { MessageEventResult, MessageRequestEvent, MessageRequestEventType } from './requestEvents/message' -import type { - NotificationEventResult, - NotificationRequestEvent, - NotificationRequestEventType -} from './requestEvents/notification' -import type { LabelResponseEvent, LabelResponseEventType } from './responseEvents/label' -import type { MessageResponseEvent, MessageResponseEventType } from './responseEvents/message' -import type { NotificationResponseEvent, NotificationResponseEventType } from './responseEvents/notification' -import type { CardRequestEvent, CardRequestEventType } from './requestEvents/card' -import type { CardResponseEvent, CardResponseEventType } from './responseEvents/card' - -export * from './requestEvents/message' -export * from './responseEvents/message' -export * from './requestEvents/notification' -export * from './responseEvents/notification' -export * from './requestEvents/label' -export * from './responseEvents/label' -export * from './requestEvents/card' -export * from './responseEvents/card' - -export type RequestEventType = - | MessageRequestEventType - | NotificationRequestEventType - | LabelRequestEventType - | CardRequestEventType -export type RequestEvent = MessageRequestEvent | NotificationRequestEvent | LabelRequestEvent | CardRequestEvent -// eslint-disable-next-line @typescript-eslint/ban-types -export type EventResult = MessageEventResult | NotificationEventResult | {} - -export type ResponseEventType = - | MessageResponseEventType - | NotificationResponseEventType - | LabelResponseEventType - | CardResponseEventType -export type ResponseEvent = MessageResponseEvent | NotificationResponseEvent | LabelResponseEvent | CardResponseEvent diff --git a/packages/sdk-types/src/requestEvents/card.ts b/packages/sdk-types/src/events/card.ts similarity index 69% rename from packages/sdk-types/src/requestEvents/card.ts rename to packages/sdk-types/src/events/card.ts index a090bb008f0..d96c8627908 100644 --- a/packages/sdk-types/src/requestEvents/card.ts +++ b/packages/sdk-types/src/events/card.ts @@ -15,28 +15,28 @@ import type { CardID, CardType, SocialID } from '@hcengineering/communication-types' -import type { BaseRequestEvent } from './common' +import type { BaseEvent } from './common' -export enum CardRequestEventType { +export enum CardEventType { // Internal UpdateCardType = 'updateCardType', RemoveCard = 'removeCard' } -export type CardRequestEvent = UpdateCardTypeEvent | RemoveCardEvent +export type CardEvent = UpdateCardTypeEvent | RemoveCardEvent // Internal -export interface UpdateCardTypeEvent extends BaseRequestEvent { - type: CardRequestEventType.UpdateCardType +export interface UpdateCardTypeEvent extends BaseEvent { + type: CardEventType.UpdateCardType cardId: CardID cardType: CardType socialId: SocialID - date: Date + date?: Date } -export interface RemoveCardEvent extends BaseRequestEvent { - type: CardRequestEventType.RemoveCard +export interface RemoveCardEvent extends BaseEvent { + type: CardEventType.RemoveCard cardId: CardID socialId: SocialID - date: Date + date?: Date } diff --git a/packages/sdk-types/src/requestEvents/common.ts b/packages/sdk-types/src/events/common.ts similarity index 94% rename from packages/sdk-types/src/requestEvents/common.ts rename to packages/sdk-types/src/events/common.ts index 54253821e3e..65351e68fc4 100644 --- a/packages/sdk-types/src/requestEvents/common.ts +++ 
b/packages/sdk-types/src/events/common.ts @@ -13,6 +13,6 @@ // limitations under the License. // -export interface BaseRequestEvent { +export interface BaseEvent { _id?: string } diff --git a/packages/sdk-types/src/events/event.ts b/packages/sdk-types/src/events/event.ts new file mode 100644 index 00000000000..a92d51b4029 --- /dev/null +++ b/packages/sdk-types/src/events/event.ts @@ -0,0 +1,27 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +import type { LabelEvent, LabelEventType } from './label' +import type { MessageEventResult, MessageEventType, MessageEvent } from './message' +import type { NotificationEventResult, NotificationEvent, NotificationEventType } from './notification' +import type { CardEvent, CardEventType } from './card' + +export * from './message' +export * from './notification' +export * from './label' +export * from './card' + +export type EventType = MessageEventType | NotificationEventType | LabelEventType | CardEventType +export type Event = MessageEvent | NotificationEvent | LabelEvent | CardEvent +// eslint-disable-next-line @typescript-eslint/ban-types +export type EventResult = MessageEventResult | NotificationEventResult | {} diff --git a/packages/sdk-types/src/requestEvents/label.ts b/packages/sdk-types/src/events/label.ts similarity index 61% rename from packages/sdk-types/src/requestEvents/label.ts rename to packages/sdk-types/src/events/label.ts index b4ca192f7f8..84f42e65812 100644 --- a/packages/sdk-types/src/requestEvents/label.ts +++ b/packages/sdk-types/src/events/label.ts @@ -13,33 +13,33 @@ // limitations under the License. 
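With request and response event types collapsed into the single Event union in events/event.ts, consumers can rely on the type discriminant for narrowing. An illustrative sketch (not part of this patch) of a type guard over the new union:

import { MessageEventType, type CreateMessageEvent, type Event } from '@hcengineering/communication-sdk-types'

// The `type` field is the discriminant shared by every member of the Event union.
function isCreateMessage (event: Event): event is CreateMessageEvent {
  return event.type === MessageEventType.CreateMessage
}

function describe (event: Event): string {
  // Inside the guard the compiler knows cardId (and content) exist.
  return isCreateMessage(event) ? `createMessage in ${event.cardId}` : event.type
}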
// -import type { CardID, AccountID, LabelID, CardType, SocialID } from '@hcengineering/communication-types' -import type { BaseRequestEvent } from './common' +import type { CardID, AccountID, LabelID, CardType } from '@hcengineering/communication-types' +import type { BaseEvent } from './common' -export enum LabelRequestEventType { +export enum LabelEventType { // Internal CreateLabel = 'createLabel', RemoveLabel = 'removeLabel' } -export type LabelRequestEvent = CreateLabelEvent | RemoveLabelEvent +export type LabelEvent = CreateLabelEvent | RemoveLabelEvent // Internal -export interface CreateLabelEvent extends BaseRequestEvent { - type: LabelRequestEventType.CreateLabel +export interface CreateLabelEvent extends BaseEvent { + type: LabelEventType.CreateLabel labelId: LabelID cardId: CardID cardType: CardType account: AccountID - socialId: SocialID - date: Date + + date?: Date } -export interface RemoveLabelEvent extends BaseRequestEvent { - type: LabelRequestEventType.RemoveLabel +export interface RemoveLabelEvent extends BaseEvent { + type: LabelEventType.RemoveLabel labelId: LabelID cardId: CardID account: AccountID - socialId: SocialID - date: Date + + date?: Date } diff --git a/packages/sdk-types/src/events/message.ts b/packages/sdk-types/src/events/message.ts new file mode 100644 index 00000000000..06000715a02 --- /dev/null +++ b/packages/sdk-types/src/events/message.ts @@ -0,0 +1,228 @@ +import type { + CardID, + MessageID, + Markdown, + SocialID, + BlobID, + MessageType, + CardType, + LinkPreviewID, + MessagesGroup, + MessageExtra, + BlobData, + LinkPreviewData +} from '@hcengineering/communication-types' + +import type { BaseEvent } from './common' + +export enum MessageEventType { + // Public events + CreateMessage = 'createMessage', + UpdatePatch = 'updatePatch', + RemovePatch = 'removePatch', + ReactionPatch = 'reactionPatch', + BlobPatch = 'blobPatch', + LinkPreviewPatch = 'linkPreviewPatch', + ThreadPatch = 'threadPatch', + + // Internal events + CreateMessagesGroup = 'createMessagesGroup', + RemoveMessagesGroup = 'removeMessagesGroup' +} + +export type PatchEvent = + | UpdatePatchEvent + | RemovePatchEvent + | ReactionPatchEvent + | BlobPatchEvent + | LinkPreviewPatchEvent + | ThreadPatchEvent + +export type MessageEvent = CreateMessageEvent | PatchEvent | CreateMessagesGroupEvent | RemoveMessagesGroupEvent + +export interface CreateMessageOptions { + // Available for regular users (Not implemented yet) + skipLinkPreviews?: boolean + // Available only for system + noNotify?: boolean +} +export interface UpdatePatchOptions { + // Available for regular users (Not implemented yet) + skipLinkPreviewsUpdate?: boolean +} + +export interface CreateMessageEvent extends BaseEvent { + type: MessageEventType.CreateMessage + + cardId: CardID + cardType: CardType + + messageId?: MessageID + messageType: MessageType + + content: Markdown + extra?: MessageExtra + + socialId: SocialID + date?: Date + + options?: CreateMessageOptions +} + +// Available for author and system +export interface UpdatePatchEvent extends BaseEvent { + type: MessageEventType.UpdatePatch + + cardId: CardID + messageId: MessageID + + content?: Markdown + extra?: MessageExtra + + socialId: SocialID + date?: Date + + options?: UpdatePatchOptions +} + +// Available for author and system +export interface RemovePatchEvent extends BaseEvent { + type: MessageEventType.RemovePatch + + cardId: CardID + messageId: MessageID + + socialId: SocialID + date?: Date +} + +export interface AddReactionOperation { + opcode: 'add' 
+ reaction: string +} + +export interface RemoveReactionOperation { + opcode: 'remove' + reaction: string +} + +// For any user +export interface ReactionPatchEvent extends BaseEvent { + type: MessageEventType.ReactionPatch + + cardId: CardID + messageId: MessageID + + operation: AddReactionOperation | RemoveReactionOperation + + socialId: SocialID + date?: Date +} + +export interface AttachBlobsOperation { + opcode: 'attach' + blobs: BlobData[] +} + +export interface DetachBlobsOperation { + opcode: 'detach' + blobIds: BlobID[] +} + +export interface SetBlobsOperation { + opcode: 'set' + blobs: BlobData[] +} + +// For system and message author +export interface BlobPatchEvent extends BaseEvent { + type: MessageEventType.BlobPatch + + cardId: CardID + messageId: MessageID + + operations: (AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation)[] + + socialId: SocialID + date?: Date +} + +// For any user +export interface AttachThreadOperation { + opcode: 'attach' + threadId: CardID + threadType: CardType +} + +// For system +export interface UpdateThreadOperation { + opcode: 'update' + threadId: CardID + updates: { + threadType?: CardType + repliesCountOp?: 'increment' | 'decrement' + lastReply?: Date + } +} + +export interface ThreadPatchEvent extends BaseEvent { + type: MessageEventType.ThreadPatch + + cardId: CardID + messageId: MessageID + + operation: AttachThreadOperation | UpdateThreadOperation + + socialId: SocialID + date?: Date +} + +export interface AttachLinkPreviewsOperation { + opcode: 'attach' + previews: (LinkPreviewData & { previewId: LinkPreviewID })[] +} + +export interface DetachLinkPreviewsOperation { + opcode: 'detach' + previewIds: LinkPreviewID[] +} + +export interface SetLinkPreviewsOperation { + opcode: 'set' + previews: (LinkPreviewData & { previewId: LinkPreviewID })[] +} + +// For system and message author +export interface LinkPreviewPatchEvent extends BaseEvent { + type: MessageEventType.LinkPreviewPatch + cardId: CardID + messageId: MessageID + + operations: (AttachLinkPreviewsOperation | DetachLinkPreviewsOperation | SetLinkPreviewsOperation)[] + + socialId: SocialID + date?: Date +} + +export interface CreateMessageResult { + messageId: MessageID + created: Date +} + +export type MessageEventResult = CreateMessageResult + +// Internal +export interface CreateMessagesGroupEvent extends BaseEvent { + type: MessageEventType.CreateMessagesGroup + group: MessagesGroup + socialId: SocialID + date?: Date +} + +export interface RemoveMessagesGroupEvent extends BaseEvent { + type: MessageEventType.RemoveMessagesGroup + cardId: CardID + blobId: BlobID + socialId: SocialID + date?: Date +} diff --git a/packages/sdk-types/src/requestEvents/notification.ts b/packages/sdk-types/src/events/notification.ts similarity index 63% rename from packages/sdk-types/src/requestEvents/notification.ts rename to packages/sdk-types/src/events/notification.ts index d6d98dcc941..97e50d69f69 100644 --- a/packages/sdk-types/src/requestEvents/notification.ts +++ b/packages/sdk-types/src/events/notification.ts @@ -13,7 +13,7 @@ // limitations under the License. 
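The patch events above batch several operations into one envelope, so a client can attach and detach blobs in a single round trip. A hypothetical construction sketch (IDs and blob data are placeholders; the server-side DateMiddleware fills in date when it is omitted):

import { MessageEventType, type BlobPatchEvent } from '@hcengineering/communication-sdk-types'
import type { BlobData, BlobID, CardID, MessageID, SocialID } from '@hcengineering/communication-types'

function buildBlobPatch (
  cardId: CardID,
  messageId: MessageID,
  toAttach: BlobData[],
  toDetach: BlobID[],
  socialId: SocialID
): BlobPatchEvent {
  return {
    type: MessageEventType.BlobPatch,
    cardId,
    messageId,
    // Operations are applied in order; a 'set' operation would replace the whole blob list instead.
    operations: [
      { opcode: 'attach', blobs: toAttach },
      { opcode: 'detach', blobIds: toDetach }
    ],
    socialId
  }
}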
// -import type { +import { CardID, ContextID, MessageID, @@ -24,10 +24,9 @@ import type { NotificationID, SocialID } from '@hcengineering/communication-types' -import type { BaseRequestEvent } from './common' -import type { UpdateNotificationQuery } from '../db.ts' +import type { BaseEvent } from './common' -export enum NotificationRequestEventType { +export enum NotificationEventType { AddCollaborators = 'addCollaborators', RemoveCollaborators = 'removeCollaborators', @@ -40,7 +39,7 @@ export enum NotificationRequestEventType { UpdateNotificationContext = 'updateNotificationContext' } -export type NotificationRequestEvent = +export type NotificationEvent = | AddCollaboratorsEvent | CreateNotificationContextEvent | CreateNotificationEvent @@ -50,10 +49,11 @@ export type NotificationRequestEvent = | RemoveNotificationsEvent | UpdateNotificationContextEvent -export interface CreateNotificationEvent extends BaseRequestEvent { - type: NotificationRequestEventType.CreateNotification +export interface CreateNotificationEvent extends BaseEvent { + type: NotificationEventType.CreateNotification + notificationId?: NotificationID notificationType: NotificationType - read?: boolean + read: boolean content?: NotificationContent cardId: CardID contextId: ContextID @@ -61,53 +61,57 @@ export interface CreateNotificationEvent extends BaseRequestEvent { messageCreated: Date account: AccountID - socialId: SocialID - date: Date + date?: Date } -export interface UpdateNotificationEvent extends BaseRequestEvent { - type: NotificationRequestEventType.UpdateNotification - query: UpdateNotificationQuery +export interface UpdateNotificationEvent extends BaseEvent { + type: NotificationEventType.UpdateNotification + contextId: ContextID + account: AccountID + query: { + type?: NotificationType + id?: NotificationID + untilDate?: Date + } updates: { read: boolean } - socialId?: SocialID + date?: Date } -export interface RemoveNotificationsEvent extends BaseRequestEvent { - type: NotificationRequestEventType.RemoveNotifications +export interface RemoveNotificationsEvent extends BaseEvent { + type: NotificationEventType.RemoveNotifications contextId: ContextID account: AccountID ids: NotificationID[] - socialId?: SocialID date?: Date } -export interface CreateNotificationContextEvent extends BaseRequestEvent { - type: NotificationRequestEventType.CreateNotificationContext +export interface CreateNotificationContextEvent extends BaseEvent { + type: NotificationEventType.CreateNotificationContext + contextId?: ContextID cardId: CardID account: AccountID + lastView: Date lastUpdate: Date lastNotify?: Date - socialId: SocialID - date: Date + date?: Date } -export interface RemoveNotificationContextEvent extends BaseRequestEvent { - type: NotificationRequestEventType.RemoveNotificationContext +export interface RemoveNotificationContextEvent extends BaseEvent { + type: NotificationEventType.RemoveNotificationContext contextId: ContextID account: AccountID - socialId?: SocialID date?: Date } -export interface UpdateNotificationContextEvent extends BaseRequestEvent { - type: NotificationRequestEventType.UpdateNotificationContext +export interface UpdateNotificationContextEvent extends BaseEvent { + type: NotificationEventType.UpdateNotificationContext contextId: ContextID account: AccountID updates: { @@ -115,25 +119,27 @@ export interface UpdateNotificationContextEvent extends BaseRequestEvent { lastUpdate?: Date lastNotify?: Date } - socialId?: SocialID + date?: Date } -export interface AddCollaboratorsEvent extends 
BaseRequestEvent { - type: NotificationRequestEventType.AddCollaborators +export interface AddCollaboratorsEvent extends BaseEvent { + type: NotificationEventType.AddCollaborators cardId: CardID cardType: CardType collaborators: AccountID[] - socialId?: SocialID + + socialId: SocialID date?: Date } -export interface RemoveCollaboratorsEvent extends BaseRequestEvent { - type: NotificationRequestEventType.RemoveCollaborators +export interface RemoveCollaboratorsEvent extends BaseEvent { + type: NotificationEventType.RemoveCollaborators cardId: CardID cardType: CardType collaborators: AccountID[] - socialId?: SocialID + + socialId: SocialID date?: Date } diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index ed12f5cd6e2..0cab58460d9 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -16,5 +16,5 @@ export type * from './client' export type * from './db' export type * from './query' -export * from './event' +export * from './events/event' export type * from './serverApi' diff --git a/packages/sdk-types/src/requestEvents/message.ts b/packages/sdk-types/src/requestEvents/message.ts deleted file mode 100644 index 3d8e91dd185..00000000000 --- a/packages/sdk-types/src/requestEvents/message.ts +++ /dev/null @@ -1,256 +0,0 @@ -import type { - CardID, - MessageID, - Markdown, - SocialID, - BlobID, - MessageType, - CardType, - LinkPreviewID, - PatchType, - PatchData, - MessagesGroup, - MessageExtra, - BlobData, - LinkPreviewData -} from '@hcengineering/communication-types' - -import type { BaseRequestEvent } from './common' - -export enum MessageRequestEventType { - // Public events - CreateMessage = 'createMessage', - CreatePatch = 'createPatch', - // UpdateMessage = 'updateMessage', - // RemoveMessage = 'removeMessage', - - AttachThread = 'attachThread', - - SetReaction = 'setReaction', - RemoveReaction = 'removeReaction', - - AttachBlob = 'attachBlob', - DetachBlob = 'removeBlob', - - CreateLinkPreview = 'createLinkPreview', - RemoveLinkPreview = 'removeLinkPreview', - - // Internal events - UpdateThread = 'updateThread', - - CreateMessagesGroup = 'createMessagesGroup', - RemoveMessagesGroup = 'removeMessagesGroup' -} - -export type MessageRequestEvent = - | CreateMessageEvent - | SetReactionEvent - | RemoveReactionEvent - | AttachBlobEvent - | DetachBlobEvent - | CreateLinkPreviewEvent - | RemoveLinkPreviewEvent - | CreatePatchEvent - | UpdateThreadEvent - | CreateMessagesGroupEvent - | RemoveMessagesGroupEvent - | AttachThreadEvent - -export interface CreateMessageOptions { - // Available for regular users (Not implemented yet) - skipLinkPreviews?: boolean - // Available only for system - noNotify?: boolean -} -export interface PatchMessageOptions { - // Available for regular users (Not implemented yet) - skipLinkPreviewsUpdate?: boolean - // Available only for system (Not implemented yet) - markAsUpdated?: boolean -} - -export interface CreateMessageEvent extends BaseRequestEvent { - type: MessageRequestEventType.CreateMessage - - cardId: CardID - cardType: CardType - - messageId?: MessageID - messageType: MessageType - - content: Markdown - extra?: MessageExtra - - socialId?: SocialID - date?: Date - - options?: CreateMessageOptions -} - -export interface CreatePatchEvent extends BaseRequestEvent { - type: MessageRequestEventType.CreatePatch - cardId: CardID - messageId: MessageID - - patchType: PatchType - data: PatchData - - socialId?: SocialID - date?: Date - - options?: PatchMessageOptions -} - -// export interface 
UpdateMessageEvent extends BaseRequestEvent { -// type: MessageRequestEventType.UpdateMessage -// -// cardId: CardID -// messageId: MessageID -// -// content?: Markdown -// extra?: MessageExtra -// -// socialId?: SocialID -// date?: Date -// -// options?: UpdateMessageOptions -// } -// -// export interface RemoveMessageEvent extends BaseRequestEvent { -// type: MessageRequestEventType.RemoveMessage -// -// cardId: CardID -// messageId: MessageID -// -// socialId?: SocialID -// date?: Date -// -// options?: RemoveMessageOptions -// } - -export interface AttachThreadEvent extends BaseRequestEvent { - type: MessageRequestEventType.AttachThread - - cardId: CardID - messageId: MessageID - - threadId: CardID - threadType: CardType - - socialId?: SocialID - date?: Date -} - -export interface SetReactionEvent extends BaseRequestEvent { - type: MessageRequestEventType.SetReaction - - cardId: CardID - messageId: MessageID - - reaction: string - - socialId?: SocialID - date?: Date -} - -export interface RemoveReactionEvent extends BaseRequestEvent { - type: MessageRequestEventType.RemoveReaction - - cardId: CardID - messageId: MessageID - - reaction: string - - socialId?: SocialID - date?: Date -} - -export interface AttachBlobEvent extends BaseRequestEvent { - type: MessageRequestEventType.AttachBlob - - cardId: CardID - messageId: MessageID - - blobData: BlobData - - socialId?: SocialID - date?: Date -} - -export interface DetachBlobEvent extends BaseRequestEvent { - type: MessageRequestEventType.DetachBlob - - cardId: CardID - messageId: MessageID - - blobId: BlobID - - socialId?: SocialID - date?: Date -} - -export interface CreateLinkPreviewEvent extends BaseRequestEvent { - previewId?: string - type: MessageRequestEventType.CreateLinkPreview - - cardId: CardID - messageId: MessageID - - previewData: LinkPreviewData - - socialId?: SocialID - date?: Date -} - -export interface RemoveLinkPreviewEvent extends BaseRequestEvent { - type: MessageRequestEventType.RemoveLinkPreview - - cardId: CardID - messageId: MessageID - - previewId: LinkPreviewID - - socialId?: SocialID - date?: Date -} - -export interface CreateMessageResult { - messageId: MessageID - created: Date -} - -export interface CreateLinkPreviewResult { - previewId: MessageID - created: Date -} - -export type MessageEventResult = CreateMessageResult | CreateLinkPreviewResult - -// Internal -export interface UpdateThreadEvent extends BaseRequestEvent { - type: MessageRequestEventType.UpdateThread - cardId: CardID - messageId: MessageID - threadId: CardID - updates: { - repliesCountOp: 'increment' | 'decrement' - lastReply?: Date - } - socialId: SocialID - date: Date -} - -export interface CreateMessagesGroupEvent extends BaseRequestEvent { - type: MessageRequestEventType.CreateMessagesGroup - group: MessagesGroup - socialId: SocialID - date?: Date -} - -export interface RemoveMessagesGroupEvent extends BaseRequestEvent { - type: MessageRequestEventType.RemoveMessagesGroup - cardId: CardID - blobId: BlobID - socialId: SocialID - date?: Date -} diff --git a/packages/sdk-types/src/responseEvents/card.ts b/packages/sdk-types/src/responseEvents/card.ts deleted file mode 100644 index 7e35be45ad3..00000000000 --- a/packages/sdk-types/src/responseEvents/card.ts +++ /dev/null @@ -1,40 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import type { CardID, CardType, SocialID } from '@hcengineering/communication-types' - -import type { BaseResponseEvent } from './common' - -export enum CardResponseEventType { - CardTypeUpdated = 'cardTypeUpdated', - CardRemoved = 'cardRemoved' -} - -export type CardResponseEvent = CardTypeUpdatedEvent | CardRemovedEvent - -export interface CardTypeUpdatedEvent extends BaseResponseEvent { - type: CardResponseEventType.CardTypeUpdated - cardId: CardID - cardType: CardType - socialId: SocialID - date: Date -} - -export interface CardRemovedEvent extends BaseResponseEvent { - type: CardResponseEventType.CardRemoved - cardId: CardID - socialId: SocialID - date: Date -} diff --git a/packages/sdk-types/src/responseEvents/common.ts b/packages/sdk-types/src/responseEvents/common.ts deleted file mode 100644 index aa4d4ff2460..00000000000 --- a/packages/sdk-types/src/responseEvents/common.ts +++ /dev/null @@ -1,18 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -export interface BaseResponseEvent { - _id?: string -} diff --git a/packages/sdk-types/src/responseEvents/label.ts b/packages/sdk-types/src/responseEvents/label.ts deleted file mode 100644 index 206fa898139..00000000000 --- a/packages/sdk-types/src/responseEvents/label.ts +++ /dev/null @@ -1,37 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import type { CardID, AccountID, LabelID, Label } from '@hcengineering/communication-types' - -import type { BaseResponseEvent } from './common' - -export enum LabelResponseEventType { - LabelCreated = 'labelCreated', - LabelRemoved = 'labelRemoved' -} - -export type LabelResponseEvent = LabelCreatedEvent | LabelRemovedEvent - -export interface LabelCreatedEvent extends BaseResponseEvent { - type: LabelResponseEventType.LabelCreated - label: Label -} - -export interface LabelRemovedEvent extends BaseResponseEvent { - type: LabelResponseEventType.LabelRemoved - labelId: LabelID - cardId: CardID - account: AccountID -} diff --git a/packages/sdk-types/src/responseEvents/message.ts b/packages/sdk-types/src/responseEvents/message.ts deleted file mode 100644 index da1a3531074..00000000000 --- a/packages/sdk-types/src/responseEvents/message.ts +++ /dev/null @@ -1,194 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. -// - -import type { - CardID, - Message, - MessageID, - Patch, - Reaction, - SocialID, - Thread, - MessagesGroup, - BlobID, - CardType, - LinkPreview, - LinkPreviewID, - AttachedBlob -} from '@hcengineering/communication-types' -import type { BaseResponseEvent } from './common' - -export enum MessageResponseEventType { - // Public events - MessageCreated = 'messageCreated', - PatchCreated = 'patchCreated', - // MessageUpdated = 'messageUpdated', - // MessageRemoved = 'messageRemoved', - - ThreadAttached = 'threadAttached', - ThreadUpdated = 'threadUpdated', - - ReactionSet = 'reactionSet', - ReactionRemoved = 'reactionRemoved', - - BlobAttached = 'blobAttached', - BlobDetached = 'blobDetached', - - LinkPreviewCreated = 'linkPreviewCreated', - LinkPreviewRemoved = 'linkPreviewRemoved', - - // Internal events - MessagesGroupCreated = 'messagesGroupCreated', - MessagesGroupRemoved = 'messagesGroupRemoved' -} - -export type MessageResponseEvent = - | MessageCreatedEvent - | ReactionSetEvent - | ReactionRemovedEvent - | BlobAttachedEvent - | BlobDetachedEvent - | LinkPreviewCreatedEvent - | LinkPreviewRemovedEvent - | ThreadAttachedEvent - | ThreadUpdatedEvent - | PatchCreatedEvent - | MessagesGroupCreatedEvent - | MessagesGroupRemovedEvent - -interface CreateMessageOptions { - // Available for regular users (Not implemented yet) - skipLinkPreviews?: boolean - // Available only for system - noNotify?: boolean -} -interface PatchMessageOptions { - // Available for regular users (Not implemented yet) - skipLinkPreviewsUpdate?: boolean - // Available only for system (Not implemented yet) - markAsUpdated?: boolean -} - -// Public -export interface MessageCreatedEvent extends BaseResponseEvent { - type: MessageResponseEventType.MessageCreated - cardId: CardID - cardType: CardType - message: Message - options?: CreateMessageOptions -} - -export interface PatchCreatedEvent extends BaseResponseEvent { - type: MessageResponseEventType.PatchCreated - cardId: CardID - messageId: MessageID - messageCreated: Date - patch: Patch - 
options?: PatchMessageOptions -} - -// export interface MessageUpdatedEvent extends BaseResponseEvent { -// type: MessageResponseEventType.MessageUpdated -// cardId: CardID -// messageId: MessageID -// content?: Markdown -// extra?: MessageExtra -// date: Date -// socialId: SocialID -// } -// -// export interface MessageRemovedEvent extends BaseResponseEvent { -// type: MessageResponseEventType.MessageRemoved -// cardId: CardID -// messageId: MessageID -// date: Date -// socialId: SocialID -// } - -export interface ReactionSetEvent extends BaseResponseEvent { - type: MessageResponseEventType.ReactionSet - cardId: CardID - messageId: MessageID - reaction: Reaction -} - -export interface ReactionRemovedEvent extends BaseResponseEvent { - type: MessageResponseEventType.ReactionRemoved - cardId: CardID - messageId: MessageID - reaction: string - socialId: SocialID - date: Date -} - -export interface BlobAttachedEvent extends BaseResponseEvent { - type: MessageResponseEventType.BlobAttached - cardId: CardID - messageId: MessageID - blob: AttachedBlob -} - -export interface BlobDetachedEvent extends BaseResponseEvent { - type: MessageResponseEventType.BlobDetached - cardId: CardID - messageId: MessageID - blobId: BlobID - socialId: SocialID - date: Date -} - -export interface LinkPreviewCreatedEvent extends BaseResponseEvent { - type: MessageResponseEventType.LinkPreviewCreated - cardId: CardID - messageId: MessageID - linkPreview: LinkPreview -} - -export interface LinkPreviewRemovedEvent extends BaseResponseEvent { - type: MessageResponseEventType.LinkPreviewRemoved - cardId: CardID - messageId: MessageID - previewId: LinkPreviewID -} - -export interface ThreadAttachedEvent extends BaseResponseEvent { - type: MessageResponseEventType.ThreadAttached - cardId: CardID - messageId: MessageID - thread: Thread -} - -export interface ThreadUpdatedEvent extends BaseResponseEvent { - type: MessageResponseEventType.ThreadUpdated - cardId: CardID - messageId: MessageID - threadId: CardID - updates: { - repliesCountOp: 'increment' | 'decrement' - lastReply?: Date - } -} - -// Internal -export interface MessagesGroupCreatedEvent extends BaseResponseEvent { - type: MessageResponseEventType.MessagesGroupCreated - group: MessagesGroup -} - -export interface MessagesGroupRemovedEvent extends BaseResponseEvent { - type: MessageResponseEventType.MessagesGroupRemoved - cardId: CardID - blobId: BlobID -} diff --git a/packages/sdk-types/src/responseEvents/notification.ts b/packages/sdk-types/src/responseEvents/notification.ts deleted file mode 100644 index d53d6edfe84..00000000000 --- a/packages/sdk-types/src/responseEvents/notification.ts +++ /dev/null @@ -1,105 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -import type { - CardID, - ContextID, - NotificationContext, - Notification, - AccountID, - CardType, - NotificationID, - SocialID -} from '@hcengineering/communication-types' -import type { BaseResponseEvent } from './common' -import type { NotificationUpdates, UpdateNotificationQuery } from '../db' - -export enum NotificationResponseEventType { - NotificationCreated = 'notificationCreated', - NotificationUpdated = 'notificationUpdated', - NotificationsRemoved = 'notificationsRemoved', - - NotificationContextCreated = 'notificationContextCreated', - NotificationContextRemoved = 'notificationContextRemoved', - NotificationContextUpdated = 'notificationContextUpdated', - - AddedCollaborators = 'addedCollaborators', - RemovedCollaborators = 'removedCollaborators' -} - -export type NotificationResponseEvent = - | NotificationCreatedEvent - | NotificationUpdatedEvent - | NotificationsRemovedEvent - | NotificationContextCreatedEvent - | NotificationContextRemovedEvent - | NotificationContextUpdatedEvent - | AddedCollaboratorsEvent - | RemovedCollaboratorsEvent - -export interface NotificationCreatedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationCreated - notification: Notification -} - -export interface NotificationUpdatedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationUpdated - query: UpdateNotificationQuery - updates: NotificationUpdates -} - -export interface NotificationsRemovedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationsRemoved - contextId: ContextID - account: AccountID - ids: NotificationID[] -} - -export interface NotificationContextCreatedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationContextCreated - context: NotificationContext -} - -export interface NotificationContextRemovedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationContextRemoved - context: NotificationContext -} - -export interface NotificationContextUpdatedEvent extends BaseResponseEvent { - type: NotificationResponseEventType.NotificationContextUpdated - contextId: ContextID - account: AccountID - lastView?: Date - lastUpdate?: Date - lastNotify?: Date -} - -export interface AddedCollaboratorsEvent extends BaseResponseEvent { - type: NotificationResponseEventType.AddedCollaborators - cardId: CardID - cardType: CardType - collaborators: AccountID[] - socialId: SocialID - date: Date -} - -export interface RemovedCollaboratorsEvent extends BaseResponseEvent { - type: NotificationResponseEventType.RemovedCollaborators - cardId: CardID - cardType: CardType - collaborators: AccountID[] - socialId: SocialID - date: Date -} diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 91923b366f9..5ba994cbf9d 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -29,7 +29,7 @@ import type { } from '@hcengineering/communication-types' import type { Account } from '@hcengineering/core' -import type { EventResult, RequestEvent } from './event' +import type { EventResult, Event } from './events/event' export interface SessionData { sessionId?: string @@ -53,7 +53,7 @@ export interface ServerApi { findLabels: (session: SessionData, params: FindLabelsParams) => Promise findCollaborators: (session: SessionData, params: FindCollaboratorsParams) => Promise - event: (session: SessionData, event: RequestEvent) => Promise + event: (session: SessionData, event: Event) => Promise 
unsubscribeQuery: (session: SessionData, id: number) => Promise diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index b368a95822c..0f12c48ccf0 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -30,7 +30,7 @@ import type { Collaborator } from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' -import type { EventResult, RequestEvent, ServerApi, SessionData } from '@hcengineering/communication-sdk-types' +import type { EventResult, Event, ServerApi, SessionData } from '@hcengineering/communication-sdk-types' import { getMetadata } from './metadata' import type { BroadcastSessionsFunc, QueryId } from './types' @@ -94,7 +94,7 @@ export class Api implements ServerApi { await this.middlewares.unsubscribeQuery(session, id) } - async event (session: SessionData, event: RequestEvent): Promise { + async event (session: SessionData, event: Event): Promise { return await this.middlewares.event(session, event) } diff --git a/packages/server/src/messageId.ts b/packages/server/src/messageId.ts new file mode 100644 index 00000000000..8cc6fb7a6f1 --- /dev/null +++ b/packages/server/src/messageId.ts @@ -0,0 +1,34 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +import { MessageID } from '@hcengineering/communication-types' + +const EPOCH_OFFSET_US = BigInt(Date.UTC(2022, 0, 1)) * 1000n +const monoStartNs = process.hrtime.bigint() +const realStartUs = BigInt(Date.now()) * 1000n +let lastTick = 0n + +function getMonotonicTick10us (): bigint { + const nowNs = process.hrtime.bigint() + const deltaUs = (nowNs - monoStartNs) / 1000n + const absUs = realStartUs + deltaUs + const relUs = absUs > EPOCH_OFFSET_US ? absUs - EPOCH_OFFSET_US : 0n + const candidate = relUs / 10n + const tick = candidate <= lastTick ? lastTick + 1n : candidate + lastTick = tick + return tick & ((1n << 64n) - 1n) +} + +export function generateMessageId (): MessageID { + return getMonotonicTick10us().toString() as MessageID +} diff --git a/packages/server/src/middleware/base.ts b/packages/server/src/middleware/base.ts index a45f3cd5fdd..5e7222f8f98 100644 --- a/packages/server/src/middleware/base.ts +++ b/packages/server/src/middleware/base.ts @@ -13,12 +13,7 @@ // limitations under the License. 
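A note on the generator above: ticks are 10µs units counted from 2022-01-01 UTC and made strictly monotonic per process, so IDs sort by creation order and an approximate timestamp can be recovered arithmetically. A small illustrative sketch (the relative import path and the decoding helper are assumptions, not part of this patch):

import { generateMessageId } from './messageId'

// Consecutive IDs are strictly increasing even when generated within the same 10µs tick.
const first = BigInt(generateMessageId())
const second = BigInt(generateMessageId())
console.log(second > first) // true

// Each unit is 10µs since 2022-01-01 UTC, so dividing by 100 yields milliseconds.
function approxDate (id: string): Date {
  return new Date(Number(BigInt(id) / 100n) + Date.UTC(2022, 0, 1))
}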
// -import { - type EventResult, - type RequestEvent, - type ResponseEvent, - type SessionData -} from '@hcengineering/communication-sdk-types' +import { type EventResult, type Event, type SessionData } from '@hcengineering/communication-sdk-types' import type { FindMessagesGroupsParams, FindMessagesParams, @@ -78,11 +73,11 @@ export class BaseMiddleware implements Middleware { return await this.provideFindCollaborators(session, params) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { return await this.provideEvent(session, event, derived) } - async response (session: SessionData, event: ResponseEvent, derived: boolean): Promise { + async response (session: SessionData, event: Enriched, derived: boolean): Promise { await this.provideResponse(session, event, derived) } @@ -95,11 +90,7 @@ export class BaseMiddleware implements Middleware { close (): void {} closeSession (sessionId: string): void {} - protected async provideEvent ( - session: SessionData, - event: Enriched, - derived: boolean - ): Promise { + protected async provideEvent (session: SessionData, event: Enriched, derived: boolean): Promise { if (this.next !== undefined) { return await this.next.event(session, event, derived) } @@ -171,7 +162,7 @@ export class BaseMiddleware implements Middleware { return [] } - protected async provideResponse (session: SessionData, event: ResponseEvent, derived: boolean): Promise { + protected async provideResponse (session: SessionData, event: Enriched, derived: boolean): Promise { if (this.next !== undefined) { await this.next.response(session, event, derived) } diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index a69fda4ba5c..303966d358f 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -14,13 +14,12 @@ // import { - CardResponseEventType, + CardEventType, + type Event, type EventResult, - LabelResponseEventType, - MessageResponseEventType, - NotificationResponseEventType, - type RequestEvent, - type ResponseEvent, + LabelEventType, + MessageEventType, + NotificationEventType, type SessionData } from '@hcengineering/communication-sdk-types' import type { @@ -106,7 +105,7 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { return await this.provideFindLabels(session, params, queryId) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { this.createSession(session) return await this.provideEvent(session, event, derived) } @@ -120,7 +119,7 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { data.contextQueries.delete(queryId) } - async response (session: SessionData, event: ResponseEvent, derived: boolean): Promise { + async response (session: SessionData, event: Enriched, derived: boolean): Promise { const sessionIds: string[] = [] for (const [sessionId, session] of this.dataBySessionId.entries()) { if (this.match(event, session)) { @@ -177,58 +176,44 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { return this.dataBySessionId.get(id) } - private match (event: ResponseEvent, info: SessionInfo): boolean { + private match (event: Enriched, info: SessionInfo): boolean { switch (event.type) { - case MessageResponseEventType.MessageCreated: + case 
MessageEventType.CreateMessage: + if (event.messageId == null) return false return this.matchMessagesQuery( - { ids: [event.message.id], card: event.cardId }, + { ids: [event.messageId], card: event.cardId }, Array.from(info.messageQueries.values()), new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) - case MessageResponseEventType.PatchCreated: + case MessageEventType.ThreadPatch: + case MessageEventType.LinkPreviewPatch: + case MessageEventType.ReactionPatch: + case MessageEventType.BlobPatch: + case MessageEventType.RemovePatch: + case MessageEventType.UpdatePatch: return this.matchMessagesQuery( { card: event.cardId, ids: [event.messageId] }, Array.from(info.messageQueries.values()), new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) - case MessageResponseEventType.ReactionSet: - case MessageResponseEventType.ReactionRemoved: - case MessageResponseEventType.LinkPreviewCreated: - case MessageResponseEventType.LinkPreviewRemoved: - case MessageResponseEventType.BlobAttached: - case MessageResponseEventType.BlobDetached: - case MessageResponseEventType.ThreadAttached: - return this.matchMessagesQuery( - { card: event.cardId, ids: [event.messageId] }, - Array.from(info.messageQueries.values()), - new Set() - ) - case NotificationResponseEventType.NotificationCreated: - return info.account === event.notification.account - case NotificationResponseEventType.NotificationsRemoved: - return info.account === event.account - case NotificationResponseEventType.NotificationUpdated: - return info.account === event.query.account - case NotificationResponseEventType.NotificationContextCreated: - return info.account === event.context.account - case NotificationResponseEventType.NotificationContextRemoved: - return info.account === event.context.account - case NotificationResponseEventType.NotificationContextUpdated: + case NotificationEventType.RemoveNotifications: + case NotificationEventType.CreateNotification: + case NotificationEventType.UpdateNotification: + case NotificationEventType.RemoveNotificationContext: + case NotificationEventType.UpdateNotificationContext: + case NotificationEventType.CreateNotificationContext: return info.account === event.account - case MessageResponseEventType.MessagesGroupCreated: - case MessageResponseEventType.MessagesGroupRemoved: + case MessageEventType.CreateMessagesGroup: + case MessageEventType.RemoveMessagesGroup: return false - case NotificationResponseEventType.RemovedCollaborators: - case NotificationResponseEventType.AddedCollaborators: - return true - case MessageResponseEventType.ThreadUpdated: + case NotificationEventType.RemoveCollaborators: + case NotificationEventType.AddCollaborators: return true - case LabelResponseEventType.LabelCreated: - return info.account === event.label.account - case LabelResponseEventType.LabelRemoved: + case LabelEventType.CreateLabel: + case LabelEventType.RemoveLabel: return info.account === event.account - case CardResponseEventType.CardTypeUpdated: - case CardResponseEventType.CardRemoved: + case CardEventType.UpdateCardType: + case CardEventType.RemoveCard: return true } } diff --git a/packages/server/src/middleware/date.ts b/packages/server/src/middleware/date.ts index fc6fe1b5a45..e6b94b6162d 100644 --- a/packages/server/src/middleware/date.ts +++ b/packages/server/src/middleware/date.ts @@ -13,18 +13,11 @@ // limitations under the License. 
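To summarize the routing the rewritten match applies: message patches are matched against the session's message queries, while notification, context and label events fan out per account. A simplified, illustrative sketch of the per-account rule (names are placeholders, not the middleware's actual API):

import { NotificationEventType, type Event } from '@hcengineering/communication-sdk-types'
import type { AccountID } from '@hcengineering/communication-types'

// Simplified: notification and context events are delivered only to the owning account;
// other event kinds fall through to the query-based or broadcast-to-all rules above.
function deliverToAccount (event: Event, sessionAccount: AccountID): boolean {
  switch (event.type) {
    case NotificationEventType.CreateNotification:
    case NotificationEventType.UpdateNotification:
    case NotificationEventType.RemoveNotifications:
    case NotificationEventType.CreateNotificationContext:
    case NotificationEventType.UpdateNotificationContext:
    case NotificationEventType.RemoveNotificationContext:
      return sessionAccount === event.account
    default:
      return true
  }
}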
// -import { - type EventResult, - MessageRequestEventType, - type RequestEvent, - type SessionData -} from '@hcengineering/communication-sdk-types' +import { type EventResult, type Event, type SessionData } from '@hcengineering/communication-sdk-types' import { systemAccountUuid } from '@hcengineering/core' -import { generateMessageId, isExternalMessageId, messageIdToDate } from '@hcengineering/communication-cockroach' import type { Middleware, MiddlewareContext, Enriched } from '../types' import { BaseMiddleware } from './base' -import { ApiError } from '../error' export class DateMiddleware extends BaseMiddleware implements Middleware { constructor ( @@ -34,24 +27,10 @@ export class DateMiddleware extends BaseMiddleware implements Middleware { super(context, next) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { const canSetDate = derived || this.isSystem(session) - if (event.type === MessageRequestEventType.CreateMessage) { - if (event.messageId != null && !derived && !isExternalMessageId(event.messageId)) { - throw ApiError.badRequest('External message id must be 64 bit signed integer. And has 01 first bit set') - } - if (event.messageId == null && (event.date == null || !canSetDate)) { - event.messageId = generateMessageId() - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - event.date = messageIdToDate(event.messageId)! - } else if (event.messageId != null && event.date == null) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - event.date = isExternalMessageId(event.messageId) ? new Date() : messageIdToDate(event.messageId)! - } else if (event.messageId == null && event.date != null) { - event.messageId = generateMessageId(true) - } - } else if (!canSetDate || event.date == null) { + if (!canSetDate || event.date == null) { event.date = new Date() } diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index dbc5895db78..ff646bd5a9c 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -14,6 +14,7 @@ // import { + CardID, type Collaborator, type FindCollaboratorsParams, type FindLabelsParams, @@ -23,73 +24,51 @@ import { type FindNotificationsParams, type Label, type Message, + MessageID, type MessagesGroup, type Notification, type NotificationContext, - Patch, PatchType, - type Reaction + SocialID, + UpdatePatchData } from '@hcengineering/communication-types' import { type AddCollaboratorsEvent, - type AttachBlobEvent, - type AttachThreadEvent, - type BlobAttachedEvent, - type BlobDetachedEvent, - CardRequestEventType, - CardResponseEventType, + BlobPatchEvent, + CardEventType, type CreateLabelEvent, - type CreateLinkPreviewEvent, type CreateMessageEvent, type CreateMessagesGroupEvent, type CreateNotificationContextEvent, type CreateNotificationEvent, - type CreatePatchEvent, type DbAdapter, - type DetachBlobEvent, + type Event, type EventResult, - LabelRequestEventType, - LabelResponseEventType, - type LinkPreviewCreatedEvent, - type LinkPreviewRemovedEvent, - type MessageCreatedEvent, - MessageRequestEventType, - MessageResponseEventType, - type MessagesGroupCreatedEvent, - type NotificationContextCreatedEvent, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent, - NotificationRequestEventType, - NotificationResponseEventType, - type NotificationsRemovedEvent, - type NotificationUpdatedEvent, - PatchCreatedEvent, - 
type ReactionRemovedEvent, - type ReactionSetEvent, + LabelEventType, + LinkPreviewPatchEvent, + MessageEventType, + NotificationEventType, + ReactionPatchEvent, type RemoveCardEvent, type RemoveCollaboratorsEvent, type RemoveLabelEvent, - type RemoveLinkPreviewEvent, type RemoveMessagesGroupEvent, type RemoveNotificationContextEvent, type RemoveNotificationsEvent, - type RemoveReactionEvent, - type RequestEvent, - type ResponseEvent, + RemovePatchEvent, type SessionData, - type SetReactionEvent, - type ThreadAttachedEvent, type UpdateCardTypeEvent, type UpdateNotificationContextEvent, type UpdateNotificationEvent, - type UpdateThreadEvent + UpdatePatchEvent, + ThreadPatchEvent } from '@hcengineering/communication-sdk-types' import type { Enriched, Middleware, MiddlewareContext } from '../types' import { BaseMiddleware } from './base' interface Result { - responseEvent?: ResponseEvent + response?: Enriched result?: EventResult } @@ -129,75 +108,69 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return await this.db.findCollaborators(params) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { const result = await this.processEvent(session, event) - if (result.responseEvent != null) { - void this.context.head?.response(session, result.responseEvent, derived) + if (result.response != null) { + void this.context.head?.response(session, result.response, derived) } return result.result ?? {} } - private async processEvent (session: SessionData, event: Enriched): Promise { + private async processEvent (session: SessionData, event: Enriched): Promise { switch (event.type) { // Messages - case MessageRequestEventType.CreateMessage: + case MessageEventType.CreateMessage: return await this.createMessage(event) - case MessageRequestEventType.CreatePatch: - return await this.createPatch(event) - case MessageRequestEventType.SetReaction: - return await this.setReaction(event) - case MessageRequestEventType.RemoveReaction: - return await this.removeReaction(event) - case MessageRequestEventType.AttachBlob: - return await this.attachBlob(event) - case MessageRequestEventType.DetachBlob: - return await this.detachBlob(event) - case MessageRequestEventType.CreateLinkPreview: - return await this.createLinkPreview(event) - case MessageRequestEventType.RemoveLinkPreview: - return await this.removeLinkPreview(event) - case MessageRequestEventType.AttachThread: - return await this.attachThread(event) - case MessageRequestEventType.UpdateThread: - return await this.updateThread(event) - case MessageRequestEventType.CreateMessagesGroup: + case MessageEventType.UpdatePatch: + return await this.updatePatch(event) + case MessageEventType.RemovePatch: + return await this.removePatch(event) + case MessageEventType.ReactionPatch: + return await this.reactionPatch(event) + case MessageEventType.BlobPatch: + return await this.blobPatch(event) + case MessageEventType.LinkPreviewPatch: + return await this.linkPreviewPatch(event) + case MessageEventType.ThreadPatch: + return await this.threadPatch(event) + case MessageEventType.CreateMessagesGroup: return await this.createMessagesGroup(event) - case MessageRequestEventType.RemoveMessagesGroup: + case MessageEventType.RemoveMessagesGroup: return await this.removeMessagesGroup(event) // Labels - case LabelRequestEventType.CreateLabel: + case LabelEventType.CreateLabel: return await this.createLabel(event) - case 
LabelRequestEventType.RemoveLabel: + case LabelEventType.RemoveLabel: return await this.removeLabel(event) // Cards - case CardRequestEventType.UpdateCardType: + case CardEventType.UpdateCardType: return await this.updateCardType(event) - case CardRequestEventType.RemoveCard: + case CardEventType.RemoveCard: return await this.removeCard(event) // Collaborators - case NotificationRequestEventType.AddCollaborators: + case NotificationEventType.AddCollaborators: return await this.addCollaborators(event) - case NotificationRequestEventType.RemoveCollaborators: + case NotificationEventType.RemoveCollaborators: return await this.removeCollaborators(event) // Notifications - case NotificationRequestEventType.CreateNotification: + case NotificationEventType.CreateNotification: return await this.createNotification(event) - case NotificationRequestEventType.RemoveNotifications: + case NotificationEventType.RemoveNotifications: return await this.removeNotifications(event) - case NotificationRequestEventType.UpdateNotification: + case NotificationEventType.UpdateNotification: return await this.updateNotification(event) // Notification Contexts - case NotificationRequestEventType.CreateNotificationContext: + case NotificationEventType.CreateNotificationContext: return await this.createNotificationContext(event) - case NotificationRequestEventType.RemoveNotificationContext: + case NotificationEventType.RemoveNotificationContext: return await this.removeNotificationContext(event) - case NotificationRequestEventType.UpdateNotificationContext: + case NotificationEventType.UpdateNotificationContext: return await this.updateNotificationContext(event) } } @@ -206,15 +179,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { const added = await this.db.addCollaborators(event.cardId, event.cardType, event.collaborators, event.date) if (added.length === 0) return {} return { - responseEvent: { - _id: event._id, - type: NotificationResponseEventType.AddedCollaborators, - cardId: event.cardId, - cardType: event.cardType, - collaborators: added, - socialId: event.socialId, - date: event.date - } + response: event } } @@ -223,15 +188,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { await this.db.removeCollaborators(event.cardId, event.collaborators) return { - responseEvent: { - _id: event._id, - type: NotificationResponseEventType.RemovedCollaborators, - cardId: event.cardId, - cardType: event.cardType, - collaborators: event.collaborators, - socialId: event.socialId, - date: event.date - } + response: event } } @@ -239,9 +196,6 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { if (event.messageId == null) { throw new Error('Message id is required') } - if (event.date == null) { - throw new Error('Date is required') - } const created = await this.db.createMessage( event.messageId, @@ -262,31 +216,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } } - const message: Message = { - id: event.messageId, - type: event.messageType, - cardId: event.cardId, - content: event.content, - creator: event.socialId, - created: event.date, - extra: event.extra, - - removed: false, - - reactions: [], - blobs: [], - linkPreviews: [] - } - const responseEvent: MessageCreatedEvent = { - _id: event._id, - type: MessageResponseEventType.MessageCreated, - cardId: event.cardId, - cardType: event.cardType, - message, - options: event.options - } return { - responseEvent, + response: event, result: { messageId: 
event.messageId, created: event.date @@ -294,186 +225,105 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } } - private async createPatch (event: Enriched): Promise { - const messageCreated = await this.db.getMessageCreated(event.cardId, event.messageId) - if (messageCreated == null) return {} - await this.db.createPatch( - event.cardId, - event.messageId, - messageCreated, - event.patchType, - event.data, - event.socialId, - event.date - ) + private async createPatch ( + cardId: CardID, + messageId: MessageID, + type: PatchType, + data: Record, + socialId: SocialID, + date: Date + ): Promise { + await this.db.createPatch(cardId, messageId, type, data, socialId, date) + } - const patch = { - type: event.patchType, - messageId: event.messageId, - data: event.data, - creator: event.socialId, - created: event.date - } as any as Patch - - const responseEvent: PatchCreatedEvent = { - _id: event._id, - type: MessageResponseEventType.PatchCreated, - cardId: event.cardId, - messageId: event.messageId, - messageCreated, - patch, - options: event.options + private async updatePatch (event: Enriched): Promise { + const data: UpdatePatchData = { + content: event.content, + extra: event.extra } + await this.createPatch(event.cardId, event.messageId, PatchType.update, data, event.socialId, event.date) - return { - responseEvent - } + return { response: event } } - private async setReaction (event: Enriched): Promise { - const inDb = await this.db.isMessageInDb(event.cardId, event.messageId) - if (inDb) { - await this.db.setReaction(event.cardId, event.messageId, event.reaction, event.socialId, event.date) - } else { - await this.createPatch({ - type: MessageRequestEventType.CreatePatch, - cardId: event.cardId, - messageId: event.messageId, - patchType: PatchType.setReaction, - data: { - reaction: event.reaction - }, - socialId: event.socialId, - date: event.date - }) - } - - const reaction: Reaction = { - reaction: event.reaction, - creator: event.socialId, - created: event.date - } - const responseEvent: ReactionSetEvent = { - _id: event._id, - type: MessageResponseEventType.ReactionSet, - cardId: event.cardId, - messageId: event.messageId, - reaction - } - return { - responseEvent - } + private async removePatch (event: Enriched): Promise { + await this.createPatch(event.cardId, event.messageId, PatchType.remove, {}, event.socialId, event.date) + return { response: event } } - private async removeReaction (event: Enriched): Promise { - const inDb = await this.db.isMessageInDb(event.cardId, event.messageId) - if (inDb) { - await this.db.removeReaction(event.cardId, event.messageId, event.reaction, event.socialId, event.date) - } else { - await this.createPatch({ - type: MessageRequestEventType.CreatePatch, - cardId: event.cardId, - messageId: event.messageId, - patchType: PatchType.removeReaction, - data: { - reaction: event.reaction - }, - socialId: event.socialId, - date: event.date - }) - } + private async reactionPatch (event: Enriched): Promise { + const { operation } = event - const responseEvent: ReactionRemovedEvent = { - _id: event._id, - type: MessageResponseEventType.ReactionRemoved, - cardId: event.cardId, - messageId: event.messageId, - reaction: event.reaction, - socialId: event.socialId, - date: event.date - } - return { - responseEvent + if (operation.opcode === 'add') { + await this.db.addReaction(event.cardId, event.messageId, operation.reaction, event.socialId, event.date) + } else if (operation.opcode === 'remove') { + await 
this.db.removeReaction(event.cardId, event.messageId, operation.reaction, event.socialId, event.date) } + + return { response: event } } - private async attachBlob (event: Enriched): Promise { - await this.db.attachBlob(event.cardId, event.messageId, event.blobData, event.socialId, event.date) - const responseEvent: BlobAttachedEvent = { - _id: event._id, - type: MessageResponseEventType.BlobAttached, - cardId: event.cardId, - messageId: event.messageId, - blob: { - ...event.blobData, - creator: event.socialId, - created: event.date + private async blobPatch (event: Enriched): Promise { + const { operations } = event + + for (const operation of operations) { + if (operation.opcode === 'attach') { + await this.db.attachBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) + } else if (operation.opcode === 'detach') { + await this.db.detachBlobs(event.cardId, event.messageId, operation.blobIds, event.socialId, event.date) + } else if (operation.opcode === 'set') { + await this.db.setBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) } } - return { - responseEvent - } - } - private async detachBlob (event: Enriched): Promise { - await this.db.detachBlob(event.cardId, event.messageId, event.blobId, event.socialId, event.date) - const responseEvent: BlobDetachedEvent = { - _id: event._id, - type: MessageResponseEventType.BlobDetached, - cardId: event.cardId, - messageId: event.messageId, - blobId: event.blobId, - socialId: event.socialId, - date: event.date - } - return { - responseEvent - } + return { response: event } } - private async createLinkPreview (event: Enriched): Promise { - const id = await this.db.createLinkPreview( - event.cardId, - event.messageId, - event.previewData, - event.socialId, - event.date - ) - - const responseEvent: LinkPreviewCreatedEvent = { - _id: event._id, - type: MessageResponseEventType.LinkPreviewCreated, - cardId: event.cardId, - messageId: event.messageId, - linkPreview: { - ...event.previewData, - id, - creator: event.socialId, - created: event.date + private async linkPreviewPatch (event: Enriched): Promise { + for (const operation of event.operations) { + if (operation.opcode === 'attach') { + await this.db.attachLinkPreviews(event.cardId, event.messageId, operation.previews, event.socialId, event.date) + } else if (operation.opcode === 'detach') { + await this.db.detachLinkPreviews( + event.cardId, + event.messageId, + operation.previewIds, + event.socialId, + event.date + ) + } else if (operation.opcode === 'set') { + await this.db.setLinkPreviews(event.cardId, event.messageId, operation.previews, event.socialId, event.date) } } return { - responseEvent, - result: { - previewId: id, - created: event.date - } - } - } - - private async removeLinkPreview (event: Enriched): Promise { - await this.db.removeLinkPreview(event.cardId, event.messageId, event.previewId) - const responseEvent: LinkPreviewRemovedEvent = { - _id: event._id, - type: MessageResponseEventType.LinkPreviewRemoved, - cardId: event.cardId, - messageId: event.messageId, - previewId: event.previewId + response: event + } + } + + private async threadPatch (event: Enriched): Promise { + if (event.operation.opcode === 'attach') { + await this.db.attachThread( + event.cardId, + event.messageId, + event.operation.threadId, + event.operation.threadType, + event.socialId, + event.date + ) + } else if (event.operation.opcode === 'update') { + await this.db.updateThread( + event.cardId, + event.messageId, + event.operation.threadId, + 
event.operation.updates, + event.socialId, + event.date + ) } return { - responseEvent + response: event } } @@ -489,53 +339,26 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { ) return { - responseEvent: { - _id: event._id, - type: NotificationResponseEventType.NotificationCreated, - notification: { - id, - cardId: event.cardId, - account: event.account, - type: event.notificationType, - content: event.content ?? {}, - contextId: event.contextId, - messageId: event.messageId, - messageCreated: event.messageCreated, - read: event.read ?? false, - created: event.date - } - } + response: { ...event, notificationId: id } } } - private async updateNotification (event: UpdateNotificationEvent): Promise { - await this.db.updateNotification(event.query, event.updates) - const responseEvent: NotificationUpdatedEvent = { - _id: event._id, - type: NotificationResponseEventType.NotificationUpdated, - query: event.query, - updates: event.updates - } + private async updateNotification (event: Enriched): Promise { + await this.db.updateNotification(event.contextId, event.account, event.query, event.updates) + return { - responseEvent + response: event } } - private async removeNotifications (event: RemoveNotificationsEvent): Promise { + private async removeNotifications (event: Enriched): Promise { if (event.ids.length === 0) return {} const ids = await this.db.removeNotifications(event.contextId, event.account, event.ids) - const responseEvent: NotificationsRemovedEvent = { - _id: event._id, - type: NotificationResponseEventType.NotificationsRemoved, - contextId: event.contextId, - account: event.account, - ids - } return { result: { ids }, - responseEvent + response: { ...event, ids } } } @@ -547,53 +370,30 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { event.lastView, event.lastNotify ) - const responseEvent: NotificationContextCreatedEvent = { - _id: event._id, - type: NotificationResponseEventType.NotificationContextCreated, - context: { - id, - account: event.account, - cardId: event.cardId, - lastView: event.lastView, - lastUpdate: event.lastUpdate, - lastNotify: event.lastNotify - } - } return { - responseEvent, + response: { ...event, contextId: id }, result: { id } } } - private async removeNotificationContext (event: RemoveNotificationContextEvent): Promise { + private async removeNotificationContext (event: Enriched): Promise { const context = (await this.db.findNotificationContexts({ id: event.contextId, account: event.account }))[0] - if (context === undefined) return {} + if (context == null) return {} - await this.db.removeContext(event.contextId, event.account) - const responseEvent: NotificationContextRemovedEvent = { - _id: event._id, - type: NotificationResponseEventType.NotificationContextRemoved, - context - } + this.context.removedContexts.set(context.id, context) + + const id = await this.db.removeContext(context.id, context.account) + if (id == null) return {} return { - responseEvent + response: event } } - async updateNotificationContext (event: UpdateNotificationContextEvent): Promise { + async updateNotificationContext (event: Enriched): Promise { await this.db.updateContext(event.contextId, event.account, event.updates) - const responseEvent: NotificationContextUpdatedEvent = { - _id: event._id, - type: NotificationResponseEventType.NotificationContextUpdated, - contextId: event.contextId, - account: event.account, - lastView: event.updates.lastView, - lastUpdate: event.updates.lastUpdate, - lastNotify: 
event.updates.lastNotify - } return { - responseEvent + response: event } } @@ -601,19 +401,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { const { fromDate, toDate, count, cardId, blobId } = event.group await this.db.createMessagesGroup(cardId, blobId, fromDate, toDate, count) - const responseEvent: MessagesGroupCreatedEvent = { - _id: event._id, - type: MessageResponseEventType.MessagesGroupCreated, - group: { - cardId, - blobId, - fromDate, - toDate, - count - } - } return { - responseEvent + response: event } } @@ -621,47 +410,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { await this.db.removeMessagesGroup(event.cardId, event.blobId) return { - responseEvent: { - _id: event._id, - type: MessageResponseEventType.MessagesGroupRemoved, - cardId: event.cardId, - blobId: event.blobId - } - } - } - - private async attachThread (event: Enriched): Promise { - await this.db.attachThread(event.cardId, event.messageId, event.threadId, event.threadType, event.date) - const responseEvent: ThreadAttachedEvent = { - _id: event._id, - type: MessageResponseEventType.ThreadAttached, - cardId: event.cardId, - messageId: event.messageId, - thread: { - cardId: event.cardId, - messageId: event.messageId, - threadId: event.threadId, - threadType: event.threadType, - repliesCount: 0, - lastReply: event.date - } - } - return { - responseEvent - } - } - - private async updateThread (event: Enriched): Promise { - await this.db.updateThread(event.threadId, event.updates) - return { - responseEvent: { - _id: event._id, - type: MessageResponseEventType.ThreadUpdated, - cardId: event.cardId, - messageId: event.messageId, - threadId: event.threadId, - updates: event.updates - } + response: event } } @@ -669,17 +418,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { await this.db.createLabel(event.labelId, event.cardId, event.cardType, event.account, event.date) return { - responseEvent: { - _id: event._id, - type: LabelResponseEventType.LabelCreated, - label: { - labelId: event.labelId, - cardId: event.cardId, - cardType: event.cardType, - account: event.account, - created: event.date - } - } + response: event } } @@ -690,38 +429,19 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { account: event.account }) return { - responseEvent: { - _id: event._id, - type: LabelResponseEventType.LabelRemoved, - labelId: event.labelId, - cardId: event.cardId, - account: event.account - } + response: event } } - private async updateCardType (event: UpdateCardTypeEvent): Promise { + private async updateCardType (event: Enriched): Promise { return { - responseEvent: { - _id: event._id, - type: CardResponseEventType.CardTypeUpdated, - cardId: event.cardId, - cardType: event.cardType, - socialId: event.socialId, - date: event.date - } + response: event } } - private async removeCard (event: RemoveCardEvent): Promise { + private async removeCard (event: Enriched): Promise { return { - responseEvent: { - _id: event._id, - type: CardResponseEventType.CardRemoved, - cardId: event.cardId, - socialId: event.socialId, - date: event.date - } + response: event } } diff --git a/packages/server/src/middleware/id.ts b/packages/server/src/middleware/id.ts new file mode 100644 index 00000000000..de8aaaa6fbc --- /dev/null +++ b/packages/server/src/middleware/id.ts @@ -0,0 +1,44 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. 
+// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { + type EventResult, + MessageEventType, + type Event, + type SessionData +} from '@hcengineering/communication-sdk-types' + +import { generateMessageId } from '../messageId' +import type { Middleware, MiddlewareContext, Enriched } from '../types' +import { BaseMiddleware } from './base' + +export class IdMiddleware extends BaseMiddleware implements Middleware { + constructor ( + readonly context: MiddlewareContext, + next?: Middleware + ) { + super(context, next) + } + + async event (session: SessionData, event: Enriched, derived: boolean): Promise { + if (event.type === MessageEventType.CreateMessage) { + if (event.messageId == null) { + event.messageId = generateMessageId() + } + } + + return await this.provideEvent(session, event, derived) + } +} diff --git a/packages/server/src/middleware/indentity.ts b/packages/server/src/middleware/indentity.ts index 625ab269edf..97477f58c33 100644 --- a/packages/server/src/middleware/indentity.ts +++ b/packages/server/src/middleware/indentity.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import { type EventResult, type RequestEvent, type SessionData } from '@hcengineering/communication-sdk-types' +import { type SessionData } from '@hcengineering/communication-sdk-types' import { systemAccountUuid } from '@hcengineering/core' import type { AccountID, @@ -25,11 +25,9 @@ import type { NotificationContext } from '@hcengineering/communication-types' -import type { Enriched, Middleware, MiddlewareContext, QueryId } from '../types' +import type { Middleware, MiddlewareContext, QueryId } from '../types' import { BaseMiddleware } from './base' -import { ApiError } from '../error' - export class IdentityMiddleware extends BaseMiddleware implements Middleware { constructor ( readonly context: MiddlewareContext, @@ -38,29 +36,12 @@ export class IdentityMiddleware extends BaseMiddleware implements Middleware { super(context, next) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { - if (event.socialId == null) { - if (this.isSystem(session)) { - console.error('Missing social id. System accounts cannot be anonymous.', event) - throw ApiError.badRequest('Missing social id. 
System accounts cannot be anonymous.') - } - event.socialId = session.account.primarySocialId - } - - return await this.provideEvent(session, event, derived) - } - - private isSystem (session: SessionData): boolean { - const account = session.account - return systemAccountUuid === account.uuid - } - async findNotificationContexts ( session: SessionData, params: FindNotificationContextParams, queryId?: QueryId ): Promise { - const paramsWithAccount = this.expandParamsWithAccount(session, params) + const paramsWithAccount = this.enrichParamsWithAccount(session, params) return await this.provideFindNotificationContexts(session, paramsWithAccount, queryId) } @@ -69,16 +50,16 @@ export class IdentityMiddleware extends BaseMiddleware implements Middleware { params: FindNotificationsParams, queryId?: QueryId ): Promise { - const paramsWithAccount = this.expandParamsWithAccount(session, params) + const paramsWithAccount = this.enrichParamsWithAccount(session, params) return await this.provideFindNotifications(session, paramsWithAccount, queryId) } async findLabels (session: SessionData, params: FindLabelsParams, queryId?: QueryId): Promise { - const paramsWithAccount = this.expandParamsWithAccount(session, params) + const paramsWithAccount = this.enrichParamsWithAccount(session, params) return await this.provideFindLabels(session, paramsWithAccount, queryId) } - private expandParamsWithAccount(session: SessionData, params: T): T { + private enrichParamsWithAccount(session: SessionData, params: T): T { const account = session.account const isSystem = account.uuid === systemAccountUuid diff --git a/packages/server/src/middleware/permissions.ts b/packages/server/src/middleware/permissions.ts index 2b02811b99e..56c0bb2b24c 100644 --- a/packages/server/src/middleware/permissions.ts +++ b/packages/server/src/middleware/permissions.ts @@ -15,10 +15,10 @@ import { type DbAdapter, + type Event, type EventResult, - MessageRequestEventType, - NotificationRequestEventType, - type RequestEvent, + MessageEventType, + NotificationEventType, type SessionData } from '@hcengineering/communication-sdk-types' import { systemAccountUuid } from '@hcengineering/core' @@ -37,23 +37,35 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware super(context, next) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { if (derived) return await this.provideEvent(session, event, derived) - this.checkSocialId(session, event.socialId) - switch (event.type) { - case NotificationRequestEventType.RemoveNotifications: - case NotificationRequestEventType.UpdateNotificationContext: - case NotificationRequestEventType.RemoveNotificationContext: { + case MessageEventType.CreateMessage: + this.checkSocialId(session, event.socialId) + if (!this.isSystemAccount(session) && event?.options?.noNotify === true) { + event.options.noNotify = false + } + break + case MessageEventType.RemovePatch: + case MessageEventType.UpdatePatch: + case MessageEventType.BlobPatch: + case MessageEventType.LinkPreviewPatch: + case MessageEventType.ReactionPatch: + case MessageEventType.ThreadPatch: + case NotificationEventType.AddCollaborators: + case NotificationEventType.RemoveCollaborators: + this.checkSocialId(session, event.socialId) + break + case NotificationEventType.RemoveNotifications: + case NotificationEventType.UpdateNotificationContext: + case NotificationEventType.UpdateNotification: + case 
NotificationEventType.RemoveNotificationContext: { this.checkAccount(session, event.account) break } - case NotificationRequestEventType.UpdateNotification: - this.checkAccount(session, event.query.account) - break - case MessageRequestEventType.CreateMessagesGroup: - case MessageRequestEventType.RemoveMessagesGroup: { + case MessageEventType.CreateMessagesGroup: + case MessageEventType.RemoveMessagesGroup: { this.onlySystemAccount(session) break } @@ -79,9 +91,13 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware } private onlySystemAccount (session: SessionData): void { - const account = session.account - if (systemAccountUuid !== account.uuid) { + if (!this.isSystemAccount(session)) { throw ApiError.forbidden('only system account is allowed') } } + + private isSystemAccount (session: SessionData): boolean { + const account = session.account + return systemAccountUuid === account.uuid + } } diff --git a/packages/server/src/middleware/triggers.ts b/packages/server/src/middleware/triggers.ts index f303369c038..64b8a5517fd 100644 --- a/packages/server/src/middleware/triggers.ts +++ b/packages/server/src/middleware/triggers.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import type { DbAdapter, RequestEvent, ResponseEvent, SessionData } from '@hcengineering/communication-sdk-types' +import type { DbAdapter, Event, SessionData } from '@hcengineering/communication-sdk-types' import type { MeasureContext } from '@hcengineering/core' import triggers from '../triggers/all' @@ -39,7 +39,7 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { ) // 1hour } - async response (session: SessionData, event: ResponseEvent, derived: boolean): Promise { + async response (session: SessionData, event: Enriched, derived: boolean): Promise { const ctx: Omit = { metadata: this.context.metadata, db: this.db, @@ -47,10 +47,11 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { account: session.account, registeredCards: this.context.registeredCards, accountBySocialID: this.context.accountBySocialID, + removedContexts: this.context.removedContexts, derived, - execute: async (event: RequestEvent) => { + execute: async (event: Event) => { // Will be enriched in head - return (await this.context.head?.event(session, event as Enriched, true)) ?? {} + return (await this.context.head?.event(session, event as Enriched, true)) ?? 
{} } } await this.applyTriggers(session, event, ctx) @@ -63,7 +64,11 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { ).then((res) => this.propagate(session, res)) } - private async applyTriggers (session: SessionData, event: ResponseEvent, ctx: Omit): Promise { + private async applyTriggers ( + session: SessionData, + event: Enriched, + ctx: Omit + ): Promise { const matchedTriggers = triggers.filter(([_, type]) => type === event.type) if (matchedTriggers.length === 0) return @@ -84,10 +89,10 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { await this.propagate(session, derived) } - private async propagate (session: SessionData, derived: RequestEvent[]): Promise { + private async propagate (session: SessionData, derived: Event[]): Promise { if (derived.length === 0) return if (this.context.head === undefined) return // Will be enriched in head - await Promise.all(derived.map((d) => this.context.head?.event(session, d as Enriched, true))) + await Promise.all(derived.map((d) => this.context.head?.event(session, d as Enriched, true))) } } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 38d8b587e38..8f123c4a7d0 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -15,9 +15,9 @@ import { type EventResult, - MessageRequestEventType, - NotificationRequestEventType, - type RequestEvent, + MessageEventType, + NotificationEventType, + type Event, type SessionData } from '@hcengineering/communication-sdk-types' import { @@ -33,7 +33,6 @@ import { type MessagesGroup, type Notification, type NotificationContext, - PatchType, SortingOrder } from '@hcengineering/communication-types' import { z } from 'zod' @@ -94,55 +93,49 @@ export class ValidateMiddleware extends BaseMiddleware implements Middleware { return await this.provideFindCollaborators(session, params) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { if (derived) return await this.provideEvent(session, event, derived) switch (event.type) { - case MessageRequestEventType.CreateMessage: + case MessageEventType.CreateMessage: this.validate(event, CreateMessageEventSchema) break - case MessageRequestEventType.CreatePatch: - this.validate(event, CreatePatchEventSchema) + case MessageEventType.UpdatePatch: + this.validate(event, UpdatePatchEventSchema) break - case MessageRequestEventType.SetReaction: - this.validate(event, SetReactionEventSchema) + case MessageEventType.RemovePatch: + this.validate(event, RemovePatchEventSchema) break - case MessageRequestEventType.RemoveReaction: - this.validate(event, RemoveReactionEventSchema) + case MessageEventType.ReactionPatch: + this.validate(event, ReactionPatchEventSchema) break - case MessageRequestEventType.AttachBlob: - this.validate(event, AttachBlobEventSchema) + case MessageEventType.BlobPatch: + this.validate(event, BlobPatchEventSchema) break - case MessageRequestEventType.DetachBlob: - this.validate(event, DetachBlobEventSchema) + case MessageEventType.LinkPreviewPatch: + this.validate(event, LinkPreviewPatchEventSchema) break - case MessageRequestEventType.AttachThread: - this.validate(event, AttachThreadEventSchema) + case MessageEventType.ThreadPatch: + this.validate(event, ThreadPatchEventSchema) break - case MessageRequestEventType.CreateLinkPreview: - this.validate(event, 
CreateLinkPreviewEventSchema) - break - case MessageRequestEventType.RemoveLinkPreview: - this.validate(event, RemoveLinkPreviewEventSchema) - break - case MessageRequestEventType.CreateMessagesGroup: + case MessageEventType.CreateMessagesGroup: this.validate(event, CreateMessagesGroupEventSchema) break - case MessageRequestEventType.RemoveMessagesGroup: + case MessageEventType.RemoveMessagesGroup: this.validate(event, RemoveMessagesGroupEventSchema) break - case NotificationRequestEventType.AddCollaborators: + case NotificationEventType.AddCollaborators: this.validate(event, AddCollaboratorsEventSchema) break - case NotificationRequestEventType.RemoveCollaborators: + case NotificationEventType.RemoveCollaborators: this.validate(event, RemoveCollaboratorsEventSchema) break - case NotificationRequestEventType.UpdateNotification: + case NotificationEventType.UpdateNotification: this.validate(event, UpdateNotificationsEventSchema) break - case NotificationRequestEventType.RemoveNotificationContext: + case NotificationEventType.RemoveNotificationContext: this.validate(event, RemoveNotificationContextEventSchema) break - case NotificationRequestEventType.UpdateNotificationContext: + case NotificationEventType.UpdateNotificationContext: this.validate(event, UpdateNotificationContextEventSchema) break } @@ -166,6 +159,33 @@ const MessagesGroupSchema = z.any() const SocialIDSchema = z.string() const SortingOrderSchema = z.union([z.literal(SortingOrder.Ascending), z.literal(SortingOrder.Descending)]) +const BlobDataSchema = z.object({ + blobId: BlobIDSchema, + mimeType: z.string(), + fileName: z.string(), + size: z.number(), + metadata: z.record(z.string(), z.any()).optional() +}) + +const LinkPreviewDataSchema = z + .object({ + previewId: LinkPreviewIDSchema, + url: z.string(), + host: z.string(), + title: z.string().optional(), + description: z.string().optional(), + siteName: z.string().optional(), + iconUrl: z.string().optional(), + previewImage: z + .object({ + url: z.string(), + width: z.number().optional(), + height: z.number().optional() + }) + .optional() + }) + .strict() + // Find params const dateOrRecordSchema = z.union([DateSchema, z.record(DateSchema)]) @@ -235,20 +255,20 @@ const FindCollaboratorsParamsSchema = FindParamsSchema.extend({ // Events -const BaseRequestEventSchema = z +const BaseEventSchema = z .object({ _id: z.string().optional() }) .strict() // Message events -const CreateMessageEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.CreateMessage), +const CreateMessageEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.CreateMessage), cardId: CardIDSchema, cardType: CardTypeSchema, - messageId: MessageIDSchema.optional(), + messageId: MessageIDSchema.min(3).max(22).optional(), messageType: MessageTypeSchema, content: MarkdownSchema, @@ -260,135 +280,100 @@ const CreateMessageEventSchema = BaseRequestEventSchema.extend({ options: z .object({ skipLinkPreviews: z.boolean().optional(), - ignoreDuplicateIds: z.boolean().optional(), noNotify: z.boolean().optional() }) .optional() }).strict() -const CreatePatchEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.CreatePatch), - +const UpdatePatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.UpdatePatch), cardId: CardIDSchema, - messageId: MessageIDSchema, + messageId: MessageIDSchema.optional(), - patchType: z.enum([PatchType.update, PatchType.remove]), - data: z.any(), + content: MarkdownSchema.optional(), + 
extra: z.record(z.any()).optional(), socialId: SocialIDSchema, date: DateSchema, options: z .object({ - skipLinkPreviewsUpdate: z.boolean().optional(), - markAsUpdated: z.boolean().optional() + skipLinkPreviewsUpdate: z.boolean().optional() }) .optional() }).strict() -const SetReactionEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.SetReaction), +const RemovePatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.RemovePatch), cardId: CardIDSchema, - messageId: MessageIDSchema, - reaction: z.string(), - socialId: SocialIDSchema, - date: DateSchema -}).strict() + messageId: MessageIDSchema.optional(), -const RemoveReactionEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.RemoveReaction), - cardId: CardIDSchema, - messageId: MessageIDSchema, - reaction: z.string(), socialId: SocialIDSchema, date: DateSchema }).strict() -const AttachThreadEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.AttachThread), +const ReactionOperationSchema = z.union([ + z.object({ opcode: z.literal('add'), reaction: z.string() }), + z.object({ opcode: z.literal('remove'), reaction: z.string() }) +]) +const ReactionPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.ReactionPatch), cardId: CardIDSchema, messageId: MessageIDSchema, - - threadId: CardIDSchema, - threadType: CardTypeSchema, - - socialId: SocialIDSchema, - date: DateSchema -}) - -const AttachBlobEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.AttachBlob), - - cardId: CardIDSchema, - messageId: MessageIDSchema, - - blobData: z.object({ - blobId: BlobIDSchema, - contentType: z.string(), - fileName: z.string(), - size: z.number(), - metadata: z.record(z.string(), z.any()).optional() - }), - + operation: ReactionOperationSchema, socialId: SocialIDSchema, date: DateSchema }).strict() -const DetachBlobEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.DetachBlob), +const BlobOperationSchema = z.union([ + z.object({ opcode: z.literal('attach'), blobs: z.array(BlobDataSchema).nonempty() }), + z.object({ opcode: z.literal('detach'), blobIds: z.array(BlobIDSchema).nonempty() }), + z.object({ opcode: z.literal('set'), blobs: z.array(BlobDataSchema).nonempty() }) +]) +const BlobPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.BlobPatch), cardId: CardIDSchema, messageId: MessageIDSchema, - - blobId: BlobIDSchema, - + operations: z.array(BlobOperationSchema).nonempty(), socialId: SocialIDSchema, date: DateSchema }).strict() -const CreateLinkPreviewEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.CreateLinkPreview), +const ThreadPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.ThreadPatch), cardId: CardIDSchema, messageId: MessageIDSchema, - - preview: z.object({ - url: z.string(), - host: z.string(), - title: z.string().optional(), - description: z.string().optional(), - iconUrl: z.string().optional(), - previewImage: z.object({ - url: z.string(), - width: z.number().optional(), - height: z.number().optional() - }) - }), - + operation: z.object({ opcode: z.literal('attach'), threadId: CardIDSchema, threadType: CardTypeSchema }), socialId: SocialIDSchema, date: DateSchema }).strict() -const RemoveLinkPreviewEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.RemoveLinkPreview), - cardId: 
CardIDSchema, - messageId: MessageIDSchema, - - previewId: LinkPreviewIDSchema, +const LinkPreviewOperationSchema = z.union([ + z.object({ opcode: z.literal('attach'), previews: z.array(LinkPreviewDataSchema).nonempty() }), + z.object({ opcode: z.literal('detach'), previewIds: z.array(LinkPreviewIDSchema).nonempty() }), + z.object({ opcode: z.literal('set'), previews: z.array(LinkPreviewDataSchema).nonempty() }) +]) +const LinkPreviewPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.LinkPreviewPatch), + cardId: CardIDSchema, + messageId: MessageIDSchema.optional(), + operations: z.array(LinkPreviewOperationSchema).nonempty(), socialId: SocialIDSchema, date: DateSchema }).strict() -const CreateMessagesGroupEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.CreateMessagesGroup), +const CreateMessagesGroupEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.CreateMessagesGroup), group: MessagesGroupSchema, socialId: SocialIDSchema, date: DateSchema }).strict() -const RemoveMessagesGroupEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(MessageRequestEventType.RemoveMessagesGroup), +const RemoveMessagesGroupEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.RemoveMessagesGroup), cardId: CardIDSchema, blobId: BlobIDSchema, socialId: SocialIDSchema, @@ -396,8 +381,8 @@ const RemoveMessagesGroupEventSchema = BaseRequestEventSchema.extend({ }).strict() // Notification events -const UpdateNotificationsEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(NotificationRequestEventType.UpdateNotification), +const UpdateNotificationsEventSchema = BaseEventSchema.extend({ + type: z.literal(NotificationEventType.UpdateNotification), query: z.object({ context: ContextIDSchema, account: AccountIDSchema, @@ -409,31 +394,28 @@ const UpdateNotificationsEventSchema = BaseRequestEventSchema.extend({ updates: z.object({ read: z.boolean() }), - socialId: SocialIDSchema, date: DateSchema }).strict() -const RemoveNotificationContextEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(NotificationRequestEventType.RemoveNotificationContext), +const RemoveNotificationContextEventSchema = BaseEventSchema.extend({ + type: z.literal(NotificationEventType.RemoveNotificationContext), contextId: ContextIDSchema, account: AccountIDSchema, - socialId: SocialIDSchema, date: DateSchema }).strict() -const UpdateNotificationContextEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(NotificationRequestEventType.UpdateNotificationContext), +const UpdateNotificationContextEventSchema = BaseEventSchema.extend({ + type: z.literal(NotificationEventType.UpdateNotificationContext), contextId: ContextIDSchema, account: AccountIDSchema, updates: z.object({ lastView: DateSchema.optional() }), - socialId: SocialIDSchema, date: DateSchema }).strict() -const AddCollaboratorsEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(NotificationRequestEventType.AddCollaborators), +const AddCollaboratorsEventSchema = BaseEventSchema.extend({ + type: z.literal(NotificationEventType.AddCollaborators), cardId: CardIDSchema, cardType: CardTypeSchema, collaborators: z.array(AccountIDSchema).nonempty(), @@ -441,8 +423,8 @@ const AddCollaboratorsEventSchema = BaseRequestEventSchema.extend({ date: DateSchema }).strict() -const RemoveCollaboratorsEventSchema = BaseRequestEventSchema.extend({ - type: z.literal(NotificationRequestEventType.RemoveCollaborators), +const RemoveCollaboratorsEventSchema 
= BaseEventSchema.extend({ + type: z.literal(NotificationEventType.RemoveCollaborators), cardId: CardIDSchema, cardType: CardTypeSchema, collaborators: z.array(AccountIDSchema).nonempty(), @@ -450,9 +432,9 @@ const RemoveCollaboratorsEventSchema = BaseRequestEventSchema.extend({ date: DateSchema }).strict() -function deserializeEvent (event: Enriched): Enriched { +function deserializeEvent (event: Enriched): Enriched { switch (event.type) { - case MessageRequestEventType.CreateMessagesGroup: + case MessageEventType.CreateMessagesGroup: return { ...event, // eslint-disable-next-line @typescript-eslint/no-non-null-assertion @@ -465,7 +447,7 @@ function deserializeEvent (event: Enriched): Enriched { const createFns: MiddlewareCreateFn[] = [ + // Enrich events async (context, next) => new DateMiddleware(context, next), async (context, next) => new IdentityMiddleware(context, next), + async (context, next) => new IdMiddleware(context, next), + + // Validate events async (context, next) => new ValidateMiddleware(context, next), async (context, next) => new PermissionsMiddleware(db, context, next), + + // Process events async (context, next) => new BroadcastMiddleware(broadcast, context, next), async (context, next) => new DatabaseMiddleware(db, context, next), async (context, next) => new TriggersMiddleware(db, context, next) @@ -70,7 +77,8 @@ export async function buildMiddlewares ( metadata, workspace, registeredCards: new Set(), - accountBySocialID: new Map() + accountBySocialID: new Map(), + removedContexts: new Map() } return await Middlewares.create(ctx, context, createFns) @@ -165,9 +173,9 @@ export class Middlewares { this.head?.unsubscribeQuery(session, id) } - async event (session: SessionData, event: RequestEvent): Promise { + async event (session: SessionData, event: Event): Promise { if (this.head === undefined) return {} - return (await this.head?.event(session, event as Enriched, false)) ?? {} + return (await this.head?.event(session, event as Enriched, false)) ?? 
{} } async closeSession (sessionId: string): Promise { diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts index 6ec4fa09819..8dea22f8bb7 100644 --- a/packages/server/src/notification/notification.ts +++ b/packages/server/src/notification/notification.ts @@ -15,45 +15,56 @@ import { type CreateNotificationContextResult, - LabelRequestEventType, - MessageResponseEventType, - NotificationRequestEventType, - type RequestEvent, - type ResponseEvent + LabelEventType, + NotificationEventType, + type Event, + MessageEventType } from '@hcengineering/communication-sdk-types' import { type AccountID, type CardID, type CardType, type ContextID, - type Message, type MessageID, NewMessageLabelID, type NotificationContext, NotificationType, - type Reaction, type ReactionNotificationContent, type SocialID, SortingOrder } from '@hcengineering/communication-types' -import type { TriggerCtx } from '../types' +import type { Enriched, TriggerCtx } from '../types' import { findAccount } from '../utils' import { findMessage } from '../triggers/utils' const BATCH_SIZE = 500 -export async function notify (ctx: TriggerCtx, event: ResponseEvent): Promise { +export async function notify (ctx: TriggerCtx, event: Enriched): Promise { switch (event.type) { - case MessageResponseEventType.MessageCreated: { - if (event.options?.noNotify === true) return [] - return await notifyMessage(ctx, event.message, event.cardType) + case MessageEventType.CreateMessage: { + if (event.options?.noNotify === true || event.messageId == null) return [] + return await notifyMessage(ctx, event.cardId, event.cardType, event.messageId, event.socialId, event.date) } - case MessageResponseEventType.ReactionSet: { - return await notifyReaction(ctx, event.cardId, event.messageId, event.reaction) - } - case MessageResponseEventType.ReactionRemoved: { - return await removeReactionNotification(ctx, event.cardId, event.messageId, event.reaction, event.socialId) + case MessageEventType.ReactionPatch: { + if (event.operation.opcode === 'add') { + return await notifyReaction( + ctx, + event.cardId, + event.messageId, + event.operation.reaction, + event.socialId, + event.date + ) + } else if (event.operation.opcode === 'remove') { + return await removeReactionNotification( + ctx, + event.cardId, + event.messageId, + event.operation.reaction, + event.socialId + ) + } } } @@ -66,8 +77,8 @@ async function removeReactionNotification ( messageId: MessageID, reaction: string, socialId: SocialID -): Promise { - const result: RequestEvent[] = [] +): Promise { + const result: Event[] = [] const msg = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) if (msg === undefined) return result @@ -103,20 +114,19 @@ async function removeReactionNotification ( )[0] if (lastNotification != null) { result.push({ - type: NotificationRequestEventType.UpdateNotificationContext, + type: NotificationEventType.UpdateNotificationContext, contextId: context.id, account: messageAccount, updates: { lastNotify: lastNotification.created }, - socialId, date: new Date() }) } } result.push({ - type: NotificationRequestEventType.RemoveNotifications, + type: NotificationEventType.RemoveNotifications, contextId: toDelete.contextId, account: messageAccount, ids: [toDelete.id] @@ -128,9 +138,11 @@ async function notifyReaction ( ctx: TriggerCtx, cardId: CardID, messageId: MessageID, - reaction: Reaction -): Promise { - const result: RequestEvent[] = [] + reaction: string, + socialId: SocialID, + date: 
Date +): Promise { + const result: Event[] = [] const message = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) if (message == null) return result @@ -138,7 +150,7 @@ async function notifyReaction ( const messageAccount = await findAccount(ctx, message.creator) if (messageAccount == null) return result - const reactionAccount = await findAccount(ctx, reaction.creator) + const reactionAccount = await findAccount(ctx, socialId) if (reactionAccount === messageAccount) return result const context = (await ctx.db.findNotificationContexts({ card: cardId, account: messageAccount }))[0] @@ -151,53 +163,70 @@ async function notifyReaction ( if (contextId == null) return result const content: ReactionNotificationContent = { - emoji: reaction.reaction, - creator: reaction.creator + emoji: reaction, + creator: socialId } result.push({ - type: NotificationRequestEventType.CreateNotification, + type: NotificationEventType.CreateNotification, notificationType: NotificationType.Reaction, account: messageAccount, cardId, contextId, messageId, messageCreated: message.created, - date: reaction.created, + date, content, - socialId: message.creator + read: false }) - result.push({ - type: NotificationRequestEventType.UpdateNotificationContext, - contextId, - account: messageAccount, - updates: { - lastNotify: reaction.created - }, - socialId: message.creator, - date: reaction.created - }) + if ((context?.lastNotify?.getTime() ?? 0) < date.getTime()) { + result.push({ + type: NotificationEventType.UpdateNotificationContext, + contextId, + account: messageAccount, + updates: { + lastNotify: date + }, + date + }) + } return result } -async function notifyMessage (ctx: TriggerCtx, message: Message, cardType: CardType): Promise { - const cursor = ctx.db.getCollaboratorsCursor(message.cardId, message.created, BATCH_SIZE) - const creatorAccount = await findAccount(ctx, message.creator) - const result: RequestEvent[] = [] +async function notifyMessage ( + ctx: TriggerCtx, + cardId: CardID, + cardType: CardType, + messageId: MessageID, + socialId: SocialID, + date: Date +): Promise { + const cursor = ctx.db.getCollaboratorsCursor(cardId, date, BATCH_SIZE) + const creatorAccount = await findAccount(ctx, socialId) + const result: Event[] = [] let isFirstBatch = true for await (const dbCollaborators of cursor) { const collaborators: AccountID[] = dbCollaborators.map((it) => it.account) const contexts: NotificationContext[] = await ctx.db.findNotificationContexts({ - card: message.cardId, + card: cardId, account: isFirstBatch && collaborators.length < BATCH_SIZE ? 
undefined : collaborators }) for (const collaborator of collaborators) { try { const context = contexts.find((it) => it.account === collaborator) - const res = await processCollaborator(ctx, cardType, message, collaborator, creatorAccount, context) + const res = await processCollaborator( + ctx, + cardId, + cardType, + messageId, + date, + collaborator, + creatorAccount, + context + ) result.push(...res) } catch (e) { ctx.ctx.error('Error on create notification', { collaborator, error: e }) @@ -212,25 +241,26 @@ async function notifyMessage (ctx: TriggerCtx, message: Message, cardType: CardT async function processCollaborator ( ctx: TriggerCtx, + cardId: CardID, cardType: CardType, - message: Message, + messageId: MessageID, + date: Date, collaborator: AccountID, creatorAccount?: AccountID, context?: NotificationContext -): Promise { - const result: RequestEvent[] = [] +): Promise { + const result: Event[] = [] const isOwn = creatorAccount === collaborator - const { contextId, events } = await createOrUpdateContext(ctx, message, collaborator, isOwn, context) + const { contextId, events } = await createOrUpdateContext(ctx, cardId, date, collaborator, isOwn, context) if (!isOwn) { result.push({ - type: LabelRequestEventType.CreateLabel, + type: LabelEventType.CreateLabel, account: collaborator, labelId: NewMessageLabelID, - cardId: message.cardId, + cardId, cardType, - date: message.created, - socialId: message.creator + date }) } @@ -239,37 +269,38 @@ async function processCollaborator ( if (contextId == null || isOwn) return result result.push({ - type: NotificationRequestEventType.CreateNotification, + type: NotificationEventType.CreateNotification, notificationType: NotificationType.Message, account: collaborator, contextId, - cardId: message.cardId, - messageId: message.id, - messageCreated: message.created, - date: message.created, - socialId: message.creator + cardId, + messageId, + messageCreated: date, + date, + read: date.getTime() < (context?.lastView?.getTime() ?? 0) }) return result } async function createOrUpdateContext ( ctx: TriggerCtx, - message: Message, + cardId: CardID, + date: Date, collaborator: AccountID, isOwn: boolean, context?: NotificationContext ): Promise<{ contextId: ContextID | undefined - events: RequestEvent[] + events: Event[] }> { if (context == null) { const contextId = await createContext( ctx, collaborator, - message.cardId, - message.created, - isOwn ? message.created : undefined, - isOwn ? undefined : message.created + cardId, + date, + isOwn ? date : undefined, + isOwn ? undefined : date ) return { @@ -278,23 +309,21 @@ async function createOrUpdateContext ( } } - const lastUpdate = - context.lastUpdate == null || message.created > context.lastUpdate ? message.created : context.lastUpdate - const lastView = isOwn && isContextRead(context) ? message.created : undefined + const lastUpdate = context.lastUpdate == null || date > context.lastUpdate ? date : context.lastUpdate + const lastView = isOwn && isContextRead(context) ? date : undefined return { contextId: context.id, events: [ { - type: NotificationRequestEventType.UpdateNotificationContext, + type: NotificationEventType.UpdateNotificationContext, contextId: context.id, account: collaborator, updates: { lastView, lastUpdate, - lastNotify: isOwn ? undefined : message.created + lastNotify: isOwn ? 
undefined : date }, - socialId: message.creator, date: new Date() } ] @@ -311,13 +340,12 @@ async function createContext ( ): Promise { try { const result = (await ctx.execute({ - type: NotificationRequestEventType.CreateNotificationContext, + type: NotificationEventType.CreateNotificationContext, account, cardId, lastUpdate, lastView: lastView ?? new Date(lastUpdate.getTime() - 1), lastNotify, - socialId: 'core:account:System' as SocialID, date: new Date() })) as CreateNotificationContextResult diff --git a/packages/server/src/triggers/card.ts b/packages/server/src/triggers/card.ts index d8a4cf9b077..00db7900343 100644 --- a/packages/server/src/triggers/card.ts +++ b/packages/server/src/triggers/card.ts @@ -14,19 +14,19 @@ // import { - type CardRemovedEvent, - CardResponseEventType, - type CardTypeUpdatedEvent, - MessageRequestEventType, - NotificationRequestEventType, - type RequestEvent + CardEventType, + MessageEventType, + NotificationEventType, + type Event, + UpdateCardTypeEvent, + RemoveCardEvent } from '@hcengineering/communication-sdk-types' -import { type ActivityTypeUpdate, ActivityUpdateType, MessageType, SocialID } from '@hcengineering/communication-types' +import { type ActivityTypeUpdate, ActivityUpdateType, MessageType } from '@hcengineering/communication-types' -import type { TriggerCtx, TriggerFn, Triggers } from '../types' +import type { Enriched, TriggerCtx, TriggerFn, Triggers } from '../types' import { getNameBySocialID } from './utils' -async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: CardTypeUpdatedEvent): Promise { +async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: UpdateCardTypeEvent): Promise { const updateDate: ActivityTypeUpdate = { type: ActivityUpdateType.Type, newType: event.cardType @@ -36,7 +36,7 @@ async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: CardTypeU return [ { - type: MessageRequestEventType.CreateMessage, + type: MessageEventType.CreateMessage, messageType: MessageType.Activity, cardId: event.cardId, cardType: event.cardType, @@ -51,38 +51,55 @@ async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: CardTypeU ] } -async function onCardTypeUpdates (ctx: TriggerCtx, event: CardTypeUpdatedEvent): Promise { +async function onCardTypeUpdates (ctx: TriggerCtx, event: Enriched): Promise { await ctx.db.updateCollaborators({ card: event.cardId }, { cardType: event.cardType }) await ctx.db.updateLabels(event.cardId, { cardType: event.cardType }) - await ctx.db.updateThread(event.cardId, { threadType: event.cardType }) - return [] + + const thread = await ctx.db.findThread(event.cardId) + if (thread === undefined) return [] + + return [ + { + type: MessageEventType.ThreadPatch, + cardId: thread.cardId, + messageId: thread.messageId, + operation: { + opcode: 'update', + threadId: thread.threadId, + updates: { + threadType: event.cardType + } + }, + socialId: event.socialId, + date: event.date + } + ] } -async function removeCardCollaborators (ctx: TriggerCtx, event: CardRemovedEvent): Promise { +async function removeCardCollaborators (ctx: TriggerCtx, event: UpdateCardTypeEvent): Promise { await ctx.db.removeCollaborators(event.cardId, [], true) return [] } -async function removeCardLabels (ctx: TriggerCtx, event: CardRemovedEvent): Promise { +async function removeCardLabels (ctx: TriggerCtx, event: UpdateCardTypeEvent): Promise { await ctx.db.removeLabels({ cardId: event.cardId }) return [] } -async function removeCardThreads (ctx: TriggerCtx, event: CardRemovedEvent): Promise 
{ +async function removeCardThreads (ctx: TriggerCtx, event: RemoveCardEvent): Promise { await ctx.db.removeThreads({ cardId: event.cardId }) await ctx.db.removeThreads({ threadId: event.cardId }) return [] } -async function removeNotificationContexts (ctx: TriggerCtx, event: CardRemovedEvent): Promise { - const result: RequestEvent[] = [] +async function removeNotificationContexts (ctx: TriggerCtx, event: RemoveCardEvent): Promise { + const result: Event[] = [] const contexts = await ctx.db.findNotificationContexts({ card: event.cardId }) for (const context of contexts) { result.push({ - type: NotificationRequestEventType.RemoveNotificationContext, + type: NotificationEventType.RemoveNotificationContext, contextId: context.id, account: context.account, - socialId: 'core:account:System' as SocialID, date: new Date() }) } @@ -90,20 +107,12 @@ async function removeNotificationContexts (ctx: TriggerCtx, event: CardRemovedEv } const triggers: Triggers = [ - ['on_card_type_updates', CardResponseEventType.CardTypeUpdated, onCardTypeUpdates as TriggerFn], - [ - 'create_activity_on_card_type_updates', - CardResponseEventType.CardTypeUpdated, - createActivityOnCardTypeUpdate as TriggerFn - ], - ['remove_collaborators_on_card_removed', CardResponseEventType.CardRemoved, removeCardCollaborators as TriggerFn], - ['remove_labels_on_card_removed', CardResponseEventType.CardRemoved, removeCardLabels as TriggerFn], - ['remove_threads_on_card_removed', CardResponseEventType.CardRemoved, removeCardThreads as TriggerFn], - [ - 'remove_notification_contexts_on_card_removed', - CardResponseEventType.CardRemoved, - removeNotificationContexts as TriggerFn - ] + ['on_card_type_updates', CardEventType.UpdateCardType, onCardTypeUpdates as TriggerFn], + ['create_activity_on_card_type_updates', CardEventType.UpdateCardType, createActivityOnCardTypeUpdate as TriggerFn], + ['remove_collaborators_on_card_removed', CardEventType.RemoveCard, removeCardCollaborators as TriggerFn], + ['remove_labels_on_card_removed', CardEventType.RemoveCard, removeCardLabels as TriggerFn], + ['remove_threads_on_card_removed', CardEventType.RemoveCard, removeCardThreads as TriggerFn], + ['remove_notification_contexts_on_card_removed', CardEventType.RemoveCard, removeNotificationContexts as TriggerFn] ] export default triggers diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index 634d2e2fe0a..24cadddef7d 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -14,119 +14,55 @@ // import { - type BlobAttachedEvent, - type BlobDetachedEvent, - type MessageCreatedEvent, - MessageRequestEventType, - MessageResponseEventType, - type MessagesGroupCreatedEvent, - NotificationRequestEventType, - PatchCreatedEvent, - type RequestEvent, - type ThreadAttachedEvent + CreateMessageEvent, + CreateMessagesGroupEvent, + type Event, + MessageEventType, + NotificationEventType, + PatchEvent, + RemovePatchEvent, + ThreadPatchEvent } from '@hcengineering/communication-sdk-types' -import { - type AttachBlobPatchData, - type CardID, - type Message, - MessageType, - PatchType -} from '@hcengineering/communication-types' +import { type CardID, type Message, MessageType } from '@hcengineering/communication-types' import { generateToken } from '@hcengineering/server-token' import { type AccountUuid, concatLink, systemAccountUuid } from '@hcengineering/core' import { extractReferences } from '@hcengineering/text-core' import { markdownToMarkup } from 
'@hcengineering/text-markdown' -import { generateMessageId } from '@hcengineering/communication-cockroach' -import type { TriggerCtx, TriggerFn, Triggers } from '../types' +import type { Enriched, TriggerCtx, TriggerFn, Triggers } from '../types' import { findAccount } from '../utils' -import { findMessageInFiles } from './utils' +import { findMessage } from './utils' +import { generateMessageId } from '../messageId' -async function onMessagesGroupCreated (ctx: TriggerCtx, event: MessagesGroupCreatedEvent): Promise { +async function onMessagesGroupCreated (ctx: TriggerCtx, event: CreateMessagesGroupEvent): Promise { ctx.registeredCards.delete(event.group.cardId) return [] } -async function onMessageRemoved (ctx: TriggerCtx, event: PatchCreatedEvent): Promise { - if (event.patch.type !== PatchType.remove) return [] +async function onMessageRemoved (ctx: TriggerCtx, event: Enriched): Promise { const { cardId } = event const thread = await ctx.db.findThread(cardId) if (thread === undefined) return [] - const result: RequestEvent[] = [] - - if (!(await ctx.db.isMessageInDb(thread.cardId, thread.messageId))) { - result.push({ - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.updateThread, - cardId: thread.cardId, - messageId: thread.messageId, - data: { threadId: thread.threadId, threadType: thread.threadType, repliesCountOp: 'decrement' }, - socialId: event.patch.creator, - date: event.patch.created - }) - } - - result.push({ - type: MessageRequestEventType.UpdateThread, - cardId: thread.cardId, - messageId: thread.messageId, - threadId: thread.threadId, - updates: { - repliesCountOp: 'decrement' - }, - date: event.patch.created, - socialId: event.patch.creator - }) - - return result -} - -async function onBlobAttached (ctx: TriggerCtx, event: BlobAttachedEvent): Promise { - const inDb = await ctx.db.isMessageInDb(event.cardId, event.messageId) - if (inDb) return [] - - const { blob } = event - const patchData: AttachBlobPatchData = { - blobId: blob.blobId, - contentType: blob.contentType, - fileName: blob.fileName, - size: blob.size, - metadata: blob.metadata - } - - return [ - { - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.attachBlob, - cardId: event.cardId, - messageId: event.messageId, - data: patchData, - socialId: blob.creator, - date: blob.created - } - ] -} - -async function onBlobDetached (ctx: TriggerCtx, event: BlobDetachedEvent): Promise { - const inDb = await ctx.db.isMessageInDb(event.cardId, event.messageId) - if (inDb) return [] - const { blobId } = event - return [ { - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.detachBlob, - cardId: event.cardId, - messageId: event.messageId, - data: { blobId }, - socialId: event.socialId, - date: event.date + type: MessageEventType.ThreadPatch, + cardId: thread.cardId, + messageId: thread.messageId, + operation: { + opcode: 'update', + threadId: thread.threadId, + updates: { + repliesCountOp: 'decrement' + } + }, + date: event.date, + socialId: event.socialId } ] } -async function registerCard (ctx: TriggerCtx, event: PatchCreatedEvent): Promise { +async function registerCard (ctx: TriggerCtx, event: PatchEvent): Promise { const { workspace, metadata } = ctx const card: CardID = event.cardId @@ -149,17 +85,17 @@ async function registerCard (ctx: TriggerCtx, event: PatchCreatedEvent): Promise return [] } -async function addCollaborators (ctx: TriggerCtx, event: MessageCreatedEvent): Promise { - const { creator, type } = event.message - if (type === MessageType.Activity) return [] 
- const account = await findAccount(ctx, creator) +async function addCollaborators (ctx: TriggerCtx, event: Enriched): Promise { + const { messageType, socialId, content, cardId, cardType, date } = event + if (messageType === MessageType.Activity) return [] + const account = await findAccount(ctx, socialId) const collaborators: AccountUuid[] = [] if (account !== undefined) { collaborators.push(account) } - const markup = markdownToMarkup(event.message.content) + const markup = markdownToMarkup(content) const references = extractReferences(markup) const personIds = references .filter((it) => ['contact:class:Person', 'contact:mixin:Employee'].includes(it.objectClass)) @@ -175,134 +111,110 @@ async function addCollaborators (ctx: TriggerCtx, event: MessageCreatedEvent): P return [ { - type: NotificationRequestEventType.AddCollaborators, - cardId: event.message.cardId, - cardType: event.cardType, + type: NotificationEventType.AddCollaborators, + cardId, + cardType, collaborators, - socialId: event.message.creator, - date: new Date(event.message.created.getTime() - 1) + socialId, + date: new Date(date.getTime() - 1) } ] } -async function addThreadReply (ctx: TriggerCtx, event: MessageCreatedEvent): Promise { - if (event.message.type !== MessageType.Message || ctx.derived) { +async function addThreadReply (ctx: TriggerCtx, event: Enriched): Promise { + if (event.messageType !== MessageType.Message || event.extra?.threadRoot === true) { return [] } - const { message } = event - const thread = await ctx.db.findThread(message.cardId) + const { cardId, socialId, date } = event + const thread = await ctx.db.findThread(cardId) if (thread === undefined) return [] - const result: RequestEvent[] = [] - if (!(await ctx.db.isMessageInDb(thread.cardId, thread.messageId))) { - result.push({ - type: MessageRequestEventType.CreatePatch, - patchType: PatchType.updateThread, + + return [ + { + type: MessageEventType.ThreadPatch, cardId: thread.cardId, messageId: thread.messageId, - data: { threadId: thread.threadId, threadType: thread.threadType, repliesCountOp: 'increment' }, - socialId: event.message.creator, - date: event.message.created - }) - } - - result.push({ - type: MessageRequestEventType.UpdateThread, - cardId: thread.cardId, - messageId: thread.messageId, - threadId: thread.threadId, - updates: { - lastReply: message.created, - repliesCountOp: 'increment' - }, - socialId: event.message.creator, - date: message.created - }) - - return result + operation: { + opcode: 'update', + threadId: thread.threadId, + updates: { + lastReply: date, + repliesCountOp: 'increment' + } + }, + socialId, + date + } + ] } -async function onThreadAttached (ctx: TriggerCtx, event: ThreadAttachedEvent): Promise { - let message: Message | undefined = ( - await ctx.db.findMessages({ - card: event.thread.cardId, - id: event.thread.messageId, - limit: 1, +async function onThreadAttached (ctx: TriggerCtx, event: Enriched): Promise { + if (event.operation.opcode !== 'attach') return [] + const message: Message | undefined = await findMessage( + ctx.db, + ctx.metadata.filesUrl, + ctx.workspace, + event.cardId, + event.messageId, + { files: true - }) - )[0] + } + ) - const result: RequestEvent[] = [] - if (message === undefined) { - message = await findMessageInFiles( - ctx.db, - ctx.metadata.filesUrl, - ctx.workspace, - event.thread.cardId, - event.thread.messageId - ) + if (message === undefined) return [] - if (message !== undefined) { - result.push({ - type: MessageRequestEventType.CreatePatch, - patchType: 
PatchType.updateThread, - cardId: event.thread.cardId, - messageId: event.thread.messageId, - data: { threadId: event.thread.threadId, threadType: event.thread.threadType }, - socialId: message.creator, - date: message.created - }) - } - } + const result: Event[] = [] - if (message === undefined || message.type === MessageType.Activity || message.extra?.threadRoot === true) { + if (message.type === MessageType.Activity || message.extra?.threadRoot === true) { return [] } - const messageId = generateMessageId(true) + const messageId = generateMessageId() result.push({ messageId, - type: MessageRequestEventType.CreateMessage, + type: MessageEventType.CreateMessage, messageType: message.type, - cardId: event.thread.threadId, - cardType: event.thread.threadType, + cardId: event.operation.threadId, + cardType: event.operation.threadType, content: message.content, extra: { ...message.extra, threadRoot: true }, socialId: message.creator, date: message.created }) - for (const blob of message.blobs) { - result.push({ - type: MessageRequestEventType.AttachBlob, - cardId: event.thread.threadId, - messageId, - blobData: { - blobId: blob.blobId, - contentType: blob.contentType, - fileName: blob.fileName, - size: blob.size, - metadata: blob.metadata - }, - socialId: blob.creator, - date: blob.created - }) - } + result.push({ + type: MessageEventType.BlobPatch, + cardId: event.operation.threadId, + messageId, + operations: [ + { + opcode: 'attach', + blobs: message.blobs + } + ], + socialId: message.creator, + date: message.created + }) return result } const triggers: Triggers = [ - ['add_collaborators_on_message_created', MessageResponseEventType.MessageCreated, addCollaborators as TriggerFn], - ['add_thread_reply_on_message_created', MessageResponseEventType.MessageCreated, addThreadReply as TriggerFn], - ['register_card_on_message_created', MessageResponseEventType.MessageCreated, registerCard as TriggerFn], - ['register_card_on_patch', MessageResponseEventType.PatchCreated, registerCard as TriggerFn], - ['on_messages_group_created', MessageResponseEventType.MessagesGroupCreated, onMessagesGroupCreated as TriggerFn], - ['remove_reply_on_messages_removed', MessageResponseEventType.PatchCreated, onMessageRemoved as TriggerFn], - ['on_thread_created', MessageResponseEventType.ThreadAttached, onThreadAttached as TriggerFn], - ['on_blob_attached', MessageResponseEventType.BlobAttached, onBlobAttached as TriggerFn], - ['on_blob_detached', MessageResponseEventType.BlobDetached, onBlobDetached as TriggerFn] + ['add_collaborators_on_message_created', MessageEventType.CreateMessage, addCollaborators as TriggerFn], + ['add_thread_reply_on_message_created', MessageEventType.CreateMessage, addThreadReply as TriggerFn], + ['register_card_on_message_created', MessageEventType.CreateMessage, registerCard as TriggerFn], + ['register_card_on_update_patch', MessageEventType.UpdatePatch, registerCard as TriggerFn], + ['register_card_on_remove_patch', MessageEventType.RemovePatch, registerCard as TriggerFn], + ['register_card_on_reaction_patch', MessageEventType.ReactionPatch, registerCard as TriggerFn], + ['register_card_on_blob_patch', MessageEventType.BlobPatch, registerCard as TriggerFn], + ['register_card_on_link_preview_patch', MessageEventType.LinkPreviewPatch, registerCard as TriggerFn], + ['register_card_on_thread_patch', MessageEventType.ThreadPatch, registerCard as TriggerFn], + + ['on_messages_group_created', MessageEventType.CreateMessagesGroup, onMessagesGroupCreated as TriggerFn], + 
['remove_reply_on_messages_removed', MessageEventType.RemovePatch, onMessageRemoved as TriggerFn], + ['on_thread_created', MessageEventType.ThreadPatch, onThreadAttached as TriggerFn] ] export default triggers diff --git a/packages/server/src/triggers/notification.ts b/packages/server/src/triggers/notification.ts index 2ac5b7b1ced..46091f75b14 100644 --- a/packages/server/src/triggers/notification.ts +++ b/packages/server/src/triggers/notification.ts @@ -14,17 +14,15 @@ // import { - type AddedCollaboratorsEvent, - LabelRequestEventType, - MessageRequestEventType, - MessageResponseEventType, - type NotificationContextRemovedEvent, - type NotificationContextUpdatedEvent, - NotificationRequestEventType, - NotificationResponseEventType, - PatchCreatedEvent, - type RemovedCollaboratorsEvent, - type RequestEvent + AddCollaboratorsEvent, + LabelEventType, + MessageEventType, + NotificationEventType, + type Event, + UpdateNotificationContextEvent, + RemoveNotificationContextEvent, + RemovePatchEvent, + RemoveCollaboratorsEvent } from '@hcengineering/communication-sdk-types' import { type ActivityCollaboratorsUpdate, @@ -32,8 +30,6 @@ import { MessageType, NewMessageLabelID, NotificationType, - PatchType, - SocialID, SubscriptionLabelID } from '@hcengineering/communication-types' import { groupByArray } from '@hcengineering/core' @@ -42,21 +38,20 @@ import type { TriggerCtx, TriggerFn, Triggers } from '../types' import { findAccount } from '../utils' import { getAddCollaboratorsMessageContent, getRemoveCollaboratorsMessageContent } from './utils' -async function onAddedCollaborators (ctx: TriggerCtx, event: AddedCollaboratorsEvent): Promise { +async function onAddedCollaborators (ctx: TriggerCtx, event: AddCollaboratorsEvent): Promise { const { cardId, cardType, collaborators } = event if (collaborators.length === 0) return [] - const result: RequestEvent[] = [] + const result: Event[] = [] for (const collaborator of collaborators) { result.push({ - type: LabelRequestEventType.CreateLabel, + type: LabelEventType.CreateLabel, cardId, cardType, account: collaborator, labelId: SubscriptionLabelID, - date: event.date, - socialId: event.socialId + date: event.date }) } @@ -68,7 +63,7 @@ async function onAddedCollaborators (ctx: TriggerCtx, event: AddedCollaboratorsE removed: [] } result.push({ - type: MessageRequestEventType.CreateMessage, + type: MessageEventType.CreateMessage, messageType: MessageType.Activity, cardId, cardType, @@ -83,31 +78,29 @@ async function onAddedCollaborators (ctx: TriggerCtx, event: AddedCollaboratorsE return result } -async function onRemovedCollaborators (ctx: TriggerCtx, event: RemovedCollaboratorsEvent): Promise { +async function onRemovedCollaborators (ctx: TriggerCtx, event: RemoveCollaboratorsEvent): Promise { const { cardId, collaborators } = event if (collaborators.length === 0) return [] - const result: RequestEvent[] = [] + const result: Event[] = [] const contexts = await ctx.db.findNotificationContexts({ card: cardId, account: event.collaborators }) for (const collaborator of collaborators) { const context = contexts.find((it) => it.account === collaborator) result.push({ - type: LabelRequestEventType.RemoveLabel, + type: LabelEventType.RemoveLabel, cardId, account: collaborator, labelId: SubscriptionLabelID, - date: event.date, - socialId: event.socialId + date: event.date }) if (context !== undefined && context.lastUpdate.getTime() > context.lastView.getTime()) { result.push({ - type: NotificationRequestEventType.UpdateNotificationContext, + type: 
NotificationEventType.UpdateNotificationContext, contextId: context.id, account: collaborator, updates: { lastView: context.lastUpdate }, - socialId: event.socialId, date: new Date() }) } @@ -120,7 +113,7 @@ async function onRemovedCollaborators (ctx: TriggerCtx, event: RemovedCollaborat } const account = await findAccount(ctx, event.socialId) result.push({ - type: MessageRequestEventType.CreateMessage, + type: MessageEventType.CreateMessage, messageType: MessageType.Activity, cardId, cardType: event.cardType, @@ -136,69 +129,57 @@ async function onRemovedCollaborators (ctx: TriggerCtx, event: RemovedCollaborat return result } -async function onNotificationContextUpdated ( - ctx: TriggerCtx, - event: NotificationContextUpdatedEvent -): Promise { - const { contextId, lastView } = event +async function onNotificationContextUpdated (ctx: TriggerCtx, event: UpdateNotificationContextEvent): Promise { + const { contextId, updates } = event + const { lastView } = updates if (lastView == null) return [] const context = (await ctx.db.findNotificationContexts({ id: contextId }))[0] if (context == null) return [] - const result: RequestEvent[] = [] + const result: Event[] = [] if (context.lastView >= context.lastUpdate) { result.push({ - type: LabelRequestEventType.RemoveLabel, + type: LabelEventType.RemoveLabel, labelId: NewMessageLabelID, cardId: context.cardId, account: context.account, - date: new Date(), - socialId: 'core:account:System' as SocialID + date: new Date() }) } result.push({ - type: NotificationRequestEventType.UpdateNotification, + type: NotificationEventType.UpdateNotification, + account: context.account, + contextId: context.id, query: { - account: context.account, - context: context.id, type: NotificationType.Message, - created: { - lessOrEqual: context.lastView - } + untilDate: context.lastView }, updates: { read: true - }, - socialId: 'core:account:System' as SocialID + } }) return result } -async function onNotificationContextRemoved ( - ctx: TriggerCtx, - event: NotificationContextRemovedEvent -): Promise { - const { context } = event - - const result: RequestEvent[] = [] - - result.push({ - type: LabelRequestEventType.RemoveLabel, - labelId: NewMessageLabelID, - cardId: context.cardId, - account: context.account, - date: new Date(), - socialId: 'core:account:System' as SocialID - }) +async function onNotificationContextRemoved (ctx: TriggerCtx, event: RemoveNotificationContextEvent): Promise { + const context = ctx.removedContexts.get(event.contextId) + if (context == null) return [] - return result + return [ + { + type: LabelEventType.RemoveLabel, + labelId: NewMessageLabelID, + cardId: context.cardId, + account: context.account, + date: event.date + } + ] } -async function onMessagesRemoved (ctx: TriggerCtx, event: PatchCreatedEvent): Promise { - if (event.patch.type !== PatchType.remove) return [] +async function onMessagesRemoved (ctx: TriggerCtx, event: RemovePatchEvent): Promise { const notifications = await ctx.db.findNotifications({ card: event.cardId, messageId: event.messageId @@ -206,12 +187,12 @@ async function onMessagesRemoved (ctx: TriggerCtx, event: PatchCreatedEvent): Pr if (notifications.length === 0) return [] - const result: RequestEvent[] = [] + const result: Event[] = [] const byContextId = groupByArray(notifications, (it) => it.contextId) for (const [context, ns] of byContextId.entries()) { result.push({ - type: NotificationRequestEventType.RemoveNotifications, + type: NotificationEventType.RemoveNotifications, contextId: context, account: 
ns[0].account, ids: notifications.map((it) => it.id) @@ -224,17 +205,17 @@ async function onMessagesRemoved (ctx: TriggerCtx, event: PatchCreatedEvent): Pr const triggers: Triggers = [ [ 'on_notification_context_updated', - NotificationResponseEventType.NotificationContextUpdated, + NotificationEventType.UpdateNotificationContext, onNotificationContextUpdated as TriggerFn ], [ 'on_notification_context_removed', - NotificationResponseEventType.NotificationContextRemoved, + NotificationEventType.RemoveNotificationContext, onNotificationContextRemoved as TriggerFn ], - ['on_added_collaborators', NotificationResponseEventType.AddedCollaborators, onAddedCollaborators as TriggerFn], - ['on_removed_collaborators', NotificationResponseEventType.RemovedCollaborators, onRemovedCollaborators as TriggerFn], - ['remove_notifications_on_messages_removed', MessageResponseEventType.PatchCreated, onMessagesRemoved as TriggerFn] + ['on_added_collaborators', NotificationEventType.AddCollaborators, onAddedCollaborators as TriggerFn], + ['on_removed_collaborators', NotificationEventType.RemoveCollaborators, onRemovedCollaborators as TriggerFn], + ['remove_notifications_on_messages_removed', MessageEventType.RemovePatch, onMessagesRemoved as TriggerFn] ] export default triggers diff --git a/packages/server/src/triggers/utils.ts b/packages/server/src/triggers/utils.ts index 8547cbc4f48..975c9324a5f 100644 --- a/packages/server/src/triggers/utils.ts +++ b/packages/server/src/triggers/utils.ts @@ -35,9 +35,15 @@ export async function findMessage ( filesUrl: string, workspace: WorkspaceID, card: CardID, - id: MessageID + id: MessageID, + ops?: { + files?: boolean + replies?: boolean + links?: boolean + reactions?: boolean + } ): Promise { - const message = (await db.findMessages({ card, id, limit: 1, files: true }))[0] + const message = (await db.findMessages({ card, id, limit: 1, ...ops }))[0] if (message !== undefined) { return message } diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 7d1f977dcd2..dfb3a270368 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -17,15 +17,13 @@ import type { Account, MeasureContext } from '@hcengineering/core' import type { DbAdapter, EventResult, - RequestEvent, - ResponseEvent, - ResponseEventType, - SessionData + Event, + SessionData, EventType } from '@hcengineering/communication-sdk-types' import type { AccountID, CardID, - Collaborator, + Collaborator, ContextID, FindCollaboratorsParams, FindLabelsParams, FindMessagesGroupsParams, @@ -72,11 +70,11 @@ export interface Middleware { findLabels: (session: SessionData, params: FindLabelsParams, queryId?: QueryId) => Promise findCollaborators: (session: SessionData, params: FindCollaboratorsParams) => Promise - event: (session: SessionData, event: Enriched, derived: boolean) => Promise + event: (session: SessionData, event: Enriched, derived: boolean) => Promise unsubscribeQuery: (session: SessionData, queryId: number) => void - response: (session: SessionData, event: ResponseEvent, derived: boolean) => Promise + response: (session: SessionData, event: Enriched, derived: boolean) => Promise closeSession: (sessionId: string) => void close: () => void @@ -88,6 +86,7 @@ export interface MiddlewareContext { metadata: Metadata registeredCards: Set accountBySocialID: Map + removedContexts: Map derived?: Middleware head?: Middleware @@ -105,14 +104,14 @@ export interface TriggerCtx { account: Account registeredCards: Set accountBySocialID: Map + removedContexts: Map derived: 
boolean - execute: (event: RequestEvent) => Promise + execute: (event: Event) => Promise } -export type TriggerFn = (ctx: TriggerCtx, event: ResponseEvent) => Promise -export type Triggers = [string, ResponseEventType, TriggerFn][] +export type TriggerFn = (ctx: TriggerCtx, event: Enriched) => Promise +export type Triggers = [string, EventType, TriggerFn][] export type Enriched = T & { - socialId: SocialID date: Date } diff --git a/packages/shared/src/index.ts b/packages/shared/src/index.ts index d871556b299..5640eb8d329 100644 --- a/packages/shared/src/index.ts +++ b/packages/shared/src/index.ts @@ -16,3 +16,4 @@ export * from './retry' export * from './patch' export * from './utils' +export * from './processor' diff --git a/packages/shared/src/patch.ts b/packages/shared/src/patch.ts index 1636c5cea50..8c04113ab28 100644 --- a/packages/shared/src/patch.ts +++ b/packages/shared/src/patch.ts @@ -14,14 +14,21 @@ // import { - PatchType, - type BlobID, + BlobData, + BlobID, + BlobPatch, + CardID, + CardType, + LinkPreview, + LinkPreviewData, + LinkPreviewID, + LinkPreviewPatch, type Message, type Patch, - type Reaction, - type SocialID, - type AttachBlobPatchData, - type UpdateThreadPatchData + PatchType, + ReactionPatch, + SocialID, + ThreadPatch } from '@hcengineering/communication-types' export function applyPatches (message: Message, patches: Patch[], allowedPatchTypes: PatchType[] = []): Message { @@ -34,7 +41,10 @@ export function applyPatches (message: Message, patches: Patch[], allowedPatchTy } export function applyPatch (message: Message, patch: Patch, allowedPatchTypes: PatchType[] = []): Message { - if ((allowedPatchTypes.length > 0 && !allowedPatchTypes.includes(patch.type)) || message.removed) return message + if ((allowedPatchTypes.length > 0 && !allowedPatchTypes.includes(patch.type)) || message.removed) { + return message + } + switch (patch.type) { case PatchType.update: { if (patch.created.getTime() < (message.edited?.getTime() ?? 0)) { @@ -47,7 +57,7 @@ export function applyPatch (message: Message, patch: Patch, allowedPatchTypes: P extra: patch.data.extra ?? 
message.extra } } - case PatchType.remove: + case PatchType.remove: { return { ...message, content: '', @@ -56,34 +66,62 @@ export function applyPatch (message: Message, patch: Patch, allowedPatchTypes: P reactions: [], removed: true } - case PatchType.setReaction: - return setReaction(message, { - reaction: patch.data.reaction, - creator: patch.creator, - created: patch.created - }) - case PatchType.removeReaction: - return removeReaction(message, patch.data.reaction, patch.creator) - case PatchType.attachBlob: - return attachBlob(message, patch.data, patch.created, patch.creator) - case PatchType.detachBlob: - return detachBlob(message, patch.data.blobId) - case PatchType.updateThread: - return updateThread(message, patch.data, patch.created) + } + case PatchType.reaction: + return patchReactions(message, patch) + case PatchType.blob: + return patchBlobs(message, patch) + case PatchType.linkPreview: + return patchLinkPreviews(message, patch) + case PatchType.thread: + return patchThread(message, patch) } +} +function patchBlobs (message: Message, patch: BlobPatch): Message { + if (patch.data.operation === 'attach') { + return attachBlobs(message, patch.data.blobs, patch.created, patch.creator) + } else if (patch.data.operation === 'detach') { + return detachBlobs(message, patch.data.blobIds) + } else if (patch.data.operation === 'set') { + return setBlobs(message, patch.data.blobs, patch.created, patch.creator) + } + return message +} + +function patchLinkPreviews (message: Message, patch: LinkPreviewPatch): Message { + if (patch.data.operation === 'attach') { + return attachLinkPreviews(message, patch.data.previews, patch.created, patch.creator) + } else if (patch.data.operation === 'detach') { + return detachLinkPreviews(message, patch.data.previewIds) + } else if (patch.data.operation === 'set') { + return setLinkPreviews(message, patch.data.previews, patch.created, patch.creator) + } + return message +} + +function patchReactions (message: Message, patch: ReactionPatch): Message { + if (patch.data.operation === 'add') { + return setReaction(message, patch.data.reaction, patch.creator, patch.created) + } else if (patch.data.operation === 'remove') { + return removeReaction(message, patch.data.reaction, patch.creator) + } return message } -function setReaction (message: Message, reaction: Reaction): Message { - const isExist = message.reactions.some((it) => it.reaction === reaction.reaction && it.creator === reaction.creator) +function setReaction (message: Message, reaction: string, creator: SocialID, created: Date): Message { + const isExist = message.reactions.some((it) => it.reaction === reaction && it.creator === creator) if (isExist) return message - message.reactions.push(reaction) + message.reactions.push({ + reaction, + creator, + created + }) return message } -function removeReaction (message: Message, emoji: string, creator: SocialID): Message { - const reactions = message.reactions.filter((it) => it.reaction !== emoji || it.creator !== creator) +function removeReaction (message: Message, reaction: string, creator: SocialID): Message { + const reactions = message.reactions.filter((it) => it.reaction !== reaction || it.creator !== creator) if (reactions.length === message.reactions.length) return message return { @@ -92,51 +130,162 @@ function removeReaction (message: Message, emoji: string, creator: SocialID): Me } } -function updateThread (message: Message, data: UpdateThreadPatchData, created: Date): Message { - const thread = message.thread ?? 
{ - cardId: message.cardId, - messageId: message.id, - threadId: data.threadId, - threadType: data.threadType, - repliesCount: 0, - lastReply: created +function attachBlobs (message: Message, data: BlobData[], created: Date, creator: SocialID): Message { + const newBlobs = [] + for (const blob of data) { + const isExists = message.blobs.some((it) => it.blobId === blob.blobId) + if (isExists === undefined) continue + newBlobs.push({ + ...blob, + created, + creator + }) + } + + if (newBlobs.length === 0) return message + return { + ...message, + blobs: [...message.blobs, ...newBlobs] } +} - thread.threadId = data.threadId - thread.threadType = data.threadType +function detachBlobs (message: Message, blobIds: BlobID[]): Message { + const blobs = message.blobs.filter((it) => !blobIds.includes(it.blobId)) + if (blobs.length === message.blobs.length) return message - if (data.repliesCountOp === 'increment') { - thread.repliesCount = thread.repliesCount + 1 - thread.lastReply = created + return { + ...message, + blobs } +} - if (data.repliesCountOp === 'decrement') { - thread.repliesCount = Math.max(thread.repliesCount - 1, 0) +function setBlobs (message: Message, data: BlobData[], created: Date, creator: SocialID): Message { + if (data.length === 0) return message + return { + ...message, + blobs: data.map((it) => ({ + ...it, + created, + creator + })) } +} +function attachLinkPreviews ( + message: Message, + previews: (LinkPreviewData & { previewId: LinkPreviewID })[], + created: Date, + creator: SocialID +): Message { + const newPreviews: LinkPreview[] = [] + for (const preview of previews) { + if (message.linkPreviews.some((it) => it.id === preview.previewId)) continue + newPreviews.push({ + id: preview.previewId, + ...preview, + created, + creator + }) + } + + if (newPreviews.length === 0) return message return { ...message, - thread + linkPreviews: [...message.linkPreviews, ...newPreviews] } } -function attachBlob (message: Message, data: AttachBlobPatchData, created: Date, creator: SocialID): Message { - const isExists = message.blobs.some((it) => it.blobId === data.blobId) - if (isExists !== undefined) return message - message.blobs.push({ - ...data, - created, - creator - }) +function detachLinkPreviews (message: Message, previewIds: LinkPreviewID[]): Message { + const previews = message.linkPreviews.filter((it) => !previewIds.includes(it.id)) + if (previews.length === message.linkPreviews.length) return message + + return { + ...message, + linkPreviews: previews + } +} + +function setLinkPreviews ( + message: Message, + previews: (LinkPreviewData & { previewId: LinkPreviewID })[], + created: Date, + creator: SocialID +): Message { + if (previews.length === 0) return message + const newPreviews: LinkPreview[] = [] + for (const preview of previews) { + if (message.linkPreviews.some((it) => it.id === preview.previewId)) continue + newPreviews.push({ + id: preview.previewId, + ...preview, + created, + creator + }) + } + + return { + ...message, + linkPreviews: newPreviews + } +} + +function patchThread (message: Message, patch: ThreadPatch): Message { + if (patch.data.operation === 'attach') { + return attachThread(message, patch.data.threadId, patch.data.threadType) + } else if (patch.data.operation === 'update') { + return updateThread( + message, + patch.data.threadId, + patch.data.threadType, + patch.data.repliesCountOp, + patch.data.lastReply + ) + } return message } -function detachBlob (message: Message, blobId: BlobID): Message { - const blobs = message.blobs.filter((it) => 
it.blobId !== blobId) - if (blobs.length === message.blobs.length) return message +function attachThread (message: Message, threadId: CardID, threadType: CardType): Message { + if (message.thread !== undefined) return message + return { + ...message, + thread: { + cardId: message.cardId, + messageId: message.id, + threadId, + threadType, + repliesCount: 0, + lastReply: new Date() + } + } +} + +function updateThread ( + message: Message, + threadId: CardID, + threadType?: CardType, + repliesCountOp?: 'increment' | 'decrement', + lastReply?: Date +): Message { + if (repliesCountOp === undefined && lastReply === undefined) return message + if (message.thread === undefined) return message + if (message.thread.threadId !== threadId) return message + + let count = message.thread.repliesCount + if (repliesCountOp === 'increment') { + count = count + 1 + } + + if (repliesCountOp === 'decrement') { + count = Math.max(count - 1, 0) + } return { ...message, - blobs + thread: { + ...message.thread, + repliesCount: count, + threadType: threadType ?? message.thread.threadType, + lastReply: lastReply ?? message.thread.lastReply + } } } diff --git a/packages/shared/src/processor.ts b/packages/shared/src/processor.ts new file mode 100644 index 00000000000..f09e1fa917c --- /dev/null +++ b/packages/shared/src/processor.ts @@ -0,0 +1,282 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +import { + AttachBlobsPatchData, + AttachLinkPreviewsPatchData, + AttachThreadPatchData, + ContextID, + DetachBlobsPatchData, + DetachLinkPreviewsPatchData, + Message, + MessageID, + Notification, + NotificationContext, + NotificationID, + Patch, + PatchType, + SetBlobsPatchData, + SetLinkPreviewsPatchData, + UpdateThreadPatchData +} from '@hcengineering/communication-types' +import { + AttachBlobsOperation, + AttachLinkPreviewsOperation, + AttachThreadOperation, + CreateMessageEvent, + CreateNotificationContextEvent, + CreateNotificationEvent, + DetachBlobsOperation, + DetachLinkPreviewsOperation, + MessageEventType, + PatchEvent, + RemoveNotificationContextEvent, + SetBlobsOperation, + SetLinkPreviewsOperation, + UpdateNotificationContextEvent, + UpdateThreadOperation +} from '@hcengineering/communication-sdk-types' + +import { applyPatches } from './patch' + +// eslint-disable-next-line @typescript-eslint/no-extraneous-class +export class MessageProcessor { + static createFromEvent (event: CreateMessageEvent, id?: MessageID): Message { + const messageId = event.messageId ?? (id as MessageID) + if (messageId == null) throw new Error('Message id is required') + return { + id: messageId, + cardId: event.cardId, + type: event.messageType, + content: event.content, + extra: event.extra, + creator: event.socialId, + created: event.date ?? 
new Date(), + removed: false, + reactions: [], + blobs: [], + linkPreviews: [] + } + } + + static applyPatchEvent (message: Message, patchEvent: PatchEvent, allowedPatchTypes?: PatchType[]): Message { + const patches = this.eventToPatches(patchEvent).filter((it) => it.messageId === message.id) + return applyPatches(message, patches, allowedPatchTypes) + } + + static eventToPatches (event: PatchEvent): Patch[] { + switch (event.type) { + case MessageEventType.UpdatePatch: { + return [ + { + messageId: event.messageId, + type: PatchType.update, + creator: event.socialId, + created: event.date ?? new Date(), + data: { + content: event.content, + extra: event.extra + } + } + ] + } + case MessageEventType.RemovePatch: + return [ + { + messageId: event.messageId, + type: PatchType.remove, + creator: event.socialId, + created: event.date ?? new Date(), + data: {} + } + ] + case MessageEventType.ReactionPatch: + return [ + { + messageId: event.messageId, + type: PatchType.reaction, + creator: event.socialId, + created: event.date ?? new Date(), + data: { + operation: event.operation.opcode, + reaction: event.operation.reaction + } + } + ] + case MessageEventType.BlobPatch: + return event.operations + .map((it) => blobOperationToPatchData(it)) + .filter((x) => x != null) + .map((it) => ({ + messageId: event.messageId, + type: PatchType.blob, + creator: event.socialId, + created: event.date ?? new Date(), + data: it + })) + + case MessageEventType.LinkPreviewPatch: + return event.operations + .map((it) => linkPreviewOperationToPatchData(it)) + .filter((x) => x != null) + .map((it) => ({ + messageId: event.messageId, + type: PatchType.linkPreview, + creator: event.socialId, + created: event.date ?? new Date(), + data: it + })) + + case MessageEventType.ThreadPatch: { + const data = threadOperationToPatchData(event.operation) + if (data == null) return [] + return [ + { + messageId: event.messageId, + type: PatchType.thread, + creator: event.socialId, + created: event.date ?? new Date(), + data + } + ] + } + } + } +} + +// eslint-disable-next-line @typescript-eslint/no-extraneous-class +export class NotificationContextProcessor { + static createFromEvent (event: CreateNotificationContextEvent, id?: ContextID): NotificationContext { + const contextId: ContextID | undefined = event.contextId ?? id + + if (contextId == null) { + throw new Error('Notification context id is required') + } + return { + id: contextId, + cardId: event.cardId, + account: event.account, + lastView: event.lastView, + lastUpdate: event.lastUpdate, + lastNotify: event.lastNotify, + notifications: [] + } + } + + static updateFromEvent (context: NotificationContext, event: UpdateNotificationContextEvent): NotificationContext { + if (context.account !== event.account || context.id !== event.contextId) return context + return { + ...context, + lastView: event.updates.lastView ?? context.lastView, + lastUpdate: event.updates.lastUpdate ?? context.lastUpdate, + lastNotify: event.updates.lastNotify ?? 
context.lastNotify + } + } + + static removeFromEvent ( + context: NotificationContext, + event: RemoveNotificationContextEvent + ): NotificationContext | undefined { + if (context.account !== event.account || context.id !== event.contextId) return context + return undefined + } +} + +// eslint-disable-next-line @typescript-eslint/no-extraneous-class +export class NotificationProcessor { + static createFromEvent (event: CreateNotificationEvent, id?: NotificationID): Notification { + const notificationId: NotificationID | undefined = event.notificationId ?? (id as NotificationID) + + if (notificationId == null) { + throw new Error('Notification id is required') + } + return { + id: notificationId, + cardId: event.cardId, + contextId: event.contextId, + account: event.account, + type: event.notificationType, + read: event.read, + content: event.content ?? {}, + created: event.date ?? new Date(), + messageId: event.messageId, + messageCreated: event.messageCreated + } + } +} + +function blobOperationToPatchData ( + operation: AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation +): AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | undefined { + if (operation.opcode === 'attach') { + return { + operation: 'attach', + blobs: operation.blobs + } + } else if (operation.opcode === 'detach') { + return { + operation: 'detach', + blobIds: operation.blobIds + } + } else if (operation.opcode === 'set') { + return { + operation: 'set', + blobs: operation.blobs + } + } + + return undefined +} + +function linkPreviewOperationToPatchData ( + operation: AttachLinkPreviewsOperation | DetachLinkPreviewsOperation | SetLinkPreviewsOperation +): AttachLinkPreviewsPatchData | DetachLinkPreviewsPatchData | SetLinkPreviewsPatchData | undefined { + if (operation.opcode === 'attach') { + return { + operation: 'attach', + previews: operation.previews + } + } else if (operation.opcode === 'detach') { + return { + operation: 'detach', + previewIds: operation.previewIds + } + } else if (operation.opcode === 'set') { + return { + operation: 'set', + previews: operation.previews + } + } + + return undefined +} + +function threadOperationToPatchData ( + operation: AttachThreadOperation | UpdateThreadOperation +): AttachThreadPatchData | UpdateThreadPatchData | undefined { + if (operation.opcode === 'attach') { + return { + operation: 'attach', + threadId: operation.threadId, + threadType: operation.threadType + } + } else if (operation.opcode === 'update') { + return { + operation: 'update', + threadId: operation.threadId, + repliesCountOp: operation.updates.repliesCountOp, + lastReply: operation.updates.lastReply + } + } + return undefined +} diff --git a/packages/shared/src/utils.ts b/packages/shared/src/utils.ts index 4228f3fa637..4a839a6f91a 100644 --- a/packages/shared/src/utils.ts +++ b/packages/shared/src/utils.ts @@ -13,26 +13,35 @@ // limitations under the License. // -import type { MessageID } from '@hcengineering/communication-types' +import type { LinkPreviewID, MessageID } from '@hcengineering/communication-types' -const COUNTER_BITS = 8n +const COUNTER_BITS = 10n const RANDOM_BITS = 10n - const MAX_SEQUENCE = (1n << COUNTER_BITS) - 1n -const MAX_RANDOM = (1n << RANDOM_BITS) - 1n -const EXTERNAL_FLAG = 1n << 62n // sets bit 62 let counter = 0n -/** - * Generate 64-bit external MessageID and return it as string. - */ -export function generateMessageId (): MessageID { +function makeBigIntId (): bigint { const ts = BigInt(Date.now()) counter = counter < MAX_SEQUENCE ? 
counter + 1n : 0n - const random = BigInt(Math.floor(Math.random() * Number(MAX_RANDOM + 1n))) + const random = BigInt(Math.floor(Math.random() * Number((1n << RANDOM_BITS) - 1n))) + return (ts << (COUNTER_BITS + RANDOM_BITS)) | (counter << RANDOM_BITS) | random +} - const idBigInt = EXTERNAL_FLAG | (ts << (COUNTER_BITS + RANDOM_BITS)) | (counter << RANDOM_BITS) | random +function toBase64Url (bytes: Uint8Array): string { + let s = '' + for (const b of bytes) s += String.fromCharCode(b) + const base64 = typeof btoa === 'function' ? btoa(s) : Buffer.from(bytes).toString('base64') + return base64.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '') +} + +export function generateMessageId (): MessageID { + const idBig = makeBigIntId() + const buf = new Uint8Array(8) + new DataView(buf.buffer).setBigUint64(0, idBig, false) + return toBase64Url(buf) as MessageID +} - return idBigInt.toString() as MessageID +export function generateLinkPreviewId (): LinkPreviewID { + return makeBigIntId().toString() as LinkPreviewID } diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 0253f7a9f17..d60b1853f50 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -112,7 +112,7 @@ interface BasePatch { creator: SocialID created: Date - data: PatchData + data: Record } export interface UpdatePatch extends BasePatch { @@ -120,92 +120,103 @@ export interface UpdatePatch extends BasePatch { data: UpdatePatchData } +export interface UpdatePatchData { + type?: MessageType + content?: Markdown + extra?: MessageExtra +} + export interface RemovePatch extends BasePatch { type: PatchType.remove - // eslint-disable-next-line @typescript-eslint/ban-types - data: {} + data: RemovePatchData } -export interface SetReactionPatch extends BasePatch { - type: PatchType.setReaction - data: SetReactionPatchData +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface RemovePatchData {} + +export interface ReactionPatch extends BasePatch { + type: PatchType.reaction + data: AddReactionPatchData | RemoveReactionPatchData } -export interface RemoveReactionPatch extends BasePatch { - type: PatchType.removeReaction - data: RemoveReactionPatchData +export interface AddReactionPatchData { + operation: 'add' + reaction: string } -export interface AttachBlobPatch extends BasePatch { - type: PatchType.attachBlob - data: AttachBlobPatchData +export interface RemoveReactionPatchData { + operation: 'remove' + reaction: string } -export interface DetachBlobPatch extends BasePatch { - type: PatchType.detachBlob - data: DetachBlobPatchData +export interface BlobPatch extends BasePatch { + type: PatchType.blob + data: AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData } -export interface UpdateThreadPatch extends BasePatch { - type: PatchType.updateThread - data: UpdateThreadPatchData +export interface AttachBlobsPatchData { + operation: 'attach' + blobs: BlobData[] } -export type Patch = - | UpdatePatch - | RemovePatch - | SetReactionPatch - | RemoveReactionPatch - | AttachBlobPatch - | DetachBlobPatch - | UpdateThreadPatch +export interface DetachBlobsPatchData { + operation: 'detach' + blobIds: BlobID[] +} -export type PatchData = - | RemovePatchData - | UpdatePatchData - | SetReactionPatchData - | RemoveReactionPatchData - | AttachBlobPatchData - | DetachBlobPatchData - | UpdateThreadPatchData +export interface SetBlobsPatchData { + operation: 'set' + blobs: BlobData[] +} -export interface UpdateThreadPatchData { - threadId: CardID - 
threadType: CardType - repliesCountOp?: 'increment' | 'decrement' +export interface LinkPreviewPatch extends BasePatch { + type: PatchType.linkPreview + data: AttachLinkPreviewsPatchData | DetachLinkPreviewsPatchData | SetLinkPreviewsPatchData } -export interface UpdatePatchData { - type?: MessageType - content?: Markdown - extra?: MessageExtra +export interface AttachLinkPreviewsPatchData { + operation: 'attach' + previews: (LinkPreviewData & { previewId: LinkPreviewID })[] } -export interface SetReactionPatchData { - reaction: string +export interface DetachLinkPreviewsPatchData { + operation: 'detach' + previewIds: LinkPreviewID[] } -export interface RemoveReactionPatchData { - reaction: string +export interface SetLinkPreviewsPatchData { + operation: 'set' + previews: (LinkPreviewData & { previewId: LinkPreviewID })[] } -export type AttachBlobPatchData = BlobData +export interface ThreadPatch extends BasePatch { + type: PatchType.thread + data: AttachThreadPatchData | UpdateThreadPatchData +} -export interface DetachBlobPatchData { - blobId: BlobID +export interface AttachThreadPatchData { + operation: 'attach' + threadId: CardID + threadType: CardType } -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface RemovePatchData {} +export interface UpdateThreadPatchData { + operation: 'update' + threadId: CardID + threadType?: CardType + repliesCountOp?: 'increment' | 'decrement' + lastReply?: Date +} + +export type Patch = UpdatePatch | RemovePatch | ReactionPatch | BlobPatch | LinkPreviewPatch | ThreadPatch export enum PatchType { update = 'update', remove = 'remove', - setReaction = 'setReaction', - removeReaction = 'removeReaction', - attachBlob = 'attachBlob', - detachBlob = 'detachBlob', - updateThread = 'updateThread' + reaction = 'reaction', + blob = 'blob', + linkPreview = 'linkPreview', + thread = 'thread' } export interface Reaction { @@ -216,7 +227,7 @@ export interface Reaction { export interface BlobData { blobId: BlobID - contentType: string + mimeType: string fileName: string size: number metadata?: BlobMetadata diff --git a/packages/yaml/src/parse.ts b/packages/yaml/src/parse.ts index 299ef69d7e9..d6e92f50a80 100644 --- a/packages/yaml/src/parse.ts +++ b/packages/yaml/src/parse.ts @@ -83,10 +83,10 @@ export function parseYaml (data: string): ParsedFile { } : undefined, blobs: - message.blobs ?? + message.blobs?.map((it) => ({ ...it, mimeType: it.mimeType ?? (it as any).contentType })) ?? 
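// Aside (not part of the original patch): a minimal sketch of how the reworked patch model
// above is intended to be used. PatchType.reaction, Patch and applyPatches come from the
// diffs in this commit; the shared package name and the addThumbsUp helper are assumptions.
import { PatchType, type Message, type Patch, type SocialID } from '@hcengineering/communication-types'
import { applyPatches } from '@hcengineering/communication-shared'

function addThumbsUp (message: Message, creator: SocialID): Message {
  const patch: Patch = {
    messageId: message.id,
    type: PatchType.reaction,
    creator,
    created: new Date(),
    data: { operation: 'add', reaction: '👍' }
  }
  // applyPatches routes the reaction patch through patchReactions/setReaction above;
  // adding the same reaction twice for the same creator is a no-op.
  return applyPatches(message, [patch])
}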
(message as any).files?.map((it: any) => ({ blobId: it.blobId, - contentType: it.type, + mimeType: it.type, fileName: it.filename, size: it.size, metadata: it.meta From 069fa40d8ad411bbf30ebef5001890e3d9a9a554 Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 19 Jun 2025 22:37:06 +0400 Subject: [PATCH 113/636] Fix notification update validation schema (#71) --- packages/server/src/middleware/validate.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 8f123c4a7d0..9a10d92bf4a 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -383,13 +383,12 @@ const RemoveMessagesGroupEventSchema = BaseEventSchema.extend({ // Notification events const UpdateNotificationsEventSchema = BaseEventSchema.extend({ type: z.literal(NotificationEventType.UpdateNotification), + context: ContextIDSchema, + account: AccountIDSchema, query: z.object({ - context: ContextIDSchema, - account: AccountIDSchema, id: z.string().optional(), type: z.string().optional(), - read: z.boolean().optional(), - created: dateOrRecordSchema.optional() + untilDate: DateSchema.optional() }), updates: z.object({ read: z.boolean() From c83ff3bf6837eb2b3d4a3530a82fb88c2595ca5e Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Fri, 20 Jun 2025 16:25:10 +0700 Subject: [PATCH 114/636] fix: properly detect created hls segments Signed-off-by: Alexander Onnikov --- Dockerfile | 2 +- go.mod | 2 +- go.sum | 4 +- internal/pkg/mediaconvert/command.go | 34 +++++++++++--- internal/pkg/mediaconvert/command_test.go | 8 ++-- internal/pkg/mediaconvert/scheduler.go | 2 + internal/pkg/mediaconvert/transcoder.go | 56 ++++++++++++++++------- internal/pkg/storage/datalake.go | 56 +++++++++++++---------- internal/pkg/uploader/uploader.go | 25 ++++++++-- 9 files changed, 131 insertions(+), 58 deletions(-) diff --git a/Dockerfile b/Dockerfile index 121a1b5efb4..1e7a5cd10b9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM golang:1.24.1 AS builder +FROM golang:1.24.4 AS builder ENV GO111MODULE=on ENV CGO_ENABLED=0 ENV GOBIN=/bin diff --git a/go.mod b/go.mod index 905e524afa1..701291bda07 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/aws/aws-sdk-go-v2/credentials v1.17.59 github.com/aws/aws-sdk-go-v2/service/s3 v1.77.0 github.com/getsentry/sentry-go v0.31.1 - github.com/golang-jwt/jwt/v5 v5.2.1 + github.com/golang-jwt/jwt/v5 v5.2.2 github.com/google/uuid v1.6.0 github.com/kelseyhightower/envconfig v1.4.0 github.com/pkg/errors v0.9.1 diff --git a/go.sum b/go.sum index c531b49b391..3ca0c0a4f26 100644 --- a/go.sum +++ b/go.sum @@ -45,8 +45,8 @@ github.com/getsentry/sentry-go v0.31.1 h1:ELVc0h7gwyhnXHDouXkhqTFSO5oslsRDk0++ey github.com/getsentry/sentry-go v0.31.1/go.mod h1:CYNcMMz73YigoHljQRG+qPF+eMq8gG72XcGN/p71BAY= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= -github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= -github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= +github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8= +github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 047aa3a5a76..2efbf54cf76 100644 --- a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -19,7 +19,6 @@ import ( "context" "fmt" "io" - "os" "os/exec" "path/filepath" "strings" @@ -30,12 +29,27 @@ import ( "go.uber.org/zap" ) +type LogLevel string + +const ( + LogLevelQuiet LogLevel = "quiet" + LogLevelPanic LogLevel = "panic" + LogLevelFatal LogLevel = "fatal" + LogLevelError LogLevel = "error" + LogLevelWarning LogLevel = "warning" + LogLevelInfo LogLevel = "info" + LogLevelVerbose LogLevel = "verbose" + LogLevelDebug LogLevel = "debug" + LogLevelTrace LogLevel = "trace" +) + // Options represents configuration for the ffmpeg command type Options struct { Input string OutputDir string ScalingLevels []string Level string + LogLevel LogLevel Transcode bool Threads int UploadID string @@ -51,8 +65,6 @@ func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.C logger.Debug("prepared command: ", zap.Strings("args", args)) var result = exec.CommandContext(ctx, "ffmpeg", args...) - result.Stderr = os.Stdout - result.Stdout = os.Stdout result.Stdin = in return result, nil @@ -60,6 +72,8 @@ func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.C func buildCommonCommand(opts *Options) []string { var result = []string{ + "-y", // Overwrite output files without asking. 
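+		// -v maps the configured LogLevel (quiet to trace, defined above) onto ffmpeg's own
+		// -v/-loglevel switch, so transcoder verbosity follows the service configuration.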
+ "-v", string(opts.LogLevel), "-threads", fmt.Sprint(opts.Threads), "-i", opts.Input, } @@ -96,7 +110,7 @@ func BuildRawVideoCommand(opts *Options) []string { "-g", "60", "-f", "hls", "-hls_time", "5", - "-hls_flags", "split_by_time", + "-hls_flags", "split_by_time+temp_file", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) @@ -107,7 +121,7 @@ func BuildRawVideoCommand(opts *Options) []string { "-c:v", "copy", // Copy video stream "-f", "hls", "-hls_time", "5", - "-hls_flags", "split_by_time", + "-hls_flags", "split_by_time+temp_file", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) @@ -142,7 +156,15 @@ func BuildScalingVideoCommand(opts *Options) []string { "-g", "60", "-f", "hls", "-hls_time", "5", - "-hls_flags", "split_by_time", + // Use HLS flags + // - split_by_time + // Allow segments to start on frames other than key frames. + // This improves behavior on some players when the time between key frames is inconsistent, + // but may make things worse on others, and can cause some oddities during seeking. + // This flag should be used with the hls_time option. + // - temp_file + // Write segment data to filename.tmp and rename to filename only once the segment is complete. + "-hls_flags", "split_by_time+temp_file", "-hls_list_size", "0", "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", level)), filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, level))) diff --git a/internal/pkg/mediaconvert/command_test.go b/internal/pkg/mediaconvert/command_test.go index b6161e7ea7a..26beed32728 100644 --- a/internal/pkg/mediaconvert/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -31,7 +31,7 @@ func Test_BuildVideoCommand_Scaling(t *testing.T) { ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -46,7 +46,7 @@ func Test_BuildVideoCommand_Scaling_NoRaw(t *testing.T) { ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags 
split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -61,7 +61,7 @@ func Test_BuildVideoCommand_Raw_NoTranscode(t *testing.T) { Transcode: false, }) - const expected = `"-threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `"-threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } @@ -76,7 +76,7 @@ func Test_BuildVideoCommand_Raw_Transcode(t *testing.T) { Transcode: true, }) - const expected = `-threads 4 -i pipe:0 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-threads 4 -i pipe:0 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 1d71d9113fe..5f335373600 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -299,6 +299,8 @@ func IsSupportedMediaType(mediaType string) bool { return true case "video/webm": return true + case "video/quicktime": + return true default: return false } diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 8b04ca677d7..fca9d023537 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -16,8 +16,10 @@ package mediaconvert import ( + "bytes" "context" "fmt" + "io" "os" "os/exec" "path/filepath" @@ -42,6 +44,12 @@ type Transcoder struct { logger *zap.Logger } +type Command struct { + cmd *exec.Cmd + stdoutBuf bytes.Buffer + stderrBuf bytes.Buffer +} + // NewTranscoder creates a new instance of task transcoder func NewTranscoder(ctx context.Context, cfg *config.Config) *Transcoder { var p = &Transcoder{ @@ -54,7 +62,7 @@ func NewTranscoder(ctx context.Context, cfg *config.Config) *Transcoder { } // Transcode handles one transcoding task -func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, error) { +func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, error) { var logger = p.logger.With(zap.String("task-id", task.ID)) logger.Debug("start") @@ -64,7 +72,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream", "datalake") if err != nil { logger.Error("can not create token", zap.Error(err)) - return TaskResult{}, errors.Wrapf(err, "can not create token") + return nil, errors.Wrapf(err, "can not create token") } logger.Debug("phase 2: preparing fs") @@ -73,7 +81,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task 
*Task) (TaskResult, err err = os.MkdirAll(destinationFolder, os.ModePerm) if err != nil { logger.Error("can not create temporary folder", zap.Error(err)) - return TaskResult{}, errors.Wrapf(err, "can not create temporary folder") + return nil, errors.Wrapf(err, "can not create temporary folder") } defer func() { @@ -87,38 +95,38 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err remoteStorage, err := storage.NewStorageByURL(ctx, p.cfg.Endpoint(), p.cfg.EndpointURL.Scheme, tokenString, task.Workspace) if err != nil { logger.Error("can not create storage by url", zap.Error(err), zap.String("url", p.cfg.EndpointURL.String())) - return TaskResult{}, errors.Wrapf(err, "can not create storage by url") + return nil, errors.Wrapf(err, "can not create storage by url") } stat, err := remoteStorage.StatFile(ctx, task.Source) if err != nil { logger.Error("can not stat file", zap.Error(err), zap.String("filepath", task.Source)) - return TaskResult{}, errors.Wrapf(err, "can not stat file") + return nil, errors.Wrapf(err, "can not stat file") } if !IsSupportedMediaType(stat.Type) { logger.Info("unsupported media type", zap.String("type", stat.Type)) - return TaskResult{}, fmt.Errorf("unsupported media type: %s", stat.Type) + return nil, fmt.Errorf("unsupported media type: %s", stat.Type) } sourceFilePath := filepath.Join(destinationFolder, filename) if err = remoteStorage.GetFile(ctx, task.Source, sourceFilePath); err != nil { logger.Error("can not download source file", zap.Error(err), zap.String("filepath", task.Source)) // TODO: reschedule - return TaskResult{}, errors.Wrapf(err, "can not download source file") + return nil, errors.Wrapf(err, "can not download source file") } logger.Debug("phase 4: prepare to transcode") probe, err := ffprobe.ProbeURL(ctx, sourceFilePath) if err != nil { logger.Error("can not get ffprobe", zap.Error(err), zap.String("filepath", sourceFilePath)) - return TaskResult{}, errors.Wrapf(err, "can not get ffprobe") + return nil, errors.Wrapf(err, "can not get ffprobe") } videoStream := probe.FirstVideoStream() if videoStream == nil { logger.Error("no video stream found", zap.String("filepath", sourceFilePath)) - return TaskResult{}, errors.Wrapf(err, "no video stream found") + return nil, errors.Wrapf(err, "no video stream found") } logger.Debug("video stream found", zap.String("codec", videoStream.CodecName), zap.Int("width", videoStream.Width), zap.Int("height", videoStream.Height)) @@ -136,6 +144,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err Input: sourceFilePath, OutputDir: p.cfg.OutputDir, Level: level, + LogLevel: LogLevel(p.cfg.LogLevel), Transcode: !IsHLSSupportedVideoCodec(codec), ScalingLevels: append(sublevels, level), UploadID: task.ID, @@ -157,7 +166,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err err = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) if err != nil { logger.Error("can not generate hls playlist", zap.String("out", p.cfg.OutputDir), zap.String("uploadID", opts.UploadID)) - return TaskResult{}, errors.Wrapf(err, "can not generate hls playlist") + return nil, errors.Wrapf(err, "can not generate hls playlist") } go uploader.Start() @@ -169,29 +178,42 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err BuildRawVideoCommand(&opts), BuildScalingVideoCommand(&opts), } - var cmds []*exec.Cmd + var cmds []Command for _, args := range argsSlice { cmd, cmdErr := newFfmpegCommand(ctx, 
nil, args) if cmdErr != nil { logger.Error("can not create a new command", zap.Error(cmdErr), zap.Strings("args", args)) go uploader.Cancel() - return TaskResult{}, errors.Wrapf(err, "can not create a new command") + return nil, errors.Wrapf(err, "can not create a new command") + } + + var command = Command{ + cmd: cmd, + stdoutBuf: bytes.Buffer{}, + stderrBuf: bytes.Buffer{}, } - cmds = append(cmds, cmd) + + cmd.Stdout = io.MultiWriter(os.Stdout, &command.stdoutBuf) + cmd.Stderr = io.MultiWriter(os.Stderr, &command.stderrBuf) + + cmds = append(cmds, command) if err = cmd.Start(); err != nil { logger.Error("can not start a command", zap.Error(err), zap.Strings("args", args)) go uploader.Cancel() - return TaskResult{}, errors.Wrapf(err, "can not start a command") + return nil, errors.Wrapf(err, "can not start a command") } } logger.Debug("phase 7: wait for the result") + for _, cmd := range cmds { - if err = cmd.Wait(); err != nil { + if err = cmd.cmd.Wait(); err != nil { logger.Error("can not wait for command end ", zap.Error(err)) + os.Stdout.Write(cmd.stdoutBuf.Bytes()) + os.Stderr.Write(cmd.stderrBuf.Bytes()) go uploader.Cancel() - return TaskResult{}, errors.Wrapf(err, "can not wait for command end") + return nil, errors.Wrapf(err, "can not wait for command end") } } @@ -226,5 +248,5 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (TaskResult, err } } - return result, nil + return &result, nil } diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 8a5284197f9..aaf18ac2730 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -21,8 +21,10 @@ import ( "io" "mime/multipart" "net/textproto" + "net/url" "os" "path/filepath" + "strconv" "strings" "time" @@ -170,6 +172,11 @@ func (d *DatalakeStorage) DeleteFile(ctx context.Context, fileName string) error return errors.Wrapf(err, "delete failed") } + if err := okResponse(res); err != nil { + logRequestError(logger, err, "bad status code", res) + return err + } + logger.Debug("deleted") return nil @@ -205,14 +212,11 @@ func (d *DatalakeStorage) PatchMeta(ctx context.Context, filename string, md *Me return err } - if resp.StatusCode() != fasthttp.StatusOK { - var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) - logger.Debug("bad status code", zap.Error(err)) + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) return err } - fmt.Println(string(resp.Body())) - return nil } @@ -237,9 +241,8 @@ func (d *DatalakeStorage) GetMeta(ctx context.Context, filename string) (*Metada return nil, err } - if resp.StatusCode() != fasthttp.StatusOK { - var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) - logger.Debug("bad status code", zap.Error(err)) + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) return nil, err } @@ -270,10 +273,8 @@ func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination str return err } - // Check the response status code - if resp.StatusCode() != fasthttp.StatusOK { - var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) - logger.Debug("bad status code", zap.Error(err)) + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) return err } @@ -291,7 +292,13 @@ func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination str return err } - logger.Debug("file downloaded successfully") + stat, err := os.Stat(destination) + if err != 
nil { + logger.Error("can't stat the file", zap.Error(err)) + return err + } + + logger.Info("file downloaded successfully", zap.Int64("size", stat.Size())) return nil } @@ -316,9 +323,7 @@ func (d *DatalakeStorage) StatFile(ctx context.Context, filename string) (*BlobI return nil, err } - // Check the response status code - if resp.StatusCode() != fasthttp.StatusOK { - var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) + if err := okResponse(resp); err != nil { logRequestError(logger, err, "bad status code", resp) return nil, err } @@ -346,7 +351,7 @@ func (d *DatalakeStorage) SetParent(ctx context.Context, filename, parent string req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey + "/parent") req.Header.SetMethod(fasthttp.MethodPatch) req.Header.Add("Authorization", "Bearer "+d.token) - req.Header.Add("Content-Type", "application/json") + req.Header.SetContentType("application/json") body := map[string]any{ "parent": parentKey, @@ -365,15 +370,20 @@ func (d *DatalakeStorage) SetParent(ctx context.Context, filename, parent string return err } - // Check the response status code - var statusOK = resp.StatusCode() >= 200 && resp.StatusCode() < 300 - if !statusOK { - var err = fmt.Errorf("unexpected status code: %d", resp.StatusCode()) - logger.Debug("bad status code", zap.Error(err), zap.Int("status", resp.StatusCode()), zap.String("response", resp.String())) + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) return err } - logger.Debug("finished") + return nil +} + +func okResponse(res *fasthttp.Response) error { + var statusOK = res.StatusCode() >= 200 && res.StatusCode() < 300 + + if !statusOK { + return fmt.Errorf("unexpected status code: %d", res.StatusCode()) + } return nil } diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index 957d889f61c..11207510290 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -35,6 +35,8 @@ import ( // See at https://man7.org/linux/man-pages/man7/inotify.7.html const inotifyCloseWrite uint32 = 0x8 // IN_CLOSE_WRITE const inotifyMovedTo uint32 = 0x80 // IN_MOVED_TO +const inotifyDelete uint32 = 0x200 // IN_DELETE +const inotifyMovedFrom uint32 = 0x40 // IN_MOVED_FROM // Uploader represents file uploader type Uploader interface { @@ -250,9 +252,12 @@ func (u *uploaderImpl) uploadAndDelete(f string) { } // Check if the file exists - _, err := os.Stat(f) - if err != nil { - logger.Debug("file does not exist") + if _, err := os.Stat(f); err != nil { + if os.IsNotExist(err) { + logger.Debug("file does not exist", zap.Error(err)) + } else { + logger.Error("failed to stat file", zap.Error(err)) + } return } @@ -300,6 +305,7 @@ func (u *uploaderImpl) uploadAndDelete(f string) { } } +// startWatch watches for changes in the directory and uploads created files func (u *uploaderImpl) startWatch(ready chan<- struct{}) { defer close(u.watcherDoneCh) @@ -317,7 +323,7 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { } }() - if err := watcher.AddWatch(u.options.Dir, inotifyCloseWrite|inotifyMovedTo); err != nil { + if err := watcher.AddWatch(u.options.Dir, inotifyCloseWrite|inotifyMovedTo|inotifyDelete|inotifyMovedFrom); err != nil { logger.Error("can not start watching", zap.Error(err)) close(ready) return @@ -348,8 +354,19 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { if strings.HasSuffix(event.Name, ".tmp") { continue } + if event.Mask&(inotifyDelete|inotifyMovedFrom) != 0 { + 
logger.Debug("file deleted or moved away", zap.String("event", event.Name), zap.Uint32("mask", event.Mask)) + continue + } + logger.Debug("received an event", zap.String("event", event.Name), zap.Uint32("mask", event.Mask)) + if _, err := os.Stat(event.Name); os.IsNotExist(err) { + logger.Warn("file does not exist", zap.String("file", event.Name)) + // wait a bit for file operations to complete + time.Sleep(100 * time.Millisecond) + } + u.filesCh <- event.Name case err, ok := <-watcher.Error: if !ok { From f36ea74c26f05d0d5c98fc6072839f63666822b0 Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 20 Jun 2025 16:42:13 +0400 Subject: [PATCH 115/636] Fix thread attach (#72) --- packages/cockroach/src/client.ts | 6 +++--- packages/cockroach/src/db/base.ts | 12 ++++++------ packages/cockroach/src/db/mapping.ts | 17 ----------------- packages/cockroach/src/db/message.ts | 13 +++++++++++-- packages/cockroach/src/db/notification.ts | 20 +------------------- packages/cockroach/src/types.ts | 1 + 6 files changed, 22 insertions(+), 47 deletions(-) diff --git a/packages/cockroach/src/client.ts b/packages/cockroach/src/client.ts index 987b0bb2319..37afdd4ec5e 100644 --- a/packages/cockroach/src/client.ts +++ b/packages/cockroach/src/client.ts @@ -14,7 +14,7 @@ import postgres from 'postgres' import type { PostgresClientReference } from './connection' -import type { SqlParams, SqlRow } from './types' +import type { SqlParams, SqlResult, SqlRow } from './types' import { convertArrayParams } from './utils' export class SqlClient { @@ -27,9 +27,9 @@ export class SqlClient { return this.sql } - async execute(query: string, params?: SqlParams, client?: postgres.TransactionSql): Promise { + async execute(query: string, params?: SqlParams, client?: postgres.TransactionSql): Promise> { const convertedParams = convertArrayParams(params) - return await (client ?? this.sql).unsafe(query, convertedParams) + return await (client ?? this.sql).unsafe(query, convertedParams) as SqlResult } cursor(query: string, params?: SqlParams, size?: number): AsyncIterable[]> { diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index d99528c09ef..b460abd3a1b 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -13,10 +13,10 @@ // limitations under the License. 
// -import postgres, { type ParameterOrJSON, type Row } from 'postgres' +import postgres, { type ParameterOrJSON } from 'postgres' import type { WorkspaceID } from '@hcengineering/communication-types' -import { type Logger, type Options } from '../types' +import { SqlRow, type Logger, type Options, type SqlResult } from '../types' import { SqlClient } from '../client' export class BaseDb { @@ -31,12 +31,12 @@ export class BaseDb { return this.client.getRawClient() } - async execute>( + async execute( sql: string, params?: ParameterOrJSON[], name?: string, client?: postgres.TransactionSql - ): Promise { + ): Promise> { if (this.options?.withLogs === true && this.logger !== undefined) { return await this.executeWithLogs(name, this.logger, sql, params, client) } @@ -44,13 +44,13 @@ export class BaseDb { return await this.client.execute(sql, params, client) } - private async executeWithLogs>( + private async executeWithLogs( name: string | undefined, logger: Logger, sql: string, params?: ParameterOrJSON[], client?: postgres.TransactionSql - ): Promise { + ): Promise> { if (name === undefined) { return await this.client.execute(sql, params, client) } diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 2d3b59c178a..2898a43be33 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -78,10 +78,6 @@ interface RawNotification extends NotificationDb { message_group_from_date?: Date message_group_to_date?: Date message_group_count?: number - message_thread_id?: CardID - message_thread_type?: CardType - message_replies?: number - message_last_reply?: Date message_patches?: { type: PatchType data: Record @@ -248,18 +244,6 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): }) ) - let thread: Thread | undefined - - if (raw.message_thread_id != null && raw.message_thread_type != null) { - thread = { - cardId: card, - messageId: String(raw.message_id) as MessageID, - threadId: raw.message_thread_id, - threadType: raw.message_thread_type, - repliesCount: Number(raw.message_replies ?? 0), - lastReply: raw.message_last_reply != null ? new Date(raw.message_last_reply) : created - } - } message = { id: String(raw.message_id) as MessageID, type: raw.message_type, @@ -272,7 +256,6 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): edited: undefined, reactions: [], blobs: messageBlobs ?? [], - thread, linkPreviews: [] } diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index bf23da34ca8..309aff01053 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -625,7 +625,16 @@ export class MessagesDb extends BaseDb { if (!inDb) { await this.getRowClient().begin(async (s) => { - await this.execute(sql, [...values, this.workspace, threadId, cardId, messageId], 'update thread', s) + const res = await this.execute( + sql, + [...values, this.workspace, threadId, cardId, messageId], + 'update thread', + s + ) + + if (res.count === 0) { + return + } const data: UpdateThreadPatchData = { operation: 'update', @@ -1052,7 +1061,7 @@ export class MessagesDb extends BaseDb { AND mc.message_id = $3::varchar LIMIT 1` const result = await this.execute(select, [this.workspace, cardId, messageId]) - const created = result[0].created + const created = result[0]?.created return created != null ? 
new Date(created) : undefined } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index f777524aba8..190113ce344 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -304,10 +304,6 @@ export class NotificationsDb extends BaseDb { ON nc.workspace_id = m.workspace_id AND nc.card_id = m.card_id AND n.message_id = m.id - LEFT JOIN ${TableName.Thread} t - ON nc.workspace_id = t.workspace_id - AND nc.card_id = t.card_id - AND n.message_id = t.message_id LEFT JOIN ${TableName.MessagesGroup} mg ON nc.workspace_id = mg.workspace_id AND nc.card_id = mg.card_id @@ -330,10 +326,6 @@ export class NotificationsDb extends BaseDb { 'message_group_from_date', mg.from_date, 'message_group_to_date', mg.to_date, 'message_group_count', mg.count, - 'message_thread_id', t.thread_id, - 'message_thread_type', t.thread_type, - 'message_replies', t.replies_count, - 'message_last_reply', t.last_reply, 'message_patches', ( SELECT COALESCE( JSON_AGG( @@ -447,10 +439,6 @@ export class NotificationsDb extends BaseDb { mg.from_date AS message_group_from_date, mg.to_date AS message_group_to_date, mg.count AS message_group_count, - t.thread_id AS message_thread_id, - t.thread_type AS message_thread_type, - t.replies_count AS message_replies, - t.last_reply AS message_last_reply, (SELECT json_agg( jsonb_build_object( 'type', p.type, @@ -475,7 +463,6 @@ export class NotificationsDb extends BaseDb { FROM ${TableName.File} f WHERE f.workspace_id = m.workspace_id AND f.card_id = m.card_id AND f.message_id = m.id) AS message_files ` - joinMessages = ` LEFT JOIN ${TableName.Message} m ON nc.workspace_id = m.workspace_id @@ -484,12 +471,7 @@ export class NotificationsDb extends BaseDb { LEFT JOIN ${TableName.MessagesGroup} mg ON nc.workspace_id = mg.workspace_id AND nc.card_id = mg.card_id - AND n.message_created BETWEEN mg.from_date AND mg.to_date - LEFT JOIN ${TableName.Thread} t - ON nc.workspace_id = t.workspace_id - AND nc.card_id = t.card_id - AND n.message_id = t.message_id - ` + AND n.message_created BETWEEN mg.from_date AND mg.to_date ` } select += ` FROM ${TableName.Notification} n diff --git a/packages/cockroach/src/types.ts b/packages/cockroach/src/types.ts index 2412a81d9ef..36b1cab5b27 100644 --- a/packages/cockroach/src/types.ts +++ b/packages/cockroach/src/types.ts @@ -17,6 +17,7 @@ import { type ParameterOrJSON, type Row } from 'postgres' export type SqlRow = Row & Iterable export type SqlParams = ParameterOrJSON[] +export type SqlResult = T[] & { count: number } export interface Logger { info: (message: string, data?: Record) => void From c910668a86ee59b9a265dd87c1ea5f6abe4b0a34 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Mon, 23 Jun 2025 00:05:59 +0700 Subject: [PATCH 116/636] fix: wait until uploader finishes Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/transcoder.go | 3 +- internal/pkg/storage/datalake.go | 2 - internal/pkg/uploader/uploader.go | 126 +++++++++++++++--------- 3 files changed, 79 insertions(+), 52 deletions(-) diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index fca9d023537..46b8206083a 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -85,6 +85,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er } defer func() { + logger.Debug("remove temporary folder") if err = os.RemoveAll(destinationFolder); err != nil { logger.Error("failed to 
cleanup temporary folder", zap.Error(err)) } @@ -218,7 +219,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er } logger.Debug("phase 8: schedule cleanup") - go uploader.Stop() + uploader.Stop() logger.Debug("phase 9: try to set metadata") diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index aaf18ac2730..33d8b9e1594 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -21,10 +21,8 @@ import ( "io" "mime/multipart" "net/textproto" - "net/url" "os" "path/filepath" - "strconv" "strings" "time" diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index 11207510290..e14782f0a77 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -34,9 +34,9 @@ import ( // See at https://man7.org/linux/man-pages/man7/inotify.7.html const inotifyCloseWrite uint32 = 0x8 // IN_CLOSE_WRITE +const inotifyMovedFrom uint32 = 0x40 // IN_MOVED_FROM const inotifyMovedTo uint32 = 0x80 // IN_MOVED_TO const inotifyDelete uint32 = 0x200 // IN_DELETE -const inotifyMovedFrom uint32 = 0x40 // IN_MOVED_FROM // Uploader represents file uploader type Uploader interface { @@ -104,50 +104,66 @@ func New(ctx context.Context, s storage.Storage, opts Options) Uploader { } func (u *uploaderImpl) Stop() { + u.logger.Info("stopping upload") u.stop(false) } func (u *uploaderImpl) Cancel() { + u.logger.Info("canceling upload") u.stop(true) } -func (u *uploaderImpl) scanInitialFiles() { - u.workerWaitGroup.Add(1) - - go func() { - defer u.workerWaitGroup.Done() +func (u *uploaderImpl) scanFiles() { + logger := u.logger.With(zap.String("dir", u.options.Dir)) - logger := u.logger.With(zap.String("dir", u.options.Dir)) + logger.Info("scan files") + files, err := os.ReadDir(u.options.Dir) + if err != nil { + logger.Error("failed to read files", zap.Error(err)) + return + } - logger.Info("initial file scan") - initFiles, err := os.ReadDir(u.options.Dir) - if err != nil { - logger.Error("failed to read initial files", zap.Error(err)) - return + count := 0 + for _, f := range files { + if f.IsDir() { + continue } - for _, f := range initFiles { - if f.IsDir() { - continue - } + var filePath = filepath.Join(u.options.Dir, f.Name()) - // Ignore source file - var filePath = filepath.Join(u.options.Dir, f.Name()) - if filePath == u.options.SourceFile { - continue - } - u.filesCh <- filePath + // Ignore source file + if filePath == u.options.SourceFile { + continue } - logger.Info("initial file scan complete", zap.Int("count", len(initFiles))) - }() + if _, uploaded := u.sentFiles.Load(filePath); uploaded { + logger.Debug("file already uploaded", zap.String("file", filePath)) + continue + } + + u.filesCh <- filePath + count++ + } + + logger.Info("scan complete", zap.Int("count", count)) } func (u *uploaderImpl) stop(rollback bool) { + // Stop watching for new files close(u.watcherStopCh) <-u.watcherDoneCh - u.logger.Debug("file watch stopped") + // Scan remaining files in the directory + u.scanFiles() + + // Close filesCh so no new files added + close(u.filesCh) + + // Wait for all workers to finish processing + u.workerWaitGroup.Wait() + u.logger.Debug("workers done") + + // Perform rollback if rollback { u.logger.Debug("starting rollback...") var i uint32 @@ -161,15 +177,25 @@ func (u *uploaderImpl) stop(rollback bool) { }) u.logger.Debug("rollback done") } - close(u.filesCh) - u.workerWaitGroup.Wait() - u.logger.Debug("workers done") u.uploadCancel() - _ = os.RemoveAll(u.options.Dir) 
+ + remainingFiles, err := os.ReadDir(u.options.Dir) + if err != nil && !os.IsNotExist(err) { + u.logger.Error("failed to read dir", zap.Error(err)) + } + // log remaining files + if len(remainingFiles) > 0 { + files := make([]string, 0, len(remainingFiles)) + for _, entry := range remainingFiles { + files = append(files, entry.Name()) + } + u.logger.Info("remaining files", zap.Int("count", len(files)), zap.Any("files", files)) + } + u.sentFiles.Clear() - u.logger.Debug("finish done", zap.Bool("cancel", rollback)) + u.logger.Debug("stopped", zap.Bool("rollback", rollback)) } func (u *uploaderImpl) Start() { @@ -180,7 +206,7 @@ func (u *uploaderImpl) Start() { <-watcherReady - u.scanInitialFiles() + u.scanFiles() } func (u *uploaderImpl) startWorkers() { @@ -251,8 +277,7 @@ func (u *uploaderImpl) uploadAndDelete(f string) { return } - // Check if the file exists - if _, err := os.Stat(f); err != nil { + if err := waitFileExists(f); os.IsNotExist(err) { if os.IsNotExist(err) { logger.Debug("file does not exist", zap.Error(err)) } else { @@ -342,18 +367,12 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { logger.Error("file channel was closed") return } - if !strings.Contains(event.Name, u.options.Dir) { - continue - } - if event.Name == u.options.Dir { - continue - } - if event.Name == u.options.SourceFile { - continue - } - if strings.HasSuffix(event.Name, ".tmp") { + if event.Name == u.options.Dir || + event.Name == u.options.SourceFile || + strings.HasSuffix(event.Name, ".tmp") { continue } + if event.Mask&(inotifyDelete|inotifyMovedFrom) != 0 { logger.Debug("file deleted or moved away", zap.String("event", event.Name), zap.Uint32("mask", event.Mask)) continue @@ -361,12 +380,6 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { logger.Debug("received an event", zap.String("event", event.Name), zap.Uint32("mask", event.Mask)) - if _, err := os.Stat(event.Name); os.IsNotExist(err) { - logger.Warn("file does not exist", zap.String("file", event.Name)) - // wait a bit for file operations to complete - time.Sleep(100 * time.Millisecond) - } - u.filesCh <- event.Name case err, ok := <-watcher.Error: if !ok { @@ -376,3 +389,18 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { } } } + +func waitFileExists(file string) error { + var err error + + for range 10 { + stat, err := os.Stat(file) + if err == nil && stat.Size() > 0 { + return nil + } + + time.Sleep(50 * time.Millisecond) + } + + return err +} From 0755e927f56fb89c19d624336841810fb0801f66 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Mon, 23 Jun 2025 00:34:51 +0700 Subject: [PATCH 117/636] fix tests and lint issues Signed-off-by: Alexander Onnikov --- .golangci.yaml | 4 ++-- internal/pkg/mediaconvert/command.go | 24 ++++++++++++++++------- internal/pkg/mediaconvert/command_test.go | 12 ++++++++---- internal/pkg/mediaconvert/transcoder.go | 21 +++++++++++++------- internal/pkg/storage/datalake.go | 2 +- internal/pkg/uploader/uploader.go | 5 +++-- 6 files changed, 45 insertions(+), 23 deletions(-) diff --git a/.golangci.yaml b/.golangci.yaml index 5fd9dcf2b3d..ea39029926a 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -57,11 +57,11 @@ linters-settings: goimports: local-prefixes: github.com/networkservicemesh/sdk gocyclo: - min-complexity: 20 + min-complexity: 30 dupl: threshold: 150 funlen: - lines: 180 + lines: 200 statements: 100 goconst: min-len: 2 diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 2efbf54cf76..88dbb7bb4a3 100644 --- 
a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -29,18 +29,28 @@ import ( "go.uber.org/zap" ) +// LogLevel is ffmpeg log level type LogLevel string const ( - LogLevelQuiet LogLevel = "quiet" - LogLevelPanic LogLevel = "panic" - LogLevelFatal LogLevel = "fatal" - LogLevelError LogLevel = "error" + // LogLevelQuiet is quiet log level + LogLevelQuiet LogLevel = "quiet" + // LogLevelPanic is panic log level + LogLevelPanic LogLevel = "panic" + // LogLevelFatal is fatal log level + LogLevelFatal LogLevel = "fatal" + // LogLevelError is error log level + LogLevelError LogLevel = "error" + // LogLevelWarning is warning log level LogLevelWarning LogLevel = "warning" - LogLevelInfo LogLevel = "info" + // LogLevelInfo is info log level + LogLevelInfo LogLevel = "info" + // LogLevelVerbose is verbose log level LogLevelVerbose LogLevel = "verbose" - LogLevelDebug LogLevel = "debug" - LogLevelTrace LogLevel = "trace" + // LogLevelDebug is debug log level + LogLevelDebug LogLevel = "debug" + // LogLevelTrace is trace log level + LogLevelTrace LogLevel = "trace" ) // Options represents configuration for the ffmpeg command diff --git a/internal/pkg/mediaconvert/command_test.go b/internal/pkg/mediaconvert/command_test.go index 26beed32728..f8b36a77500 100644 --- a/internal/pkg/mediaconvert/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -28,10 +28,11 @@ func Test_BuildVideoCommand_Scaling(t *testing.T) { Input: "pipe:0", UploadID: "1", Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -threads 4 -i pipe:0 -map 0:v -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -42,11 +43,12 @@ func Test_BuildVideoCommand_Scaling_NoRaw(t *testing.T) { Input: "pipe:0", UploadID: "1", Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, Level: "720p", ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-threads 4 -i pipe:0 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -threads 4 -i pipe:0 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -57,11 +59,12 @@ func Test_BuildVideoCommand_Raw_NoTranscode(t *testing.T) { Input: "pipe:0", UploadID: 
"1", Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, Level: resconv.Level("651:490"), Transcode: false, }) - const expected = `"-threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `"-y -v debug -threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } @@ -72,11 +75,12 @@ func Test_BuildVideoCommand_Raw_Transcode(t *testing.T) { Input: "pipe:0", UploadID: "1", Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, Level: resconv.Level("651:490"), Transcode: true, }) - const expected = `-threads 4 -i pipe:0 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -threads 4 -i pipe:0 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 46b8206083a..9667073bf00 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -44,6 +44,7 @@ type Transcoder struct { logger *zap.Logger } +// Command represents a ffmpeg command type Command struct { cmd *exec.Cmd stdoutBuf bytes.Buffer @@ -186,7 +187,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er if cmdErr != nil { logger.Error("can not create a new command", zap.Error(cmdErr), zap.Strings("args", args)) go uploader.Cancel() - return nil, errors.Wrapf(err, "can not create a new command") + return nil, errors.Wrapf(cmdErr, "can not create a new command") } var command = Command{ @@ -209,13 +210,19 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er logger.Debug("phase 7: wait for the result") for _, cmd := range cmds { - if err = cmd.cmd.Wait(); err != nil { - logger.Error("can not wait for command end ", zap.Error(err)) - os.Stdout.Write(cmd.stdoutBuf.Bytes()) - os.Stderr.Write(cmd.stderrBuf.Bytes()) - go uploader.Cancel() - return nil, errors.Wrapf(err, "can not wait for command end") + if err = cmd.cmd.Wait(); err == nil { + continue + } + + logger.Error("can not wait for command end ", zap.Error(err)) + if _, err = os.Stdout.Write(cmd.stdoutBuf.Bytes()); err != nil { + logger.Error("can not write stdout ", zap.Error(err)) + } + if _, err = os.Stderr.Write(cmd.stderrBuf.Bytes()); err != nil { + logger.Error("can not write stderr", zap.Error(err)) } + go uploader.Cancel() + return nil, errors.Wrapf(err, "can not wait for command end") } logger.Debug("phase 8: schedule cleanup") diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 33d8b9e1594..95c87555697 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -285,7 +285,7 @@ func (d *DatalakeStorage) GetFile(ctx context.Context, filename, destination str defer func() { _ = file.Close() }() - if err := resp.BodyWriteTo(file); err != nil { + if err = resp.BodyWriteTo(file); err != nil { logger.Debug("can't write to file", 
zap.Error(err)) return err } diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index e14782f0a77..4e1515c80fd 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -277,7 +277,7 @@ func (u *uploaderImpl) uploadAndDelete(f string) { return } - if err := waitFileExists(f); os.IsNotExist(err) { + if err := waitFileExists(f); err != nil { if os.IsNotExist(err) { logger.Debug("file does not exist", zap.Error(err)) } else { @@ -392,9 +392,10 @@ func (u *uploaderImpl) startWatch(ready chan<- struct{}) { func waitFileExists(file string) error { var err error + var stat os.FileInfo for range 10 { - stat, err := os.Stat(file) + stat, err = os.Stat(file) if err == nil && stat.Size() > 0 { return nil } From 2449f29168059e0a3f55cf79815057219332acf7 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 23 Jun 2025 18:44:58 +0400 Subject: [PATCH 118/636] Fix message id limit (#74) Signed-off-by: Kristina Fefelova --- packages/server/src/middleware/validate.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 9a10d92bf4a..0518eee120f 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -268,7 +268,7 @@ const CreateMessageEventSchema = BaseEventSchema.extend({ cardId: CardIDSchema, cardType: CardTypeSchema, - messageId: MessageIDSchema.min(3).max(22).optional(), + messageId: MessageIDSchema.max(22).optional(), messageType: MessageTypeSchema, content: MarkdownSchema, From cf09ed49b579514148b82b4b4712b7c5b0250ade Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 24 Jun 2025 09:57:21 +0700 Subject: [PATCH 119/636] fix: adjust transcode result Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/scheduler.go | 20 +++++++++++++++----- internal/pkg/mediaconvert/transcoder.go | 11 ++++++++--- internal/pkg/queue/queue.go | 8 ++++---- internal/pkg/queue/worker.go | 8 +++++++- 4 files changed, 34 insertions(+), 13 deletions(-) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 5f335373600..da3190276ba 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -47,7 +47,7 @@ type Task struct { // TaskResult represents transcoding task result type TaskResult struct { - Source string `json:"source"` + Playlist string `json:"playlist"` Thumbnail string `json:"thumbnail"` Width int `json:"width"` Height int `json:"height"` @@ -258,19 +258,29 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { logger.Debug("phase 9: try to set metadata") if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { - var hls = TaskResult{ + var result = TaskResult{ Width: videoStream.Width, Height: videoStream.Height, - Source: task.ID + "_master.m3u8", + Playlist: task.ID + "_master.m3u8", Thumbnail: task.ID + ".jpg", } - logger.Debug("applying metadata", zap.String("url", hls.Source), zap.String("thumbnail", hls.Thumbnail), zap.String("source", task.Source)) + logger.Debug( + "applying metadata", + zap.String("url", result.Playlist), + zap.String("thumbnail", result.Thumbnail), + zap.String("source", task.Source), + ) err = metaProvider.PatchMeta( ctx, task.Source, &storage.Metadata{ - "hls": hls, + "hls": map[string]any{ + "source": result.Playlist, + "thumbnail": result.Thumbnail, + }, + "width": result.Width, + "height": result.Height, }, ) if err != nil { diff --git 
a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 9667073bf00..2ca4e3eb35c 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -233,14 +233,14 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var result = TaskResult{ Width: videoStream.Width, Height: videoStream.Height, - Source: task.ID + "_master.m3u8", + Playlist: task.ID + "_master.m3u8", Thumbnail: task.ID + ".jpg", } if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { logger.Debug( "applying metadata", - zap.String("url", result.Source), + zap.String("url", result.Playlist), zap.String("thumbnail", result.Thumbnail), zap.String("source", task.Source), ) @@ -248,7 +248,12 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er ctx, task.Source, &storage.Metadata{ - "hls": result, + "hls": map[string]any{ + "source": result.Playlist, + "thumbnail": result.Thumbnail, + }, + "width": result.Width, + "height": result.Height, }, ) if err != nil { diff --git a/internal/pkg/queue/queue.go b/internal/pkg/queue/queue.go index 64389e8335f..cb5c6745194 100644 --- a/internal/pkg/queue/queue.go +++ b/internal/pkg/queue/queue.go @@ -39,10 +39,10 @@ type TranscodeRequest struct { // TranscodeResult represents transcode result. type TranscodeResult struct { - BlobID string - WorkspaceUUID string - Playlist string - Thumbnail string + BlobID string `json:"blobId"` + WorkspaceUUID string `json:"workspaceUuid"` + Playlist string `json:"playlist"` + Thumbnail string `json:"thumbnail"` } // ConsumerOptions represents options for the consumer diff --git a/internal/pkg/queue/worker.go b/internal/pkg/queue/worker.go index 0137f48be9b..43e72c822da 100644 --- a/internal/pkg/queue/worker.go +++ b/internal/pkg/queue/worker.go @@ -112,7 +112,13 @@ func (w *Worker) processMessage(ctx context.Context, msg kafka.Message, logger * res, err := transcoder.Transcode(ctx, &task) if err == nil { - if err = w.producer.Send(ctx, req.WorkspaceUUID, res); err != nil { + result := TranscodeResult{ + BlobID: req.BlobID, + WorkspaceUUID: req.WorkspaceUUID, + Playlist: res.Playlist, + Thumbnail: res.Thumbnail, + } + if err = w.producer.Send(ctx, req.WorkspaceUUID, result); err != nil { logger.Error("failed to send transcode result", zap.Error(err)) } } From 4bc1ea0a078632d57b9481c9dd524c832962e8c5 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 24 Jun 2025 11:09:01 +0700 Subject: [PATCH 120/636] fix: proper upload rollback Signed-off-by: Alexander Onnikov --- internal/pkg/uploader/uploader.go | 42 +++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 11 deletions(-) diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index 4e1515c80fd..aad4594e167 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -165,17 +165,7 @@ func (u *uploaderImpl) stop(rollback bool) { // Perform rollback if rollback { - u.logger.Debug("starting rollback...") - var i uint32 - u.sentFiles.Range(func(key, _ any) bool { - i++ - var filename = key.(string) - u.workersCh[i%u.options.WorkerCount] <- func() { - u.deleteRemoteFile(filename) - } - return true - }) - u.logger.Debug("rollback done") + u.uploadRollback() } u.uploadCancel() @@ -198,6 +188,36 @@ func (u *uploaderImpl) stop(rollback bool) { u.logger.Debug("stopped", zap.Bool("rollback", rollback)) } +func (u *uploaderImpl) uploadRollback() { + u.logger.Debug("starting rollback...") + + // Create a 
separate worker pool for rollback + var rollbackWg sync.WaitGroup + rollbackCh := make(chan string, u.options.BufferSize) + + // Start rollback workers + for range u.options.WorkerCount { + rollbackWg.Add(1) + go func() { + defer rollbackWg.Done() + for filename := range rollbackCh { + u.deleteRemoteFile(filename) + } + }() + } + + // Send files to rollback + u.sentFiles.Range(func(key, _ any) bool { + rollbackCh <- key.(string) + return true + }) + + // Close channel and wait for rollback to complete + close(rollbackCh) + rollbackWg.Wait() + u.logger.Debug("rollback done") +} + func (u *uploaderImpl) Start() { watcherReady := make(chan struct{}) From b4949c0a369496fd64cf2594343b95edc4590749 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 24 Jun 2025 11:59:50 +0700 Subject: [PATCH 121/636] fix: nil pointer dereference Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/transcoder.go | 29 +++++++++++++------------ 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 2ca4e3eb35c..6f476eee8d8 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -200,29 +200,30 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er cmd.Stderr = io.MultiWriter(os.Stderr, &command.stderrBuf) cmds = append(cmds, command) - if err = cmd.Start(); err != nil { - logger.Error("can not start a command", zap.Error(err), zap.Strings("args", args)) + if startErr := cmd.Start(); startErr != nil { + logger.Error("can not start a command", zap.Error(startErr), zap.Strings("args", args)) go uploader.Cancel() - return nil, errors.Wrapf(err, "can not start a command") + return nil, errors.Wrapf(startErr, "can not start a command") } } logger.Debug("phase 7: wait for the result") for _, cmd := range cmds { - if err = cmd.cmd.Wait(); err == nil { + var cmdErr error + if cmdErr = cmd.cmd.Wait(); cmdErr == nil { continue } - logger.Error("can not wait for command end ", zap.Error(err)) - if _, err = os.Stdout.Write(cmd.stdoutBuf.Bytes()); err != nil { - logger.Error("can not write stdout ", zap.Error(err)) + logger.Error("can not wait for command end ", zap.Error(cmdErr)) + if _, writeErr := os.Stdout.Write(cmd.stdoutBuf.Bytes()); writeErr != nil { + logger.Error("can not write stdout ", zap.Error(writeErr)) } - if _, err = os.Stderr.Write(cmd.stderrBuf.Bytes()); err != nil { - logger.Error("can not write stderr", zap.Error(err)) + if _, writeErr := os.Stderr.Write(cmd.stderrBuf.Bytes()); writeErr != nil { + logger.Error("can not write stderr", zap.Error(writeErr)) } - go uploader.Cancel() - return nil, errors.Wrapf(err, "can not wait for command end") + uploader.Cancel() + return nil, errors.Wrapf(cmdErr, "can not wait for command end") } logger.Debug("phase 8: schedule cleanup") @@ -244,7 +245,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er zap.String("thumbnail", result.Thumbnail), zap.String("source", task.Source), ) - err = metaProvider.PatchMeta( + metaErr := metaProvider.PatchMeta( ctx, task.Source, &storage.Metadata{ @@ -256,8 +257,8 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er "height": result.Height, }, ) - if err != nil { - logger.Error("can not patch the source file", zap.Error(err)) + if metaErr != nil { + logger.Error("can not patch the source file", zap.Error(metaErr)) } } From 960eceb4af3e50950a0d7cb91f7fb39f1491aae7 Mon Sep 17 00:00:00 2001 
From: Alexander Onnikov Date: Tue, 24 Jun 2025 16:15:02 +0700 Subject: [PATCH 122/636] fix: ensure correct ffmpeg commands Signed-off-by: Alexander Onnikov --- .golangci.yaml | 4 ++-- internal/pkg/mediaconvert/command.go | 15 +++++++++++++- internal/pkg/mediaconvert/command_test.go | 22 +++++++++++++++++---- internal/pkg/mediaconvert/transcoder.go | 19 +++++++++++------- internal/pkg/queue/worker.go | 2 +- internal/pkg/uploader/uploader.go | 24 ++++++++++++----------- 6 files changed, 60 insertions(+), 26 deletions(-) diff --git a/.golangci.yaml b/.golangci.yaml index ea39029926a..3a6223134d0 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -61,8 +61,8 @@ linters-settings: dupl: threshold: 150 funlen: - lines: 200 - statements: 100 + lines: 240 + statements: 120 goconst: min-len: 2 min-occurrences: 2 diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 88dbb7bb4a3..3b026240315 100644 --- a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -84,6 +84,8 @@ func buildCommonCommand(opts *Options) []string { var result = []string{ "-y", // Overwrite output files without asking. "-v", string(opts.LogLevel), + "-err_detect", "ignore_err", + "-fflags", "+discardcorrupt", "-threads", fmt.Sprint(opts.Threads), "-i", opts.Input, } @@ -113,6 +115,8 @@ func BuildAudioCommand(opts *Options) []string { func BuildRawVideoCommand(opts *Options) []string { if opts.Transcode { return append(buildCommonCommand(opts), + "-map", "0:v:0", + "-map", "0:a?", "-c:a", "aac", "-c:v", "libx264", "-preset", "veryfast", @@ -149,6 +153,14 @@ func BuildThumbnailCommand(opts *Options) []string { // BuildScalingVideoCommand returns flags for ffmpeg for video scaling func BuildScalingVideoCommand(opts *Options) []string { + if len(opts.ScalingLevels) == 0 { + return []string{} + } + + if len(opts.ScalingLevels) == 1 && opts.ScalingLevels[0] == opts.Level { + return []string{} + } + var result = buildCommonCommand(opts) for _, level := range opts.ScalingLevels { @@ -157,7 +169,8 @@ func BuildScalingVideoCommand(opts *Options) []string { } result = append(result, - "-map", "0:v", + "-map", "0:v:0", + "-map", "0:a?", "-vf", "scale=-2:"+level[:len(level)-1], "-c:a", "aac", "-c:v", "libx264", diff --git a/internal/pkg/mediaconvert/command_test.go b/internal/pkg/mediaconvert/command_test.go index f8b36a77500..0b83a79ab60 100644 --- a/internal/pkg/mediaconvert/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -32,7 +32,7 @@ func Test_BuildVideoCommand_Scaling(t *testing.T) { ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-y -v debug -threads 4 -i pipe:0 -map 0:v -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v:0 -map 0:a? 
-vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -48,7 +48,7 @@ func Test_BuildVideoCommand_Scaling_NoRaw(t *testing.T) { ScalingLevels: []string{"720p", "480p"}, }) - const expected = `-y -v debug -threads 4 -i pipe:0 -map 0:v -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -64,7 +64,7 @@ func Test_BuildVideoCommand_Raw_NoTranscode(t *testing.T) { Transcode: false, }) - const expected = `"-y -v debug -threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `"-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } @@ -80,7 +80,21 @@ func Test_BuildVideoCommand_Raw_Transcode(t *testing.T) { Transcode: true, }) - const expected = `-y -v debug -threads 4 -i pipe:0 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? 
-c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } + +func Test_BuildVideoCommand_Scaling_Small(t *testing.T) { + var scaleCommand = mediaconvert.BuildScalingVideoCommand(&mediaconvert.Options{ + OutputDir: "test", + Input: "pipe:0", + UploadID: "1", + Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, + Level: "360p", + ScalingLevels: []string{"360p"}, + }) + + require.Empty(t, scaleCommand) +} diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 6f476eee8d8..7d8c0316d47 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -47,8 +47,8 @@ type Transcoder struct { // Command represents a ffmpeg command type Command struct { cmd *exec.Cmd - stdoutBuf bytes.Buffer - stderrBuf bytes.Buffer + stdoutBuf *bytes.Buffer + stderrBuf *bytes.Buffer } // NewTranscoder creates a new instance of task transcoder @@ -183,6 +183,11 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var cmds []Command for _, args := range argsSlice { + if len(args) == 0 { + logger.Debug("skip empty command") + continue + } + cmd, cmdErr := newFfmpegCommand(ctx, nil, args) if cmdErr != nil { logger.Error("can not create a new command", zap.Error(cmdErr), zap.Strings("args", args)) @@ -192,12 +197,12 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var command = Command{ cmd: cmd, - stdoutBuf: bytes.Buffer{}, - stderrBuf: bytes.Buffer{}, + stdoutBuf: &bytes.Buffer{}, + stderrBuf: &bytes.Buffer{}, } - cmd.Stdout = io.MultiWriter(os.Stdout, &command.stdoutBuf) - cmd.Stderr = io.MultiWriter(os.Stderr, &command.stderrBuf) + cmd.Stdout = io.MultiWriter(os.Stdout, command.stdoutBuf) + cmd.Stderr = io.MultiWriter(os.Stderr, command.stderrBuf) cmds = append(cmds, command) if startErr := cmd.Start(); startErr != nil { @@ -215,7 +220,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er continue } - logger.Error("can not wait for command end ", zap.Error(cmdErr)) + logger.Error("can not wait for command end", zap.Error(cmdErr), zap.String("cmd", cmd.cmd.String())) if _, writeErr := os.Stdout.Write(cmd.stdoutBuf.Bytes()); writeErr != nil { logger.Error("can not write stdout ", zap.Error(writeErr)) } diff --git a/internal/pkg/queue/worker.go b/internal/pkg/queue/worker.go index 43e72c822da..7c9dae02b41 100644 --- a/internal/pkg/queue/worker.go +++ b/internal/pkg/queue/worker.go @@ -85,7 +85,7 @@ func (w *Worker) fetchAndProcessMessage(ctx context.Context) error { err = w.processMessage(ctx, msg, logger) if err != nil { w.logger.Error("failed to process message", zap.Error(err)) - return err + return fmt.Errorf("process message: %w", err) } return nil diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index aad4594e167..baffcaa82e4 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -297,6 +297,13 @@ func (u *uploaderImpl) uploadAndDelete(f string) { return } + // Check if the file has already been uploaded + _, ok := u.sentFiles.Load(f) + if ok && !u.shouldDeleteOnStop(f) { + logger.Debug("file already uploaded") + return + } + if err := waitFileExists(f); err != nil { if os.IsNotExist(err) { logger.Debug("file does not exist", zap.Error(err)) @@ -306,18 +313,11 @@ func (u 
*uploaderImpl) uploadAndDelete(f string) { return } - // Check if the file has already been uploaded - var _, ok = u.sentFiles.Load(f) - if ok && !u.shouldDeleteOnStop(f) { - logger.Debug("file already uploaded") - return - } - for attempt := range u.options.RetryCount { logger = logger.With(zap.Int("attempt", attempt)) - var ctx, cancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) - var err = u.storage.PutFile(ctx, f) - cancel() + var putCtx, putCancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) + var err = u.storage.PutFile(putCtx, f) + putCancel() if err != nil { logger.Error("attempt failed", zap.Error(err)) @@ -328,7 +328,9 @@ func (u *uploaderImpl) uploadAndDelete(f string) { // Update the file's parent if SourceFile is set if u.options.Source != "" { - err = u.storage.SetParent(ctx, f, u.options.Source) + var setParentCtx, setParentCancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) + err = u.storage.SetParent(setParentCtx, f, u.options.Source) + setParentCancel() if err != nil { logger.Error("can not set blob parent", zap.Error(err), zap.String("filename", f), zap.String("source", u.options.Source)) } From b94e4927ed44df176729e085452a716f0d311681 Mon Sep 17 00:00:00 2001 From: Andrey Sobolev Date: Tue, 24 Jun 2025 17:26:24 +0700 Subject: [PATCH 123/636] UBERF-11712: Rework integration (#73) Signed-off-by: Kristina Fefelova Co-authored-by: Kristina Fefelova --- packages/sdk-types/src/serverApi.ts | 1 + packages/server/src/middleware/broadcast.ts | 11 ++++++++++- packages/server/src/middleware/validate.ts | 2 +- packages/server/src/types.ts | 5 ++++- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 5ba994cbf9d..2be491af852 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -34,6 +34,7 @@ import type { EventResult, Event } from './events/event' export interface SessionData { sessionId?: string account: Account + contextData?: any } export interface ServerApi { diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index 303966d358f..de9932b7f3e 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -127,13 +127,22 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { } } + const ctx = this.context.ctx.newChild('enqueue', {}) + ctx.contextData = session.contextData + if (sessionIds.length > 0) { try { - this.broadcastFn(this.context.ctx, sessionIds, event) + this.broadcastFn.broadcast(ctx, sessionIds, event) } catch (e) { this.context.ctx.error('Failed to broadcast event', { error: e }) } } + + try { + this.broadcastFn.enqueue(ctx, event) + } catch (e) { + this.context.ctx.error('Failed to broadcast event', { error: e }) + } await this.provideResponse(session, event, derived) } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 0518eee120f..6534cd78095 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -383,7 +383,7 @@ const RemoveMessagesGroupEventSchema = BaseEventSchema.extend({ // Notification events const UpdateNotificationsEventSchema = BaseEventSchema.extend({ type: z.literal(NotificationEventType.UpdateNotification), - context: ContextIDSchema, + contextId: ContextIDSchema, account: AccountIDSchema, query: z.object({ id: z.string().optional(), diff --git 
a/packages/server/src/types.ts b/packages/server/src/types.ts index dfb3a270368..c8bd456a442 100644 --- a/packages/server/src/types.ts +++ b/packages/server/src/types.ts @@ -94,7 +94,10 @@ export interface MiddlewareContext { export type MiddlewareCreateFn = (context: MiddlewareContext, next?: Middleware) => Promise -export type BroadcastSessionsFunc = (ctx: MeasureContext, sessionIds: string[], result: any) => void +export interface BroadcastSessionsFunc { + broadcast: (ctx: MeasureContext, sessionIds: string[], result: Enriched) => void + enqueue: (ctx: MeasureContext, result: Enriched) => void +} export interface TriggerCtx { ctx: MeasureContext From 86ad2921f20529ed98b081e344c27acc9b724299 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 24 Jun 2025 18:00:18 +0700 Subject: [PATCH 124/636] fix: extract command executor to separate class Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/executor.go | 98 ++++++++++++++ internal/pkg/mediaconvert/executor_test.go | 142 +++++++++++++++++++++ internal/pkg/mediaconvert/transcoder.go | 50 +------- 3 files changed, 247 insertions(+), 43 deletions(-) create mode 100644 internal/pkg/mediaconvert/executor.go create mode 100644 internal/pkg/mediaconvert/executor_test.go diff --git a/internal/pkg/mediaconvert/executor.go b/internal/pkg/mediaconvert/executor.go new file mode 100644 index 00000000000..6385160aa7c --- /dev/null +++ b/internal/pkg/mediaconvert/executor.go @@ -0,0 +1,98 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package mediaconvert + +import ( + "bytes" + "context" + "io" + "os" + "os/exec" + "sync" + + "github.com/hcengineering/stream/internal/pkg/log" + "go.uber.org/zap" +) + +// CommandExecutor executes multiple commands in parallel +type CommandExecutor interface { + Execute(commands []*exec.Cmd) error +} + +type commandExecutor struct { + logger *zap.Logger +} + +var _ CommandExecutor = (*commandExecutor)(nil) + +// NewCommandExecutor creates a new instance of command executor +func NewCommandExecutor(ctx context.Context) CommandExecutor { + return &commandExecutor{ + logger: log.FromContext(ctx), + } +} + +// Execute executes multiple commands in parallel +func (e *commandExecutor) Execute(commands []*exec.Cmd) error { + logger := e.logger + errCh := make(chan error, len(commands)) + + var mu sync.Mutex + var wg sync.WaitGroup + for _, cmd := range commands { + wg.Add(1) + + go func(cmd *exec.Cmd) { + defer wg.Done() + + var stdoutBuf = &bytes.Buffer{} + var stderrBuf = &bytes.Buffer{} + + if logger.Core().Enabled(zap.DebugLevel) { + cmd.Stdout = io.MultiWriter(os.Stdout, stdoutBuf) + cmd.Stderr = io.MultiWriter(os.Stderr, stderrBuf) + } else { + cmd.Stdout = stdoutBuf + cmd.Stderr = stderrBuf + } + + logger.Info("run command", zap.String("cmd", cmd.String())) + if err := cmd.Run(); err != nil { + errCh <- err + + // Lock so only on goroutine can write to stdout/stderr at the same time + mu.Lock() + defer mu.Unlock() + logger.Error("can not wait for command end", zap.Error(err), zap.String("cmd", cmd.String())) + if _, writeErr := os.Stdout.Write(stdoutBuf.Bytes()); writeErr != nil { + logger.Error("can not write stdout ", zap.Error(writeErr)) + } + if _, writeErr := os.Stderr.Write(stderrBuf.Bytes()); writeErr != nil { + logger.Error("can not write stderr", zap.Error(writeErr)) + } + } + }(cmd) + } + + wg.Wait() + close(errCh) + + for err := range errCh { + return err + } + + return nil +} diff --git a/internal/pkg/mediaconvert/executor_test.go b/internal/pkg/mediaconvert/executor_test.go new file mode 100644 index 00000000000..3f30a82cf97 --- /dev/null +++ b/internal/pkg/mediaconvert/executor_test.go @@ -0,0 +1,142 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package mediaconvert_test + +import ( + "context" + "os/exec" + "testing" + "time" + + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/mediaconvert" + "github.com/stretchr/testify/assert" +) + +func TestCommandExecutor_Execute_Success(t *testing.T) { + tests := []struct { + name string + commands []*exec.Cmd + }{ + { + name: "single command", + commands: []*exec.Cmd{ + exec.Command("echo", "test"), + }, + }, + { + name: "multiple commands", + commands: []*exec.Cmd{ + exec.Command("echo", "test1"), + exec.Command("echo", "test2"), + exec.Command("echo", "test3"), + }, + }, + { + name: "empty commands", + commands: []*exec.Cmd{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + ctx = log.WithFields(ctx) + executor := mediaconvert.NewCommandExecutor(ctx) + + err := executor.Execute(tt.commands) + assert.NoError(t, err) + }) + } +} + +func TestCommandExecutor_Execute_Error(t *testing.T) { + tests := []struct { + name string + commands []*exec.Cmd + expectedError bool + }{ + { + name: "single failing command", + commands: []*exec.Cmd{ + exec.Command("false"), + }, + expectedError: true, + }, + { + name: "mixed success and failure", + commands: []*exec.Cmd{ + exec.Command("echo", "success"), + exec.Command("false"), + exec.Command("echo", "another success"), + }, + expectedError: true, + }, + { + name: "non-existent command", + commands: []*exec.Cmd{ + exec.Command("this-command-does-not-exist"), + }, + expectedError: true, + }, + { + name: "multiple failures", + commands: []*exec.Cmd{ + exec.Command("false"), + exec.Command("false"), + exec.Command("false"), + }, + expectedError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + ctx = log.WithFields(ctx) + executor := mediaconvert.NewCommandExecutor(ctx) + + err := executor.Execute(tt.commands) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestCommandExecutor_Execute_Parallel(t *testing.T) { + // Create commands that sleep for different durations + commands := []*exec.Cmd{ + exec.Command("sleep", "0.1"), + exec.Command("sleep", "0.1"), + exec.Command("sleep", "0.1"), + exec.Command("sleep", "0.1"), + } + + ctx := context.Background() + ctx = log.WithFields(ctx) + executor := mediaconvert.NewCommandExecutor(ctx) + + start := time.Now() + err := executor.Execute(commands) + duration := time.Since(start) + + assert.NoError(t, err) + // If commands run in parallel, total time should be close to 0.1s, not 0.4s + assert.Less(t, duration, 200*time.Millisecond) +} diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 7d8c0316d47..f85d034532f 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -16,10 +16,8 @@ package mediaconvert import ( - "bytes" "context" "fmt" - "io" "os" "os/exec" "path/filepath" @@ -44,13 +42,6 @@ type Transcoder struct { logger *zap.Logger } -// Command represents a ffmpeg command -type Command struct { - cmd *exec.Cmd - stdoutBuf *bytes.Buffer - stderrBuf *bytes.Buffer -} - // NewTranscoder creates a new instance of task transcoder func NewTranscoder(ctx context.Context, cfg *config.Config) *Transcoder { var p = &Transcoder{ @@ -180,8 +171,8 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er BuildRawVideoCommand(&opts), BuildScalingVideoCommand(&opts), } - var 
cmds []Command + var cmds []*exec.Cmd for _, args := range argsSlice { if len(args) == 0 { logger.Debug("skip empty command") @@ -195,46 +186,19 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er return nil, errors.Wrapf(cmdErr, "can not create a new command") } - var command = Command{ - cmd: cmd, - stdoutBuf: &bytes.Buffer{}, - stderrBuf: &bytes.Buffer{}, - } - - cmd.Stdout = io.MultiWriter(os.Stdout, command.stdoutBuf) - cmd.Stderr = io.MultiWriter(os.Stderr, command.stderrBuf) - - cmds = append(cmds, command) - if startErr := cmd.Start(); startErr != nil { - logger.Error("can not start a command", zap.Error(startErr), zap.Strings("args", args)) - go uploader.Cancel() - return nil, errors.Wrapf(startErr, "can not start a command") - } + cmds = append(cmds, cmd) } - logger.Debug("phase 7: wait for the result") - - for _, cmd := range cmds { - var cmdErr error - if cmdErr = cmd.cmd.Wait(); cmdErr == nil { - continue - } - - logger.Error("can not wait for command end", zap.Error(cmdErr), zap.String("cmd", cmd.cmd.String())) - if _, writeErr := os.Stdout.Write(cmd.stdoutBuf.Bytes()); writeErr != nil { - logger.Error("can not write stdout ", zap.Error(writeErr)) - } - if _, writeErr := os.Stderr.Write(cmd.stderrBuf.Bytes()); writeErr != nil { - logger.Error("can not write stderr", zap.Error(writeErr)) - } + executor := NewCommandExecutor(ctx) + if execErr := executor.Execute(cmds); execErr != nil { uploader.Cancel() - return nil, errors.Wrapf(cmdErr, "can not wait for command end") + return nil, errors.Wrapf(execErr, "can not execute command") } - logger.Debug("phase 8: schedule cleanup") + logger.Debug("phase 7: schedule cleanup") uploader.Stop() - logger.Debug("phase 9: try to set metadata") + logger.Debug("phase 8: try to set metadata") var result = TaskResult{ Width: videoStream.Width, From 2e04e5c0914ec3ce99e896d8f0eb91622570e873 Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 24 Jun 2025 17:24:57 +0400 Subject: [PATCH 125/636] Fix api (#75) Signed-off-by: Kristina Fefelova --- packages/rest-client/src/rest.ts | 270 +++++++++++++--------------- packages/rest-client/src/types.ts | 2 +- packages/sdk-types/src/serverApi.ts | 1 + packages/server/src/middlewares.ts | 2 +- 4 files changed, 131 insertions(+), 144 deletions(-) diff --git a/packages/rest-client/src/rest.ts b/packages/rest-client/src/rest.ts index 62e0f822974..47fc34e0481 100644 --- a/packages/rest-client/src/rest.ts +++ b/packages/rest-client/src/rest.ts @@ -13,7 +13,7 @@ // limitations under the License. 
// -import { concatLink } from '@hcengineering/core' +import core, { concatLink, generateId, OperationDomain, TxDomainEvent } from '@hcengineering/core' import { type EventResult, type Event, @@ -79,15 +79,49 @@ class RestClientImpl implements RestClient { } } - async event (event: Event): Promise { - const response = await fetch(concatLink(this.endpoint, `/api/v1/event/${this.workspace}`), { + private wrapEvent (event: Event, modifiedBy: SocialID): TxDomainEvent { + return { + _id: generateId(), + _class: core.class.TxDomainEvent, + space: core.space.Tx, + objectSpace: core.space.Tx, + domain: 'communication' as OperationDomain, + event, + modifiedBy, + modifiedOn: Date.now() + } + } + + private async find(operation: string, params: Record): Promise { + const searchParams = new URLSearchParams() + if (Object.keys(params).length > 0) { + searchParams.append('params', JSON.stringify(params)) + } + const requestUrl = concatLink( + this.endpoint, + `/api/v1/request/communication/${operation}/${this.workspace}?${searchParams.toString()}` + ) + return await retry( + async () => { + const response = await fetch(requestUrl, this.requestInit()) + if (!response.ok) { + throw new Error(response.statusText) + } + return await extractJson(response) + }, + { retries } + ) + } + + async event (event: Event, socialId: SocialID): Promise { + const response = await fetch(concatLink(this.endpoint, `/api/v1/tx/${this.workspace}`), { method: 'POST', headers: { 'Content-Type': 'application/json', Authorization: 'Bearer ' + this.token }, keepalive: true, - body: JSON.stringify(event) + body: JSON.stringify(this.wrapEvent(event, socialId)) }) if (!response.ok) { throw new Error(response.statusText) @@ -106,18 +140,21 @@ class RestClientImpl implements RestClient { messageId?: MessageID, options?: CreateMessageOptions ): Promise { - const result = await this.event({ - type: MessageEventType.CreateMessage, - messageType: type, - cardId, - cardType, - content, - extra, - socialId, - date, - messageId, - options - }) + const result = await this.event( + { + type: MessageEventType.CreateMessage, + messageType: type, + cardId, + cardType, + content, + extra, + socialId, + date, + messageId, + options + }, + socialId + ) return result as CreateMessageResult } @@ -130,26 +167,32 @@ class RestClientImpl implements RestClient { date?: Date, options?: UpdatePatchOptions ): Promise { - await this.event({ - type: MessageEventType.UpdatePatch, - cardId, - messageId, - content, - extra, - socialId, - date, - options - }) + await this.event( + { + type: MessageEventType.UpdatePatch, + cardId, + messageId, + content, + extra, + socialId, + date, + options + }, + socialId + ) } async removeMessage (cardId: CardID, messageId: MessageID, socialId: SocialID, date?: Date): Promise { - await this.event({ - type: MessageEventType.RemovePatch, - cardId, - messageId, - socialId, - date - }) + await this.event( + { + type: MessageEventType.RemovePatch, + cardId, + messageId, + socialId, + date + }, + socialId + ) } async attachBlobs ( @@ -159,19 +202,22 @@ class RestClientImpl implements RestClient { socialId: SocialID, date?: Date ): Promise { - await this.event({ - type: MessageEventType.BlobPatch, - cardId, - messageId, - operations: [ - { - opcode: 'attach', - blobs - } - ], - socialId, - date - }) + await this.event( + { + type: MessageEventType.BlobPatch, + cardId, + messageId, + operations: [ + { + opcode: 'attach', + blobs + } + ], + socialId, + date + }, + socialId + ) } async detachBlobs ( @@ -181,19 +227,22 @@ class 
RestClientImpl implements RestClient { socialId: SocialID, date?: Date ): Promise { - await this.event({ - type: MessageEventType.BlobPatch, - cardId, - messageId, - operations: [ - { - opcode: 'detach', - blobIds - } - ], - socialId, - date - }) + await this.event( + { + type: MessageEventType.BlobPatch, + cardId, + messageId, + operations: [ + { + opcode: 'detach', + blobIds + } + ], + socialId, + date + }, + socialId + ) } async setBlobs ( @@ -203,100 +252,37 @@ class RestClientImpl implements RestClient { socialId: SocialID, date?: Date ): Promise { - await this.event({ - type: MessageEventType.BlobPatch, - cardId, - messageId, - operations: [ - { - opcode: 'set', - blobs - } - ], - socialId, - date - }) + await this.event( + { + type: MessageEventType.BlobPatch, + cardId, + messageId, + operations: [ + { + opcode: 'set', + blobs + } + ], + socialId, + date + }, + socialId + ) } async findMessages (params: FindMessagesParams): Promise { - const searchParams = new URLSearchParams() - if (Object.keys(params).length > 0) { - searchParams.append('params', JSON.stringify(params)) - } - const requestUrl = concatLink(this.endpoint, `/api/v1/find-messages/${this.workspace}?${searchParams.toString()}`) - - return await retry( - async () => { - const response = await fetch(requestUrl, this.requestInit()) - if (!response.ok) { - throw new Error(response.statusText) - } - return await extractJson(response) - }, - { retries } - ) + return await this.find('findMessages', params) } async findMessagesGroups (params: FindMessagesGroupsParams): Promise { - const searchParams = new URLSearchParams() - if (Object.keys(params).length > 0) { - searchParams.append('params', JSON.stringify(params)) - } - const requestUrl = concatLink( - this.endpoint, - `/api/v1/find-messages-groups/${this.workspace}?${searchParams.toString()}` - ) - return await retry( - async () => { - const response = await fetch(requestUrl, this.requestInit()) - if (!response.ok) { - throw new Error(response.statusText) - } - return await extractJson(response) - }, - { retries } - ) + return await this.find('findMessagesGroups', params) } async findNotificationContexts (params: FindNotificationContextParams): Promise { - const searchParams = new URLSearchParams() - if (Object.keys(params).length > 0) { - searchParams.append('params', JSON.stringify(params)) - } - const requestUrl = concatLink( - this.endpoint, - `/api/v1/find-notification-contexts/${this.workspace}?${searchParams.toString()}` - ) - return await retry( - async () => { - const response = await fetch(requestUrl, this.requestInit()) - if (!response.ok) { - throw new Error(response.statusText) - } - return await extractJson(response) - }, - { retries } - ) + return await this.find('findNotificationContexts', params) } async findNotifications (params: FindNotificationsParams): Promise { - const searchParams = new URLSearchParams() - if (Object.keys(params).length > 0) { - searchParams.append('params', JSON.stringify(params)) - } - const requestUrl = concatLink( - this.endpoint, - `/api/v1/find-notifications/${this.workspace}?${searchParams.toString()}` - ) - return await retry( - async () => { - const response = await fetch(requestUrl, this.requestInit()) - if (!response.ok) { - throw new Error(response.statusText) - } - return await extractJson(response) - }, - { retries } - ) + return await this.find('findNotifications', params) } } diff --git a/packages/rest-client/src/types.ts b/packages/rest-client/src/types.ts index 3b5c1c675be..cf92ff888bb 100644 --- 
a/packages/rest-client/src/types.ts +++ b/packages/rest-client/src/types.ts @@ -46,7 +46,7 @@ export interface RestClient { findNotificationContexts: (params: FindNotificationContextParams) => Promise findNotifications: (params: FindNotificationsParams) => Promise - event: (event: Event) => Promise + event: (event: Event, socialId: SocialID) => Promise createMessage: ( cardId: CardID, diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 2be491af852..93ab1093191 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -34,6 +34,7 @@ import type { EventResult, Event } from './events/event' export interface SessionData { sessionId?: string account: Account + derived?: boolean contextData?: any } diff --git a/packages/server/src/middlewares.ts b/packages/server/src/middlewares.ts index 0c7f3bf4812..aefd96dbed8 100644 --- a/packages/server/src/middlewares.ts +++ b/packages/server/src/middlewares.ts @@ -175,7 +175,7 @@ export class Middlewares { async event (session: SessionData, event: Event): Promise { if (this.head === undefined) return {} - return (await this.head?.event(session, event as Enriched, false)) ?? {} + return (await this.head?.event(session, event as Enriched, session.derived ?? false)) ?? {} } async closeSession (sessionId: string): Promise { From 0cb756d440f8699bc190f2dfde88de928aa793a9 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Thu, 26 Jun 2025 20:04:19 +0700 Subject: [PATCH 126/636] feat: fast live recording Signed-off-by: Alexander Onnikov --- internal/pkg/api/v1/transcoding/handler.go | 2 +- internal/pkg/config/config.go | 18 +- internal/pkg/manifest/hls.go | 13 +- internal/pkg/manifest/hls_test.go | 20 ++- internal/pkg/mediaconvert/command.go | 153 ++++++++--------- internal/pkg/mediaconvert/command_test.go | 88 +++++----- internal/pkg/mediaconvert/coordinator.go | 115 +++++++++---- internal/pkg/mediaconvert/multipart.go | 188 +++++++++++++++++++++ internal/pkg/mediaconvert/scheduler.go | 41 +++-- internal/pkg/mediaconvert/strategy.go | 67 ++++++++ internal/pkg/mediaconvert/stream.go | 172 ++++++++++++++----- internal/pkg/mediaconvert/transcoder.go | 35 ++-- internal/pkg/profile/profile.go | 185 ++++++++++++++++++++ internal/pkg/profile/profile_test.go | 173 +++++++++++++++++++ internal/pkg/queue/queue.go | 6 + internal/pkg/queue/worker.go | 2 +- internal/pkg/storage/datalake.go | 137 +++++++++++++++ internal/pkg/storage/storage.go | 14 ++ 18 files changed, 1163 insertions(+), 266 deletions(-) create mode 100644 internal/pkg/mediaconvert/multipart.go create mode 100644 internal/pkg/mediaconvert/strategy.go create mode 100644 internal/pkg/profile/profile.go create mode 100644 internal/pkg/profile/profile_test.go diff --git a/internal/pkg/api/v1/transcoding/handler.go b/internal/pkg/api/v1/transcoding/handler.go index c3a3ecfc92c..b9871f7e123 100644 --- a/internal/pkg/api/v1/transcoding/handler.go +++ b/internal/pkg/api/v1/transcoding/handler.go @@ -68,7 +68,7 @@ func (t *trascodeHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } -// NewHandler creates a new trnascoding http handler, requires context and config. +// NewHandler creates a new transcoding http handler, requires context and config. 
func NewHandler(ctx context.Context, cfg *config.Config) http.Handler { return &trascodeHandler{ scheduler: mediaconvert.NewScheduler(ctx, cfg), diff --git a/internal/pkg/config/config.go b/internal/pkg/config/config.go index f182a610757..ca0e4aef228 100644 --- a/internal/pkg/config/config.go +++ b/internal/pkg/config/config.go @@ -25,15 +25,15 @@ import ( // Config represents configuration for the huly-stream application. type Config struct { - SentryDsn string `split_words:"true" default:"" desc:"sentry dsn value"` - LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` - ServerSecret string `split_words:"true" default:"" desc:"server secret required to generate and verify tokens"` - PprofEnabled bool `split_words:"true" default:"true" desc:"starts profile server on localhost:6060 if true"` - Insecure bool `split_words:"true" default:"false" desc:"ignores authorization check if true"` - ServeURL string `split_words:"true" desc:"listen on url" default:"0.0.0.0:1080"` - EndpointURL *url.URL `split_words:"true" default:"s3://127.0.0.1:9000" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` - MaxParallelScalingCount int `split_words:"true" default:"2" desc:"how much parallel scaling can be processed"` - MaxThreadCount int `split_words:"true" default:"4" desc:"max number of threads for transcoder"` + SentryDsn string `split_words:"true" default:"" desc:"sentry dsn value"` + LogLevel string `split_words:"true" default:"debug" desc:"sets log level for the application"` + ServerSecret string `split_words:"true" default:"" desc:"server secret required to generate and verify tokens"` + PprofEnabled bool `split_words:"true" default:"true" desc:"starts profile server on localhost:6060 if true"` + Insecure bool `split_words:"true" default:"false" desc:"ignores authorization check if true"` + ServeURL string `split_words:"true" desc:"listen on url" default:"0.0.0.0:1080"` + EndpointURL *url.URL `split_words:"true" default:"s3://127.0.0.1:9000" desc:"S3 or Datalake endpoint, example: s3://my-ip-address, datalake://my-ip-address"` + MaxParallelTranscodingCount int `split_words:"true" default:"2" desc:"how much parallel transcodings can be processed"` + MaxThreadCount int `split_words:"true" default:"4" desc:"max number of threads for transcoder"` QueueConfig string `split_words:"true" default:"" desc:"queue config"` Region string `split_words:"true" default:"" desc:"service region"` diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index dcb421fe50c..7b2934ea46d 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -18,13 +18,12 @@ import ( "fmt" "os" "path/filepath" - "strings" - "github.com/hcengineering/stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/profile" ) // GenerateHLSPlaylist generates master file for master files for resolution levels -func GenerateHLSPlaylist(levels []string, outputPath, uploadID string) error { +func GenerateHLSPlaylist(profiles []profile.VideoProfile, outputPath, uploadID string) error { p := filepath.Join(outputPath, uploadID, fmt.Sprintf("%v_master.m3u8", uploadID)) d := filepath.Dir(p) _ = os.MkdirAll(d, os.ModePerm) @@ -40,16 +39,16 @@ func GenerateHLSPlaylist(levels []string, outputPath, uploadID string) error { return err } - for _, res := range levels { - var bandwidth = resconv.Bandwidth(res) - var resolution = strings.ReplaceAll(resconv.Resolution(res), ":", "x") + for _, profile := range profiles { + var 
bandwidth = profile.Bandwidth + var resolution = fmt.Sprintf("%vx%v", profile.Width, profile.Height) _, err = file.WriteString(fmt.Sprintf("#EXT-X-STREAM-INF:BANDWIDTH=%d,RESOLUTION=%v\n", bandwidth, resolution)) if err != nil { return err } - _, err = file.WriteString(fmt.Sprintf("%s_%s_master.m3u8\n", uploadID, res)) + _, err = file.WriteString(fmt.Sprintf("%s_%s_master.m3u8\n", uploadID, profile.Name)) if err != nil { return err } diff --git a/internal/pkg/manifest/hls_test.go b/internal/pkg/manifest/hls_test.go index a37c63b8742..5ad6dd6758f 100644 --- a/internal/pkg/manifest/hls_test.go +++ b/internal/pkg/manifest/hls_test.go @@ -19,14 +19,24 @@ import ( "testing" "github.com/hcengineering/stream/internal/pkg/manifest" + "github.com/hcengineering/stream/internal/pkg/profile" "github.com/stretchr/testify/require" ) func TestGenerateHLSPlaylist(t *testing.T) { - resolutions := []string{"320p", "480p", "720p", "1080p", "4k", "8k"} + profiles := []profile.VideoProfile{ + profile.Profile360p, + profile.Profile480p, + profile.Profile720p, + profile.Profile1080p, + profile.Profile1440p, + } uploadID := "test123" + defer func() { + _ = os.RemoveAll(uploadID) + }() - err := manifest.GenerateHLSPlaylist(resolutions, "", uploadID) + err := manifest.GenerateHLSPlaylist(profiles, "", uploadID) require.NoError(t, err) outputPath := filepath.Join(uploadID, uploadID+"_master.m3u8") @@ -42,10 +52,8 @@ func TestGenerateHLSPlaylist(t *testing.T) { require.Contains(t, playlistContent, "#EXTM3U", "File must start with #EXTM3U") - for _, res := range resolutions { - expectedLine := uploadID + "_" + res + "_master.m3u8" + for _, prof := range profiles { + expectedLine := uploadID + "_" + prof.Name + "_master.m3u8" require.Contains(t, playlistContent, expectedLine, "Missing expected reference: "+expectedLine) } - - _ = os.RemoveAll(uploadID) } diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 3b026240315..895b5e08c26 100644 --- a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -21,11 +21,13 @@ import ( "io" "os/exec" "path/filepath" + "strconv" "strings" "github.com/pkg/errors" "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/profile" "go.uber.org/zap" ) @@ -55,14 +57,12 @@ const ( // Options represents configuration for the ffmpeg command type Options struct { - Input string - OutputDir string - ScalingLevels []string - Level string - LogLevel LogLevel - Transcode bool - Threads int - UploadID string + Input string + OutputDir string + LogLevel LogLevel + Threads int + UploadID string + Profiles []profile.VideoProfile } func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.Cmd, error) { @@ -70,14 +70,13 @@ func newFfmpegCommand(ctx context.Context, in io.Reader, args []string) (*exec.C return nil, errors.New("ctx should not be nil") } - var logger = log.FromContext(ctx).With(zap.String("func", "newFFMpegCommand")) - - logger.Debug("prepared command: ", zap.Strings("args", args)) + var cmd = exec.CommandContext(ctx, "ffmpeg", args...) + cmd.Stdin = in - var result = exec.CommandContext(ctx, "ffmpeg", args...) 
- result.Stdin = in + var logger = log.FromContext(ctx).With(zap.String("func", "newFFMpegCommand")) + logger.Debug("prepared command: ", zap.String("cmd", cmd.String())) - return result, nil + return cmd, nil } func buildCommonCommand(opts *Options) []string { @@ -90,6 +89,7 @@ func buildCommonCommand(opts *Options) []string { "-i", opts.Input, } + // If input is a URL, add HTTP specific parameters if strings.HasPrefix(opts.Input, "http://") || strings.HasPrefix(opts.Input, "https://") { result = append(result, "-reconnect", "1", @@ -101,6 +101,49 @@ func buildCommonCommand(opts *Options) []string { return result } +func buildHLSCommand(profile profile.VideoProfile, opts *Options) []string { + return []string{ + "-f", "hls", + "-hls_time", "5", + // Use HLS flags + // - split_by_time + // Allow segments to start on frames other than key frames. + // This improves behavior on some players when the time between key frames is inconsistent, + // but may make things worse on others, and can cause some oddities during seeking. + // This flag should be used with the hls_time option. + // - temp_file + // Write segment data to filename.tmp and rename to filename only once the segment is complete. + "-hls_flags", "split_by_time+temp_file", + // Do not limit number of HLS segments + "-hls_list_size", "0", + "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", profile.Name)), + } +} + +func buildVideoCommand(profile profile.VideoProfile, opts *Options) []string { + crf := profile.CRF + if crf == 0 { + crf = 23 + } + command := []string{ + // Transcode only first video and optionally audio stream + "-map", "0:v:0", + "-map", "0:a?", + // Set up codecs + "-c:a", profile.AudioCodec, + "-c:v", profile.VideoCodec, + "-preset", "veryfast", + "-crf", strconv.Itoa(crf), + "-g", "60", + } + + if profile.VideoCodec != "copy" && profile.Scale { + command = append(command, "-vf", "scale=-2:"+strconv.Itoa(profile.Height)) + } + + return command +} + // BuildAudioCommand returns flags for getting the audio from the input func BuildAudioCommand(opts *Options) []string { var commonPart = buildCommonCommand(opts) @@ -111,34 +154,19 @@ func BuildAudioCommand(opts *Options) []string { ) } -// BuildRawVideoCommand returns an extremely lightweight ffmpeg command for converting raw video without extra cost. -func BuildRawVideoCommand(opts *Options) []string { - if opts.Transcode { - return append(buildCommonCommand(opts), - "-map", "0:v:0", - "-map", "0:a?", - "-c:a", "aac", - "-c:v", "libx264", - "-preset", "veryfast", - "-crf", "23", - "-g", "60", - "-f", "hls", - "-hls_time", "5", - "-hls_flags", "split_by_time+temp_file", - "-hls_list_size", "0", - "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), - filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) +// BuildVideoCommand returns ffmpeg command for converting video. 
+func BuildVideoCommand(opts *Options) []string { + if len(opts.Profiles) == 0 { + return []string{} } - return append(buildCommonCommand(opts), - "-c:a", "copy", // Copy audio stream - "-c:v", "copy", // Copy video stream - "-f", "hls", - "-hls_time", "5", - "-hls_flags", "split_by_time+temp_file", - "-hls_list_size", "0", - "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", opts.Level)), - filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, opts.Level))) + var command = buildCommonCommand(opts) + for _, profile := range opts.Profiles { + command = append(command, buildVideoCommand(profile, opts)...) + command = append(command, buildHLSCommand(profile, opts)...) + command = append(command, filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, profile.Name))) + } + return command } // BuildThumbnailCommand creates a command that creates a thumbnail for the input video @@ -150,48 +178,3 @@ func BuildThumbnailCommand(opts *Options) []string { filepath.Join(opts.OutputDir, opts.UploadID, opts.UploadID+".jpg"), ) } - -// BuildScalingVideoCommand returns flags for ffmpeg for video scaling -func BuildScalingVideoCommand(opts *Options) []string { - if len(opts.ScalingLevels) == 0 { - return []string{} - } - - if len(opts.ScalingLevels) == 1 && opts.ScalingLevels[0] == opts.Level { - return []string{} - } - - var result = buildCommonCommand(opts) - - for _, level := range opts.ScalingLevels { - if level == opts.Level { - continue - } - - result = append(result, - "-map", "0:v:0", - "-map", "0:a?", - "-vf", "scale=-2:"+level[:len(level)-1], - "-c:a", "aac", - "-c:v", "libx264", - "-preset", "veryfast", - "-crf", "23", - "-g", "60", - "-f", "hls", - "-hls_time", "5", - // Use HLS flags - // - split_by_time - // Allow segments to start on frames other than key frames. - // This improves behavior on some players when the time between key frames is inconsistent, - // but may make things worse on others, and can cause some oddities during seeking. - // This flag should be used with the hls_time option. - // - temp_file - // Write segment data to filename.tmp and rename to filename only once the segment is complete. 
- "-hls_flags", "split_by_time+temp_file", - "-hls_list_size", "0", - "-hls_segment_filename", filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_%s.ts", opts.UploadID, "%03d", level)), - filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, level))) - } - - return result -} diff --git a/internal/pkg/mediaconvert/command_test.go b/internal/pkg/mediaconvert/command_test.go index 0b83a79ab60..67beff10b44 100644 --- a/internal/pkg/mediaconvert/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -18,83 +18,79 @@ import ( "testing" "github.com/hcengineering/stream/internal/pkg/mediaconvert" - "github.com/hcengineering/stream/internal/pkg/resconv" + "github.com/hcengineering/stream/internal/pkg/profile" "github.com/stretchr/testify/require" ) -func Test_BuildVideoCommand_Scaling(t *testing.T) { - var scaleCommand = mediaconvert.BuildScalingVideoCommand(&mediaconvert.Options{ - OutputDir: "test", - Input: "pipe:0", - UploadID: "1", - Threads: 4, - LogLevel: mediaconvert.LogLevelDebug, - ScalingLevels: []string{"720p", "480p"}, - }) +func Test_BuildVideoCommand_Empty(t *testing.T) { + var profiles []profile.VideoProfile - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -vf scale=-2:720 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v:0 -map 0:a? -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + var rawCommand = mediaconvert.BuildVideoCommand(&mediaconvert.Options{ + OutputDir: "test", + Input: "pipe:0", + UploadID: "1", + Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, + Profiles: profiles, + }) - require.Contains(t, expected, strings.Join(scaleCommand, " ")) + require.Empty(t, rawCommand) } -func Test_BuildVideoCommand_Scaling_NoRaw(t *testing.T) { - var scaleCommand = mediaconvert.BuildScalingVideoCommand(&mediaconvert.Options{ - OutputDir: "test", - Input: "pipe:0", - UploadID: "1", - Threads: 4, - LogLevel: mediaconvert.LogLevelDebug, - Level: "720p", - ScalingLevels: []string{"720p", "480p"}, +func Test_BuildVideoCommand_Scaling(t *testing.T) { + var profiles = []profile.VideoProfile{ + profile.Profile720p, + profile.Profile480p, + } + + var scaleCommand = mediaconvert.BuildVideoCommand(&mediaconvert.Options{ + OutputDir: "test", + Input: "pipe:0", + UploadID: "1", + Threads: 4, + LogLevel: mediaconvert.LogLevelDebug, + Profiles: profiles, }) - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -vf scale=-2:480 -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 25 -g 60 -vf scale=-2:720 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v:0 -map 0:a? 
-c:a aac -c:v libx264 -preset veryfast -crf 27 -g 60 -vf scale=-2:480 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } -func Test_BuildVideoCommand_Raw_NoTranscode(t *testing.T) { - var rawCommand = mediaconvert.BuildRawVideoCommand(&mediaconvert.Options{ +func Test_BuildVideoCommand_Original(t *testing.T) { + var profiles = []profile.VideoProfile{ + profile.MakeProfileOriginal(640, 480), + } + + var rawCommand = mediaconvert.BuildVideoCommand(&mediaconvert.Options{ OutputDir: "test", Input: "pipe:0", UploadID: "1", Threads: 4, LogLevel: mediaconvert.LogLevelDebug, - Level: resconv.Level("651:490"), - Transcode: false, + Profiles: profiles, }) - const expected = `"-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -c:a copy -c:v copy -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a copy -c:v copy -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } -func Test_BuildVideoCommand_Raw_Transcode(t *testing.T) { - var rawCommand = mediaconvert.BuildRawVideoCommand(&mediaconvert.Options{ +func Test_BuildVideoCommand_OriginalT(t *testing.T) { + var profiles = []profile.VideoProfile{ + profile.MakeProfileOriginalT(640, 480), + } + + var rawCommand = mediaconvert.BuildVideoCommand(&mediaconvert.Options{ OutputDir: "test", Input: "pipe:0", UploadID: "1", Threads: 4, LogLevel: mediaconvert.LogLevelDebug, - Level: resconv.Level("651:490"), - Transcode: true, + Profiles: profiles, }) - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? 
-c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig_master.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } - -func Test_BuildVideoCommand_Scaling_Small(t *testing.T) { - var scaleCommand = mediaconvert.BuildScalingVideoCommand(&mediaconvert.Options{ - OutputDir: "test", - Input: "pipe:0", - UploadID: "1", - Threads: 4, - LogLevel: mediaconvert.LogLevelDebug, - Level: "360p", - ScalingLevels: []string{"360p"}, - }) - - require.Empty(t, scaleCommand) -} diff --git a/internal/pkg/mediaconvert/coordinator.go b/internal/pkg/mediaconvert/coordinator.go index d2721134ee2..d20520a203d 100644 --- a/internal/pkg/mediaconvert/coordinator.go +++ b/internal/pkg/mediaconvert/coordinator.go @@ -17,7 +17,11 @@ package mediaconvert import ( "context" + "fmt" "path/filepath" + "regexp" + "strconv" + "strings" "sync" "sync/atomic" "time" @@ -27,7 +31,6 @@ import ( "github.com/google/uuid" "github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" - "github.com/hcengineering/stream/internal/pkg/resconv" "github.com/hcengineering/stream/internal/pkg/sharedpipe" "github.com/hcengineering/stream/internal/pkg/storage" "github.com/hcengineering/stream/internal/pkg/uploader" @@ -40,7 +43,7 @@ type StreamCoordinator struct { conf *config.Config uploadOptions uploader.Options - activeScalling int32 + activeTranscoding int32 mainContext context.Context logger *zap.Logger @@ -49,6 +52,11 @@ type StreamCoordinator struct { cancels sync.Map } +var _ handler.DataStore = (*StreamCoordinator)(nil) +var _ handler.ConcaterDataStore = (*StreamCoordinator)(nil) +var _ handler.TerminaterDataStore = (*StreamCoordinator)(nil) +var _ handler.LengthDeferrerDataStore = (*StreamCoordinator)(nil) + // NewStreamCoordinator creates a new scheduler for transcode operations. 
func NewStreamCoordinator(ctx context.Context, c *config.Config) *StreamCoordinator { return &StreamCoordinator{ @@ -81,42 +89,64 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo done: make(chan struct{}), } - var scaling = resconv.SubLevels(info.MetaData["resolution"]) - var level = resconv.Level(info.MetaData["resolution"]) - var cost int64 + if atomic.AddInt32(&s.activeTranscoding, 1) > int32(s.conf.MaxParallelTranscodingCount) { + atomic.AddInt32(&s.activeTranscoding, -1) + s.logger.Debug("run out of resources for scaling") + // TODO do not transcode + } - for _, scale := range scaling { - cost += int64(resconv.Pixels(resconv.Resolution(scale))) + width, err := strconv.Atoi(info.MetaData["width"]) + if err != nil { + return nil, errors.Wrapf(err, "can not parse video width: %v", info.MetaData["width"]) } - if atomic.AddInt32(&s.activeScalling, 1) > int32(s.conf.MaxParallelScalingCount) { - atomic.AddInt32(&s.activeScalling, -1) - s.logger.Debug("run out of resources for scaling") - scaling = nil + height, err := strconv.Atoi(info.MetaData["height"]) + if err != nil { + return nil, errors.Wrapf(err, "can not parse video height: %v", info.MetaData["height"]) + } + + meta := VideoMeta{ + Width: width, + Height: height, + Codec: extractCodec(info.MetaData["contentType"]), + ContentType: extractContentType(info.MetaData["contentType"]), } + profiles := FastTranscodingProfiles(meta) var commandOptions = Options{ - Input: "pipe:0", - OutputDir: s.conf.OutputDir, - Threads: s.conf.MaxThreadCount, - UploadID: info.ID, - Transcode: true, - Level: level, - ScalingLevels: scaling, + Input: "pipe:0", + OutputDir: s.conf.OutputDir, + Threads: s.conf.MaxThreadCount, + UploadID: info.ID, + Profiles: profiles, } if s.conf.EndpointURL != nil { s.logger.Sugar().Debugf("initializing uploader for %v", info) + + // setup content uploader for transcoded outputs var opts = s.uploadOptions opts.Dir = filepath.Join(opts.Dir, info.ID) - var storage, err = storage.NewStorageByURL(s.mainContext, s.conf.Endpoint(), s.conf.EndpointURL.Scheme, info.MetaData["token"], info.MetaData["workspace"]) + // create storage backend + var stg, err = storage.NewStorageByURL(s.mainContext, s.conf.Endpoint(), s.conf.EndpointURL.Scheme, info.MetaData["token"], info.MetaData["workspace"]) if err != nil { - s.logger.Error("can not create storage by url") - return nil, err + s.logger.Error("can not create storage by url", zap.Error(err)) + return nil, errors.Wrapf(err, "can not create storage") } - var contentUploader = uploader.New(s.mainContext, storage, opts) - + stream.storage = stg + + // if storage supports multipart, initialize raw upload + if ms, ok := stg.(storage.MultipartStorage); ok { + multipart, err := NewMultipartUpload(s.mainContext, ms, info, meta.ContentType) + if err != nil { + s.logger.Error("multipart upload failed", zap.Error(err)) + return nil, errors.Wrapf(err, "multipart upload failed") + } + stream.multipart = multipart + } + // uploader for processed outputs + var contentUploader = uploader.New(s.mainContext, stg, opts) stream.contentUploader = contentUploader } @@ -127,10 +157,7 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo go func() { stream.commandGroup.Wait() - if scaling != nil { - atomic.AddInt32(&s.activeScalling, -1) - } - s.logger.Debug("returned capacity", zap.Int64("capacity", cost)) + atomic.AddInt32(&s.activeTranscoding, -1) close(stream.done) }() @@ -142,26 +169,26 @@ func (s *StreamCoordinator) NewUpload(ctx 
context.Context, info handler.FileInfo // GetUpload returns current a worker based on upload id func (s *StreamCoordinator) GetUpload(ctx context.Context, id string) (upload handler.Upload, err error) { + logger := s.logger.With(zap.String("func", "GetUpload")).With(zap.String("id", id)) + if v, ok := s.streams.Load(id); ok { - s.logger.Debug("GetUpload: found stream by id", zap.String("id", id)) + logger.Debug("found stream") var w = v.(*Stream) s.manageTimeout(w) return w, nil } - s.logger.Debug("GetUpload: stream not found", zap.String("id", id)) - return nil, errors.New("bad id") + + logger.Warn("stream not found") + return nil, fmt.Errorf("stream not found: %v", id) } // AsTerminatableUpload returns tusd handler.TerminatableUpload func (s *StreamCoordinator) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload { - var worker = upload.(*Stream) - s.logger.Debug("AsTerminatableUpload") - return worker + return upload.(*Stream) } // AsLengthDeclarableUpload returns tusd handler.LengthDeclarableUpload func (s *StreamCoordinator) AsLengthDeclarableUpload(upload handler.Upload) handler.LengthDeclarableUpload { - s.logger.Debug("AsLengthDeclarableUpload") return upload.(*Stream) } @@ -191,3 +218,23 @@ func (s *StreamCoordinator) manageTimeout(w *Stream) { } }() } + +func extractCodec(mimeType string) string { + codecRegex := regexp.MustCompile(`codecs["\s=]+([^",\s]+)`) + matches := codecRegex.FindStringSubmatch(mimeType) + codec := "unknown" + if len(matches) > 1 { + codec = matches[1] + } + + return codec +} + +func extractContentType(mimeType string) string { + contentType := "video/mp4" + parts := strings.Split(mimeType, ";") + if parts[0] != "" { + contentType = strings.TrimSpace(parts[0]) + } + return contentType +} diff --git a/internal/pkg/mediaconvert/multipart.go b/internal/pkg/mediaconvert/multipart.go new file mode 100644 index 00000000000..f29e35ef7f1 --- /dev/null +++ b/internal/pkg/mediaconvert/multipart.go @@ -0,0 +1,188 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Package mediaconvert provides types and functions for video transcoding +package mediaconvert + +import ( + "bytes" + "context" + "time" + + "github.com/pkg/errors" + + "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/storage" + "github.com/tus/tusd/v2/pkg/handler" + "go.uber.org/zap" +) + +// minimum size for multipart parts (backend requires >=5MiB for all but last part) +const minPartSize = 5 * 1024 * 1024 + +type MultipartUpload struct { + logger *zap.Logger + buffer *bytes.Buffer + info handler.FileInfo + + storage storage.MultipartStorage + objectName string + uploadID string + parts []storage.MultipartPart + nextPartNum int + + terminated bool + completed bool + + bytesWritten int64 + bytesUploaded int64 +} + +func NewMultipartUpload( + ctx context.Context, + multipartStorage storage.MultipartStorage, + info handler.FileInfo, + contentType string, +) (*MultipartUpload, error) { + objectName := info.ID + uploadID, err := multipartStorage.MultipartUploadStart(ctx, objectName, contentType) + if err != nil { + return nil, errors.Wrap(err, "failed to initialize multipart upload") + } + + logger := log.FromContext(ctx).With(zap.String("multipart", "upload"), zap.String("uploadID", uploadID)) + + return &MultipartUpload{ + logger: logger, + buffer: bytes.NewBuffer(nil), + info: info, + storage: multipartStorage, + objectName: objectName, + uploadID: uploadID, + parts: make([]storage.MultipartPart, 0), + nextPartNum: 1, + }, nil +} + +// Write writes chunk of data to the storage +func (w *MultipartUpload) Write(ctx context.Context, data []byte) error { + if w.terminated || w.completed { + return errors.New("upload already terminated or completed") + } + + if err := ctx.Err(); err != nil { + return err + } + + _, err := w.buffer.Write(data) + if err != nil { + return errors.Wrap(err, "failed to write to buffer") + } + w.bytesWritten += int64(len(data)) + + // flush parts of at least minPartSize + for w.buffer.Len() >= minPartSize { + partNum := w.nextPartNum + partData := w.buffer.Next(minPartSize) + + if err := ctx.Err(); err != nil { + return err + } + + part, err := w.storage.MultipartUploadPart(ctx, w.objectName, w.uploadID, partNum, partData) + if err != nil { + w.logger.Error("multipart upload part failed", zap.Error(err), zap.Int("partNumber", partNum)) + return errors.Wrap(err, "failed to upload part") + } + + w.bytesUploaded += int64(len(partData)) + w.parts = append(w.parts, *part) + w.nextPartNum++ + } + + return nil +} + +// Terminate cancels the upload +func (w *MultipartUpload) Terminate(ctx context.Context) error { + if w.terminated || w.completed { + return nil + } + w.terminated = true + + w.logger.Debug("terminating multipart upload", zap.Int("parts", len(w.parts))) + + // create new context in case the main context is cancelled + cancelCtx := ctx + if ctx.Err() != nil { + var cancel context.CancelFunc + cancelCtx, cancel = context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + } + + if err := w.storage.MultipartUploadCancel(cancelCtx, w.objectName, w.uploadID); err != nil { + w.logger.Error("multipart upload cancel failed", zap.Error(err)) + return errors.Wrap(err, "failed to cancel multipart upload") + } + + w.logger.Debug("multipart upload terminated", zap.Int("parts", len(w.parts))) + + return nil +} + +// Complete uploads last bytes and completes the upload +func (w *MultipartUpload) Complete(ctx context.Context) error { + if w.completed { + return nil + } + + if w.terminated { + return 
errors.New("cannot complete terminated upload") + } + + w.logger.Debug("finishing multipart upload", zap.Int("parts", len(w.parts))) + + // flush any remaining data as last part + if w.buffer.Len() > 0 { + partNum := w.nextPartNum + lastData := w.buffer.Bytes() + + part, err := w.storage.MultipartUploadPart(ctx, w.objectName, w.uploadID, partNum, lastData) + if err != nil { + w.logger.Error("multipart upload last part failed", zap.Error(err), zap.Int("partNumber", partNum)) + return errors.Wrap(err, "failed to upload last part") + } + + w.bytesUploaded += int64(len(lastData)) + w.parts = append(w.parts, *part) + } + + if len(w.parts) == 0 { + w.logger.Warn("cannot complete upload with no parts") + return errors.New("cannot complete upload with no parts") + } + + if err := w.storage.MultipartUploadComplete(ctx, w.objectName, w.uploadID, w.parts); err != nil { + w.logger.Error("multipart upload complete failed", zap.Error(err)) + return errors.Wrap(err, "failed to complete multipart upload") + } + + w.completed = true + w.logger.Info( + "multipart upload completed", + zap.Int64("bytesUploaded", w.bytesUploaded), + zap.Int64("bytesWritten", w.bytesWritten), + ) + + return nil +} diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index da3190276ba..2f3e2f6f55c 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -21,13 +21,13 @@ import ( "os" "os/exec" "path/filepath" + "strings" "time" "github.com/google/uuid" "github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" "github.com/hcengineering/stream/internal/pkg/manifest" - "github.com/hcengineering/stream/internal/pkg/resconv" "github.com/hcengineering/stream/internal/pkg/storage" "github.com/hcengineering/stream/internal/pkg/token" "github.com/hcengineering/stream/internal/pkg/uploader" @@ -94,7 +94,7 @@ func (p *Scheduler) start() { close(p.taskCh) }() - for range p.cfg.MaxParallelScalingCount { + for range p.cfg.MaxParallelTranscodingCount { go func() { for task := range p.taskCh { p.processTask(p.ctx, task) @@ -185,17 +185,24 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { logger.Debug("video stream found", zap.String("codec", videoStream.CodecName), zap.Int("width", videoStream.Width), zap.Int("height", videoStream.Height)) - var res = fmt.Sprintf("%v:%v", videoStream.Width, videoStream.Height) - var codec = videoStream.CodecName - var level = resconv.Level(res) + meta := VideoMeta{ + Width: videoStream.Width, + Height: videoStream.Height, + Codec: videoStream.CodecName, + ContentType: stat.Type, + } + + var profiles = DefaultTranscodingProfiles(meta) + var opts = Options{ - Input: sourceFilePath, - OutputDir: p.cfg.OutputDir, - Level: level, - Transcode: !IsHLSSupportedVideoCodec(codec), - ScalingLevels: append(resconv.SubLevels(res), level), - UploadID: task.ID, - Threads: p.cfg.MaxThreadCount, + Input: sourceFilePath, + OutputDir: p.cfg.OutputDir, + UploadID: task.ID, + Threads: p.cfg.MaxThreadCount, + Profiles: profiles, + // Level: level, + // Transcode: !IsHLSSupportedVideoCodec(codec), + // ScalingLevels: append(resconv.SubLevels(res), level), } logger.Debug("phase 5: start async upload process") @@ -210,7 +217,7 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { SourceFile: sourceFilePath, }) - err = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) + err = manifest.GenerateHLSPlaylist(profiles, p.cfg.OutputDir, 
opts.UploadID) if err != nil { logger.Error("can not generate hls playlist", zap.String("out", p.cfg.OutputDir), zap.String("uploadID", opts.UploadID)) _ = os.RemoveAll(destinationFolder) @@ -223,8 +230,7 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { var argsSlice = [][]string{ BuildThumbnailCommand(&opts), - BuildRawVideoCommand(&opts), - BuildScalingVideoCommand(&opts), + BuildVideoCommand(&opts), } var cmds []*exec.Cmd @@ -295,6 +301,11 @@ func IsHLSSupportedVideoCodec(codec string) bool { case "h264", "h265": return true default: + if strings.HasPrefix(codec, "avc1") { + return true + } else if strings.HasPrefix(codec, "av1") { + return true + } return false } } diff --git a/internal/pkg/mediaconvert/strategy.go b/internal/pkg/mediaconvert/strategy.go new file mode 100644 index 00000000000..387db9de3fd --- /dev/null +++ b/internal/pkg/mediaconvert/strategy.go @@ -0,0 +1,67 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package mediaconvert + +import ( + "fmt" + + "github.com/hcengineering/stream/internal/pkg/profile" + "github.com/hcengineering/stream/internal/pkg/resconv" +) + +type VideoMeta struct { + Width int + Height int + Codec string + ContentType string +} + +// DefaultTranscodingProfiles uses original resolution and two more resolutions +func DefaultTranscodingProfiles(meta VideoMeta) []profile.VideoProfile { + var profiles = make([]profile.VideoProfile, 0) + + var res = fmt.Sprintf("%v:%v", meta.Width, meta.Height) + var sublevels = resconv.SubLevels(res) + + if IsHLSSupportedVideoCodec(meta.Codec) { + profile := profile.MakeProfileOriginal(meta.Width, meta.Height) + profiles = append(profiles, profile) + } else { + profile := profile.MakeProfileOriginalT(meta.Width, meta.Height) + profiles = append(profiles, profile) + } + + for _, level := range sublevels { + if profile, ok := profile.GetProfileByName(level); ok { + profiles = append(profiles, profile) + } + } + + return profiles +} + +// FastTranscodingProfiles uses fastest possible video profile +func FastTranscodingProfiles(meta VideoMeta) []profile.VideoProfile { + if IsHLSSupportedVideoCodec(meta.Codec) { + return []profile.VideoProfile{ + profile.MakeProfileOriginal(meta.Width, meta.Height), + } + } + + return []profile.VideoProfile{ + profile.Profile360p, + } +} diff --git a/internal/pkg/mediaconvert/stream.go b/internal/pkg/mediaconvert/stream.go index e41042d2e01..135ce989977 100644 --- a/internal/pkg/mediaconvert/stream.go +++ b/internal/pkg/mediaconvert/stream.go @@ -11,44 +11,69 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-// Package mediaconvert provides types and functions for video trnascoding +// Package mediaconvert provides types and functions for video transcoding package mediaconvert import ( "context" "io" + "os/exec" "sync" "github.com/pkg/errors" "github.com/hcengineering/stream/internal/pkg/manifest" "github.com/hcengineering/stream/internal/pkg/sharedpipe" + "github.com/hcengineering/stream/internal/pkg/storage" "github.com/hcengineering/stream/internal/pkg/uploader" "github.com/tus/tusd/v2/pkg/handler" "go.uber.org/zap" ) -// Stream manages client's input and transcodes it based on the passsed configuration +// Stream manages client's input and transcodes it based on the passed configuration type Stream struct { contentUploader uploader.Uploader logger *zap.Logger info handler.FileInfo writer *sharedpipe.Writer reader *sharedpipe.Reader + storage storage.Storage + multipart *MultipartUpload commandGroup sync.WaitGroup done chan struct{} } -// WriteChunk calls when client sends a chunk of raw data +var _ handler.Upload = (*Stream)(nil) +var _ handler.ConcatableUpload = (*Stream)(nil) +var _ handler.TerminatableUpload = (*Stream)(nil) +var _ handler.LengthDeclarableUpload = (*Stream)(nil) + +// WriteChunk is called when client sends a chunk of raw data func (w *Stream) WriteChunk(ctx context.Context, _ int64, src io.Reader) (int64, error) { w.logger.Debug("Write Chunk start", zap.Int64("offset", w.info.Offset)) - var bytes, err = io.ReadAll(src) - _, _ = w.writer.Write(bytes) - var n = int64(len(bytes)) + data, err := io.ReadAll(src) + if err != nil { + return 0, err + } + // write into pipeline for transcoding + written, err := w.writer.Write(data) + if err != nil { + return int64(written), err + } + + n := int64(len(data)) w.info.Offset += n - w.logger.Debug("Write Chunk end", zap.Int64("offset", w.info.Offset), zap.Error(err)) - return n, err + + if w.multipart != nil { + if writeErr := w.multipart.Write(ctx, data); writeErr != nil { + w.logger.Error("multipart upload part failed", zap.Error(writeErr)) + return n, writeErr + } + } + + w.logger.Debug("write chunk end", zap.Int64("offset", w.info.Offset), zap.Error(err)) + return n, nil } // DeclareLength sets length of the video input @@ -73,14 +98,40 @@ func (w *Stream) GetReader(ctx context.Context) (io.ReadCloser, error) { // Terminate calls when upload has failed func (w *Stream) Terminate(ctx context.Context) error { - w.logger.Debug("Terminating...") + w.logger.Debug("terminate upload") + + // Close the writer first to signal EOF to all readers + if err := w.writer.Close(); err != nil { + w.logger.Error("failed to close writer", zap.Error(err)) + return err + } + + var wg sync.WaitGroup + + // cancel upload if in progress if w.contentUploader != nil { + wg.Add(1) go func() { + defer wg.Done() w.commandGroup.Wait() w.contentUploader.Cancel() }() } - return w.writer.Close() + + // cancel multipart upload if in progress + if w.multipart != nil { + wg.Add(1) + go func() { + defer wg.Done() + if err := w.multipart.Terminate(ctx); err != nil { + w.logger.Error("multipart upload cancel failed", zap.Error(err)) + } + }() + } + + wg.Wait() + + return nil } // ConcatUploads calls when upload resumed after fail @@ -94,16 +145,56 @@ func (w *Stream) ConcatUploads(ctx context.Context, partialUploads []handler.Upl // FinishUpload calls when upload finished without errors on the client side func (w *Stream) FinishUpload(ctx context.Context) error { - w.logger.Debug("finishing upload...") + w.logger.Debug("finish upload") + + // Close the writer first 
to signal EOF to all readers + if err := w.writer.Close(); err != nil { + w.logger.Error("failed to close writer", zap.Error(err)) + return err + } + + var wg sync.WaitGroup if w.contentUploader != nil { + wg.Add(1) go func() { + defer wg.Done() w.commandGroup.Wait() w.contentUploader.Stop() }() } - return w.writer.Close() + // finalize raw multipart stream if supported + if w.multipart != nil { + wg.Add(1) + go func() { + defer wg.Done() + if err := w.multipart.Complete(ctx); err != nil { + w.logger.Error("multipart upload complete failed", zap.Error(err)) + return + } + + if metaProvider, ok := w.storage.(storage.MetaProvider); ok { + metaErr := metaProvider.PatchMeta( + ctx, + w.info.ID, + &storage.Metadata{ + "hls": map[string]any{ + "source": w.info.ID + "_master.m3u8", + "thumbnail": w.info.ID + ".jpg", + }, + }, + ) + if metaErr != nil { + w.logger.Error("can not patch the source file", zap.Error(metaErr)) + } + } + }() + } + + wg.Wait() + + return nil } // AsConcatableUpload returns tusd handler.ConcatableUpload @@ -115,48 +206,39 @@ func (s *StreamCoordinator) AsConcatableUpload(upload handler.Upload) handler.Co func (w *Stream) start(ctx context.Context, options *Options) error { defer w.logger.Debug("start done") w.reader = w.writer.Transpile() - if err := manifest.GenerateHLSPlaylist(append(options.ScalingLevels, options.Level), options.OutputDir, options.UploadID); err != nil { + if err := manifest.GenerateHLSPlaylist(options.Profiles, options.OutputDir, options.UploadID); err != nil { return err } + var argsSlice = [][]string{ + BuildThumbnailCommand(options), + BuildVideoCommand(options), + } + + var cmds []*exec.Cmd + for idx, args := range argsSlice { + reader := w.reader + if idx > 0 { + reader = w.writer.Transpile() + } + + cmd, cmdErr := newFfmpegCommand(ctx, reader, args) + if cmdErr != nil { + w.logger.Error("can not create a new command", zap.Error(cmdErr), zap.Strings("args", args)) + return errors.Wrapf(cmdErr, "can not create a new command") + } + cmds = append(cmds, cmd) + } + w.commandGroup.Add(1) go func() { defer w.commandGroup.Done() - var logger = w.logger.With(zap.String("command", "raw")) - defer logger.Debug("done") - - var args = BuildRawVideoCommand(options) - var convertSourceCommand, err = newFfmpegCommand(ctx, w.reader, args) - if err != nil { - logger.Debug("can not start", zap.Error(err)) - } - err = convertSourceCommand.Run() - if err != nil { - logger.Debug("finished with error", zap.Error(err)) + executor := NewCommandExecutor(ctx) + if execErr := executor.Execute(cmds); execErr != nil { + w.logger.Error("can not execute command", zap.Error(execErr)) } }() - if len(options.ScalingLevels) > 0 { - w.commandGroup.Add(1) - var scalingCommandReader = w.writer.Transpile() - - go func() { - defer w.commandGroup.Done() - var logger = w.logger.With(zap.String("command", "scaling")) - defer logger.Debug("done") - - var args = BuildScalingVideoCommand(options) - var convertSourceCommand, err = newFfmpegCommand(ctx, scalingCommandReader, args) - if err != nil { - logger.Debug("can not start", zap.Error(err)) - } - err = convertSourceCommand.Run() - if err != nil { - logger.Debug("finished with error", zap.Error(err)) - } - }() - } - go w.contentUploader.Start() return nil diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index f85d034532f..1972f171ddf 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -26,7 +26,6 @@ import ( 
"github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" "github.com/hcengineering/stream/internal/pkg/manifest" - "github.com/hcengineering/stream/internal/pkg/resconv" "github.com/hcengineering/stream/internal/pkg/storage" "github.com/hcengineering/stream/internal/pkg/token" "github.com/hcengineering/stream/internal/pkg/uploader" @@ -119,7 +118,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er videoStream := probe.FirstVideoStream() if videoStream == nil { logger.Error("no video stream found", zap.String("filepath", sourceFilePath)) - return nil, errors.Wrapf(err, "no video stream found") + return nil, fmt.Errorf("no video stream found") } logger.Debug("video stream found", zap.String("codec", videoStream.CodecName), zap.Int("width", videoStream.Width), zap.Int("height", videoStream.Height)) @@ -129,19 +128,22 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er logger.Info("no audio stream found", zap.String("filepath", sourceFilePath)) } - var res = fmt.Sprintf("%v:%v", videoStream.Width, videoStream.Height) - var codec = videoStream.CodecName - var level = resconv.Level(res) - var sublevels = resconv.SubLevels(res) + meta := VideoMeta{ + Width: videoStream.Width, + Height: videoStream.Height, + Codec: videoStream.CodecName, + ContentType: stat.Type, + } + + var profiles = DefaultTranscodingProfiles(meta) + var opts = Options{ - Input: sourceFilePath, - OutputDir: p.cfg.OutputDir, - Level: level, - LogLevel: LogLevel(p.cfg.LogLevel), - Transcode: !IsHLSSupportedVideoCodec(codec), - ScalingLevels: append(sublevels, level), - UploadID: task.ID, - Threads: p.cfg.MaxThreadCount, + Input: sourceFilePath, + OutputDir: p.cfg.OutputDir, + LogLevel: LogLevel(p.cfg.LogLevel), + Profiles: profiles, + UploadID: task.ID, + Threads: p.cfg.MaxThreadCount, } logger.Debug("phase 5: start async upload process") @@ -156,7 +158,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er SourceFile: sourceFilePath, }) - err = manifest.GenerateHLSPlaylist(opts.ScalingLevels, p.cfg.OutputDir, opts.UploadID) + err = manifest.GenerateHLSPlaylist(profiles, p.cfg.OutputDir, opts.UploadID) if err != nil { logger.Error("can not generate hls playlist", zap.String("out", p.cfg.OutputDir), zap.String("uploadID", opts.UploadID)) return nil, errors.Wrapf(err, "can not generate hls playlist") @@ -168,8 +170,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var argsSlice = [][]string{ BuildThumbnailCommand(&opts), - BuildRawVideoCommand(&opts), - BuildScalingVideoCommand(&opts), + BuildVideoCommand(&opts), } var cmds []*exec.Cmd diff --git a/internal/pkg/profile/profile.go b/internal/pkg/profile/profile.go new file mode 100644 index 00000000000..7716ef6914b --- /dev/null +++ b/internal/pkg/profile/profile.go @@ -0,0 +1,185 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +// Package profile provides video profiles +package profile + +import ( + "fmt" + + "github.com/hcengineering/stream/internal/pkg/resconv" +) + +// VideoProfile represents a video profile +type VideoProfile struct { + Name string + Width int // 0 means keep original + Height int // 0 means keep original + Bandwidth int + Scale bool + + // Codec settings + VideoCodec string + AudioCodec string + + // Advanced encoding settings + CRF int // Constant Rate Factor (0-51 for x264) +} + +// profileOriginal is a profile for original video without transcoding +var profileOriginal = VideoProfile{ + Name: "orig", + Scale: false, + VideoCodec: "copy", + AudioCodec: "copy", + CRF: 23, +} + +// profileOriginalT is a profile for transcoding video in the original resolution +var profileOriginalT = VideoProfile{ + Name: "orig", + Scale: false, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 23, +} + +// Profile360p is a profile for transcoding video in 360p +var Profile360p = VideoProfile{ + Name: "360p", + Scale: true, + Width: 640, + Height: 360, + Bandwidth: 500000, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 28, +} + +// Profile480p is a profile for transcoding video in 480p +var Profile480p = VideoProfile{ + Name: "480p", + Scale: true, + Width: 854, + Height: 480, + Bandwidth: 2000000, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 27, +} + +// Profile720p is a profile for transcoding video in 720p +var Profile720p = VideoProfile{ + Name: "720p", + Scale: true, + Width: 1280, + Height: 720, + Bandwidth: 5000000, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 25, +} + +// Profile1080p is a profile for transcoding video in 1080p +var Profile1080p = VideoProfile{ + Name: "1080p", + Scale: true, + Width: 1920, + Height: 1080, + Bandwidth: 8000000, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 23, +} + +// Profile1440p is a profile for transcoding video in 1440p +var Profile1440p = VideoProfile{ + Name: "1440p", + Scale: true, + Width: 2560, + Height: 1440, + Bandwidth: 12000000, + VideoCodec: "libx264", + AudioCodec: "aac", + CRF: 23, +} + +// Profile2160p is a profile for transcoding video in 2160p +var Profile2160p = VideoProfile{ + Name: "2160p", + Scale: true, + Width: 3840, + Height: 2160, + Bandwidth: 25000000, + VideoCodec: "libx264", // Consider libx265 + AudioCodec: "aac", + CRF: 22, +} + +// Profile4320p is a profile for transcoding video in 360p +var Profile4320p = VideoProfile{ + Name: "4320p", + Scale: true, + Width: 7680, + Height: 4320, + Bandwidth: 50000000, + VideoCodec: "libx264", // Consider libx265 + AudioCodec: "aac", + CRF: 22, +} + +var Profiles = map[string]VideoProfile{ + "360p": Profile360p, + "480p": Profile480p, + "720p": Profile720p, + "1080p": Profile1080p, + "1440p": Profile1440p, + "2160p": Profile2160p, + "4320p": Profile4320p, +} + +func MakeProfileOriginal(width, height int) VideoProfile { + resolution := fmt.Sprintf("%v:%v", width, height) + level := resconv.Level(resolution) + bandwidth := resconv.Bandwidth(level) + + profile := profileOriginal + //profile.Name = level + profile.Width = width + profile.Height = height + profile.Bandwidth = bandwidth + + return profile +} + +func MakeProfileOriginalT(width, height int) VideoProfile { + resolution := fmt.Sprintf("%v:%v", width, height) + level := resconv.Level(resolution) + bandwidth := resconv.Bandwidth(level) + + profile := profileOriginalT + //profile.Name = level + profile.Width = width + profile.Height = height + profile.Bandwidth = bandwidth + + return profile +} + +// 
GetProfileByName returns a VideoProfile by name +func GetProfileByName(name string) (VideoProfile, bool) { + profile, ok := Profiles[name] + return profile, ok +} diff --git a/internal/pkg/profile/profile_test.go b/internal/pkg/profile/profile_test.go new file mode 100644 index 00000000000..a4ea1448fd0 --- /dev/null +++ b/internal/pkg/profile/profile_test.go @@ -0,0 +1,173 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package profile_test + +import ( + "testing" + + "github.com/hcengineering/stream/internal/pkg/profile" + "github.com/stretchr/testify/assert" +) + +func TestGetProfileByName(t *testing.T) { + tests := []struct { + name string + expected profile.VideoProfile + }{ + { + name: "360p", + expected: profile.Profile360p, + }, + { + name: "480p", + expected: profile.Profile480p, + }, + { + name: "720p", + expected: profile.Profile720p, + }, + { + name: "1080p", + expected: profile.Profile1080p, + }, + { + name: "1440p", + expected: profile.Profile1440p, + }, + { + name: "2160p", + expected: profile.Profile2160p, + }, + { + name: "4320p", + expected: profile.Profile4320p, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + profile, ok := profile.GetProfileByName(tt.name) + assert.True(t, ok) + assert.Equal(t, tt.expected, profile) + }) + } +} + +func TestGetProfileByName_Failure(t *testing.T) { + tests := []struct { + name string + }{ + { + name: "foo", + }, + { + name: "original", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, ok := profile.GetProfileByName(tt.name) + assert.False(t, ok) + }) + } +} + +func TestMakeProfileOriginal(t *testing.T) { + tests := []struct { + name string + width int + height int + expected profile.VideoProfile + }{ + { + name: "480p", + width: 640, + height: 480, + expected: profile.VideoProfile{ + Name: "orig", + VideoCodec: "copy", + AudioCodec: "copy", + Width: 640, + Height: 480, + Bandwidth: 2000000, + }, + }, + { + name: "1440p", + width: 2000, + height: 1200, + expected: profile.VideoProfile{ + Name: "orig", + VideoCodec: "copy", + AudioCodec: "copy", + Width: 2000, + Height: 1200, + Bandwidth: 8000000, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + profile := profile.MakeProfileOriginal(tt.width, tt.height) + assert.Equal(t, tt.expected, profile) + }) + } +} + +func TestMakeProfileOriginalT(t *testing.T) { + tests := []struct { + name string + width int + height int + expected profile.VideoProfile + }{ + { + name: "720p", + width: 1280, + height: 720, + expected: profile.VideoProfile{ + Name: "orig", + VideoCodec: "libx264", + AudioCodec: "aac", + Width: 1280, + Height: 720, + Bandwidth: 5000000, + }, + }, + { + name: "2160p", + width: 3840, + height: 2160, + expected: profile.VideoProfile{ + Name: "orig", + VideoCodec: "libx264", + AudioCodec: "aac", + Width: 3840, + Height: 2160, + Bandwidth: 25000000, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t 
*testing.T) { + profile := profile.MakeProfileOriginalT(tt.width, tt.height) + assert.Equal(t, tt.expected, profile) + }) + } +} diff --git a/internal/pkg/queue/queue.go b/internal/pkg/queue/queue.go index cb5c6745194..d9f6a542086 100644 --- a/internal/pkg/queue/queue.go +++ b/internal/pkg/queue/queue.go @@ -17,6 +17,7 @@ package queue import ( "context" "encoding/json" + "fmt" "time" "github.com/hcengineering/stream/internal/pkg/log" @@ -168,7 +169,12 @@ func NewProducer(ctx context.Context, options ProducerOptions) Producer { // Send sends a message to the queue topic func (p *TProducer) Send(ctx context.Context, workspaceID string, data any) error { + if data == nil { + return fmt.Errorf("event data is empty") + } + value, err := json.Marshal(data) + if err != nil { return err } diff --git a/internal/pkg/queue/worker.go b/internal/pkg/queue/worker.go index 7c9dae02b41..60def900d5f 100644 --- a/internal/pkg/queue/worker.go +++ b/internal/pkg/queue/worker.go @@ -111,7 +111,7 @@ func (w *Worker) processMessage(ctx context.Context, msg kafka.Message, logger * transcoder := mediaconvert.NewTranscoder(ctx, w.cfg) res, err := transcoder.Transcode(ctx, &task) - if err == nil { + if res != nil { result := TranscodeResult{ BlobID: req.BlobID, WorkspaceUUID: req.WorkspaceUUID, diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 95c87555697..4000168acab 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -21,8 +21,10 @@ import ( "io" "mime/multipart" "net/textproto" + "net/url" "os" "path/filepath" + "strconv" "strings" "time" @@ -376,6 +378,140 @@ func (d *DatalakeStorage) SetParent(ctx context.Context, filename, parent string return nil } +func (d *DatalakeStorage) MultipartUploadStart(ctx context.Context, objectName, contentType string) (string, error) { + var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("objectName", objectName)) + url := fmt.Sprintf("%v/upload/multipart/%v/%v", d.baseURL, d.workspace, objectName) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(url) + req.Header.SetMethod(fasthttp.MethodPost) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType(contentType) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + logRequestError(logger, err, "request failed", resp) + return "", err + } + + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) + return "", err + } + + var result struct { + UploadID string `json:"uploadId"` + } + err := json.Unmarshal(resp.Body(), &result) + + return result.UploadID, err +} + +func (d *DatalakeStorage) MultipartUploadPart(ctx context.Context, objectName, uploadID string, partNumber int, data []byte) (*MultipartPart, error) { + var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID), zap.Int("partNumber", partNumber)) + params := url.Values{} + params.Add("uploadId", uploadID) + params.Add("partNumber", strconv.Itoa(partNumber)) + url := fmt.Sprintf("%v/upload/multipart/%v/%v/part?%v", d.baseURL, d.workspace, objectName, params.Encode()) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(url) + req.Header.SetMethod(fasthttp.MethodPut) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType("application/octet-stream") + 
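	// The part body is sent as raw application/octet-stream. The backend expects every
	// part except the last to be at least 5 MiB (see minPartSize in mediaconvert/multipart.go),
	// so callers such as mediaconvert.MultipartUpload buffer chunks up to that size
	// before sending a part.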
req.Header.SetContentLength(len(data)) + req.SetBody(data) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + logRequestError(logger, err, "request failed", resp) + return nil, err + } + + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) + return nil, err + } + + var part MultipartPart + err := json.Unmarshal(resp.Body(), &part) + + return &part, err +} + +func (d *DatalakeStorage) MultipartUploadComplete(ctx context.Context, objectName, uploadID string, parts []MultipartPart) error { + var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID), zap.String("objectName", objectName)) + params := url.Values{} + params.Add("uploadId", uploadID) + url := fmt.Sprintf("%v/upload/multipart/%v/%v/complete?%v", d.baseURL, d.workspace, objectName, params.Encode()) + + body, err := json.Marshal(map[string]any{ + "parts": parts, + }) + + if err != nil { + logger.Debug("can not encode body", zap.Error(err)) + return err + } + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(url) + req.Header.SetMethod(fasthttp.MethodPost) + req.Header.Add("Authorization", "Bearer "+d.token) + req.Header.SetContentType("application/json") + req.SetBody(body) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + logRequestError(logger, err, "request failed", resp) + return err + } + + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) + return err + } + + return nil +} + +func (d *DatalakeStorage) MultipartUploadCancel(ctx context.Context, objectName, uploadID string) error { + var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID)) + params := url.Values{} + params.Add("uploadId", uploadID) + url := fmt.Sprintf("%v/upload/multipart/%v/%v/abort?%v", d.baseURL, d.workspace, objectName, params.Encode()) + + req := fasthttp.AcquireRequest() + defer fasthttp.ReleaseRequest(req) + req.SetRequestURI(url) + req.Header.SetMethod(fasthttp.MethodPost) + req.Header.Add("Authorization", "Bearer "+d.token) + + resp := fasthttp.AcquireResponse() + defer fasthttp.ReleaseResponse(resp) + + if err := d.client.Do(req, resp); err != nil { + logRequestError(logger, err, "request failed", resp) + return err + } + + if err := okResponse(resp); err != nil { + logRequestError(logger, err, "bad status code", resp) + return err + } + + return nil +} + func okResponse(res *fasthttp.Response) error { var statusOK = res.StatusCode() >= 200 && res.StatusCode() < 300 @@ -397,4 +533,5 @@ func logRequestError(logger *zap.Logger, err error, msg string, res *fasthttp.Re } var _ Storage = (*DatalakeStorage)(nil) +var _ MultipartStorage = (*DatalakeStorage)(nil) var _ MetaProvider = (*DatalakeStorage)(nil) diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index 0feb5878256..78d28cdf133 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -46,6 +46,20 @@ type Storage interface { SetParent(ctx context.Context, fileName string, parentName string) error } +// MultipartPart represents uploaded multipart part +type MultipartPart struct { + PartNumber int `json:"partNumber"` + ETag string `json:"etag"` +} + +// MultipartStorage represents multipart-based storage +type MultipartStorage interface { + MultipartUploadStart(ctx 
context.Context, objectName, contentType string) (string, error) + MultipartUploadPart(ctx context.Context, objectName, uploadID string, partNumber int, data []byte) (*MultipartPart, error) + MultipartUploadComplete(ctx context.Context, objectName, uploadID string, parts []MultipartPart) error + MultipartUploadCancel(ctx context.Context, objectName, uploadID string) error +} + // NewStorageByURL creates a new storage based on the type from the url scheme, for example "datalake://my-datalake-endpoint" func NewStorageByURL(ctx context.Context, u *url.URL, storageType, token, workspace string) (Storage, error) { if workspace == "" { From a1a5bac7b5e041f74244cca8aa23abe2de0e5746 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Fri, 27 Jun 2025 10:41:44 +0700 Subject: [PATCH 127/636] fix ci and fmt issues Signed-off-by: Alexander Onnikov --- .golangci.yaml | 4 ++-- internal/pkg/mediaconvert/command.go | 5 +++-- internal/pkg/mediaconvert/coordinator.go | 2 +- internal/pkg/mediaconvert/multipart.go | 4 +++- internal/pkg/mediaconvert/strategy.go | 1 + internal/pkg/profile/profile.go | 9 +++++---- internal/pkg/profile/profile_test.go | 4 ++++ internal/pkg/storage/datalake.go | 4 ++++ 8 files changed, 23 insertions(+), 10 deletions(-) diff --git a/.golangci.yaml b/.golangci.yaml index 3a6223134d0..2972e64e43f 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -59,13 +59,13 @@ linters-settings: gocyclo: min-complexity: 30 dupl: - threshold: 150 + threshold: 500 funlen: lines: 240 statements: 120 goconst: min-len: 2 - min-occurrences: 2 + min-occurrences: 3 depguard: rules: main: diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 895b5e08c26..100eddb956a 100644 --- a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -120,7 +120,7 @@ func buildHLSCommand(profile profile.VideoProfile, opts *Options) []string { } } -func buildVideoCommand(profile profile.VideoProfile, opts *Options) []string { +func buildVideoCommand(profile profile.VideoProfile) []string { crf := profile.CRF if crf == 0 { crf = 23 @@ -162,7 +162,7 @@ func BuildVideoCommand(opts *Options) []string { var command = buildCommonCommand(opts) for _, profile := range opts.Profiles { - command = append(command, buildVideoCommand(profile, opts)...) + command = append(command, buildVideoCommand(profile)...) command = append(command, buildHLSCommand(profile, opts)...) command = append(command, filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, profile.Name))) } @@ -172,6 +172,7 @@ func BuildVideoCommand(opts *Options) []string { // BuildThumbnailCommand creates a command that creates a thumbnail for the input video func BuildThumbnailCommand(opts *Options) []string { return append([]string{}, + "-y", // Overwrite output files without asking. 
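		// The remaining flags grab exactly one frame for the card thumbnail:
		// -vframes 1 stops after the first output frame, and -update 1 writes to a
		// single image file instead of a numbered image sequence.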
"-i", opts.Input, "-vframes", "1", "-update", "1", diff --git a/internal/pkg/mediaconvert/coordinator.go b/internal/pkg/mediaconvert/coordinator.go index d20520a203d..3c57fbf455f 100644 --- a/internal/pkg/mediaconvert/coordinator.go +++ b/internal/pkg/mediaconvert/coordinator.go @@ -90,8 +90,8 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo } if atomic.AddInt32(&s.activeTranscoding, 1) > int32(s.conf.MaxParallelTranscodingCount) { - atomic.AddInt32(&s.activeTranscoding, -1) s.logger.Debug("run out of resources for scaling") + // atomic.AddInt32(&s.activeTranscoding, -1) // TODO do not transcode } diff --git a/internal/pkg/mediaconvert/multipart.go b/internal/pkg/mediaconvert/multipart.go index f29e35ef7f1..b153f62fcb6 100644 --- a/internal/pkg/mediaconvert/multipart.go +++ b/internal/pkg/mediaconvert/multipart.go @@ -30,6 +30,7 @@ import ( // minimum size for multipart parts (backend requires >=5MiB for all but last part) const minPartSize = 5 * 1024 * 1024 +// MultipartUpload uploads chunks via multipart upload type MultipartUpload struct { logger *zap.Logger buffer *bytes.Buffer @@ -48,6 +49,7 @@ type MultipartUpload struct { bytesUploaded int64 } +// NewMultipartUpload creates a new multipart upload func NewMultipartUpload( ctx context.Context, multipartStorage storage.MultipartStorage, @@ -122,7 +124,7 @@ func (w *MultipartUpload) Terminate(ctx context.Context) error { w.logger.Debug("terminating multipart upload", zap.Int("parts", len(w.parts))) - // create new context in case the main context is cancelled + // create new context in case the main context is canceled cancelCtx := ctx if ctx.Err() != nil { var cancel context.CancelFunc diff --git a/internal/pkg/mediaconvert/strategy.go b/internal/pkg/mediaconvert/strategy.go index 387db9de3fd..3fff4a80aad 100644 --- a/internal/pkg/mediaconvert/strategy.go +++ b/internal/pkg/mediaconvert/strategy.go @@ -22,6 +22,7 @@ import ( "github.com/hcengineering/stream/internal/pkg/resconv" ) +// VideoMeta contains information about the video type VideoMeta struct { Width int Height int diff --git a/internal/pkg/profile/profile.go b/internal/pkg/profile/profile.go index 7716ef6914b..c84a3437ef2 100644 --- a/internal/pkg/profile/profile.go +++ b/internal/pkg/profile/profile.go @@ -53,7 +53,7 @@ var profileOriginalT = VideoProfile{ Scale: false, VideoCodec: "libx264", AudioCodec: "aac", - CRF: 23, + CRF: 23, } // Profile360p is a profile for transcoding video in 360p @@ -89,7 +89,7 @@ var Profile720p = VideoProfile{ Bandwidth: 5000000, VideoCodec: "libx264", AudioCodec: "aac", - CRF: 25, + CRF: 25, } // Profile1080p is a profile for transcoding video in 1080p @@ -140,6 +140,7 @@ var Profile4320p = VideoProfile{ CRF: 22, } +// Profiles is a map of all supported profiles var Profiles = map[string]VideoProfile{ "360p": Profile360p, "480p": Profile480p, @@ -150,13 +151,13 @@ var Profiles = map[string]VideoProfile{ "4320p": Profile4320p, } +// MakeProfileOriginal creates a profile for original video without transcoding func MakeProfileOriginal(width, height int) VideoProfile { resolution := fmt.Sprintf("%v:%v", width, height) level := resconv.Level(resolution) bandwidth := resconv.Bandwidth(level) profile := profileOriginal - //profile.Name = level profile.Width = width profile.Height = height profile.Bandwidth = bandwidth @@ -164,13 +165,13 @@ func MakeProfileOriginal(width, height int) VideoProfile { return profile } +// MakeProfileOriginalT creates a profile for transcoding video in the original resolution func 
MakeProfileOriginalT(width, height int) VideoProfile { resolution := fmt.Sprintf("%v:%v", width, height) level := resconv.Level(resolution) bandwidth := resconv.Bandwidth(level) profile := profileOriginalT - //profile.Name = level profile.Width = width profile.Height = height profile.Bandwidth = bandwidth diff --git a/internal/pkg/profile/profile_test.go b/internal/pkg/profile/profile_test.go index a4ea1448fd0..1ad67d79590 100644 --- a/internal/pkg/profile/profile_test.go +++ b/internal/pkg/profile/profile_test.go @@ -104,6 +104,7 @@ func TestMakeProfileOriginal(t *testing.T) { Width: 640, Height: 480, Bandwidth: 2000000, + CRF: 23, }, }, { @@ -117,6 +118,7 @@ func TestMakeProfileOriginal(t *testing.T) { Width: 2000, Height: 1200, Bandwidth: 8000000, + CRF: 23, }, }, } @@ -147,6 +149,7 @@ func TestMakeProfileOriginalT(t *testing.T) { Width: 1280, Height: 720, Bandwidth: 5000000, + CRF: 23, }, }, { @@ -160,6 +163,7 @@ func TestMakeProfileOriginalT(t *testing.T) { Width: 3840, Height: 2160, Bandwidth: 25000000, + CRF: 23, }, }, } diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index 4000168acab..b9560e0710a 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -378,6 +378,7 @@ func (d *DatalakeStorage) SetParent(ctx context.Context, filename, parent string return nil } +// MultipartUploadStart creates a new multipart upload func (d *DatalakeStorage) MultipartUploadStart(ctx context.Context, objectName, contentType string) (string, error) { var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("objectName", objectName)) url := fmt.Sprintf("%v/upload/multipart/%v/%v", d.baseURL, d.workspace, objectName) @@ -410,6 +411,7 @@ func (d *DatalakeStorage) MultipartUploadStart(ctx context.Context, objectName, return result.UploadID, err } +// MultipartUploadPart uploads a part of a multipart upload func (d *DatalakeStorage) MultipartUploadPart(ctx context.Context, objectName, uploadID string, partNumber int, data []byte) (*MultipartPart, error) { var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID), zap.Int("partNumber", partNumber)) params := url.Values{} @@ -445,6 +447,7 @@ func (d *DatalakeStorage) MultipartUploadPart(ctx context.Context, objectName, u return &part, err } +// MultipartUploadComplete completes a multipart upload func (d *DatalakeStorage) MultipartUploadComplete(ctx context.Context, objectName, uploadID string, parts []MultipartPart) error { var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID), zap.String("objectName", objectName)) params := url.Values{} @@ -484,6 +487,7 @@ func (d *DatalakeStorage) MultipartUploadComplete(ctx context.Context, objectNam return nil } +// MultipartUploadCancel cancels a multipart upload func (d *DatalakeStorage) MultipartUploadCancel(ctx context.Context, objectName, uploadID string) error { var logger = d.logger.With(zap.String("workspace", d.workspace), zap.String("uploadID", uploadID)) params := url.Values{} From 4c2e68af493cfeb5136144ebd8217a30a35de98b Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Mon, 30 Jun 2025 10:45:57 +0700 Subject: [PATCH 128/636] fix: use different paths for recording and transcoding (#21) Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/coordinator.go | 14 +++++++++---- internal/pkg/mediaconvert/transcoder.go | 26 +++++++++++++++--------- 2 files changed, 26 insertions(+), 14 deletions(-) diff --git 
a/internal/pkg/mediaconvert/coordinator.go b/internal/pkg/mediaconvert/coordinator.go index 3c57fbf455f..519e0d51ba7 100644 --- a/internal/pkg/mediaconvert/coordinator.go +++ b/internal/pkg/mediaconvert/coordinator.go @@ -38,9 +38,12 @@ import ( "go.uber.org/zap" ) +var recordingDir = "r" + // StreamCoordinator represents manager for streams. It creates a new stream for a client and manages it's life cycle. type StreamCoordinator struct { conf *config.Config + outputDir string uploadOptions uploader.Options activeTranscoding int32 @@ -59,18 +62,21 @@ var _ handler.LengthDeferrerDataStore = (*StreamCoordinator)(nil) // NewStreamCoordinator creates a new scheduler for transcode operations. func NewStreamCoordinator(ctx context.Context, c *config.Config) *StreamCoordinator { + outputDir := filepath.Join(c.OutputDir, recordingDir) + return &StreamCoordinator{ - conf: c, + conf: c, + outputDir: outputDir, uploadOptions: uploader.Options{ RetryDelay: time.Millisecond * 100, Timeout: c.Timeout, WorkerCount: uint32(c.MaxThreadCount), RetryCount: 10, BufferSize: 128, - Dir: c.OutputDir, + Dir: outputDir, }, mainContext: ctx, - logger: log.FromContext(ctx).With(zap.String("Scheduler", c.OutputDir)), + logger: log.FromContext(ctx).With(zap.String("Scheduler", outputDir)), } } @@ -115,7 +121,7 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo var commandOptions = Options{ Input: "pipe:0", - OutputDir: s.conf.OutputDir, + OutputDir: s.outputDir, Threads: s.conf.MaxThreadCount, UploadID: info.ID, Profiles: profiles, diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 1972f171ddf..cffa1667761 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -34,19 +34,25 @@ import ( "gopkg.in/vansante/go-ffprobe.v2" ) +var transcodingDir = "t" + // Transcoder process one transcoding task type Transcoder struct { - ctx context.Context - cfg *config.Config - logger *zap.Logger + ctx context.Context + cfg *config.Config + outputDir string + logger *zap.Logger } // NewTranscoder creates a new instance of task transcoder func NewTranscoder(ctx context.Context, cfg *config.Config) *Transcoder { + outputDir := filepath.Join(cfg.OutputDir, transcodingDir) + var p = &Transcoder{ - cfg: cfg, - ctx: ctx, - logger: log.FromContext(ctx).With(zap.String("transcoding", "transcoder")), + cfg: cfg, + ctx: ctx, + outputDir: outputDir, + logger: log.FromContext(ctx).With(zap.String("transcoding", "transcoder")), } return p @@ -67,7 +73,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er } logger.Debug("phase 2: preparing fs") - var destinationFolder = filepath.Join(p.cfg.OutputDir, task.ID) + var destinationFolder = filepath.Join(p.outputDir, task.ID) var _, filename = filepath.Split(task.Source) err = os.MkdirAll(destinationFolder, os.ModePerm) if err != nil { @@ -139,7 +145,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var opts = Options{ Input: sourceFilePath, - OutputDir: p.cfg.OutputDir, + OutputDir: p.outputDir, LogLevel: LogLevel(p.cfg.LogLevel), Profiles: profiles, UploadID: task.ID, @@ -158,9 +164,9 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er SourceFile: sourceFilePath, }) - err = manifest.GenerateHLSPlaylist(profiles, p.cfg.OutputDir, opts.UploadID) + err = manifest.GenerateHLSPlaylist(profiles, p.outputDir, opts.UploadID) if err != nil { - logger.Error("can not generate hls 
playlist", zap.String("out", p.cfg.OutputDir), zap.String("uploadID", opts.UploadID)) + logger.Error("can not generate hls playlist", zap.String("out", p.outputDir), zap.String("uploadID", opts.UploadID)) return nil, errors.Wrapf(err, "can not generate hls playlist") } From 234938c358137a7d014d05d729ec3ad3d5c03996 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 30 Jun 2025 09:42:22 +0400 Subject: [PATCH 129/636] Fix async broadcast (#76) Signed-off-by: Kristina Fefelova --- packages/sdk-types/src/serverApi.ts | 10 +- packages/server/src/index.ts | 6 +- packages/server/src/middleware/base.ts | 10 +- packages/server/src/middleware/broadcast.ts | 23 ++-- packages/server/src/middleware/date.ts | 2 +- packages/server/src/middleware/db.ts | 100 ++++++--------- packages/server/src/middleware/id.ts | 7 +- packages/server/src/middleware/permissions.ts | 2 +- packages/server/src/middleware/triggers.ts | 115 +++++++++++++----- packages/server/src/middlewares.ts | 19 +-- packages/server/src/triggers/message.ts | 5 +- packages/server/src/types.ts | 10 +- 12 files changed, 178 insertions(+), 131 deletions(-) diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index 93ab1093191..ec1dde33719 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -27,15 +27,21 @@ import type { FindCollaboratorsParams, Collaborator } from '@hcengineering/communication-types' -import type { Account } from '@hcengineering/core' +import type { Account, MeasureContext } from '@hcengineering/core' import type { EventResult, Event } from './events/event' +export type ContextData = { + asyncRequests?: ((ctx: MeasureContext) => Promise)[] +} & Record + export interface SessionData { sessionId?: string account: Account derived?: boolean - contextData?: any + isAsyncContext?: boolean + contextData?: ContextData + asyncData: Event[] } export interface ServerApi { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 0f12c48ccf0..46c481f4fd0 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -33,7 +33,7 @@ import { createDbAdapter } from '@hcengineering/communication-cockroach' import type { EventResult, Event, ServerApi, SessionData } from '@hcengineering/communication-sdk-types' import { getMetadata } from './metadata' -import type { BroadcastSessionsFunc, QueryId } from './types' +import type { CommunicationCallbacks, QueryId } from './types' import { buildMiddlewares, Middlewares } from './middlewares' export class Api implements ServerApi { @@ -46,14 +46,14 @@ export class Api implements ServerApi { ctx: MeasureContext, workspace: WorkspaceID, dbUrl: string, - broadcast: BroadcastSessionsFunc + callbacks: CommunicationCallbacks ): Promise { const db = await createDbAdapter(dbUrl, workspace, ctx, { withLogs: process.env.COMMUNICATION_TIME_LOGGING_ENABLED === 'true' }) const metadata = getMetadata() - const middleware = await buildMiddlewares(ctx, workspace, metadata, db, broadcast) + const middleware = await buildMiddlewares(ctx, workspace, metadata, db, callbacks) return new Api(ctx, middleware) } diff --git a/packages/server/src/middleware/base.ts b/packages/server/src/middleware/base.ts index 5e7222f8f98..7233adf33cf 100644 --- a/packages/server/src/middleware/base.ts +++ b/packages/server/src/middleware/base.ts @@ -13,7 +13,7 @@ // limitations under the License. 
// -import { type EventResult, type Event, type SessionData } from '@hcengineering/communication-sdk-types' +import { type Event, EventResult, type SessionData } from '@hcengineering/communication-sdk-types' import type { FindMessagesGroupsParams, FindMessagesParams, @@ -77,8 +77,8 @@ export class BaseMiddleware implements Middleware { return await this.provideEvent(session, event, derived) } - async response (session: SessionData, event: Enriched, derived: boolean): Promise { - await this.provideResponse(session, event, derived) + handleBroadcast (session: SessionData, events: Enriched[]): void { + this.provideHandleBroadcast(session, events) } unsubscribeQuery (session: SessionData, queryId: number): void { @@ -162,9 +162,9 @@ export class BaseMiddleware implements Middleware { return [] } - protected async provideResponse (session: SessionData, event: Enriched, derived: boolean): Promise { + protected provideHandleBroadcast (session: SessionData, events: Enriched[]): void { if (this.next !== undefined) { - await this.next.response(session, event, derived) + this.next.handleBroadcast(session, events) } } } diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index de9932b7f3e..7530471861d 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -16,7 +16,7 @@ import { CardEventType, type Event, - type EventResult, + EventResult, LabelEventType, MessageEventType, NotificationEventType, @@ -38,7 +38,7 @@ import type { NotificationContext } from '@hcengineering/communication-types' -import type { BroadcastSessionsFunc, Enriched, Middleware, MiddlewareContext, QueryId } from '../types' +import type { CommunicationCallbacks, Enriched, Middleware, MiddlewareContext, QueryId } from '../types' import { BaseMiddleware } from './base' interface SessionInfo { @@ -51,7 +51,7 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { private readonly dataBySessionId = new Map() constructor ( - private readonly broadcastFn: BroadcastSessionsFunc, + private readonly callbacks: CommunicationCallbacks, readonly context: MiddlewareContext, next?: Middleware ) { @@ -119,31 +119,30 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { data.contextQueries.delete(queryId) } - async response (session: SessionData, event: Enriched, derived: boolean): Promise { - const sessionIds: string[] = [] + handleBroadcast (session: SessionData, events: Enriched[]): void { + if (events.length === 0) return + const sessionIds: Record[]> = {} + for (const [sessionId, session] of this.dataBySessionId.entries()) { - if (this.match(event, session)) { - sessionIds.push(sessionId) - } + sessionIds[sessionId] = events.filter((it) => this.match(it, session)) } const ctx = this.context.ctx.newChild('enqueue', {}) ctx.contextData = session.contextData - if (sessionIds.length > 0) { + if (Object.keys(sessionIds).length > 0) { try { - this.broadcastFn.broadcast(ctx, sessionIds, event) + this.callbacks.broadcast(ctx, sessionIds) } catch (e) { this.context.ctx.error('Failed to broadcast event', { error: e }) } } try { - this.broadcastFn.enqueue(ctx, event) + this.callbacks.enqueue(ctx, events) } catch (e) { this.context.ctx.error('Failed to broadcast event', { error: e }) } - await this.provideResponse(session, event, derived) } closeSession (sessionId: string): void { diff --git a/packages/server/src/middleware/date.ts b/packages/server/src/middleware/date.ts index 
e6b94b6162d..726f7948cff 100644 --- a/packages/server/src/middleware/date.ts +++ b/packages/server/src/middleware/date.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import { type EventResult, type Event, type SessionData } from '@hcengineering/communication-sdk-types' +import { EventResult, type Event, type SessionData } from '@hcengineering/communication-sdk-types' import { systemAccountUuid } from '@hcengineering/core' import type { Middleware, MiddlewareContext, Enriched } from '../types' diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index ff646bd5a9c..e05bdb32290 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -43,7 +43,6 @@ import { type CreateNotificationEvent, type DbAdapter, type Event, - type EventResult, LabelEventType, LinkPreviewPatchEvent, MessageEventType, @@ -61,14 +60,15 @@ import { type UpdateNotificationContextEvent, type UpdateNotificationEvent, UpdatePatchEvent, - ThreadPatchEvent + ThreadPatchEvent, + EventResult } from '@hcengineering/communication-sdk-types' import type { Enriched, Middleware, MiddlewareContext } from '../types' import { BaseMiddleware } from './base' interface Result { - response?: Enriched + skipPropagate?: boolean result?: EventResult } @@ -108,10 +108,11 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return await this.db.findCollaborators(params) } - async event (session: SessionData, event: Enriched, derived: boolean): Promise { + async event (session: SessionData, event: Enriched): Promise { const result = await this.processEvent(session, event) - if (result.response != null) { - void this.context.head?.response(session, result.response, derived) + + if (result.skipPropagate === true) { + event.skipPropagate = true } return result.result ?? 
{} @@ -177,19 +178,16 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { private async addCollaborators (event: Enriched): Promise { const added = await this.db.addCollaborators(event.cardId, event.cardType, event.collaborators, event.date) - if (added.length === 0) return {} - return { - response: event - } + + if (added.length === 0) return { skipPropagate: true } + return {} } private async removeCollaborators (event: Enriched): Promise { - if (event.collaborators.length === 0) return {} + if (event.collaborators.length === 0) return { skipPropagate: true } await this.db.removeCollaborators(event.cardId, event.collaborators) - return { - response: event - } + return {} } private async createMessage (event: Enriched): Promise { @@ -209,6 +207,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { if (!created) { return { + skipPropagate: true, result: { messageId: event.messageId, created: event.date @@ -217,7 +216,6 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } return { - response: event, result: { messageId: event.messageId, created: event.date @@ -243,12 +241,12 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } await this.createPatch(event.cardId, event.messageId, PatchType.update, data, event.socialId, event.date) - return { response: event } + return {} } private async removePatch (event: Enriched): Promise { await this.createPatch(event.cardId, event.messageId, PatchType.remove, {}, event.socialId, event.date) - return { response: event } + return {} } private async reactionPatch (event: Enriched): Promise { @@ -260,7 +258,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { await this.db.removeReaction(event.cardId, event.messageId, operation.reaction, event.socialId, event.date) } - return { response: event } + return {} } private async blobPatch (event: Enriched): Promise { @@ -276,7 +274,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } } - return { response: event } + return {} } private async linkPreviewPatch (event: Enriched): Promise { @@ -296,9 +294,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } } - return { - response: event - } + return {} } private async threadPatch (event: Enriched): Promise { @@ -322,9 +318,7 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { ) } - return { - response: event - } + return {} } private async createNotification (event: Enriched): Promise { @@ -338,27 +332,25 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { event.date ) - return { - response: { ...event, notificationId: id } - } + event.notificationId = id + + return {} } private async updateNotification (event: Enriched): Promise { await this.db.updateNotification(event.contextId, event.account, event.query, event.updates) - return { - response: event - } + return {} } private async removeNotifications (event: Enriched): Promise { - if (event.ids.length === 0) return {} + if (event.ids.length === 0) return { skipPropagate: true } const ids = await this.db.removeNotifications(event.contextId, event.account, event.ids) + event.ids = ids return { result: { ids - }, - response: { ...event, ids } + } } } @@ -370,56 +362,47 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { event.lastView, event.lastNotify ) + + event.contextId = id return { - response: { ...event, 
contextId: id }, result: { id } } } private async removeNotificationContext (event: Enriched): Promise { const context = (await this.db.findNotificationContexts({ id: event.contextId, account: event.account }))[0] - if (context == null) return {} + if (context == null) return { skipPropagate: true } this.context.removedContexts.set(context.id, context) const id = await this.db.removeContext(context.id, context.account) - if (id == null) return {} - return { - response: event - } + if (id == null) return { skipPropagate: true } + return {} } async updateNotificationContext (event: Enriched): Promise { await this.db.updateContext(event.contextId, event.account, event.updates) - return { - response: event - } + return {} } async createMessagesGroup (event: Enriched): Promise { const { fromDate, toDate, count, cardId, blobId } = event.group await this.db.createMessagesGroup(cardId, blobId, fromDate, toDate, count) - return { - response: event - } + return {} } async removeMessagesGroup (event: Enriched): Promise { await this.db.removeMessagesGroup(event.cardId, event.blobId) - return { - response: event - } + return {} } private async createLabel (event: Enriched): Promise { await this.db.createLabel(event.labelId, event.cardId, event.cardType, event.account, event.date) - return { - response: event - } + return {} } private async removeLabel (event: Enriched): Promise { @@ -428,21 +411,16 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { cardId: event.cardId, account: event.account }) - return { - response: event - } + + return {} } private async updateCardType (event: Enriched): Promise { - return { - response: event - } + return {} } private async removeCard (event: Enriched): Promise { - return { - response: event - } + return {} } close (): void { diff --git a/packages/server/src/middleware/id.ts b/packages/server/src/middleware/id.ts index de8aaaa6fbc..137295f24a7 100644 --- a/packages/server/src/middleware/id.ts +++ b/packages/server/src/middleware/id.ts @@ -13,12 +13,7 @@ // limitations under the License. // -import { - type EventResult, - MessageEventType, - type Event, - type SessionData -} from '@hcengineering/communication-sdk-types' +import { EventResult, MessageEventType, type Event, type SessionData } from '@hcengineering/communication-sdk-types' import { generateMessageId } from '../messageId' import type { Middleware, MiddlewareContext, Enriched } from '../types' diff --git a/packages/server/src/middleware/permissions.ts b/packages/server/src/middleware/permissions.ts index 56c0bb2b24c..54b4e8b52da 100644 --- a/packages/server/src/middleware/permissions.ts +++ b/packages/server/src/middleware/permissions.ts @@ -16,7 +16,7 @@ import { type DbAdapter, type Event, - type EventResult, + EventResult, MessageEventType, NotificationEventType, type SessionData diff --git a/packages/server/src/middleware/triggers.ts b/packages/server/src/middleware/triggers.ts index 64b8a5517fd..6c7b9f3cbd8 100644 --- a/packages/server/src/middleware/triggers.ts +++ b/packages/server/src/middleware/triggers.ts @@ -13,18 +13,19 @@ // limitations under the License. 
// -import type { DbAdapter, Event, SessionData } from '@hcengineering/communication-sdk-types' +import type { DbAdapter, Event, EventResult, SessionData } from '@hcengineering/communication-sdk-types' import type { MeasureContext } from '@hcengineering/core' -import triggers from '../triggers/all' -import type { Enriched, Middleware, MiddlewareContext, TriggerCtx } from '../types' +import type { CommunicationCallbacks, Enriched, Middleware, MiddlewareContext, TriggerCtx } from '../types' import { BaseMiddleware } from './base' +import triggers from '../triggers/all' import { notify } from '../notification/notification' export class TriggersMiddleware extends BaseMiddleware implements Middleware { - private readonly ctx: MeasureContext + private ctx: MeasureContext constructor ( + private readonly callbacks: CommunicationCallbacks, private readonly db: DbAdapter, context: MiddlewareContext, next?: Middleware @@ -39,8 +40,19 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { ) // 1hour } - async response (session: SessionData, event: Enriched, derived: boolean): Promise { - const ctx: Omit = { + async event (session: SessionData, event: Enriched, derived: boolean): Promise { + const result = await this.provideEvent(session, event, derived) + if (event.skipPropagate === true) { + return result + } + + await this.processDerived(session, [event], derived) + + return result + } + + async processDerived (session: SessionData, events: Enriched[], derived: boolean): Promise { + const triggerCtx: Omit = { metadata: this.context.metadata, db: this.db, workspace: this.context.workspace, @@ -54,25 +66,81 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { return (await this.context.head?.event(session, event as Enriched, true)) ?? 
{} } } - await this.applyTriggers(session, event, ctx) - void notify( - { - ...ctx, - ctx: this.ctx.newChild('create-notifications', {}) - }, - event - ).then((res) => this.propagate(session, res)) + + if (!derived && session.isAsyncContext !== true && session.contextData !== undefined) { + session.isAsyncContext = true + const ctx = this.context.ctx.newChild('async-triggers', {}) + ctx.contextData = session.contextData + + this.callbacks.registerAsyncRequest(ctx, async (_ctx) => { + this.ctx = _ctx + await this.callAsyncTriggers({ ...triggerCtx, ctx: this.ctx }, session, events) + this.handleBroadcast( + session, + (session.asyncData as Enriched[]).sort((a, b) => a.date.getTime() - b.date.getTime()) + ) + session.asyncData = [] + }) + } else { + await this.callAsyncTriggers({ ...triggerCtx, ctx: this.ctx }, session, events) + + if (session.isAsyncContext !== true) { + this.handleBroadcast( + session, + (session.asyncData as Enriched[]).sort((a, b) => a.date.getTime() - b.date.getTime()) + ) + session.asyncData = [] + } + } + } + + private async callAsyncTriggers (ctx: TriggerCtx, session: SessionData, events: Enriched[]): Promise { + const fromTriggers = await this.runTriggers({ ...ctx, ctx: this.ctx }, events) + await Promise.all(fromTriggers.map((d) => this.context.head?.event(session, d as Enriched, true))) + const triggersDerived = (fromTriggers as Enriched[]).filter((it) => it.skipPropagate !== true) + session.asyncData = [...session.asyncData, ...triggersDerived] + + await this.callAsyncNotifications(ctx, session, events) } - private async applyTriggers ( + private async callAsyncNotifications ( + ctx: TriggerCtx, session: SessionData, - event: Enriched, - ctx: Omit + events: Enriched[] ): Promise { + const notifications = ( + await Promise.all( + events.map(async (event) => { + return await notify( + { + ...ctx, + ctx: this.ctx.newChild('create-notifications', {}) + }, + event + ) + }) + ) + ).flat() + await Promise.all(notifications.map((d) => this.context.head?.event(session, d as Enriched, true))) + const notificationsDerived = (notifications as Enriched[]).filter((it) => it.skipPropagate !== true) + session.asyncData = [...session.asyncData, ...notificationsDerived] + } + + private async runTriggers (ctx: TriggerCtx, events: Enriched[]): Promise { + return ( + await Promise.all( + events.map(async (event) => { + return await this.applyTriggers(event, ctx) + }) + ) + ).flat() + } + + private async applyTriggers (event: Enriched, ctx: Omit): Promise { const matchedTriggers = triggers.filter(([_, type]) => type === event.type) - if (matchedTriggers.length === 0) return + if (matchedTriggers.length === 0) return [] - const derived = ( + return ( await Promise.all( matchedTriggers.map(([name, _, fn]) => fn( @@ -85,14 +153,5 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { ) ) ).flat() - - await this.propagate(session, derived) - } - - private async propagate (session: SessionData, derived: Event[]): Promise { - if (derived.length === 0) return - if (this.context.head === undefined) return - // Will be enriched in head - await Promise.all(derived.map((d) => this.context.head?.event(session, d as Enriched, true))) } } diff --git a/packages/server/src/middlewares.ts b/packages/server/src/middlewares.ts index aefd96dbed8..3617a9f7b44 100644 --- a/packages/server/src/middlewares.ts +++ b/packages/server/src/middlewares.ts @@ -32,7 +32,7 @@ import type { } from '@hcengineering/communication-types' import type { - BroadcastSessionsFunc, + 
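  // CommunicationCallbacks replaces BroadcastSessionsFunc: broadcast and enqueue now
  // take batches of enriched events, and registerAsyncRequest lets trigger processing
  // run as a deferred async request instead of inside the original event call.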
CommunicationCallbacks, Enriched, Metadata, Middleware, @@ -54,7 +54,7 @@ export async function buildMiddlewares ( workspace: WorkspaceID, metadata: Metadata, db: DbAdapter, - broadcast: BroadcastSessionsFunc + callbacks: CommunicationCallbacks ): Promise { const createFns: MiddlewareCreateFn[] = [ // Enrich events @@ -67,9 +67,9 @@ export async function buildMiddlewares ( async (context, next) => new PermissionsMiddleware(db, context, next), // Process events - async (context, next) => new BroadcastMiddleware(broadcast, context, next), - async (context, next) => new DatabaseMiddleware(db, context, next), - async (context, next) => new TriggersMiddleware(db, context, next) + async (context, next) => new TriggersMiddleware(callbacks, db, context, next), + async (context, next) => new BroadcastMiddleware(callbacks, context, next), + async (context, next) => new DatabaseMiddleware(db, context, next) ] const context: MiddlewareContext = { @@ -92,7 +92,8 @@ export class Middlewares { private constructor ( private readonly ctx: MeasureContext, private readonly context: MiddlewareContext - ) {} + ) { + } static async create ( ctx: MeasureContext, @@ -175,7 +176,11 @@ export class Middlewares { async event (session: SessionData, event: Event): Promise { if (this.head === undefined) return {} - return (await this.head?.event(session, event as Enriched, session.derived ?? false)) ?? {} + const result = (await this.head?.event(session, event as Enriched, session.derived ?? false)) ?? {} + + this.head?.handleBroadcast(session, [event] as Enriched[]) + + return result } async closeSession (sessionId: string): Promise { diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index 24cadddef7d..c9f5e3f8c82 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -181,7 +181,10 @@ async function onThreadAttached (ctx: TriggerCtx, event: Enriched void - response: (session: SessionData, event: Enriched, derived: boolean) => Promise + handleBroadcast: (session: SessionData, events: Enriched[]) => void closeSession: (sessionId: string) => void close: () => void @@ -94,9 +94,10 @@ export interface MiddlewareContext { export type MiddlewareCreateFn = (context: MiddlewareContext, next?: Middleware) => Promise -export interface BroadcastSessionsFunc { - broadcast: (ctx: MeasureContext, sessionIds: string[], result: Enriched) => void - enqueue: (ctx: MeasureContext, result: Enriched) => void +export interface CommunicationCallbacks { + registerAsyncRequest: (ctx: MeasureContext, promise: (ctx: MeasureContext) => Promise) => void + broadcast: (ctx: MeasureContext, result: Record[]>) => void + enqueue: (ctx: MeasureContext, result: Enriched[]) => void } export interface TriggerCtx { @@ -116,5 +117,6 @@ export type TriggerFn = (ctx: TriggerCtx, event: Enriched) => Promise = T & { + skipPropagate?: boolean date: Date } From 40c5821a99f9d5de78778fd3c96ca8d524aed94d Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 30 Jun 2025 14:30:17 +0400 Subject: [PATCH 130/636] Push notifications (#77) * Provide info for pushes Signed-off-by: Kristina Fefelova * Fix notification context query Signed-off-by: Kristina Fefelova * Fix query Signed-off-by: Kristina Fefelova --------- Signed-off-by: Kristina Fefelova --- packages/client-query/src/query.ts | 30 ++++++++-------- packages/cockroach/src/adapter.ts | 13 ++++++- packages/cockroach/src/db/notification.ts | 4 +-- packages/query/src/index.ts | 2 +- packages/query/src/lq.ts | 11 +++--- 
.../query/src/notification-contexts/query.ts | 3 +- packages/query/src/notifications/query.ts | 31 +++++++++++++--- packages/query/src/result.ts | 8 +++++ packages/query/src/types.ts | 4 ++- packages/sdk-types/src/db.ts | 3 +- packages/sdk-types/src/events/notification.ts | 2 +- .../server/src/notification/notification.ts | 36 +++++++++++++++++-- packages/server/src/triggers/card.ts | 5 +-- packages/server/src/triggers/utils.ts | 11 +++--- packages/types/src/notification.ts | 10 ++++-- 15 files changed, 124 insertions(+), 49 deletions(-) diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 0b29b1582fc..6bc4d0a292a 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -13,12 +13,11 @@ // limitations under the License. // -import { MessageQueryParams } from '@hcengineering/communication-query' +import { MessageQueryParams, NotificationQueryParams } from '@hcengineering/communication-query' import type { PagedQueryCallback, QueryCallback } from '@hcengineering/communication-sdk-types' import { type FindLabelsParams, type FindNotificationContextParams, - type FindNotificationsParams, type Label, type Message, type NotificationContext, @@ -37,12 +36,12 @@ class BaseQuery

, C extends (r: any) => void> { if (dontDestroy !== true) { const destroyFn = getOnDestroy() destroyFn(() => { - this.unsubscribe() + this.unsubscribe(false) }) } } - unsubscribe: () => void = () => {} + unsubscribe: (isUpdate: boolean) => void = () => {} query (params: P, callback: C): boolean { if (!this.needUpdate(params, callback)) { @@ -53,13 +52,14 @@ class BaseQuery
, C extends (r: any) => void> { } private doQuery (query: P, callback: C): void { - this.unsubscribe() + const isUpdate = this.oldQuery !== undefined + this.unsubscribe(isUpdate) this.oldCallback = callback this.oldQuery = query const { unsubscribe } = this.createQuery(query, callback) - this.unsubscribe = () => { - unsubscribe() + this.unsubscribe = (isUpdate) => { + unsubscribe(isUpdate) this.oldCallback = undefined this.oldQuery = undefined this.unsubscribe = () => {} @@ -67,7 +67,7 @@ class BaseQuery
, C extends (r: any) => void> { } // eslint-disable-next-line @typescript-eslint/no-unused-vars - createQuery (params: P, callback: C): { unsubscribe: () => void } { + createQuery (params: P, callback: C): { unsubscribe: (isUpdate: boolean) => void } { return { unsubscribe: () => {} } @@ -81,17 +81,17 @@ class BaseQuery
, C extends (r: any) => void> { } export class MessagesQuery extends BaseQuery> { - override createQuery (params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: () => void } { + override createQuery (params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryMessages(params, callback) } } -export class NotificationsQuery extends BaseQuery> { +export class NotificationsQuery extends BaseQuery> { override createQuery ( - params: FindNotificationsParams, + params: NotificationQueryParams, callback: PagedQueryCallback ): { - unsubscribe: () => void + unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryNotifications(params, callback) } @@ -105,7 +105,7 @@ PagedQueryCallback params: FindNotificationContextParams, callback: PagedQueryCallback ): { - unsubscribe: () => void + unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryNotificationContexts(params, callback) } @@ -116,7 +116,7 @@ export class LabelsQuery extends BaseQuery ): { - unsubscribe: () => void + unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryLabels(params, callback) } @@ -127,7 +127,7 @@ export class CollaboratorsQuery extends BaseQuery ): { - unsubscribe: () => void + unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryCollaborators(params, callback) } diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 29e71b54d3f..0349e1e269f 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -240,7 +240,7 @@ export class CockroachAdapter implements DbAdapter { messageCreated: Date, type: NotificationType, read: boolean, - content: NotificationContent | undefined, + content: NotificationContent, created: Date ): Promise { return await this.notification.createNotification( @@ -343,6 +343,17 @@ export class CockroachAdapter implements DbAdapter { return name != null ? formatName(name) : undefined } + async getCardTitle (_id: CardID): Promise { + const sql = `SELECT data ->> 'title' AS title + FROM public.card + WHERE "workspaceId" = $1::uuid + AND "_id" = $2::text + LIMIT 1` + const result = await this.sql.execute(sql, [this.workspace, _id]) + + return result[0]?.title + } + async getMessageCreated (cardId: CardID, messageId: MessageID): Promise { return await this.message.getMessageCreated(cardId, messageId) } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 190113ce344..dc1646256c7 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -110,7 +110,7 @@ export class NotificationsDb extends BaseDb { messageCreated: Date, type: NotificationType, read: boolean, - content: NotificationContent | undefined, + content: NotificationContent, created: Date ): Promise { const db: Omit = { @@ -120,7 +120,7 @@ export class NotificationsDb extends BaseDb { read, context_id: context, created, - content: content ?? 
{} + content } const sql = `INSERT INTO ${TableName.Notification} (message_id, message_created, context_id, read, created, type, content) VALUES ($1::varchar, $2::timestamptz, $3::int8, $4::boolean, $5::timestamptz, $6::varchar, $7::jsonb) diff --git a/packages/query/src/index.ts b/packages/query/src/index.ts index b377c23485b..87409751b75 100644 --- a/packages/query/src/index.ts +++ b/packages/query/src/index.ts @@ -15,4 +15,4 @@ export * from './lq' -export type { MessageQueryParams } from './types' +export type { MessageQueryParams, NotificationQueryParams } from './types' diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 78e07fee31e..6ed9a229899 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -43,7 +43,7 @@ import { LabelsQuery } from './label/query' import { CollaboratorsQuery } from './collaborators/query' interface CreateQueryResult { - unsubscribe: () => void + unsubscribe: (isUpdate: boolean) => void } const maxQueriesCache = 50 @@ -127,8 +127,8 @@ export class LiveQueries { this.queries.set(query.id, query) return { - unsubscribe: () => { - this.unsubscribeQuery(query) + unsubscribe: (isUpdate) => { + this.unsubscribeQuery(query, isUpdate) } } } @@ -206,12 +206,15 @@ export class LiveQueries { this.unsubscribed.delete(id) } - private unsubscribeQuery (query: AnyQuery): void { + private unsubscribeQuery (query: AnyQuery, force: boolean = false): void { this.unsubscribed.add(query.id) query.removeCallback() if (this.unsubscribed.size > maxQueriesCache) { this.removeOldQueries() } + if (force) { + this.unsubscribe(query.id) + } } close (): void { diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index cad726d2ef4..f7bce2ebf86 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -410,6 +410,7 @@ export class NotificationContextsQuery implements PagedQuery it.id === notification.id)) return if (context !== undefined) { const message = this.params.notifications.message === true @@ -422,7 +423,7 @@ export class NotificationContextsQuery implements PagedQuery { +export class NotificationQuery implements PagedQuery { private result: QueryResult | Promise> constructor ( @@ -56,7 +56,7 @@ export class NotificationQuery implements PagedQuery, initialResult?: QueryResult ) { @@ -64,7 +64,7 @@ export class NotificationQuery implements PagedQuery { + if (this.params.strict === true) return if (this.result instanceof Promise) this.result = await this.result await this.loadPage(SortingOrder.Ascending, this.result.getLast()?.created) } async requestLoadPrevPage (): Promise { + if (this.params.strict === true) return if (this.result instanceof Promise) this.result = await this.result await this.loadPage(SortingOrder.Descending, this.result.getFirst()?.created) } @@ -186,7 +188,8 @@ export class NotificationQuery implements PagedQuery { + private async find (params: NotificationQueryParams): Promise { + delete params.strict const notifications = await this.client.findNotifications(params, this.id) if (params.message !== true) return notifications @@ -215,9 +218,27 @@ export class NotificationQuery implements PagedQuery { return last } + shift (): T | undefined { + const array = Array.from(this.objectById.values()) + const first = array[0] + if (first === undefined) return + this.objectById.delete(this.getId(first)) + return first + } + update (object: T): void { this.objectById.set(this.getId(object), object) } diff --git 
a/packages/query/src/types.ts b/packages/query/src/types.ts index e6f9d12a7bc..9ea07662820 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -19,7 +19,8 @@ import { type Window, type ComparisonOperator, type CardID, - type MessageID + type MessageID, + FindNotificationsParams } from '@hcengineering/communication-types' import { QueryResult } from './result' @@ -101,3 +102,4 @@ export interface OneMessageQueryParams extends BaseMessageQueryParams { } export type MessageQueryParams = OneMessageQueryParams | ManyMessagesQueryParams +export type NotificationQueryParams = FindNotificationsParams & { strict?: boolean } diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index ccab09ff196..b651db6dcb6 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -108,7 +108,7 @@ export interface DbAdapter { messageCreated: Date, type: NotificationType, read: boolean, - content: NotificationContent | undefined, + content: NotificationContent, created: Date ) => Promise updateNotification: (context: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise @@ -132,6 +132,7 @@ export interface DbAdapter { findLabels: (params: FindLabelsParams) => Promise updateLabels: (cardId: CardID, update: LabelUpdates) => Promise + getCardTitle: (cardId: CardID) => Promise getAccountsByPersonIds: (ids: string[]) => Promise getNameByAccount: (id: AccountID) => Promise getMessageCreated: (cardId: CardID, messageId: MessageID) => Promise diff --git a/packages/sdk-types/src/events/notification.ts b/packages/sdk-types/src/events/notification.ts index 97e50d69f69..e66a570f18c 100644 --- a/packages/sdk-types/src/events/notification.ts +++ b/packages/sdk-types/src/events/notification.ts @@ -54,7 +54,7 @@ export interface CreateNotificationEvent extends BaseEvent { notificationId?: NotificationID notificationType: NotificationType read: boolean - content?: NotificationContent + content: NotificationContent cardId: CardID contextId: ContextID messageId: MessageID diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts index 8dea22f8bb7..2b129ed6445 100644 --- a/packages/server/src/notification/notification.ts +++ b/packages/server/src/notification/notification.ts @@ -25,6 +25,7 @@ import { type CardID, type CardType, type ContextID, + Markdown, type MessageID, NewMessageLabelID, type NotificationContext, @@ -36,7 +37,9 @@ import { import type { Enriched, TriggerCtx } from '../types' import { findAccount } from '../utils' -import { findMessage } from '../triggers/utils' +import { findMessage, getNameBySocialID } from '../triggers/utils' +import { markdownToMarkup } from '@hcengineering/text-markdown' +import { jsonToMarkup, markupToText } from '@hcengineering/text-core' const BATCH_SIZE = 500 @@ -44,7 +47,15 @@ export async function notify (ctx: TriggerCtx, event: Enriched): Promise< switch (event.type) { case MessageEventType.CreateMessage: { if (event.options?.noNotify === true || event.messageId == null) return [] - return await notifyMessage(ctx, event.cardId, event.cardType, event.messageId, event.socialId, event.date) + return await notifyMessage( + ctx, + event.cardId, + event.cardType, + event.messageId, + event.content, + event.socialId, + event.date + ) } case MessageEventType.ReactionPatch: { if (event.operation.opcode === 'add') { @@ -164,7 +175,10 @@ async function notifyReaction ( const content: ReactionNotificationContent = { emoji: 
reaction, - creator: socialId + creator: socialId, + senderName: (await getNameBySocialID(ctx, socialId)) ?? 'System', + title: 'Reacted to your message', + shortText: reaction } result.push({ type: NotificationEventType.CreateNotification, @@ -198,6 +212,7 @@ async function notifyMessage ( cardId: CardID, cardType: CardType, messageId: MessageID, + markdown: Markdown, socialId: SocialID, date: Date ): Promise { @@ -205,6 +220,8 @@ async function notifyMessage ( const creatorAccount = await findAccount(ctx, socialId) const result: Event[] = [] + const cardTitle = (await ctx.db.getCardTitle(cardId)) ?? 'New message' + let isFirstBatch = true for await (const dbCollaborators of cursor) { @@ -221,9 +238,12 @@ async function notifyMessage ( ctx, cardId, cardType, + cardTitle, messageId, + markdown, date, collaborator, + socialId, creatorAccount, context ) @@ -243,9 +263,12 @@ async function processCollaborator ( ctx: TriggerCtx, cardId: CardID, cardType: CardType, + cardTitle: string, messageId: MessageID, + markdown: Markdown, date: Date, collaborator: AccountID, + creatorSocialId: SocialID, creatorAccount?: AccountID, context?: NotificationContext ): Promise { @@ -268,6 +291,8 @@ async function processCollaborator ( if (contextId == null || isOwn) return result + const text = markupToText(jsonToMarkup(markdownToMarkup(markdown))) + const shortText = text.slice(0, 100) result.push({ type: NotificationEventType.CreateNotification, notificationType: NotificationType.Message, @@ -277,6 +302,11 @@ async function processCollaborator ( messageId, messageCreated: date, date, + content: { + senderName: (await getNameBySocialID(ctx, creatorSocialId)) ?? 'System', + title: cardTitle, + shortText: shortText.length < text.length ? shortText + '...' : text + }, read: date.getTime() < (context?.lastView?.getTime() ?? 0) }) return result diff --git a/packages/server/src/triggers/card.ts b/packages/server/src/triggers/card.ts index 00db7900343..296a7b96437 100644 --- a/packages/server/src/triggers/card.ts +++ b/packages/server/src/triggers/card.ts @@ -24,7 +24,6 @@ import { import { type ActivityTypeUpdate, ActivityUpdateType, MessageType } from '@hcengineering/communication-types' import type { Enriched, TriggerCtx, TriggerFn, Triggers } from '../types' -import { getNameBySocialID } from './utils' async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: UpdateCardTypeEvent): Promise { const updateDate: ActivityTypeUpdate = { @@ -32,15 +31,13 @@ async function createActivityOnCardTypeUpdate (ctx: TriggerCtx, event: UpdateCar newType: event.cardType } - const sender = await getNameBySocialID(ctx, event.socialId) - return [ { type: MessageEventType.CreateMessage, messageType: MessageType.Activity, cardId: event.cardId, cardType: event.cardType, - content: `${sender} changed type`, + content: 'Changed type', socialId: event.socialId, date: event.date, extra: { diff --git a/packages/server/src/triggers/utils.ts b/packages/server/src/triggers/utils.ts index 975c9324a5f..720d8dd52ac 100644 --- a/packages/server/src/triggers/utils.ts +++ b/packages/server/src/triggers/utils.ts @@ -105,17 +105,15 @@ export async function getAddCollaboratorsMessageContent ( sender: AccountID | undefined, collaborators: AccountID[] ): Promise { - const senderName = sender != null ? (await ctx.db.getNameByAccount(sender)) ?? 
'System' : 'System' - if (sender != null && collaborators.length === 1 && collaborators.includes(sender)) { - return `${senderName} joined` + return 'Joined card' } const collaboratorsNames = (await Promise.all(collaborators.map((it) => ctx.db.getNameByAccount(it)))).filter( (it): it is string => it != null && it !== '' ) - return `${senderName} added ${collaboratorsNames.join(', ')}` + return `Added ${collaboratorsNames.join(', ')}` } export async function getRemoveCollaboratorsMessageContent ( @@ -123,14 +121,13 @@ export async function getRemoveCollaboratorsMessageContent ( sender: AccountID | undefined, collaborators: AccountID[] ): Promise { - const senderName = sender != null ? (await ctx.db.getNameByAccount(sender)) ?? 'System' : 'System' if (sender != null && collaborators.length === 1 && collaborators.includes(sender)) { - return `${senderName} left` + return 'Left card' } const collaboratorsNames = (await Promise.all(collaborators.map((it) => ctx.db.getNameByAccount(it)))).filter( (it): it is string => it != null && it !== '' ) - return `${senderName} removed ${collaboratorsNames.join(', ')}` + return `Removed ${collaboratorsNames.join(', ')}` } diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index d3c686a53ff..a13a86904c5 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -47,13 +47,17 @@ export enum NotificationType { Reaction = 'reaction' } -export interface ReactionNotificationContent { +export type NotificationContent = { + title: string + shortText: string + senderName: string +} & Record + +export type ReactionNotificationContent = NotificationContent & { emoji: string creator: SocialID } -export type NotificationContent = Record - export interface NotificationContext { id: ContextID cardId: CardID From cdba7387be4af42e82520fb6d0be67f0ccc5da37 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 30 Jun 2025 14:50:11 +0400 Subject: [PATCH 131/636] Fix format (#78) Signed-off-by: Kristina Fefelova --- packages/client-query/src/query.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index 6bc4d0a292a..cff920334d1 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -81,7 +81,10 @@ class BaseQuery
, C extends (r: any) => void> { } export class MessagesQuery extends BaseQuery> { - override createQuery (params: MessageQueryParams, callback: PagedQueryCallback): { unsubscribe: (isUpdate: boolean) => void } { + override createQuery ( + params: MessageQueryParams, + callback: PagedQueryCallback + ): { unsubscribe: (isUpdate: boolean) => void } { return getLiveQueries().queryMessages(params, callback) } } From de20669d03856d5ae400e843f5eb2be5ee0f2624 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 30 Jun 2025 15:53:12 +0400 Subject: [PATCH 132/636] Fix query (#79) Signed-off-by: Kristina Fefelova --- packages/client-query/src/query.ts | 14 +++++++++----- packages/query/src/lq.ts | 6 +++--- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/client-query/src/query.ts b/packages/client-query/src/query.ts index cff920334d1..d6d3fa3ddee 100644 --- a/packages/client-query/src/query.ts +++ b/packages/client-query/src/query.ts @@ -36,12 +36,16 @@ class BaseQuery
, C extends (r: any) => void> { if (dontDestroy !== true) { const destroyFn = getOnDestroy() destroyFn(() => { - this.unsubscribe(false) + this.unsubscribe() }) } } - unsubscribe: (isUpdate: boolean) => void = () => {} + private _unsubscribe: (isUpdate: boolean) => void = () => {} + + public unsubscribe (): void { + this._unsubscribe(false) + } query (params: P, callback: C): boolean { if (!this.needUpdate(params, callback)) { @@ -53,16 +57,16 @@ class BaseQuery
, C extends (r: any) => void> { private doQuery (query: P, callback: C): void { const isUpdate = this.oldQuery !== undefined - this.unsubscribe(isUpdate) + this._unsubscribe(isUpdate) this.oldCallback = callback this.oldQuery = query const { unsubscribe } = this.createQuery(query, callback) - this.unsubscribe = (isUpdate) => { + this._unsubscribe = (isUpdate) => { unsubscribe(isUpdate) this.oldCallback = undefined this.oldQuery = undefined - this.unsubscribe = () => {} + this._unsubscribe = () => {} } } diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 6ed9a229899..450b2c7052d 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -43,7 +43,7 @@ import { LabelsQuery } from './label/query' import { CollaboratorsQuery } from './collaborators/query' interface CreateQueryResult { - unsubscribe: (isUpdate: boolean) => void + unsubscribe: (force: boolean) => void } const maxQueriesCache = 50 @@ -127,8 +127,8 @@ export class LiveQueries { this.queries.set(query.id, query) return { - unsubscribe: (isUpdate) => { - this.unsubscribeQuery(query, isUpdate) + unsubscribe: (force) => { + this.unsubscribeQuery(query, force) } } } From 8d50243ed2937bd922bee8d71542dd880649abb7 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 30 Jun 2025 21:10:03 +0400 Subject: [PATCH 133/636] Make card optional for messages/message groups (#80) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/db/message.ts | 6 ++++-- packages/server/src/middleware/validate.ts | 4 ++-- packages/types/src/query.ts | 4 ++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index 309aff01053..a5cef55d01c 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -1010,8 +1010,10 @@ export class MessagesDb extends BaseDb { let index = 2 - where.push(`mg.card_id = $${index++}::varchar`) - values.push(params.card) + if (params.card != null) { + where.push(`mg.card_id = $${index++}::varchar`) + values.push(params.card) + } if (params.blobId != null) { where.push(`mg.blob_id = $${index++}`) diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 6534cd78095..d3193dfdc89 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -198,7 +198,7 @@ const FindParamsSchema = z const FindMessagesParamsSchema = FindParamsSchema.extend({ id: MessageIDSchema.optional(), - card: CardIDSchema, + card: CardIDSchema.optional(), files: z.boolean().optional(), reactions: z.boolean().optional(), replies: z.boolean().optional(), @@ -207,7 +207,7 @@ const FindMessagesParamsSchema = FindParamsSchema.extend({ }).strict() const FindMessagesGroupsParamsSchema = FindParamsSchema.extend({ - card: CardIDSchema, + card: CardIDSchema.optional(), blobId: BlobIDSchema.optional(), patches: z.boolean().optional(), fromDate: dateOrRecordSchema.optional(), diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 6657c74486e..a8acc304ee7 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -43,7 +43,7 @@ interface FindParams { export interface FindMessagesParams extends FindParams { id?: MessageID - card: CardID + card?: CardID files?: boolean reactions?: boolean replies?: boolean @@ -52,7 +52,7 @@ export interface FindMessagesParams extends FindParams { } export interface FindMessagesGroupsParams extends FindParams { - card: CardID + card?: CardID blobId?: BlobID 
patches?: boolean fromDate?: Partial> | Date From 54d792ed7bd5c86e979c24792877da5222b3423e Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 1 Jul 2025 16:56:22 +0700 Subject: [PATCH 134/636] fix: provide service name in token extra Signed-off-by: Alexander Onnikov --- internal/pkg/mediaconvert/scheduler.go | 2 +- internal/pkg/mediaconvert/transcoder.go | 2 +- internal/pkg/token/token.go | 21 +++++++++------------ internal/pkg/token/token_test.go | 12 ++++-------- 4 files changed, 15 insertions(+), 22 deletions(-) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 2f3e2f6f55c..08e9b18dbce 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -111,7 +111,7 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { defer logger.Debug("finished") logger.Debug("phase 1: get a token") - var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream", "datalake") + var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream") if err != nil { logger.Error("can not create token", zap.Error(err)) return diff --git a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index cffa1667761..4d7231dc939 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -66,7 +66,7 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er defer logger.Debug("finished") logger.Debug("phase 1: get a token") - var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream", "datalake") + var tokenString, err = token.NewToken(p.cfg.ServerSecret, task.Workspace, "stream") if err != nil { logger.Error("can not create token", zap.Error(err)) return nil, errors.Wrapf(err, "can not create token") diff --git a/internal/pkg/token/token.go b/internal/pkg/token/token.go index 1f81a764843..74da213b746 100644 --- a/internal/pkg/token/token.go +++ b/internal/pkg/token/token.go @@ -16,7 +16,6 @@ package token import ( "fmt" - "time" "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" @@ -24,21 +23,19 @@ import ( // Token represents Claims for the platform token type Token struct { - jwt.RegisteredClaims - Account string `json:"account"` - Workspace string `json:"workspace,omitempty"` - Extra map[string]interface{} `json:"extra,omitempty"` + jwt.MapClaims + Account string `json:"account"` + Workspace string `json:"workspace,omitempty"` + Extra map[string]any `json:"extra,omitempty"` } // NewToken creates a new platform token -func NewToken(serverSecret, workspace, issuer, audience string) (string, error) { +func NewToken(serverSecret, workspace, service string) (string, error) { var res = Token{ Account: uuid.NewString(), Workspace: workspace, - RegisteredClaims: jwt.RegisteredClaims{ - Issuer: issuer, - Audience: jwt.ClaimStrings{audience}, - ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Hour * 12)), + Extra: map[string]any{ + "service": service, }, } return res.Encode(serverSecret) @@ -58,7 +55,7 @@ func (t *Token) Encode(serverSecret string) (string, error) { // Decode decodes a token by a passed configuration func Decode(secretKey, tokenString string) (*Token, error) { - token, err := jwt.ParseWithClaims(tokenString, &Token{}, func(token *jwt.Token) (interface{}, error) { + token, err := jwt.ParseWithClaims(tokenString, &Token{}, func(token *jwt.Token) (any, error) { if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { return nil, 
fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) } @@ -73,5 +70,5 @@ func Decode(secretKey, tokenString string) (*Token, error) { return claims, nil } - return nil, fmt.Errorf("invalid token: can't parse claims") + return nil, fmt.Errorf("invalid token") } diff --git a/internal/pkg/token/token_test.go b/internal/pkg/token/token_test.go index 93d97bdb539..7c46879efbc 100644 --- a/internal/pkg/token/token_test.go +++ b/internal/pkg/token/token_test.go @@ -15,26 +15,22 @@ package token_test import ( "testing" - "time" - "github.com/golang-jwt/jwt/v5" "github.com/hcengineering/stream/internal/pkg/token" "github.com/stretchr/testify/require" ) func Test_GenerateSimpleServiceToken(t *testing.T) { - var _, err = token.NewToken("secret", "ws", "issuer", "aud") + var _, err = token.NewToken("secret", "ws", "stream") require.NoError(t, err) } func Test_ParseSimpleServiceToken(t *testing.T) { const secret = "secret" - tokenString, err := token.NewToken(secret, "ws", "issuer", "aud") + tokenString, err := token.NewToken(secret, "ws", "stream") require.NoError(t, err) tok, err := token.Decode(secret, tokenString) require.NoError(t, err) - require.Equal(t, tok.Issuer, "issuer") - require.Equal(t, tok.Audience, jwt.ClaimStrings{"aud"}) - require.Equal(t, tok.Workspace, "ws") - require.True(t, tok.ExpiresAt.After(time.Now())) + require.Equal(t, "ws", tok.Workspace) + require.Equal(t, "stream", tok.Extra["service"]) } From a68925459af13c8f6db236effb684311f4b79ecc Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 1 Jul 2025 23:51:15 +0700 Subject: [PATCH 135/636] fix: do not cache m3u8 files Signed-off-by: Alexander Onnikov --- internal/pkg/storage/datalake.go | 5 ++++- internal/pkg/storage/s3.go | 2 +- internal/pkg/storage/storage.go | 7 ++++++- internal/pkg/uploader/uploader.go | 5 ++++- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index b9560e0710a..e76e0b1a876 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -84,7 +84,7 @@ func getObjectKeyFromPath(s string) string { } // PutFile uploads file to the datalake -func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { +func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string, options PutOptions) error { // #nosec file, err := os.Open(fileName) if err != nil { @@ -127,6 +127,9 @@ func (d *DatalakeStorage) PutFile(ctx context.Context, fileName string) error { req.Header.SetMethod(fasthttp.MethodPost) req.Header.Add("Authorization", "Bearer "+d.token) req.Header.SetContentType(writer.FormDataContentType()) + if options.NoCache { + req.Header.Add("Cache-Control", "max-age=0, must-revalidate") + } req.SetBody(body.Bytes()) if err := d.client.Do(req, res); err != nil { diff --git a/internal/pkg/storage/s3.go b/internal/pkg/storage/s3.go index e4f3050d094..aea0adec5b6 100644 --- a/internal/pkg/storage/s3.go +++ b/internal/pkg/storage/s3.go @@ -84,7 +84,7 @@ func (u *S3Storage) DeleteFile(ctx context.Context, fileName string) error { } // PutFile uploads file to the s3 storage -func (u *S3Storage) PutFile(ctx context.Context, fileName string) error { +func (u *S3Storage) PutFile(ctx context.Context, fileName string, options PutOptions) error { var _, objectKey = filepath.Split(fileName) var logger = u.logger.With(zap.String("upload", u.bucketName), zap.String("fileName", fileName)) diff --git a/internal/pkg/storage/storage.go b/internal/pkg/storage/storage.go index 
78d28cdf133..cd9c9b8aac5 100644 --- a/internal/pkg/storage/storage.go +++ b/internal/pkg/storage/storage.go @@ -37,9 +37,14 @@ type BlobInfo struct { ETag string } +// PutOptions represents options for the PutFile operation +type PutOptions struct { + NoCache bool +} + // Storage represents file-based storage type Storage interface { - PutFile(ctx context.Context, fileName string) error + PutFile(ctx context.Context, fileName string, options PutOptions) error DeleteFile(ctx context.Context, fileName string) error GetFile(ctx context.Context, fileName, destination string) error StatFile(ctx context.Context, fileName string) (*BlobInfo, error) diff --git a/internal/pkg/uploader/uploader.go b/internal/pkg/uploader/uploader.go index baffcaa82e4..2bdee38c763 100644 --- a/internal/pkg/uploader/uploader.go +++ b/internal/pkg/uploader/uploader.go @@ -316,7 +316,10 @@ func (u *uploaderImpl) uploadAndDelete(f string) { for attempt := range u.options.RetryCount { logger = logger.With(zap.Int("attempt", attempt)) var putCtx, putCancel = context.WithTimeout(u.uploadCtx, u.options.Timeout) - var err = u.storage.PutFile(putCtx, f) + var putOptions = storage.PutOptions{ + NoCache: u.shouldDeleteOnStop(f), + } + var err = u.storage.PutFile(putCtx, f, putOptions) putCancel() if err != nil { From 0e4b200720d1420e68ba83dede591a752b3e5596 Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Thu, 3 Jul 2025 18:39:06 +0700 Subject: [PATCH 136/636] UBERF-12153 Transcode only created blobs Signed-off-by: Alexander Onnikov --- internal/pkg/queue/queue.go | 10 ++++++---- internal/pkg/queue/worker.go | 1 + 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/internal/pkg/queue/queue.go b/internal/pkg/queue/queue.go index d9f6a542086..b79786f67d6 100644 --- a/internal/pkg/queue/queue.go +++ b/internal/pkg/queue/queue.go @@ -36,14 +36,16 @@ type TranscodeRequest struct { BlobID string WorkspaceUUID string ContentType string + Source map[string]any } // TranscodeResult represents transcode result. 
type TranscodeResult struct { - BlobID string `json:"blobId"` - WorkspaceUUID string `json:"workspaceUuid"` - Playlist string `json:"playlist"` - Thumbnail string `json:"thumbnail"` + BlobID string `json:"blobId"` + WorkspaceUUID string `json:"workspaceUuid"` + Source map[string]any `json:"source"` + Playlist string `json:"playlist"` + Thumbnail string `json:"thumbnail"` } // ConsumerOptions represents options for the consumer diff --git a/internal/pkg/queue/worker.go b/internal/pkg/queue/worker.go index 60def900d5f..a80b4c702ae 100644 --- a/internal/pkg/queue/worker.go +++ b/internal/pkg/queue/worker.go @@ -115,6 +115,7 @@ func (w *Worker) processMessage(ctx context.Context, msg kafka.Message, logger * result := TranscodeResult{ BlobID: req.BlobID, WorkspaceUUID: req.WorkspaceUUID, + Source: req.Source, Playlist: res.Playlist, Thumbnail: res.Thumbnail, } From ceefe63ef11f42839a5fcfe59010d00c87dce045 Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 4 Jul 2025 14:37:43 +0400 Subject: [PATCH 137/636] Add update blob event (#81) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/adapter.ts | 12 ++++- packages/cockroach/src/db/message.ts | 61 ++++++++++++++++++++++ packages/sdk-types/src/db.ts | 3 +- packages/sdk-types/src/events/message.ts | 10 +++- packages/server/src/middleware/db.ts | 2 + packages/server/src/middleware/validate.ts | 11 +++- packages/shared/src/patch.ts | 26 ++++++++- packages/shared/src/processor.ts | 11 +++- packages/types/src/message.ts | 9 +++- 9 files changed, 136 insertions(+), 9 deletions(-) diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 0349e1e269f..56d065d5a65 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -45,7 +45,7 @@ import { type LinkPreviewData, type LinkPreviewID, type MessageExtra, - type BlobData + type BlobData, BlobUpdateData } from '@hcengineering/communication-types' import type { DbAdapter, @@ -162,6 +162,16 @@ export class CockroachAdapter implements DbAdapter { await this.message.setBlobs(cardId, messageId, blobs, socialId, date) } + async updateBlobs ( + cardId: CardID, + messageId: MessageID, + blobs: BlobUpdateData[], + socialId: SocialID, + date: Date + ): Promise { + await this.message.updateBlobs(cardId, messageId, blobs, socialId, date) + } + async attachLinkPreviews ( cardId: CardID, messageId: MessageID, diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index a5cef55d01c..eaffbee23d0 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -20,6 +20,7 @@ import { AttachThreadPatchData, type BlobData, type BlobID, + BlobUpdateData, type CardID, type CardType, DetachBlobsPatchData, @@ -40,6 +41,7 @@ import { type SocialID, SortingOrder, type Thread, + UpdateBlobsPatchData, UpdateThreadPatchData } from '@hcengineering/communication-types' import type { ThreadUpdates, ThreadQuery } from '@hcengineering/communication-sdk-types' @@ -298,6 +300,65 @@ export class MessagesDb extends BaseDb { }) } + async updateBlobs ( + cardId: CardID, + messageId: MessageID, + blobs: BlobUpdateData[], + socialId: SocialID, + date: Date + ): Promise { + if (blobs.length === 0) return + + const colMap = { + mimeType: { col: 'type', cast: '::varchar' }, + fileName: { col: 'filename', cast: '::varchar' }, + size: { col: 'size', cast: '::int8' }, + metadata: { col: 'meta', cast: '::jsonb' } + } as const + type UpdateKey = keyof typeof colMap + const updateKeys = Object.keys(colMap) as UpdateKey[] + 
+ const params: any[] = [this.workspace, cardId, messageId] + + const rowLen = 1 + updateKeys.length + + const tuples = blobs.map((blob, i) => { + params.push(blob.blobId) + updateKeys.forEach((k) => params.push(blob[k] ?? null)) + + const offset = 3 + i * rowLen + const casts = ['::uuid', ...updateKeys.map((k) => colMap[k].cast)] + const placeholders = casts.map((cast, idx) => `$${offset + idx + 1}${cast}`) + return `(${placeholders.join(', ')})` + }) + + const setClauses = updateKeys.map((k) => { + const col = colMap[k].col + return `${col} = COALESCE(v.${col}, f.${col})` + }) + + const updateSql = ` + UPDATE ${TableName.File} AS f + SET ${setClauses.join(',\n ')} + FROM (VALUES ${tuples.join(',\n ')}) AS v(blob_id, ${updateKeys.map((k) => colMap[k].col).join(', ')}) + WHERE f.workspace_id = $1::uuid + AND f.card_id = $2::varchar + AND f.message_id = $3::varchar + AND f.blob_id = v.blob_id; + ` + + const inDb = await this.isMessageInDb(cardId, messageId) + if (!inDb) { + await this.getRowClient().begin(async (txn) => { + await this.execute(updateSql, params, 'update blobs', txn) + const data: UpdateBlobsPatchData = { operation: 'update', blobs } + await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, txn) + }) + } else { + await this.execute(updateSql, params, 'update blobs') + } + } + async attachLinkPreviews ( cardId: CardID, messageId: MessageID, diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index b651db6dcb6..820ee92746a 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -44,7 +44,7 @@ import { NotificationType, BlobData, LinkPreviewData, - LinkPreviewID + LinkPreviewID, BlobUpdateData } from '@hcengineering/communication-types' export interface DbAdapter { @@ -76,6 +76,7 @@ export interface DbAdapter { attachBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise detachBlobs: (card: CardID, messageId: MessageID, blobId: BlobID[], socialId: SocialID, date: Date) => Promise setBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise + updateBlobs: (cardId: CardID, messageId: MessageID, data: BlobUpdateData[], socialId: SocialID, date: Date) => Promise attachLinkPreviews: ( cardId: CardID, diff --git a/packages/sdk-types/src/events/message.ts b/packages/sdk-types/src/events/message.ts index 06000715a02..28a50e0b82d 100644 --- a/packages/sdk-types/src/events/message.ts +++ b/packages/sdk-types/src/events/message.ts @@ -10,7 +10,8 @@ import type { MessagesGroup, MessageExtra, BlobData, - LinkPreviewData + LinkPreviewData, + BlobUpdateData } from '@hcengineering/communication-types' import type { BaseEvent } from './common' @@ -134,6 +135,11 @@ export interface SetBlobsOperation { blobs: BlobData[] } +export interface UpdateBlobsOperation { + opcode: 'update' + blobs: BlobUpdateData[] +} + // For system and message author export interface BlobPatchEvent extends BaseEvent { type: MessageEventType.BlobPatch @@ -141,7 +147,7 @@ export interface BlobPatchEvent extends BaseEvent { cardId: CardID messageId: MessageID - operations: (AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation)[] + operations: (AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation | UpdateBlobsOperation)[] socialId: SocialID date?: Date diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index e05bdb32290..b3374ac99bb 100644 --- a/packages/server/src/middleware/db.ts +++ 
b/packages/server/src/middleware/db.ts @@ -271,6 +271,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { await this.db.detachBlobs(event.cardId, event.messageId, operation.blobIds, event.socialId, event.date) } else if (operation.opcode === 'set') { await this.db.setBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) + } else if (operation.opcode === 'update') { + await this.db.updateBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) } } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index d3193dfdc89..f734275c54d 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -167,6 +167,14 @@ const BlobDataSchema = z.object({ metadata: z.record(z.string(), z.any()).optional() }) +const UpdateBlobDataSchema = z.object({ + blobId: BlobIDSchema, + mimeType: z.string().optional(), + fileName: z.string().optional(), + size: z.number().optional(), + metadata: z.record(z.string(), z.any()).optional() +}) + const LinkPreviewDataSchema = z .object({ previewId: LinkPreviewIDSchema, @@ -329,7 +337,8 @@ const ReactionPatchEventSchema = BaseEventSchema.extend({ const BlobOperationSchema = z.union([ z.object({ opcode: z.literal('attach'), blobs: z.array(BlobDataSchema).nonempty() }), z.object({ opcode: z.literal('detach'), blobIds: z.array(BlobIDSchema).nonempty() }), - z.object({ opcode: z.literal('set'), blobs: z.array(BlobDataSchema).nonempty() }) + z.object({ opcode: z.literal('set'), blobs: z.array(BlobDataSchema).nonempty() }), + z.object({ opcode: z.literal('update'), blobs: z.array(UpdateBlobDataSchema).nonempty() }) ]) const BlobPatchEventSchema = BaseEventSchema.extend({ diff --git a/packages/shared/src/patch.ts b/packages/shared/src/patch.ts index 8c04113ab28..7bdabb5fdc1 100644 --- a/packages/shared/src/patch.ts +++ b/packages/shared/src/patch.ts @@ -28,7 +28,8 @@ import { PatchType, ReactionPatch, SocialID, - ThreadPatch + ThreadPatch, + BlobUpdateData } from '@hcengineering/communication-types' export function applyPatches (message: Message, patches: Patch[], allowedPatchTypes: PatchType[] = []): Message { @@ -85,6 +86,8 @@ function patchBlobs (message: Message, patch: BlobPatch): Message { return detachBlobs(message, patch.data.blobIds) } else if (patch.data.operation === 'set') { return setBlobs(message, patch.data.blobs, patch.created, patch.creator) + } else if (patch.data.operation === 'update') { + return updateBlobs(message, patch.data.blobs) } return message } @@ -149,6 +152,27 @@ function attachBlobs (message: Message, data: BlobData[], created: Date, creator } } +function updateBlobs (message: Message, updates: BlobUpdateData[]): Message { + if (updates.length === 0) return message + const updatedBlobs = [] + for (const blob of message.blobs) { + const update = updates.find((it) => it.blobId === blob.blobId) + if (update === undefined) { + updatedBlobs.push(blob) + } else { + updatedBlobs.push({ + ...blob, + ...update + }) + } + } + + return { + ...message, + blobs: updatedBlobs + } +} + function detachBlobs (message: Message, blobIds: BlobID[]): Message { const blobs = message.blobs.filter((it) => !blobIds.includes(it.blobId)) if (blobs.length === message.blobs.length) return message diff --git a/packages/shared/src/processor.ts b/packages/shared/src/processor.ts index f09e1fa917c..0a96908bd31 100644 --- a/packages/shared/src/processor.ts +++ b/packages/shared/src/processor.ts @@ -27,6 
+27,7 @@ import { PatchType, SetBlobsPatchData, SetLinkPreviewsPatchData, + UpdateBlobsPatchData, UpdateThreadPatchData } from '@hcengineering/communication-types' import { @@ -43,6 +44,7 @@ import { RemoveNotificationContextEvent, SetBlobsOperation, SetLinkPreviewsOperation, + UpdateBlobsOperation, UpdateNotificationContextEvent, UpdateThreadOperation } from '@hcengineering/communication-sdk-types' @@ -216,8 +218,8 @@ export class NotificationProcessor { } function blobOperationToPatchData ( - operation: AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation -): AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | undefined { + operation: AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation | UpdateBlobsOperation +): AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | UpdateBlobsPatchData | undefined { if (operation.opcode === 'attach') { return { operation: 'attach', @@ -233,6 +235,11 @@ function blobOperationToPatchData ( operation: 'set', blobs: operation.blobs } + } else if (operation.opcode === 'update') { + return { + operation: 'update', + blobs: operation.blobs + } } return undefined diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index d60b1853f50..633a405dddc 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -151,7 +151,7 @@ export interface RemoveReactionPatchData { export interface BlobPatch extends BasePatch { type: PatchType.blob - data: AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData + data: AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | UpdateBlobsPatchData } export interface AttachBlobsPatchData { @@ -169,6 +169,11 @@ export interface SetBlobsPatchData { blobs: BlobData[] } +export interface UpdateBlobsPatchData { + operation: 'update' + blobs: BlobUpdateData[] +} + export interface LinkPreviewPatch extends BasePatch { type: PatchType.linkPreview data: AttachLinkPreviewsPatchData | DetachLinkPreviewsPatchData | SetLinkPreviewsPatchData @@ -233,6 +238,8 @@ export interface BlobData { metadata?: BlobMetadata } +export type BlobUpdateData = { blobId: BlobID } & Partial + export interface AttachedBlob extends BlobData { creator: SocialID created: Date From 588546f9dede7b75c87b62adac9372c26149eb75 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 7 Jul 2025 10:23:22 +0400 Subject: [PATCH 138/636] Put blobId to notifications and refactor sql (#82) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/adapter.ts | 10 +- packages/cockroach/src/db/mapping.ts | 60 ++--- packages/cockroach/src/db/notification.ts | 244 +++++++++--------- packages/cockroach/src/init.ts | 41 ++- packages/cockroach/src/schema.ts | 3 +- packages/query/src/messages/query.ts | 2 +- .../query/src/notification-contexts/query.ts | 74 +++--- packages/query/src/notifications/query.ts | 4 +- packages/query/src/utils.ts | 24 +- packages/sdk-types/src/db.ts | 4 +- packages/sdk-types/src/events/notification.ts | 6 +- packages/server/src/middleware/validate.ts | 2 +- .../server/src/notification/notification.ts | 26 +- packages/server/src/triggers/message.ts | 15 +- packages/server/src/triggers/notification.ts | 29 ++- packages/server/src/triggers/utils.ts | 30 ++- packages/types/src/notification.ts | 6 +- packages/types/src/query.ts | 2 +- packages/yaml/src/parse.ts | 12 +- packages/yaml/src/utils.ts | 2 +- 20 files changed, 341 insertions(+), 255 deletions(-) diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 
56d065d5a65..360fb8da57f 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -276,12 +276,20 @@ export class CockroachAdapter implements DbAdapter { return await this.notification.removeNotifications(contextId, account, ids) } + async removeNotificationsBlobId (cardId: CardID, blobId: string): Promise { + await this.notification.removeNotificationsBlobId(cardId, blobId) + } + + async updateNotificationsBlobId (cardId: CardID, blobId: string, from: Date, to: Date): Promise { + await this.notification.updateNotificationsBlobId(cardId, blobId, from, to) + } + async createContext ( account: AccountID, card: CardID, lastUpdate: Date, lastView: Date, - lastNotify?: Date + lastNotify: Date ): Promise { return await this.notification.createContext(account, card, lastUpdate, lastView, lastNotify) } diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 2898a43be33..9ebf1e01d9a 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -74,10 +74,6 @@ interface RawNotification extends NotificationDb { message_content?: Markdown message_creator?: SocialID message_data?: MessageExtra - message_group_blob_id?: BlobID - message_group_from_date?: Date - message_group_to_date?: Date - message_group_count?: number message_patches?: { type: PatchType data: Record @@ -219,16 +215,18 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): const created = new Date(raw.created) let message: Message | undefined - const patches = (raw.message_patches ?? []).map((it) => - toPatch({ - card_id: card, - message_id: raw.message_id, - type: it.type, - data: it.data, - creator: it.creator, - created: new Date(it.created) - }) - ) + const patches = (raw.message_patches ?? []) + .map((it) => + toPatch({ + card_id: card, + message_id: raw.message_id, + type: it.type, + data: it.data, + creator: it.creator, + created: new Date(it.created) + }) + ) + .sort((a, b) => a.created.getTime() - b.created.getTime()) if ( raw.message_content != null && @@ -236,13 +234,15 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): raw.message_created != null && raw.message_type != null ) { - const messageBlobs = raw.message_files?.map((it) => - toBlob({ - card_id: card, - message_id: raw.message_id, - ...it - }) - ) + const messageBlobs = raw.message_files + ?.map((it) => + toBlob({ + card_id: card, + message_id: raw.message_id, + ...it + }) + ) + .sort((a, b) => a.created.getTime() - b.created.getTime()) message = { id: String(raw.message_id) as MessageID, @@ -280,19 +280,6 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): } } - let messageGroup: MessagesGroup | undefined - - if (raw.message_group_blob_id != null && raw.message_group_from_date != null && raw.message_group_to_date != null) { - messageGroup = { - cardId: card, - blobId: raw.message_group_blob_id, - fromDate: new Date(raw.message_group_from_date), - toDate: new Date(raw.message_group_to_date), - count: raw.message_group_count ?? 0, - patches - } - } - return { id: String(raw.id) as NotificationID, cardId: card, @@ -303,8 +290,9 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): messageCreated: new Date(raw.message_created), created, contextId: String(id) as ContextID, - messageGroup, - content: raw.content + content: raw.content, + blobId: raw.blob_id ?? 
undefined, + patches } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index dc1646256c7..2b733e4f400 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -215,7 +215,7 @@ export class NotificationsDb extends BaseDb { card: CardID, lastUpdate: Date, lastView: Date, - lastNotify?: Date + lastNotify: Date ): Promise { const db: ContextDb = { workspace_id: this.workspace, @@ -286,83 +286,7 @@ export class NotificationsDb extends BaseDb { const orderBy = params.order != null ? `ORDER BY nc.last_notify ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' - let joinMessages = '' - let buildNotificationObject = ` - JSONB_BUILD_OBJECT( - 'id', n.id::text, - 'read', n.read, - 'created', n.created, - 'type', n.type, - 'content', n.content, - 'message_id', n.message_id::text, - 'message_created', n.message_created - )` - - if (withMessages) { - joinMessages = ` - LEFT JOIN ${TableName.Message} m - ON nc.workspace_id = m.workspace_id - AND nc.card_id = m.card_id - AND n.message_id = m.id - LEFT JOIN ${TableName.MessagesGroup} mg - ON nc.workspace_id = mg.workspace_id - AND nc.card_id = mg.card_id - AND n.message_created BETWEEN mg.from_date AND mg.to_date` - - buildNotificationObject = ` - JSONB_BUILD_OBJECT( - 'id', n.id::text, - 'read', n.read, - 'type', n.type, - 'content', n.content, - 'created', n.created, - 'message_created', n.message_created, - 'message_id', n.message_id::text, - 'message_type', m.type, - 'message_content', m.content, - 'message_data', m.data, - 'message_creator', m.creator, - 'message_group_blob_id', mg.blob_id, - 'message_group_from_date', mg.from_date, - 'message_group_to_date', mg.to_date, - 'message_group_count', mg.count, - 'message_patches', ( - SELECT COALESCE( - JSON_AGG( - JSONB_BUILD_OBJECT( - 'type', p.type, - 'data', p.data, - 'creator', p.creator, - 'created', p.created - ) ORDER BY p.created DESC - ), - '[]'::JSONB - ) - FROM ${TableName.Patch} p - WHERE p.workspace_id = nc.workspace_id AND p.card_id = nc.card_id AND p.message_id = n.message_id - ), - 'message_files', ( - SELECT COALESCE( - JSON_AGG( - JSONB_BUILD_OBJECT( - 'blob_id', f.blob_id, - 'type', f.type, - 'size', f.size, - 'filename', f.filename, - 'meta', f.meta, - 'creator', f.creator, - 'created', f.created - ) ORDER BY f.created ASC - ), - '[]'::JSONB - ) - FROM ${TableName.File} f - WHERE f.workspace_id = nc.workspace_id AND f.card_id = nc.card_id AND f.message_id = n.message_id - ) - )` - } - - let joinNotifications = '' + let notificationsJoin = '' let notificationsSelect = '' let groupBy = '' @@ -372,52 +296,115 @@ export class NotificationsDb extends BaseDb { values.length, true ) - values.push(...valuesNotifications) - joinNotifications = ` + const notificationLimit = params.notifications?.limit ?? 10 + const notificationOrder = params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC' + + notificationsJoin = ` LEFT JOIN LATERAL ( - SELECT - n.*, - ROW_NUMBER() OVER ( - PARTITION BY n.context_id - ORDER BY n.created ${params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC'} - ) AS rn + SELECT * FROM ${TableName.Notification} n - ${whereNotifications} ${whereNotifications.length > 1 ? 'AND n.context_id = nc.id' : 'WHERE n.context_id = nc.id'} - ) n ON n.rn <= ${params.notifications?.limit ?? 1}` + ${whereNotifications} ${whereNotifications.length > 1 ? 
'AND' : 'WHERE'} n.context_id = nc.id + ORDER BY n.created ${notificationOrder} + LIMIT ${notificationLimit} + ) n ON TRUE + + ${ + withMessages + ? ` + LEFT JOIN ${TableName.Message} m + ON m.workspace_id = nc.workspace_id + AND m.card_id = nc.card_id + AND m.id = n.message_id + AND n.message_id IS NOT NULL + AND n.blob_id IS NULL` + : '' + } + + LEFT JOIN LATERAL ( + SELECT COALESCE( + JSON_AGG( + JSONB_BUILD_OBJECT( + 'type', p.type, + 'data', p.data, + 'creator', p.creator, + 'created', p.created + ) ORDER BY p.created DESC + ), '[]'::JSONB + ) AS patches + FROM ${TableName.Patch} p + WHERE p.workspace_id = nc.workspace_id AND p.card_id = nc.card_id AND p.message_id = n.message_id + ) p ON TRUE + + LEFT JOIN LATERAL ( + SELECT COALESCE( + JSON_AGG( + JSONB_BUILD_OBJECT( + 'blob_id', f.blob_id, + 'type', f.type, + 'size', f.size, + 'filename', f.filename, + 'meta', f.meta, + 'creator', f.creator, + 'created', f.created + ) ORDER BY f.created ASC + ), '[]'::JSONB + ) AS files + FROM ${TableName.File} f + WHERE f.workspace_id = nc.workspace_id AND f.card_id = nc.card_id AND f.message_id = n.message_id + ) f ON TRUE + ` notificationsSelect = `, COALESCE( JSON_AGG( - ${buildNotificationObject} - ORDER BY n.created ${params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC'} - ), - '[]'::JSONB + JSONB_BUILD_OBJECT( + 'id', n.id::text, + 'read', n.read, + 'type', n.type, + 'content', n.content, + 'created', n.created, + 'message_created', n.message_created, + 'message_id', n.message_id::text, + ${ + withMessages + ? ` + 'message_type', m.type, + 'message_content', m.content, + 'message_data', m.data, + 'message_creator', m.creator,` + : '' + } + 'blob_id', n.blob_id, + 'patches', p.patches, + 'files', f.files + ) + ORDER BY n.created ${notificationOrder} + ), '[]'::JSONB ) AS notifications` - groupBy = 'GROUP BY nc.id' + groupBy = ` + GROUP BY nc.id, nc.card_id, nc.account, nc.last_view, nc.last_update, nc.last_notify + ` } const sql = ` - SELECT nc.id::text, - nc.card_id, - nc.account, - nc.last_view, - nc.last_update, - nc.last_notify - ${notificationsSelect} - FROM ${TableName.NotificationContext} nc - ${joinNotifications} - ${joinMessages} - ${where} - ${groupBy} - ${orderBy} - ${limit}; + SELECT nc.id::text, + nc.card_id, + nc.account, + nc.last_view, + nc.last_update, + nc.last_notify + ${notificationsSelect} + FROM ${TableName.NotificationContext} nc + ${notificationsJoin} ${where} + ${groupBy} + ${orderBy} + ${limit}; ` const result = await this.execute(sql, values, 'find contexts') - return result.map((it: any) => toNotificationContext(it)) } @@ -435,10 +422,6 @@ export class NotificationsDb extends BaseDb { m.content AS message_content, m.creator AS message_creator, m.data AS message_data, - mg.blob_id AS message_group_blob_id, - mg.from_date AS message_group_from_date, - mg.to_date AS message_group_to_date, - mg.count AS message_group_count, (SELECT json_agg( jsonb_build_object( 'type', p.type, @@ -467,11 +450,7 @@ export class NotificationsDb extends BaseDb { LEFT JOIN ${TableName.Message} m ON nc.workspace_id = m.workspace_id AND nc.card_id = m.card_id - AND n.message_id = m.id - LEFT JOIN ${TableName.MessagesGroup} mg - ON nc.workspace_id = mg.workspace_id - AND nc.card_id = mg.card_id - AND n.message_created BETWEEN mg.from_date AND mg.to_date ` + AND n.message_id = m.id ` } select += ` FROM ${TableName.Notification} n @@ -593,7 +572,7 @@ export class NotificationsDb extends BaseDb { } } - const lastUpdateCondition = getCondition('nc', 'last_update', index, 
params.lastUpdate, 'timestamptz') + const lastUpdateCondition = getCondition('nc', 'last_notify', index, params.lastNotify, 'timestamptz') if (lastUpdateCondition != null) { where.push(lastUpdateCondition.where) @@ -664,4 +643,33 @@ export class NotificationsDb extends BaseDb { return { where: where.length > 0 ? `WHERE ${where.join(' AND ')}` : '', values } } + + public async updateNotificationsBlobId (cardId: CardID, blobId: string, from: Date, to: Date): Promise { + const sql = ` + UPDATE ${TableName.Notification} AS n + SET blob_id = $3::uuid + FROM ${TableName.NotificationContext} AS nc + WHERE + n.context_id = nc.id + AND nc.workspace_id = $1::uuid + AND nc.card_id = $2::uuid + AND n.message_created BETWEEN $4::timestamptz AND $5::timestamptz + AND n.blob_id IS NULL + ` + await this.execute(sql, [this.workspace, cardId, blobId, from, to]) + } + + public async removeNotificationsBlobId (cardId: CardID, blobId: string): Promise { + const sql = ` + UPDATE ${TableName.Notification} AS n + SET blob_id = NULL + FROM ${TableName.NotificationContext} AS nc + WHERE + n.context_id = nc.id + AND nc.workspace_id = $1::uuid + AND nc.card_id = $2::uuid + AND n.blob_id = $3::uuid; + ` + await this.execute(sql, [this.workspace, cardId, blobId]) + } } diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index 287a7bb7452..20e89bc88d3 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -124,7 +124,10 @@ function getMigrations (): [string, string][] { migrationV6_5(), migrationV6_6(), migrationV6_7(), - migrationV6_8() + migrationV6_8(), + migrationV7_1(), + migrationV7_2(), + migrationV7_3() ] } @@ -604,3 +607,39 @@ function migrationV6_8 (): [string, string] { ` return ['drop_old_message_id_columns-v6_8', sql] } + +function migrationV7_1 (): [string, string] { + const sql = ` + ALTER TABLE communication.notifications + ADD COLUMN IF NOT EXISTS blob_id UUID; + ` + return ['add_blobId_to_notifications-v7_1', sql] +} + +function migrationV7_2 (): [string, string] { + const sql = ` + UPDATE communication.notifications AS n + SET blob_id = mg.blob_id + FROM communication.notification_context AS nc + JOIN communication.messages_groups AS mg + ON mg.workspace_id = nc.workspace_id + AND mg.card_id = nc.card_id + WHERE + n.context_id = nc.id + AND n.message_created BETWEEN mg.from_date AND mg.to_date + AND n.blob_id IS NULL; + ` + return ['fill_blobId_on_notifications-v7_2', sql] +} + +function migrationV7_3 (): [string, string] { + const sql = ` + UPDATE communication.notification_context + SET last_notify = last_update + WHERE last_notify IS NULL; + + ALTER TABLE communication.notification_context + ALTER COLUMN last_notify SET NOT NULL; + ` + return ['make_last_notify_not_null-v7_3', sql] +} diff --git a/packages/cockroach/src/schema.ts b/packages/cockroach/src/schema.ts index 9cc01a6a88f..626e26f7d3f 100644 --- a/packages/cockroach/src/schema.ts +++ b/packages/cockroach/src/schema.ts @@ -145,6 +145,7 @@ export interface NotificationDb { read: boolean message_id: MessageID | null message_created: Date + blob_id?: BlobID context_id: ContextID created: Date content: NotificationContent @@ -156,7 +157,7 @@ export interface ContextDb { account: AccountID last_update: Date last_view: Date - last_notify?: Date + last_notify: Date } export interface CollaboratorDb { diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 45fcbb98a7b..891820cd693 100644 --- a/packages/query/src/messages/query.ts +++ 
b/packages/query/src/messages/query.ts @@ -633,7 +633,7 @@ export class MessagesQuery implements PagedQuery { } private async loadMessagesFromFiles (group: MessagesGroup): Promise { - const parsedFile = await loadGroupFile(this.workspace, this.filesUrl, group, { retries: 5 }) + const parsedFile = await loadGroupFile(this.workspace, this.filesUrl, group.blobId, { retries: 5 }) const patches = group.patches ?? [] const patchesMap = new Map() diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index f7bce2ebf86..90f3bb05b8d 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -14,6 +14,7 @@ // import { + BlobID, type CardID, type FindNotificationContextParams, FindNotificationsParams, @@ -22,6 +23,7 @@ import { type Notification, type NotificationContext, NotificationType, + ParsedFile, PatchType, SortingOrder, type WorkspaceID @@ -88,8 +90,8 @@ export class NotificationContextsQuery implements PagedQuery { const allLoaded = limit == null || res.length < limit - const isTail = allLoaded || (params.lastUpdate == null && params.order === SortingOrder.Descending) - const isHead = allLoaded || (params.lastUpdate == null && params.order === SortingOrder.Ascending) + const isTail = allLoaded || (params.lastNotify == null && params.order === SortingOrder.Descending) + const isHead = allLoaded || (params.lastNotify == null && params.order === SortingOrder.Ascending) if (limit != null && res.length >= limit) { res.pop() @@ -170,8 +172,8 @@ export class NotificationContextsQuery implements PagedQuery { - const contexts = await this.client.findNotificationContexts(params, this.id) - if (params.notifications?.message !== true) return contexts - - await Promise.all( + async loadGroups (contexts: NotificationContext[]): Promise { + const cache = new Map>() + const newContexts = await Promise.all( contexts.map(async (context) => { const notifications = context.notifications ?? 
[] context.notifications = await Promise.all( notifications.map(async (notification) => { - if (notification.message != null || notification.messageId == null) return notification - + if (notification.message != null || notification.messageId == null || notification.blobId == null) { + return notification + } + const { blobId } = notification const message = await loadMessageFromGroup( notification.messageId, this.workspace, this.filesUrl, - notification.messageGroup, - notification.patches + blobId, + notification.patches, + cache ) if (message !== undefined) { return { @@ -288,8 +291,15 @@ export class NotificationContextsQuery implements PagedQuery { + const contexts = await this.client.findNotificationContexts(params, this.id) + if (params.notifications?.message !== true) return contexts - return contexts + return await this.loadGroups(contexts) } private async onCreateNotificationContextEvent (event: CreateNotificationContextEvent): Promise { @@ -476,9 +486,9 @@ export class NotificationContextsQuery implements PagedQuery { const isTail = - res.length <= limit || (this.params.order === SortingOrder.Descending && this.params.lastUpdate == null) + res.length <= limit || (this.params.order === SortingOrder.Descending && this.params.lastNotify == null) const isHead = - res.length <= limit || (this.params.order === SortingOrder.Ascending && this.params.lastUpdate == null) + res.length <= limit || (this.params.order === SortingOrder.Ascending && this.params.lastNotify == null) if (res.length > limit) { res.pop() } @@ -617,37 +627,37 @@ export class NotificationContextsQuery implements PagedQuery= this.params.lastUpdate.less + 'less' in this.params.lastNotify && + this.params.lastNotify.less != null && + (context.lastNotify?.getTime() ?? 0) >= this.params.lastNotify.less.getTime() ) { return false } if ( - 'greaterOrEqual' in this.params.lastUpdate && - this.params.lastUpdate.greaterOrEqual != null && - context.lastUpdate < this.params.lastUpdate.greaterOrEqual + 'greaterOrEqual' in this.params.lastNotify && + this.params.lastNotify.greaterOrEqual != null && + (context.lastNotify?.getTime() ?? 0) < this.params.lastNotify.greaterOrEqual.getTime() ) { return false } if ( - 'lessOrEqual' in this.params.lastUpdate && - this.params.lastUpdate.lessOrEqual != null && - context.lastUpdate > this.params.lastUpdate.lessOrEqual + 'lessOrEqual' in this.params.lastNotify && + this.params.lastNotify.lessOrEqual != null && + (context.lastNotify?.getTime() ?? 0) > this.params.lastNotify.lessOrEqual.getTime() ) { return false } - if (this.params.lastUpdate instanceof Date && this.params.lastUpdate !== context.lastUpdate) { + if (this.params.lastNotify instanceof Date && this.params.lastNotify !== context.lastNotify) { return false } } diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index f8523c89354..fb6df5cb53d 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -195,12 +195,12 @@ export class NotificationQuery implements PagedQuery { - if (notification.message != null) return notification + if (notification.message != null || notification.blobId == null) return notification const message = await loadMessageFromGroup( notification.messageId, this.workspace, this.filesUrl, - notification.messageGroup, + notification.blobId, notification.patches ) return message != null ? 
{ ...notification, message } : notification diff --git a/packages/query/src/utils.ts b/packages/query/src/utils.ts index 2da02b159a1..9bfcc774807 100644 --- a/packages/query/src/utils.ts +++ b/packages/query/src/utils.ts @@ -15,12 +15,13 @@ import { applyPatches } from '@hcengineering/communication-shared' import { + BlobID, type CardID, type FindNotificationsParams, type Message, type MessageID, - type MessagesGroup, type Notification, + ParsedFile, type Patch, SortingOrder, type WorkspaceID @@ -74,7 +75,7 @@ export async function findMessageInFiles ( } try { - const parsedFile = await loadGroupFile(workspace, filesUrl, group, { retries: 3 }) + const parsedFile = await loadGroupFile(workspace, filesUrl, group.blobId, { retries: 3 }) const messageFromFile = parsedFile.messages.find((it) => it.id === id) if (messageFromFile === undefined) { return undefined @@ -93,12 +94,23 @@ export async function loadMessageFromGroup ( id: MessageID, workspace: WorkspaceID, filesUrl: string, - group?: MessagesGroup, - patches: Patch[] = [] + blobId: BlobID, + patches: Patch[] = [], + cache?: Map> ): Promise { - if (group == null) return + if (cache != null && cache.has(blobId)) { + const parsedFile = await cache.get(blobId) + if (parsedFile == null) return + const message = parsedFile.messages.find((it) => it.id === id) + if (message == null) return + return applyPatches(message, patches) + } - const parsedFile = await loadGroupFile(workspace, filesUrl, group, { retries: 5 }) + const parsedFilePromise = loadGroupFile(workspace, filesUrl, blobId, { retries: 3 }) + if (cache != null) { + cache.set(blobId, parsedFilePromise) + } + const parsedFile = await parsedFilePromise const message = parsedFile.messages.find((it) => it.id === id) if (message == null) return diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 820ee92746a..bef3cefafcc 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -114,13 +114,15 @@ export interface DbAdapter { ) => Promise updateNotification: (context: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise removeNotifications: (contextId: ContextID, account: AccountID, ids: NotificationID[]) => Promise + removeNotificationsBlobId: (cardId: CardID, blobId: string) => Promise + updateNotificationsBlobId: (cardId: CardID, blobId: string, from: Date, to: Date) => Promise createContext: ( account: AccountID, cardId: CardID, lastUpdate: Date, lastView: Date, - lastNotify?: Date + lastNotify: Date ) => Promise updateContext: (contextId: ContextID, account: AccountID, updates: NotificationContextUpdates) => Promise removeContext: (id: ContextID, account: AccountID) => Promise diff --git a/packages/sdk-types/src/events/notification.ts b/packages/sdk-types/src/events/notification.ts index e66a570f18c..e0130a6bfe9 100644 --- a/packages/sdk-types/src/events/notification.ts +++ b/packages/sdk-types/src/events/notification.ts @@ -22,7 +22,8 @@ import { NotificationType, NotificationContent, NotificationID, - SocialID + SocialID, + BlobID } from '@hcengineering/communication-types' import type { BaseEvent } from './common' @@ -59,6 +60,7 @@ export interface CreateNotificationEvent extends BaseEvent { contextId: ContextID messageId: MessageID messageCreated: Date + blobId?: BlobID account: AccountID date?: Date @@ -97,7 +99,7 @@ export interface CreateNotificationContextEvent extends BaseEvent { lastView: Date lastUpdate: Date - lastNotify?: Date + lastNotify: Date date?: Date } diff --git 
a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index f734275c54d..c475e82681d 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -226,7 +226,7 @@ const FindMessagesGroupsParamsSchema = FindParamsSchema.extend({ const FindNotificationContextParamsSchema = FindParamsSchema.extend({ id: ContextIDSchema.optional(), card: z.union([CardIDSchema, z.array(CardIDSchema)]).optional(), - lastUpdate: dateOrRecordSchema.optional(), + lastNotify: dateOrRecordSchema.optional(), account: z.union([AccountIDSchema, z.array(AccountIDSchema)]).optional(), notifications: z .object({ diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts index 2b129ed6445..f02620d9de7 100644 --- a/packages/server/src/notification/notification.ts +++ b/packages/server/src/notification/notification.ts @@ -90,10 +90,10 @@ async function removeReactionNotification ( socialId: SocialID ): Promise { const result: Event[] = [] - const msg = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) - if (msg === undefined) return result + const { message } = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) + if (message === undefined) return result - const messageAccount = await findAccount(ctx, msg.creator) + const messageAccount = await findAccount(ctx, message.creator) if (messageAccount == null) return result const notifications = await ctx.db.findNotifications({ @@ -155,7 +155,7 @@ async function notifyReaction ( ): Promise { const result: Event[] = [] - const message = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) + const { message, blobId } = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, cardId, messageId) if (message == null) return result const messageAccount = await findAccount(ctx, message.creator) @@ -168,7 +168,7 @@ async function notifyReaction ( let contextId: ContextID | undefined = context?.id if (context == null) { - contextId = await createContext(ctx, messageAccount, cardId, new Date(), new Date()) + contextId = await createContext(ctx, messageAccount, cardId, date, undefined, date) } if (contextId == null) return result @@ -188,12 +188,13 @@ async function notifyReaction ( contextId, messageId, messageCreated: message.created, + blobId, date, content, read: false }) - if ((context?.lastNotify?.getTime() ?? 0) < date.getTime()) { + if ((context?.lastNotify?.getTime() ?? date.getTime()) < date.getTime()) { result.push({ type: NotificationEventType.UpdateNotificationContext, contextId, @@ -324,14 +325,7 @@ async function createOrUpdateContext ( events: Event[] }> { if (context == null) { - const contextId = await createContext( - ctx, - collaborator, - cardId, - date, - isOwn ? date : undefined, - isOwn ? undefined : date - ) + const contextId = await createContext(ctx, collaborator, cardId, date, isOwn ? 
date : undefined, date) return { contextId, @@ -365,8 +359,8 @@ async function createContext ( account: AccountID, cardId: CardID, lastUpdate: Date, - lastView?: Date, - lastNotify?: Date + lastView: Date | undefined, + lastNotify: Date ): Promise { try { const result = (await ctx.execute({ diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index c9f5e3f8c82..11299e31e2f 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -23,7 +23,7 @@ import { RemovePatchEvent, ThreadPatchEvent } from '@hcengineering/communication-sdk-types' -import { type CardID, type Message, MessageType } from '@hcengineering/communication-types' +import { type CardID, MessageType } from '@hcengineering/communication-types' import { generateToken } from '@hcengineering/server-token' import { type AccountUuid, concatLink, systemAccountUuid } from '@hcengineering/core' import { extractReferences } from '@hcengineering/text-core' @@ -151,16 +151,9 @@ async function addThreadReply (ctx: TriggerCtx, event: Enriched): Promise { if (event.operation.opcode !== 'attach') return [] - const message: Message | undefined = await findMessage( - ctx.db, - ctx.metadata.filesUrl, - ctx.workspace, - event.cardId, - event.messageId, - { - files: true - } - ) + const { message } = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, event.cardId, event.messageId, { + files: true + }) if (message === undefined) return [] diff --git a/packages/server/src/triggers/notification.ts b/packages/server/src/triggers/notification.ts index 46091f75b14..f4e0ba0f800 100644 --- a/packages/server/src/triggers/notification.ts +++ b/packages/server/src/triggers/notification.ts @@ -22,7 +22,9 @@ import { UpdateNotificationContextEvent, RemoveNotificationContextEvent, RemovePatchEvent, - RemoveCollaboratorsEvent + RemoveCollaboratorsEvent, + RemoveMessagesGroupEvent, + CreateMessagesGroupEvent } from '@hcengineering/communication-sdk-types' import { type ActivityCollaboratorsUpdate, @@ -202,6 +204,19 @@ async function onMessagesRemoved (ctx: TriggerCtx, event: RemovePatchEvent): Pro return result } +async function onMessagesGroupCreated (ctx: TriggerCtx, event: CreateMessagesGroupEvent): Promise { + const { group } = event + await ctx.db.updateNotificationsBlobId(group.cardId, group.blobId, group.fromDate, group.toDate) + + return [] +} + +async function onMessagesGroupRemoved (ctx: TriggerCtx, event: RemoveMessagesGroupEvent): Promise { + await ctx.db.removeNotificationsBlobId(event.cardId, event.blobId) + + return [] +} + const triggers: Triggers = [ [ 'on_notification_context_updated', @@ -215,7 +230,17 @@ const triggers: Triggers = [ ], ['on_added_collaborators', NotificationEventType.AddCollaborators, onAddedCollaborators as TriggerFn], ['on_removed_collaborators', NotificationEventType.RemoveCollaborators, onRemovedCollaborators as TriggerFn], - ['remove_notifications_on_messages_removed', MessageEventType.RemovePatch, onMessagesRemoved as TriggerFn] + ['remove_notifications_on_messages_removed', MessageEventType.RemovePatch, onMessagesRemoved as TriggerFn], + [ + 'update_notifications_on_messages_group_created', + MessageEventType.CreateMessagesGroup, + onMessagesGroupCreated as TriggerFn + ], + [ + 'update_notifications_on_messages_group_removed', + MessageEventType.RemoveMessagesGroup, + onMessagesGroupRemoved as TriggerFn + ] ] export default triggers diff --git a/packages/server/src/triggers/utils.ts b/packages/server/src/triggers/utils.ts 
index 720d8dd52ac..219ef6175fa 100644 --- a/packages/server/src/triggers/utils.ts +++ b/packages/server/src/triggers/utils.ts @@ -21,7 +21,8 @@ import { type Markdown, type SocialID, SortingOrder, - type WorkspaceID + type WorkspaceID, + BlobID } from '@hcengineering/communication-types' import { loadGroupFile } from '@hcengineering/communication-yaml' import { applyPatches } from '@hcengineering/communication-shared' @@ -42,10 +43,13 @@ export async function findMessage ( links?: boolean reactions?: boolean } -): Promise { +): Promise<{ + message?: Message + blobId?: BlobID + }> { const message = (await db.findMessages({ card, id, limit: 1, ...ops }))[0] if (message !== undefined) { - return message + return { message } } return await findMessageInFiles(db, filesUrl, workspace, card, id) } @@ -56,14 +60,17 @@ export async function findMessageInFiles ( workspace: WorkspaceID, cardId: CardID, messageId: MessageID -): Promise { +): Promise<{ + message?: Message + blobId?: BlobID + }> { if (filesUrl === '') { - return undefined + return {} } const created = await db.getMessageCreated(cardId, messageId) - if (created == null) return undefined + if (created == null) return {} const group = ( await db.findMessagesGroups({ card: cardId, @@ -76,23 +83,26 @@ export async function findMessageInFiles ( )[0] if (group === undefined) { - return undefined + return {} } try { - const parsedFile = await loadGroupFile(workspace, filesUrl, group, { retries: 3 }) + const parsedFile = await loadGroupFile(workspace, filesUrl, group.blobId, { retries: 3 }) const messageFromFile = parsedFile.messages.find((it) => it.id === messageId) if (messageFromFile === undefined) { - return undefined + return {} } const patches = (group.patches ?? []).filter((it) => it.messageId === messageId) + const message = patches.length > 0 ? applyPatches(messageFromFile, patches) : messageFromFile - return patches.length > 0 ? applyPatches(messageFromFile, patches) : messageFromFile + return { message, blobId: group.blobId } } catch (e) { console.error('Failed to find message in files', { card: cardId, id: messageId, created }) console.error('Error:', { error: e }) } + + return {} } export async function getNameBySocialID (ctx: TriggerCtx, id: SocialID): Promise { diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index a13a86904c5..48455802735 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -13,8 +13,8 @@ // limitations under the License. 
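// The refactor above changes findMessage / findMessageInFiles to return a { message, blobId }
// pair instead of a bare Message: blobId is only set when the message had to be restored from
// an archived group file, and that id is what the notification triggers keep so the client can
// re-read the file later. A minimal caller sketch, assuming it sits next to findMessage in
// triggers/utils.ts and that DbAdapter is re-exported from @hcengineering/communication-sdk-types;
// describeMessageSource itself is illustrative and not part of the patch.
import type { DbAdapter } from '@hcengineering/communication-sdk-types'
import type { BlobID, CardID, Message, MessageID, WorkspaceID } from '@hcengineering/communication-types'

async function describeMessageSource (
  db: DbAdapter,
  filesUrl: string,
  workspace: WorkspaceID,
  cardId: CardID,
  messageId: MessageID
): Promise<{ message: Message, blobId?: BlobID } | undefined> {
  // findMessage checks the live tables first and only then falls back to the archived group files
  const { message, blobId } = await findMessage(db, filesUrl, workspace, cardId, messageId)
  if (message === undefined) return undefined
  // blobId !== undefined means the message is archived, so callers (e.g. notifications) keep it
  return { message, blobId }
}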
// -import type { AccountID, CardID, CardType, ID, SocialID } from './core' -import type { Message, MessageID, MessagesGroup, Patch } from './message' +import type { AccountID, BlobID, CardID, CardType, ID, SocialID } from './core' +import type { Message, MessageID, Patch } from './message' export type ContextID = ID & { context: true } export type NotificationID = ID & { notification: true } @@ -38,7 +38,7 @@ export interface Notification { messageCreated: Date message?: Message - messageGroup?: MessagesGroup + blobId?: BlobID patches?: Patch[] } diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index a8acc304ee7..cd80a3320b1 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -63,7 +63,7 @@ export interface FindMessagesGroupsParams extends FindParams { export interface FindNotificationContextParams extends FindParams { id?: ContextID card?: CardID | CardID[] - lastUpdate?: Partial> | Date + lastNotify?: Partial> | Date account?: AccountID | AccountID[] notifications?: { type?: NotificationType diff --git a/packages/yaml/src/parse.ts b/packages/yaml/src/parse.ts index d6e92f50a80..d72fb250ae5 100644 --- a/packages/yaml/src/parse.ts +++ b/packages/yaml/src/parse.ts @@ -14,22 +14,16 @@ // import { retry, type RetryOptions } from '@hcengineering/communication-shared' -import type { - FileMessage, - FileMetadata, - MessagesGroup, - ParsedFile, - WorkspaceID -} from '@hcengineering/communication-types' +import type { BlobID, FileMessage, FileMetadata, ParsedFile, WorkspaceID } from '@hcengineering/communication-types' import yaml from 'js-yaml' export async function loadGroupFile ( workspace: WorkspaceID, filesUrl: string, - group: MessagesGroup, + blobId: BlobID, options: RetryOptions ): Promise { - const url = getFileUrl(workspace, filesUrl, group.blobId) + const url = getFileUrl(workspace, filesUrl, blobId) const file = await retry(() => fetchFile(url), options) return parseYaml(file) diff --git a/packages/yaml/src/utils.ts b/packages/yaml/src/utils.ts index ab9a7ce6cd4..dd99a3f2703 100644 --- a/packages/yaml/src/utils.ts +++ b/packages/yaml/src/utils.ts @@ -46,7 +46,7 @@ export async function findMessage ( const group = await findGroupByDate(client, card, created) if (group === undefined) return undefined - const parsedFile = await loadGroupFile(workspace, filesUrl, group, { retries: 5 }) + const parsedFile = await loadGroupFile(workspace, filesUrl, group.blobId, { retries: 5 }) return parsedFile.messages.find((it) => it.id === id) } From 317a5cd35bdb3f70faf00af6b597dd38e91d063f Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 7 Jul 2025 17:34:11 +0400 Subject: [PATCH 139/636] Fix validation (#83) Signed-off-by: Kristina Fefelova --- packages/server/src/middleware/validate.ts | 57 +++++++++------------- 1 file changed, 23 insertions(+), 34 deletions(-) diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index c475e82681d..dfa197b5c36 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -42,13 +42,14 @@ import { BaseMiddleware } from './base' import { ApiError } from '../error' export class ValidateMiddleware extends BaseMiddleware implements Middleware { - private validate (data: any, schema: z.ZodObject): void { + private validate(data: unknown, schema: z.ZodType): T { const validationResult = schema.safeParse(data) if (!validationResult.success) { const errors = validationResult.error.errors.map((err) => err.message) - 
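// The new validate helper relies on zod's safeParse and a generic schema type so that callers
// get back data already typed by the schema instead of re-casting the raw payload. A standalone
// sketch of the same idea; validateOrThrow, BadRequestError and ExampleParamsSchema are
// illustrative names, not part of the patch.
import { z } from 'zod'

class BadRequestError extends Error {}

function validateOrThrow<T> (data: unknown, schema: z.ZodType<T>): T {
  const result = schema.safeParse(data)
  if (!result.success) {
    // collapse all issues into one client-friendly message, as the middleware does
    throw new BadRequestError(result.error.errors.map((err) => err.message).join(', '))
  }
  // result.data is narrowed to T by the schema, including coercions such as z.coerce.date()
  return result.data
}

// e.g. a coerced date arrives as a string over the wire and comes out as a Date instance
const ExampleParamsSchema = z.object({ created: z.coerce.date().optional() }).strict()
const params = validateOrThrow({ created: '2025-07-07T12:00:00.000Z' }, ExampleParamsSchema)
console.log(params.created instanceof Date) // true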
this.context.ctx.error(validationResult.error.message, data) + this.context.ctx.error(validationResult.error.message, data as any) throw ApiError.badRequest(errors.join(', ')) } + return validationResult.data } async findMessages (session: SessionData, params: FindMessagesParams, queryId?: QueryId): Promise { @@ -148,7 +149,7 @@ const BlobIDSchema = z.string() const CardIDSchema = z.string() const CardTypeSchema = z.string() const ContextIDSchema = z.string() -const DateSchema = z.union([z.date(), z.string()]) +const DateSchema = z.coerce.date() const LabelIDSchema = z.string() const LinkPreviewIDSchema = z.string() const MarkdownSchema = z.string() @@ -195,7 +196,7 @@ const LinkPreviewDataSchema = z .strict() // Find params -const dateOrRecordSchema = z.union([DateSchema, z.record(DateSchema)]) +const DateOrRecordSchema = z.union([DateSchema, z.record(DateSchema)]) const FindParamsSchema = z .object({ @@ -211,22 +212,22 @@ const FindMessagesParamsSchema = FindParamsSchema.extend({ reactions: z.boolean().optional(), replies: z.boolean().optional(), links: z.boolean().optional(), - created: dateOrRecordSchema.optional() + created: DateOrRecordSchema.optional() }).strict() const FindMessagesGroupsParamsSchema = FindParamsSchema.extend({ card: CardIDSchema.optional(), blobId: BlobIDSchema.optional(), patches: z.boolean().optional(), - fromDate: dateOrRecordSchema.optional(), - toDate: dateOrRecordSchema.optional(), + fromDate: DateOrRecordSchema.optional(), + toDate: DateOrRecordSchema.optional(), orderBy: z.enum(['fromDate', 'toDate']).optional() }).strict() const FindNotificationContextParamsSchema = FindParamsSchema.extend({ id: ContextIDSchema.optional(), card: z.union([CardIDSchema, z.array(CardIDSchema)]).optional(), - lastNotify: dateOrRecordSchema.optional(), + lastNotify: DateOrRecordSchema.optional(), account: z.union([AccountIDSchema, z.array(AccountIDSchema)]).optional(), notifications: z .object({ @@ -243,7 +244,7 @@ const FindNotificationsParamsSchema = FindParamsSchema.extend({ context: ContextIDSchema.optional(), type: z.string().optional(), read: z.boolean().optional(), - created: dateOrRecordSchema.optional(), + created: DateOrRecordSchema.optional(), account: z.union([AccountIDSchema, z.array(AccountIDSchema)]).optional(), message: z.boolean().optional(), card: CardIDSchema.optional() @@ -443,34 +444,22 @@ const RemoveCollaboratorsEventSchema = BaseEventSchema.extend({ function deserializeEvent (event: Enriched): Enriched { switch (event.type) { case MessageEventType.CreateMessagesGroup: - return { - ...event, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - date: deserializeDate(event.date)!, - group: { - ...event.group, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - fromDate: deserializeDate(event.group.fromDate)!, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - toDate: deserializeDate(event.group.toDate)! - } - } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + event.group.fromDate = deserializeDate(event.group.fromDate)! + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + event.group.toDate = deserializeDate(event.group.toDate)! 
+ break case NotificationEventType.UpdateNotificationContext: - return { - ...event, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - date: deserializeDate(event.date)!, - updates: { - ...event.updates, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - lastView: deserializeDate(event.updates.lastView)!, - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - lastUpdate: deserializeDate(event.updates.lastUpdate)! - } - } + event.updates.lastView = deserializeDate(event.updates.lastView) + break + case NotificationEventType.UpdateNotification: + event.query.untilDate = deserializeDate(event.query.untilDate) + break } + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - return { ...event, date: deserializeDate(event.date)! } + event.date = deserializeDate(event.date)! + return event } function deserializeDate (date?: Date | string | undefined | null): Date | undefined { From 41c511fffd14d4d758f63eb125ee8a351afdab27 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 7 Jul 2025 21:25:40 +0400 Subject: [PATCH 140/636] Find message groups by messageId (#84) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/db/mapping.ts | 10 ++- packages/cockroach/src/db/message.ts | 88 ++++++++++++------- .../query/src/notification-contexts/query.ts | 37 ++++---- packages/server/src/middleware/validate.ts | 1 + packages/server/src/utils.ts | 10 +-- packages/types/src/query.ts | 1 + 6 files changed, 91 insertions(+), 56 deletions(-) diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 9ebf1e01d9a..08d725db8ca 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -165,13 +165,21 @@ export function toLinkPreview (raw: LinkPreviewDb): LinkPreview { } export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { + const patches = + raw.patches == null + ? [] + : raw.patches + .filter((it: any) => it.message_id != null) + .map(toPatch) + .sort((a, b) => a.created.getTime() - b.created.getTime()) + return { cardId: raw.card_id, blobId: raw.blob_id, fromDate: raw.from_date, toDate: raw.to_date, count: Number(raw.count), - patches: raw.patches == null ? [] : raw.patches.filter((it: any) => it.message_id != null).map(toPatch) + patches } } diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index eaffbee23d0..aab222c324b 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -1027,33 +1027,53 @@ export class MessagesDb extends BaseDb { // Find messages groups async findMessagesGroups (params: FindMessagesGroupsParams): Promise { + const useMessageIdCte = params.messageId != null + const values: any[] = [this.workspace] + if (useMessageIdCte) values.push(params.messageId) + + const cte = useMessageIdCte + ? 
` + WITH msg_created AS ( + SELECT card_id, created + FROM ${TableName.MessageCreated} + WHERE workspace_id = $1::uuid + AND message_id = $2::varchar + ) + ` + : '' + const select = ` - SELECT mg.card_id, - mg.blob_id, - mg.from_date, - mg.to_date, - mg.count, - patches - FROM ${TableName.MessagesGroup} mg - CROSS JOIN LATERAL ( - SELECT jsonb_agg(jsonb_build_object( - 'message_id', p.message_id::text, - 'type', p.type, - 'data', p.data, - 'creator', p.creator, - 'created', p.created - ) ORDER BY p.created) AS patches - FROM ${TableName.Patch} p - WHERE p.workspace_id = mg.workspace_id - AND p.card_id = mg.card_id - AND p.message_created BETWEEN mg.from_date AND mg.to_date - ) sub` - - const { where, values } = this.buildMessagesGroupWhere(params) + ${cte} + SELECT mg.card_id, + mg.blob_id, + mg.from_date, + mg.to_date, + mg.count, + patches + FROM ${TableName.MessagesGroup} mg + ${useMessageIdCte ? 'JOIN msg_created mc ON mg.card_id = mc.card_id AND mc.created BETWEEN mg.from_date AND mg.to_date' : ''} + CROSS JOIN LATERAL ( + SELECT jsonb_agg(jsonb_build_object( + 'message_id', p.message_id::varchar, + 'type', p.type, + 'data', p.data, + 'creator', p.creator, + 'created', p.created + )) AS patches + FROM ${TableName.Patch} p + WHERE p.workspace_id = mg.workspace_id + AND p.card_id = mg.card_id + AND p.message_created BETWEEN mg.from_date AND mg.to_date + ) sub + ` + + const { where, values: additionalValues } = this.buildMessagesGroupWhere(params, values.length + 1) + values.push(...additionalValues) + const orderBy = params.orderBy === 'toDate' - ? `ORDER BY mg.to_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` - : `ORDER BY mg.from_date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` + ? `ORDER BY mg.to_date ${params.order === SortingOrder.Descending ? 'DESC' : 'ASC'}` + : `ORDER BY mg.from_date ${params.order === SortingOrder.Descending ? 'DESC' : 'ASC'}` const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' const sql = [select, where, orderBy, limit].join(' ') @@ -1062,14 +1082,17 @@ export class MessagesDb extends BaseDb { return result.map((it: any) => toMessagesGroup(it)) } - buildMessagesGroupWhere (params: FindMessagesGroupsParams): { - where: string - values: any[] - } { + buildMessagesGroupWhere ( + params: FindMessagesGroupsParams, + startIndex = 1 + ): { + where: string + values: any[] + } { const where: string[] = ['mg.workspace_id = $1::uuid'] - const values: any[] = [this.workspace] + const values: any[] = [] - let index = 2 + let index = startIndex if (params.card != null) { where.push(`mg.card_id = $${index++}::varchar`) @@ -1099,7 +1122,10 @@ export class MessagesDb extends BaseDb { where.push('sub.patches IS NOT NULL') } - return { where: `WHERE ${where.join(' AND ')}`, values } + return { + where: where.length > 0 ? `WHERE ${where.join(' AND ')}` : '', + values + } } public async isMessageInDb (cardId: CardID, messageId: MessageID): Promise { diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 90f3bb05b8d..000c44b60c7 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -318,7 +318,7 @@ export class NotificationContextsQuery implements PagedQuery - this.params.order === SortingOrder.Descending - ? (b.lastNotify?.getTime() ?? 0) - (a.lastNotify?.getTime() ?? 0) - : (a.lastNotify?.getTime() ?? 0) - (b.lastNotify?.getTime() ?? 
0) - ) + this.sort(this.result) } + void this.notify() } @@ -601,20 +599,29 @@ export class NotificationContextsQuery implements PagedQuery it.read === read) } - private async addContext (context: NotificationContext): Promise { - if (this.result instanceof Promise) this.result = await this.result - if (this.result.get(context.id) !== undefined) return - if (this.result.isTail()) { + private addContext (context: NotificationContext, result: QueryResult): void { + if (result.get(context.id) !== undefined) return + if (result.isTail()) { if (this.params.order === SortingOrder.Ascending) { - this.result.push(context) + result.push(context) } else { - this.result.unshift(context) + result.unshift(context) } } - if (this.params.limit != null && this.result.length > this.params.limit) { - this.result.pop() + if (this.params.limit != null && result.length > this.params.limit) { + result.pop() } + + this.sort(result) + } + + private sort (result: QueryResult): void { + result.sort((a, b) => + this.params.order === SortingOrder.Descending + ? (b.lastNotify?.getTime() ?? 0) - (a.lastNotify?.getTime() ?? 0) + : (a.lastNotify?.getTime() ?? 0) - (b.lastNotify?.getTime() ?? 0) + ) } private match (context: NotificationContext): boolean { diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index dfa197b5c36..54bbbe1e40e 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -216,6 +216,7 @@ const FindMessagesParamsSchema = FindParamsSchema.extend({ }).strict() const FindMessagesGroupsParamsSchema = FindParamsSchema.extend({ + messageId: MessageIDSchema.optional(), card: CardIDSchema.optional(), blobId: BlobIDSchema.optional(), patches: z.boolean().optional(), diff --git a/packages/server/src/utils.ts b/packages/server/src/utils.ts index d019242a64f..e340dc69928 100644 --- a/packages/server/src/utils.ts +++ b/packages/server/src/utils.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import type { AccountID, MessageID, SocialID } from '@hcengineering/communication-types' +import type { AccountID, SocialID } from '@hcengineering/communication-types' import { generateToken } from '@hcengineering/server-token' import { systemAccountUuid } from '@hcengineering/core' import { getClient as getAccountClient } from '@hcengineering/account-client' @@ -47,11 +47,3 @@ export async function findAccount (ctx: TriggerCtx, socialString: SocialID): Pro ctx.ctx.warn('Cannot find account', { socialString, err }) } } - -export function isExternalMessageId (messageId: MessageID): boolean { - return messageId.startsWith('e') -} - -export function parseMessageIdDate (messageId: MessageID): Date | undefined { - return isExternalMessageId(messageId) ? 
undefined : new Date(Number(messageId)) -} diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index cd80a3320b1..5f562fb6ce7 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -52,6 +52,7 @@ export interface FindMessagesParams extends FindParams { } export interface FindMessagesGroupsParams extends FindParams { + messageId?: MessageID card?: CardID blobId?: BlobID patches?: boolean From aeaf67a21218298bfc9d547eeee1b912696233de Mon Sep 17 00:00:00 2001 From: Alexander Onnikov Date: Tue, 8 Jul 2025 20:55:01 +0700 Subject: [PATCH 141/636] fix: set parent for recording blobs Signed-off-by: Alexander Onnikov --- internal/pkg/manifest/hls.go | 19 +++++++++++++++++-- internal/pkg/manifest/hls_test.go | 19 +++++++++++++++++-- internal/pkg/mediaconvert/command.go | 3 ++- internal/pkg/mediaconvert/command_test.go | 6 +++--- internal/pkg/mediaconvert/coordinator.go | 2 ++ internal/pkg/mediaconvert/scheduler.go | 4 ++-- internal/pkg/mediaconvert/stream.go | 4 ++-- internal/pkg/mediaconvert/transcoder.go | 4 ++-- internal/pkg/storage/datalake.go | 5 +++++ 9 files changed, 52 insertions(+), 14 deletions(-) diff --git a/internal/pkg/manifest/hls.go b/internal/pkg/manifest/hls.go index 7b2934ea46d..cc263b5a165 100644 --- a/internal/pkg/manifest/hls.go +++ b/internal/pkg/manifest/hls.go @@ -24,7 +24,7 @@ import ( // GenerateHLSPlaylist generates master file for master files for resolution levels func GenerateHLSPlaylist(profiles []profile.VideoProfile, outputPath, uploadID string) error { - p := filepath.Join(outputPath, uploadID, fmt.Sprintf("%v_master.m3u8", uploadID)) + p := filepath.Join(outputPath, uploadID, MasterPlaylistFileName(uploadID)) d := filepath.Dir(p) _ = os.MkdirAll(d, os.ModePerm) // #nosec @@ -48,7 +48,7 @@ func GenerateHLSPlaylist(profiles []profile.VideoProfile, outputPath, uploadID s return err } - _, err = file.WriteString(fmt.Sprintf("%s_%s_master.m3u8\n", uploadID, profile.Name)) + _, err = file.WriteString(PlaylistFileName(uploadID, profile.Name) + "\n") if err != nil { return err } @@ -56,3 +56,18 @@ func GenerateHLSPlaylist(profiles []profile.VideoProfile, outputPath, uploadID s return nil } + +// MasterPlaylistFileName generates master playlist file name +func MasterPlaylistFileName(source string) string { + return fmt.Sprintf("%s_master.m3u8", source) +} + +// PlaylistFileName generates profile playlist file name +func PlaylistFileName(source, profile string) string { + return fmt.Sprintf("%s_%s.m3u8", source, profile) +} + +// ThumbnailFileName generates thumbnail file name +func ThumbnailFileName(source string) string { + return fmt.Sprintf("%s.jpg", source) +} diff --git a/internal/pkg/manifest/hls_test.go b/internal/pkg/manifest/hls_test.go index 5ad6dd6758f..b4f8d13e584 100644 --- a/internal/pkg/manifest/hls_test.go +++ b/internal/pkg/manifest/hls_test.go @@ -39,7 +39,7 @@ func TestGenerateHLSPlaylist(t *testing.T) { err := manifest.GenerateHLSPlaylist(profiles, "", uploadID) require.NoError(t, err) - outputPath := filepath.Join(uploadID, uploadID+"_master.m3u8") + outputPath := filepath.Join(uploadID, manifest.MasterPlaylistFileName(uploadID)) _, err = os.Stat(outputPath) require.NoError(t, err, "Master playlist file should exist") @@ -53,7 +53,22 @@ func TestGenerateHLSPlaylist(t *testing.T) { require.Contains(t, playlistContent, "#EXTM3U", "File must start with #EXTM3U") for _, prof := range profiles { - expectedLine := uploadID + "_" + prof.Name + "_master.m3u8" + expectedLine := manifest.PlaylistFileName(uploadID, 
prof.Name) require.Contains(t, playlistContent, expectedLine, "Missing expected reference: "+expectedLine) } } + +func TestMasterPlaylistFileName(t *testing.T) { + filename := manifest.MasterPlaylistFileName("example") + require.Equal(t, "example_master.m3u8", filename) +} + +func TestPlaylistFileName(t *testing.T) { + filename := manifest.PlaylistFileName("example", "720p") + require.Equal(t, "example_720p.m3u8", filename) +} + +func TestThumbnailFileName(t *testing.T) { + filename := manifest.ThumbnailFileName("example") + require.Equal(t, "example.jpg", filename) +} diff --git a/internal/pkg/mediaconvert/command.go b/internal/pkg/mediaconvert/command.go index 100eddb956a..7017359282c 100644 --- a/internal/pkg/mediaconvert/command.go +++ b/internal/pkg/mediaconvert/command.go @@ -27,6 +27,7 @@ import ( "github.com/pkg/errors" "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/manifest" "github.com/hcengineering/stream/internal/pkg/profile" "go.uber.org/zap" ) @@ -164,7 +165,7 @@ func BuildVideoCommand(opts *Options) []string { for _, profile := range opts.Profiles { command = append(command, buildVideoCommand(profile)...) command = append(command, buildHLSCommand(profile, opts)...) - command = append(command, filepath.Join(opts.OutputDir, opts.UploadID, fmt.Sprintf("%s_%s_master.m3u8", opts.UploadID, profile.Name))) + command = append(command, filepath.Join(opts.OutputDir, opts.UploadID, manifest.PlaylistFileName(opts.UploadID, profile.Name))) } return command } diff --git a/internal/pkg/mediaconvert/command_test.go b/internal/pkg/mediaconvert/command_test.go index 67beff10b44..c516f89e742 100644 --- a/internal/pkg/mediaconvert/command_test.go +++ b/internal/pkg/mediaconvert/command_test.go @@ -52,7 +52,7 @@ func Test_BuildVideoCommand_Scaling(t *testing.T) { Profiles: profiles, }) - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 25 -g 60 -vf scale=-2:720 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p_master.m3u8 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 27 -g 60 -vf scale=-2:480 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 25 -g 60 -vf scale=-2:720 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_720p.ts test/1/1_720p.m3u8 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 27 -g 60 -vf scale=-2:480 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_480p.ts test/1/1_480p.m3u8` require.Contains(t, expected, strings.Join(scaleCommand, " ")) } @@ -71,7 +71,7 @@ func Test_BuildVideoCommand_Original(t *testing.T) { Profiles: profiles, }) - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? 
-c:a copy -c:v copy -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a copy -c:v copy -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } @@ -90,7 +90,7 @@ func Test_BuildVideoCommand_OriginalT(t *testing.T) { Profiles: profiles, }) - const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig_master.m3u8` + const expected = `-y -v debug -err_detect ignore_err -fflags +discardcorrupt -threads 4 -i pipe:0 -map 0:v:0 -map 0:a? -c:a aac -c:v libx264 -preset veryfast -crf 23 -g 60 -f hls -hls_time 5 -hls_flags split_by_time+temp_file -hls_list_size 0 -hls_segment_filename test/1/1_%03d_orig.ts test/1/1_orig.m3u8` require.Contains(t, expected, strings.Join(rawCommand, " ")) } diff --git a/internal/pkg/mediaconvert/coordinator.go b/internal/pkg/mediaconvert/coordinator.go index 519e0d51ba7..cfad9916d41 100644 --- a/internal/pkg/mediaconvert/coordinator.go +++ b/internal/pkg/mediaconvert/coordinator.go @@ -31,6 +31,7 @@ import ( "github.com/google/uuid" "github.com/hcengineering/stream/internal/pkg/config" "github.com/hcengineering/stream/internal/pkg/log" + "github.com/hcengineering/stream/internal/pkg/manifest" "github.com/hcengineering/stream/internal/pkg/sharedpipe" "github.com/hcengineering/stream/internal/pkg/storage" "github.com/hcengineering/stream/internal/pkg/uploader" @@ -133,6 +134,7 @@ func (s *StreamCoordinator) NewUpload(ctx context.Context, info handler.FileInfo // setup content uploader for transcoded outputs var opts = s.uploadOptions opts.Dir = filepath.Join(opts.Dir, info.ID) + opts.Source = manifest.MasterPlaylistFileName(info.ID) // create storage backend var stg, err = storage.NewStorageByURL(s.mainContext, s.conf.Endpoint(), s.conf.EndpointURL.Scheme, info.MetaData["token"], info.MetaData["workspace"]) diff --git a/internal/pkg/mediaconvert/scheduler.go b/internal/pkg/mediaconvert/scheduler.go index 08e9b18dbce..8235f44b02e 100644 --- a/internal/pkg/mediaconvert/scheduler.go +++ b/internal/pkg/mediaconvert/scheduler.go @@ -267,8 +267,8 @@ func (p *Scheduler) processTask(ctx context.Context, task *Task) { var result = TaskResult{ Width: videoStream.Width, Height: videoStream.Height, - Playlist: task.ID + "_master.m3u8", - Thumbnail: task.ID + ".jpg", + Playlist: manifest.MasterPlaylistFileName(task.ID), + Thumbnail: manifest.ThumbnailFileName(task.ID), } logger.Debug( diff --git a/internal/pkg/mediaconvert/stream.go b/internal/pkg/mediaconvert/stream.go index 135ce989977..95524361c9a 100644 --- a/internal/pkg/mediaconvert/stream.go +++ b/internal/pkg/mediaconvert/stream.go @@ -180,8 +180,8 @@ func (w *Stream) FinishUpload(ctx context.Context) error { w.info.ID, &storage.Metadata{ "hls": map[string]any{ - "source": w.info.ID + "_master.m3u8", - "thumbnail": w.info.ID + ".jpg", + "source": manifest.MasterPlaylistFileName(w.info.ID), + "thumbnail": manifest.ThumbnailFileName(w.info.ID), }, }, ) diff --git 
a/internal/pkg/mediaconvert/transcoder.go b/internal/pkg/mediaconvert/transcoder.go index 4d7231dc939..a3fc1b738d3 100644 --- a/internal/pkg/mediaconvert/transcoder.go +++ b/internal/pkg/mediaconvert/transcoder.go @@ -210,8 +210,8 @@ func (p *Transcoder) Transcode(ctx context.Context, task *Task) (*TaskResult, er var result = TaskResult{ Width: videoStream.Width, Height: videoStream.Height, - Playlist: task.ID + "_master.m3u8", - Thumbnail: task.ID + ".jpg", + Playlist: manifest.MasterPlaylistFileName(task.ID), + Thumbnail: manifest.ThumbnailFileName(task.ID), } if metaProvider, ok := remoteStorage.(storage.MetaProvider); ok { diff --git a/internal/pkg/storage/datalake.go b/internal/pkg/storage/datalake.go index e76e0b1a876..9207a88ed91 100644 --- a/internal/pkg/storage/datalake.go +++ b/internal/pkg/storage/datalake.go @@ -349,6 +349,11 @@ func (d *DatalakeStorage) SetParent(ctx context.Context, filename, parent string var objectKey = getObjectKeyFromPath(filename) var parentKey = getObjectKeyFromPath(parent) + if objectKey == parentKey { + // no need to set parent for itself + return nil + } + req := fasthttp.AcquireRequest() defer fasthttp.ReleaseRequest(req) req.SetRequestURI(d.baseURL + "/blob/" + d.workspace + "/" + objectKey + "/parent") From a3743876d1ea0e800eafc7efcd396fe54f94176e Mon Sep 17 00:00:00 2001 From: Kristina Date: Thu, 10 Jul 2025 16:24:24 +0400 Subject: [PATCH 142/636] Refresh queries (#85) Signed-off-by: Kristina Fefelova --- packages/client-query/src/index.ts | 2 +- packages/client-query/src/init.ts | 6 + packages/query/src/collaborators/query.ts | 25 +-- packages/query/src/label/query.ts | 25 +-- packages/query/src/lq.ts | 15 ++ packages/query/src/messages/query.ts | 95 ++++++++--- .../query/src/notification-contexts/query.ts | 151 +++++++++++------- packages/query/src/notifications/query.ts | 144 +++++++++++------ packages/query/src/types.ts | 9 +- 9 files changed, 323 insertions(+), 149 deletions(-) diff --git a/packages/client-query/src/index.ts b/packages/client-query/src/index.ts index 6c69e81797b..57bf5c1e996 100644 --- a/packages/client-query/src/index.ts +++ b/packages/client-query/src/index.ts @@ -16,7 +16,7 @@ import { CollaboratorsQuery, LabelsQuery, MessagesQuery, NotificationContextsQuery, NotificationsQuery } from './query' export type { MessageQueryParams } from '@hcengineering/communication-query' -export { initLiveQueries } from './init' +export { initLiveQueries, refreshLiveQueries } from './init' export function createMessagesQuery (dontDestroy?: boolean): MessagesQuery { return new MessagesQuery(dontDestroy) diff --git a/packages/client-query/src/init.ts b/packages/client-query/src/init.ts index 86dbdd99be2..33e951e1117 100644 --- a/packages/client-query/src/init.ts +++ b/packages/client-query/src/init.ts @@ -46,3 +46,9 @@ export function initLiveQueries ( lq = new LiveQueries(client, workspace, filesUrl) } + +export async function refreshLiveQueries (): Promise { + if (lq != null) { + await lq.refresh() + } +} diff --git a/packages/query/src/collaborators/query.ts b/packages/query/src/collaborators/query.ts index 7942521fac7..4cf32079895 100644 --- a/packages/query/src/collaborators/query.ts +++ b/packages/query/src/collaborators/query.ts @@ -15,15 +15,15 @@ import type { AccountID, Collaborator, FindCollaboratorsParams, WorkspaceID } from '@hcengineering/communication-types' import { + AddCollaboratorsEvent, + CardEventType, + type Event, type EventResult, type FindClient, - type QueryCallback, - type Event, NotificationEventType, - 
CardEventType, - AddCollaboratorsEvent, - RemoveCollaboratorsEvent, - RemoveCardEvent + type QueryCallback, + RemoveCardEvent, + RemoveCollaboratorsEvent } from '@hcengineering/communication-sdk-types' import { QueryResult } from '../result' @@ -47,6 +47,9 @@ export class CollaboratorsQuery implements Query { + void this.notify() + }) } } @@ -123,10 +126,7 @@ export class CollaboratorsQuery implements Query> { try { const res = await this.find(this.params) - const result = new QueryResult(res, (c) => c.account) - - void this.notify() - return result + return new QueryResult(res, (c) => c.account) } catch (error) { console.error('Failed to initialize query:', error) return new QueryResult([] as Collaborator[], (c) => c.account) @@ -175,4 +175,9 @@ export class CollaboratorsQuery implements Query { + this.result = new QueryResult([] as Collaborator[], (c) => c.account) + await this.initResult() + } } diff --git a/packages/query/src/label/query.ts b/packages/query/src/label/query.ts index 02c6aaec05f..eb1ac407d12 100644 --- a/packages/query/src/label/query.ts +++ b/packages/query/src/label/query.ts @@ -15,16 +15,16 @@ import type { FindLabelsParams, Label, WorkspaceID } from '@hcengineering/communication-types' import { + CardEventType, + CreateLabelEvent, + type Event, type EventResult, type FindClient, - type QueryCallback, - type Event, LabelEventType, - CardEventType, - CreateLabelEvent, + type QueryCallback, + RemoveCardEvent, RemoveLabelEvent, - UpdateCardTypeEvent, - RemoveCardEvent + UpdateCardTypeEvent } from '@hcengineering/communication-sdk-types' import { QueryResult } from '../result' @@ -52,6 +52,9 @@ export class LabelsQuery implements Query { void this.notify() } else { this.result = this.initResult() + void this.result.then(() => { + void this.notify() + }) } } @@ -186,10 +189,7 @@ export class LabelsQuery implements Query { private async initResult (): Promise> { try { const res = await this.find(this.params) - const result = new QueryResult(res, getId) - - void this.notify() - return result + return new QueryResult(res, getId) } catch (error) { console.error('Failed to initialize query:', error) return new QueryResult([] as Label[], getId) @@ -251,4 +251,9 @@ export class LabelsQuery implements Query { } return true } + + async refresh (): Promise { + this.result = new QueryResult([] as Label[], getId) + await this.initResult() + } } diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 450b2c7052d..37c9de45056 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -221,4 +221,19 @@ export class LiveQueries { this.queries.clear() this.unsubscribed.clear() } + + async refresh (): Promise { + for (const [id, query] of this.queries.entries()) { + if (this.unsubscribed.has(id)) { + this.unsubscribe(id) + continue + } + + try { + await query.refresh() + } catch (e) { + console.error('Failed to refresh live query', e, query.id, query.params) + } + } + } } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 891820cd693..e3da5c32a54 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -57,7 +57,7 @@ const GROUPS_LIMIT = 4 export class MessagesQuery implements PagedQuery { private result: Promise> | QueryResult - private readonly groupsBuffer: MessagesGroup[] = [] + private groupsBuffer: MessagesGroup[] = [] private firstGroup?: MessagesGroup private lastGroup?: MessagesGroup @@ -69,6 +69,9 @@ export class MessagesQuery implements PagedQuery { private readonly 
limit: number private initialized = false + nexLoadedPagesCount = 0 + prevLoadedPagesCount = 0 + private readonly next = { hasMessages: true, hasGroups: true, @@ -250,33 +253,37 @@ export class MessagesQuery implements PagedQuery { await this.client.unsubscribeQuery(this.id) } - async requestLoadNextPage (): Promise { - if (this.isCardRemoved) return + async requestLoadNextPage (notify = true): Promise<{ isDone: boolean }> { + if (this.isCardRemoved) return { isDone: true } if (this.result instanceof Promise) this.result = await this.result - if (!this.result.isTail()) { - this.result = this.loadPage(Direction.Forward, this.result) - void this.result - .then(() => this.notify()) - .catch((error) => { - console.error('Failed to load messages', error) - void this.notify() - }) + if (this.result.isTail()) return { isDone: true } + + const pagePromise = this.loadPage(Direction.Forward, this.result) + this.nexLoadedPagesCount++ + this.result = pagePromise + + const r = await pagePromise + if (notify) { + await this.notify() } + return { isDone: r.isTail() } } - async requestLoadPrevPage (): Promise { - if (this.isCardRemoved) return + async requestLoadPrevPage (notify = true): Promise<{ isDone: boolean }> { + if (this.isCardRemoved) return { isDone: true } if (this.result instanceof Promise) this.result = await this.result - if (!this.result.isHead()) { - this.result = this.loadPage(Direction.Backward, this.result) - void this.result - .then(() => this.notify()) - .catch((error) => { - console.error('Failed to load messages', error) - void this.notify() - }) + if (this.result.isHead()) return { isDone: true } + + const pagePromise = this.loadPage(Direction.Backward, this.result) + this.prevLoadedPagesCount++ + this.result = pagePromise + const r = await pagePromise + + if (notify) { + await this.notify() } + return { isDone: r.isHead() } } removeCallback (): void { @@ -845,4 +852,50 @@ export class MessagesQuery implements PagedQuery { return result } + + async refresh (): Promise { + const nextPagesCount = this.nexLoadedPagesCount + const prevPagesCount = this.prevLoadedPagesCount + + this.nexLoadedPagesCount = 0 + this.prevLoadedPagesCount = 0 + + this.groupsBuffer = [] + this.firstGroup = undefined + this.lastGroup = undefined + + this.firstLoadedGroup = undefined + this.lastLoadedGroup = undefined + + this.lastGroupsDirection = undefined + + this.next.hasMessages = true + this.next.hasGroups = true + this.next.buffer = [] + this.prev.hasMessages = true + this.prev.hasGroups = true + this.prev.buffer = [] + + this.createdPatches.clear() + this.tmpMessages.clear() + + this.result = new QueryResult([] as Message[], (x) => x.id) + this.result.setTail(this.params.from == null) + this.result.setHead(this.params.from == null) + this.initialized = false + + for (let i = 0; i < nextPagesCount; i++) { + const { isDone } = await this.requestLoadNextPage(false) + this.initialized = true + if (!isDone) break + } + + for (let i = 0; i < prevPagesCount; i++) { + const { isDone } = await this.requestLoadPrevPage(false) + this.initialized = true + if (!isDone) break + } + + await this.notify() + } } diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 000c44b60c7..3869b31a41a 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -59,8 +59,11 @@ import { findMessage, loadMessageFromGroup, matchNotification } from '../utils' const allowedPatchTypes = [PatchType.update, 
PatchType.remove, PatchType.blob] export class NotificationContextsQuery implements PagedQuery { private result: QueryResult | Promise> - private forward: Promise | NotificationContext[] = [] - private backward: Promise | NotificationContext[] = [] + private forward: Promise<{ isDone: boolean }> | { isDone: boolean } = { isDone: false } + private backward: Promise<{ isDone: boolean }> | { isDone: boolean } = { isDone: false } + + nexLoadedPagesCount = 0 + prevLoadedPagesCount = 0 constructor ( private readonly client: FindClient, @@ -76,39 +79,46 @@ export class NotificationContextsQuery implements PagedQuery { + void this.notify() + }) + } + } + + private async rawInitResult (): Promise> { + const limit = this.params.limit != null ? this.params.limit + 1 : undefined const findParams: FindNotificationContextParams = { ...this.params, order: this.params.order ?? defaultQueryParams.order, limit } - if (initialResult !== undefined) { - this.result = initialResult - void this.notify() - } else { - const findPromise = this.find(findParams) - this.result = findPromise.then((res) => { - const allLoaded = limit == null || res.length < limit - const isTail = allLoaded || (params.lastNotify == null && params.order === SortingOrder.Descending) - const isHead = allLoaded || (params.lastNotify == null && params.order === SortingOrder.Ascending) - - if (limit != null && res.length >= limit) { - res.pop() - } - const qResult = new QueryResult(res, (x) => x.id) - qResult.setTail(isTail) - qResult.setHead(isHead) + const res = await this.find(findParams) + const isComplete = limit == null || res.length < limit + if (!isComplete) res.pop() - return qResult - }) - this.result - .then(async () => { - await this.notify() - }) - .catch((err: any) => { - console.error('Failed to update Live query: ', err) - }) + const isTail = isComplete || (this.params.lastNotify == null && this.params.order === SortingOrder.Descending) + const isHead = isComplete || (this.params.lastNotify == null && this.params.order === SortingOrder.Ascending) + + const result = new QueryResult(res, (it) => it.id) + result.setTail(isTail) + result.setHead(isHead) + + return result + } + + private async initResult (): Promise> { + try { + return await this.rawInitResult() + } catch (error) { + console.error('Failed to initialize query:', error) + return new QueryResult([] as NotificationContext[], (it) => it.id) } } @@ -156,18 +166,14 @@ export class NotificationContextsQuery implements PagedQuery { - if (this.result instanceof Promise) { - this.result = await this.result - } - if (this.forward instanceof Promise) { - this.forward = await this.forward - } + async requestLoadNextPage (notify = true): Promise<{ isDone: boolean }> { + if (this.result instanceof Promise) this.result = await this.result + if (this.forward instanceof Promise) this.forward = await this.forward - if (this.result.isTail()) return + if (this.result.isTail()) return { isDone: true } const last = this.result.getLast() - if (last === undefined) return + if (last === undefined) return { isDone: false } const limit = this.params.limit ?? 
defaultQueryParams.limit const findParams: FindNotificationContextParams = { @@ -180,8 +186,7 @@ export class NotificationContextsQuery implements PagedQuery { + const forwardPromise = forward.then(async (res) => { if (this.result instanceof Promise) { this.result = await this.result } @@ -191,23 +196,28 @@ export class NotificationContextsQuery implements PagedQuery { - if (this.result instanceof Promise) { - this.result = await this.result - } - if (this.backward instanceof Promise) { - this.backward = await this.backward - } + async requestLoadPrevPage (notify = true): Promise<{ isDone: boolean }> { + if (this.result instanceof Promise) this.result = await this.result + if (this.backward instanceof Promise) this.backward = await this.backward - if (this.result.isHead()) return + if (this.result.isHead()) return { isDone: true } const first = this.params.order === SortingOrder.Ascending ? this.result.getFirst() : this.result.getLast() - if (first === undefined) return + if (first === undefined) return { isDone: false } const limit = this.params.limit ?? defaultQueryParams.limit const findParams: FindNotificationContextParams = { @@ -220,10 +230,9 @@ export class NotificationContextsQuery implements PagedQuery { - if (this.result instanceof Promise) { - this.result = await this.result - } + const backwardPromise = backward.then(async (res) => { + if (this.result instanceof Promise) this.result = await this.result + const isHead = res.length <= limit if (!isHead) { res.pop() @@ -236,9 +245,16 @@ export class NotificationContextsQuery implements PagedQuery { + const nextPagesCount = this.nexLoadedPagesCount + const prevPagesCount = this.prevLoadedPagesCount + + this.result = new QueryResult([] as NotificationContext[], (it) => it.id) + this.nexLoadedPagesCount = 0 + this.prevLoadedPagesCount = 0 + + this.result = await this.rawInitResult() + + for (let i = 0; i < nextPagesCount; i++) { + const { isDone } = await this.requestLoadNextPage(false) + if (!isDone) break + } + + for (let i = 0; i < prevPagesCount; i++) { + const { isDone } = await this.requestLoadPrevPage(false) + if (!isDone) break + } + + await this.notify() + } } diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index fb6df5cb53d..c343cf41342 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -25,19 +25,19 @@ import { type WorkspaceID } from '@hcengineering/communication-types' import { - type FindClient, - type PagedQueryCallback, - type Event, - NotificationEventType, - MessageEventType, CardEventType, CreateNotificationEvent, - UpdateNotificationContextEvent, - UpdateNotificationEvent, - RemoveNotificationsEvent, - RemoveNotificationContextEvent, + type Event, + type FindClient, + MessageEventType, + NotificationEventType, + type PagedQueryCallback, + PatchEvent, RemoveCardEvent, - PatchEvent + RemoveNotificationContextEvent, + RemoveNotificationsEvent, + UpdateNotificationContextEvent, + UpdateNotificationEvent } from '@hcengineering/communication-sdk-types' import { applyPatches, MessageProcessor, NotificationProcessor } from '@hcengineering/communication-shared' @@ -51,6 +51,9 @@ const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.blob] export class NotificationQuery implements PagedQuery { private result: QueryResult | Promise> + nexLoadedPagesCount = 0 + prevLoadedPagesCount = 0 + constructor ( private readonly client: FindClient, private readonly workspace: WorkspaceID, @@ -60,33 +63,39 @@ export 
class NotificationQuery implements PagedQuery, initialResult?: QueryResult ) { - const limit = this.params.limit ?? defaultQueryParams.limit - const findParams: FindNotificationsParams = { - ...this.params, - order: this.params.order ?? defaultQueryParams.order, - limit: this.params.strict === true ? limit : limit + 1 - } - if (initialResult !== undefined) { this.result = initialResult void this.notify() } else { - this.result = this.initResult(findParams, limit) + this.result = this.initResult() + void this.result.then(() => { + void this.notify() + }) } } - private async initResult (findParams: FindNotificationsParams, limit: number): Promise> { - try { - const res = await this.find(findParams) - const isComplete = res.length <= limit - if (!isComplete) res.pop() + private async rawInitResult (): Promise> { + const limit = this.params.limit != null ? this.params.limit + 1 : undefined + const findParams: FindNotificationsParams = { + ...this.params, + order: this.params.order ?? defaultQueryParams.order, + limit: this.params.strict === true ? this.params.limit : limit + } - const result = new QueryResult(res, (it) => it.id) - result.setTail(isComplete) - result.setHead(isComplete) + const res = await this.find(findParams) + const isComplete = this.params.limit == null || this.params.strict === true || res.length < this.params.limit + if (!isComplete) res.pop() - void this.notify() - return result + const result = new QueryResult(res, (it) => it.id) + result.setTail(isComplete) + result.setHead(isComplete) + + return result + } + + private async initResult (): Promise> { + try { + return await this.rawInitResult() } catch (error) { console.error('Failed to initialize query:', error) return new QueryResult([] as Notification[], (it) => it.id) @@ -131,24 +140,33 @@ export class NotificationQuery implements PagedQuery { - if (this.params.strict === true) return + async requestLoadNextPage (notify = true): Promise<{ isDone: boolean }> { + if (this.params.strict === true) return { isDone: true } if (this.result instanceof Promise) this.result = await this.result + if (this.result.isTail()) return { isDone: true } + + const result = await this.loadPage(SortingOrder.Ascending, this.result.getLast()?.created, notify) + this.nexLoadedPagesCount++ - await this.loadPage(SortingOrder.Ascending, this.result.getLast()?.created) + return result } - async requestLoadPrevPage (): Promise { - if (this.params.strict === true) return + async requestLoadPrevPage (notify = true): Promise<{ isDone: boolean }> { + if (this.params.strict === true) return { isDone: true } if (this.result instanceof Promise) this.result = await this.result - await this.loadPage(SortingOrder.Descending, this.result.getFirst()?.created) + if (this.result.isHead()) return { isDone: true } + const result = await this.loadPage(SortingOrder.Descending, this.result.getFirst()?.created, notify) + this.prevLoadedPagesCount++ + + return result } - private async loadPage (order: SortingOrder, created?: Date): Promise { - if (created == null) return + private async loadPage (order: SortingOrder, created?: Date, notify = true): Promise<{ isDone: boolean }> { + if (created == null) return { isDone: false } + if (this.params.limit == null) return { isDone: true } if (this.result instanceof Promise) this.result = await this.result - const limit = this.getLimit() + const limit = this.params.limit const findParams: FindNotificationsParams = { ...this.params, created: order === SortingOrder.Ascending ? 
{ greater: created } : { less: created }, @@ -158,21 +176,27 @@ export class NotificationQuery implements PagedQuery= this.getLimit() && newLength < this.getLimit()) { + if (limit != null && currentLength !== newLength && currentLength >= limit && newLength < limit) { await this.reinit(currentLength) } else { void this.notify() @@ -312,8 +337,9 @@ export class NotificationQuery implements PagedQuery= this.getLimit() && newLength < this.getLimit()) { + if (limit != null && currentLength >= limit && newLength < limit) { void this.reinit(currentLength) } else if (isDeleted) { void this.notify() @@ -339,7 +365,8 @@ export class NotificationQuery implements PagedQuery= this.getLimit() && this.result.length < this.getLimit()) { + const { limit } = this.params + if (limit != null && length >= limit && this.result.length < limit) { void this.reinit(this.result.length) } else { void this.notify() @@ -380,10 +407,6 @@ export class NotificationQuery implements PagedQuery { if (this.result instanceof Promise) this.result = await this.result this.result = this.find({ ...this.params, limit: limit + 1 }).then((res) => { @@ -429,4 +452,27 @@ export class NotificationQuery implements PagedQuery { + const nextPagesCount = this.nexLoadedPagesCount + const prevPagesCount = this.prevLoadedPagesCount + + this.result = new QueryResult([] as Notification[], (it) => it.id) + this.nexLoadedPagesCount = 0 + this.prevLoadedPagesCount = 0 + + this.result = await this.rawInitResult() + + for (let i = 0; i < nextPagesCount; i++) { + const { isDone } = await this.requestLoadNextPage(false) + if (!isDone) break + } + + for (let i = 0; i < prevPagesCount; i++) { + const { isDone } = await this.requestLoadPrevPage(false) + if (!isDone) break + } + + await this.notify() + } } diff --git a/packages/query/src/types.ts b/packages/query/src/types.ts index 9ea07662820..96ee09936a2 100644 --- a/packages/query/src/types.ts +++ b/packages/query/src/types.ts @@ -51,13 +51,18 @@ interface BaseQuery { removeCallback: () => void setCallback: (callback: (result: any) => void) => void copyResult: () => QueryResult | undefined + + refresh: () => Promise } + export interface PagedQuery extends BaseQuery { readonly id: QueryId readonly params: P + nexLoadedPagesCount: number + prevLoadedPagesCount: number - requestLoadNextPage: () => Promise - requestLoadPrevPage: () => Promise + requestLoadNextPage: (notify?: boolean) => Promise<{ isDone: boolean }> + requestLoadPrevPage: (notify?: boolean) => Promise<{ isDone: boolean }> setCallback: (callback: (window: Window) => void) => void } From 34a153aa27678967cfb5606181fc9317f2b2540a Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 11 Jul 2025 10:56:59 +0400 Subject: [PATCH 143/636] Q-fix: sql (#86) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/db/notification.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 2b733e4f400..df641dfa8e3 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -652,7 +652,7 @@ export class NotificationsDb extends BaseDb { WHERE n.context_id = nc.id AND nc.workspace_id = $1::uuid - AND nc.card_id = $2::uuid + AND nc.card_id = $2::varchar AND n.message_created BETWEEN $4::timestamptz AND $5::timestamptz AND n.blob_id IS NULL ` @@ -667,7 +667,7 @@ export class NotificationsDb extends BaseDb { WHERE n.context_id = nc.id AND nc.workspace_id = $1::uuid - AND nc.card_id = $2::uuid + AND nc.card_id = 
$2::varchar AND n.blob_id = $3::uuid; ` await this.execute(sql, [this.workspace, cardId, blobId]) From 3b6666d252e70bdfdecbe56ae0989a5c8b8a3b8d Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 11 Jul 2025 21:57:11 +0400 Subject: [PATCH 144/636] Adjust notifications for readonly guest (#87) Signed-off-by: Kristina Fefelova --- packages/server/src/middleware/permissions.ts | 15 ++++++++++++++- .../server/src/notification/notification.ts | 17 +++++++++++------ 2 files changed, 25 insertions(+), 7 deletions(-) diff --git a/packages/server/src/middleware/permissions.ts b/packages/server/src/middleware/permissions.ts index 54b4e8b52da..9d9e5657948 100644 --- a/packages/server/src/middleware/permissions.ts +++ b/packages/server/src/middleware/permissions.ts @@ -21,7 +21,7 @@ import { NotificationEventType, type SessionData } from '@hcengineering/communication-sdk-types' -import { systemAccountUuid } from '@hcengineering/core' +import { AccountRole, systemAccountUuid } from '@hcengineering/core' import type { AccountID, SocialID } from '@hcengineering/communication-types' import { ApiError } from '../error' @@ -40,6 +40,8 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware async event (session: SessionData, event: Enriched, derived: boolean): Promise { if (derived) return await this.provideEvent(session, event, derived) + this.notAnonymousAccount(session) + switch (event.type) { case MessageEventType.CreateMessage: this.checkSocialId(session, event.socialId) @@ -96,8 +98,19 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware } } + private notAnonymousAccount (session: SessionData): void { + if (this.isAnonymousAccount(session)) { + throw ApiError.forbidden('anonymous account is not allowed') + } + } + private isSystemAccount (session: SessionData): boolean { const account = session.account return systemAccountUuid === account.uuid } + + private isAnonymousAccount (session: SessionData): boolean { + const account = session.account + return account.role === AccountRole.ReadOnlyGuest + } } diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts index f02620d9de7..0ce78b8cd5b 100644 --- a/packages/server/src/notification/notification.ts +++ b/packages/server/src/notification/notification.ts @@ -34,14 +34,16 @@ import { type SocialID, SortingOrder } from '@hcengineering/communication-types' +import { markdownToMarkup } from '@hcengineering/text-markdown' +import { jsonToMarkup, markupToText } from '@hcengineering/text-core' +import { readOnlyGuestAccountUuid } from '@hcengineering/core' import type { Enriched, TriggerCtx } from '../types' import { findAccount } from '../utils' import { findMessage, getNameBySocialID } from '../triggers/utils' -import { markdownToMarkup } from '@hcengineering/text-markdown' -import { jsonToMarkup, markupToText } from '@hcengineering/text-core' const BATCH_SIZE = 500 +const maxDate = new Date('9999-12-31T23:59:59Z') export async function notify (ctx: TriggerCtx, event: Enriched): Promise { switch (event.type) { @@ -191,7 +193,7 @@ async function notifyReaction ( blobId, date, content, - read: false + read: messageAccount === readOnlyGuestAccountUuid }) if ((context?.lastNotify?.getTime() ?? 
date.getTime()) < date.getTime()) { @@ -294,6 +296,7 @@ async function processCollaborator ( const text = markupToText(jsonToMarkup(markdownToMarkup(markdown))) const shortText = text.slice(0, 100) + const isRead = collaborator === readOnlyGuestAccountUuid result.push({ type: NotificationEventType.CreateNotification, notificationType: NotificationType.Message, @@ -308,7 +311,7 @@ async function processCollaborator ( title: cardTitle, shortText: shortText.length < text.length ? shortText + '...' : text }, - read: date.getTime() < (context?.lastView?.getTime() ?? 0) + read: isRead || date.getTime() < (context?.lastView?.getTime() ?? 0) }) return result } @@ -325,7 +328,8 @@ async function createOrUpdateContext ( events: Event[] }> { if (context == null) { - const contextId = await createContext(ctx, collaborator, cardId, date, isOwn ? date : undefined, date) + const lastView = collaborator === readOnlyGuestAccountUuid ? maxDate : isOwn ? date : undefined + const contextId = await createContext(ctx, collaborator, cardId, date, lastView, date) return { contextId, @@ -334,7 +338,8 @@ async function createOrUpdateContext ( } const lastUpdate = context.lastUpdate == null || date > context.lastUpdate ? date : context.lastUpdate - const lastView = isOwn && isContextRead(context) ? date : undefined + const lastView = + collaborator === readOnlyGuestAccountUuid ? maxDate : isOwn && isContextRead(context) ? date : undefined return { contextId: context.id, From 5824242b8bea1b0f4d9b42541422d8567217d976 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 14 Jul 2025 16:42:40 +0400 Subject: [PATCH 145/636] Fix query (#88) Signed-off-by: Kristina Fefelova --- packages/query/src/notifications/query.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index c343cf41342..26e295be6d8 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -83,7 +83,7 @@ export class NotificationQuery implements PagedQuery it.id) From 88ecdd1be78105f616ffd2b217b65fea8f8ebf25 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 16 Jul 2025 15:18:44 +0400 Subject: [PATCH 146/636] Fix query refresh (#89) Signed-off-by: Kristina Fefelova --- packages/query/src/collaborators/query.ts | 5 ++++- packages/query/src/label/query.ts | 5 ++++- packages/query/src/messages/query.ts | 8 ++++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/packages/query/src/collaborators/query.ts b/packages/query/src/collaborators/query.ts index 4cf32079895..3ed96edb6e3 100644 --- a/packages/query/src/collaborators/query.ts +++ b/packages/query/src/collaborators/query.ts @@ -178,6 +178,9 @@ export class CollaboratorsQuery implements Query { this.result = new QueryResult([] as Collaborator[], (c) => c.account) - await this.initResult() + this.result = this.initResult() + void this.result.then(() => { + void this.notify() + }) } } diff --git a/packages/query/src/label/query.ts b/packages/query/src/label/query.ts index eb1ac407d12..15431b2fbf3 100644 --- a/packages/query/src/label/query.ts +++ b/packages/query/src/label/query.ts @@ -254,6 +254,9 @@ export class LabelsQuery implements Query { async refresh (): Promise { this.result = new QueryResult([] as Label[], getId) - await this.initResult() + this.result = this.initResult() + void this.result.then(() => { + void this.notify() + }) } } diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 
e3da5c32a54..59d947611de 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -880,10 +880,14 @@ export class MessagesQuery implements PagedQuery { this.tmpMessages.clear() this.result = new QueryResult([] as Message[], (x) => x.id) - this.result.setTail(this.params.from == null) - this.result.setHead(this.params.from == null) this.initialized = false + if (this.isInitLoadingForward()) { + this.result.setHead(this.params.from == null) + } else { + this.result.setTail(this.params.from == null) + } + for (let i = 0; i < nextPagesCount; i++) { const { isDone } = await this.requestLoadNextPage(false) this.initialized = true From b038c53c54dbf0e369de5d50e83ef6dcef7f5346 Mon Sep 17 00:00:00 2001 From: Kristina Date: Wed, 23 Jul 2025 16:22:15 +0400 Subject: [PATCH 147/636] Init applets (#90) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/adapter.ts | 64 +- packages/cockroach/src/db/base.ts | 245 ++++++ packages/cockroach/src/db/label.ts | 94 ++- packages/cockroach/src/db/mapping.ts | 101 +-- packages/cockroach/src/db/message.ts | 783 ++++++------------ packages/cockroach/src/db/notification.ts | 233 +++--- packages/cockroach/src/init.ts | 237 +++--- packages/cockroach/src/schema.ts | 218 +++-- packages/query/src/messages/query.ts | 9 +- .../query/src/notification-contexts/query.ts | 2 +- packages/query/src/notifications/query.ts | 4 +- packages/query/src/types.ts | 3 +- packages/query/src/utils.ts | 4 +- packages/rest-client/src/rest.ts | 32 +- packages/rest-client/src/types.ts | 21 +- packages/sdk-types/src/db.ts | 26 +- packages/sdk-types/src/domain.ts | 32 + packages/sdk-types/src/events/message.ts | 90 +- packages/sdk-types/src/index.ts | 3 +- packages/server/src/middleware/broadcast.ts | 2 +- packages/server/src/middleware/db.ts | 80 +- packages/server/src/middleware/permissions.ts | 2 +- packages/server/src/middleware/validate.ts | 92 +- packages/server/src/triggers/message.ts | 14 +- packages/server/src/triggers/utils.ts | 3 +- packages/shared/src/patch.ts | 171 ++-- packages/shared/src/processor.ts | 90 +- packages/shared/src/utils.ts | 33 +- packages/types/src/file.ts | 5 +- packages/types/src/index.ts | 1 + packages/types/src/message.ts | 202 ++--- packages/types/src/notification.ts | 3 +- packages/types/src/patch.ts | 112 +++ packages/types/src/query.ts | 3 +- packages/yaml/src/deserialize.ts | 5 +- packages/yaml/src/parse.ts | 74 +- 36 files changed, 1645 insertions(+), 1448 deletions(-) create mode 100644 packages/sdk-types/src/domain.ts create mode 100644 packages/types/src/patch.ts diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 360fb8da57f..6837cdcc30c 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -16,7 +16,6 @@ import { type FindCollaboratorsParams, type AccountID, - type BlobID, type CardID, type Collaborator, type ContextID, @@ -42,16 +41,19 @@ import { type CardType, NotificationType, type NotificationContent, - type LinkPreviewData, - type LinkPreviewID, type MessageExtra, - type BlobData, BlobUpdateData + type BlobID, + type AttachmentData, + type AttachmentID, + type AttachmentUpdateData } from '@hcengineering/communication-types' import type { DbAdapter, LabelUpdates, NotificationContextUpdates, - NotificationUpdates, RemoveLabelQuery, ThreadQuery, + NotificationUpdates, + RemoveLabelQuery, + ThreadQuery, ThreadUpdates, UpdateNotificationQuery } from '@hcengineering/communication-sdk-types' @@ -82,15 +84,15 @@ export class 
CockroachAdapter implements DbAdapter { } async createMessage ( - id: MessageID, cardId: CardID, + id: MessageID, type: MessageType, content: Markdown, extra: MessageExtra | undefined, creator: SocialID, created: Date ): Promise { - return await this.message.createMessage(id, cardId, type, content, extra, creator, created) + return await this.message.createMessage(cardId, id, type, content, extra, creator, created) } async createPatch ( @@ -138,62 +140,38 @@ export class CockroachAdapter implements DbAdapter { await this.message.removeReaction(cardId, messageId, reaction, socialId, date) } - async attachBlobs ( + async addAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + data: AttachmentData[], socialId: SocialID, date: Date ): Promise { - await this.message.attachBlobs(cardId, messageId, blobs, socialId, date) - } - - async detachBlobs (cardId: CardID, messageId: MessageID, blobIds: BlobID[], socialId: SocialID, date: Date): Promise { - await this.message.detachBlobs(cardId, messageId, blobIds, socialId, date) + await this.message.addAttachments(cardId, messageId, data, socialId, date) } - async setBlobs ( - cardId: CardID, - messageId: MessageID, - blobs: BlobData[], - socialId: SocialID, - date: Date - ): Promise { - await this.message.setBlobs(cardId, messageId, blobs, socialId, date) + async removeAttachments (cardId: CardID, messageId: MessageID, ids: AttachmentID[], socialId: SocialID, date: Date): Promise { + await this.message.removeAttachments(cardId, messageId, ids, socialId, date) } - async updateBlobs ( + async setAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobUpdateData[], + data: AttachmentData[], socialId: SocialID, date: Date ): Promise { - await this.message.updateBlobs(cardId, messageId, blobs, socialId, date) + await this.message.setAttachments(cardId, messageId, data, socialId, date) } - async attachLinkPreviews ( + async updateAttachments ( cardId: CardID, messageId: MessageID, - data: (LinkPreviewData & { previewId: LinkPreviewID })[], + data: AttachmentUpdateData[], socialId: SocialID, date: Date ): Promise { - await this.message.attachLinkPreviews(cardId, messageId, data, socialId, date) - } - - async setLinkPreviews ( - cardId: CardID, - messageId: MessageID, - data: (LinkPreviewData & { previewId: LinkPreviewID })[], - socialId: SocialID, - date: Date - ): Promise { - await this.message.setLinkPreviews(cardId, messageId, data, socialId, date) - } - - async detachLinkPreviews (cardId: CardID, messageId: MessageID, previewIds: LinkPreviewID[], socialId: SocialID, date: Date): Promise { - await this.message.detachLinkPreviews(cardId, messageId, previewIds, socialId, date) + await this.message.updateAttachments(cardId, messageId, data, socialId, date) } async attachThread ( @@ -231,7 +209,7 @@ export class CockroachAdapter implements DbAdapter { card: CardID, cardType: CardType, collaborators: AccountID[], - date?: Date + date: Date ): Promise { return await this.notification.addCollaborators(card, cardType, collaborators, date) } diff --git a/packages/cockroach/src/db/base.ts b/packages/cockroach/src/db/base.ts index b460abd3a1b..50e5f5b319f 100644 --- a/packages/cockroach/src/db/base.ts +++ b/packages/cockroach/src/db/base.ts @@ -15,9 +15,19 @@ import postgres, { type ParameterOrJSON } from 'postgres' import type { WorkspaceID } from '@hcengineering/communication-types' +import { Domain } from '@hcengineering/communication-sdk-types' import { SqlRow, type Logger, type Options, type SqlResult } from '../types' import { 
SqlClient } from '../client' +import { + DbModelBatchUpdate, + DbModelColumn, + DbModelColumnType, + DbModelFilter, + DbModelUpdate, + DomainDbModel, + schemas +} from '../schema' export class BaseDb { constructor ( @@ -31,6 +41,241 @@ export class BaseDb { return this.client.getRawClient() } + getInsertSql( + domain: D, + model: M, + returnColumns: { column: DbModelColumn, cast: string }[] = [], + options?: { + conflictColumns?: DbModelColumn[] + conflictAction?: string + } + ): { sql: string, values: any[] } { + const schema = schemas[domain] + const columns = (Object.keys(model) as DbModelColumn[]).filter( + (c): c is DbModelColumn => model[c] !== undefined + ) + const values = columns.map((c) => model[c]) + const placeholders = columns.map((c, i) => { + const sqlType = (schema as any)[c] + return `$${i + 1}::${sqlType}` + }) + + const columnsString = columns.map((k) => k).join(', ') + const placeholdersString = placeholders.join(', ') + let sql = `INSERT INTO ${domain} (${columnsString}) VALUES (${placeholdersString})` + + if (options?.conflictColumns != null && options.conflictColumns.length > 0) { + const cols = options.conflictColumns.join(', ') + const action = options.conflictAction ?? 'DO NOTHING' + sql += ` ON CONFLICT (${cols}) ${action}` + } + + if (returnColumns.length > 0) { + sql += ` RETURNING ${returnColumns.map((c) => `${c.column}::${c.cast}`).join(', ')}` + } + + return { sql, values } + } + + getBatchInsertSql( + domain: D, + models: M[], + returnColumns: { column: DbModelColumn, cast: string }[] = [], + options?: { + conflictColumns?: DbModelColumn[] + conflictAction?: string + } + ): { sql: string, values: any[] } { + if (models.length === 0) throw new Error('models must not be empty') + + const columns = Object.keys(models[0]) as Array + const schema = schemas[domain] + + const values: any[] = [] + const placeholders = models.map((model, i) => { + const rowPlaceholders = columns.map((k, j) => { + values.push(model[k]) + return `$${i * columns.length + j + 1}::${(schema as any)[k]}` + }) + return `(${rowPlaceholders.join(', ')})` + }) + + let sql = `INSERT INTO ${domain} (${columns.join(', ')}) VALUES ${placeholders.join(', ')}`.trim() + + if (options?.conflictColumns != null && options.conflictColumns.length > 0) { + const cols = options.conflictColumns.join(', ') + const action = options.conflictAction ?? 
'DO NOTHING' + sql += ` ON CONFLICT (${cols}) ${action}` + } + + if (returnColumns.length > 0) { + sql += ` RETURNING ${returnColumns.map((c) => `${c.column}::${c.cast}`).join(', ')}` + } + + return { sql, values } + } + + getDeleteSql(domain: D, filter: DbModelFilter): { sql: string, values: any[] } { + if (filter.length === 0) { + throw new Error('getDeleteSql requires at least one filter') + } + const schema = schemas[domain] + const placeholders: string[] = [] + const values: any[] = [] + + filter.forEach((f, i) => { + if (!Array.isArray(f.value)) { + const idx = values.push(f.value) + placeholders.push(`${f.column} = $${idx}::${(schema as any)[f.column]}`) + } else { + const idx = values.push(f.value) + placeholders.push(`${f.column} = ANY($${idx}::${(schema as any)[f.column]}[])`) + } + }) + + const sql = `DELETE FROM ${domain} WHERE ${placeholders.join(' AND ')}`.trim() + + return { sql, values } + } + + getUpdateSql( + domain: D, + filter: DbModelFilter, + updates: DbModelUpdate + ): { sql: string, values: any[] } { + if (filter.length === 0) { + throw new Error('Filter must not be empty') + } + if (updates.length === 0) { + throw new Error('Updates must not be empty') + } + const schema = schemas[domain] + const values: any[] = [] + const whereClauses: string[] = [] + const setClauses: string[] = [] + + for (const { column, value } of filter) { + const idx = values.push(value) + const cast = (schema as any)[column] + if (Array.isArray(value)) { + whereClauses.push(`${column} = ANY($${idx}::${cast}[])`) + } else { + whereClauses.push(`${column} = $${idx}::${cast}`) + } + } + + updates + .filter((u) => u.innerKey == null) + .forEach((u) => { + const idx = values.push(u.value) + const cast = (schema as any)[u.column] + setClauses.push(`${u.column} = $${idx}::${cast}`) + }) + + const jsonGroups: Record> = {} + updates + .filter((u) => u.innerKey != null) + .forEach((u) => { + const col = u.column + jsonGroups[col] = jsonGroups[col] ?? [] + jsonGroups[col].push({ key: u.innerKey as string, value: u.value }) + }) + for (const [col, items] of Object.entries(jsonGroups)) { + const parts = items.map((item) => { + const idx = values.push(item.value) + return `'${item.key}', $${idx}` + }) + setClauses.push(`${col} = ${col} || jsonb_build_object(${parts.join(', ')})`) + } + + const sql = ` + UPDATE ${domain} AS u + SET ${setClauses.join(', ')} + WHERE ${whereClauses.join(' AND ')} + `.trim() + + return { sql, values } + } + + getBatchUpdateSql( + domain: D, + keyColumn: DbModelColumn, + filter: DbModelFilter, + updates: DbModelBatchUpdate + ): { sql: string, values: any[] } { + if (filter.length === 0) throw new Error('Batch update requires at least one filter') + if (updates.length === 0) throw new Error('Batch update requires at least one update') + + const schema = schemas[domain] + const values: any[] = [] + const whereClauses: string[] = [] + const setClauses: string[] = [] + + for (const { column, value } of filter) { + const idx = values.push(value) + const cast = (schema as any)[column] + whereClauses.push( + Array.isArray(value) ? `u.${column} = ANY($${idx}::${cast}[])` : `u.${column} = $${idx}::${cast}` + ) + } + + const rowsByKey = new Map, typeof updates>() + for (const u of updates) { + if (!rowsByKey.has(u.key)) { + rowsByKey.set(u.key, []) + } + ;(rowsByKey.get(u.key) ?? 
[]).push(u) + } + + const allCols = Array.from(rowsByKey.values()) + .flat() + .map((u) => u.column) + .filter((c, i, a) => a.indexOf(c) === i) + + const tuples = Array.from(rowsByKey.entries()).map(([key, ups]) => { + const rowVals = [ + key, + ...allCols.map((col) => { + const f = ups.find((u) => u.column === col) + if (f == null) return null + return f.innerKey != null ? { [f.innerKey]: f.value } : f.value + }) + ] + const placeholders = rowVals.map((v, j) => { + const idx = values.push(v) + const colName = j === 0 ? keyColumn : allCols[j - 1] + const cast = (schema as any)[colName] + return `$${idx}::${cast}` + }) + return `(${placeholders.join(',')})` + }) + + for (const col of allCols) { + if (filter.some((f) => f.column === col) || col === keyColumn) continue + setClauses.push(`${col} = v.${col}`) + } + + for (const col of allCols) { + if ((schema as any)[col] === 'jsonb') { + const idx = setClauses.findIndex((s) => s.startsWith(`${col} = v.${col}`)) + if (idx >= 0) setClauses[idx] = `${col} = u.${col} || v.${col}` + } + } + + const sql = ` + UPDATE ${domain} AS u + SET ${setClauses.join(', ')} + FROM ( + VALUES + ${tuples.join(',\n ')} + ) AS v(${[keyColumn, ...allCols].join(', ')}) + WHERE ${whereClauses.join(' AND ')} + AND u.${keyColumn} = v.${keyColumn} + `.trim() + + return { sql, values } + } + async execute( sql: string, params?: ParameterOrJSON[], diff --git a/packages/cockroach/src/db/label.ts b/packages/cockroach/src/db/label.ts index ebf5d643c37..86fa7bd7c4d 100644 --- a/packages/cockroach/src/db/label.ts +++ b/packages/cockroach/src/db/label.ts @@ -22,11 +22,11 @@ import { type LabelID, type Label } from '@hcengineering/communication-types' +import { Domain, type LabelUpdates, type RemoveLabelQuery } from '@hcengineering/communication-sdk-types' import { BaseDb } from './base' -import { type LabelDb, TableName } from '../schema' import { toLabel } from './mapping' -import type { LabelUpdates, RemoveLabelQuery } from '@hcengineering/communication-sdk-types' +import { DbModel, DbModelFilter, DbModelUpdate } from '../schema' export class LabelsDb extends BaseDb { async createLabel ( @@ -36,7 +36,7 @@ export class LabelsDb extends BaseDb { account: AccountID, created: Date ): Promise { - const db: LabelDb = { + const db: DbModel = { workspace_id: this.workspace, label_id: label, card_id: card, @@ -44,60 +44,78 @@ export class LabelsDb extends BaseDb { account, created } - const sql = `INSERT INTO ${TableName.Label} (workspace_id, label_id, card_id, card_type, account, created) - VALUES ($1::uuid, $2::varchar, $3::varchar, $4::varchar, $5::uuid, $6::timestamptz) - ON CONFLICT DO NOTHING` - await this.execute( - sql, - [db.workspace_id, db.label_id, db.card_id, db.card_type, db.account, db.created], - 'insert label' - ) + const { sql, values } = this.getInsertSql(Domain.Label, db, [], { + conflictColumns: ['workspace_id', 'label_id', 'card_id', 'account'], + conflictAction: 'DO NOTHING' + }) + await this.execute(sql, values, 'insert label') } async removeLabels (query: RemoveLabelQuery): Promise { - const db: Partial = { - label_id: query.labelId, - card_id: query.cardId, - account: query.account - } - - const entries = Object.entries(db).filter(([_, value]) => value !== undefined) + const filter: DbModelFilter = [] - if (entries.length === 0) return + if (query.labelId != null) { + filter.push({ + column: 'label_id', + value: query.labelId + }) + } + if (query.cardId != null) { + filter.push({ + column: 'card_id', + value: query.cardId + }) + } + if (query.account != null) { 
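// Usage sketch for the query builders above (variable names are illustrative, but the
// call shape mirrors the reaction insert later in this patch): every builder returns a
// `{ sql, values }` pair that is passed straight to `execute`.
//
//   const { sql, values } = this.getInsertSql(Domain.Reaction, reactionModel, [], {
//     conflictColumns: ['workspace_id', 'card_id', 'message_id', 'reaction', 'creator'],
//     conflictAction: 'DO NOTHING'
//   })
//   await this.execute(sql, values, 'insert reaction')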
+ filter.push({ + column: 'account', + value: query.account + }) + } - entries.unshift(['workspace_id', this.workspace]) + if (filter.length === 0) return - const whereClauses = entries.map(([key], index) => `${key} = $${index + 1}`) - const whereValues = entries.map(([_, value]) => value) + filter.unshift({ + column: 'workspace_id', + value: this.workspace + }) - const sql = `DELETE - FROM ${TableName.Label} - WHERE ${whereClauses.join(' AND ')}` + const { sql, values } = this.getDeleteSql(Domain.Label, filter) - await this.execute(sql, whereValues, 'remove labels') + await this.execute(sql, values, 'remove labels') } async updateLabels (card: CardID, updates: LabelUpdates): Promise { - const dbData: Partial = { - card_type: updates.cardType - } + const update: DbModelUpdate = [] + + const filter: DbModelFilter = [ + { + column: 'workspace_id', + value: this.workspace + }, + { + column: 'card_id', + value: card + } + ] - const entries = Object.entries(dbData).filter(([_, value]) => value !== undefined) - if (entries.length === 0) return + if (updates.cardType != null) { + update.push({ + column: 'card_type', + value: updates.cardType + }) + } - const setClauses = entries.map(([key], index) => `${key} = $${index + 3}`) - const setValues = entries.map(([_, value]) => value) + if (update.length === 0) return - const sql = `UPDATE ${TableName.Label} - SET ${setClauses.join(', ')} - WHERE workspace_id = $1::uuid AND card_id = $2::varchar` + const { sql, values } = this.getUpdateSql(Domain.Label, filter, update) - await this.execute(sql, [this.workspace, card, ...setValues], 'update labels') + await this.execute(sql, values, 'update labels') } async findLabels (params: FindLabelsParams): Promise { const select = `SELECT * - FROM ${TableName.Label} l` + FROM ${Domain.Label} l` const { where, values } = this.buildWhere(params) diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 08d725db8ca..32da1b4b97a 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -14,8 +14,6 @@ // import { - type AttachedBlob, - type BlobID, type CardID, type Collaborator, type ContextID, @@ -34,40 +32,30 @@ import { type Thread, type Label, type CardType, - type BlobMetadata, type AccountID, - type LinkPreview, - type LinkPreviewID, - type MessageExtra + type MessageExtra, + AttachmentID, + Attachment } from '@hcengineering/communication-types' +import { Domain } from '@hcengineering/communication-sdk-types' import { applyPatches } from '@hcengineering/communication-shared' +import { DbModel } from '../schema' -import { - type FileDb, - type CollaboratorDb, - type ContextDb, - type MessageDb, - type MessagesGroupDb, - type NotificationDb, - type PatchDb, - type ReactionDb, - type ThreadDb, - type LabelDb, - type LinkPreviewDb -} from '../schema' - -interface RawMessage extends MessageDb { +interface RawMessage extends DbModel { thread_id?: CardID thread_type?: CardType replies_count?: number last_reply?: Date - patches?: PatchDb[] - files?: FileDb[] - reactions?: ReactionDb[] - link_previews?: LinkPreviewDb[] + patches?: DbModel[] + attachments?: DbModel[] + reactions?: DbModel[] +} + +interface RawMessageGroup extends DbModel { + patches?: DbModel[] } -interface RawNotification extends NotificationDb { +interface RawNotification extends DbModel { account: AccountID message_id: MessageID message_type?: MessageType @@ -80,18 +68,17 @@ interface RawNotification extends NotificationDb { creator: SocialID created: Date }[] - message_files?: 
{ - blob_id: BlobID + message_attachments?: { + id: AttachmentID type: string - size: number - filename: string - meta?: BlobMetadata + params: Record creator: SocialID created: Date + modified?: Date }[] } -type RawContext = ContextDb & { id: ContextID } & { +type RawContext = DbModel & { id: ContextID } & { notifications?: RawNotification[] } @@ -118,8 +105,7 @@ export function toMessage (raw: RawMessage): Message { } : undefined, reactions: (raw.reactions ?? []).map(toReaction), - blobs: (raw.files ?? []).map(toBlob), - linkPreviews: (raw.link_previews ?? []).map(toLinkPreview) + attachments: (raw.attachments ?? []).map(toAttachment) } if (patches.length === 0) { @@ -129,7 +115,7 @@ export function toMessage (raw: RawMessage): Message { return applyPatches(rawMessage, patches, [PatchType.update, PatchType.remove]) } -export function toReaction (raw: ReactionDb): Reaction { +export function toReaction (raw: DbModel): Reaction { return { reaction: raw.reaction, creator: raw.creator, @@ -137,34 +123,18 @@ export function toReaction (raw: ReactionDb): Reaction { } } -export function toBlob (raw: Omit): AttachedBlob { +export function toAttachment (raw: Omit, 'workspace_id'>): Attachment { return { - blobId: raw.blob_id, - mimeType: raw.type, - fileName: raw.filename, - size: Number(raw.size), - metadata: raw.meta, + id: String(raw.id) as AttachmentID, + type: raw.type, + params: raw.params, creator: raw.creator, - created: new Date(raw.created) - } -} - -export function toLinkPreview (raw: LinkPreviewDb): LinkPreview { - return { - id: String(raw.id) as LinkPreviewID, - url: raw.url, - host: raw.host, - title: raw.title ?? undefined, - description: raw.description ?? undefined, - iconUrl: raw.favicon ?? undefined, - siteName: raw.hostname ?? undefined, - previewImage: raw.image ?? undefined, created: new Date(raw.created), - creator: raw.creator - } + modified: raw.modified != null ? new Date(raw.modified) : undefined + } as any as Attachment } -export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { +export function toMessagesGroup (raw: RawMessageGroup): MessagesGroup { const patches = raw.patches == null ? [] @@ -183,7 +153,7 @@ export function toMessagesGroup (raw: MessagesGroupDb): MessagesGroup { } } -export function toPatch (raw: Omit): Patch { +export function toPatch (raw: Omit, 'workspace_id' | 'message_created'>): Patch { return { type: raw.type, messageId: String(raw.message_id) as MessageID, @@ -193,7 +163,7 @@ export function toPatch (raw: Omit) } } -export function toThread (raw: ThreadDb): Thread { +export function toThread (raw: DbModel): Thread { return { cardId: raw.card_id, messageId: String(raw.message_id) as MessageID, @@ -242,9 +212,9 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): raw.message_created != null && raw.message_type != null ) { - const messageBlobs = raw.message_files + const attachments = raw.message_attachments ?.map((it) => - toBlob({ + toAttachment({ card_id: card, message_id: raw.message_id, ...it @@ -263,8 +233,7 @@ function toNotificationRaw (id: ContextID, card: CardID, raw: RawNotification): created: new Date(raw.message_created), edited: undefined, reactions: [], - blobs: messageBlobs ?? [], - linkPreviews: [] + attachments: attachments ?? 
[] } if (patches.length > 0) { @@ -308,7 +277,7 @@ export function toNotification (raw: RawNotification & { card_id: CardID }): Not return toNotificationRaw(raw.context_id, raw.card_id, raw) } -export function toCollaborator (raw: CollaboratorDb): Collaborator { +export function toCollaborator (raw: DbModel): Collaborator { return { account: raw.account, cardType: raw.card_type, @@ -316,7 +285,7 @@ export function toCollaborator (raw: CollaboratorDb): Collaborator { } } -export function toLabel (raw: LabelDb): Label { +export function toLabel (raw: DbModel): Label { return { labelId: raw.label_id, cardId: raw.card_id, diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index aab222c324b..d0cb09218af 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -14,21 +14,17 @@ // import { + AddAttachmentsPatchData, AddReactionPatchData, - AttachBlobsPatchData, - AttachLinkPreviewsPatchData, + AttachmentData, + AttachmentID, + AttachmentUpdateData, AttachThreadPatchData, - type BlobData, type BlobID, - BlobUpdateData, type CardID, type CardType, - DetachBlobsPatchData, - DetachLinkPreviewsPatchData, type FindMessagesGroupsParams, type FindMessagesParams, - type LinkPreviewData, - type LinkPreviewID, type Markdown, type Message, type MessageExtra, @@ -36,82 +32,82 @@ import { type MessagesGroup, type MessageType, PatchType, + RemoveAttachmentsPatchData, RemoveReactionPatchData, - SetLinkPreviewsPatchData, + SetAttachmentsPatchData, type SocialID, SortingOrder, type Thread, - UpdateBlobsPatchData, + UpdateAttachmentsPatchData, UpdateThreadPatchData } from '@hcengineering/communication-types' -import type { ThreadUpdates, ThreadQuery } from '@hcengineering/communication-sdk-types' +import { Domain, type ThreadQuery, type ThreadUpdates } from '@hcengineering/communication-sdk-types' import postgres from 'postgres' import { BaseDb } from './base' -import { - type MessageDb, - messageSchema, - type MessagesGroupDb, - type PatchDb, - type ReactionDb, - TableName, - type ThreadDb -} from '../schema' +import { DbModel, DbModelColumn, DbModelFilter, schemas } from '../schema' import { getCondition } from './utils' import { toMessage, toMessagesGroup, toThread } from './mapping' export class MessagesDb extends BaseDb { // Message async createMessage ( - id: MessageID, cardId: CardID, + id: MessageID, type: MessageType, content: Markdown, extra: MessageExtra | undefined, creator: SocialID, created: Date ): Promise { - const db: MessageDb = { - type, + const messageDbModel: DbModel = { workspace_id: this.workspace, card_id: cardId, + id, + type, content, creator, created, - data: extra, - id + data: extra } - - const values: any[] = [] - const keys: string[] = [] - - for (const key in db) { - const value: any = (db as any)[key] - if (value == null) continue - keys.push(key) - values.push(value) + const messageCreatedDbModel: DbModel = { + workspace_id: this.workspace, + card_id: cardId, + message_id: id, + created } - - const placeholders = keys.map((key, i) => `$${i + 1}::${(messageSchema as any)[key]}`) - - const insertSql = `INSERT INTO ${TableName.Message} (${keys.join(', ')}) - VALUES (${placeholders.join(', ')}) - RETURNING id::text, created` + const insertMessageCreatedSql = this.getInsertSql(Domain.MessageCreated, messageCreatedDbModel, [], { + conflictColumns: ['workspace_id', 'card_id', 'message_id'], + conflictAction: 'DO NOTHING' + }) + const insertMessageSql = this.getInsertSql(Domain.Message, messageDbModel, [ + { + 
column: 'id', + cast: 'text' + }, + { + column: 'created', + cast: 'timestamptz' + } + ]) return await this.getRowClient().begin(async (s) => { - const sql = `INSERT INTO ${TableName.MessageCreated} (workspace_id, card_id, message_id, created) - VALUES ($1::uuid, $2::varchar, $3::varchar, $4::timestamptz) - ON CONFLICT (workspace_id, card_id, message_id) DO NOTHING` - const result = await s.unsafe(sql, [this.workspace, cardId, db.id, created]) + const result = await this.execute( + insertMessageCreatedSql.sql, + insertMessageCreatedSql.values, + 'insert message created', + s + ) if (result.count === 0) { return false } - await s.unsafe(insertSql, values) + await this.execute(insertMessageSql.sql, insertMessageSql.values, 'insert message', s) return true }) } + // Patch async createPatch ( cardId: CardID, messageId: MessageID, @@ -121,7 +117,7 @@ export class MessagesDb extends BaseDb { created: Date, client?: postgres.TransactionSql ): Promise { - const db: Omit = { + const dbModel: Omit, 'message_created'> = { workspace_id: this.workspace, card_id: cardId, message_id: messageId, @@ -131,409 +127,213 @@ export class MessagesDb extends BaseDb { created } + const schema = schemas[Domain.Patch] + const columns = Object.keys(dbModel) as Array, 'message_created'>> + + const values = columns.map((c) => dbModel[c]) + const placeholders = columns.map((c, i) => { + const sqlType = (schema as any)[c] + return `$${i + 1}::${sqlType}` + }) + const sql = ` - INSERT INTO ${TableName.Patch} ( - workspace_id, card_id, message_id, - type, data, creator, created, message_created - ) - SELECT - $1::uuid, $2::varchar, $3::varchar, - $4::varchar, $5::jsonb, $6::varchar, $7::timestamptz, - mc.created - FROM ${TableName.MessageCreated} mc - WHERE mc.workspace_id = $1::uuid - AND mc.card_id = $2::varchar - AND mc.message_id = $3::varchar + INSERT INTO ${Domain.Patch} (${columns.join(', ')}, message_created) + SELECT ${placeholders.join(', ')}, mc.created + FROM ${Domain.MessageCreated} mc + WHERE mc.workspace_id = $1::${schema.workspace_id} + AND mc.card_id = $2::${schema.card_id} + AND mc.message_id = $3::${schema.message_id} ` - await this.execute( - sql, - [this.workspace, db.card_id, db.message_id, db.type, db.data, db.creator, db.created], - 'insert patch', - client - ) + await this.execute(sql, values, 'insert patch', client) } - // Blob - async attachBlobs ( + // Attachment + async addAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + attachments: AttachmentData[], socialId: SocialID, date: Date ): Promise { - if (blobs.length === 0) return + if (attachments.length === 0) return - const values: any[] = [] - const placeholders: string[] = [] - - blobs.forEach((blob, i) => { - const baseIndex = i * 10 - placeholders.push(`($${baseIndex + 1}::uuid, $${baseIndex + 2}::varchar, $${baseIndex + 3}::varchar, $${baseIndex + 4}::uuid, - $${baseIndex + 5}::varchar, $${baseIndex + 6}::varchar, $${baseIndex + 7}::varchar, - $${baseIndex + 8}::timestamptz, $${baseIndex + 9}::int8, $${baseIndex + 10}::jsonb)`) - - values.push( - this.workspace, - cardId, - messageId, - blob.blobId, - blob.mimeType, - blob.fileName, - socialId, - date, - blob.size, - blob.metadata ?? 
{} - ) - }) + const models: DbModel[] = attachments.map((att) => ({ + workspace_id: this.workspace, + card_id: cardId, + message_id: messageId, + id: att.id, + type: att.type, + params: att.params, + creator: socialId, + created: date + })) - const insertSql = ` - INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, - type, filename, creator, created, size, meta) - VALUES ${placeholders.join(', ')}` + const { sql, values } = this.getBatchInsertSql(Domain.Attachment, models) const inDb = await this.isMessageInDb(cardId, messageId) if (!inDb) { await this.getRowClient().begin(async (s) => { - await this.execute(insertSql, values, 'insert files', s) + await this.execute(sql, values, 'insert attachments', s) - const data: AttachBlobsPatchData = { - operation: 'attach', - blobs + const data: AddAttachmentsPatchData = { + operation: 'add', + attachments } - await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) + await this.createPatch(cardId, messageId, PatchType.attachment, data, socialId, date, s) return true }) } else { - await this.execute(insertSql, values, 'insert files') + await this.execute(sql, values, 'insert attachments') } } - async detachBlobs ( + async removeAttachments ( cardId: CardID, messageId: MessageID, - blobIds: BlobID[], + ids: AttachmentID[], socialId: SocialID, date: Date ): Promise { - if (blobIds.length === 0) return + if (ids.length === 0) return - const sql = ` - DELETE FROM ${TableName.File} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - AND blob_id = ANY($4::uuid[]) - ` + const { sql, values } = this.getDeleteSql(Domain.Attachment, [ + { column: 'workspace_id', value: this.workspace }, + { column: 'card_id', value: cardId }, + { column: 'message_id', value: messageId }, + { column: 'id', value: ids.length === 1 ? 
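// Call-shape sketch for the unified attachment API above (illustrative only: the `type`
// tag and the `params` keys are placeholders, not values defined by this patch). The
// adapter stores `id`, `type` and `params` as given and adds `creator`/`created` itself.
//
//   await db.addAttachments(cardId, messageId, [
//     { id: attachmentId, type: 'file', params: { blobId, fileName: 'diagram.png' } }
//   ], socialId, new Date())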
ids[0] : ids } + ]) const inDb = await this.isMessageInDb(cardId, messageId) if (!inDb) { await this.getRowClient().begin(async (s) => { - await this.execute(sql, [this.workspace, cardId, messageId, blobIds], 'remove files', s) + await this.execute(sql, values, 'remove attachments', s) - const data: DetachBlobsPatchData = { - operation: 'detach', - blobIds + const data: RemoveAttachmentsPatchData = { + operation: 'remove', + ids } - await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) + await this.createPatch(cardId, messageId, PatchType.attachment, data, socialId, date, s) return true }) } else { - await this.execute(sql, [this.workspace, cardId, messageId, blobIds], 'remove files') + await this.execute(sql, values, 'delete attachments') } } - async setBlobs ( + async setAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + attachments: AttachmentData[], socialId: SocialID, date: Date ): Promise { - if (blobs.length === 0) return + if (attachments.length === 0) return + const { sql: deleteSql, values: deleteValues } = this.getDeleteSql(Domain.Attachment, [ + { column: 'workspace_id', value: this.workspace }, + { column: 'card_id', value: cardId }, + { column: 'message_id', value: messageId } + ]) + + const models: DbModel[] = attachments.map((att) => ({ + workspace_id: this.workspace, + card_id: cardId, + message_id: messageId, + id: att.id, + type: att.type, + params: att.params, + creator: socialId, + created: date + })) - const values: any[] = [] - const placeholders: string[] = [] - - blobs.forEach((blob, i) => { - const baseIndex = i * 10 - placeholders.push(`($${baseIndex + 1}::uuid, $${baseIndex + 2}::varchar, $${baseIndex + 3}::varchar, $${baseIndex + 4}::uuid, - $${baseIndex + 5}::varchar, $${baseIndex + 6}::varchar, $${baseIndex + 7}::varchar, - $${baseIndex + 8}::timestamptz, $${baseIndex + 9}::int8, $${baseIndex + 10}::jsonb)`) - - values.push( - this.workspace, - cardId, - messageId, - blob.blobId, - blob.mimeType, - blob.fileName, - socialId, - date, - blob.size, - blob.metadata ?? 
{} - ) - }) + const { sql: insertSql, values: insertValues } = this.getBatchInsertSql(Domain.Attachment, models) + + const inDb = await this.isMessageInDb(cardId, messageId) - const insertSql = ` - INSERT INTO ${TableName.File} (workspace_id, card_id, message_id, blob_id, - type, filename, creator, created, size, meta) - VALUES ${placeholders.join(', ')}` - const deleteSql = ` - DELETE FROM ${TableName.File} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - ` await this.getRowClient().begin(async (s) => { - await this.execute(deleteSql, [this.workspace, cardId, messageId], 'delete blobs', s) - await this.execute(insertSql, values, 'insert blobs', s) + await this.execute(deleteSql, deleteValues, 'delete attachments', s) + await this.execute(insertSql, insertValues, 'insert attachments', s) - const data: AttachBlobsPatchData = { - operation: 'attach', - blobs + if (!inDb) { + const data: SetAttachmentsPatchData = { + operation: 'set', + attachments + } + await this.createPatch(cardId, messageId, PatchType.attachment, data, socialId, date, s) } - - await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, s) - - return true - }) - } - - async updateBlobs ( - cardId: CardID, - messageId: MessageID, - blobs: BlobUpdateData[], - socialId: SocialID, - date: Date - ): Promise { - if (blobs.length === 0) return - - const colMap = { - mimeType: { col: 'type', cast: '::varchar' }, - fileName: { col: 'filename', cast: '::varchar' }, - size: { col: 'size', cast: '::int8' }, - metadata: { col: 'meta', cast: '::jsonb' } - } as const - type UpdateKey = keyof typeof colMap - const updateKeys = Object.keys(colMap) as UpdateKey[] - - const params: any[] = [this.workspace, cardId, messageId] - - const rowLen = 1 + updateKeys.length - - const tuples = blobs.map((blob, i) => { - params.push(blob.blobId) - updateKeys.forEach((k) => params.push(blob[k] ?? 
null)) - - const offset = 3 + i * rowLen - const casts = ['::uuid', ...updateKeys.map((k) => colMap[k].cast)] - const placeholders = casts.map((cast, idx) => `$${offset + idx + 1}${cast}`) - return `(${placeholders.join(', ')})` - }) - - const setClauses = updateKeys.map((k) => { - const col = colMap[k].col - return `${col} = COALESCE(v.${col}, f.${col})` }) - - const updateSql = ` - UPDATE ${TableName.File} AS f - SET ${setClauses.join(',\n ')} - FROM (VALUES ${tuples.join(',\n ')}) AS v(blob_id, ${updateKeys.map((k) => colMap[k].col).join(', ')}) - WHERE f.workspace_id = $1::uuid - AND f.card_id = $2::varchar - AND f.message_id = $3::varchar - AND f.blob_id = v.blob_id; - ` - - const inDb = await this.isMessageInDb(cardId, messageId) - if (!inDb) { - await this.getRowClient().begin(async (txn) => { - await this.execute(updateSql, params, 'update blobs', txn) - const data: UpdateBlobsPatchData = { operation: 'update', blobs } - await this.createPatch(cardId, messageId, PatchType.blob, data, socialId, date, txn) - }) - } else { - await this.execute(updateSql, params, 'update blobs') - } } - async attachLinkPreviews ( + async updateAttachments ( cardId: CardID, messageId: MessageID, - previews: (LinkPreviewData & { previewId: LinkPreviewID })[], + attachments: AttachmentUpdateData[], socialId: SocialID, date: Date ): Promise { - if (previews.length === 0) return - - const values: any[] = [] - const placeholders: string[] = [] - - previews.forEach((preview, i) => { - const base = i * 12 - placeholders.push(`($${base + 1}::uuid, $${base + 2}::varchar, $${base + 3}::varchar, $${base + 4}::varchar, - $${base + 5}::varchar, $${base + 6}::varchar, $${base + 7}::varchar, - $${base + 8}::varchar, $${base + 9}::varchar, $${base + 10}::jsonb, - $${base + 11}::varchar, $${base + 12}::timestamptz, $${base + 13}::int8)`) - - values.push( - this.workspace, - cardId, - messageId, - preview.url, - preview.host, - preview.title ?? null, - preview.description ?? null, - preview.iconUrl ?? null, - preview.siteName ?? null, - preview.previewImage ?? 
null, - socialId, - date, - preview.previewId - ) - }) - - const insertSql = ` - INSERT INTO ${TableName.LinkPreview} ( - workspace_id, card_id, message_id, url, host, title, description, - favicon, hostname, image, creator, created, id - ) VALUES ${placeholders.join(', ')}` - - const inDb = await this.isMessageInDb(cardId, messageId) - if (!inDb) { - await this.getRowClient().begin(async (s) => { - await this.execute(insertSql, values, 'insert link previews', s) + if (attachments.length === 0) return + + const filter: DbModelFilter = [ + { column: 'workspace_id', value: this.workspace }, + { column: 'card_id', value: cardId }, + { column: 'message_id', value: messageId } + ] + + const updates: Array<{ + key: AttachmentID + column: DbModelColumn + innerKey?: string + value: any + }> = [] + + for (const att of attachments) { + if (Object.keys(att.params).length > 0) { + const attachmentUpdates: Array<{ + key: AttachmentID + column: DbModelColumn + innerKey?: string + value: any + }> = [] + for (const [innerKey, val] of Object.entries(att.params)) { + attachmentUpdates.push({ + key: att.id, + column: 'params', + innerKey, + value: val + }) + } - const data: AttachLinkPreviewsPatchData = { - operation: 'attach', - previews + if (attachmentUpdates.length > 0) { + attachmentUpdates.push({ + key: att.id, + column: 'modified', + value: date + }) + updates.push(...attachmentUpdates) } - await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) - }) - } else { - await this.execute(insertSql, values, 'insert link previews') + } } - } - async detachLinkPreviews ( - cardId: CardID, - messageId: MessageID, - previewIds: LinkPreviewID[], - socialId: SocialID, - date: Date - ): Promise { - if (previewIds.length === 0) return + if (updates.length === 0) return - const sql = - previewIds.length > 1 - ? ` - DELETE FROM ${TableName.LinkPreview} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - AND id = ANY($4::int8[]) - ` - : ` - DELETE FROM ${TableName.LinkPreview} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - AND id = $4::int8 - ` + const { sql, values } = this.getBatchUpdateSql(Domain.Attachment, 'id', filter, updates) const inDb = await this.isMessageInDb(cardId, messageId) - if (!inDb) { await this.getRowClient().begin(async (s) => { - await this.execute( - sql, - [this.workspace, cardId, messageId, previewIds.length === 1 ? previewIds[0] : previewIds], - 'remove link previews', - s - ) + await this.execute(sql, values, 'update attachments', s) - const data: DetachLinkPreviewsPatchData = { - operation: 'detach', - previewIds + const data: UpdateAttachmentsPatchData = { + operation: 'update', + attachments } - - await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) - - return true + await this.createPatch(cardId, messageId, PatchType.attachment, data, socialId, date, s) }) } else { - await this.execute( - sql, - [this.workspace, cardId, messageId, previewIds.length === 1 ? 
previewIds[0] : previewIds], - 'remove link previews' - ) + await this.execute(sql, values, 'update attachments') } } - public async setLinkPreviews ( - cardId: CardID, - messageId: MessageID, - previews: (LinkPreviewData & { previewId: LinkPreviewID })[], - socialId: SocialID, - date: Date - ): Promise { - if (previews.length === 0) return - const deleteSql = ` - DELETE FROM ${TableName.LinkPreview} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - ` - - const values: any[] = [] - const placeholders: string[] = [] - - previews.forEach((preview, i) => { - const base = i * 12 - placeholders.push(`($${base + 1}::uuid, $${base + 2}::varchar, $${base + 3}::varchar, $${base + 4}::varchar, - $${base + 5}::varchar, $${base + 6}::varchar, $${base + 7}::varchar, - $${base + 8}::varchar, $${base + 9}::varchar, $${base + 10}::jsonb, - $${base + 11}::varchar, $${base + 12}::timestamptz, $${base + 13}::int8)`) - - values.push( - this.workspace, - cardId, - messageId, - preview.url, - preview.host, - preview.title ?? null, - preview.description ?? null, - preview.iconUrl ?? null, - preview.siteName ?? null, - preview.previewImage ?? null, - socialId, - date, - preview.previewId - ) - }) - - const insertSql = `INSERT INTO ${TableName.LinkPreview} ( - workspace_id, card_id, message_id, url, host, title, description, - favicon, hostname, image, creator, created, id - ) VALUES ${placeholders.join(', ')} ` - - await this.getRowClient().begin(async (s) => { - await this.execute(deleteSql, [this.workspace, cardId, messageId], 'delete link previews', s) - await this.execute(insertSql, values, 'insert new link previews', s) - - const data: SetLinkPreviewsPatchData = { - operation: 'set', - previews - } - - await this.createPatch(cardId, messageId, PatchType.linkPreview, data, socialId, date, s) - - return true - }) - } - // Reaction async addReaction ( cardId: CardID, @@ -544,7 +344,7 @@ export class MessagesDb extends BaseDb { ): Promise { const inDb = await this.isMessageInDb(cardId, messageId) if (inDb) { - const db: ReactionDb = { + const db: DbModel = { workspace_id: this.workspace, card_id: cardId, message_id: messageId, @@ -552,15 +352,13 @@ export class MessagesDb extends BaseDb { creator, created } - const sql = `INSERT INTO ${TableName.Reaction} (workspace_id, card_id, message_id, reaction, creator, created) - VALUES ($1::uuid, $2::varchar, $3::varchar, $4::varchar, $5::varchar, $6::timestamptz) - ON CONFLICT DO NOTHING` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.reaction, db.creator, db.created], - 'insert reaction' - ) + const { sql, values } = this.getInsertSql(Domain.Reaction, db, [], { + conflictColumns: ['workspace_id', 'card_id', 'message_id', 'reaction', 'creator'], + conflictAction: 'DO NOTHING' + }) + + await this.execute(sql, values, 'insert reaction') } else { const data: AddReactionPatchData = { operation: 'add', @@ -579,14 +377,14 @@ export class MessagesDb extends BaseDb { ): Promise { const inDb = await this.isMessageInDb(cardId, messageId) if (inDb) { - const sql = `DELETE - FROM ${TableName.Reaction} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND message_id = $3::varchar - AND reaction = $4::varchar - AND creator = $5::varchar` - await this.execute(sql, [this.workspace, cardId, messageId, reaction, socialId], 'remove reaction') + const { sql, values } = this.getDeleteSql(Domain.Reaction, [ + { column: 'workspace_id', value: this.workspace }, + { column: 'card_id', value: cardId }, + { 
column: 'message_id', value: messageId }, + { column: 'reaction', value: reaction }, + { column: 'creator', value: socialId } + ]) + await this.execute(sql, values, 'remove reaction') } else { const data: RemoveReactionPatchData = { operation: 'remove', @@ -605,7 +403,7 @@ export class MessagesDb extends BaseDb { socialId: SocialID, date: Date ): Promise { - const db: ThreadDb = { + const db: DbModel = { workspace_id: this.workspace, card_id: cardId, message_id: messageId, @@ -614,20 +412,13 @@ export class MessagesDb extends BaseDb { replies_count: 0, last_reply: date } - const sql = `INSERT INTO ${TableName.Thread} (workspace_id, card_id, message_id, thread_id, thread_type, - replies_count, - last_reply) - VALUES ($1::uuid, $2::varchar, $3::varchar, $4::varchar, $5::varchar, $6::int, $7::timestamptz)` + + const { sql, values } = this.getInsertSql(Domain.Thread, db) const inDb = await this.isMessageInDb(cardId, messageId) if (!inDb) { await this.getRowClient().begin(async (s) => { - await this.execute( - sql, - [db.workspace_id, db.card_id, db.message_id, db.thread_id, db.thread_type, db.replies_count, db.last_reply], - 'insert thread', - s - ) + await this.execute(sql, values, 'insert thread', s) const data: AttachThreadPatchData = { operation: 'attach', @@ -677,7 +468,7 @@ export class MessagesDb extends BaseDb { if (set.length === 0) return - const updateSql = `UPDATE ${TableName.Thread}` + const updateSql = `UPDATE ${Domain.Thread}` const setSql = 'SET ' + set.join(', ') const where = `WHERE workspace_id = $${index++}::uuid AND thread_id = $${index++}::varchar AND card_id = $${index++}::varchar AND message_id = $${index++}::varchar` const sql = [updateSql, setSql, where].join(' ') @@ -714,31 +505,25 @@ export class MessagesDb extends BaseDb { } async removeThreads (query: ThreadQuery): Promise { - const db: Partial = { - card_id: query.cardId, - message_id: query.messageId, - thread_id: query.threadId - } - - const entries = Object.entries(db).filter(([_, value]) => value !== undefined) - - if (entries.length === 0) return - - entries.unshift(['workspace_id', this.workspace]) + const filter: DbModelFilter = [ + { + column: 'workspace_id', + value: this.workspace + } + ] - const whereClauses = entries.map(([key], index) => `${key} = $${index + 1}`) - const whereValues = entries.map(([_, value]) => value) + if (query.cardId != null) filter.push({ column: 'card_id', value: query.cardId }) + if (query.messageId != null) filter.push({ column: 'message_id', value: query.messageId }) + if (query.threadId != null) filter.push({ column: 'thread_id', value: query.threadId }) - const sql = `DELETE - FROM ${TableName.Thread} - WHERE ${whereClauses.join(' AND ')}` + const { sql, values } = this.getDeleteSql(Domain.Thread, filter) - await this.execute(sql, whereValues, 'remove threads') + await this.execute(sql, values, 'remove threads') } // MessagesGroup async createMessagesGroup (card: CardID, blobId: BlobID, fromDate: Date, toDate: Date, count: number): Promise { - const db: MessagesGroupDb = { + const db: DbModel = { workspace_id: this.workspace, card_id: card, blob_id: blobId, @@ -747,22 +532,26 @@ export class MessagesDb extends BaseDb { count } - const sql = `INSERT INTO ${TableName.MessagesGroup} (workspace_id, card_id, blob_id, from_date, to_date, count) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::int)` - await this.execute( - sql, - [db.workspace_id, db.card_id, db.blob_id, db.from_date, db.to_date, db.count], - 'insert messages group' - ) + const { 
sql, values } = this.getInsertSql(Domain.MessagesGroup, db) + await this.execute(sql, values, 'insert messages group') } async removeMessagesGroup (card: CardID, blobId: BlobID): Promise { - const sql = `DELETE - FROM ${TableName.MessagesGroup} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND blob_id = $3::uuid` - await this.execute(sql, [this.workspace, card, blobId], 'remove messages group') + const { sql, values } = this.getDeleteSql(Domain.MessagesGroup, [ + { + column: 'workspace_id', + value: this.workspace + }, + { + column: 'card_id', + value: card + }, + { + column: 'blob_id', + value: blobId + } + ]) + await this.execute(sql, values, 'remove messages group') } async find (params: FindMessagesParams): Promise { @@ -773,8 +562,7 @@ export class MessagesDb extends BaseDb { const sql = ` WITH ${this.buildCteLimitedMessages(where, orderBy, limit)} - ${this.buildCteAggregatedFiles(params)} - ${this.buildCteAggregatedLinkPreviews(params)} + ${this.buildCteAggregatedAttachments(params)} ${this.buildCteAggregatedReactions(params)} ${this.buildCteAggregatedPatches()} ${this.buildMainSelect(params)} @@ -796,7 +584,7 @@ export class MessagesDb extends BaseDb { return ` limited_messages AS ( SELECT * - FROM ${TableName.Message} m + FROM ${Domain.Message} m ${where} ${orderBy} ${limit} @@ -804,59 +592,28 @@ export class MessagesDb extends BaseDb { ` } - private buildCteAggregatedFiles (params: FindMessagesParams): string { - if (params.files !== true) return '' - return `, - agg_files AS ( - SELECT - f.workspace_id, - f.card_id, - f.message_id, - jsonb_agg(jsonb_build_object( - 'blob_id', f.blob_id, - 'type', f.type, - 'size', f.size, - 'filename', f.filename, - 'meta', f.meta, - 'creator', f.creator, - 'created', f.created - )) AS files - FROM ${TableName.File} f - INNER JOIN limited_messages m - ON m.workspace_id = f.workspace_id - AND m.card_id = f.card_id - AND m.id = f.message_id - GROUP BY f.workspace_id, f.card_id, f.message_id - ) - ` - } - - private buildCteAggregatedLinkPreviews (params: FindMessagesParams): string { - if (params.links !== true) return '' + private buildCteAggregatedAttachments (params: FindMessagesParams): string { + if (params.attachments !== true) return '' return `, - agg_link_previews AS ( + agg_attachments AS ( SELECT - l.workspace_id, - l.card_id, - l.message_id, + a.workspace_id, + a.card_id, + a.message_id, jsonb_agg(jsonb_build_object( - 'id', l.id::text, - 'url', l.url, - 'host', l.host, - 'title', l.title, - 'description', l.description, - 'favicon', l.favicon, - 'hostname', l.hostname, - 'image', l.image, - 'creator', l.creator, - 'created', l.created - )) AS link_previews - FROM ${TableName.LinkPreview} l + 'id', a.id, + 'type', a.type, + 'params', a.params, + 'creator', a.creator, + 'created', a.created, + 'modified', a.modified + )) AS attachments + FROM ${Domain.Attachment} a INNER JOIN limited_messages m - ON m.workspace_id = l.workspace_id - AND m.card_id = l.card_id - AND m.id = l.message_id - GROUP BY l.workspace_id, l.card_id, l.message_id + ON m.workspace_id = a.workspace_id + AND m.card_id = a.card_id + AND m.id = a.message_id + GROUP BY a.workspace_id, a.card_id, a.message_id ) ` } @@ -874,7 +631,7 @@ export class MessagesDb extends BaseDb { 'creator', r.creator, 'created', r.created )) AS reactions - FROM ${TableName.Reaction} r + FROM ${Domain.Reaction} r INNER JOIN limited_messages m ON m.workspace_id = r.workspace_id AND m.card_id = r.card_id @@ -899,7 +656,7 @@ export class MessagesDb extends BaseDb { 'created', 
p.created ) ORDER BY p.created ASC ) AS patches - FROM ${TableName.Patch} p + FROM ${Domain.Patch} p INNER JOIN limited_messages m ON m.workspace_id = p.workspace_id AND m.card_id = p.card_id @@ -916,31 +673,21 @@ export class MessagesDb extends BaseDb { ? 't.thread_id as thread_id, t.thread_type as thread_type, t.replies_count::int as replies_count, t.last_reply as last_reply,' : '' - const selectFiles = params.files === true ? "COALESCE(f.files, '[]'::jsonb) AS files," : "'[]'::jsonb AS files," - const selectLinks = - params.links === true - ? "COALESCE(l.link_previews, '[]'::jsonb) AS link_previews," - : "'[]'::jsonb AS link_previews," + const selectAttachments = + params.attachments === true + ? "COALESCE(a.attachments, '[]'::jsonb) AS attachments," + : "'[]'::jsonb AS attachments," const selectReactions = params.reactions === true ? "COALESCE(r.reactions, '[]'::jsonb) AS reactions," : "'[]'::jsonb AS reactions," - const joinFiles = - params.files === true - ? ` - LEFT JOIN agg_files f - ON f.workspace_id = m.workspace_id - AND f.card_id = m.card_id - AND f.message_id = m.id` - : '' - - const joinLinks = - params.links === true + const joinAttachments = + params.attachments === true ? ` - LEFT JOIN agg_link_previews l - ON l.workspace_id = m.workspace_id - AND l.card_id = m.card_id - AND l.message_id = m.id` + LEFT JOIN agg_attachments a + ON a.workspace_id = m.workspace_id + AND a.card_id = m.card_id + AND a.message_id = m.id` : '' const joinReactions = @@ -961,18 +708,16 @@ export class MessagesDb extends BaseDb { m.created, m.data, ${selectReplies} - ${selectFiles} - ${selectLinks} + ${selectAttachments} ${selectReactions} COALESCE(p.patches, '[]'::jsonb) AS patches FROM limited_messages m - LEFT JOIN ${TableName.Thread} t + LEFT JOIN ${Domain.Thread} t ON t.workspace_id = m.workspace_id AND t.card_id = m.card_id AND t.message_id = m.id - ${joinFiles} - ${joinLinks} - ${joinReactions} + ${joinAttachments} + ${joinReactions} LEFT JOIN agg_patches p ON p.workspace_id = m.workspace_id AND p.card_id = m.card_id @@ -1016,7 +761,7 @@ export class MessagesDb extends BaseDb { t.thread_type, t.replies_count::int, t.last_reply - FROM ${TableName.Thread} t + FROM ${Domain.Thread} t WHERE t.workspace_id = $1::uuid AND t.thread_id = $2::varchar LIMIT 1;` @@ -1035,7 +780,7 @@ export class MessagesDb extends BaseDb { ? ` WITH msg_created AS ( SELECT card_id, created - FROM ${TableName.MessageCreated} + FROM ${Domain.MessageCreated} WHERE workspace_id = $1::uuid AND message_id = $2::varchar ) @@ -1050,7 +795,7 @@ export class MessagesDb extends BaseDb { mg.to_date, mg.count, patches - FROM ${TableName.MessagesGroup} mg + FROM ${Domain.MessagesGroup} mg ${useMessageIdCte ? 
'JOIN msg_created mc ON mg.card_id = mc.card_id AND mc.created BETWEEN mg.from_date AND mg.to_date' : ''} CROSS JOIN LATERAL ( SELECT jsonb_agg(jsonb_build_object( @@ -1060,7 +805,7 @@ export class MessagesDb extends BaseDb { 'creator', p.creator, 'created', p.created )) AS patches - FROM ${TableName.Patch} p + FROM ${Domain.Patch} p WHERE p.workspace_id = mg.workspace_id AND p.card_id = mg.card_id AND p.message_created BETWEEN mg.from_date AND mg.to_date @@ -1131,7 +876,7 @@ export class MessagesDb extends BaseDb { public async isMessageInDb (cardId: CardID, messageId: MessageID): Promise { const sql = ` SELECT 1 - FROM ${TableName.Message} m + FROM ${Domain.Message} m WHERE m.workspace_id = $1::uuid AND m.card_id = $2::varchar AND m.id = $3::varchar @@ -1144,7 +889,7 @@ export class MessagesDb extends BaseDb { public async getMessageCreated (cardId: CardID, messageId: MessageID): Promise { const select = `SELECT mc.created - FROM ${TableName.MessageCreated} mc + FROM ${Domain.MessageCreated} mc WHERE mc.workspace_id = $1::uuid AND mc.card_id = $2::varchar AND mc.message_id = $3::varchar diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index df641dfa8e3..111a570dc1b 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -32,34 +32,35 @@ import { } from '@hcengineering/communication-types' import { BaseDb } from './base' -import { type CollaboratorDb, type ContextDb, type NotificationDb, TableName } from '../schema' import { getCondition } from './utils' import { toCollaborator, toNotification, toNotificationContext } from './mapping' -import type { - NotificationContextUpdates, - NotificationUpdates, - UpdateNotificationQuery +import { + Domain, + type NotificationContextUpdates, + type NotificationUpdates, + type UpdateNotificationQuery } from '@hcengineering/communication-sdk-types' +import { DbModel, DbModelFilter, DbModelUpdate } from '../schema' export class NotificationsDb extends BaseDb { async addCollaborators ( card: CardID, cardType: CardType, collaborators: AccountID[], - date?: Date + date: Date ): Promise { if (collaborators.length === 0) return [] - const values: any[] = [] - - const sqlValues = collaborators - .map((account, index) => { - const i = index * 5 - values.push(this.workspace, card, account, date ?? 
new Date(), cardType) - return `($${i + 1}::uuid, $${i + 2}::varchar, $${i + 3}::uuid, $${i + 4}::timestamptz, $${i + 5}::varchar)` - }) - .join(', ') - - const sql = `INSERT INTO ${TableName.Collaborators} (workspace_id, card_id, account, date, card_type) VALUES ${sqlValues} ON CONFLICT DO NOTHING RETURNING account` + const models: DbModel[] = collaborators.map((account, index) => ({ + workspace_id: this.workspace, + card_id: card, + account, + date, + card_type: cardType + })) + const { sql, values } = this.getBatchInsertSql(Domain.Collaborator, models, [{ column: 'account', cast: 'text' }], { + conflictColumns: ['workspace_id', 'card_id', 'account'], + conflictAction: 'DO NOTHING' + }) const result = await this.execute(sql, values, 'insert collaborators') return result.map((it: any) => it.account) @@ -67,24 +68,15 @@ export class NotificationsDb extends BaseDb { async removeCollaborators (card: CardID, accounts: AccountID[], unsafe = false): Promise { if (accounts.length === 0 && unsafe) { - const sql = `DELETE FROM ${TableName.Collaborators} WHERE workspace_id = $1::uuid AND card_id = $2::varchar` + const sql = `DELETE FROM ${Domain.Collaborator} WHERE workspace_id = $1::uuid AND card_id = $2::varchar` await this.execute(sql, [this.workspace, card], 'remove collaborators') - } else if (accounts.length === 1) { - const sql = `DELETE - FROM ${TableName.Collaborators} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND account = $3::uuid` - await this.execute(sql, [this.workspace, card, accounts[0]], 'remove collaborator') } else { - const inValues = accounts.map((_, index) => `$${index + 3}`).join(', ') - const sql = `DELETE - FROM ${TableName.Collaborators} - WHERE workspace_id = $1::uuid - AND card_id = $2::varchar - AND account IN (${inValues})` - - await this.execute(sql, [this.workspace, card, accounts], 'remove collaborators') + const { sql, values } = this.getDeleteSql(Domain.Collaborator, [ + { column: 'workspace_id', value: this.workspace }, + { column: 'card_id', value: card }, + { column: 'account', value: accounts } + ]) + await this.execute(sql, values, 'remove collaborator') } } @@ -95,7 +87,7 @@ export class NotificationsDb extends BaseDb { ): AsyncIterable[]> { const sql = ` SELECT * - FROM ${TableName.Collaborators} + FROM ${Domain.Collaborator} WHERE workspace_id = $1::uuid AND card_id = $2::varchar AND date <= $3::timestamptz @@ -113,23 +105,19 @@ export class NotificationsDb extends BaseDb { content: NotificationContent, created: Date ): Promise { - const db: Omit = { + const db: Omit, 'id'> = { + context_id: context, type, message_id: message, message_created: messageCreated, read, - context_id: context, created, content } - const sql = `INSERT INTO ${TableName.Notification} (message_id, message_created, context_id, read, created, type, content) - VALUES ($1::varchar, $2::timestamptz, $3::int8, $4::boolean, $5::timestamptz, $6::varchar, $7::jsonb) - RETURNING id::text` - const result = await this.execute( - sql, - [db.message_id, db.message_created, db.context_id, db.read, db.created, db.type, db.content], - 'insert notification' - ) + const { sql, values } = this.getInsertSql(Domain.Notification, db as DbModel, [ + { column: 'id', cast: 'text' } + ]) + const result = await this.execute(sql, values, 'insert notification') return result[0].id as NotificationID } @@ -170,9 +158,9 @@ export class NotificationsDb extends BaseDb { const whereClause = `WHERE ${where.join(' AND ')}` const sql = ` - UPDATE ${TableName.Notification} n + UPDATE 
${Domain.Notification} n SET read = $${index++}::boolean - FROM ${TableName.NotificationContext} nc ${whereClause}` + FROM ${Domain.NotificationContext} nc ${whereClause}` await this.execute(sql, [...values, updates.read], 'update notification') } @@ -200,8 +188,8 @@ export class NotificationsDb extends BaseDb { values.push(ids) } - const sql = `DELETE FROM ${TableName.Notification} n - USING ${TableName.NotificationContext} nc + const sql = `DELETE FROM ${Domain.Notification} n + USING ${Domain.NotificationContext} nc WHERE ${where.join(' AND ')} RETURNING n.id::text` @@ -217,7 +205,7 @@ export class NotificationsDb extends BaseDb { lastView: Date, lastNotify: Date ): Promise { - const db: ContextDb = { + const db: Omit, 'id'> = { workspace_id: this.workspace, card_id: card, account, @@ -225,20 +213,17 @@ export class NotificationsDb extends BaseDb { last_update: lastUpdate, last_notify: lastNotify } - const sql = `INSERT INTO ${TableName.NotificationContext} (workspace_id, card_id, account, last_view, last_update, last_notify) - VALUES ($1::uuid, $2::varchar, $3::uuid, $4::timestamptz, $5::timestamptz, $6::timestamptz) - RETURNING id::text` - const result = await this.execute( - sql, - [db.workspace_id, db.card_id, db.account, db.last_view, db.last_update, db.last_notify ?? null], - 'insert notification context' - ) + const { sql, values } = this.getInsertSql(Domain.NotificationContext, db as DbModel, [ + { column: 'id', cast: 'text' } + ]) + + const result = await this.execute(sql, values, 'insert notification context') return result[0].id as ContextID } async removeContext (contextId: ContextID, account: AccountID): Promise { const sql = `DELETE - FROM ${TableName.NotificationContext} + FROM ${Domain.NotificationContext} WHERE workspace_id = $1::uuid AND id = $2::int8 AND account = $3::uuid @@ -250,31 +235,47 @@ export class NotificationsDb extends BaseDb { } async updateContext (context: ContextID, account: AccountID, updates: NotificationContextUpdates): Promise { - const dbData: Partial = {} + const update: DbModelUpdate = [] if (updates.lastView != null) { - dbData.last_view = updates.lastView + update.push({ + column: 'last_view', + value: updates.lastView + }) } if (updates.lastUpdate != null) { - dbData.last_update = updates.lastUpdate + update.push({ + column: 'last_update', + value: updates.lastUpdate + }) } - if (updates.lastNotify != null) { - dbData.last_notify = updates.lastNotify - } - - if (Object.keys(dbData).length === 0) { - return + update.push({ + column: 'last_notify', + value: updates.lastNotify + }) } - const keys = Object.keys(dbData) - const values = Object.values(dbData) + if (update.length === 0) return + + const filter: DbModelFilter = [ + { + column: 'workspace_id', + value: this.workspace + }, + { + column: 'id', + value: context + }, + { + column: 'account', + value: account + } + ] - const sql = `UPDATE ${TableName.NotificationContext} - SET ${keys.map((k, idx) => `"${k}" = $${idx + 4}::timestamptz`).join(', ')} - WHERE workspace_id = $1::uuid AND id = $2::int8 AND account = $3::uuid;` + const { sql, values } = this.getUpdateSql(Domain.NotificationContext, filter, update) - await this.execute(sql, [this.workspace, context, account, ...values], 'update notification context') + await this.execute(sql, values, 'update notification context') } async findContexts (params: FindNotificationContextParams): Promise { @@ -304,7 +305,7 @@ export class NotificationsDb extends BaseDb { notificationsJoin = ` LEFT JOIN LATERAL ( SELECT * - FROM 
${TableName.Notification} n + FROM ${Domain.Notification} n ${whereNotifications} ${whereNotifications.length > 1 ? 'AND' : 'WHERE'} n.context_id = nc.id ORDER BY n.created ${notificationOrder} LIMIT ${notificationLimit} @@ -313,7 +314,7 @@ export class NotificationsDb extends BaseDb { ${ withMessages ? ` - LEFT JOIN ${TableName.Message} m + LEFT JOIN ${Domain.Message} m ON m.workspace_id = nc.workspace_id AND m.card_id = nc.card_id AND m.id = n.message_id @@ -333,27 +334,30 @@ export class NotificationsDb extends BaseDb { ) ORDER BY p.created DESC ), '[]'::JSONB ) AS patches - FROM ${TableName.Patch} p + FROM ${Domain.Patch} p WHERE p.workspace_id = nc.workspace_id AND p.card_id = nc.card_id AND p.message_id = n.message_id ) p ON TRUE LEFT JOIN LATERAL ( - SELECT COALESCE( - JSON_AGG( - JSONB_BUILD_OBJECT( - 'blob_id', f.blob_id, - 'type', f.type, - 'size', f.size, - 'filename', f.filename, - 'meta', f.meta, - 'creator', f.creator, - 'created', f.created - ) ORDER BY f.created ASC - ), '[]'::JSONB - ) AS files - FROM ${TableName.File} f - WHERE f.workspace_id = nc.workspace_id AND f.card_id = nc.card_id AND f.message_id = n.message_id - ) f ON TRUE + SELECT + COALESCE( + JSON_AGG( + JSONB_BUILD_OBJECT( + 'id', a.id, + 'type', a.type, + 'params', a.params, + 'creator', a.creator, + 'created', a.created, + 'modified',a.modified + ) + ), + '[]'::JSONB + ) AS attachments + FROM communication.attachment AS a + WHERE a.workspace_id = nc.workspace_id + AND a.card_id = nc.card_id + AND a.message_id = n.message_id + ) AS a ON TRUE ` notificationsSelect = `, @@ -378,7 +382,7 @@ export class NotificationsDb extends BaseDb { } 'blob_id', n.blob_id, 'patches', p.patches, - 'files', f.files + 'attachments', a.attachments ) ORDER BY n.created ${notificationOrder} ), '[]'::JSONB @@ -397,7 +401,7 @@ export class NotificationsDb extends BaseDb { nc.last_update, nc.last_notify ${notificationsSelect} - FROM ${TableName.NotificationContext} nc + FROM ${Domain.NotificationContext} nc ${notificationsJoin} ${where} ${groupBy} ${orderBy} @@ -430,31 +434,30 @@ export class NotificationsDb extends BaseDb { 'created', p.created ) ) - FROM ${TableName.Patch} p + FROM ${Domain.Patch} p WHERE p.workspace_id = m.workspace_id AND p.card_id = m.card_id AND p.message_id = m.id) AS message_patches, (SELECT json_agg( jsonb_build_object( - 'blob_id', f.blob_id, - 'type', f.type, - 'size', f.size, - 'filename', f.filename, - 'meta', f.meta, - 'creator', f.creator, - 'created', f.created + 'id', a.id, + 'type', a.type, + 'params', a.params, + 'creator', a.creator, + 'created', a.created, + 'modified', a.modified ) ) - FROM ${TableName.File} f - WHERE f.workspace_id = m.workspace_id AND f.card_id = m.card_id AND f.message_id = m.id) AS message_files + FROM ${Domain.Attachment} a + WHERE a.workspace_id = m.workspace_id AND a.card_id = m.card_id AND a.message_id = m.id) AS message_attachments ` joinMessages = ` - LEFT JOIN ${TableName.Message} m + LEFT JOIN ${Domain.Message} m ON nc.workspace_id = m.workspace_id AND nc.card_id = m.card_id AND n.message_id = m.id ` } - select += ` FROM ${TableName.Notification} n - JOIN ${TableName.NotificationContext} nc ON n.context_id = nc.id` + select += ` FROM ${Domain.Notification} n + JOIN ${Domain.NotificationContext} nc ON n.context_id = nc.id` const { where, values } = this.buildNotificationWhere(params) const orderBy = @@ -469,13 +472,13 @@ export class NotificationsDb extends BaseDb { } async updateCollaborators (params: FindCollaboratorsParams, data: Partial): Promise { - const dbData: 
Partial = { + const update: Partial> = { account: data.account, card_id: data.cardId, card_type: data.cardType } - const entries = Object.entries(dbData).filter(([_, value]) => value != null) + const entries = Object.entries(update).filter(([_, value]) => value != null) if (entries.length === 0) return entries.unshift(['workspace_id', this.workspace]) @@ -484,7 +487,7 @@ export class NotificationsDb extends BaseDb { const { where, values: whereValues } = this.buildCollaboratorsWhere(params, setValues.length, '') - const sql = `UPDATE ${TableName.Collaborators} + const sql = `UPDATE ${Domain.Collaborator} SET ${setClauses.join(', ')} ${where}` @@ -495,7 +498,7 @@ export class NotificationsDb extends BaseDb { const { where, values } = this.buildCollaboratorsWhere(params) const select = ` SELECT * - FROM ${TableName.Collaborators} c + FROM ${Domain.Collaborator} c ` const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' @@ -646,9 +649,9 @@ export class NotificationsDb extends BaseDb { public async updateNotificationsBlobId (cardId: CardID, blobId: string, from: Date, to: Date): Promise { const sql = ` - UPDATE ${TableName.Notification} AS n + UPDATE ${Domain.Notification} AS n SET blob_id = $3::uuid - FROM ${TableName.NotificationContext} AS nc + FROM ${Domain.NotificationContext} AS nc WHERE n.context_id = nc.id AND nc.workspace_id = $1::uuid @@ -661,9 +664,9 @@ export class NotificationsDb extends BaseDb { public async removeNotificationsBlobId (cardId: CardID, blobId: string): Promise { const sql = ` - UPDATE ${TableName.Notification} AS n + UPDATE ${Domain.Notification} AS n SET blob_id = NULL - FROM ${TableName.NotificationContext} AS nc + FROM ${Domain.NotificationContext} AS nc WHERE n.context_id = nc.id AND nc.workspace_id = $1::uuid diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index 20e89bc88d3..b23a0252252 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -14,11 +14,11 @@ // import type postgres from 'postgres' -import { TableName } from './schema' +import { Domain } from '@hcengineering/communication-sdk-types' /* eslint-disable @typescript-eslint/naming-convention */ -const migrationsTableName = 'communication._migrations' +const migrationsDomain = 'communication._migrations' let isSchemaInitialized = false let initPromise: Promise | null = null @@ -69,7 +69,7 @@ async function init (sql: postgres.Sql): Promise { const start = performance.now() console.log('🗃️ Initializing schema...') await sql.unsafe('CREATE SCHEMA IF NOT EXISTS communication;') - await sql.unsafe(`CREATE TABLE IF NOT EXISTS ${migrationsTableName} + await sql.unsafe(`CREATE TABLE IF NOT EXISTS ${migrationsDomain} ( name VARCHAR(255) NOT NULL, created_on TIMESTAMPTZ NOT NULL DEFAULT now(), @@ -77,7 +77,7 @@ async function init (sql: postgres.Sql): Promise { )`) const appliedMigrations = await sql.unsafe(`SELECT name - FROM ${migrationsTableName}`) + FROM ${migrationsDomain}`) const appliedNames = appliedMigrations.map((it) => it.name) const migrations = getMigrations() @@ -86,7 +86,7 @@ async function init (sql: postgres.Sql): Promise { try { await sql.unsafe(sqlString) await sql.unsafe( - `INSERT INTO ${migrationsTableName}(name) + `INSERT INTO ${migrationsDomain}(name) VALUES ($1::varchar);`, [name] ) @@ -113,7 +113,6 @@ function getMigrations (): [string, string][] { migrationV2_5(), migrationV2_6(), migrationV2_7(), - migrationV3_1(), migrationV4_1(), migrationV5_1(), migrationV5_2(), @@ -127,7 +126,10 @@ function getMigrations (): 
[string, string][] { migrationV6_8(), migrationV7_1(), migrationV7_2(), - migrationV7_3() + migrationV7_3(), + migrationV8_1(), + migrationV8_2(), + migrationV8_3() ] } @@ -135,7 +137,7 @@ function migrationV1_1 (): [string, string] { const sql = ` DROP SCHEMA IF EXISTS communication CASCADE; CREATE SCHEMA IF NOT EXISTS communication; - CREATE TABLE IF NOT EXISTS ${migrationsTableName} + CREATE TABLE IF NOT EXISTS ${migrationsDomain} ( name VARCHAR(255) NOT NULL, created_on TIMESTAMPTZ NOT NULL DEFAULT now(), @@ -193,24 +195,6 @@ function migrationV1_2 (): [string, string] { CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message ON communication.patch (workspace_id, card_id, message_id); - CREATE TABLE IF NOT EXISTS communication.files - ( - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - blob_id UUID NOT NULL, - filename VARCHAR(255) NOT NULL, - type VARCHAR(255) NOT NULL, - size INT8 NOT NULL, - meta JSONB NOT NULL DEFAULT '{}', - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - message_created TIMESTAMPTZ NOT NULL, - PRIMARY KEY (workspace_id, card_id, message_id, blob_id) - ); - - CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx ON communication.files (workspace_id, card_id, message_id); - CREATE TABLE IF NOT EXISTS communication.reactions ( workspace_id UUID NOT NULL, @@ -348,33 +332,6 @@ function migrationV2_7 (): [string, string] { return ['set_last_notify_to_last_update-v2_7', sql] } -function migrationV3_1 (): [string, string] { - const sql = ` - CREATE TABLE IF NOT EXISTS communication.link_preview - ( - id INT8 NOT NULL DEFAULT unique_rowid(), - workspace_id UUID NOT NULL, - card_id VARCHAR(255) NOT NULL, - message_id INT8 NOT NULL, - message_created TIMESTAMPTZ NOT NULL, - url TEXT NOT NULL, - host TEXT NOT NULL, - hostname TEXT, - title TEXT, - description TEXT, - favicon TEXT, - image JSONB, - creator VARCHAR(255) NOT NULL, - created TIMESTAMPTZ NOT NULL DEFAULT now(), - PRIMARY KEY (id) - ); - - CREATE INDEX IF NOT EXISTS workspace_id_card_id_message_id_idx ON communication.link_preview (workspace_id, card_id, message_id); - ` - - return ['init_link_preview-v3_1', sql] -} - function migrationV4_1 (): [string, string] { const sql = ` CREATE INDEX IF NOT EXISTS notifications_context_id_read_created_desc_idx ON communication.notifications (context_id, read, created DESC); @@ -387,12 +344,8 @@ function migrationV5_1 (): [string, string] { DROP INDEX IF EXISTS communication.idx_messages_unique_workspace_card_external_id; ALTER TABLE communication.messages DROP COLUMN IF EXISTS external_id; - ALTER TABLE communication.files - DROP COLUMN IF EXISTS message_created; ALTER TABLE communication.thread DROP COLUMN IF EXISTS message_created; - ALTER TABLE communication.link_preview - DROP COLUMN IF EXISTS message_created; ` return ['remove_unused-columns-v5_1', sql] } @@ -414,21 +367,17 @@ function migrationV5_2 (): [string, string] { function migrationV6_1 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Message} - ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.Patch} - ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.File} + ALTER TABLE communication.messages ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.patch ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.Thread} + ALTER TABLE communication.reactions ADD COLUMN IF NOT 
EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.LinkPreview} + ALTER TABLE communication.thread ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.Notification} + ALTER TABLE communication.notifications ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created ADD COLUMN IF NOT EXISTS message_id_str VARCHAR(22); ` return ['add_message_id_str_columns-v6_1', sql] @@ -436,21 +385,17 @@ function migrationV6_1 (): [string, string] { function migrationV6_2 (): [string, string] { const sql = ` - UPDATE ${TableName.Message} + UPDATE communication.messages SET message_id_str = id::text; - UPDATE ${TableName.Patch} - SET message_id_str = message_id::text; - UPDATE ${TableName.File} - SET message_id_str = message_id::text; - UPDATE ${TableName.Reaction} + UPDATE communication.patch SET message_id_str = message_id::text; - UPDATE ${TableName.Thread} + UPDATE communication.reactions SET message_id_str = message_id::text; - UPDATE ${TableName.LinkPreview} + UPDATE communication.thread SET message_id_str = message_id::text; - UPDATE ${TableName.Notification} + UPDATE communication.notifications SET message_id_str = message_id::text; - UPDATE ${TableName.MessageCreated} + UPDATE communication.message_created SET message_id_str = message_id::text; ` return ['copy_int8_ids_to_str_columns-v6_2', sql] @@ -458,13 +403,12 @@ function migrationV6_2 (): [string, string] { function migrationV6_3 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.reactions DROP CONSTRAINT IF EXISTS reactions_workspace_id_card_id_message_id_fkey; DROP INDEX IF EXISTS communication.thread_unique_constraint CASCADE; DROP INDEX IF EXISTS communication.idx_patch_workspace_card_message; - DROP INDEX IF EXISTS communication.files_workspace_card_message_idx; DROP INDEX IF EXISTS communication.idx_reactions_workspace_card_message; DROP INDEX IF EXISTS communication.idx_thread_workspace_card_message; DROP INDEX IF EXISTS communication.workspace_id_card_id_message_id_idx; @@ -476,44 +420,34 @@ function migrationV6_3 (): [string, string] { function migrationV6_4 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Message} + ALTER TABLE communication.messages RENAME COLUMN id TO message_id_old; - ALTER TABLE ${TableName.Message} + ALTER TABLE communication.messages RENAME COLUMN message_id_str TO id; - ALTER TABLE ${TableName.Patch} - RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.Patch} - RENAME COLUMN message_id_str TO message_id; - - ALTER TABLE ${TableName.File} - RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.File} - RENAME COLUMN message_id_str TO message_id; - - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.patch RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.patch RENAME COLUMN message_id_str TO message_id; - ALTER TABLE ${TableName.Thread} + ALTER TABLE communication.reactions RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.Thread} + ALTER TABLE communication.reactions RENAME COLUMN message_id_str TO message_id; - ALTER TABLE ${TableName.LinkPreview} + ALTER TABLE communication.thread RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.LinkPreview} + ALTER TABLE communication.thread RENAME COLUMN message_id_str TO message_id; - ALTER TABLE ${TableName.Notification} + ALTER TABLE 
communication.notifications RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.Notification} + ALTER TABLE communication.notifications RENAME COLUMN message_id_str TO message_id; - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created RENAME COLUMN message_id TO message_id_old; - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created RENAME COLUMN message_id_str TO message_id; ` return ['rename_message_id_columns-v6_4', sql] @@ -521,20 +455,16 @@ function migrationV6_4 (): [string, string] { function migrationV6_5 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Message} + ALTER TABLE communication.messages ALTER COLUMN id SET NOT NULL; - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created ALTER COLUMN message_id SET NOT NULL; - ALTER TABLE ${TableName.File} + ALTER TABLE communication.reactions ALTER COLUMN message_id SET NOT NULL; - ALTER TABLE ${TableName.Reaction} - ALTER COLUMN message_id SET NOT NULL; - ALTER TABLE ${TableName.Thread} - ALTER COLUMN message_id SET NOT NULL; - ALTER TABLE ${TableName.LinkPreview} + ALTER TABLE communication.thread ALTER COLUMN message_id SET NOT NULL; - ALTER TABLE ${TableName.Notification} + ALTER TABLE communication.notifications ALTER COLUMN message_id SET NOT NULL; ` @@ -543,13 +473,11 @@ function migrationV6_5 (): [string, string] { function migrationV6_6 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Message} + ALTER TABLE communication.messages ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, id); - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id); - ALTER TABLE ${TableName.File} - ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id, blob_id); - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.reactions ALTER PRIMARY KEY USING COLUMNS (workspace_id, card_id, message_id, creator, reaction); ` return ['recrate_primary_keys-v6_6', sql] @@ -557,52 +485,42 @@ function migrationV6_6 (): [string, string] { function migrationV6_7 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.reactions ADD CONSTRAINT fk_reactions_message FOREIGN KEY (workspace_id, card_id, message_id) - REFERENCES ${TableName.Message} (workspace_id, card_id, id) + REFERENCES communication.messages (workspace_id, card_id, id) ON DELETE CASCADE; CREATE INDEX IF NOT EXISTS idx_patch_workspace_card_message - ON ${TableName.Patch} (workspace_id, card_id, message_id); - - CREATE INDEX IF NOT EXISTS files_workspace_card_message_idx - ON ${TableName.File} (workspace_id, card_id, message_id); + ON communication.patch (workspace_id, card_id, message_id); CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message - ON ${TableName.Reaction} (workspace_id, card_id, message_id); + ON communication.reactions (workspace_id, card_id, message_id); - ALTER TABLE ${TableName.Thread} ADD CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id); + ALTER TABLE communication.thread ADD CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id); CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message - ON ${TableName.Thread} (workspace_id, card_id, message_id); - - CREATE INDEX IF NOT EXISTS workspace_id_card_id_message_id_idx - ON ${TableName.LinkPreview} (workspace_id, card_id, message_id); + ON communication.thread (workspace_id, 
card_id, message_id); CREATE INDEX IF NOT EXISTS notifications_context_id_read_created_desc_idx - ON ${TableName.Notification} (context_id, read, created DESC); + ON communication.notifications (context_id, read, created DESC); ` return ['recreate_constraints_and_indexes-v6_7', sql] } function migrationV6_8 (): [string, string] { const sql = ` - ALTER TABLE ${TableName.Message} - DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.Patch} - DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.File} + ALTER TABLE communication.messages DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.Reaction} + ALTER TABLE communication.patch DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.Thread} + ALTER TABLE communication.reactions DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.LinkPreview} + ALTER TABLE communication.thread DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.Notification} + ALTER TABLE communication.notifications DROP COLUMN IF EXISTS message_id_old; - ALTER TABLE ${TableName.MessageCreated} + ALTER TABLE communication.message_created DROP COLUMN IF EXISTS message_id_old; ` return ['drop_old_message_id_columns-v6_8', sql] @@ -643,3 +561,46 @@ function migrationV7_3 (): [string, string] { ` return ['make_last_notify_not_null-v7_3', sql] } + +function migrationV8_1 (): [string, string] { + const sql = ` + ALTER TABLE communication.messages + RENAME TO message; + ALTER TABLE communication.messages_groups + RENAME TO messages_group; + ALTER TABLE communication.reactions + RENAME TO reaction; + ALTER TABLE communication.notifications + RENAME TO notification; + ALTER TABLE communication.collaborators + RENAME TO collaborator; + ` + + return ['rename_tables-v8_1', sql] +} + +function migrationV8_2 (): [string, string] { + const sql = ` + CREATE TABLE IF NOT EXISTS ${Domain.Attachment} + ( + workspace_id UUID NOT NULL, + card_id VARCHAR NOT NULL, + message_id VARCHAR NOT NULL, + id UUID NOT NULL, + type TEXT NOT NULL, + params JSONB NOT NULL, + creator VARCHAR NOT NULL, + created TIMESTAMPTZ NOT NULL, + modified TIMESTAMPTZ, + PRIMARY KEY (workspace_id, card_id, message_id, id) + ); + ` + return ['create_attachment_table-v8_2', sql] +} + +function migrationV8_3 (): [string, string] { + const sql = ` + CREATE INDEX IF NOT EXISTS attachment_workspace_card_message_idx ON ${Domain.Attachment} (workspace_id, card_id, message_id) + ` + return ['add_attachment_indexes-v8_3', sql] +} diff --git a/packages/cockroach/src/schema.ts b/packages/cockroach/src/schema.ts index 626e26f7d3f..3e850acd149 100644 --- a/packages/cockroach/src/schema.ts +++ b/packages/cockroach/src/schema.ts @@ -27,61 +27,174 @@ import { type NotificationID, type LabelID, type CardType, - type BlobMetadata, - type LinkPreviewImage, - type LinkPreviewID, NotificationContent, - NotificationType + NotificationType, AttachmentID } from '@hcengineering/communication-types' +import { Domain } from '@hcengineering/communication-sdk-types' -export enum TableName { - File = 'communication.files', - Message = 'communication.messages', - MessageCreated = 'communication.message_created', - MessagesGroup = 'communication.messages_groups', - Notification = 'communication.notifications', - NotificationContext = 'communication.notification_context', - Patch = 'communication.patch', - Reaction = 'communication.reactions', - Thread = 'communication.thread', - Collaborators = 'communication.collaborators', - Label = 'communication.label', - LinkPreview = 
'communication.link_preview' +export const schemas = { + [Domain.Message]: { + workspace_id: 'uuid', + card_id: 'varchar', + id: 'varchar', + type: 'varchar', + content: 'string', + creator: 'varchar', + created: 'timestamptz', + data: 'jsonb' + }, + [Domain.MessageCreated]: { + workspace_id: 'uuid', + card_id: 'varchar', + created: 'timestamptz', + message_id: 'varchar' + }, + [Domain.MessagesGroup]: { + workspace_id: 'uuid', + card_id: 'varchar', + blob_id: 'uuid', + from_date: 'timestamptz', + to_date: 'timestamptz', + count: 'int8' + }, + [Domain.Patch]: { + id: 'int', + workspace_id: 'uuid', + card_id: 'varchar', + message_id: 'varchar', + type: 'varchar', + creator: 'varchar', + created: 'timestamptz', + message_created: 'timestamptz', + data: 'jsonb' + }, + [Domain.Reaction]: { + workspace_id: 'uuid', + card_id: 'varchar', + message_id: 'varchar', + reaction: 'varchar', + creator: 'varchar', + created: 'timestamptz' + }, + [Domain.Thread]: { + workspace_id: 'uuid', + card_id: 'varchar', + message_id: 'varchar', + thread_id: 'varchar', + thread_type: 'varchar', + replies_count: 'int', + last_reply: 'timestamptz' + }, + [Domain.Attachment]: { + workspace_id: 'uuid', + card_id: 'varchar', + message_id: 'varchar', + id: 'uuid', + type: 'text', + params: 'jsonb', + creator: 'varchar', + created: 'timestamptz', + modified: 'timestamptz' + }, + [Domain.Notification]: { + id: 'int8', + context_id: 'int8', + message_created: 'timestamptz', + message_id: 'varchar', + blob_id: 'uuid', + created: 'timestamptz', + content: 'jsonb', + type: 'varchar', + read: 'bool' + }, + [Domain.Collaborator]: { + workspace_id: 'uuid', + card_id: 'varchar', + account: 'uuid', + date: 'timestamptz', + card_type: 'varchar' + }, + [Domain.Label]: { + workspace_id: 'uuid', + card_id: 'varchar', + card_type: 'varchar', + label_id: 'varchar', + account: 'uuid', + created: 'timestamptz' + }, + [Domain.NotificationContext]: { + workspace_id: 'uuid', + card_id: 'varchar', + id: 'int8', + account: 'uuid', + last_view: 'timestamptz', + last_update: 'timestamptz', + last_notify: 'timestamptz' + } +} as const + +export interface DomainDbModel { + [Domain.Message]: MessageDbModel + [Domain.MessageCreated]: MessageCreatedDbModel + [Domain.MessagesGroup]: MessagesGroupDbModel + [Domain.Patch]: PatchDbModel + [Domain.Reaction]: ReactionDbModel + [Domain.Thread]: ThreadDbModel + [Domain.Attachment]: AttachmentDbModel + // [Domain.LinkPreview]: LinkPreviewDbModel + [Domain.Notification]: NotificationDbModel + [Domain.NotificationContext]: ContextDbModel + [Domain.Collaborator]: CollaboratorDbModel + [Domain.Label]: LabelDbModel } -export interface MessageDb { - id: MessageID - type: MessageType +export type DbModel = DomainDbModel[D] + +export type DbModelColumn = keyof DomainDbModel[D] & string + +export type DbModelColumnType = DomainDbModel[D][DbModelColumn] + +export type DbModelFilter = Array<{ column: DbModelColumn, value: DbModelColumnType | DbModelColumnType[] }> +export type DbModelUpdate = Array<{ + column: DbModelColumn + innerKey?: string + value: any +}> +export type DbModelBatchUpdate = Array<{ + key: DbModelColumnType + column: DbModelColumn + innerKey?: string + value: any +}> + +interface MessageDbModel { workspace_id: WorkspaceID card_id: CardID + id: MessageID + type: MessageType content: Markdown creator: SocialID created: Date data?: Record } -export const messageSchema: Record = { - workspace_id: 'uuid', - card_id: 'varchar', - id: 'varchar', - created: 'timestamptz', - content: 'text', - creator: 'varchar', 
- type: 'varchar', - data: 'jsonb' +interface MessageCreatedDbModel { + workspace_id: WorkspaceID + card_id: CardID + message_id: MessageID + created: Date } -export interface MessagesGroupDb { +interface MessagesGroupDbModel { workspace_id: WorkspaceID card_id: CardID blob_id: BlobID from_date: Date to_date: Date count: number - patches?: PatchDb[] } -export interface PatchDb { +interface PatchDbModel { workspace_id: WorkspaceID card_id: CardID message_id: MessageID @@ -92,44 +205,28 @@ export interface PatchDb { message_created: Date } -export interface ReactionDb { - workspace_id: WorkspaceID - card_id: CardID - message_id: MessageID - reaction: string - creator: SocialID - created: Date -} - -export interface FileDb { +interface AttachmentDbModel { workspace_id: WorkspaceID card_id: CardID message_id: MessageID - blob_id: BlobID - filename: string - size: number + id: AttachmentID type: string - meta?: BlobMetadata + params: Record creator: SocialID created: Date + modified?: Date } -export interface LinkPreviewDb { +interface ReactionDbModel { workspace_id: WorkspaceID - id: LinkPreviewID card_id: CardID message_id: MessageID - url: string - host: string - title: string | null - description: string | null - favicon: string | null - hostname: string | null - image: LinkPreviewImage | null + reaction: string creator: SocialID created: Date } -export interface ThreadDb { + +interface ThreadDbModel { workspace_id: WorkspaceID card_id: CardID message_id: MessageID @@ -139,7 +236,7 @@ export interface ThreadDb { last_reply: Date } -export interface NotificationDb { +interface NotificationDbModel { id: NotificationID type: NotificationType read: boolean @@ -151,7 +248,8 @@ export interface NotificationDb { content: NotificationContent } -export interface ContextDb { +interface ContextDbModel { + id: ContextID workspace_id: WorkspaceID card_id: CardID account: AccountID @@ -160,7 +258,7 @@ export interface ContextDb { last_notify: Date } -export interface CollaboratorDb { +interface CollaboratorDbModel { workspace_id: WorkspaceID card_id: CardID card_type: CardType @@ -168,7 +266,7 @@ export interface CollaboratorDb { date: Date } -export interface LabelDb { +interface LabelDbModel { workspace_id: WorkspaceID label_id: LabelID card_id: CardID diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index 59d947611de..a15758ef640 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -147,8 +147,8 @@ export class MessagesQuery implements PagedQuery { case MessageEventType.UpdatePatch: case MessageEventType.RemovePatch: case MessageEventType.ThreadPatch: - case MessageEventType.LinkPreviewPatch: case MessageEventType.BlobPatch: + case MessageEventType.AttachmentPatch: case MessageEventType.ReactionPatch: { await this.onPatchEvent(event) break @@ -840,15 +840,12 @@ export class MessagesQuery implements PagedQuery { if (this.params.reactions === true) { result.push(PatchType.reaction) } - if (this.params.files === true) { - result.push(PatchType.blob) + if (this.params.attachments === true) { + result.push(PatchType.attachment) } if (this.params.replies === true) { result.push(PatchType.thread) } - if (this.params.links === true) { - result.push(PatchType.linkPreview) - } return result } diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 3869b31a41a..1507e4646e9 100644 --- a/packages/query/src/notification-contexts/query.ts +++ 
b/packages/query/src/notification-contexts/query.ts @@ -56,7 +56,7 @@ import { QueryResult } from '../result' import { WindowImpl } from '../window' import { findMessage, loadMessageFromGroup, matchNotification } from '../utils' -const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.blob] +const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.attachment] export class NotificationContextsQuery implements PagedQuery { private result: QueryResult | Promise> private forward: Promise<{ isDone: boolean }> | { isDone: boolean } = { isDone: false } diff --git a/packages/query/src/notifications/query.ts b/packages/query/src/notifications/query.ts index 26e295be6d8..f950214dfbd 100644 --- a/packages/query/src/notifications/query.ts +++ b/packages/query/src/notifications/query.ts @@ -46,7 +46,7 @@ import { QueryResult } from '../result' import { WindowImpl } from '../window' import { loadMessageFromGroup, matchNotification } from '../utils' -const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.blob] +const allowedPatchTypes = [PatchType.update, PatchType.remove, PatchType.attachment] export class NotificationQuery implements PagedQuery { private result: QueryResult | Promise> @@ -247,7 +247,7 @@ export class NotificationQuery implements PagedQuery { - const message = (await client.findMessages({ card, id, limit: 1, files, reactions, replies }))[0] + const message = (await client.findMessages({ card, id, limit: 1, attachments, reactions, replies }))[0] if (message !== undefined) { return message } diff --git a/packages/rest-client/src/rest.ts b/packages/rest-client/src/rest.ts index 47fc34e0481..417b84dcd43 100644 --- a/packages/rest-client/src/rest.ts +++ b/packages/rest-client/src/rest.ts @@ -37,9 +37,9 @@ import { type SocialID, type CardType, type MessageType, - type BlobID, type MessageExtra, - type BlobData + AttachmentData, + AttachmentID } from '@hcengineering/communication-types' import { retry } from '@hcengineering/communication-shared' @@ -195,22 +195,22 @@ class RestClientImpl implements RestClient { ) } - async attachBlobs ( + async addAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + data: AttachmentData[], socialId: SocialID, date?: Date ): Promise { await this.event( { - type: MessageEventType.BlobPatch, + type: MessageEventType.AttachmentPatch, cardId, messageId, operations: [ { - opcode: 'attach', - blobs + opcode: 'add', + attachments: data } ], socialId, @@ -220,22 +220,22 @@ class RestClientImpl implements RestClient { ) } - async detachBlobs ( + async removeAttachments ( cardId: CardID, messageId: MessageID, - blobIds: BlobID[], + ids: AttachmentID[], socialId: SocialID, date?: Date ): Promise { await this.event( { - type: MessageEventType.BlobPatch, + type: MessageEventType.AttachmentPatch, cardId, messageId, operations: [ { - opcode: 'detach', - blobIds + opcode: 'remove', + ids } ], socialId, @@ -245,22 +245,22 @@ class RestClientImpl implements RestClient { ) } - async setBlobs ( + async setAttachments ( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + data: AttachmentData[], socialId: SocialID, date?: Date ): Promise { await this.event( { - type: MessageEventType.BlobPatch, + type: MessageEventType.AttachmentPatch, cardId, messageId, operations: [ { opcode: 'set', - blobs + attachments: data } ], socialId, diff --git a/packages/rest-client/src/types.ts b/packages/rest-client/src/types.ts index cf92ff888bb..e462319c0c9 100644 --- a/packages/rest-client/src/types.ts +++ 
b/packages/rest-client/src/types.ts @@ -33,11 +33,12 @@ import type { Markdown, SocialID, MessageType, - BlobID, CardID, CardType, MessageExtra, - BlobData + AttachmentData, + AttachmentParams, + AttachmentID } from '@hcengineering/communication-types' export interface RestClient { @@ -70,19 +71,25 @@ export interface RestClient { ) => Promise removeMessage: (cardId: CardID, messageId: MessageID, socialId: SocialID, date?: Date) => Promise - attachBlobs: ( + addAttachments:
( cardId: CardID, messageId: MessageID, - blobs: BlobData[], + data: AttachmentData
[], socialId: SocialID, date?: Date ) => Promise - detachBlobs: ( + removeAttachments: ( cardId: CardID, messageId: MessageID, - blobIds: BlobID[], + ids: AttachmentID[], + socialId: SocialID, + date?: Date + ) => Promise + setAttachments:
( + cardId: CardID, + messageId: MessageID, + data: AttachmentData
[], socialId: SocialID, date?: Date ) => Promise - setBlobs: (cardId: CardID, messageId: MessageID, blobs: BlobData[], socialId: SocialID, date?: Date) => Promise } diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index bef3cefafcc..cb7cb3bcb0c 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -42,15 +42,15 @@ import { CardType, NotificationContent, NotificationType, - BlobData, - LinkPreviewData, - LinkPreviewID, BlobUpdateData + AttachmentData, + AttachmentID, + AttachmentUpdateData } from '@hcengineering/communication-types' export interface DbAdapter { createMessage: ( - id: MessageID, cardId: CardID, + id: MessageID, type: MessageType, content: Markdown, extra: MessageExtra | undefined, @@ -73,20 +73,10 @@ export interface DbAdapter { addReaction: (cardId: CardID, messageId: MessageID, reaction: string, socialId: SocialID, date: Date) => Promise removeReaction: (cardId: CardID, message: MessageID, reaction: string, socialId: SocialID, date: Date) => Promise - attachBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise - detachBlobs: (card: CardID, messageId: MessageID, blobId: BlobID[], socialId: SocialID, date: Date) => Promise - setBlobs: (cardId: CardID, messageId: MessageID, data: BlobData[], socialId: SocialID, date: Date) => Promise - updateBlobs: (cardId: CardID, messageId: MessageID, data: BlobUpdateData[], socialId: SocialID, date: Date) => Promise - - attachLinkPreviews: ( - cardId: CardID, - messageId: MessageID, - data: (LinkPreviewData & { previewId: LinkPreviewID })[], - socialId: SocialID, - date: Date - ) => Promise - detachLinkPreviews: (cardId: CardID, messageId: MessageID, ids: LinkPreviewID[], socialId: SocialID, date: Date) => Promise - setLinkPreviews: (cardId: CardID, messageId: MessageID, data: (LinkPreviewData & { previewId: LinkPreviewID })[], socialId: SocialID, date: Date) => Promise + addAttachments: (cardId: CardID, messageId: MessageID, data: AttachmentData[], socialId: SocialID, date: Date) => Promise + removeAttachments: (card: CardID, messageId: MessageID, ids: AttachmentID[], socialId: SocialID, date: Date) => Promise + setAttachments: (cardId: CardID, messageId: MessageID, data: AttachmentData[], socialId: SocialID, date: Date) => Promise + updateAttachments: (cardId: CardID, messageId: MessageID, data: AttachmentUpdateData[], socialId: SocialID, date: Date) => Promise attachThread: (cardId: CardID, messageId: MessageID, threadId: CardID, threadType: CardType, socialId: SocialID, date: Date) => Promise removeThreads: (query: ThreadQuery) => Promise diff --git a/packages/sdk-types/src/domain.ts b/packages/sdk-types/src/domain.ts new file mode 100644 index 00000000000..bf9ca8510e9 --- /dev/null +++ b/packages/sdk-types/src/domain.ts @@ -0,0 +1,32 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
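+// Domain values are fully qualified "<schema>.<table>" identifiers, so database
+// adapters can interpolate them directly into SQL. Illustrative sketch only,
+// assuming the column layout defined in the cockroach schema:
+//
+//   const sql = `SELECT * FROM ${Domain.Reaction}
+//                WHERE workspace_id = $1::uuid AND card_id = $2::varchar`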
+ +export enum Domain { + Message = 'communication.message', + MessageCreated = 'communication.message_created', + MessagesGroup = 'communication.messages_group', + + Attachment = 'communication.attachment', + Patch = 'communication.patch', + Reaction = 'communication.reaction', + Thread = 'communication.thread', + + Notification = 'communication.notification', + NotificationContext = 'communication.notification_context', + + Collaborator = 'communication.collaborator', + Label = 'communication.label', + // LinkPreview = 'communication.link_preview' +} + +export const Domains = Object.values(Domain) diff --git a/packages/sdk-types/src/events/message.ts b/packages/sdk-types/src/events/message.ts index 28a50e0b82d..5c22f9acef0 100644 --- a/packages/sdk-types/src/events/message.ts +++ b/packages/sdk-types/src/events/message.ts @@ -6,12 +6,12 @@ import type { BlobID, MessageType, CardType, - LinkPreviewID, MessagesGroup, MessageExtra, - BlobData, - LinkPreviewData, - BlobUpdateData + BlobParams, + AttachmentData, + AttachmentID, + AttachmentUpdateData } from '@hcengineering/communication-types' import type { BaseEvent } from './common' @@ -22,8 +22,11 @@ export enum MessageEventType { UpdatePatch = 'updatePatch', RemovePatch = 'removePatch', ReactionPatch = 'reactionPatch', + /** + * @deprecated Use AttachmentPatch instead + */ BlobPatch = 'blobPatch', - LinkPreviewPatch = 'linkPreviewPatch', + AttachmentPatch = 'attachmentPatch', ThreadPatch = 'threadPatch', // Internal events @@ -36,7 +39,7 @@ export type PatchEvent = | RemovePatchEvent | ReactionPatchEvent | BlobPatchEvent - | LinkPreviewPatchEvent + | AttachmentPatchEvent | ThreadPatchEvent export type MessageEvent = CreateMessageEvent | PatchEvent | CreateMessagesGroupEvent | RemoveMessagesGroupEvent @@ -122,7 +125,7 @@ export interface ReactionPatchEvent extends BaseEvent { export interface AttachBlobsOperation { opcode: 'attach' - blobs: BlobData[] + blobs: BlobParams[] } export interface DetachBlobsOperation { @@ -132,7 +135,7 @@ export interface DetachBlobsOperation { export interface SetBlobsOperation { opcode: 'set' - blobs: BlobData[] + blobs: BlobParams[] } export interface UpdateBlobsOperation { @@ -140,7 +143,11 @@ export interface UpdateBlobsOperation { blobs: BlobUpdateData[] } -// For system and message author +export type BlobUpdateData = { blobId: BlobID } & Partial + +/** + * @deprecated Use AttachmentPatch instead + */ export interface BlobPatchEvent extends BaseEvent { type: MessageEventType.BlobPatch @@ -153,6 +160,44 @@ export interface BlobPatchEvent extends BaseEvent { date?: Date } +export interface AddAttachmentsOperation { + opcode: 'add' + attachments: AttachmentData[] +} + +export interface RemoveAttachmentsOperation { + opcode: 'remove' + ids: AttachmentID[] +} + +export interface SetAttachmentsOperation { + opcode: 'set' + attachments: AttachmentData[] +} + +export interface UpdateAttachmentsOperation { + opcode: 'update' + attachments: AttachmentUpdateData[] +} + +// For system and message author +export interface AttachmentPatchEvent extends BaseEvent { + type: MessageEventType.AttachmentPatch + + cardId: CardID + messageId: MessageID + + operations: ( + | AddAttachmentsOperation + | RemoveAttachmentsOperation + | SetAttachmentsOperation + | UpdateAttachmentsOperation + )[] + + socialId: SocialID + date?: Date +} + // For any user export interface AttachThreadOperation { opcode: 'attach' @@ -183,33 +228,6 @@ export interface ThreadPatchEvent extends BaseEvent { date?: Date } -export interface 
AttachLinkPreviewsOperation { - opcode: 'attach' - previews: (LinkPreviewData & { previewId: LinkPreviewID })[] -} - -export interface DetachLinkPreviewsOperation { - opcode: 'detach' - previewIds: LinkPreviewID[] -} - -export interface SetLinkPreviewsOperation { - opcode: 'set' - previews: (LinkPreviewData & { previewId: LinkPreviewID })[] -} - -// For system and message author -export interface LinkPreviewPatchEvent extends BaseEvent { - type: MessageEventType.LinkPreviewPatch - cardId: CardID - messageId: MessageID - - operations: (AttachLinkPreviewsOperation | DetachLinkPreviewsOperation | SetLinkPreviewsOperation)[] - - socialId: SocialID - date?: Date -} - export interface CreateMessageResult { messageId: MessageID created: Date diff --git a/packages/sdk-types/src/index.ts b/packages/sdk-types/src/index.ts index 0cab58460d9..901d8da5210 100644 --- a/packages/sdk-types/src/index.ts +++ b/packages/sdk-types/src/index.ts @@ -14,7 +14,8 @@ // export type * from './client' -export type * from './db' +export * from './db' export type * from './query' export * from './events/event' export type * from './serverApi' +export * from './domain' diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index 7530471861d..40879be3209 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -194,9 +194,9 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { new Set(Array.from(info.contextQueries.values()).flatMap((it) => Array.from(it))) ) case MessageEventType.ThreadPatch: - case MessageEventType.LinkPreviewPatch: case MessageEventType.ReactionPatch: case MessageEventType.BlobPatch: + case MessageEventType.AttachmentPatch: case MessageEventType.RemovePatch: case MessageEventType.UpdatePatch: return this.matchMessagesQuery( diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index b3374ac99bb..dd1fb8cd3ad 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -14,6 +14,7 @@ // import { + AttachmentID, CardID, type Collaborator, type FindCollaboratorsParams, @@ -34,7 +35,6 @@ import { } from '@hcengineering/communication-types' import { type AddCollaboratorsEvent, - BlobPatchEvent, CardEventType, type CreateLabelEvent, type CreateMessageEvent, @@ -44,7 +44,6 @@ import { type DbAdapter, type Event, LabelEventType, - LinkPreviewPatchEvent, MessageEventType, NotificationEventType, ReactionPatchEvent, @@ -61,7 +60,9 @@ import { type UpdateNotificationEvent, UpdatePatchEvent, ThreadPatchEvent, - EventResult + EventResult, + AttachmentPatchEvent, + BlobPatchEvent } from '@hcengineering/communication-sdk-types' import type { Enriched, Middleware, MiddlewareContext } from '../types' @@ -131,8 +132,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return await this.reactionPatch(event) case MessageEventType.BlobPatch: return await this.blobPatch(event) - case MessageEventType.LinkPreviewPatch: - return await this.linkPreviewPatch(event) + case MessageEventType.AttachmentPatch: + return await this.attachmentPatch(event) case MessageEventType.ThreadPatch: return await this.threadPatch(event) case MessageEventType.CreateMessagesGroup: @@ -196,8 +197,8 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } const created = await this.db.createMessage( - event.messageId, event.cardId, + event.messageId, event.messageType, event.content, event.extra, 
@@ -266,33 +267,74 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { for (const operation of operations) { if (operation.opcode === 'attach') { - await this.db.attachBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) + await this.db.addAttachments( + event.cardId, + event.messageId, + operation.blobs.map((b) => ({ + id: b.blobId as any as AttachmentID, + type: b.mimeType, + params: b + })), + event.socialId, + event.date + ) } else if (operation.opcode === 'detach') { - await this.db.detachBlobs(event.cardId, event.messageId, operation.blobIds, event.socialId, event.date) + await this.db.removeAttachments( + event.cardId, + event.messageId, + operation.blobIds as any as AttachmentID[], + event.socialId, + event.date + ) } else if (operation.opcode === 'set') { - await this.db.setBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) + await this.db.setAttachments( + event.cardId, + event.messageId, + operation.blobs.map((b) => ({ + id: b.blobId as any as AttachmentID, + type: b.mimeType, + params: b + })), + event.socialId, + event.date + ) } else if (operation.opcode === 'update') { - await this.db.updateBlobs(event.cardId, event.messageId, operation.blobs, event.socialId, event.date) + await this.db.updateAttachments( + event.cardId, + event.messageId, + operation.blobs.map((b) => ({ + id: b.blobId as any as AttachmentID, + params: { + ...b + } + })), + event.socialId, + event.date + ) } } return {} } - private async linkPreviewPatch (event: Enriched): Promise { - for (const operation of event.operations) { - if (operation.opcode === 'attach') { - await this.db.attachLinkPreviews(event.cardId, event.messageId, operation.previews, event.socialId, event.date) - } else if (operation.opcode === 'detach') { - await this.db.detachLinkPreviews( + private async attachmentPatch (event: Enriched): Promise { + const { operations } = event + + for (const operation of operations) { + if (operation.opcode === 'add') { + await this.db.addAttachments(event.cardId, event.messageId, operation.attachments, event.socialId, event.date) + } else if (operation.opcode === 'remove') { + await this.db.removeAttachments(event.cardId, event.messageId, operation.ids, event.socialId, event.date) + } else if (operation.opcode === 'set') { + await this.db.setAttachments(event.cardId, event.messageId, operation.attachments, event.socialId, event.date) + } else if (operation.opcode === 'update') { + await this.db.updateAttachments( event.cardId, event.messageId, - operation.previewIds, + operation.attachments, event.socialId, event.date ) - } else if (operation.opcode === 'set') { - await this.db.setLinkPreviews(event.cardId, event.messageId, operation.previews, event.socialId, event.date) } } diff --git a/packages/server/src/middleware/permissions.ts b/packages/server/src/middleware/permissions.ts index 9d9e5657948..a9d46a3a962 100644 --- a/packages/server/src/middleware/permissions.ts +++ b/packages/server/src/middleware/permissions.ts @@ -52,7 +52,7 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware case MessageEventType.RemovePatch: case MessageEventType.UpdatePatch: case MessageEventType.BlobPatch: - case MessageEventType.LinkPreviewPatch: + case MessageEventType.AttachmentPatch: case MessageEventType.ReactionPatch: case MessageEventType.ThreadPatch: case NotificationEventType.AddCollaborators: diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts 
index 54bbbe1e40e..d19ad9cebf6 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -40,6 +40,7 @@ import { z } from 'zod' import type { Enriched, Middleware, QueryId } from '../types' import { BaseMiddleware } from './base' import { ApiError } from '../error' +import { isBlobAttachmentType, isLinkPreviewAttachmentType } from '@hcengineering/communication-shared' export class ValidateMiddleware extends BaseMiddleware implements Middleware { private validate(data: unknown, schema: z.ZodType): T { @@ -112,8 +113,19 @@ export class ValidateMiddleware extends BaseMiddleware implements Middleware { case MessageEventType.BlobPatch: this.validate(event, BlobPatchEventSchema) break - case MessageEventType.LinkPreviewPatch: - this.validate(event, LinkPreviewPatchEventSchema) + case MessageEventType.AttachmentPatch: + this.validate(event, AttachmentPatchEventSchema) + event.operations.forEach((op) => { + if (op.opcode === 'add' || op.opcode === 'set') { + op.attachments.forEach((att) => { + if (isLinkPreviewAttachmentType(att.type)) { + this.validate(att.params, LinkPreviewParamsSchema) + } else if (isBlobAttachmentType(att.type)) { + this.validate(att.params, BlobParamsSchema) + } + }) + } + }) break case MessageEventType.ThreadPatch: this.validate(event, ThreadPatchEventSchema) @@ -145,13 +157,13 @@ export class ValidateMiddleware extends BaseMiddleware implements Middleware { } const AccountIDSchema = z.string() -const BlobIDSchema = z.string() +const BlobIDSchema = z.string().uuid() +const AttachmentIDSchema = z.string().uuid() const CardIDSchema = z.string() const CardTypeSchema = z.string() const ContextIDSchema = z.string() const DateSchema = z.coerce.date() const LabelIDSchema = z.string() -const LinkPreviewIDSchema = z.string() const MarkdownSchema = z.string() const MessageExtraSchema = z.any() const MessageIDSchema = z.string() @@ -160,7 +172,7 @@ const MessagesGroupSchema = z.any() const SocialIDSchema = z.string() const SortingOrderSchema = z.union([z.literal(SortingOrder.Ascending), z.literal(SortingOrder.Descending)]) -const BlobDataSchema = z.object({ +const BlobParamsSchema = z.object({ blobId: BlobIDSchema, mimeType: z.string(), fileName: z.string(), @@ -168,17 +180,8 @@ const BlobDataSchema = z.object({ metadata: z.record(z.string(), z.any()).optional() }) -const UpdateBlobDataSchema = z.object({ - blobId: BlobIDSchema, - mimeType: z.string().optional(), - fileName: z.string().optional(), - size: z.number().optional(), - metadata: z.record(z.string(), z.any()).optional() -}) - -const LinkPreviewDataSchema = z +const LinkPreviewParamsSchema = z .object({ - previewId: LinkPreviewIDSchema, url: z.string(), host: z.string(), title: z.string().optional(), @@ -195,6 +198,25 @@ const LinkPreviewDataSchema = z }) .strict() +const UpdateBlobDataSchema = z.object({ + blobId: BlobIDSchema, + mimeType: z.string().optional(), + fileName: z.string().optional(), + size: z.number().optional(), + metadata: z.record(z.string(), z.any()).optional() +}) + +const AttachmentDataSchema = z.object({ + id: AttachmentIDSchema, + type: z.string(), + params: z.record(z.string(), z.any()) +}) + +const AttachmentUpdateDataSchema = z.object({ + id: AttachmentIDSchema, + params: z.record(z.string(), z.any()) +}) + // Find params const DateOrRecordSchema = z.union([DateSchema, z.record(DateSchema)]) @@ -208,10 +230,9 @@ const FindParamsSchema = z const FindMessagesParamsSchema = FindParamsSchema.extend({ id: MessageIDSchema.optional(), card: 
CardIDSchema.optional(), - files: z.boolean().optional(), + attachments: z.boolean().optional(), reactions: z.boolean().optional(), replies: z.boolean().optional(), - links: z.boolean().optional(), created: DateOrRecordSchema.optional() }).strict() @@ -336,13 +357,19 @@ const ReactionPatchEventSchema = BaseEventSchema.extend({ date: DateSchema }).strict() +/** + * @deprecated + */ const BlobOperationSchema = z.union([ - z.object({ opcode: z.literal('attach'), blobs: z.array(BlobDataSchema).nonempty() }), + z.object({ opcode: z.literal('attach'), blobs: z.array(BlobParamsSchema).nonempty() }), z.object({ opcode: z.literal('detach'), blobIds: z.array(BlobIDSchema).nonempty() }), - z.object({ opcode: z.literal('set'), blobs: z.array(BlobDataSchema).nonempty() }), + z.object({ opcode: z.literal('set'), blobs: z.array(BlobParamsSchema).nonempty() }), z.object({ opcode: z.literal('update'), blobs: z.array(UpdateBlobDataSchema).nonempty() }) ]) +/** + * @deprecated + */ const BlobPatchEventSchema = BaseEventSchema.extend({ type: z.literal(MessageEventType.BlobPatch), cardId: CardIDSchema, @@ -352,26 +379,27 @@ const BlobPatchEventSchema = BaseEventSchema.extend({ date: DateSchema }).strict() -const ThreadPatchEventSchema = BaseEventSchema.extend({ - type: z.literal(MessageEventType.ThreadPatch), +const AttachmentOperationSchema = z.union([ + z.object({ opcode: z.literal('add'), attachments: z.array(AttachmentDataSchema).nonempty() }), + z.object({ opcode: z.literal('remove'), ids: z.array(AttachmentIDSchema).nonempty() }), + z.object({ opcode: z.literal('set'), attachments: z.array(AttachmentDataSchema).nonempty() }), + z.object({ opcode: z.literal('update'), attachments: z.array(AttachmentUpdateDataSchema).nonempty() }) +]) + +const AttachmentPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.AttachmentPatch), cardId: CardIDSchema, messageId: MessageIDSchema, - operation: z.object({ opcode: z.literal('attach'), threadId: CardIDSchema, threadType: CardTypeSchema }), + operations: z.array(AttachmentOperationSchema).nonempty(), socialId: SocialIDSchema, date: DateSchema }).strict() -const LinkPreviewOperationSchema = z.union([ - z.object({ opcode: z.literal('attach'), previews: z.array(LinkPreviewDataSchema).nonempty() }), - z.object({ opcode: z.literal('detach'), previewIds: z.array(LinkPreviewIDSchema).nonempty() }), - z.object({ opcode: z.literal('set'), previews: z.array(LinkPreviewDataSchema).nonempty() }) -]) - -const LinkPreviewPatchEventSchema = BaseEventSchema.extend({ - type: z.literal(MessageEventType.LinkPreviewPatch), +const ThreadPatchEventSchema = BaseEventSchema.extend({ + type: z.literal(MessageEventType.ThreadPatch), cardId: CardIDSchema, - messageId: MessageIDSchema.optional(), - operations: z.array(LinkPreviewOperationSchema).nonempty(), + messageId: MessageIDSchema, + operation: z.object({ opcode: z.literal('attach'), threadId: CardIDSchema, threadType: CardTypeSchema }), socialId: SocialIDSchema, date: DateSchema }).strict() diff --git a/packages/server/src/triggers/message.ts b/packages/server/src/triggers/message.ts index 11299e31e2f..79a57a5f554 100644 --- a/packages/server/src/triggers/message.ts +++ b/packages/server/src/triggers/message.ts @@ -152,7 +152,7 @@ async function addThreadReply (ctx: TriggerCtx, event: Enriched): Promise { if (event.operation.opcode !== 'attach') return [] const { message } = await findMessage(ctx.db, ctx.metadata.filesUrl, ctx.workspace, event.cardId, event.messageId, { - files: true + attachments: true }) if 
(message === undefined) return [] @@ -181,13 +181,17 @@ async function onThreadAttached (ctx: TriggerCtx, event: Enriched ({ + id: it.id, + type: it.type, + params: it.params + })) } ], socialId: message.creator, @@ -205,7 +209,7 @@ const triggers: Triggers = [ ['register_card_on_remove_patch', MessageEventType.RemovePatch, registerCard as TriggerFn], ['register_card_on_reaction_patch', MessageEventType.ReactionPatch, registerCard as TriggerFn], ['register_card_on_blob_patch', MessageEventType.BlobPatch, registerCard as TriggerFn], - ['register_card_on_link_preview_patch', MessageEventType.LinkPreviewPatch, registerCard as TriggerFn], + ['register_card_on_attachment_patch', MessageEventType.AttachmentPatch, registerCard as TriggerFn], ['register_card_on_thread_patch', MessageEventType.ThreadPatch, registerCard as TriggerFn], ['on_messages_group_created', MessageEventType.CreateMessagesGroup, onMessagesGroupCreated as TriggerFn], diff --git a/packages/server/src/triggers/utils.ts b/packages/server/src/triggers/utils.ts index 219ef6175fa..6d18866e408 100644 --- a/packages/server/src/triggers/utils.ts +++ b/packages/server/src/triggers/utils.ts @@ -38,9 +38,8 @@ export async function findMessage ( card: CardID, id: MessageID, ops?: { - files?: boolean + attachments?: boolean replies?: boolean - links?: boolean reactions?: boolean } ): Promise<{ diff --git a/packages/shared/src/patch.ts b/packages/shared/src/patch.ts index 7bdabb5fdc1..fd53ae378e6 100644 --- a/packages/shared/src/patch.ts +++ b/packages/shared/src/patch.ts @@ -14,22 +14,19 @@ // import { - BlobData, - BlobID, - BlobPatch, CardID, CardType, - LinkPreview, - LinkPreviewData, - LinkPreviewID, - LinkPreviewPatch, type Message, type Patch, PatchType, ReactionPatch, SocialID, ThreadPatch, - BlobUpdateData + AttachmentData, + AttachmentPatch, + AttachmentUpdateData, + AttachmentID, + Attachment } from '@hcengineering/communication-types' export function applyPatches (message: Message, patches: Patch[], allowedPatchTypes: PatchType[] = []): Message { @@ -62,43 +59,29 @@ export function applyPatch (message: Message, patch: Patch, allowedPatchTypes: P return { ...message, content: '', - blobs: [], - linkPreviews: [], + attachments: [], reactions: [], removed: true } } case PatchType.reaction: return patchReactions(message, patch) - case PatchType.blob: - return patchBlobs(message, patch) - case PatchType.linkPreview: - return patchLinkPreviews(message, patch) + case PatchType.attachment: + return patchAttachments(message, patch) case PatchType.thread: return patchThread(message, patch) } } -function patchBlobs (message: Message, patch: BlobPatch): Message { - if (patch.data.operation === 'attach') { - return attachBlobs(message, patch.data.blobs, patch.created, patch.creator) - } else if (patch.data.operation === 'detach') { - return detachBlobs(message, patch.data.blobIds) +function patchAttachments (message: Message, patch: AttachmentPatch): Message { + if (patch.data.operation === 'add') { + return addAttachments(message, patch.data.attachments, patch.created, patch.creator) + } else if (patch.data.operation === 'remove') { + return removeAttachments(message, patch.data.ids) } else if (patch.data.operation === 'set') { - return setBlobs(message, patch.data.blobs, patch.created, patch.creator) + return setAttachments(message, patch.data.attachments, patch.created, patch.creator) } else if (patch.data.operation === 'update') { - return updateBlobs(message, patch.data.blobs) - } - return message -} - -function patchLinkPreviews 
(message: Message, patch: LinkPreviewPatch): Message { - if (patch.data.operation === 'attach') { - return attachLinkPreviews(message, patch.data.previews, patch.created, patch.creator) - } else if (patch.data.operation === 'detach') { - return detachLinkPreviews(message, patch.data.previewIds) - } else if (patch.data.operation === 'set') { - return setLinkPreviews(message, patch.data.previews, patch.created, patch.creator) + return updateAttachments(message, patch.data.attachments, patch.created) } return message } @@ -133,123 +116,73 @@ function removeReaction (message: Message, reaction: string, creator: SocialID): } } -function attachBlobs (message: Message, data: BlobData[], created: Date, creator: SocialID): Message { - const newBlobs = [] - for (const blob of data) { - const isExists = message.blobs.some((it) => it.blobId === blob.blobId) +function addAttachments (message: Message, data: AttachmentData[], created: Date, creator: SocialID): Message { + const newAttachments: Attachment[] = [] + for (const attach of data) { + const isExists = message.attachments.some((it) => it.id === attach.id) if (isExists === undefined) continue - newBlobs.push({ - ...blob, + const attachment: Attachment = { + ...attach, created, creator - }) + } as any + newAttachments.push(attachment) } - if (newBlobs.length === 0) return message + if (newAttachments.length === 0) return message return { ...message, - blobs: [...message.blobs, ...newBlobs] + attachments: [...message.attachments, ...newAttachments] } } -function updateBlobs (message: Message, updates: BlobUpdateData[]): Message { +function updateAttachments (message: Message, updates: AttachmentUpdateData[], date: Date): Message { if (updates.length === 0) return message - const updatedBlobs = [] - for (const blob of message.blobs) { - const update = updates.find((it) => it.blobId === blob.blobId) + const updatedAttachments: Attachment[] = [] + for (const attachment of message.attachments) { + const update = updates.find((it) => it.id === attachment.id) if (update === undefined) { - updatedBlobs.push(blob) + updatedAttachments.push(attachment) } else { - updatedBlobs.push({ - ...blob, - ...update - }) + updatedAttachments.push({ + ...attachment, + params: { + ...attachment.params, + ...update.params + }, + modified: date.getTime() > (attachment.modified?.getTime() ?? 0) ? 
date : attachment.modified + } as any) } } return { ...message, - blobs: updatedBlobs + attachments: updatedAttachments } } -function detachBlobs (message: Message, blobIds: BlobID[]): Message { - const blobs = message.blobs.filter((it) => !blobIds.includes(it.blobId)) - if (blobs.length === message.blobs.length) return message +function removeAttachments (message: Message, ids: AttachmentID[]): Message { + const attachments = message.attachments.filter((it) => !ids.includes(it.id)) + if (attachments.length === message.attachments.length) return message return { ...message, - blobs + attachments } } -function setBlobs (message: Message, data: BlobData[], created: Date, creator: SocialID): Message { +function setAttachments (message: Message, data: AttachmentData[], created: Date, creator: SocialID): Message { if (data.length === 0) return message return { ...message, - blobs: data.map((it) => ({ - ...it, - created, - creator - })) - } -} - -function attachLinkPreviews ( - message: Message, - previews: (LinkPreviewData & { previewId: LinkPreviewID })[], - created: Date, - creator: SocialID -): Message { - const newPreviews: LinkPreview[] = [] - for (const preview of previews) { - if (message.linkPreviews.some((it) => it.id === preview.previewId)) continue - newPreviews.push({ - id: preview.previewId, - ...preview, - created, - creator - }) - } - - if (newPreviews.length === 0) return message - return { - ...message, - linkPreviews: [...message.linkPreviews, ...newPreviews] - } -} - -function detachLinkPreviews (message: Message, previewIds: LinkPreviewID[]): Message { - const previews = message.linkPreviews.filter((it) => !previewIds.includes(it.id)) - if (previews.length === message.linkPreviews.length) return message - - return { - ...message, - linkPreviews: previews - } -} - -function setLinkPreviews ( - message: Message, - previews: (LinkPreviewData & { previewId: LinkPreviewID })[], - created: Date, - creator: SocialID -): Message { - if (previews.length === 0) return message - const newPreviews: LinkPreview[] = [] - for (const preview of previews) { - if (message.linkPreviews.some((it) => it.id === preview.previewId)) continue - newPreviews.push({ - id: preview.previewId, - ...preview, - created, - creator - }) - } - - return { - ...message, - linkPreviews: newPreviews + attachments: data.map( + (it) => + ({ + ...it, + created, + creator + }) as any + ) } } diff --git a/packages/shared/src/processor.ts b/packages/shared/src/processor.ts index 0a96908bd31..b880c0030ba 100644 --- a/packages/shared/src/processor.ts +++ b/packages/shared/src/processor.ts @@ -12,12 +12,10 @@ // limitations under the License. 
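The shared patch helpers above now represent blobs and link previews as a single attachments collection, patched through PatchType.attachment. A brief TypeScript sketch of applying such a patch locally with applyPatches follows; the function name, the placeholder ids and URL, and the assumption that the third argument of applyPatches filters the allowed patch types are illustrative only.

// Sketch: applying an 'add' attachment patch (a link preview) to a message in memory.
// Ids and the URL are placeholders; applyPatches comes from the helpers above.
import { applyPatches } from '@hcengineering/communication-shared'
import {
  PatchType,
  linkPreviewType,
  type AttachmentID,
  type AttachmentPatch,
  type Message,
  type MessageID,
  type SocialID
} from '@hcengineering/communication-types'

function addLinkPreviewLocally (
  message: Message,
  messageId: MessageID,
  attachmentId: AttachmentID,
  socialId: SocialID
): Message {
  const patch: AttachmentPatch = {
    messageId,
    type: PatchType.attachment,
    creator: socialId,
    created: new Date(),
    data: {
      operation: 'add',
      attachments: [
        {
          id: attachmentId,
          type: linkPreviewType,
          params: { url: 'https://example.com/page', host: 'example.com' }
        }
      ]
    }
  }
  // Assumes the third argument restricts which patch types may be applied.
  return applyPatches(message, [patch], [PatchType.attachment])
}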
import { - AttachBlobsPatchData, - AttachLinkPreviewsPatchData, + AddAttachmentsPatchData, + AttachmentID, AttachThreadPatchData, ContextID, - DetachBlobsPatchData, - DetachLinkPreviewsPatchData, Message, MessageID, Notification, @@ -25,25 +23,26 @@ import { NotificationID, Patch, PatchType, - SetBlobsPatchData, - SetLinkPreviewsPatchData, - UpdateBlobsPatchData, + RemoveAttachmentsPatchData, + SetAttachmentsPatchData, + UpdateAttachmentsPatchData, UpdateThreadPatchData } from '@hcengineering/communication-types' import { + AddAttachmentsOperation, AttachBlobsOperation, - AttachLinkPreviewsOperation, AttachThreadOperation, CreateMessageEvent, CreateNotificationContextEvent, CreateNotificationEvent, DetachBlobsOperation, - DetachLinkPreviewsOperation, MessageEventType, PatchEvent, + RemoveAttachmentsOperation, RemoveNotificationContextEvent, + SetAttachmentsOperation, SetBlobsOperation, - SetLinkPreviewsOperation, + UpdateAttachmentsOperation, UpdateBlobsOperation, UpdateNotificationContextEvent, UpdateThreadOperation @@ -66,8 +65,7 @@ export class MessageProcessor { created: event.date ?? new Date(), removed: false, reactions: [], - blobs: [], - linkPreviews: [] + attachments: [] } } @@ -121,19 +119,19 @@ export class MessageProcessor { .filter((x) => x != null) .map((it) => ({ messageId: event.messageId, - type: PatchType.blob, + type: PatchType.attachment, creator: event.socialId, created: event.date ?? new Date(), data: it })) - case MessageEventType.LinkPreviewPatch: + case MessageEventType.AttachmentPatch: return event.operations - .map((it) => linkPreviewOperationToPatchData(it)) + .map((it) => attachmentOperationToPatchData(it)) .filter((x) => x != null) .map((it) => ({ messageId: event.messageId, - type: PatchType.linkPreview, + type: PatchType.attachment, creator: event.socialId, created: event.date ?? 
new Date(), data: it @@ -219,49 +217,75 @@ export class NotificationProcessor { function blobOperationToPatchData ( operation: AttachBlobsOperation | DetachBlobsOperation | SetBlobsOperation | UpdateBlobsOperation -): AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | UpdateBlobsPatchData | undefined { +): + | AddAttachmentsPatchData + | RemoveAttachmentsPatchData + | SetAttachmentsPatchData + | UpdateAttachmentsPatchData + | undefined { if (operation.opcode === 'attach') { return { - operation: 'attach', - blobs: operation.blobs + operation: 'add', + attachments: operation.blobs.map((it) => ({ + id: it.blobId as any as AttachmentID, + type: it.mimeType, + params: it + })) } } else if (operation.opcode === 'detach') { return { - operation: 'detach', - blobIds: operation.blobIds + operation: 'remove', + ids: operation.blobIds as any as AttachmentID[] } } else if (operation.opcode === 'set') { return { operation: 'set', - blobs: operation.blobs + attachments: operation.blobs.map((it) => ({ + id: it.blobId as any as AttachmentID, + type: it.mimeType, + params: it + })) } } else if (operation.opcode === 'update') { return { operation: 'update', - blobs: operation.blobs + attachments: operation.blobs.map((it) => ({ + id: it.blobId as any as AttachmentID, + params: it + })) } } return undefined } -function linkPreviewOperationToPatchData ( - operation: AttachLinkPreviewsOperation | DetachLinkPreviewsOperation | SetLinkPreviewsOperation -): AttachLinkPreviewsPatchData | DetachLinkPreviewsPatchData | SetLinkPreviewsPatchData | undefined { - if (operation.opcode === 'attach') { +function attachmentOperationToPatchData ( + operation: AddAttachmentsOperation | RemoveAttachmentsOperation | SetAttachmentsOperation | UpdateAttachmentsOperation +): + | AddAttachmentsPatchData + | RemoveAttachmentsPatchData + | SetAttachmentsPatchData + | UpdateAttachmentsPatchData + | undefined { + if (operation.opcode === 'add') { return { - operation: 'attach', - previews: operation.previews + operation: 'add', + attachments: operation.attachments } - } else if (operation.opcode === 'detach') { + } else if (operation.opcode === 'remove') { return { - operation: 'detach', - previewIds: operation.previewIds + operation: 'remove', + ids: operation.ids } } else if (operation.opcode === 'set') { return { operation: 'set', - previews: operation.previews + attachments: operation.attachments + } + } else if (operation.opcode === 'update') { + return { + operation: 'update', + attachments: operation.attachments } } diff --git a/packages/shared/src/utils.ts b/packages/shared/src/utils.ts index 4a839a6f91a..89dcf79f8ba 100644 --- a/packages/shared/src/utils.ts +++ b/packages/shared/src/utils.ts @@ -13,7 +13,14 @@ // limitations under the License. 
// -import type { LinkPreviewID, MessageID } from '@hcengineering/communication-types' +import { + MessageID, + AppletAttachment, + Attachment, + BlobAttachment, + LinkPreviewAttachment, + linkPreviewType +} from '@hcengineering/communication-types' const COUNTER_BITS = 10n const RANDOM_BITS = 10n @@ -42,6 +49,26 @@ export function generateMessageId (): MessageID { return toBase64Url(buf) as MessageID } -export function generateLinkPreviewId (): LinkPreviewID { - return makeBigIntId().toString() as LinkPreviewID +export function isAppletAttachment (attachment: Attachment): attachment is AppletAttachment { + return attachment.type.startsWith('application/vnd.huly.applet.') +} + +export function isLinkPreviewAttachmentType (type: string): boolean { + return type === linkPreviewType +} + +export function isAppletAttachmentType (type: string): boolean { + return type.startsWith('application/vnd.huly.applet.') +} + +export function isBlobAttachmentType (type: string): boolean { + return !isLinkPreviewAttachmentType(type) && !isAppletAttachmentType(type) +} + +export function isLinkPreviewAttachment (attachment: Attachment): attachment is LinkPreviewAttachment { + return attachment.type === linkPreviewType +} + +export function isBlobAttachment (attachment: Attachment): attachment is BlobAttachment { + return !isLinkPreviewAttachment(attachment) && !isAppletAttachment(attachment) && 'blobId' in attachment.params } diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts index bd3b1f5e1ea..e1fe50a8e5b 100644 --- a/packages/types/src/file.ts +++ b/packages/types/src/file.ts @@ -14,7 +14,7 @@ // import type { CardID, CardType, Markdown, SocialID } from './core' -import type { Message, MessageID, MessageType, MessageExtra, LinkPreview, Reaction, AttachedBlob } from './message' +import type { Message, MessageID, MessageType, MessageExtra, Reaction, Attachment } from './message' export interface FileMetadata { cardId: CardID @@ -36,8 +36,7 @@ export interface FileMessage { edited?: Date reactions: Reaction[] - blobs: AttachedBlob[] - linkPreviews: LinkPreview[] + attachments: Attachment[] thread?: FileThread } diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index cbcdb5bc271..84cdd620685 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -19,3 +19,4 @@ export * from './message' export * from './notification' export * from './query' export * from './label' +export * from './patch' diff --git a/packages/types/src/message.ts b/packages/types/src/message.ts index 633a405dddc..19cbaaea86c 100644 --- a/packages/types/src/message.ts +++ b/packages/types/src/message.ts @@ -14,11 +14,13 @@ // import type { Attribute, BlobMetadata, Class, Mixin, Ref } from '@hcengineering/core' -import type { AccountID, BlobID, CardID, CardType, ID, Markdown, SocialID } from './core' import type { Card, Tag } from '@hcengineering/card' +import type { AccountID, BlobID, CardID, CardType, ID, Markdown, SocialID } from './core' +import { Patch } from './patch' + +// Message export type MessageID = ID & { message: true } -export type LinkPreviewID = string & { __linkPreviewId: true } export enum MessageType { Message = 'message', @@ -40,8 +42,7 @@ export interface Message { edited?: Date reactions: Reaction[] - blobs: AttachedBlob[] - linkPreviews: LinkPreview[] + attachments: Attachment[] thread?: Thread } @@ -97,140 +98,35 @@ export interface ActivityAttributeUpdate { removed?: AttributeValue[] } -export interface MessagesGroup { - cardId: CardID - blobId: BlobID - 
fromDate: Date - toDate: Date - count: number - patches?: Patch[] -} - -interface BasePatch { - messageId: MessageID - type: PatchType +// Reaction +export interface Reaction { + reaction: string creator: SocialID created: Date - - data: Record -} - -export interface UpdatePatch extends BasePatch { - type: PatchType.update - data: UpdatePatchData -} - -export interface UpdatePatchData { - type?: MessageType - content?: Markdown - extra?: MessageExtra -} - -export interface RemovePatch extends BasePatch { - type: PatchType.remove - data: RemovePatchData } -// eslint-disable-next-line @typescript-eslint/no-empty-interface -export interface RemovePatchData {} - -export interface ReactionPatch extends BasePatch { - type: PatchType.reaction - data: AddReactionPatchData | RemoveReactionPatchData -} - -export interface AddReactionPatchData { - operation: 'add' - reaction: string -} - -export interface RemoveReactionPatchData { - operation: 'remove' - reaction: string -} - -export interface BlobPatch extends BasePatch { - type: PatchType.blob - data: AttachBlobsPatchData | DetachBlobsPatchData | SetBlobsPatchData | UpdateBlobsPatchData -} - -export interface AttachBlobsPatchData { - operation: 'attach' - blobs: BlobData[] -} - -export interface DetachBlobsPatchData { - operation: 'detach' - blobIds: BlobID[] -} - -export interface SetBlobsPatchData { - operation: 'set' - blobs: BlobData[] -} - -export interface UpdateBlobsPatchData { - operation: 'update' - blobs: BlobUpdateData[] -} - -export interface LinkPreviewPatch extends BasePatch { - type: PatchType.linkPreview - data: AttachLinkPreviewsPatchData | DetachLinkPreviewsPatchData | SetLinkPreviewsPatchData -} - -export interface AttachLinkPreviewsPatchData { - operation: 'attach' - previews: (LinkPreviewData & { previewId: LinkPreviewID })[] -} - -export interface DetachLinkPreviewsPatchData { - operation: 'detach' - previewIds: LinkPreviewID[] -} - -export interface SetLinkPreviewsPatchData { - operation: 'set' - previews: (LinkPreviewData & { previewId: LinkPreviewID })[] -} +// LinkPreview +export const linkPreviewType = 'application/vnd.huly.link-preview' as const -export interface ThreadPatch extends BasePatch { - type: PatchType.thread - data: AttachThreadPatchData | UpdateThreadPatchData -} - -export interface AttachThreadPatchData { - operation: 'attach' - threadId: CardID - threadType: CardType -} - -export interface UpdateThreadPatchData { - operation: 'update' - threadId: CardID - threadType?: CardType - repliesCountOp?: 'increment' | 'decrement' - lastReply?: Date -} +export interface LinkPreviewParams { + url: string + host: string -export type Patch = UpdatePatch | RemovePatch | ReactionPatch | BlobPatch | LinkPreviewPatch | ThreadPatch + title?: string + description?: string + siteName?: string -export enum PatchType { - update = 'update', - remove = 'remove', - reaction = 'reaction', - blob = 'blob', - linkPreview = 'linkPreview', - thread = 'thread' + iconUrl?: string + previewImage?: LinkPreviewImage } -export interface Reaction { - reaction: string - creator: SocialID - created: Date +export interface LinkPreviewImage { + url: string + width?: number + height?: number } -export interface BlobData { +export interface BlobParams { blobId: BlobID mimeType: string fileName: string @@ -238,37 +134,43 @@ export interface BlobData { metadata?: BlobMetadata } -export type BlobUpdateData = { blobId: BlobID } & Partial +// Attachment +export type AttachmentID = string & { __attachmentId: true } -export interface AttachedBlob extends 
BlobData { +export type Attachment = BlobAttachment | LinkPreviewAttachment | AppletAttachment +interface BaseAttachment extends AttachmentData { creator: SocialID created: Date + modified?: Date } -export interface LinkPreviewImage { - url: string - width?: number - height?: number +export interface LinkPreviewAttachment extends BaseAttachment { + type: typeof linkPreviewType } -export interface LinkPreviewData { - url: string - host: string +export interface BlobAttachment extends BaseAttachment {} - title?: string - description?: string - siteName?: string +export type AppletParams = Record +export type AppletType = `application/vnd.huly.applet.${string}` - iconUrl?: string - previewImage?: LinkPreviewImage +export interface AppletAttachment extends BaseAttachment { + type: AppletType } -export interface LinkPreview extends LinkPreviewData { - id: LinkPreviewID - creator: SocialID - created: Date +export interface AttachmentData

{
+  id: AttachmentID
+  type: string
+  params: P
+}
+
+export type AttachmentParams = Record
+
+export interface AttachmentUpdateData {
+  id: AttachmentID
+  params: Partial

+} + +// Thread export interface Thread { cardId: CardID messageId: MessageID @@ -277,3 +179,13 @@ export interface Thread { repliesCount: number lastReply: Date } + +// MessagesGroup +export interface MessagesGroup { + cardId: CardID + blobId: BlobID + fromDate: Date + toDate: Date + count: number + patches?: Patch[] +} diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 48455802735..137ece52e3f 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -14,7 +14,8 @@ // import type { AccountID, BlobID, CardID, CardType, ID, SocialID } from './core' -import type { Message, MessageID, Patch } from './message' +import type { Message, MessageID } from './message' +import { Patch } from './patch' export type ContextID = ID & { context: true } export type NotificationID = ID & { notification: true } diff --git a/packages/types/src/patch.ts b/packages/types/src/patch.ts new file mode 100644 index 00000000000..adc8e6e9bd7 --- /dev/null +++ b/packages/types/src/patch.ts @@ -0,0 +1,112 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. + +import type { CardID, CardType, Markdown, SocialID } from './core' +import { AttachmentData, AttachmentID, MessageExtra, MessageID, MessageType, AttachmentUpdateData } from './message' + +export type Patch = UpdatePatch | RemovePatch | ReactionPatch | ThreadPatch | AttachmentPatch + +export enum PatchType { + update = 'update', + remove = 'remove', + reaction = 'reaction', + attachment = 'attachment', + thread = 'thread' +} + +interface BasePatch { + messageId: MessageID + type: PatchType + creator: SocialID + created: Date + + data: Record +} + +export interface UpdatePatch extends BasePatch { + type: PatchType.update + data: UpdatePatchData +} + +export interface UpdatePatchData { + type?: MessageType + content?: Markdown + extra?: MessageExtra +} + +export interface RemovePatch extends BasePatch { + type: PatchType.remove + data: RemovePatchData +} + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface RemovePatchData {} + +export interface ReactionPatch extends BasePatch { + type: PatchType.reaction + data: AddReactionPatchData | RemoveReactionPatchData +} + +export interface AddReactionPatchData { + operation: 'add' + reaction: string +} + +export interface RemoveReactionPatchData { + operation: 'remove' + reaction: string +} + +export interface AttachmentPatch extends BasePatch { + type: PatchType.attachment + data: AddAttachmentsPatchData | RemoveAttachmentsPatchData | SetAttachmentsPatchData | UpdateAttachmentsPatchData +} + +export interface AddAttachmentsPatchData { + operation: 'add' + attachments: AttachmentData[] +} + +export interface RemoveAttachmentsPatchData { + operation: 'remove' + ids: AttachmentID[] +} + +export interface SetAttachmentsPatchData { + operation: 'set' + attachments: AttachmentData[] +} + +export interface UpdateAttachmentsPatchData { + operation: 'update' + attachments: 
AttachmentUpdateData[] +} + +export interface ThreadPatch extends BasePatch { + type: PatchType.thread + data: AttachThreadPatchData | UpdateThreadPatchData +} + +export interface AttachThreadPatchData { + operation: 'attach' + threadId: CardID + threadType: CardType +} + +export interface UpdateThreadPatchData { + operation: 'update' + threadId: CardID + threadType?: CardType + repliesCountOp?: 'increment' | 'decrement' + lastReply?: Date +} diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 5f562fb6ce7..469470f8674 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -44,10 +44,9 @@ interface FindParams { export interface FindMessagesParams extends FindParams { id?: MessageID card?: CardID - files?: boolean + attachments?: boolean reactions?: boolean replies?: boolean - links?: boolean created?: Partial> | Date } diff --git a/packages/yaml/src/deserialize.ts b/packages/yaml/src/deserialize.ts index 27caa437aff..80c9431da30 100644 --- a/packages/yaml/src/deserialize.ts +++ b/packages/yaml/src/deserialize.ts @@ -34,8 +34,7 @@ export function deserializeMessage (message: Message): FileMessage { lastReply: message.thread.lastReply } : undefined, - blobs: message.blobs, - reactions: message.reactions, - linkPreviews: message.linkPreviews + attachments: message.attachments, + reactions: message.reactions } } diff --git a/packages/yaml/src/parse.ts b/packages/yaml/src/parse.ts index d72fb250ae5..08669bda86f 100644 --- a/packages/yaml/src/parse.ts +++ b/packages/yaml/src/parse.ts @@ -14,7 +14,15 @@ // import { retry, type RetryOptions } from '@hcengineering/communication-shared' -import type { BlobID, FileMessage, FileMetadata, ParsedFile, WorkspaceID } from '@hcengineering/communication-types' +import { + type Attachment, + type BlobID, + type FileMessage, + type FileMetadata, + type ParsedFile, + type WorkspaceID, + linkPreviewType +} from '@hcengineering/communication-types' import yaml from 'js-yaml' export async function loadGroupFile ( @@ -76,18 +84,66 @@ export function parseYaml (data: string): ParsedFile { lastReply: message.thread.lastReply } : undefined, - blobs: - message.blobs?.map((it) => ({ ...it, mimeType: it.mimeType ?? (it as any).contentType })) ?? - (message as any).files?.map((it: any) => ({ + attachments: parseAttachments(message), + reactions: message.reactions ?? [] + })) + } +} + +function parseAttachments (message: FileMessage): Attachment[] { + if (message.attachments != null) { + return message.attachments + } + + const oldMessage = message as any + + const attachments: Attachment[] = [] + + if ('files' in oldMessage && Array.isArray(oldMessage.files)) { + attachments.push( + ...oldMessage.files.map((it: any) => ({ + id: it.blobId, + type: it.type, + params: { blobId: it.blobId, mimeType: it.type, fileName: it.filename, size: it.size, metadata: it.meta - })) ?? - [], - reactions: message.reactions ?? [], - linkPreviews: message.linkPreviews ?? [] - })) + }, + creator: it.creator, + created: new Date(it.created) + })) + ) + } else if ('blobs' in oldMessage && Array.isArray(oldMessage.blobs)) { + attachments.push( + ...oldMessage.blobs.map((it: any) => ({ + id: it.blobId, + type: it.mimeType ?? it.contentType, + params: { + blobId: it.blobId, + mimeType: it.mimeType ?? 
it.contentType, + fileName: it.fileName, + size: it.size, + metadata: it.metadata + }, + creator: it.creator, + created: new Date(it.created) + })) + ) } + + if ('linkPreviews' in oldMessage && Array.isArray(oldMessage.linkPreviews)) { + attachments.push( + ...oldMessage.linkPreviews.map((it: any) => ({ + id: it.previewId, + type: linkPreviewType, + params: it, + creator: it.creator, + created: new Date(it.created) + })) + ) + } + + return attachments } From 1536fe6cde485655f7088b92a8083477d55400f6 Mon Sep 17 00:00:00 2001 From: Kristina Date: Fri, 25 Jul 2025 19:26:24 +0400 Subject: [PATCH 148/636] Add tottal for notifications (#91) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/adapter.ts | 8 +- packages/cockroach/src/db/mapping.ts | 6 +- packages/cockroach/src/db/notification.ts | 335 ++++++++++++------ packages/query/src/lq.ts | 2 +- packages/query/src/messages/query.ts | 2 +- .../query/src/notification-contexts/query.ts | 103 +++--- packages/query/src/notifications/query.ts | 108 ++++-- packages/query/src/result.ts | 9 + packages/query/src/window.ts | 5 + packages/sdk-types/src/client.ts | 4 +- packages/sdk-types/src/db.ts | 6 +- packages/sdk-types/src/events/notification.ts | 2 + packages/server/src/middleware/db.ts | 5 +- packages/server/src/middleware/validate.ts | 6 +- packages/shared/src/processor.ts | 3 +- packages/shared/src/utils.ts | 9 +- packages/types/src/notification.ts | 1 + packages/types/src/query.ts | 5 + 18 files changed, 406 insertions(+), 213 deletions(-) diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 6837cdcc30c..0b83dbaefe5 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -45,7 +45,7 @@ import { type BlobID, type AttachmentData, type AttachmentID, - type AttachmentUpdateData + type AttachmentUpdateData, WithTotal } from '@hcengineering/communication-types' import type { DbAdapter, @@ -242,8 +242,8 @@ export class CockroachAdapter implements DbAdapter { ) } - async updateNotification (contextId: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates): Promise { - await this.notification.updateNotification(contextId, account, query, updates) + async updateNotification (contextId: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates): Promise { + return await this.notification.updateNotification(contextId, account, query, updates) } async removeNotifications ( @@ -284,7 +284,7 @@ export class CockroachAdapter implements DbAdapter { return await this.notification.findContexts(params) } - async findNotifications (params: FindNotificationsParams): Promise { + async findNotifications (params: FindNotificationsParams): Promise> { return await this.notification.findNotifications(params) } diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 32da1b4b97a..34cf72e2c46 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -39,6 +39,7 @@ import { } from '@hcengineering/communication-types' import { Domain } from '@hcengineering/communication-sdk-types' import { applyPatches } from '@hcengineering/communication-shared' + import { DbModel } from '../schema' interface RawMessage extends DbModel { @@ -78,7 +79,7 @@ interface RawNotification extends DbModel { }[] } -type RawContext = DbModel & { id: ContextID } & { +type RawContext = DbModel & { id: ContextID, total?: number } & { notifications?: RawNotification[] } @@ -185,7 
+186,8 @@ export function toNotificationContext (raw: RawContext): NotificationContext { lastNotify: raw.last_notify != null ? new Date(raw.last_notify) : undefined, notifications: (raw.notifications ?? []) .filter((it) => it.id != null) - .map((it) => toNotificationRaw(raw.id, raw.card_id, { ...it, account: raw.account })) + .map((it) => toNotificationRaw(raw.id, raw.card_id, { ...it, account: raw.account })), + totalNotifications: Number(raw.total ?? 0) } } diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 111a570dc1b..0b9319af14a 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -28,18 +28,20 @@ import { type NotificationID, type CardType, type NotificationType, - type NotificationContent + type NotificationContent, + WithTotal } from '@hcengineering/communication-types' - -import { BaseDb } from './base' -import { getCondition } from './utils' -import { toCollaborator, toNotification, toNotificationContext } from './mapping' +import { withTotal } from '@hcengineering/communication-shared' import { Domain, type NotificationContextUpdates, type NotificationUpdates, type UpdateNotificationQuery } from '@hcengineering/communication-sdk-types' + +import { BaseDb } from './base' +import { getCondition } from './utils' +import { toCollaborator, toNotification, toNotificationContext } from './mapping' import { DbModel, DbModelFilter, DbModelUpdate } from '../schema' export class NotificationsDb extends BaseDb { @@ -126,7 +128,7 @@ export class NotificationsDb extends BaseDb { account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates - ): Promise { + ): Promise { const where: string[] = [ 'nc.workspace_id = $1::uuid', 'nc.id = $2::int8', @@ -155,14 +157,17 @@ export class NotificationsDb extends BaseDb { } } - const whereClause = `WHERE ${where.join(' AND ')}` + const whereClause = `WHERE ${where.join(' AND ')} AND read <>${index}::boolean` const sql = ` UPDATE ${Domain.Notification} n - SET read = $${index++}::boolean - FROM ${Domain.NotificationContext} nc ${whereClause}` + SET read = $${index}::boolean + FROM ${Domain.NotificationContext} nc + ${whereClause} + ` - await this.execute(sql, [...values, updates.read], 'update notification') + const result = await this.execute(sql, [...values, updates.read], 'update notification') + return result?.count ?? 0 } async removeNotifications ( @@ -279,140 +284,214 @@ export class NotificationsDb extends BaseDb { } async findContexts (params: FindNotificationContextParams): Promise { - const withNotifications = params.notifications != null - const withMessages = params.notifications?.message === true + const withNotification = params.notifications != null + const withMessage = params.notifications?.message === true + const withTotal = params.notifications?.total === true + const notificationsLimit = params.notifications?.limit + const notificationOrder = params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC' const { where, values } = this.buildContextWhere(params) - const limit = params.limit != null ? `LIMIT ${Number(params.limit)}` : '' const orderBy = params.order != null ? `ORDER BY nc.last_notify ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const limit = params.limit != null ? 
`LIMIT ${Number(params.limit)}` : '' - let notificationsJoin = '' - let notificationsSelect = '' - let groupBy = '' - - if (withNotifications) { - const { where: whereNotifications, values: valuesNotifications } = this.buildNotificationWhere( - { read: params.notifications?.read, type: params.notifications?.type }, - values.length, - true - ) - values.push(...valuesNotifications) - - const notificationLimit = params.notifications?.limit ?? 10 - const notificationOrder = params.notifications?.order === SortingOrder.Ascending ? 'ASC' : 'DESC' + if (!withNotification) { + const sql = ` + SELECT nc.id::text, + nc.card_id, + nc.account, + nc.last_view, + nc.last_update, + nc.last_notify + FROM ${Domain.NotificationContext} nc + ${where} + ${orderBy} + ${limit}; + ` + const rows = await this.execute(sql, values, 'find contexts (no notifications)') + return rows.map((it: any) => toNotificationContext(it)) + } - notificationsJoin = ` - LEFT JOIN LATERAL ( - SELECT * + const { where: notificationWhere, values: notificationValues } = this.buildNotificationWhere( + { read: params.notifications?.read, type: params.notifications?.type }, + values.length, + true + ) + values.push(...notificationValues) + + const contextsCte = ` + WITH ctx AS ( + SELECT id, card_id, account, last_view, last_update, last_notify, workspace_id + FROM ${Domain.NotificationContext} nc + ${where} + ${orderBy} + ${limit} + ) + ` + + const notificationsCte = ` + , last_notifs AS ( + SELECT * + FROM ( + SELECT n.*, + ROW_NUMBER() OVER (PARTITION BY n.context_id ORDER BY n.created ${notificationOrder}) AS rn FROM ${Domain.Notification} n - ${whereNotifications} ${whereNotifications.length > 1 ? 'AND' : 'WHERE'} n.context_id = nc.id - ORDER BY n.created ${notificationOrder} - LIMIT ${notificationLimit} - ) n ON TRUE - - ${ - withMessages - ? ` - LEFT JOIN ${Domain.Message} m - ON m.workspace_id = nc.workspace_id - AND m.card_id = nc.card_id - AND m.id = n.message_id - AND n.message_id IS NOT NULL - AND n.blob_id IS NULL` - : '' - } - - LEFT JOIN LATERAL ( - SELECT COALESCE( + WHERE n.context_id IN (SELECT id FROM ctx) + ${notificationWhere.length > 0 ? `AND (${notificationWhere.replace(/^WHERE/i, '')})` : ''} + ) t + WHERE rn <= ${notificationsLimit} + ) + ` + + const msgKeysCte = ` + , message_keys AS ( + SELECT DISTINCT + c.workspace_id, + c.card_id, + n.message_id + FROM last_notifs n + JOIN ctx c ON c.id = n.context_id + WHERE n.message_id IS NOT NULL + ) + ` + + const statsCte = withTotal + ? ` + , stats AS ( + SELECT context_id, COUNT(*) AS total + FROM ${Domain.Notification} n + WHERE n.context_id IN (SELECT id FROM ctx) + ${notificationWhere.length > 0 ? 
`AND (${notificationWhere.replace(/^WHERE/i, '')})` : ''} + GROUP BY context_id + )` + : '' + + const patchesCte = ` + , patches_json AS ( + SELECT + p.workspace_id, + p.card_id, + p.message_id, + COALESCE( JSON_AGG( JSONB_BUILD_OBJECT( - 'type', p.type, - 'data', p.data, + 'type', p.type, + 'data', p.data, 'creator', p.creator, 'created', p.created - ) ORDER BY p.created DESC - ), '[]'::JSONB + ) + ), '[]'::jsonb ) AS patches - FROM ${Domain.Patch} p - WHERE p.workspace_id = nc.workspace_id AND p.card_id = nc.card_id AND p.message_id = n.message_id - ) p ON TRUE - - LEFT JOIN LATERAL ( - SELECT - COALESCE( - JSON_AGG( - JSONB_BUILD_OBJECT( - 'id', a.id, + FROM ${Domain.Patch} p + JOIN message_keys mk + ON mk.workspace_id = p.workspace_id + AND mk.card_id = p.card_id + AND mk.message_id = p.message_id + GROUP BY p.workspace_id, p.card_id, p.message_id + ) + ` + + const attachCte = ` + , attachments_json AS ( + SELECT + a.workspace_id, + a.card_id, + a.message_id, + COALESCE( + JSON_AGG( + JSONB_BUILD_OBJECT( + 'id', a.id, 'type', a.type, 'params', a.params, 'creator', a.creator, 'created', a.created, 'modified',a.modified - ) - ), - '[]'::JSONB - ) AS attachments - FROM communication.attachment AS a - WHERE a.workspace_id = nc.workspace_id - AND a.card_id = nc.card_id - AND a.message_id = n.message_id - ) AS a ON TRUE - ` + ) + ), '[]'::jsonb + ) AS attachments + FROM ${Domain.Attachment} a + JOIN message_keys mk + ON mk.workspace_id = a.workspace_id + AND mk.card_id = a.card_id + AND mk.message_id = a.message_id + GROUP BY a.workspace_id, a.card_id, a.message_id + ) + ` + + const msgJoin = withMessage + ? ` + LEFT JOIN ${Domain.Message} m + ON m.workspace_id = nc.workspace_id + AND m.card_id = nc.card_id + AND m.id = n.message_id + AND n.message_id IS NOT NULL + AND n.blob_id IS NULL` + : '' - notificationsSelect = `, + const sql = ` + ${contextsCte} + ${notificationsCte} + ${msgKeysCte} + ${statsCte} + ${patchesCte} + ${attachCte} + SELECT + nc.id::text, + nc.card_id, + nc.account, + nc.last_view, + nc.last_update, + ${withTotal ? ' MAX(s.total) AS total,' : ''} + nc.last_notify, COALESCE( JSON_AGG( JSONB_BUILD_OBJECT( - 'id', n.id::text, - 'read', n.read, - 'type', n.type, - 'content', n.content, - 'created', n.created, - 'message_created', n.message_created, - 'message_id', n.message_id::text, + 'id', n.id::text, + 'read', n.read, + 'type', n.type, + 'content', n.content, + 'created', n.created, + 'blob_id', n.blob_id, + 'message_created', n.message_created, + 'message_id', n.message_id::text, ${ - withMessages + withMessage ? 
` - 'message_type', m.type, - 'message_content', m.content, - 'message_data', m.data, - 'message_creator', m.creator,` + 'message_type', m.type, + 'message_content', m.content, + 'message_data', m.data, + 'message_creator', m.creator,` : '' } - 'blob_id', n.blob_id, - 'patches', p.patches, - 'attachments', a.attachments + 'patches', pj.patches, + 'attachments', aj.attachments ) ORDER BY n.created ${notificationOrder} - ), '[]'::JSONB - ) AS notifications` - - groupBy = ` - GROUP BY nc.id, nc.card_id, nc.account, nc.last_view, nc.last_update, nc.last_notify - ` - } - - const sql = ` - SELECT nc.id::text, - nc.card_id, - nc.account, - nc.last_view, - nc.last_update, - nc.last_notify - ${notificationsSelect} - FROM ${Domain.NotificationContext} nc - ${notificationsJoin} ${where} - ${groupBy} - ${orderBy} - ${limit}; - ` - - const result = await this.execute(sql, values, 'find contexts') - return result.map((it: any) => toNotificationContext(it)) + ), '[]'::jsonb + ) AS notifications + FROM ctx nc + ${withTotal ? 'LEFT JOIN stats s ON s.context_id = nc.id' : ''} + LEFT JOIN last_notifs n + ON n.context_id = nc.id + ${msgJoin} + LEFT JOIN patches_json pj + ON pj.workspace_id = nc.workspace_id + AND pj.card_id = nc.card_id + AND pj.message_id = n.message_id + LEFT JOIN attachments_json aj + ON aj.workspace_id = nc.workspace_id + AND aj.card_id = nc.card_id + AND aj.message_id = n.message_id + GROUP BY + nc.id, nc.card_id, nc.account, nc.last_view, nc.last_update, nc.last_notify + ${orderBy} + `.trim() + + const rows = await this.execute(sql, values, 'find contexts (cte)') + return rows.map((it: any) => toNotificationContext(it)) } - async findNotifications (params: FindNotificationsParams): Promise { + async findNotifications (params: FindNotificationsParams): Promise> { const withMessage = params.message === true let select = @@ -468,7 +547,31 @@ export class NotificationsDb extends BaseDb { const result = await this.execute(sql, values, 'find notifications') - return result.map((it: any) => toNotification(it)) + let total: number | undefined + + if (params.total === true) { + const totalSql = this.buildNotificationsTotalSql(params) + const result = await this.execute(totalSql.sql, totalSql.values, 'find notifications total') + total = result[0]?.total ?? 
undefined + } + + return withTotal( + result.map((it: any) => toNotification(it)), + total + ) + } + + private buildNotificationsTotalSql (params: FindNotificationsParams): { sql: string, values: any[] } { + const select = ` + SELECT COUNT(*) AS total + FROM ${Domain.Notification} n + JOIN ${Domain.NotificationContext} nc ON n.context_id = nc.id` + + const { where, values } = this.buildNotificationWhere(params) + + const sql = [select, where].join(' ') + + return { sql, values } } async updateCollaborators (params: FindCollaboratorsParams, data: Partial): Promise { diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 37c9de45056..090d82ac3d3 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -46,7 +46,7 @@ interface CreateQueryResult { unsubscribe: (force: boolean) => void } -const maxQueriesCache = 50 +const maxQueriesCache = 100 export class LiveQueries { private readonly queries = new Map() diff --git a/packages/query/src/messages/query.ts b/packages/query/src/messages/query.ts index a15758ef640..9a0f673a82c 100644 --- a/packages/query/src/messages/query.ts +++ b/packages/query/src/messages/query.ts @@ -713,7 +713,7 @@ export class MessagesQuery implements PagedQuery { if (this.callback == null) return if (this.result instanceof Promise) this.result = await this.result const result = this.result.getResult() - this.callback(new WindowImpl(result, this.result.isTail(), this.result.isHead(), this)) + this.callback(new WindowImpl(result, this.result.getTotal(), this.result.isTail(), this.result.isHead(), this)) } private match (message: Message): boolean { diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 1507e4646e9..ca85b69fe03 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -22,7 +22,6 @@ import { type MessageID, type Notification, type NotificationContext, - NotificationType, ParsedFile, PatchType, SortingOrder, @@ -125,6 +124,7 @@ export class NotificationContextsQuery implements PagedQuery { switch (event.type) { case MessageEventType.BlobPatch: + case MessageEventType.AttachmentPatch: case MessageEventType.RemovePatch: case MessageEventType.UpdatePatch: { await this.onCreatePatchEvent(event) @@ -356,6 +356,20 @@ export class NotificationContextsQuery implements PagedQuery !event.ids.includes(it.id)) @@ -387,19 +401,31 @@ export class NotificationContextsQuery implements PagedQuery matchNotification(it, matchQuery) && it.read !== event.updates.read ) - if (toUpdate === undefined || (toUpdate?.length ?? 0) === 0) return + if (toUpdate === undefined || (toUpdate?.length ?? 0) === 0) { + if (totalNotifications !== context.totalNotifications) { + this.result.update({ + ...context, + totalNotifications + }) + + void this.notify() + } + return + } const toUpdateMap = new Map(toUpdate.map((it) => [it.id, it])) - const currentLength = context.notifications.length ?? 0 - const newNotifications = context.notifications.map((it) => + const currentLength = context.notifications?.length ?? 0 + const newNotifications = (context.notifications ?? []).map((it) => toUpdateMap.has(it.id) ? { ...it, ...event.updates } : it ) const newLength = newNotifications.length @@ -416,12 +442,27 @@ export class NotificationContextsQuery implements PagedQuery 0) { + if (this.params.notifications?.read != null && this.params.notifications.read === event.updates.read) { + return (context.totalNotifications ?? 
0) + updatedCount + } else if (this.params.notifications?.read != null && this.params.notifications.read !== event.updates.read) { + return Math.max(0, (context.totalNotifications ?? 0) - updatedCount) + } + } + + return context.totalNotifications ?? 0 + } + private async onCreateNotificationEvent (event: CreateNotificationEvent): Promise { if (this.params.notifications == null || event.notificationId == null) return if (this.forward instanceof Promise) this.forward = await this.forward @@ -465,6 +506,9 @@ export class NotificationContextsQuery implements PagedQuery ({ - ...it, - read: - it.type === NotificationType.Message - ? event.updates.lastView != null && event.updates.lastView >= it.created - : it.read - })) - ) - : currentNotifications - - if ( - this.params.notifications != null && - newNotifications.length < currentNotifications.length && - newNotifications.length < this.params.notifications.limit && - this.params.notifications.order !== SortingOrder.Descending - ) { - const updated: NotificationContext = ( - await this.find({ id: event.contextId, limit: 1, notifications: this.params.notifications }) - )[0] - if (updated !== undefined) { - this.result.update(updated) - } else { - this.result.delete(contextToUpdate.id) - } - } else { - const updated: NotificationContext = { - ...contextToUpdate, - lastUpdate: event.updates.lastUpdate ?? contextToUpdate.lastUpdate, - lastView: event.updates.lastView ?? contextToUpdate.lastView, - lastNotify: event.updates.lastNotify ?? contextToUpdate.lastNotify, - notifications: newNotifications - } - this.result.update(updated) + const updated: NotificationContext = { + ...contextToUpdate, + lastUpdate: event.updates.lastUpdate ?? contextToUpdate.lastUpdate, + lastView: event.updates.lastView ?? contextToUpdate.lastView, + lastNotify: event.updates.lastNotify ?? contextToUpdate.lastNotify } + this.result.update(updated) if (event.updates.lastNotify != null) { this.sort(this.result) @@ -698,7 +711,7 @@ export class NotificationContextsQuery implements PagedQuery it.id) result.setTail(isComplete) result.setHead(isComplete) + if (this.params.total === true) { + result.setTotal(res.total) + } return result } @@ -116,15 +118,12 @@ export class NotificationQuery implements PagedQuery { + private async find (params: NotificationQueryParams): Promise> { delete params.strict const notifications = await this.client.findNotifications(params, this.id) if (params.message !== true) return notifications - return await Promise.all( + const result = await Promise.all( notifications.map(async (notification) => { if (notification.message != null || notification.blobId == null) return notification const message = await loadMessageFromGroup( @@ -230,18 +230,34 @@ export class NotificationQuery implements PagedQuery { if (event.notificationId == null) return if (this.result instanceof Promise) this.result = await this.result if (this.result.get(event.notificationId) != null) return - if (!this.result.isTail()) return const notification = NotificationProcessor.createFromEvent(event) + const match = matchNotification(notification, { ...this.params, created: undefined }) if (!match) return + const res = + this.params.order === SortingOrder.Ascending ? 
this.result.getResult() : this.result.getResult().reverse() + const first = res[0] + const last = res[res.length - 1] + const inRange = + first != null && + last != null && + notification.created.getTime() >= first.created.getTime() && + notification.created.getTime() <= last.created.getTime() + + if (!this.result.isTail() && !inRange) { + return + } + if (this.params.message === true) { const message = await this.client.findMessages({ card: notification.cardId, @@ -252,12 +268,24 @@ export class NotificationQuery implements PagedQuery + this.params.order === SortingOrder.Ascending + ? a.created.getTime() - b.created.getTime() + : b.created.getTime() - a.created.getTime() + ) + } else if (this.params.order === SortingOrder.Ascending) { if (this.params.limit !== undefined && this.result.length === this.params.limit && this.params.strict === true) { this.result.setTail(false) - return + } else { + this.result.push(notification) } - this.result.push(notification) } else { if (this.params.limit !== undefined && this.result.length === this.params.limit && this.params.strict === true) { this.result.pop() @@ -269,24 +297,6 @@ export class NotificationQuery implements PagedQuery { - if (this.result instanceof Promise) this.result = await this.result - if (this.params.context != null && this.params.context !== event.contextId) return - - const lastView = event.updates.lastView - if (lastView === undefined) return - - const toUpdate = this.result.getResult().filter((it) => it.contextId === event.contextId) - if (toUpdate.length === 0) return - - const updated: Notification[] = toUpdate.map((it) => ({ - ...it, - read: it.type === NotificationType.Message ? lastView >= it.created : it.read - })) - - await this.updateNotificationRead(this.result, updated) - } - private async updateNotificationRead (result: QueryResult, updated: Notification[]): Promise { const isAllowed = (n: Notification): boolean => { if (this.params.read == null) return true @@ -313,13 +323,30 @@ export class NotificationQuery implements PagedQuery { if (this.result instanceof Promise) this.result = await this.result + const updatedCount = event.updated ?? 0 + let totalUpdated = false + + if (this.params.total === true && updatedCount > 0) { + if (this.params.read != null && this.params.read === event.updates.read) { + this.result.setTotal(this.result.getTotal() + updatedCount) + totalUpdated = true + } else if (this.params.read != null && this.params.read !== event.updates.read) { + this.result.setTotal(this.result.getTotal() - updatedCount) + totalUpdated = true + } + } const toUpdate = ( event.query.id != null ? 
[this.result.get(event.query.id)].filter((it): it is Notification => it != null) : this.result.getResult().filter((it) => matchNotification(it, event.query)) ).filter((it) => it.read !== event.updates.read) - if (toUpdate === undefined || toUpdate.length === 0) return + if (toUpdate === undefined || toUpdate.length === 0) { + if (totalUpdated) { + await this.notify() + } + return + } const updated = toUpdate.map((it) => ({ ...it, ...event.updates })) await this.updateNotificationRead(this.result, updated) } @@ -328,6 +355,11 @@ export class NotificationQuery implements PagedQuery it.contextId === event.contextId) if (toRemove.length === 0) return @@ -403,7 +438,13 @@ export class NotificationQuery implements PagedQuery it.id) result.setHead(isHead) result.setTail(isTail) + result.setTotal(res.total) return result }) void this.result.then((res) => { diff --git a/packages/query/src/result.ts b/packages/query/src/result.ts index 794da50764f..ad29615a1ab 100644 --- a/packages/query/src/result.ts +++ b/packages/query/src/result.ts @@ -18,6 +18,7 @@ import type { ID } from '@hcengineering/communication-types' export class QueryResult { private objectById: Map + private total: number = -1 private tail: boolean = false private head: boolean = false @@ -32,6 +33,14 @@ export class QueryResult { this.objectById = new Map(messages.map((it) => [getId(it), it])) } + setTotal (total: number): void { + this.total = total + } + + getTotal (): number { + return Math.max(this.total, -1) + } + isTail (): boolean { return this.tail } diff --git a/packages/query/src/window.ts b/packages/query/src/window.ts index 84e07fde0a8..3e86a3078e1 100644 --- a/packages/query/src/window.ts +++ b/packages/query/src/window.ts @@ -20,6 +20,7 @@ import type { PagedQuery } from './types' export class WindowImpl implements Window { constructor ( private readonly result: T[], + private readonly total: number, private readonly isTail: boolean, private readonly isHead: boolean, private readonly query: PagedQuery @@ -29,6 +30,10 @@ export class WindowImpl implements Window { return this.result } + getTotal (): number { + return this.total + } + async loadNextPage (): Promise { if (!this.hasNextPage()) return await this.query.requestLoadNextPage() diff --git a/packages/sdk-types/src/client.ts b/packages/sdk-types/src/client.ts index 9b2f3fa8241..168b51edd37 100644 --- a/packages/sdk-types/src/client.ts +++ b/packages/sdk-types/src/client.ts @@ -24,7 +24,7 @@ import type { Message, MessagesGroup, Notification, - NotificationContext + NotificationContext, WithTotal } from '@hcengineering/communication-types' import type { EventResult, Event } from './events/event' @@ -39,7 +39,7 @@ export interface FindClient { findNotificationContexts: (params: FindNotificationContextParams, queryId?: number) => Promise - findNotifications: (params: FindNotificationsParams, queryId?: number) => Promise + findNotifications: (params: FindNotificationsParams, queryId?: number) => Promise> findLabels: (params: FindLabelsParams, queryId?: number) => Promise diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index cb7cb3bcb0c..23c53e9d6aa 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -44,7 +44,7 @@ import { NotificationType, AttachmentData, AttachmentID, - AttachmentUpdateData + AttachmentUpdateData, WithTotal } from '@hcengineering/communication-types' export interface DbAdapter { @@ -102,7 +102,7 @@ export interface DbAdapter { content: NotificationContent, created: Date ) => Promise - 
updateNotification: (context: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise + updateNotification: (context: ContextID, account: AccountID, query: UpdateNotificationQuery, updates: NotificationUpdates) => Promise removeNotifications: (contextId: ContextID, account: AccountID, ids: NotificationID[]) => Promise removeNotificationsBlobId: (cardId: CardID, blobId: string) => Promise updateNotificationsBlobId: (cardId: CardID, blobId: string, from: Date, to: Date) => Promise @@ -118,7 +118,7 @@ export interface DbAdapter { removeContext: (id: ContextID, account: AccountID) => Promise findNotificationContexts: (params: FindNotificationContextParams) => Promise - findNotifications: (params: FindNotificationsParams) => Promise + findNotifications: (params: FindNotificationsParams) => Promise> createLabel: (labelId: LabelID, cardId: CardID, cardType: CardType, account: AccountID, created: Date) => Promise removeLabels: (query: RemoveLabelQuery) => Promise diff --git a/packages/sdk-types/src/events/notification.ts b/packages/sdk-types/src/events/notification.ts index e0130a6bfe9..60a3e013070 100644 --- a/packages/sdk-types/src/events/notification.ts +++ b/packages/sdk-types/src/events/notification.ts @@ -25,6 +25,7 @@ import { SocialID, BlobID } from '@hcengineering/communication-types' + import type { BaseEvent } from './common' export enum NotificationEventType { @@ -79,6 +80,7 @@ export interface UpdateNotificationEvent extends BaseEvent { read: boolean } + updated?: number date?: Date } diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index dd1fb8cd3ad..1abe45d8346 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -382,8 +382,9 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { } private async updateNotification (event: Enriched): Promise { - await this.db.updateNotification(event.contextId, event.account, event.query, event.updates) - + const updated = await this.db.updateNotification(event.contextId, event.account, event.query, event.updates) + if (updated === 0) return { skipPropagate: true } + event.updated = updated return {} } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index d19ad9cebf6..18c0b825c58 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -257,7 +257,8 @@ const FindNotificationContextParamsSchema = FindParamsSchema.extend({ message: z.boolean().optional(), limit: z.number(), order: SortingOrderSchema, - read: z.boolean().optional() + read: z.boolean().optional(), + total: z.boolean().optional() }) .optional() }).strict() @@ -269,7 +270,8 @@ const FindNotificationsParamsSchema = FindParamsSchema.extend({ created: DateOrRecordSchema.optional(), account: z.union([AccountIDSchema, z.array(AccountIDSchema)]).optional(), message: z.boolean().optional(), - card: CardIDSchema.optional() + card: CardIDSchema.optional(), + total: z.boolean().optional() }).strict() const FindLabelsParamsSchema = FindParamsSchema.extend({ diff --git a/packages/shared/src/processor.ts b/packages/shared/src/processor.ts index b880c0030ba..e7aa107c515 100644 --- a/packages/shared/src/processor.ts +++ b/packages/shared/src/processor.ts @@ -49,6 +49,7 @@ import { } from '@hcengineering/communication-sdk-types' import { applyPatches } from './patch' +import { withTotal } from './utils' // eslint-disable-next-line 
@typescript-eslint/no-extraneous-class export class MessageProcessor { @@ -169,7 +170,7 @@ export class NotificationContextProcessor { lastView: event.lastView, lastUpdate: event.lastUpdate, lastNotify: event.lastNotify, - notifications: [] + notifications: withTotal([] as Notification[]) } } diff --git a/packages/shared/src/utils.ts b/packages/shared/src/utils.ts index 89dcf79f8ba..d6a930be3c3 100644 --- a/packages/shared/src/utils.ts +++ b/packages/shared/src/utils.ts @@ -19,7 +19,8 @@ import { Attachment, BlobAttachment, LinkPreviewAttachment, - linkPreviewType + linkPreviewType, + WithTotal } from '@hcengineering/communication-types' const COUNTER_BITS = 10n @@ -72,3 +73,9 @@ export function isLinkPreviewAttachment (attachment: Attachment): attachment is export function isBlobAttachment (attachment: Attachment): attachment is BlobAttachment { return !isLinkPreviewAttachment(attachment) && !isAppletAttachment(attachment) && 'blobId' in attachment.params } + +export function withTotal (objects: T[], total?: number): WithTotal { + const length = total ?? objects.length + + return Object.assign(objects, { total: length }) +} diff --git a/packages/types/src/notification.ts b/packages/types/src/notification.ts index 137ece52e3f..f5c1eba8fc4 100644 --- a/packages/types/src/notification.ts +++ b/packages/types/src/notification.ts @@ -68,4 +68,5 @@ export interface NotificationContext { lastNotify?: Date notifications?: Notification[] + totalNotifications?: number } diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index 469470f8674..d2b739af0ef 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -26,6 +26,7 @@ export type ComparisonOperator = 'less' | 'lessOrEqual' | 'greater' | 'greaterOr export interface Window { getResult: () => T[] + getTotal: () => number loadNextPage: () => Promise @@ -71,6 +72,7 @@ export interface FindNotificationContextParams extends FindParams { limit: number order: SortingOrder read?: boolean + total?: boolean } } @@ -84,6 +86,7 @@ export interface FindNotificationsParams extends FindParams { account?: AccountID | AccountID[] card?: CardID message?: boolean + total?: boolean } export interface FindCollaboratorsParams extends FindParams { @@ -97,3 +100,5 @@ export interface FindLabelsParams extends FindParams { cardType?: CardType | CardType[] account?: AccountID } + +export type WithTotal = T[] & { total: number } From 930e7b803430d959054bec52f8a91946d11860ab Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 29 Jul 2025 17:40:06 +0400 Subject: [PATCH 149/636] Fix total (#92) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/db/notification.ts | 2 +- packages/query/src/lq.ts | 12 ++++--- .../query/src/notification-contexts/query.ts | 32 +++++++++++++++++-- packages/query/src/result.ts | 2 +- 4 files changed, 40 insertions(+), 8 deletions(-) diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 0b9319af14a..649ed646a8f 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -552,7 +552,7 @@ export class NotificationsDb extends BaseDb { if (params.total === true) { const totalSql = this.buildNotificationsTotalSql(params) const result = await this.execute(totalSql.sql, totalSql.values, 'find notifications total') - total = result[0]?.total ?? undefined + total = result[0]?.total != null ? Number(result[0]?.total ?? 
0) : undefined } return withTotal( diff --git a/packages/query/src/lq.ts b/packages/query/src/lq.ts index 090d82ac3d3..d96f6ff7011 100644 --- a/packages/query/src/lq.ts +++ b/packages/query/src/lq.ts @@ -53,13 +53,19 @@ export class LiveQueries { private readonly unsubscribed = new Set() private counter: number = 0 + private eventQueue: Promise = Promise.resolve() + constructor ( private readonly client: FindClient, private readonly workspace: WorkspaceID, private readonly filesUrl: string ) { this.client.onEvent = (event) => { - void this.onEvent(event) + this.eventQueue = this.eventQueue + .then(() => this.onEvent(event)) + .catch(err => { + console.error('Error handling event:', err) + }) } this.client.onRequest = (event, promise) => { void this.onRequest(event, promise) @@ -67,9 +73,7 @@ export class LiveQueries { } async onEvent (event: Event): Promise { - for (const q of this.queries.values()) { - void q.onEvent(event) - } + await Promise.all(Array.from(this.queries.values()).map(q => q.onEvent(event))) } async onRequest (event: Event, promise: Promise): Promise { diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index ca85b69fe03..0a77979a3cb 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -404,6 +404,7 @@ export class NotificationContextsQuery implements PagedQuery matchNotification(it, matchQuery) && it.read !== event.updates.read ) if (toUpdate === undefined || (toUpdate?.length ?? 0) === 0) { - if (totalNotifications !== context.totalNotifications) { + if (shouldRefreshTotal) { + const nRes = await this.client.findNotifications({ + ...this.params.notifications, + limit: 1, + total: true + }) + + this.result.update({ + ...context, + totalNotifications: nRes.total + }) + + void this.notify() + } else if (totalNotifications !== context.totalNotifications) { this.result.update({ ...context, totalNotifications @@ -430,7 +444,9 @@ export class NotificationContextsQuery implements PagedQuery { } getTotal (): number { - return Math.max(this.total, -1) + return Math.max(this.total, 0) } isTail (): boolean { From ccd2f44c48d5cdd811499ff3626845276478426c Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Thu, 7 Aug 2025 17:40:17 +0300 Subject: [PATCH 150/636] HTTP API (done), WS API (in process) --- .gitignore | 9 + Cargo.lock | 3944 ++++++++++++++++++++++++++++++++++ Cargo.toml | 34 + LICENSE | 277 +++ README.md | 183 ++ scripts/!test.sh | 102 + scripts/!ws.sh | 58 + scripts/TEST_HTTP_API.sh | 31 + scripts/TEST_WS_API.sh | 58 + scripts/claims.json | 7 + scripts/claims_system.json | 7 + scripts/claims_wrong_ws.json | 7 + scripts/pulse_lib.sh | 73 + scripts/test_pulse.sh | 63 + scripts/test_pulse_system.sh | 35 + scripts/token.sh | 16 + src/config.rs | 77 + src/config/default.toml | 12 + src/handlers_http.rs | 241 +++ src/handlers_ws.rs | 119 + src/main.rs | 124 ++ src/redis.rs | 250 +++ src/redis.rs.ok | 231 ++ src/ws_owner.rs | 39 + 24 files changed, 5997 insertions(+) create mode 100644 .gitignore create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 LICENSE create mode 100644 README.md create mode 100755 scripts/!test.sh create mode 100755 scripts/!ws.sh create mode 100755 scripts/TEST_HTTP_API.sh create mode 100755 scripts/TEST_WS_API.sh create mode 100644 scripts/claims.json create mode 100644 scripts/claims_system.json create mode 100644 scripts/claims_wrong_ws.json create mode 100755 scripts/pulse_lib.sh create mode 
100755 scripts/test_pulse.sh create mode 100755 scripts/test_pulse_system.sh create mode 100755 scripts/token.sh create mode 100644 src/config.rs create mode 100644 src/config/default.toml create mode 100644 src/handlers_http.rs create mode 100644 src/handlers_ws.rs create mode 100644 src/main.rs create mode 100644 src/redis.rs create mode 100644 src/redis.rs.ok create mode 100644 src/ws_owner.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000000..dc110e3e5b1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +/off +/target +/scripts/off +Justfile +commit.sh +/src/GO.sh +GO.sh +DROP_DB.sh +TODO.txt diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000000..34c35b22781 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,3944 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "actix" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de7fa236829ba0841304542f7614c42b80fca007455315c45c785ccfa873a85b" +dependencies = [ + "actix-macros", + "actix-rt", + "actix_derive", + "bitflags 2.9.1", + "bytes", + "crossbeam-channel", + "futures-core", + "futures-sink", + "futures-task", + "futures-util", + "log", + "once_cell", + "parking_lot 0.12.4", + "pin-project-lite", + "smallvec", + "tokio", + "tokio-util", +] + +[[package]] +name = "actix-codec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-core", + "futures-sink", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "actix-cors" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec", +] + +[[package]] +name = "actix-http" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44dfe5c9e0004c623edc65391dfd51daa201e7e30ebd9c9bedf873048ec32bc2" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-tls", + "actix-utils", + "base64 0.22.1", + "bitflags 2.9.1", + "brotli", + "bytes", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "foldhash", + "futures-core", + "h2", + "http 0.2.12", + "httparse", + "httpdate", + "itoa", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite", + "rand 0.9.2", + "sha1", + "smallvec", + "tokio", + "tokio-util", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "actix-router" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" +dependencies = [ + "bytestring", + "cfg-if", + "http 0.2.12", + "regex", + "regex-lite", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" +dependencies = [ + 
"futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "actix-tls" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac453898d866cdbecdbc2334fe1738c747b4eba14a677261f2b768ba05329389" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "impl-more", + "pin-project-lite", + "tokio", + "tokio-rustls 0.23.4", + "tokio-util", + "tracing", + "webpki-roots 0.22.6", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite", +] + +[[package]] +name = "actix-web" +version = "4.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-tls", + "actix-utils", + "actix-web-codegen", + "bytes", + "bytestring", + "cfg-if", + "cookie", + "derive_more", + "encoding_rs", + "foldhash", + "futures-core", + "futures-util", + "impl-more", + "itoa", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite", + "regex", + "regex-lite", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2 0.5.10", + "time", + "tracing", + "url", +] + +[[package]] +name = "actix-web-actors" +version = "4.3.1+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98c5300b38fd004fe7d2a964f9a90813fdbe8a81fed500587e78b1b71c6f980" +dependencies = [ + "actix", + "actix-codec", + "actix-http", + "actix-web", + "bytes", + "bytestring", + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "actix-web-codegen" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" +dependencies = [ + "actix-router", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "actix_derive" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" + +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + +[[package]] +name = "async-trait" +version = "0.1.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-tungstenite" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef0f7efedeac57d9b26170f72965ecfd31473ca52ca7a64e925b0b6f5f079886" +dependencies = [ + "atomic-waker", + "futures-core", + "futures-io", + "futures-task", + "futures-util", + "log", + "pin-project-lite", + "tungstenite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bb8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"212d8b8e1a22743d9241575c6ba822cf9c8fef34771c86ab7e477a4fbfd254e5" +dependencies = [ + "futures-util", + "parking_lot 0.12.4", + "tokio", +] + +[[package]] +name = "bb8-redis" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5143936af5e1eea1a881e3e3d21b6777da6315e5e307bc3d0c2301c44fa37da9" +dependencies = [ + "bb8", + "redis", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +dependencies = [ + "serde", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "brotli" +version = "8.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "bytestring" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f" +dependencies = [ + "bytes", +] + +[[package]] +name = "cc" +version = "1.2.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3a42d84bb6b69d3a8b3eaacf0d88f179e1929695e1ad012b6cf64d9caaa5fd2" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "config" +version = "0.15.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b1eb4fb07bc7f012422df02766c7bd5971effb894f573865642f06fa3265440" +dependencies = [ + "async-trait", + "convert_case", + "json5", + "pathdiff", + "ron", + "rust-ini", + "serde", + "serde_json", + "toml 0.9.5", + "winnow", + "yaml-rust2", +] + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core 0.9.11", +] + +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "derive_more" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "governor" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cbe789d04bf14543f03c4b60cd494148aa79438c8440ae7d81a7778147745c3" +dependencies = [ + "cfg-if", + "dashmap", + "futures-sink", + "futures-timer", + "futures-util", + "getrandom 0.3.3", + "hashbrown 0.15.4", + "nonzero_ext", + "parking_lot 0.12.4", + "portable-atomic", + "quanta", + "rand 0.9.2", + "smallvec", + "spinning_top", + "web-time", +] + +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.10.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.4", +] + +[[package]] +name = "heck" +version = 
"0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.3.1", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hulypulse" +version = "0.1.0" +dependencies = [ + "actix", + "actix-cors", + "actix-web", + "actix-web-actors", + "anyhow", + "bb8", + "bb8-redis", + "config", + "hex", + "hulyrs", + "jsonwebtoken", + "md5", + "redis", + "refinery", + "secrecy", + "serde", + "serde_json", + "serde_with", + "size", + "tokio", + "tracing", + "tracing-subscriber", + "url", + "uuid", +] + +[[package]] +name = "hulyrs" +version = "0.1.0" +source = "git+https://github.com/hcengineering/hulyrs.git#08a583761d532cbfaa188983f4e10c77bcba4c71" +dependencies = [ + "actix-web", + "bytes", + "chrono", + "config", + "derive_builder", + "futures", + "governor", + "itoa", + "jsonwebtoken", + "num-traits", + "rand 0.9.2", + "reqwest", + "reqwest-middleware", + "reqwest-ratelimit", + "reqwest-retry", + "reqwest-websocket", + "ryu", + "secrecy", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror 2.0.12", + "tokio", + "tokio-stream", + "tokio_with_wasm", + "tracing", + "url", + "uuid", + "wasmtimer", +] + +[[package]] +name = "hyper" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.3.1", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.3.1", + "hyper", + "hyper-util", + "rustls 0.23.31", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.2", + "tower-service", + "webpki-roots 1.0.2", +] + +[[package]] +name = "hyper-util" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.0", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "impl-more" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown 0.15.4", + "serde", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "io-uring" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +dependencies = [ + "bitflags 2.9.1", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem", + "ring 0.17.14", + "serde", + 
"serde_json", + "simple_asn1", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.174" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "local-channel" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" +dependencies = [ + "futures-core", + "futures-sink", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "md5" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "log", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + +[[package]] +name = "nu-ansi-term" +version = 
"0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.11", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.17", + "smallvec", + "windows-targets", +] + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "pem" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +dependencies = [ + "base64 0.22.1", + "serde", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" +dependencies = [ + "memchr", + "thiserror 2.0.12", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5" +dependencies = [ + "pest", + "sha2", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "postgres-protocol" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ff0abab4a9b844b93ef7b81f1efc0a366062aaef2cd702c76256b5dc075c54" +dependencies = [ + "base64 0.22.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac", + "md-5", + "memchr", + "rand 0.9.2", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613283563cd90e1dfc3518d548caee47e0e725455ed619881f5cf21f36de4b48" +dependencies = [ + "bytes", + "fallible-iterator", + "postgres-protocol", +] + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] 
+name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi 0.11.1+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls 0.23.31", + "socket2 0.5.10", + "thiserror 2.0.12", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring 0.17.14", + "rustc-hash", + "rustls 0.23.31", + "rustls-pki-types", + "slab", + "thiserror 2.0.12", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "raw-cpuid" +version = "11.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146" +dependencies = [ + "bitflags 2.9.1", +] + +[[package]] +name = "redis" +version = "0.32.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1f66bf4cac9733a23bcdf1e0e01effbaaad208567beba68be8f67e5f4af3ee1" +dependencies = [ + "bytes", + "cfg-if", + "combine", + "futures-util", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "rand 0.9.2", + "ryu", + "sha1_smol", + "socket2 0.6.0", + "tokio", + "tokio-util", + "url", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags 2.9.1", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "refinery" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba5d693abf62492c37268512ff35b77655d2e957ca53dab85bf993fe9172d15" +dependencies = [ + "refinery-core", + "refinery-macros", +] + +[[package]] +name = "refinery-core" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a83581f18c1a4c3a6ebd7a174bdc665f17f618d79f7edccb6a0ac67e660b319" +dependencies = [ + "async-trait", + "cfg-if", + "log", + "regex", + "serde", + "siphasher", + "thiserror 1.0.69", + "time", + "tokio", + "tokio-postgres", + "toml 0.8.23", + "url", + "walkdir", +] + +[[package]] +name = "refinery-macros" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c225407d8e52ef8cf094393781ecda9a99d6544ec28d90a6915751de259264" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "refinery-core", + "regex", + "syn", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = 
"regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.12.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "http 1.3.1", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.31", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls 0.26.2", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 1.0.2", +] + +[[package]] +name = "reqwest-middleware" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e" +dependencies = [ + "anyhow", + "async-trait", + "http 1.3.1", + "reqwest", + "serde", + "thiserror 1.0.69", + "tower-service", +] + +[[package]] +name = "reqwest-ratelimit" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b8fff0d8036f23dcad6c27605ca3baa8ae3867438d0a8b34072f40f6c8bf628" +dependencies = [ + "async-trait", + "http 1.3.1", + "reqwest", + "reqwest-middleware", +] + +[[package]] +name = "reqwest-retry" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c73e4195a6bfbcb174b790d9b3407ab90646976c55de58a6515da25d851178" +dependencies = [ + "anyhow", + "async-trait", + "futures", + "getrandom 0.2.16", + "http 1.3.1", + "hyper", + "parking_lot 0.11.2", + "reqwest", + "reqwest-middleware", + "retry-policies", + "thiserror 1.0.69", + "tokio", + "tracing", + "wasm-timer", +] + +[[package]] +name = "reqwest-websocket" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f91a811daaa8b54faeaec9d507a336897a3d243834a4965254a17d39da8b5c9" +dependencies = [ + "async-tungstenite", + "bytes", + "futures-util", + "reqwest", + "serde", + "serde_json", + "thiserror 2.0.12", + "tokio", + "tokio-util", + "tracing", + "tungstenite", + "web-sys", +] + +[[package]] +name = "retry-policies" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5875471e6cab2871bc150ecb8c727db5113c9338cc3354dc5ee3425b6aa40a1c" +dependencies = [ + "rand 0.8.5", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "ron" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" +dependencies = [ + "base64 0.21.7", + "bitflags 2.9.1", + "serde", + "serde_derive", +] + +[[package]] +name = "rust-ini" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791" +dependencies = [ + "cfg-if", + "ordered-multimap", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.23.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" +dependencies = [ + "once_cell", + "ring 0.17.14", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +dependencies = [ + "ring 0.17.14", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + +[[package]] +name = "secrecy" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" +dependencies = [ + "serde", + "zeroize", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.142" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] 
+ +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.12", + "time", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "size" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6709c7b6754dca1311b3c73e79fcce40dd414c782c66d88e8823030093b02b" +dependencies = [ + "serde", +] + +[[package]] +name = "slab" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl 2.0.12", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "parking_lot 0.12.4", + "pin-project-lite", + "signal-hook-registry", + "slab", + "socket2 0.6.0", + "tokio-macros", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c95d533c83082bb6490e0189acaa0bbeef9084e60471b696ca6988cd0541fb0" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot 0.12.4", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand 0.9.2", + "socket2 0.5.10", + "tokio", + "tokio-util", + "whoami", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls 0.23.31", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio_with_wasm" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dfba9b946459940fb564dcf576631074cdfb0bfe4c962acd4c31f0dca7897e6" +dependencies = [ + "js-sys", + "tokio", + "tokio_with_wasm_proc", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "tokio_with_wasm_proc" +version = "0.8.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e04c1865c281139e5ccf633cb9f76ffdaabeebfe53b703984cf82878e2aabb" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit", +] + +[[package]] +name = "toml" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" +dependencies = [ + "serde", + "serde_spanned 1.0.0", + "toml_datetime 0.7.0", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.10.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.1", + "bytes", + "futures-util", + "http 1.3.1", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +dependencies = [ + "bytes", + "data-encoding", + "http 1.3.1", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.12", + "utf-8", +] + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +dependencies = [ + "getrandom 0.3.3", + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] 
+name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-timer" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.11.2", + "pin-utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmtimer" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8d49b5d6c64e8558d9b1b065014426f35c18de636895d24893dbbd329743446" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.12.4", + "pin-utils", + "slab", + "wasm-bindgen", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "whoami" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +dependencies = [ + "redox_syscall 0.5.17", + "wasite", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" 
+version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.9.1", +] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yaml-rust2" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce2a4ff45552406d02501cea6c18d8a7e50228e7736a872951fe2fe75c91be7" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name 
= "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdbb9122ea75b11bf96e7492afb723e8a7fbe12c67417aa95e7e3d18144d37cd" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.15+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000000..598df204e06 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "hulypulse" +version = "0.1.0" +edition = "2024" + +[dependencies] +tokio = { version = "1", features = ["full"] } +tracing = "0.1.41" +tracing-subscriber = "0.3.19" +anyhow = "1.0.97" +config = "0.15.4" +serde = "1.0.219" +actix = "0.13.5" +actix-web = "4.10.2" +actix-cors = "0.7.1" +actix-web-actors = "4.2.0" +refinery = { version = "0.8.16", features = ["tokio-postgres"] } +bb8 = "0.9.0" +bb8-redis = "0.24" +redis = { version = "0.32.2", features = [ "tokio-comp", "sentinel" ] } +md5 = "0.8.0" +serde_with = "3" +url = "2" +jsonwebtoken = "9.3.1" +size = { version = "0.5.0", features = ["serde"] } +uuid = { version = "1.7", features = ["v4", "serde"] } +hex = "0.4.3" +serde_json = "1.0" +hulyrs = { git = "https://github.com/hcengineering/hulyrs.git", features = ["actix"] } +secrecy = "0.10.3" + +[[bin]] +name = "hulypulse" +path = "src/main.rs" diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000000..e48e0963459 --- /dev/null +++ b/LICENSE @@ -0,0 +1,277 @@ +Eclipse Public License - v 2.0 + + THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE + PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION + OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + + a) in the case of the initial Contributor, the initial content + Distributed under this Agreement, and + + b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + where such changes and/or additions to the Program originate from + and are Distributed by that particular Contributor. 
A Contribution + "originates" from a Contributor if it was added to the Program by + such Contributor itself or anyone acting on such Contributor's behalf. + Contributions do not include changes or additions to the Program that + are not Modified Works. + +"Contributor" means any person or entity that Distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which +are necessarily infringed by the use or sale of its Contribution alone +or when combined with the Program. + +"Program" means the Contributions Distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement +or any Secondary License (as applicable), including Contributors. + +"Derivative Works" shall mean any work, whether in Source Code or other +form, that is based on (or derived from) the Program and for which the +editorial revisions, annotations, elaborations, or other modifications +represent, as a whole, an original work of authorship. + +"Modified Works" shall mean any work in Source Code or other form that +results from an addition to, deletion from, or modification of the +contents of the Program, including, for purposes of clarity any new file +in Source Code form that contains any contents of the Program. Modified +Works shall not include works that contain only declarations, +interfaces, types, classes, structures, or files of the Program solely +in each case in order to link to, bind by name, or subclass the Program +or Modified Works thereof. + +"Distribute" means the acts of a) distributing or b) making available +in any manner that enables the transfer of a copy. + +"Source Code" means the form of a Program preferred for making +modifications, including but not limited to software source code, +documentation source, and configuration files. + +"Secondary License" means either the GNU General Public License, +Version 2.0, or any later versions of that license, including any +exceptions or additional permissions as identified by the initial +Contributor. + +2. GRANT OF RIGHTS + + a) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free copyright + license to reproduce, prepare Derivative Works of, publicly display, + publicly perform, Distribute and sublicense the Contribution of such + Contributor, if any, and such Derivative Works. + + b) Subject to the terms of this Agreement, each Contributor hereby + grants Recipient a non-exclusive, worldwide, royalty-free patent + license under Licensed Patents to make, use, sell, offer to sell, + import and otherwise transfer the Contribution of such Contributor, + if any, in Source Code or other form. This patent license shall + apply to the combination of the Contribution and the Program if, at + the time the Contribution is added by the Contributor, such addition + of the Contribution causes such combination to be covered by the + Licensed Patents. The patent license shall not apply to any other + combinations which include the Contribution. No hardware per se is + licensed hereunder. + + c) Recipient understands that although each Contributor grants the + licenses to its Contributions set forth herein, no assurances are + provided by any Contributor that the Program does not infringe the + patent or other intellectual property rights of any other entity. 
+ Each Contributor disclaims any liability to Recipient for claims + brought by any other entity based on infringement of intellectual + property rights or otherwise. As a condition to exercising the + rights and licenses granted hereunder, each Recipient hereby + assumes sole responsibility to secure any other intellectual + property rights needed, if any. For example, if a third party + patent license is required to allow Recipient to Distribute the + Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + + d) Each Contributor represents that to its knowledge it has + sufficient copyright rights in its Contribution, if any, to grant + the copyright license set forth in this Agreement. + + e) Notwithstanding the terms of any Secondary License, no + Contributor makes additional grants to any Recipient (other than + those set forth in this Agreement) as a result of such Recipient's + receipt of the Program under the terms of a Secondary License + (if permitted under the terms of Section 3). + +3. REQUIREMENTS + +3.1 If a Contributor Distributes the Program in any form, then: + + a) the Program must also be made available as Source Code, in + accordance with section 3.2, and the Contributor must accompany + the Program with a statement that the Source Code for the Program + is available under this Agreement, and informs Recipients how to + obtain it in a reasonable manner on or through a medium customarily + used for software exchange; and + + b) the Contributor may Distribute the Program under a license + different than this Agreement, provided that such license: + i) effectively disclaims on behalf of all other Contributors all + warranties and conditions, express and implied, including + warranties or conditions of title and non-infringement, and + implied warranties or conditions of merchantability and fitness + for a particular purpose; + + ii) effectively excludes on behalf of all other Contributors all + liability for damages, including direct, indirect, special, + incidental and consequential damages, such as lost profits; + + iii) does not attempt to limit or alter the recipients' rights + in the Source Code under section 3.2; and + + iv) requires any subsequent distribution of the Program by any + party to be under a license that satisfies the requirements + of this section 3. + +3.2 When the Program is Distributed as Source Code: + + a) it must be made available under this Agreement, or if the + Program (i) is combined with other material in a separate file or + files made available under a Secondary License, and (ii) the initial + Contributor attached to the Source Code the notice described in + Exhibit A of this Agreement, then the Program may be made available + under the terms of such Secondary Licenses, and + + b) a copy of this Agreement must be included with each copy of + the Program. + +3.3 Contributors may not remove or alter any copyright, patent, +trademark, attribution notices, disclaimers of warranty, or limitations +of liability ("notices") contained within the Program from any copy of +the Program which they Distribute, provided that Contributors may add +their own appropriate notices. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities +with respect to end users, business partners and the like. 
While this +license is intended to facilitate the commercial use of the Program, +the Contributor who includes the Program in a commercial product +offering should do so in a manner which does not create potential +liability for other Contributors. Therefore, if a Contributor includes +the Program in a commercial product offering, such Contributor +("Commercial Contributor") hereby agrees to defend and indemnify every +other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits +and other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such +Commercial Contributor in connection with its distribution of the Program +in a commercial product offering. The obligations in this section do not +apply to any claims or Losses relating to any actual or alleged +intellectual property infringement. In order to qualify, an Indemnified +Contributor must: a) promptly notify the Commercial Contributor in +writing of such claim, and b) allow the Commercial Contributor to control, +and cooperate with the Commercial Contributor in, the defense and any +related settlement negotiations. The Indemnified Contributor may +participate in any such claim at its own expense. + +For example, a Contributor might include the Program in a commercial +product offering, Product X. That Contributor is then a Commercial +Contributor. If that Commercial Contributor then makes performance +claims, or offers warranties related to Product X, those performance +claims and warranties are such Commercial Contributor's responsibility +alone. Under this section, the Commercial Contributor would have to +defend claims against the other Contributors related to those performance +claims and warranties, and if a court requires any other Contributor to +pay any damages as a result, the Commercial Contributor must pay +those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" +BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF +TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR +PURPOSE. Each Recipient is solely responsible for determining the +appropriateness of using and distributing the Program and assumes all +risks associated with its exercise of rights under this Agreement, +including but not limited to the risks and costs of program errors, +compliance with applicable laws, damage to or loss of data, programs +or equipment, and unavailability or interruption of operations. + +6. DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT +PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS +SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST +PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + +7. 
GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of +the remainder of the terms of this Agreement, and without further +action by the parties hereto, such provision shall be reformed to the +minimum extent necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity +(including a cross-claim or counterclaim in a lawsuit) alleging that the +Program itself (excluding combinations of the Program with other software +or hardware) infringes such Recipient's patent(s), then such Recipient's +rights granted under Section 2(b) shall terminate as of the date such +litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it +fails to comply with any of the material terms or conditions of this +Agreement and does not cure such failure in a reasonable period of +time after becoming aware of such noncompliance. If all Recipient's +rights under this Agreement terminate, Recipient agrees to cease use +and distribution of the Program as soon as reasonably practicable. +However, Recipient's obligations under this Agreement and any licenses +granted by Recipient relating to the Program shall continue and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, +but in order to avoid inconsistency the Agreement is copyrighted and +may only be modified in the following manner. The Agreement Steward +reserves the right to publish new versions (including revisions) of +this Agreement from time to time. No one other than the Agreement +Steward has the right to modify this Agreement. The Eclipse Foundation +is the initial Agreement Steward. The Eclipse Foundation may assign the +responsibility to serve as the Agreement Steward to a suitable separate +entity. Each new version of the Agreement will be given a distinguishing +version number. The Program (including Contributions) may always be +Distributed subject to the version of the Agreement under which it was +received. In addition, after a new version of the Agreement is published, +Contributor may elect to Distribute the Program (including its +Contributions) under the new version. + +Except as expressly stated in Sections 2(a) and 2(b) above, Recipient +receives no rights or licenses to the intellectual property of any +Contributor under this Agreement, whether expressly, by implication, +estoppel or otherwise. All rights in the Program not expressly granted +under this Agreement are reserved. Nothing in this Agreement is intended +to be enforceable by any entity that is not a Contributor or Recipient. +No third-party beneficiary rights are created under this Agreement. + +Exhibit A - Form of Secondary Licenses Notice + +"This Source Code may also be made available under the following +Secondary Licenses when the conditions for such availability set forth +in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), +version(s), and exceptions or additional permissions here}." + + Simply including a copy of this Agreement, including this Exhibit A + is not sufficient to license the Source Code under Secondary Licenses. + + If it is not possible or desirable to put the notice in a particular + file, then You may include the notice in a location (such as a LICENSE + file in a relevant directory) where a recipient would be likely to + look for such a notice. + + You may add additional accurate notices of copyright ownership. 
diff --git a/README.md b/README.md
new file mode 100644
index 00000000000..e8aa9f6763d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,183 @@
+# Hulypulse
+
+Hulypulse is a service that enables clients to share information on a “whiteboard”. Clients connected to the same “whiteboard” see data provided by other clients to that whiteboard.
+
+The service is exposed as a REST and WebSocket API.
+
+**Usage scenarios:**
+
+- user presence in a document
+- user “is typing” event
+- user cursor position in an editor or drawing board
+- a service posts a process status
+
+## Key
+A key is a string that consists of one or more segments separated by a separator.
+Example: foo/bar/baz.
+
+It is possible to use wildcard keys to list or subscribe to all values with a given prefix.
+
+A key may contain a special section (guard) `$` that separates public and private data. “Private” data is available only when querying or subscribing by the exact key.
+Example: foo/bar/$/private can be queried by foo/bar/$/private or foo/bar/$/, but not by foo/bar/.
+
+## Data
+“Data” is an arbitrary JSON document.
+The size of the data is limited (see `HULY_PAYLOAD_SIZE_LIMIT`, 2 MB by default).
+
+## API
+Methods
+
+GET - returns the value of one key
+
+LIST - returns values with a given prefix, up to the “sentinel”
+
+PUT - puts a value to the key
+- Supports CAS
+- Supports If-* headers
+
+DELETE - deletes the value of the key
+
+SUB - subscribes to key data and returns the initial state
+Behavior is identical to LIST
+
+UNSUB - unsubscribes from key data
+
+
+## HTTP API
+
+```PUT /{workspace}/{key}```
+- Input
+  - Body: data
+  - Content-Type: application/json (do we need something else?)
+  - Content-Length: optional
+  - Headers: TTL or absolute expiration time
+    - HULY-TTL
+    - HULY-EXPIRE-AT
+  - **Conditional Headers If-*:**
+    - `If-Match: *` — update only if the key exists
+    - `If-Match: <md5>` — update only if the current value's MD5 matches
+    - `If-None-Match: *` — insert only if the key does not exist
+- Output
+  - Status:
+    - `201` if inserted with `If-None-Match: *`
+    - `204` on successful insert or update
+    - `412` if the condition is not met
+    - `400` if headers are invalid
+  - No body
+
+```PATCH /{workspace}/{key}```
+- TODO (not in v1)
+
+```DELETE /{workspace}/{key}```
+- Output
+  - Status: 204
+
+```GET /{workspace}/{key}```
+- Output
+  - Status: 200
+  - Content-Type: application/json
+  - Body:
+    - workspace
+    - key
+    - data
+    - expiresAt ?
+
+```GET /{workspace}?prefix={key}```
+- Output
+  - Status: 200
+  - Content-Type: application/json
+  - Body (array):
+    - workspace
+    - key
+    - data
+    - expiresAt ?
+
+## WebSocket API
+
+**Client to Server**
+
+```PUT```
+  - correlation id (optional)
+  - type: "put"
+  - key: "foo/bar"
+  - data
+  - TTL / expiresAt
+
+```DELETE```
+  - correlation id (optional)
+  - type: "delete"
+  - key: "foo/bar"
+
+```SUB```
+  - type: "sub"
+  - key: "foo/bar"
+
+```UNSUB```
+  - type: "unsub"
+  - key: "foo/bar"
+
+**Server to Client**
+
+```PUT```
+  - correlation id (optional)
+  - type: "put"
+  - ?? TODO: user? workspace: "11111111-2222-3333-4444-555555555555"
+  - key: "foo/bar"
+  - data
+  - expiresAt
+
+```DELETE```
+  - correlation id (optional)
+  - type: "delete"
+  - key: "foo/bar"
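+
+## Examples
+
+A quick sketch of calling the API with `curl` and `websocat`, the same tools the helper scripts in `scripts/` use. The workspace id, key, payload and `$TOKEN` below are placeholders; note that the current implementation serves the HTTP API under the `/api` prefix (see `src/main.rs` and `scripts/pulse_lib.sh`).
+
+```bash
+# insert only if the key does not exist, with a 60 second TTL
+curl -i -X PUT "http://localhost:8095/api/$WORKSPACE/foo/bar" \
+  -H "Authorization: Bearer $TOKEN" \
+  -H "Content-Type: application/json" \
+  -H "If-None-Match: *" \
+  -H "HULY-TTL: 60" \
+  -d '{"status": "typing"}'
+
+# read a single key, or list everything under a prefix
+curl -s "http://localhost:8095/api/$WORKSPACE/foo/bar" -H "Authorization: Bearer $TOKEN"
+curl -s "http://localhost:8095/api/$WORKSPACE?prefix=foo/" -H "Authorization: Bearer $TOKEN"
+
+# WebSocket session: send JSON commands such as {"type":"put","key":"foo/bar","data":"..."}
+websocat "ws://localhost:8095/ws/$WORKSPACE"
+```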
+
+
+## Running
+
+Pre-built Docker images are available at: hardcoreeng/service_hulypulse:{tag}.
+
+You can use the following command to run the image locally:
+```bash
+docker run -p 8095:8095 -it --rm hardcoreeng/service_hulypulse:{tag}
+```
+
+If you want to run the service as part of the local huly development environment, use the following command:
+```bash
+ export HULY_REDIS_URLS="redis://huly.local:6379"
+ docker run --rm -it --network dev_default -p 8095:8095 hardcoreeng/service_hulypulse:{tag}
+```
+This will run Hulypulse in the same network as the rest of the huly services, and set the Redis connection string to the one matching the local dev Redis instance.
+
+You can then access hulypulse at http://localhost:8095.
+
+
+## Authentication
+Hulypulse uses bearer JWT token authentication. At the moment, it will accept any token signed with the hulypulse secret. The secret is set in the HULY_TOKEN_SECRET environment variable.
+
+## Configuration
+The following environment variables are used to configure hulypulse:
+ - ```HULY_BIND_HOST```: host to bind the server to (default: 0.0.0.0)
+ - ```HULY_BIND_PORT```: port to bind the server to (default: 8095)
+ - ```HULY_PAYLOAD_SIZE_LIMIT```: maximum size of the payload (default: 2Mb)
+ - ```HULY_TOKEN_SECRET```: secret used to sign JWT tokens (default: secret)
+ - ```HULY_REDIS_URLS```: Redis connection string (default: redis://huly.local:6379)
+ - ```HULY_REDIS_PASSWORD```: Redis password (default: "<invalid>")
+ - ```HULY_REDIS_MODE```: Redis mode, "direct" or "sentinel" (default: "direct")
+ - ```HULY_REDIS_SERVICE```: Redis service name, used in sentinel mode (default: "mymaster")
+ - ```HULY_MAX_TTL```: maximum storage time in seconds (default: 3600)
+
+## Todo (in no particular order)
+- [ ] Optional value encryption
+- [ ] HEAD request
+- [ ] Conditional update (optimistic locking)
+- [ ] Support for open telemetry
+- [ ] Concurrency control for database migration (several instances of hulypulse are updated at the same time)
+- [ ] TLS support
+- [ ] Namespace-based access control
+- [ ] Liveness/readiness probe endpoint
+
+## Contributing
+Contributions are welcome! Please open an issue or a pull request if you have any suggestions or improvements.
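+
+## Local testing
+
+As described in the Authentication section, any token signed with the configured secret is accepted. A rough sketch of minting a development token with the helper scripts in `scripts/` (this assumes the `jwt` CLI used by `scripts/token.sh` is installed and the service is running locally; the workspace id comes from `scripts/claims.json`):
+
+```bash
+cd scripts
+# token.sh signs the given claims file with token_secret from src/config/default.toml
+TOKEN=$(./token.sh claims.json)
+# list everything in the workspace
+curl -s "http://localhost:8095/api/00000000-0000-0000-0000-000000000001?prefix=" \
+  -H "Authorization: Bearer $TOKEN"
+```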
+ +## License +This project is licensed under EPL-2.0 diff --git a/scripts/!test.sh b/scripts/!test.sh new file mode 100755 index 00000000000..6c779757a04 --- /dev/null +++ b/scripts/!test.sh @@ -0,0 +1,102 @@ +#!/bin/bash + +clear +source ./pulse_lib.sh + +TOKEN=$(./token.sh claims.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS" +# /AnyKey" + +# put ${ZP} "one text" + +# put "00000000-0000-0000-0000-000000000001/TESTS" "text 1" "If-None-Match: *" "Blooooooooo: blya" + +#exit + +#put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" +#echo "sleep 1 sec" +#sleep 1 +#get "00000000-0000-0000-0000-000000000001/TESTS" +#echo "sleep 3 sec" +#sleep 2 +#get "00000000-0000-0000-0000-000000000001/TESTS" + +put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001?prefix=TES" +sleep 1 +get "00000000-0000-0000-0000-000000000001?prefix=" + +exit + +echo "--------- delete ----------" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001/TESTS" +delete "00000000-0000-0000-0000-000000000001/TESTS" +get "00000000-0000-0000-0000-000000000001/TESTS" +#HULY-EXPIRE-AT: + + +# put "00000000-0000-0000-0000-000000000001/TESTS/some" "text 2" +# put "00000000-0000-0000-0000-000000000001/TESTS/some/gogo/" "text 3" + + +exit + + +echo "================> LIST" + put "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" "value1" + put "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" "value2" + get "00000000-0000-0000-0000-000000000001/Huome2" + delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" + delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" + +echo "================> WRONG UUID" + get "WrongUUID/TESTS/AnyKey" + +echo "================> INSERT If-None-Match" + + echo "-- Expected Error: 400 Bad Request (If-None-Match may be only *)" + put ${ZP} "enother text" "If-None-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + delete ${ZP} + + echo "-- Expected OK: 201 Created (key was not exist)" + put ${ZP} "enother text" "If-None-Match" "*" + + put ${ZP} "some text" + echo "-- Expected Error: 412 Precondition Failed (key was exist)" + put ${ZP} "enother text" "If-None-Match" "*" + +echo "================> UPDATE PUT If-Match" + + get ${ZP} + + echo "-- Expected OK: 204 No Content (right hash)" + put ${ZP} "some text" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (hash still right)" + put ${ZP} "enother version" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (any hash)" + put ${ZP} "enother version2" "If-Match" "*" + get ${ZP} + + echo "-- Expected Error: 412 Precondition Failed (wrong hash)" + put ${ZP} "enother version3" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + delete ${ZP} + + echo "-- Expected Error: 412 Precondition Failed (any hash not found)" + put ${ZP} "enother version2" "If-Match" "*" + +echo "================> UPSERT (Expected OK)" + put ${ZP} "my value" + get ${ZP} + put ${ZP} "my new value" + get ${ZP} + +exit diff --git a/scripts/!ws.sh b/scripts/!ws.sh new file mode 100755 index 00000000000..2d111e134c6 --- /dev/null +++ b/scripts/!ws.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +clear +#source ./pulse_lib.sh + +websocat ws://127.0.0.1:8095/ws/testworkspace + +exit + + +let ws = new 
WebSocket("ws://localhost:8095/ws/testworkspace"); +ws.onmessage = e => console.log("Message from server:", e.data); +ws.onopen = () => ws.send("Hello from browser!"); + + + + + + + + + + + + + + + + + + + +TOKEN=$(./token.sh claims.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS" +# /AnyKey" + +# put ${ZP} "one text" + +# put "00000000-0000-0000-0000-000000000001/TESTS" "text 1" "If-None-Match: *" "Blooooooooo: blya" + +#exit + +#put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" +#echo "sleep 1 sec" +#sleep 1 +#get "00000000-0000-0000-0000-000000000001/TESTS" +#echo "sleep 3 sec" +#sleep 2 +#get "00000000-0000-0000-0000-000000000001/TESTS" + +put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001?prefix=TES" +sleep 1 +get "00000000-0000-0000-0000-000000000001?prefix=" + +exit diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh new file mode 100755 index 00000000000..4e5614c0655 --- /dev/null +++ b/scripts/TEST_HTTP_API.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +clear +source ./pulse_lib.sh + +TOKEN=$(./token.sh claims.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS" + +put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" +echo "sleep 1 sec" +sleep 1 +get "00000000-0000-0000-0000-000000000001/TESTS" +echo "sleep 3 sec" +sleep 2 +get "00000000-0000-0000-0000-000000000001/TESTS" + +echo "--------- delete ----------" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001/TESTS" +delete "00000000-0000-0000-0000-000000000001/TESTS" +get "00000000-0000-0000-0000-000000000001/TESTS" + +echo "--------- prefix ----------" +put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001?prefix=TES" +sleep 1 +get "00000000-0000-0000-0000-000000000001?prefix=" + +exit diff --git a/scripts/TEST_WS_API.sh b/scripts/TEST_WS_API.sh new file mode 100755 index 00000000000..2d111e134c6 --- /dev/null +++ b/scripts/TEST_WS_API.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +clear +#source ./pulse_lib.sh + +websocat ws://127.0.0.1:8095/ws/testworkspace + +exit + + +let ws = new WebSocket("ws://localhost:8095/ws/testworkspace"); +ws.onmessage = e => console.log("Message from server:", e.data); +ws.onopen = () => ws.send("Hello from browser!"); + + + + + + + + + + + + + + + + + + + +TOKEN=$(./token.sh claims.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS" +# /AnyKey" + +# put ${ZP} "one text" + +# put "00000000-0000-0000-0000-000000000001/TESTS" "text 1" "If-None-Match: *" "Blooooooooo: blya" + +#exit + +#put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" +#echo "sleep 1 sec" +#sleep 1 +#get "00000000-0000-0000-0000-000000000001/TESTS" +#echo "sleep 3 sec" +#sleep 2 +#get "00000000-0000-0000-0000-000000000001/TESTS" + +put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001?prefix=TES" +sleep 1 +get "00000000-0000-0000-0000-000000000001?prefix=" + +exit diff 
--git a/scripts/claims.json b/scripts/claims.json new file mode 100644 index 00000000000..af4a64c94bb --- /dev/null +++ b/scripts/claims.json @@ -0,0 +1,7 @@ +{ + "extra": { + "service": "account" + }, + "account": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee", + "workspace": "00000000-0000-0000-0000-000000000001" +} diff --git a/scripts/claims_system.json b/scripts/claims_system.json new file mode 100644 index 00000000000..a6f400009ec --- /dev/null +++ b/scripts/claims_system.json @@ -0,0 +1,7 @@ +{ + "extra": { + "service": "account" + }, + "account": "1749089e-22e6-48de-af4e-165e18fbd2f9", + "workspace": "00000000-0000-0000-0000-000000000001" +} diff --git a/scripts/claims_wrong_ws.json b/scripts/claims_wrong_ws.json new file mode 100644 index 00000000000..8bd456b0867 --- /dev/null +++ b/scripts/claims_wrong_ws.json @@ -0,0 +1,7 @@ +{ + "extra": { + "service": "account" + }, + "account": "lleo", + "workspace": "00000000-0000-0000-0000-000000000002" +} diff --git a/scripts/pulse_lib.sh b/scripts/pulse_lib.sh new file mode 100755 index 00000000000..c79edec450b --- /dev/null +++ b/scripts/pulse_lib.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +clear + +URL="http://localhost:8095/api" + +R='\033[0;31m' # Color red +G='\033[0;32m' # Color green +W='\033[0;33m' # Color ? +S='\033[0;34m' # Color Blue +F='\033[0;35m' # Color Fiolet +L='\033[0;36m' # Color LightBlue +N='\033[0m' # No Color +GRAY='\033[90m' # bright black + +api() { + local tmpfile + tmpfile=$1 + local status + status=$(head -n 1 "$tmpfile") + local status_code + status_code=$(echo "$status" | awk '{print $2}') + local etag + etag=$(grep -i "^ETag:" "${tmpfile}") + local body + body=$(awk 'found { print; next } NF == 0 { found = 1 }' "$tmpfile") + case "$status_code" in + 2*) echo -en "${G}${status}${N}" ;; + 3*) echo -en "${F}${status}${N}" ;; + 4*) echo -en "${R}${status}${N}" ;; + 5*) echo -en "${R}${status}${N}" ;; + *) echo -en "${GRAY}${status}${N}" ;; + esac + if [ -n "$etag" ]; then echo -n -e " ${F}${etag}${N}" ; fi + if [ -n "$body" ]; then echo -e "\n ${GRAY}[${body}]${N}" ; else echo -e " ${L}(no body)${N}" ; fi + rm -f "$tmpfile" +} + +get() { + echo -n -e "📥 ${L}GET ${W}$1${N} > " + local tmpfile + tmpfile=$(mktemp) + curl -i -s -X GET "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} + +put() { # If-None-Match If-Match + local match + local match_prn +# if [ -n "$3" ]; then match=(-H "$3: $4") ; else match=() ; fi +# if [ -n "$3" ]; then match_prn=" ${F}$3:$4${N}" ; else match_prn="" ; fi +# echo -n -e "📥 ${L}PUT ${W}$1${N}${match_prn} > " + + if [ -n "$3" ]; then match1=(-H "$3") ; else match1=() ; fi + if [ -n "$3" ]; then match1_prn=" ${F}$3${N}" ; else match1_prn="" ; fi + if [ -n "$4" ]; then match2=(-H "$4") ; else match2=() ; fi + if [ -n "$4" ]; then match2_prn=" ${F}$4${N}" ; else match2_prn="" ; fi + echo -n -e "📥 ${L}PUT ${W}$1${N}${match1_prn}${match2_prn} > " + + local tmpfile + tmpfile=$(mktemp) +# curl -v -i -s -X PUT "$URL/$1" -H "Authorization: Bearer ${TOKEN}" "${match1[@]}" "${match2[@]}" -H "Content-Type: application/json" -d "$2" | tr -d '\r' > "$tmpfile" + curl -i -s -X PUT "$URL/$1" -H "Authorization: Bearer ${TOKEN}" "${match1[@]}" "${match2[@]}" -H "Content-Type: application/json" -d "$2" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} + +delete() { + echo -n -e "📥 ${L}DELETE ${W}$1${N} > " + local tmpfile + tmpfile=$(mktemp) + curl -i -s -X DELETE "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} diff --git 
a/scripts/test_pulse.sh b/scripts/test_pulse.sh new file mode 100755 index 00000000000..808173ffe9d --- /dev/null +++ b/scripts/test_pulse.sh @@ -0,0 +1,63 @@ +#!/bin/bash + +clear +source ./pulse_lib.sh + +TOKEN=$(./token.sh claims.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS/AnyKey" + +echo "================> LIST" + put "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" "value1" + put "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" "value2" + get "00000000-0000-0000-0000-000000000001/Huome2" + delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" + delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" + +echo "================> WRONG UUID" + get "WrongUUID/TESTS/AnyKey" + +echo "================> INSERT If-None-Match" + + echo "-- Expected Error: 400 Bad Request (If-None-Match may be only *)" + put ${ZP} "enother text" "If-None-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + delete ${ZP} + + echo "-- Expected OK: 201 Created (key was not exist)" + put ${ZP} "enother text" "If-None-Match" "*" + + put ${ZP} "some text" + echo "-- Expected Error: 412 Precondition Failed (key was exist)" + put ${ZP} "enother text" "If-None-Match" "*" + +echo "================> UPDATE PUT If-Match" + + get ${ZP} + + echo "-- Expected OK: 204 No Content (right hash)" + put ${ZP} "some text" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (hash still right)" + put ${ZP} "enother version" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (any hash)" + put ${ZP} "enother version2" "If-Match" "*" + get ${ZP} + + echo "-- Expected Error: 412 Precondition Failed (wrong hash)" + put ${ZP} "enother version3" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + delete ${ZP} + + echo "-- Expected Error: 412 Precondition Failed (any hash not found)" + put ${ZP} "enother version2" "If-Match" "*" + +echo "================> UPSERT (Expected OK)" + put ${ZP} "my value" + get ${ZP} + put ${ZP} "my new value" + get ${ZP} + +exit diff --git a/scripts/test_pulse_system.sh b/scripts/test_pulse_system.sh new file mode 100755 index 00000000000..be2c78fa0dc --- /dev/null +++ b/scripts/test_pulse_system.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +clear +source ./pulse_lib.sh + +TOKEN_OK=$(./token.sh claims.json) +TOKEN_SYSTEM=$(./token.sh claims_system.json) +TOKEN_WRONG=$(./token.sh claims_wrong_ws.json) +ZP="00000000-0000-0000-0000-000000000001/TESTS/JWT_tests" + +echo "================> SYSTEM change - OK" + TOKEN=${TOKEN_SYSTEM} + # delete ${ZP} + put ${ZP} "system value" + +echo "================> USER read/change - OK" + TOKEN=${TOKEN_OK} + get ${ZP} + put ${ZP} "user value" + +echo "================> WRONG USER read/change - ERROR" + TOKEN=${TOKEN_WRONG} + get ${ZP} + put ${ZP} "wrong user value" + +echo "================> SYSTEM read/change - OK" + TOKEN=${TOKEN_SYSTEM} + get ${ZP} + put ${ZP} "system value 2" + +echo "================> USER read - OK" + TOKEN=${TOKEN_OK} + get ${ZP} + +exit diff --git a/scripts/token.sh b/scripts/token.sh new file mode 100755 index 00000000000..78ae2ef971d --- /dev/null +++ b/scripts/token.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +CONFIG_PATH="../src/config/default.toml" +SECRET=$(grep '^token_secret' "$CONFIG_PATH" | sed -E 's/.*=\s*"(.*)"/\1/') # " + +if [ -z "$SECRET" ]; then + echo "❌No token_secret in $CONFIG_PATH" + exit 1 +fi + +claims=$1 # "claims.json" + +#TOKEN=$(echo -n "${SECRET}" | jwt -alg HS256 -key - -sign claims.json) +TOKEN=$(echo -n "${SECRET}" | jwt -alg HS256 
-key - -sign ${claims})
+
+echo "$TOKEN"
diff --git a/src/config.rs b/src/config.rs
new file mode 100644
index 00000000000..4bd18c305e7
--- /dev/null
+++ b/src/config.rs
@@ -0,0 +1,77 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+use serde::Deserialize;
+use serde_with::formats::CommaSeparator;
+use serde_with::{StringWithSeparator, serde_as};
+
+use url::Url;
+
+use std::{path::Path, sync::LazyLock};
+
+use config::FileFormat;
+
+use uuid::Uuid;
+
+#[derive(Deserialize, Debug, PartialEq)]
+#[serde(rename_all = "lowercase")]
+pub enum RedisMode {
+    Sentinel,
+    Direct,
+}
+
+#[serde_as]
+#[derive(Deserialize, Debug)]
+pub struct Config {
+    pub bind_port: u16,
+    pub bind_host: String,
+    pub payload_size_limit: size::Size,
+
+    pub token_secret: String,
+
+    #[serde_as(as = "StringWithSeparator::<CommaSeparator, Url>")]
+    pub redis_urls: Vec<Url>,
+    pub redis_password: String,
+    pub redis_mode: RedisMode,
+    pub redis_service: String,
+
+    pub max_ttl: usize,
+}
+
+pub static CONFIG: LazyLock<Config> = LazyLock::new(|| {
+    const DEFAULTS: &str = std::include_str!("config/default.toml");
+
+    let mut builder =
+        config::Config::builder().add_source(config::File::from_str(DEFAULTS, FileFormat::Toml));
+
+    let path = Path::new("etc/config.toml");
+
+    if path.exists() {
+        builder = builder.add_source(config::File::with_name(path.as_os_str().to_str().unwrap()));
+    }
+
+    let settings = builder
+        .add_source(config::Environment::with_prefix("HULY"))
+        .build()
+        .and_then(|c| c.try_deserialize::<Config>());
+
+    match settings {
+        Ok(settings) => settings,
+        Err(error) => {
+            eprintln!("configuration error: {}", error);
+            std::process::exit(1);
+        }
+    }
+});
diff --git a/src/config/default.toml b/src/config/default.toml
new file mode 100644
index 00000000000..00eb9c805b8
--- /dev/null
+++ b/src/config/default.toml
@@ -0,0 +1,12 @@
+bind_port = 8095
+bind_host = "0.0.0.0"
+payload_size_limit = "2mb"
+
+token_secret = "secret"
+
+redis_urls = "redis://huly.local:6379"
+redis_password = ""
+redis_mode = "direct"
+redis_service = "mymaster"
+
+max_ttl = 3600
diff --git a/src/handlers_http.rs b/src/handlers_http.rs
new file mode 100644
index 00000000000..eddaac3e20e
--- /dev/null
+++ b/src/handlers_http.rs
@@ -0,0 +1,241 @@
+use redis::aio::MultiplexedConnection;
+use std::sync::Arc;
+use std::collections::HashMap;
+use tokio::sync::Mutex;
+use anyhow::anyhow;
+use std::time::{SystemTime, UNIX_EPOCH};
+use tracing::{error, trace};
+use uuid::Uuid;
+use crate::ws_owner;
+
+// type BucketPath = web::Path<(String)>;
+type ObjectPath = web::Path<(String, String)>;
+
+use crate::redis::{
+    Ttl, SaveMode,
+    RedisArray,
+    redis_save,
+    redis_read,
+    redis_delete,
+    redis_list,
+};
+
+use actix_web::{
+    Error, HttpMessage, HttpRequest, HttpResponse, error,
+    web::{self, Data, Json, Query},
+};
+
+
+/// list
+// #[derive(Deserialize)]
+// pub struct ListInfo { prefix: Option<String> }
+
+pub async fn list(
+    req: HttpRequest,
+    path: web::Path<String>,
+    query: web::Query<HashMap<String, String>>,
+    redis: web::Data<Arc<Mutex<MultiplexedConnection>>>,
+) -> Result<HttpResponse, Error> {
+
+    ws_owner::workspace_owner(&req)?; // Check workspace
+
+    let workspace = path.into_inner();
+    let prefix = query.get("prefix").map(|s| s.as_str());
+
+    trace!(workspace, prefix, "list request");
+
+    async move || -> anyhow::Result<HttpResponse> {
+
+        let mut conn = redis.lock().await;
+
+        let entries = redis_list(&mut *conn, &workspace, prefix).await?;
+
+        Ok(HttpResponse::Ok().json(entries))
+
+    }()
+    .await
+    .map_err(|err| {
+        tracing::error!(error = %err, "Internal error in LIST handler");
+        actix_web::error::ErrorInternalServerError("internal error")
+    })
+
+}
+/*
+    path: BucketPath,
+    query: Query,
+    redis: web::Data>>,
+) -> Result, actix_web::error::Error> {
+
+    ws_owner::workspace_owner(&req)?; // Check workspace
+
+    let (workspace) = path.into_inner();
+    trace!(workspace, prefix = ?query.prefix, "list request");
+
+    // ...
+
+    async move || -> anyhow::Result> {
+        let connection = pool.get().await?;
+
+        let response = if let Some(prefix) = &query.prefix {
+            let pattern = format!("{}%", prefix);
+            let statement = r#"
+                select key from kvs where workspace=$1 and namespace=$2 and key like $3
+            "#;
+
+            connection
+                .query(statement, &[&wsuuid, &nsstr, &pattern])
+                .await?
+        } else {
+            let statement = r#"
+                select key from kvs where workspace=$1 and namespace=$2
+            "#;
+
+            connection.query(statement, &[&wsuuid, &nsstr]).await?
+        };
+
+        let count = response.len();
+
+        let keys = response.into_iter().map(|row| row.get(0)).collect();
+
+        Ok(Json(ListResponse {
+            keys,
+            count,
+            namespace: nsstr.to_owned(),
+            workspace: wsstr.to_owned(),
+        }))
+    }()
+    .await
+    .map_err(|error| {
+        error!(op = "list", workspace, namespace, ?error, "internal error");
+        error::ErrorInternalServerError("")
+    })
+}
+*/
+
+
+/// get / (test)
+
+pub async fn get(
+    req: HttpRequest,
+    path: ObjectPath,
+    redis: web::Data<Arc<Mutex<MultiplexedConnection>>>,
+) -> Result<HttpResponse, Error> {
+
+    ws_owner::workspace_owner(&req)?; // Check workspace
+
+    let (workspace, key) = path.into_inner();
+    // println!("\nworkspace = {}", workspace);
+    // println!("key = {}\n", key);
+
+    trace!(workspace, key, "get request");
+
+    async move || -> anyhow::Result<HttpResponse> {
+
+        let mut conn = redis.lock().await;
+
+        Ok(
+            redis_read(&mut *conn, &workspace, &key).await?
+                .map(|entry| HttpResponse::Ok().json(entry))
+                .unwrap_or_else(|| HttpResponse::NotFound().body("empty"))
+        )
+
+    }()
+    .await
+    .map_err(|err| {
+        tracing::error!(error = %err, "Internal error in GET handler");
+        actix_web::error::ErrorInternalServerError("internal error")
+    })
+}
+
+
+/// put
+
+pub async fn put(
+    req: HttpRequest,
+    path: ObjectPath,
+    body: web::Bytes,
+    redis: web::Data<Arc<Mutex<MultiplexedConnection>>>,
+) -> Result<HttpResponse, Error> {
+
+    ws_owner::workspace_owner(&req)?; // Check workspace
+
+    let (workspace, key) = path.into_inner();
+
+    trace!(workspace, key, "put request");
+
+    async move || -> anyhow::Result<HttpResponse> {
+
+        let mut conn = redis.lock().await;
+
+        // TTL logic
+        let mut ttl = None;
+        if let Some(x) = req.headers().get("HULY-TTL") {
+            let s = x.to_str().map_err(|_| anyhow!("Invalid HULY-TTL header"))?;
+            let secs = s.parse::<usize>().map_err(|_| anyhow!("Invalid TTL value in HULY-TTL header"))?;
+            ttl = Some(Ttl::Sec(secs));
+        } else if let Some(x) = req.headers().get("HULY-EXPIRE-AT") {
+            let s = x.to_str().map_err(|_| anyhow!("Invalid HULY-EXPIRE-AT header"))?;
+            let ts = s.parse::<u64>().map_err(|_| anyhow!("Invalid EXPIRE-AT value in HULY-EXPIRE-AT header"))?;
+            ttl = Some(Ttl::At(ts));
+        }
+
+        // MODE logic
+        let mut mode = Some(SaveMode::Upsert);
+        if let Some(h) = req.headers().get("If-Match") { // `If-Match: *` - update only if the key exists
+            let s = h.to_str().map_err(|_| anyhow!("Invalid If-Match header"))?;
+            if s == "*" { mode = Some(SaveMode::Update); } else {
+                // TODO: `If-Match: <md5>` — update only if current value's MD5 matches
+                return Err(anyhow!("TODO: Only '*' supported now"));
+            }
+        } else if let Some(h) = req.headers().get("If-None-Match") { // `If-None-Match: *` — insert only if the key does not exist
+            let s = h.to_str().map_err(|_| anyhow!("Invalid If-None-Match header"))?;
+            if s == "*" { mode = Some(SaveMode::Insert); } else { return Err(anyhow!("If-None-Match must be '*'")); }
+        }
+
+        redis_save(&mut *conn, &workspace, &key, &body[..], ttl, mode).await?;
+        return Ok(HttpResponse::Ok().body("DONE"));
+
+    }()
+    .await
+    .map_err(|err| {
+        tracing::error!(error = %err, "Internal error in PUT handler");
+        actix_web::error::ErrorInternalServerError("internal error")
+    })
+}
+
+
+
+// delete
+
+pub async fn delete(
+    req: HttpRequest,
+    path: ObjectPath,
+    redis: web::Data<Arc<Mutex<MultiplexedConnection>>>,
+) -> Result<HttpResponse, Error> {
+
+    ws_owner::workspace_owner(&req)?; // Check workspace
+
+    let (workspace, key) = path.into_inner();
+    trace!(workspace, key, "delete request");
+
+//    let wsuuid = Uuid::parse_str(workspace.as_str())
+//        .map_err(|e| error::ErrorBadRequest(format!("Invalid UUID in workspace: {}", e)))?;
+
+    async move || -> anyhow::Result<HttpResponse> {
+        let mut conn = redis.lock().await;
+
+        let deleted = redis_delete(&mut *conn, &workspace, &key).await?;
+
+        let response = match deleted {
+            true => HttpResponse::NoContent().finish(),
+            false => HttpResponse::NotFound().body("not found"),
+        };
+
+        Ok(response)
+    }()
+    .await
+    .map_err(|err| {
+        tracing::error!(error = %err, "Internal error in DELETE handler");
+        actix_web::error::ErrorInternalServerError("internal error")
+    })
+}
diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs
new file mode 100644
index 00000000000..216fcf686a6
--- /dev/null
+++ b/src/handlers_ws.rs
@@ -0,0 +1,119 @@
+use actix::{Actor, StreamHandler, AsyncContext, ActorContext};
+use actix_web::{web, HttpRequest, HttpResponse, Error};
+use actix_web_actors::ws;
+use serde::Deserialize;
+use serde_json::Result as JsonResult;
+use std::collections::HashSet;
+
+/// WsCommand - commands enum (put, delete, sub, unsub)
+#[derive(Deserialize, Debug)]
+#[serde(rename_all = "lowercase", tag = "type")]
+pub enum WsCommand {
+    Put {
+        key: String,
+        data: String,
+        #[serde(default)]
+        correlation: Option<String>,
+        #[serde(rename = "expiresAt")]
+        expires_at: Option<u64>,
+    },
+    Delete {
+        key: String,
+        #[serde(default)]
+        correlation: Option<String>,
+    },
+    Sub {
+        key: String,
+    },
+    Unsub {
+        key: String,
+    },
+}
+
+/// Session state
+#[allow(dead_code)]
+pub struct WsSession {
+    pub workspace: String,
+    pub subscriptions: HashSet<String>, // new fields
+}
+
+/// External Actor trait: must be in a separate impl block
+impl Actor for WsSession {
+    type Context = ws::WebsocketContext<Self>;
+
+    fn started(&mut self, ctx: &mut Self::Context) {
+        println!("WebSocket connected to workspace [{}]", self.workspace);
+        ctx.text(format!("Connected to workspace: {}", self.workspace));
+    }
+}
+
+/// External StreamHandler trait: must be in a separate impl block
+impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WsSession {
+    fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
+        match msg {
+            Ok(ws::Message::Text(text)) => {
+                println!("Message from [{}]: {}", self.workspace, text);
+                match serde_json::from_str::<WsCommand>(&text) {
+                    Ok(cmd) => self.handle_command(cmd, ctx),
+                    Err(err) => ctx.text(format!("Invalid JSON: {}", err)),
+                }
+            }
+            Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
+            Ok(ws::Message::Close(reason)) => {
+                println!("Closing WS for workspace [{}]: {:?}", self.workspace, reason);
+                ctx.close(reason);
+                ctx.stop();
+            }
+            _ => (),
+        }
+    }
+}
+
+/// All logic in one impl
+impl WsSession {
+
+    /// Called when valid JSON is received for the WsSession
+    fn handle_command(&mut self, cmd: WsCommand, ctx: &mut ws::WebsocketContext<Self>) {
+        match cmd {
+            WsCommand::Put { key, data, expires_at, correlation } => {
+                println!("PUT {} = {} (expires_at: {:?})", key, data, expires_at);
+                ctx.text(format!("OK PUT {}{}", key, Self::correlation_suffix(&correlation)));
+                // Here: save to Redis
+            }
+            WsCommand::Delete { key, correlation } => {
+                println!("DELETE {}", key);
+                ctx.text(format!("OK DELETE {}{}", key, Self::correlation_suffix(&correlation)));
+                // Here: delete from Redis
+            }
+            WsCommand::Sub { key } => {
+                println!("SUB {}", key);
+                ctx.text(format!("OK SUB {}", key));
+                // Here: subscribe (pub/sub in the future)
+            }
+            WsCommand::Unsub { key } => {
+                println!("UNSUB {}", key);
+                ctx.text(format!("OK UNSUB {}", key));
+                // Here: unsubscribe
+            }
+        }
+    }
+
+    fn correlation_suffix(corr: &Option<String>) -> String {
+        match corr {
+            Some(c) => format!(" [correlation: {}]", c),
+            None => "".to_string(),
+        }
+        //
+        // corr.as_ref()
+        //     .map(|c| format!(" [correlation: {}]", c))
+        //     .unwrap_or_default()
+        //
+    }
+
+}
+
+pub async fn handler(req: HttpRequest, stream: web::Payload, path: web::Path<String>) -> Result<HttpResponse, Error> {
+    let workspace = path.into_inner();
+    let session = WsSession { workspace, subscriptions: HashSet::new() };
+    ws::start(session, &req, stream)
+}
diff --git a/src/main.rs b/src/main.rs
new file mode 100644
index 00000000000..a30f297e9f0
--- /dev/null
+++ b/src/main.rs
@@ -0,0 +1,124 @@
+//
+// Copyright © 2025 Hardcore Engineering Inc.
+//
+// Licensed under the Eclipse Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License. You may
+// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +#![allow(unused_imports)] + +use std::pin::Pin; + +use actix_cors::Cors; + +use actix_web::{ + App, Error, HttpMessage, HttpRequest, HttpResponse, HttpServer, + body::MessageBody, + dev::{ServiceRequest, ServiceResponse}, + middleware::{self, Next}, + web::{self, Data, PayloadConfig}, +}; + +use actix_web_actors::ws; + +use tracing::info; + +mod config; +mod handlers_http; +mod handlers_ws; + +use crate::handlers_ws::{WsSession, handler}; + +mod redis; +use crate::redis::redis_connect; + +mod ws_owner; + +use config::CONFIG; + +use hulyrs::services::jwt::actix::ServiceRequestExt; +use secrecy::SecretString; + +// pub type Pool = bb8::Pool>; + +fn initialize_tracing(level: tracing::Level) { + use tracing_subscriber::{filter::targets::Targets, prelude::*}; + + let filter = Targets::default() + .with_target(env!("CARGO_BIN_NAME"), level) + .with_target("actix", level); + let format = tracing_subscriber::fmt::layer().compact(); + + tracing_subscriber::registry() + .with(filter) + .with(format) + .init(); +} + +// #[allow(dead_code)] +async fn interceptor( + request: ServiceRequest, + next: Next, +) -> Result, Error> { + let secret = SecretString::new(CONFIG.token_secret.clone().into_boxed_str()); + + let claims = request.extract_claims(&secret)?; + + request.extensions_mut().insert(claims.to_owned()); + + next.call(request).await +} + + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + initialize_tracing(tracing::Level::DEBUG); + + tracing::info!("{}/{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); + + let redis = redis_connect().await?; + let redis = std::sync::Arc::new(tokio::sync::Mutex::new(redis)); + let redis_data = web::Data::new(redis.clone()); + + let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); + let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); + + let server = HttpServer::new(move || { + let cors = Cors::default() + .allow_any_origin() + .allow_any_method() + .allow_any_header() + .supports_credentials() + .max_age(3600); + + App::new() + .app_data(payload_config.clone()) + .app_data(redis_data.clone()) + .wrap(middleware::Logger::default()) + .wrap(cors) + .service( + web::scope("/api") + .wrap(middleware::from_fn(interceptor)) + .route("/{workspace}", web::get().to(handlers_http::list)) + .route("/{workspace}/{key:.*}",web::get().to(handlers_http::get)) + .route("/{workspace}/{key:.*}",web::put().to(handlers_http::put)) + .route("/{workspace}/{key:.*}",web::delete().to(handlers_http::delete)) + ) + .route("/status", web::get().to(async || "ok")) + .route("/ws/{workspace}", web::get().to(handlers_ws::handler)) // WebSocket + }) + .bind(socket)? 
+ .run(); + + server.await?; + + Ok(()) +} diff --git a/src/redis.rs b/src/redis.rs new file mode 100644 index 00000000000..c17f152a919 --- /dev/null +++ b/src/redis.rs @@ -0,0 +1,250 @@ +use crate::config::{CONFIG, RedisMode}; + +use std::time::{SystemTime, UNIX_EPOCH}; + +pub enum Ttl { + Sec(usize), // EX + At(u64), // EXAT (timestamp in seconds) +} + +pub enum SaveMode { + Upsert, // default: set or overwrite + Insert, // only if not exists (NX) + Update, // only if exists (XX) +} + +use redis::{ + AsyncCommands, RedisResult, + ToRedisArgs, + Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection }; +use url::Url; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize)] +pub struct RedisArray { + pub workspace: String, + pub key: String, + pub data: String, + pub expires_at: u64, // sec to expire TTL +} + +fn error(msg: &'static str) -> RedisResult { + Err(redis::RedisError::from((redis::ErrorKind::ExtensionError, msg))) +} + +/// redis_list(&connection,workspace,prefix) + +pub async fn redis_list( + conn: &mut MultiplexedConnection, + workspace: &str, + prefix: Option<&str>, +) -> redis::RedisResult> { + let mut cursor = 0; + let mut results = Vec::new(); + let pattern = prefix.map(|p| format!("{}*", p)); + + loop { + let mut cmd = redis::cmd("HSCAN"); + cmd.arg(workspace).arg(cursor); + if let Some(ref p) = pattern { + cmd.arg("MATCH").arg(p); + } + // cmd.arg("COUNT").arg(100); + + let (next_cursor, items): (u64, Vec<(String, String)>) = cmd.query_async(conn).await?; + + for (key, value) in items { + // TTL + let ttl_vec: Vec = redis::cmd("HTTL") + .arg(workspace) + .arg("FIELDS") + .arg(1) + .arg(&key) + .query_async(conn) + .await?; + + let ttl = ttl_vec.get(0).copied().unwrap_or(-3); + + if ttl >= 0 { + results.push(RedisArray { + workspace: workspace.to_string(), + key, + data: value, + expires_at: ttl as u64, + }); + } + } + + if next_cursor == 0 { break; } + cursor = next_cursor; + } + + Ok(results) +} + + + +/// redis_read(&connection,workspace,key) + +#[allow(dead_code)] +pub async fn redis_read( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, +) -> redis::RedisResult> { + + let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; + let Some(data) = data else { return Ok(None); }; + + // let ttl: i64 = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(key).query_async(conn).await?; + let ttl_vec: Vec = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(key).query_async(conn).await?; + let ttl = ttl_vec.get(0).copied().unwrap_or(-3); // -3 unknown error + + if ttl == -1 { return error("TTL not setL"); } + if ttl == -2 { return error("Key not found"); } + if ttl < 0 { return error("Unknown TTL error"); } + + Ok(Some(RedisArray { + workspace: workspace.to_string(), + key: key.to_string(), + data, + expires_at: ttl as u64, + })) +} + +/// TTL sec +/// redis_save(&mut conn, "workspace", "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; +/// +/// TTL at +/// let at_unixtime: u64 = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() + 600; +/// redis_save(&mut conn, "workspace", "key", "val", Some(Ttl::At(at_unixtime)), Some(SaveMode::Update)).await?; +/// +/// w/o TTL (CONFIG.max_ttl) +/// redis_save(&mut conn, "workspace", "key", "val", None, None).await?; + +#[allow(dead_code)] +pub async fn redis_save( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, + value: T, + ttl: Option, + mode: Option, +) -> RedisResult<()> { 
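+    // Two separate commands do the work below: HSET writes the hash field, then
+    // HEXPIRE attaches a per-field TTL. Hash-field expiry (HEXPIRE/HTTL) requires
+    // Redis 7.4+, and the two steps are not atomic; a failure between them can
+    // leave the field stored without any expiry.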
+ + // TTL logic + let sec = match ttl { + Some(Ttl::Sec(secs)) => secs, + Some(Ttl::At(timestamp)) => { + let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + if timestamp <= now { + return error("TTL timestamp exceeds MAX_TTL limit"); + } + (timestamp - now) as usize + } + None => CONFIG.max_ttl, + }; + if sec == 0 { return error("TTL must be > 0"); } + if sec > CONFIG.max_ttl { return error("TTL exceeds MAX_TTL"); } + + let mut cmd = redis::cmd("HSET"); + cmd.arg(workspace).arg(key).arg(value); + + // Mode variants + match mode.unwrap_or(SaveMode::Upsert) { + SaveMode::Upsert => {} // none + SaveMode::Insert => { cmd.arg("NX"); } + SaveMode::Update => { cmd.arg("XX"); } + } + + // 1) HSET execute + if cmd.query_async::>(&mut *conn).await?.is_none() { + return error("SET failed: NX/XX condition not met"); + } + + // 2) HEXPIRE execute + let res: Vec = redis::cmd("HEXPIRE").arg(workspace).arg(sec).arg("FIELDS").arg(1).arg(key).query_async(&mut *conn).await?; + if res.get(0).copied().unwrap_or(0) == 0 { + return error("HEXPIRE failed: field not found or TTL not set"); + } + + Ok(()) +} + + +/// redis_delete(&connection,workspace,key) +#[allow(dead_code)] +pub async fn redis_delete( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, +) -> redis::RedisResult { + + let deleted: i32 = redis::cmd("HDEL") + .arg(workspace) + .arg(key) + .query_async(conn) + .await?; + + Ok(deleted > 0) +} + + +/// redis_connect() +pub async fn redis_connect() -> anyhow::Result { + let default_port = match CONFIG.redis_mode { + RedisMode::Sentinel => 6379, + RedisMode::Direct => 6380, + }; + + let urls = CONFIG + .redis_urls + .iter() + .map(|url| { + redis::ConnectionAddr::Tcp( + url.host().unwrap().to_string(), + url.port().unwrap_or(default_port), + ) + }) + .collect::>(); + + let conn = if CONFIG.redis_mode == RedisMode::Sentinel { + use redis::sentinel::{SentinelClientBuilder, SentinelServerType}; + + let mut sentinel = SentinelClientBuilder::new( + urls, + CONFIG.redis_service.to_owned(), + SentinelServerType::Master, + ) + .unwrap() + .set_client_to_redis_protocol(ProtocolVersion::RESP3) + .set_client_to_redis_db(0) + .set_client_to_redis_password(CONFIG.redis_password.clone()) + .set_client_to_sentinel_password(CONFIG.redis_password.clone()) + .build()?; + + sentinel.get_async_connection().await? + } else { + let single = urls + .first() + .ok_or_else(|| anyhow::anyhow!("No redis URL provided"))?; + + let redis_connection_info = RedisConnectionInfo { + db: 0, + username: None, + password: Some(CONFIG.redis_password.clone()), + protocol: ProtocolVersion::RESP3, + }; + + let connection_info = ConnectionInfo { + addr: single.clone(), + redis: redis_connection_info, + }; + + let client = Client::open(connection_info)?; + client.get_multiplexed_async_connection().await? 
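+        // Direct mode uses only the first entry of CONFIG.redis_urls; any extra
+        // URLs are ignored on this code path.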
+ }; + + Ok(conn) +} diff --git a/src/redis.rs.ok b/src/redis.rs.ok new file mode 100644 index 00000000000..bd57538c0dd --- /dev/null +++ b/src/redis.rs.ok @@ -0,0 +1,231 @@ +// hget hset TODO +// статистику TODO + +// ------------------------------- + +use crate::config::{CONFIG, RedisMode}; + +use std::time::{SystemTime, UNIX_EPOCH}; + +pub enum Ttl { + Sec(usize), // EX + At(u64), // EXAT (timestamp in seconds) +} + +pub enum SaveMode { + Upsert, // default: set or overwrite + Insert, // only if not exists (NX) + Update, // only if exists (XX) +} + +use redis::{ + AsyncCommands, RedisResult, + ToRedisArgs, + Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection }; +use url::Url; + +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize)] +pub struct RedisArray { + pub workspace: String, + pub key: String, + pub data: String, + pub expires_at: Option, // секунды до истечения TTL +} + +fn error(msg: &'static str) -> redis::RedisResult<()> { + Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, msg ))) +} + +/// redis_read(&connection,key) + +#[allow(dead_code)] +pub async fn redis_read( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, +) -> redis::RedisResult> { + + let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; + let Some(data) = data else { return Ok(None); }; + + // let ttl: i64 = redis::cmd("TTL").arg(redis_key).query_async(conn).await?; + let ttl: i64 = redis::cmd("TTL").arg(workspace).arg(key).query_async(conn).await?; + let expires_at = if ttl >= 0 { Some(ttl as u64) } else { None }; // -1 (нет TTL), -2 (нет ключа) + + Ok(Some(RedisArray { + workspace: workspace.to_string(), + key: key.to_string(), + data, + expires_at, + })) +} + + +/* +EX — срок жизни в секундах (e.g. EX 60 = 1 минута). +EXAT — дата истечения в секундах с эпохи Unix. +KEEPTTL — сохраняет текущий TTL ключа при перезаписи. + +Нет, несложно — Redis уже поддерживает это с помощью флагов NX и XX: + NX — записать только если ключ не существует + XX — перезаписать только если ключ уже существует + +Ты просто добавляешь .arg("NX") или .arg("XX") в команду SET. 
+Варианты: + SET key val EX 60 NX — с TTL, только если не существует + SET key val XX — только если уже существует, без TTL + SET key val — просто перезаписать, без TTL + +*/ + +/// TTL sec +/// redis_save(&mut conn, "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; +/// +/// TTL at +/// let at_unixtime: u64 = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() + 600; +/// redis_save(&mut conn, "key", "val", Some(Ttl::At(at_unixtime)), Some(SaveMode::Update)).await?; +/// +/// w/o TTL (CONFIG.max_ttl) +/// redis_save(&mut conn, "key", "val", None, None).await?; + +#[allow(dead_code)] +pub async fn redis_save( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, + value: T, + ttl: Option, + mode: Option, +) -> RedisResult<()> { + + // TTL variants + match ttl { + Some(Ttl::Sec(secs)) => { + if secs == 0 { + return error("TTL must be > 0"); + } + if secs > CONFIG.max_ttl { + return error("TTL exceeds MAX_TTL"); + } + cmd.arg("EX").arg(secs); + } + Some(Ttl::At(timestamp)) => { + let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + if timestamp <= now { + return error("TTL timestamp is in the past"); + } + if timestamp - now > CONFIG.max_ttl as u64 { + return error("TTL timestamp exceeds MAX_TTL limit"); + } + cmd.arg("EXAT").arg(timestamp); + } + None => { cmd.arg("EX").arg(CONFIG.max_ttl); } + } + + + let mut cmd = redis::cmd("HSET"); + cmd.arg(workspace).arg(key).arg(value).query_async(conn).await?; + +redis::cmd("HEXPIRE") + .arg(workspace) + .arg(ttl_seconds) + .arg("FIELDS") + .arg(1).arg(key) + .query_async(conn).await?; + + + + // Mode variants + match mode.unwrap_or(SaveMode::Upsert) { + SaveMode::Upsert => { } // nothing + SaveMode::Insert => { cmd.arg("NX"); } + SaveMode::Update => { cmd.arg("XX"); } + } + + let res: Option = cmd.query_async(&mut *conn).await?; + + if res.is_none() { // nil - if NX/XX error + return error("SET failed: NX/XX condition not met"); + } else { + Ok(()) + } +} + + +#[allow(dead_code)] +pub async fn redis_delete( + conn: &mut MultiplexedConnection, + workspace: &str, + key: &str, +) -> redis::RedisResult { + + let deleted: i32 = redis::cmd("HDEL") + .arg(workspace) + .arg(key) + .query_async(conn) + .await?; + + Ok(deleted > 0) +} + + + +/// redis_connect() +pub async fn redis_connect() -> anyhow::Result { + let default_port = match CONFIG.redis_mode { + RedisMode::Sentinel => 6379, + RedisMode::Direct => 6380, + }; + + let urls = CONFIG + .redis_urls + .iter() + .map(|url| { + redis::ConnectionAddr::Tcp( + url.host().unwrap().to_string(), + url.port().unwrap_or(default_port), + ) + }) + .collect::>(); + + let conn = if CONFIG.redis_mode == RedisMode::Sentinel { + use redis::sentinel::{SentinelClientBuilder, SentinelServerType}; + + let mut sentinel = SentinelClientBuilder::new( + urls, + CONFIG.redis_service.to_owned(), + SentinelServerType::Master, + ) + .unwrap() + .set_client_to_redis_protocol(ProtocolVersion::RESP3) + .set_client_to_redis_db(0) + .set_client_to_redis_password(CONFIG.redis_password.clone()) + .set_client_to_sentinel_password(CONFIG.redis_password.clone()) + .build()?; + + sentinel.get_async_connection().await? 
+ } else { + let single = urls + .first() + .ok_or_else(|| anyhow::anyhow!("No redis URL provided"))?; + + let redis_connection_info = RedisConnectionInfo { + db: 0, + username: None, + password: Some(CONFIG.redis_password.clone()), + protocol: ProtocolVersion::RESP3, + }; + + let connection_info = ConnectionInfo { + addr: single.clone(), + redis: redis_connection_info, + }; + + let client = Client::open(connection_info)?; + client.get_multiplexed_async_connection().await? + }; + + Ok(conn) +} diff --git a/src/ws_owner.rs b/src/ws_owner.rs new file mode 100644 index 00000000000..941ded604c3 --- /dev/null +++ b/src/ws_owner.rs @@ -0,0 +1,39 @@ +use hulyrs::services::jwt::Claims; +use uuid::Uuid; +use actix_web::{ Error, HttpMessage, HttpRequest, error }; + + +/// Checking workspace in Authorization +pub fn workspace_owner(req: &HttpRequest) -> Result<(), Error> { + let extensions = req.extensions(); + + let claims = extensions + .get::() + .ok_or_else(|| error::ErrorUnauthorized("Missing auth claims"))?; + + // is_system - allowed to all + if claims.is_system() { + return Ok(()); + } + + // else - check workplace + let jwt_workspace = claims + .workspace + .as_ref() + .ok_or_else(|| error::ErrorForbidden("Missing workspace in token"))?; + + let path_ws = req + .match_info() + .get("workspace") + .ok_or_else(|| error::ErrorBadRequest("Missing workspace in URL path"))?; + + let path_ws_uuid = + Uuid::parse_str(path_ws).map_err(|_| error::ErrorBadRequest("Invalid workspace UUID"))?; + + if jwt_workspace != &path_ws_uuid { + return Err(error::ErrorForbidden("Workspace mismatch")); + } + + Ok(()) +} + From 4844f295c0863d433e37a89ae9c0e66934c64761 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 8 Aug 2025 18:39:54 +0300 Subject: [PATCH 151/636] HTTP API: fix If-logic and ETag --- scripts/!test.sh | 102 ----------------- scripts/!ws.sh | 58 ---------- scripts/TEST.html | 125 +++++++++++++++++++++ scripts/TEST_HTTP_API.sh | 47 ++++++++ src/handlers_http.rs | 127 ++++++--------------- src/main.rs | 2 - src/redis.rs | 62 +++++++---- src/redis.rs.ok | 231 --------------------------------------- src/ws_owner.rs | 2 - 9 files changed, 246 insertions(+), 510 deletions(-) delete mode 100755 scripts/!test.sh delete mode 100755 scripts/!ws.sh create mode 100644 scripts/TEST.html delete mode 100644 src/redis.rs.ok diff --git a/scripts/!test.sh b/scripts/!test.sh deleted file mode 100755 index 6c779757a04..00000000000 --- a/scripts/!test.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash - -clear -source ./pulse_lib.sh - -TOKEN=$(./token.sh claims.json) -ZP="00000000-0000-0000-0000-000000000001/TESTS" -# /AnyKey" - -# put ${ZP} "one text" - -# put "00000000-0000-0000-0000-000000000001/TESTS" "text 1" "If-None-Match: *" "Blooooooooo: blya" - -#exit - -#put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" -#echo "sleep 1 sec" -#sleep 1 -#get "00000000-0000-0000-0000-000000000001/TESTS" -#echo "sleep 3 sec" -#sleep 2 -#get "00000000-0000-0000-0000-000000000001/TESTS" - -put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" -put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" -put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" -get "00000000-0000-0000-0000-000000000001?prefix=TES" -sleep 1 -get "00000000-0000-0000-0000-000000000001?prefix=" - -exit - -echo "--------- delete ----------" -put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 3" -get "00000000-0000-0000-0000-000000000001/TESTS" -delete 
"00000000-0000-0000-0000-000000000001/TESTS" -get "00000000-0000-0000-0000-000000000001/TESTS" -#HULY-EXPIRE-AT: - - -# put "00000000-0000-0000-0000-000000000001/TESTS/some" "text 2" -# put "00000000-0000-0000-0000-000000000001/TESTS/some/gogo/" "text 3" - - -exit - - -echo "================> LIST" - put "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" "value1" - put "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" "value2" - get "00000000-0000-0000-0000-000000000001/Huome2" - delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey1" - delete "00000000-0000-0000-0000-000000000001/Huome2/MyKey2" - -echo "================> WRONG UUID" - get "WrongUUID/TESTS/AnyKey" - -echo "================> INSERT If-None-Match" - - echo "-- Expected Error: 400 Bad Request (If-None-Match may be only *)" - put ${ZP} "enother text" "If-None-Match" "552e21cd4cd9918678e3c1a0df491bc3" - - delete ${ZP} - - echo "-- Expected OK: 201 Created (key was not exist)" - put ${ZP} "enother text" "If-None-Match" "*" - - put ${ZP} "some text" - echo "-- Expected Error: 412 Precondition Failed (key was exist)" - put ${ZP} "enother text" "If-None-Match" "*" - -echo "================> UPDATE PUT If-Match" - - get ${ZP} - - echo "-- Expected OK: 204 No Content (right hash)" - put ${ZP} "some text" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" - get ${ZP} - - echo "-- Expected OK: 204 No Content (hash still right)" - put ${ZP} "enother version" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" - get ${ZP} - - echo "-- Expected OK: 204 No Content (any hash)" - put ${ZP} "enother version2" "If-Match" "*" - get ${ZP} - - echo "-- Expected Error: 412 Precondition Failed (wrong hash)" - put ${ZP} "enother version3" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" - - delete ${ZP} - - echo "-- Expected Error: 412 Precondition Failed (any hash not found)" - put ${ZP} "enother version2" "If-Match" "*" - -echo "================> UPSERT (Expected OK)" - put ${ZP} "my value" - get ${ZP} - put ${ZP} "my new value" - get ${ZP} - -exit diff --git a/scripts/!ws.sh b/scripts/!ws.sh deleted file mode 100755 index 2d111e134c6..00000000000 --- a/scripts/!ws.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -clear -#source ./pulse_lib.sh - -websocat ws://127.0.0.1:8095/ws/testworkspace - -exit - - -let ws = new WebSocket("ws://localhost:8095/ws/testworkspace"); -ws.onmessage = e => console.log("Message from server:", e.data); -ws.onopen = () => ws.send("Hello from browser!"); - - - - - - - - - - - - - - - - - - - -TOKEN=$(./token.sh claims.json) -ZP="00000000-0000-0000-0000-000000000001/TESTS" -# /AnyKey" - -# put ${ZP} "one text" - -# put "00000000-0000-0000-0000-000000000001/TESTS" "text 1" "If-None-Match: *" "Blooooooooo: blya" - -#exit - -#put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" -#echo "sleep 1 sec" -#sleep 1 -#get "00000000-0000-0000-0000-000000000001/TESTS" -#echo "sleep 3 sec" -#sleep 2 -#get "00000000-0000-0000-0000-000000000001/TESTS" - -put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" -put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" -put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" -get "00000000-0000-0000-0000-000000000001?prefix=TES" -sleep 1 -get "00000000-0000-0000-0000-000000000001?prefix=" - -exit diff --git a/scripts/TEST.html b/scripts/TEST.html new file mode 100644 index 00000000000..8cd50f79b5c --- /dev/null +++ b/scripts/TEST.html @@ -0,0 +1,125 @@ + + + + + WebSocket JSON Tester + + + + +

WebSocket JSON Tester
(tester-page markup not preserved in this extract)
Waiting for server response...
+ + + + + diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 4e5614c0655..3e1a69e1f57 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -6,6 +6,53 @@ source ./pulse_lib.sh TOKEN=$(./token.sh claims.json) ZP="00000000-0000-0000-0000-000000000001/TESTS" +echo "--------- if-match ----------" + + delete ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 1" "If-Match: *" + get ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 1" + get ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_3" "HULY-TTL: 1" "If-Match: dd358c74cb9cb897424838fbcb69c933" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_4" "HULY-TTL: 1" "If-Match: *" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_5" "HULY-TTL: 1" "If-Match: c7bcabf6b98a220f2f4888a18d01568d" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_6" "HULY-TTL: 1" "If-None-Match: *" + +echo "-- Expected OK: 201 Created (key was not exist)" + + put ${ZP} "enother text" "If-None-Match" "*" + + put ${ZP} "some text" + echo "-- Expected Error: 412 Precondition Failed (key was exist)" + put ${ZP} "enother text" "If-None-Match" "*" + +echo "================> UPDATE PUT If-Match" + + get ${ZP} + + echo "-- Expected OK: 204 No Content (right hash)" + put ${ZP} "some text" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (hash still right)" + put ${ZP} "enother version" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + + + + + + + + + + + + + + + + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" echo "sleep 1 sec" sleep 1 diff --git a/src/handlers_http.rs b/src/handlers_http.rs index eddaac3e20e..7786627bc81 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -8,7 +8,6 @@ use tracing::{error, trace}; use uuid::Uuid; use crate::ws_owner; -// type BucketPath = web::Path<(String)>; type ObjectPath = web::Path<(String, String)>; use crate::redis::{ @@ -21,15 +20,34 @@ use crate::redis::{ }; use actix_web::{ - Error, HttpMessage, HttpRequest, HttpResponse, error, + HttpRequest, HttpResponse, error, Error, web::{self, Data, Json, Query}, }; +pub fn map_handler_error(err: impl std::fmt::Display) -> Error { + + let msg = err.to_string(); + + if let Some(detail) = msg.split(" - ExtensionError: ").nth(1) { + if let Some((code, text)) = detail.split_once(": ") { + let text = format!("{} {}", code, text); + return match code { + "400" => actix_web::error::ErrorBadRequest(text), + "404" => actix_web::error::ErrorNotFound(text), + "412" => actix_web::error::ErrorPreconditionFailed(text), + "500" => actix_web::error::ErrorInternalServerError(text), + _ => actix_web::error::ErrorInternalServerError("unexpected error"), + }; + } + } + actix_web::error::ErrorInternalServerError("internal error") +} + + /// list -// #[derive(Deserialize)] -// pub struct ListInfo { prefix: Option } +// #[derive(Deserialize)] pub async fn list( req: HttpRequest, path: web::Path, @@ -52,66 +70,8 @@ pub async fn list( Ok(HttpResponse::Ok().json(entries)) - }() - .await - .map_err(|err| { - tracing::error!(error = %err, "Internal error in GET handler"); - actix_web::error::ErrorInternalServerError("internal error") - }) - -} -/* - path: BucketPath, - query: Query, - redis: web::Data>>, -) -> Result, actix_web::error::Error> { - - ws_owner::workspace_owner(&req)?; // Check workspace - - let (workspace) = path.into_inner(); - trace!(workspace, prefix = ?query.prefix, "list request"); - - // 
... - - async move || -> anyhow::Result> { - let connection = pool.get().await?; - - let response = if let Some(prefix) = &query.prefix { - let pattern = format!("{}%", prefix); - let statement = r#" - select key from kvs where workspace=$1 and namespace=$2 and key like $3 - "#; - - connection - .query(statement, &[&wsuuid, &nsstr, &pattern]) - .await? - } else { - let statement = r#" - select key from kvs where workspace=$1 and namespace=$2 - "#; - - connection.query(statement, &[&wsuuid, &nsstr]).await? - }; - - let count = response.len(); - - let keys = response.into_iter().map(|row| row.get(0)).collect(); - - Ok(Json(ListResponse { - keys, - count, - namespace: nsstr.to_owned(), - workspace: wsstr.to_owned(), - })) - }() - .await - .map_err(|error| { - error!(op = "list", workspace, namespace, ?error, "internal error"); - error::ErrorInternalServerError("") - }) + }().await.map_err(map_handler_error) } -*/ - /// get / (test) @@ -124,8 +84,6 @@ pub async fn get( ws_owner::workspace_owner(&req)?; // Check workspace let (workspace, key) = path.into_inner(); - // println!("\nworkspace = {}", workspace); - // println!("key = {}\n", key); trace!(workspace, key, "get request"); @@ -135,16 +93,13 @@ pub async fn get( Ok( redis_read(&mut *conn, &workspace, &key).await? - .map(|entry| HttpResponse::Ok().json(entry)) + .map(|entry| HttpResponse::Ok() + .insert_header(("ETag", &*entry.etag)) + .json(entry)) .unwrap_or_else(|| HttpResponse::NotFound().body("empty")) ) - }() - .await - .map_err(|err| { - tracing::error!(error = %err, "Internal error in GET handler"); - actix_web::error::ErrorInternalServerError("internal error") - }) + }().await.map_err(map_handler_error) } @@ -161,8 +116,6 @@ pub async fn put( let (workspace, key) = path.into_inner(); - trace!(workspace, key, "put request"); - async move || -> anyhow::Result { let mut conn = redis.lock().await; @@ -183,11 +136,9 @@ pub async fn put( let mut mode = Some(SaveMode::Upsert); if let Some(h) = req.headers().get("If-Match") { // `If-Match: *` - update only if the key exists let s = h.to_str().map_err(|_| anyhow!("Invalid If-Match header"))?; - if s == "*" { mode = Some(SaveMode::Update); } else { - // TODO: `If-Match: ` — update only if current value's MD5 matches - return Err(anyhow!("TODO: Only '*' suported now")); - } - } else if let Some(h) = req.headers().get("If-None-Match") { // `If-None-Match: *` — insert only if the key does not exist + if s == "*" { mode = Some(SaveMode::Update); } // `If-Match: *` — update only if exist + else { mode = Some(SaveMode::Equal(s.to_string())); } // `If-Match: ` — update only if current + } else if let Some(h) = req.headers().get("If-None-Match") { // `If-None-Match: *` — insert only if does not exist let s = h.to_str().map_err(|_| anyhow!("Invalid If-None-Match header"))?; if s == "*" { mode = Some(SaveMode::Insert); } else { return Err(anyhow!("If-None-Match must be '*'")); } } @@ -195,12 +146,7 @@ pub async fn put( redis_save(&mut *conn, &workspace, &key, &body[..], ttl, mode).await?; return Ok(HttpResponse::Ok().body("DONE")); - }() - .await - .map_err(|err| { - tracing::error!(error = %err, "Internal error in GET handler"); - actix_web::error::ErrorInternalServerError("internal error") - }) + }().await.map_err(map_handler_error) } @@ -218,9 +164,6 @@ pub async fn delete( let (workspace, key) = path.into_inner(); trace!(workspace, key, "delete request"); -// let wsuuid = Uuid::parse_str(workspace.as_str()) -// .map_err(|e| error::ErrorBadRequest(format!("Invalid UUID in workspace: {}", e)))?; - 
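+    // The body is an immediately-invoked async closure so that every `?` below
+    // funnels into `map_handler_error`, which turns the "<code>: <message>" strings
+    // produced by redis::error() (e.g. "412: TTL exceeds MAX_TTL") back into
+    // matching HTTP status codes.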
async move || -> anyhow::Result { let mut conn = redis.lock().await; @@ -232,10 +175,6 @@ pub async fn delete( }; Ok(response) - }() - .await - .map_err(|err| { - tracing::error!(error = %err, "Internal error in DELETE handler"); - actix_web::error::ErrorInternalServerError("internal error") - }) + }().await.map_err(map_handler_error) } + diff --git a/src/main.rs b/src/main.rs index a30f297e9f0..99d2b56c4a2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -47,8 +47,6 @@ use config::CONFIG; use hulyrs::services::jwt::actix::ServiceRequestExt; use secrecy::SecretString; -// pub type Pool = bb8::Pool>; - fn initialize_tracing(level: tracing::Level) { use tracing_subscriber::{filter::targets::Targets, prelude::*}; diff --git a/src/redis.rs b/src/redis.rs index c17f152a919..29590a5b84c 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -7,10 +7,12 @@ pub enum Ttl { At(u64), // EXAT (timestamp in seconds) } +#[derive(Debug)] pub enum SaveMode { Upsert, // default: set or overwrite Insert, // only if not exists (NX) Update, // only if exists (XX) + Equal(String), // only if md5 matches provided } use redis::{ @@ -27,14 +29,16 @@ pub struct RedisArray { pub key: String, pub data: String, pub expires_at: u64, // sec to expire TTL + pub etag: String, // md5 hash (data) } -fn error(msg: &'static str) -> RedisResult { - Err(redis::RedisError::from((redis::ErrorKind::ExtensionError, msg))) +fn error(code: u16, msg: impl Into) -> redis::RedisResult { + let msg = msg.into(); + let full = format!("{}: {}", code, msg); + Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, "", full ))) } /// redis_list(&connection,workspace,prefix) - pub async fn redis_list( conn: &mut MultiplexedConnection, workspace: &str, @@ -70,8 +74,9 @@ pub async fn redis_list( results.push(RedisArray { workspace: workspace.to_string(), key, - data: value, + data: value.clone(), expires_at: ttl as u64, + etag: hex::encode(md5::compute(&value).0), }); } } @@ -84,9 +89,7 @@ pub async fn redis_list( } - /// redis_read(&connection,workspace,key) - #[allow(dead_code)] pub async fn redis_read( conn: &mut MultiplexedConnection, @@ -97,22 +100,23 @@ pub async fn redis_read( let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; let Some(data) = data else { return Ok(None); }; - // let ttl: i64 = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(key).query_async(conn).await?; let ttl_vec: Vec = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(key).query_async(conn).await?; let ttl = ttl_vec.get(0).copied().unwrap_or(-3); // -3 unknown error - if ttl == -1 { return error("TTL not setL"); } - if ttl == -2 { return error("Key not found"); } - if ttl < 0 { return error("Unknown TTL error"); } + if ttl == -1 { return error(500, "TTL not set"); } + if ttl == -2 { return error(500, "Key not found"); } + if ttl < 0 { return error(500, "Unknown TTL error"); } Ok(Some(RedisArray { workspace: workspace.to_string(), key: key.to_string(), - data, + data: data.clone(), expires_at: ttl as u64, + etag: hex::encode(md5::compute(&data).0), })) } + /// TTL sec /// redis_save(&mut conn, "workspace", "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; /// @@ -139,34 +143,50 @@ pub async fn redis_save( Some(Ttl::At(timestamp)) => { let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); if timestamp <= now { - return error("TTL timestamp exceeds MAX_TTL limit"); + return error(400, "TTL timestamp exceeds MAX_TTL limit"); } (timestamp - now) as usize } None => CONFIG.max_ttl, 
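+        // Some(Ttl::Sec) uses the given seconds, Some(Ttl::At) converts an absolute
+        // unix timestamp into a relative duration, and None falls back to the
+        // CONFIG.max_ttl default; the checks below then enforce 0 < sec <= max_ttl.
+        // (A timestamp already in the past is rejected above, although the 400
+        // message still reads "exceeds MAX_TTL limit".)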
}; - if sec == 0 { return error("TTL must be > 0"); } - if sec > CONFIG.max_ttl { return error("TTL exceeds MAX_TTL"); } + if sec == 0 { return error(400, "TTL must be > 0"); } + if sec > CONFIG.max_ttl { return error(412, "TTL exceeds MAX_TTL"); } let mut cmd = redis::cmd("HSET"); cmd.arg(workspace).arg(key).arg(value); // Mode variants match mode.unwrap_or(SaveMode::Upsert) { - SaveMode::Upsert => {} // none - SaveMode::Insert => { cmd.arg("NX"); } - SaveMode::Update => { cmd.arg("XX"); } + + SaveMode::Upsert => {} // none + + SaveMode::Insert => { + let exists: bool = redis::cmd("HEXISTS").arg(workspace).arg(key).query_async(conn).await?; + if exists { return error(412, "Insert: key already exists"); } + } + + SaveMode::Update => { + let exists: bool = redis::cmd("HEXISTS").arg(workspace).arg(key).query_async(conn).await?; + if !exists { return error(404, "Update: key does not exist"); } + } + + SaveMode::Equal(md5) => { + let current_value: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; + if let Some(existing) = current_value { + let actual_md5 = hex::encode(md5::compute(&existing).0); + if actual_md5 != md5 { return error(412, format!("md5 mismatch, current: {} expected: {}", actual_md5, md5)); } + } else { return error(404, "Equal: key does not exist"); } + } + } // 1) HSET execute - if cmd.query_async::>(&mut *conn).await?.is_none() { - return error("SET failed: NX/XX condition not met"); - } + cmd.query_async::(&mut *conn).await?; // 2) HEXPIRE execute let res: Vec = redis::cmd("HEXPIRE").arg(workspace).arg(sec).arg("FIELDS").arg(1).arg(key).query_async(&mut *conn).await?; if res.get(0).copied().unwrap_or(0) == 0 { - return error("HEXPIRE failed: field not found or TTL not set"); + return error(404, "HEXPIRE field not found or TTL not set"); } Ok(()) diff --git a/src/redis.rs.ok b/src/redis.rs.ok deleted file mode 100644 index bd57538c0dd..00000000000 --- a/src/redis.rs.ok +++ /dev/null @@ -1,231 +0,0 @@ -// hget hset TODO -// статистику TODO - -// ------------------------------- - -use crate::config::{CONFIG, RedisMode}; - -use std::time::{SystemTime, UNIX_EPOCH}; - -pub enum Ttl { - Sec(usize), // EX - At(u64), // EXAT (timestamp in seconds) -} - -pub enum SaveMode { - Upsert, // default: set or overwrite - Insert, // only if not exists (NX) - Update, // only if exists (XX) -} - -use redis::{ - AsyncCommands, RedisResult, - ToRedisArgs, - Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection }; -use url::Url; - -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize)] -pub struct RedisArray { - pub workspace: String, - pub key: String, - pub data: String, - pub expires_at: Option, // секунды до истечения TTL -} - -fn error(msg: &'static str) -> redis::RedisResult<()> { - Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, msg ))) -} - -/// redis_read(&connection,key) - -#[allow(dead_code)] -pub async fn redis_read( - conn: &mut MultiplexedConnection, - workspace: &str, - key: &str, -) -> redis::RedisResult> { - - let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; - let Some(data) = data else { return Ok(None); }; - - // let ttl: i64 = redis::cmd("TTL").arg(redis_key).query_async(conn).await?; - let ttl: i64 = redis::cmd("TTL").arg(workspace).arg(key).query_async(conn).await?; - let expires_at = if ttl >= 0 { Some(ttl as u64) } else { None }; // -1 (нет TTL), -2 (нет ключа) - - Ok(Some(RedisArray { - workspace: workspace.to_string(), - key: key.to_string(), 
- data, - expires_at, - })) -} - - -/* -EX — срок жизни в секундах (e.g. EX 60 = 1 минута). -EXAT — дата истечения в секундах с эпохи Unix. -KEEPTTL — сохраняет текущий TTL ключа при перезаписи. - -Нет, несложно — Redis уже поддерживает это с помощью флагов NX и XX: - NX — записать только если ключ не существует - XX — перезаписать только если ключ уже существует - -Ты просто добавляешь .arg("NX") или .arg("XX") в команду SET. -Варианты: - SET key val EX 60 NX — с TTL, только если не существует - SET key val XX — только если уже существует, без TTL - SET key val — просто перезаписать, без TTL - -*/ - -/// TTL sec -/// redis_save(&mut conn, "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; -/// -/// TTL at -/// let at_unixtime: u64 = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() + 600; -/// redis_save(&mut conn, "key", "val", Some(Ttl::At(at_unixtime)), Some(SaveMode::Update)).await?; -/// -/// w/o TTL (CONFIG.max_ttl) -/// redis_save(&mut conn, "key", "val", None, None).await?; - -#[allow(dead_code)] -pub async fn redis_save( - conn: &mut MultiplexedConnection, - workspace: &str, - key: &str, - value: T, - ttl: Option, - mode: Option, -) -> RedisResult<()> { - - // TTL variants - match ttl { - Some(Ttl::Sec(secs)) => { - if secs == 0 { - return error("TTL must be > 0"); - } - if secs > CONFIG.max_ttl { - return error("TTL exceeds MAX_TTL"); - } - cmd.arg("EX").arg(secs); - } - Some(Ttl::At(timestamp)) => { - let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); - if timestamp <= now { - return error("TTL timestamp is in the past"); - } - if timestamp - now > CONFIG.max_ttl as u64 { - return error("TTL timestamp exceeds MAX_TTL limit"); - } - cmd.arg("EXAT").arg(timestamp); - } - None => { cmd.arg("EX").arg(CONFIG.max_ttl); } - } - - - let mut cmd = redis::cmd("HSET"); - cmd.arg(workspace).arg(key).arg(value).query_async(conn).await?; - -redis::cmd("HEXPIRE") - .arg(workspace) - .arg(ttl_seconds) - .arg("FIELDS") - .arg(1).arg(key) - .query_async(conn).await?; - - - - // Mode variants - match mode.unwrap_or(SaveMode::Upsert) { - SaveMode::Upsert => { } // nothing - SaveMode::Insert => { cmd.arg("NX"); } - SaveMode::Update => { cmd.arg("XX"); } - } - - let res: Option = cmd.query_async(&mut *conn).await?; - - if res.is_none() { // nil - if NX/XX error - return error("SET failed: NX/XX condition not met"); - } else { - Ok(()) - } -} - - -#[allow(dead_code)] -pub async fn redis_delete( - conn: &mut MultiplexedConnection, - workspace: &str, - key: &str, -) -> redis::RedisResult { - - let deleted: i32 = redis::cmd("HDEL") - .arg(workspace) - .arg(key) - .query_async(conn) - .await?; - - Ok(deleted > 0) -} - - - -/// redis_connect() -pub async fn redis_connect() -> anyhow::Result { - let default_port = match CONFIG.redis_mode { - RedisMode::Sentinel => 6379, - RedisMode::Direct => 6380, - }; - - let urls = CONFIG - .redis_urls - .iter() - .map(|url| { - redis::ConnectionAddr::Tcp( - url.host().unwrap().to_string(), - url.port().unwrap_or(default_port), - ) - }) - .collect::>(); - - let conn = if CONFIG.redis_mode == RedisMode::Sentinel { - use redis::sentinel::{SentinelClientBuilder, SentinelServerType}; - - let mut sentinel = SentinelClientBuilder::new( - urls, - CONFIG.redis_service.to_owned(), - SentinelServerType::Master, - ) - .unwrap() - .set_client_to_redis_protocol(ProtocolVersion::RESP3) - .set_client_to_redis_db(0) - .set_client_to_redis_password(CONFIG.redis_password.clone()) - 
.set_client_to_sentinel_password(CONFIG.redis_password.clone()) - .build()?; - - sentinel.get_async_connection().await? - } else { - let single = urls - .first() - .ok_or_else(|| anyhow::anyhow!("No redis URL provided"))?; - - let redis_connection_info = RedisConnectionInfo { - db: 0, - username: None, - password: Some(CONFIG.redis_password.clone()), - protocol: ProtocolVersion::RESP3, - }; - - let connection_info = ConnectionInfo { - addr: single.clone(), - redis: redis_connection_info, - }; - - let client = Client::open(connection_info)?; - client.get_multiplexed_async_connection().await? - }; - - Ok(conn) -} diff --git a/src/ws_owner.rs b/src/ws_owner.rs index 941ded604c3..d97035802a5 100644 --- a/src/ws_owner.rs +++ b/src/ws_owner.rs @@ -2,7 +2,6 @@ use hulyrs::services::jwt::Claims; use uuid::Uuid; use actix_web::{ Error, HttpMessage, HttpRequest, error }; - /// Checking workspace in Authorization pub fn workspace_owner(req: &HttpRequest) -> Result<(), Error> { let extensions = req.extensions(); @@ -36,4 +35,3 @@ pub fn workspace_owner(req: &HttpRequest) -> Result<(), Error> { Ok(()) } - From bb0bf3665d80a5ada2053f9a72f4eaf1102d1ebb Mon Sep 17 00:00:00 2001 From: Kristina Date: Tue, 12 Aug 2025 12:22:20 +0400 Subject: [PATCH 152/636] Fix notifications total on update (#93) Signed-off-by: Kristina Fefelova --- packages/query/src/notification-contexts/query.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/query/src/notification-contexts/query.ts b/packages/query/src/notification-contexts/query.ts index 0a77979a3cb..d90ab4f87a7 100644 --- a/packages/query/src/notification-contexts/query.ts +++ b/packages/query/src/notification-contexts/query.ts @@ -417,6 +417,7 @@ export class NotificationContextsQuery implements PagedQuery Date: Tue, 12 Aug 2025 18:53:28 +0300 Subject: [PATCH 153/636] WS API: done ; HTTP API: add secret path '$' --- README.md | 31 +++- scripts/TEST.html | 1 + scripts/TEST_HTTP_API.sh | 134 ++++++++++++++++-- scripts/pulse_lib.sh | 3 + src/handlers_http.rs | 31 ++-- src/handlers_ws.rs | 297 +++++++++++++++++++++++++++++++++++---- src/main.rs | 9 +- src/redis.rs | 76 +++++++--- 8 files changed, 497 insertions(+), 85 deletions(-) diff --git a/README.md b/README.md index e8aa9f6763d..2132c064790 100644 --- a/README.md +++ b/README.md @@ -12,13 +12,34 @@ The service is exposed as REST and WebSocket API. - service posts a process status ## Key -Key is a string that consists of one or multiple segments separated by some separator. -Example: foo/bar/baz. -It is possible to use wildcard keys to list or subscribe to values with this prefix. +Key is a string that consists of one or multiple segments separated by ‘/’. Example: foo/bar/baz. +Key may not end with ‘/’ +Segment may not contain special characters (‘*’, ‘?’, ‘[’, ‘]’,‘\’,‘\x00..\xF1’,‘\x7F’,‘"’,‘'’) +Segment may not be empty +Key segment may be private (prefixed with ‘$’) + + Query + +May not contain special characters (‘*’, ‘?’, ‘[’, ‘]’,‘\’,‘\x00..\xF1’,‘\x7F’,‘"’,‘'’) +It is possible to use prefix, for listings / subscriptions (prefix ends with segment separator ‘/’) + +GET/SUBSCRIBE/.. a/b → single key +GET/SUBSCRIBE/.. a/b/c/ → multiple + + If multiple + +select all keys starting with prefix +skip keys, containing private segments to the right from the prefix + + example +1. /a/b/$c/$d, 2. /a/b/c, 3. /a/b/$c, 4. /a/b/$c/$d/e +/ → [2] +/a/b/ → [2] +/a/b/$c/ → [3] +/a/b/$c/$d/ → [4] +/a/b/$c/$d → [1] -Key may contain a special section (guard) $that separates public and private data. 
“Private” data is available when querying or subscribing by exact key. -Example foo/bar/$/private, this value can be queried by foo/bar/$/private or foo/bar/$/but not by foo/bar/ ## Data “Data” is an arbitrary JSON document. diff --git a/scripts/TEST.html b/scripts/TEST.html index 8cd50f79b5c..a525eca7f15 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -46,6 +46,7 @@ border-radius: 6px; min-height: 100px; white-space: pre-wrap; + overflow-wrap: anywhere; } diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 3e1a69e1f57..050093fcf70 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -6,6 +6,129 @@ source ./pulse_lib.sh TOKEN=$(./token.sh claims.json) ZP="00000000-0000-0000-0000-000000000001/TESTS" + +echo "--------- if-match ----------" + + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/1" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/3$" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/4" "Value_1" "HULY-TTL: 2" + get "00000000-0000-0000-0000-000000000001/TESTS" + get "00000000-0000-0000-0000-000000000001/TESTS/" + get "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/" + + +exit +Key + + + + + +Key is a string that consists of one or multiple segments separated by ‘/’. Example: foo/bar/baz. + + + + + +Segment may not contain special characters (‘$’, ‘*’, ‘?’) + + + +Key may not end with ‘/’ + + + +Segment may not be empty + + + +Key segment may be private (prefixed with ‘$’) + + + +Query + + + + + +May not contain special characters (‘*’, ‘?’) + + + +It is possible to use prefix, for listings / subscriptions (prefix ends with segment separator ‘/’) + + + + + +GET/SUBSCRIBE/.. a/b → single key + + + +GET/SUBSCRIBE/.. a/b/c/ → multiple + + + +If multiple + + + + + +select all keys starting with prefix + + + +skip keys, containing private segments to the right from the prefix + + + +example + + + + + + 1. /a/b/$c/$d, 2. /a/b/c, 3. /a/b/$c, 4. 
/a/b/$c/$d/e + + + +/ → [2] + + + +/a/b/ → [2] + + + +/a/b/$c/ → [3] + + + +/a/b/$c/$d/ → [4] + + + +/a/b/$c/$d → (1) + + + + + + + +exit + +echo "--------- Deprecated symbols ----------" + + put "00000000-0000-0000-0000-000000000001/'TESTS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TES?TS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS*" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/" "Value_1" "HULY-TTL: 2" + echo "--------- if-match ----------" delete ${ZP} @@ -42,17 +165,6 @@ echo "================> UPDATE PUT If-Match" - - - - - - - - - - - put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" echo "sleep 1 sec" sleep 1 diff --git a/scripts/pulse_lib.sh b/scripts/pulse_lib.sh index c79edec450b..022d8cb87cc 100755 --- a/scripts/pulse_lib.sh +++ b/scripts/pulse_lib.sh @@ -32,6 +32,9 @@ api() { *) echo -en "${GRAY}${status}${N}" ;; esac if [ -n "$etag" ]; then echo -n -e " ${F}${etag}${N}" ; fi + + body=$(echo "$body" | sed 's/{/\\n{/g') + if [ -n "$body" ]; then echo -e "\n ${GRAY}[${body}]${N}" ; else echo -e " ${L}(no body)${N}" ; fi rm -f "$tmpfile" } diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 7786627bc81..bba2cc8ddcf 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -8,8 +8,6 @@ use tracing::{error, trace}; use uuid::Uuid; use crate::ws_owner; -type ObjectPath = web::Path<(String, String)>; - use crate::redis::{ Ttl, SaveMode, RedisArray, @@ -46,38 +44,35 @@ pub fn map_handler_error(err: impl std::fmt::Display) -> Error { /// list - -// #[derive(Deserialize)] pub async fn list( req: HttpRequest, - path: web::Path, - query: web::Query>, + path: web::Path<(String, Option)>, redis: web::Data>>, ) -> Result { ws_owner::workspace_owner(&req)?; // Check workspace - let workspace = path.into_inner(); - let prefix = query.get("prefix").map(|s| s.as_str()); + let (workspace, key) = path.into_inner(); - trace!(workspace, prefix, "list request"); + // trace!(workspace, prefix, "list request"); async move || -> anyhow::Result { let mut conn = redis.lock().await; - let entries = redis_list(&mut *conn, &workspace, prefix).await?; + let entries = redis_list(&mut *conn, &workspace, key.as_deref()).await?; Ok(HttpResponse::Ok().json(entries)) }().await.map_err(map_handler_error) } -/// get / (test) + +/// get pub async fn get( req: HttpRequest, - path: ObjectPath, + path: web::Path<(String, String)>, redis: web::Data>>, ) -> Result { @@ -85,7 +80,7 @@ pub async fn get( let (workspace, key) = path.into_inner(); - trace!(workspace, key, "get request"); + // trace!(workspace, key, "get request"); async move || -> anyhow::Result { @@ -104,10 +99,9 @@ pub async fn get( /// put - pub async fn put( req: HttpRequest, - path: ObjectPath, + path: web::Path<(String, String)>, body: web::Bytes, redis: web::Data>>, ) -> Result { @@ -118,6 +112,8 @@ pub async fn put( async move || -> anyhow::Result { + if !req.query_string().is_empty() { return Err(anyhow!("Query parameters are not allowed")); } + let mut conn = redis.lock().await; // TTL logic @@ -151,11 +147,10 @@ pub async fn put( -// delete - +/// delete pub async fn delete( req: HttpRequest, - path: ObjectPath, + path: web::Path<(String, String)>, redis: web::Data>>, ) -> Result { diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 216fcf686a6..af20f024112 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,3 +1,27 @@ +use redis::aio::MultiplexedConnection; +use std::sync::Arc; +use tokio::sync::Mutex; + +use 
actix::fut; +use actix::ActorFutureExt; + +use crate::redis::{ + Ttl, SaveMode, + RedisArray, + redis_save, + redis_read, + redis_delete, + redis_list, + error +}; + +use std::future::Future; +use serde_json::{Value, Map, json}; + + +type JsonMap = Map; + +// ================== use actix::{Actor, StreamHandler, AsyncContext, ActorContext}; use actix_web::{web, HttpRequest, HttpResponse, Error}; use actix_web_actors::ws; @@ -10,22 +34,59 @@ use std::collections::HashSet; #[serde(rename_all = "lowercase", tag = "type")] pub enum WsCommand { Put { - key: String, - data: String, #[serde(default)] correlation: Option, + key: String, + data: String, + #[serde(rename = "expiresAt")] + #[serde(default)] expires_at: Option, + + #[serde(rename = "TTL")] + #[serde(default)] + ttl: Option, + + #[serde(rename = "ifMatch")] + #[serde(default)] + if_match: Option, + + #[serde(rename = "ifNoneMatch")] + #[serde(default)] + if_none_match: Option, }, - Delete { + + Get { + #[serde(default)] + correlation: Option, key: String, + }, + + List { + #[serde(default)] + correlation: Option, + key: Option, + }, + + Delete { #[serde(default)] correlation: Option, + key: String, + + #[serde(rename = "ifMatch")] + #[serde(default)] + if_match: Option, }, + Sub { + #[serde(default)] + correlation: Option, key: String, }, + Unsub { + #[serde(default)] + correlation: Option, key: String, }, } @@ -35,8 +96,10 @@ pub enum WsCommand { pub struct WsSession { pub workspace: String, pub subscriptions: HashSet, // новые поля + pub redis: Arc>, // вот он, тот же тип что и в HTTP API } + /// Actor External trait: must be in separate impl block impl Actor for WsSession { type Context = ws::WebsocketContext; @@ -72,48 +135,224 @@ impl StreamHandler> for WsSession { /// All logic in one impl impl WsSession { + fn wait_and_send( + &mut self, + ctx: &mut ws::WebsocketContext, + fut: F, + mut base: JsonMap, + ) + where + F: std::future::Future> + 'static, + { + ctx.wait( + fut::wrap_future(fut).map(move |res, _actor: &mut Self, ctx| { + match res { + Ok(extra) => { + base.extend(extra); + } + Err(err) => { + base.insert("type".into(), json!("error")); + base.insert("message".into(), json!(err)); + } + } + ctx.text(Value::Object(base).to_string()); + }) + ); + } + + /// When valid JSON recieved for WsSession fn handle_command(&mut self, cmd: WsCommand, ctx: &mut ws::WebsocketContext) { match cmd { - WsCommand::Put { key, data, expires_at, correlation } => { - println!("PUT {} = {} (expires_at: {:?})", key, data, expires_at); - ctx.text(format!("OK PUT {}{}", key, Self::correlation_suffix(&correlation))); - // Здесь — сохранить в Redis - } - WsCommand::Delete { key, correlation } => { + + WsCommand::Put { key, data, expires_at, ttl, if_match, if_none_match, correlation } => { + + println!("PUT {} = {} (expires_at: {:?}) (ttl: {:?}) ws={:?}", key, data, expires_at, ttl, self.workspace); + + let redis = self.redis.clone(); + let workspace = self.workspace.clone(); + + let mut base = JsonMap::new(); + base.insert("action".into(), json!("put")); + base.insert("workspace".into(), json!(&self.workspace)); + base.insert("key".into(), json!(&key)); + base.insert("data".into(), json!(&data)); + if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + if let Some(x) = &expires_at { base.insert("expiresAt".into(), json!(x)); } + if let Some(x) = &ttl { base.insert("TTL".into(), json!(x)); } + if let Some(x) = &if_match { base.insert("ifMatch".into(), json!(x)); } + if let Some(x) = &if_none_match { 
base.insert("ifNoneMatch".into(),json!(x)); } + + let fut = async move { + + // TTL logic + let real_ttl = if let Some(secs) = ttl { + Some(Ttl::Sec(secs as usize)) + } else if let Some(timestamp) = expires_at { + Some(Ttl::At(timestamp)) + } else { + None + }; + + // MODE logic + let mut mode = Some(SaveMode::Upsert); + if let Some(s) = if_match { // `If-Match: *` - update only if the key exists + if s == "*" { // `If-Match: *` — update only if exist + mode = Some(SaveMode::Update); + } else { // `If-Match: ` — update only if current + mode = Some(SaveMode::Equal(s.to_string())); + } + } else if let Some(s) = if_none_match { // `If-None-Match: *` — insert only if does not exist + if s == "*" { + mode = Some(SaveMode::Insert); + } else { + return Err::("ifNoneMatch must contain only '*'".into()); + } + } + + let mut conn = redis.lock().await; + + redis_save(&mut *conn, &workspace, &key, &data, real_ttl, mode) + .await + .map_err(|e| e.to_string())?; + + let mut extra = JsonMap::new(); + extra.insert("response".into(), json!("OK")); + Ok::(extra) + + }; + + self.wait_and_send(ctx, fut, base); + } + + + WsCommand::Delete { key, correlation, if_match } => { println!("DELETE {}", key); - ctx.text(format!("OK DELETE {}{}", key, Self::correlation_suffix(&correlation))); - // Здесь — удалить из Redis + + let redis = self.redis.clone(); + let workspace = self.workspace.clone(); + + let mut base = JsonMap::new(); + base.insert("action".into(), json!("delete")); + base.insert("workspace".into(), json!(&self.workspace)); + base.insert("key".into(), json!(&key)); + if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + if let Some(x) = &if_match { base.insert("ifMatch".into(), json!(x)); } + + let fut = async move { + + let mut conn = redis.lock().await; + + let deleted = redis_delete(&mut *conn, &workspace, &key) + .await + .map_err(|e| e.to_string())?; + + if deleted { + let mut extra = JsonMap::new(); + extra.insert("response".into(), json!("OK")); + Ok::(extra) + } else { + Err::("not found".into()) + } + + }; + + self.wait_and_send(ctx, fut, base); } - WsCommand::Sub { key } => { - println!("SUB {}", key); + + WsCommand::Get { key, correlation } => { + println!("GET {}{:?}", key, correlation); + + let redis = self.redis.clone(); + let workspace = self.workspace.clone(); + + let mut base = JsonMap::new(); + base.insert("action".into(), json!("get")); + base.insert("workspace".into(), json!(&self.workspace)); + base.insert("key".into(), json!(&key)); + if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + + let fut = async move { + + let mut conn = redis.lock().await; + + let data_opt = redis_read(&mut *conn, &workspace, &key) + .await + .map_err(|e| e.to_string())?; + + match data_opt { + Some(data) => { + let mut extra = JsonMap::new(); + let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; + extra.insert("response".into(), data_value); + Ok::(extra) + } + None => Err::("not found".into()) + } + }; + + self.wait_and_send(ctx, fut, base); + } + + WsCommand::List { key, correlation } => { + println!("LIST {:?}{:?}", key, correlation); + + let redis = self.redis.clone(); + let workspace = self.workspace.clone(); + + let mut base = JsonMap::new(); + base.insert("action".into(), json!("get")); + base.insert("workspace".into(), json!(&self.workspace)); + if let Some(x) = &key { base.insert("key".into(), json!(x)); } + if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + + let fut = async move { + + let 
mut conn = redis.lock().await; + + let data = redis_list(&mut *conn, &workspace, key.as_deref()) + .await + .map_err(|e| e.to_string())?; + + let mut extra = JsonMap::new(); + let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; + extra.insert("response".into(), data_value); + Ok::(extra) + }; + + self.wait_and_send(ctx, fut, base); + } + + WsCommand::Sub { key, correlation } => { + println!("SUB {}{:?}", key, correlation); ctx.text(format!("OK SUB {}", key)); // Здесь — подписка (в будущем pub/sub) } - WsCommand::Unsub { key } => { - println!("UNSUB {}", key); + + + WsCommand::Unsub { key, correlation } => { + println!("UNSUB {}{:?}", key, correlation); ctx.text(format!("OK UNSUB {}", key)); // Здесь — отписка } - } - } - fn correlation_suffix(corr: &Option) -> String { - match corr { - Some(c) => format!(" [correlation: {}]", c), - None => "".to_string(), - } - // - // corr.as_ref() - // .map(|c| format!(" [correlation: {}]", c)) - // .unwrap_or_default() - // + } } } -pub async fn handler(req: HttpRequest, stream: web::Payload, path: web::Path) -> Result { + +pub async fn handler( + req: HttpRequest, + stream: web::Payload, + path: web::Path, + redis: web::Data>>, +) -> Result { let workspace = path.into_inner(); - let session = WsSession { workspace, subscriptions: HashSet::new() }; + let session = WsSession { + workspace, + subscriptions: HashSet::new(), + redis: redis.get_ref().clone(), + }; ws::start(session, &req, stream) } diff --git a/src/main.rs b/src/main.rs index 99d2b56c4a2..3717dccb272 100644 --- a/src/main.rs +++ b/src/main.rs @@ -105,10 +105,11 @@ async fn main() -> anyhow::Result<()> { .service( web::scope("/api") .wrap(middleware::from_fn(interceptor)) - .route("/{workspace}", web::get().to(handlers_http::list)) - .route("/{workspace}/{key:.*}",web::get().to(handlers_http::get)) - .route("/{workspace}/{key:.*}",web::put().to(handlers_http::put)) - .route("/{workspace}/{key:.*}",web::delete().to(handlers_http::delete)) + .route("/{workspace}/", web::get().to(handlers_http::list)) + .route("/{workspace}/{key:.+/}", web::get().to(handlers_http::list)) + .route("/{workspace}/{key:.+}", web::get().to(handlers_http::get)) + .route("/{workspace}/{key:.+}", web::put().to(handlers_http::put)) + .route("/{workspace}/{key:.+}", web::delete().to(handlers_http::delete)) ) .route("/status", web::get().to(async || "ok")) .route("/ws/{workspace}", web::get().to(handlers_ws::handler)) // WebSocket diff --git a/src/redis.rs b/src/redis.rs index 29590a5b84c..9c8edfcbd49 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -2,6 +2,7 @@ use crate::config::{CONFIG, RedisMode}; use std::time::{SystemTime, UNIX_EPOCH}; +#[derive(serde::Serialize)] pub enum Ttl { Sec(usize), // EX At(u64), // EXAT (timestamp in seconds) @@ -32,51 +33,76 @@ pub struct RedisArray { pub etag: String, // md5 hash (data) } -fn error(code: u16, msg: impl Into) -> redis::RedisResult { +/// return Error +pub fn error(code: u16, msg: impl Into) -> redis::RedisResult { let msg = msg.into(); let full = format!("{}: {}", code, msg); Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, "", full ))) } + +/// Check for redis-deprecated symbols +pub fn redis_deprecate_symbols(s: &str) -> redis::RedisResult<()> { + if s.chars().any(|c| matches!( c, + '*' | '?' 
| '[' | ']' | '\\' | + '\0'..='\x1F' | '\x7F' | + '"' | '\'' // | ' ' + )) { + error(412, "Deprecated symbols in workspace or key") + } else { + Ok(()) + } +} + + /// redis_list(&connection,workspace,prefix) pub async fn redis_list( conn: &mut MultiplexedConnection, workspace: &str, - prefix: Option<&str>, + key: Option<&str>, ) -> redis::RedisResult> { + + let pattern = if let Some(k) = key { + if !k.ends_with('/') { return error(412, "Key must end with slash"); } + Some(format!("{k}*")) + } else { + None + }; + + redis_deprecate_symbols(&workspace)?; + if let Some(k) = key { redis_deprecate_symbols(k)?; } + let mut cursor = 0; let mut results = Vec::new(); - let pattern = prefix.map(|p| format!("{}*", p)); loop { let mut cmd = redis::cmd("HSCAN"); cmd.arg(workspace).arg(cursor); if let Some(ref p) = pattern { - cmd.arg("MATCH").arg(p); - } + cmd.arg("MATCH").arg(p); + } + // cmd.arg("COUNT").arg(100); let (next_cursor, items): (u64, Vec<(String, String)>) = cmd.query_async(conn).await?; - for (key, value) in items { - // TTL - let ttl_vec: Vec = redis::cmd("HTTL") - .arg(workspace) - .arg("FIELDS") - .arg(1) - .arg(&key) - .query_async(conn) - .await?; + for (k, v) in items { - let ttl = ttl_vec.get(0).copied().unwrap_or(-3); + // Check for $-security path + if let Some(prefix) = key { + if k[prefix.len()..].contains('$') { continue; } + } + // TTL + let ttl_vec: Vec = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(&k).query_async(conn).await?; + let ttl = ttl_vec.get(0).copied().unwrap_or(-3); if ttl >= 0 { results.push(RedisArray { workspace: workspace.to_string(), - key, - data: value.clone(), + key: k, + data: v.clone(), expires_at: ttl as u64, - etag: hex::encode(md5::compute(&value).0), + etag: hex::encode(md5::compute(&v).0), }); } } @@ -97,6 +123,10 @@ pub async fn redis_read( key: &str, ) -> redis::RedisResult> { + redis_deprecate_symbols(&workspace)?; + redis_deprecate_symbols(&key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; let Some(data) = data else { return Ok(None); }; @@ -137,6 +167,10 @@ pub async fn redis_save( mode: Option, ) -> RedisResult<()> { + redis_deprecate_symbols(&workspace)?; + redis_deprecate_symbols(&key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + // TTL logic let sec = match ttl { Some(Ttl::Sec(secs)) => secs, @@ -201,6 +235,10 @@ pub async fn redis_delete( key: &str, ) -> redis::RedisResult { + redis_deprecate_symbols(&workspace)?; + redis_deprecate_symbols(&key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + let deleted: i32 = redis::cmd("HDEL") .arg(workspace) .arg(key) @@ -268,3 +306,5 @@ pub async fn redis_connect() -> anyhow::Result { Ok(conn) } + + From d7e2a50232751ddcf51c564a4353df606ac3f4a6 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Wed, 13 Aug 2025 14:16:47 +0300 Subject: [PATCH 154/636] WS API: ping test every 5 sec --- src/handlers_ws.rs | 47 +++++++++++++++++++++++++++++----------------- src/main.rs | 4 ++++ src/ws_ping.rs | 15 +++++++++++++++ 3 files changed, 49 insertions(+), 17 deletions(-) create mode 100644 src/ws_ping.rs diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index af20f024112..5e42b308e4c 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,9 +1,12 @@ use redis::aio::MultiplexedConnection; use std::sync::Arc; use tokio::sync::Mutex; - -use actix::fut; -use actix::ActorFutureExt; +use 
serde_json::{Value, Map, json}; +use actix::{Actor, StreamHandler, AsyncContext, ActorContext, fut, ActorFutureExt }; +use actix_web::{web, HttpRequest, HttpResponse, Error}; +use actix_web_actors::ws; +use serde::Deserialize; +use std::collections::HashSet; use crate::redis::{ Ttl, SaveMode, @@ -15,20 +18,8 @@ use crate::redis::{ error }; -use std::future::Future; -use serde_json::{Value, Map, json}; - - type JsonMap = Map; -// ================== -use actix::{Actor, StreamHandler, AsyncContext, ActorContext}; -use actix_web::{web, HttpRequest, HttpResponse, Error}; -use actix_web_actors::ws; -use serde::Deserialize; -use serde_json::Result as JsonResult; -use std::collections::HashSet; - /// WsCommand - commands enum (put, delete, sub, unsub) #[derive(Deserialize, Debug)] #[serde(rename_all = "lowercase", tag = "type")] @@ -100,6 +91,15 @@ pub struct WsSession { } + +// ======= ping ======== +use crate::ws_ping::test_message; +// ======= /ping ======== + + + + + /// Actor External trait: must be in separate impl block impl Actor for WsSession { type Context = ws::WebsocketContext; @@ -107,6 +107,19 @@ impl Actor for WsSession { fn started(&mut self, ctx: &mut Self::Context) { println!("WebSocket connected to workspace [{}]", self.workspace); ctx.text(format!("Connected to workspace: {}", self.workspace)); + +// ======= ping ======== + + // Для наглядности во время отладки: +// install_ws_ping_with(ctx, std::time::Duration::from_secs(5), PingMode::ControlAndText("__!ping!__")); + + test_message(ctx); + // Если захочешь нестандартный интервал: + // use std::time::Duration; + // use crate::ws_ping::install_ws_ping_with_period; + // install_ws_ping_with_period(ctx, Duration::from_secs(5)); +// ======= /ping ======== + } } @@ -326,14 +339,14 @@ impl WsSession { WsCommand::Sub { key, correlation } => { println!("SUB {}{:?}", key, correlation); ctx.text(format!("OK SUB {}", key)); - // Здесь — подписка (в будущем pub/sub) + // TODO } WsCommand::Unsub { key, correlation } => { println!("UNSUB {}{:?}", key, correlation); ctx.text(format!("OK UNSUB {}", key)); - // Здесь — отписка + // TODO } } diff --git a/src/main.rs b/src/main.rs index 3717dccb272..c43c9b7caeb 100644 --- a/src/main.rs +++ b/src/main.rs @@ -15,6 +15,10 @@ #![allow(unused_imports)] +// === ping === +mod ws_ping; +// === /ping === + use std::pin::Pin; use actix_cors::Cors; diff --git a/src/ws_ping.rs b/src/ws_ping.rs new file mode 100644 index 00000000000..651f9271f2b --- /dev/null +++ b/src/ws_ping.rs @@ -0,0 +1,15 @@ +use std::time::Duration; +use actix::Actor; +use actix::AsyncContext; +use actix_web_actors::ws; + +pub fn test_message
(ctx: &mut ws::WebsocketContext) +where + A: Actor> + 'static, +{ + println!("-- INSTALL test message ---"); + ctx.run_interval(Duration::from_secs(5), |_, ctx| { + println!("-- sending test message ---"); + ctx.text("test message from server"); + }); +} From 6248d746a5953db973b2640111bf88cb414988f2 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Thu, 14 Aug 2025 01:34:57 +0300 Subject: [PATCH 155/636] WS API: Sub, Unsub --- Cargo.lock | 1 + Cargo.toml | 3 +- scripts/TEST.html | 40 ++++- src/handlers_http.rs | 43 +++-- src/handlers_ws.rs | 209 +++++++++++++++++------- src/main.rs | 104 ++++++++++-- src/redis.rs | 180 ++++++++++---------- src/redis_events.rs | 87 ++++++++++ src/{ws_owner.rs => workspace_owner.rs} | 22 ++- src/ws_hub.rs | 90 ++++++++++ src/ws_ping.rs | 15 -- 11 files changed, 581 insertions(+), 213 deletions(-) create mode 100644 src/redis_events.rs rename src/{ws_owner.rs => workspace_owner.rs} (60%) create mode 100644 src/ws_hub.rs delete mode 100644 src/ws_ping.rs diff --git a/Cargo.lock b/Cargo.lock index 34c35b22781..c806ce9bab4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1192,6 +1192,7 @@ dependencies = [ "serde_with", "size", "tokio", + "tokio-stream", "tracing", "tracing-subscriber", "url", diff --git a/Cargo.toml b/Cargo.toml index 598df204e06..3ef483388b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ actix-web-actors = "4.2.0" refinery = { version = "0.8.16", features = ["tokio-postgres"] } bb8 = "0.9.0" bb8-redis = "0.24" -redis = { version = "0.32.2", features = [ "tokio-comp", "sentinel" ] } +redis = { version = "0.32.2", features = [ "aio", "tokio-comp", "sentinel" ] } md5 = "0.8.0" serde_with = "3" url = "2" @@ -28,6 +28,7 @@ hex = "0.4.3" serde_json = "1.0" hulyrs = { git = "https://github.com/hcengineering/hulyrs.git", features = ["actix"] } secrecy = "0.10.3" +tokio-stream = "0.1" [[bin]] name = "hulypulse" diff --git a/scripts/TEST.html b/scripts/TEST.html index a525eca7f15..3aed28e5251 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -63,6 +63,7 @@

WebSocket JSON Tester


+

@@ -85,13 +86,46 @@

WebSocket JSON Tester

"key": "foo/bar1" }'>GET 2 + + + + + + + + + + + + + + + +
Waiting for server response...
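
The tester page above drives the service with small tagged JSON objects, and on the Rust side those payloads are decoded through serde's internally tagged representation (tag = "type", rename_all = "lowercase", as on WsCommand). Below is a minimal self-contained sketch of that round trip; the Cmd enum is a trimmed illustrative stand-in, not the actual WsCommand from src/handlers_ws.rs.

use serde::Deserialize;
use serde_json::json;

// Trimmed stand-in for WsCommand: just enough variants to show the
// internally tagged JSON layout that the tester buttons produce.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "lowercase", tag = "type")]
enum Cmd {
    Get { key: String, #[serde(default)] correlation: Option<String> },
    Sub { key: String, #[serde(default)] correlation: Option<String> },
}

fn main() {
    // Equivalent to what the "GET 2" button sends over the socket.
    let raw = json!({ "type": "get", "key": "foo/bar1" }).to_string();
    let cmd: Cmd = serde_json::from_str(&raw).expect("valid command");
    println!("{cmd:?}"); // prints: Get { key: "foo/bar1", correlation: None }
}

A payload with an unrecognized "type" fails inside from_str, which is exactly what the WS handler reports back to the client as "Invalid JSON: ...".
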
diff --git a/src/handlers_http.rs b/src/handlers_http.rs index bba2cc8ddcf..51c06eaba73 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -6,7 +6,7 @@ use anyhow::anyhow; use std::time::{SystemTime, UNIX_EPOCH}; use tracing::{error, trace}; use uuid::Uuid; -use crate::ws_owner; +use crate::workspace_owner::workspace_check; use crate::redis::{ Ttl, SaveMode, @@ -46,21 +46,21 @@ pub fn map_handler_error(err: impl std::fmt::Display) -> Error { /// list pub async fn list( req: HttpRequest, - path: web::Path<(String, Option)>, + path: web::Path, redis: web::Data>>, ) -> Result { - ws_owner::workspace_owner(&req)?; // Check workspace + workspace_check(&req)?; // Check workspace - let (workspace, key) = path.into_inner(); + let key = path.into_inner(); - // trace!(workspace, prefix, "list request"); + trace!(key, "list request"); async move || -> anyhow::Result { let mut conn = redis.lock().await; - let entries = redis_list(&mut *conn, &workspace, key.as_deref()).await?; + let entries = redis_list(&mut *conn, &key).await?; Ok(HttpResponse::Ok().json(entries)) @@ -68,26 +68,25 @@ pub async fn list( } - /// get pub async fn get( req: HttpRequest, - path: web::Path<(String, String)>, + path: web::Path, redis: web::Data>>, ) -> Result { - ws_owner::workspace_owner(&req)?; // Check workspace + workspace_check(&req)?; // Check workspace - let (workspace, key) = path.into_inner(); + let key = path.into_inner(); - // trace!(workspace, key, "get request"); + // trace!(key, "get request"); async move || -> anyhow::Result { let mut conn = redis.lock().await; Ok( - redis_read(&mut *conn, &workspace, &key).await? + redis_read(&mut *conn, &key).await? .map(|entry| HttpResponse::Ok() .insert_header(("ETag", &*entry.etag)) .json(entry)) @@ -101,14 +100,14 @@ pub async fn get( /// put pub async fn put( req: HttpRequest, - path: web::Path<(String, String)>, + path: web::Path, body: web::Bytes, redis: web::Data>>, ) -> Result { - ws_owner::workspace_owner(&req)?; // Check workspace + workspace_check(&req)?; // Check workspace - let (workspace, key) = path.into_inner(); + let key: String = path.into_inner(); async move || -> anyhow::Result { @@ -139,30 +138,30 @@ pub async fn put( if s == "*" { mode = Some(SaveMode::Insert); } else { return Err(anyhow!("If-None-Match must be '*'")); } } - redis_save(&mut *conn, &workspace, &key, &body[..], ttl, mode).await?; + redis_save(&mut *conn, &key, &body[..], ttl, mode).await?; return Ok(HttpResponse::Ok().body("DONE")); }().await.map_err(map_handler_error) } - /// delete pub async fn delete( req: HttpRequest, - path: web::Path<(String, String)>, + path: web::Path, redis: web::Data>>, ) -> Result { - ws_owner::workspace_owner(&req)?; // Check workspace + workspace_check(&req)?; // Check workspace + + let key: String = path.into_inner(); - let (workspace, key) = path.into_inner(); - trace!(workspace, key, "delete request"); + trace!(key, "delete request"); async move || -> anyhow::Result { let mut conn = redis.lock().await; - let deleted = redis_delete(&mut *conn, &workspace, &key).await?; + let deleted = redis_delete(&mut *conn, &key).await?; let response = match deleted { true => HttpResponse::NoContent().finish(), diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 5e42b308e4c..26095ba6f42 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,8 +1,21 @@ +use crate::ws_hub::{WsHub, ServerMessage, Join, Leave}; // NEW + +// ============= use redis::aio::MultiplexedConnection; use std::sync::Arc; use tokio::sync::Mutex; use serde_json::{Value, 
Map, json}; -use actix::{Actor, StreamHandler, AsyncContext, ActorContext, fut, ActorFutureExt }; +use actix::{ + Actor, + StreamHandler, + AsyncContext, + ActorContext, + fut, + ActorFutureExt, + + Handler, WrapFuture // добавили Handler, WrapFuture + +}; use actix_web::{web, HttpRequest, HttpResponse, Error}; use actix_web_actors::ws; use serde::Deserialize; @@ -15,7 +28,8 @@ use crate::redis::{ redis_read, redis_delete, redis_list, - error + error, + deprecated_symbol, }; type JsonMap = Map; @@ -56,7 +70,7 @@ pub enum WsCommand { List { #[serde(default)] correlation: Option, - key: Option, + key: String, }, Delete { @@ -80,20 +94,27 @@ pub enum WsCommand { correlation: Option, key: String, }, + + Sublist { + #[serde(default)] + correlation: Option, + }, } /// Session condition #[allow(dead_code)] pub struct WsSession { - pub workspace: String, pub subscriptions: HashSet, // новые поля pub redis: Arc>, // вот он, тот же тип что и в HTTP API + + pub hub: actix::Addr, // NEW + pub id: Option, // NEW } // ======= ping ======== -use crate::ws_ping::test_message; +// use crate::ws_ping::test_message; // ======= /ping ======== @@ -105,30 +126,74 @@ impl Actor for WsSession { type Context = ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { - println!("WebSocket connected to workspace [{}]", self.workspace); - ctx.text(format!("Connected to workspace: {}", self.workspace)); + println!("WebSocket connected"); + ctx.text("Connected"); // ======= ping ======== + // test_message(ctx); + // регистрируемся в хабе и получаем id + let addr = ctx.address().recipient::(); + let hub = self.hub.clone(); + ctx.wait( + hub.send(Join { addr }) + .into_actor(self) + .map(|res, actor, _ctx| { + if let Ok(id) = res { + actor.id = Some(id); + } + }) + ); +// ======= /ping ======== + } - // Для наглядности во время отладки: -// install_ws_ping_with(ctx, std::time::Duration::from_secs(5), PingMode::ControlAndText("__!ping!__")); - - test_message(ctx); - // Если захочешь нестандартный интервал: - // use std::time::Duration; - // use crate::ws_ping::install_ws_ping_with_period; - // install_ws_ping_with_period(ctx, Duration::from_secs(5)); +// ======= ping ======== + fn stopped(&mut self, _ctx: &mut Self::Context) { + if let Some(id) = self.id.take() { + self.hub.do_send(Leave { id }); + } + println!("WebSocket disconnected"); + } // ======= /ping ======== +} + + + + + +// ======= ping ======== +impl actix::Handler for WsSession { + type Result = (); + fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { + ctx.text(msg.0); } } +// ======= /ping ======== + + + + + + + + + + + + + + + + + + /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { match msg { Ok(ws::Message::Text(text)) => { - println!("Message from [{}]: {}", self.workspace, text); + println!("Message: {}", text); match serde_json::from_str::(&text) { Ok(cmd) => self.handle_command(cmd, ctx), Err(err) => ctx.text(format!("Invalid JSON: {}", err)), @@ -136,7 +201,7 @@ impl StreamHandler> for WsSession { } Ok(ws::Message::Ping(msg)) => ctx.pong(&msg), Ok(ws::Message::Close(reason)) => { - println!("Closing WS for workspace [{}]: {:?}", self.workspace, reason); + println!("Closing WS: {:?}", reason); ctx.close(reason); ctx.stop(); } @@ -160,13 +225,8 @@ impl WsSession { ctx.wait( fut::wrap_future(fut).map(move |res, _actor: &mut Self, ctx| { match res { - Ok(extra) => { - base.extend(extra); - } - Err(err) => { - 
base.insert("type".into(), json!("error")); - base.insert("message".into(), json!(err)); - } + Ok(extra) => { base.extend(extra); } + Err(err) => { base.insert("error".into(), json!(err)); } } ctx.text(Value::Object(base).to_string()); }) @@ -180,14 +240,12 @@ impl WsSession { WsCommand::Put { key, data, expires_at, ttl, if_match, if_none_match, correlation } => { - println!("PUT {} = {} (expires_at: {:?}) (ttl: {:?}) ws={:?}", key, data, expires_at, ttl, self.workspace); + println!("PUT {} = {} (expires_at: {:?}) (ttl: {:?})", key, data, expires_at, ttl); let redis = self.redis.clone(); - let workspace = self.workspace.clone(); let mut base = JsonMap::new(); base.insert("action".into(), json!("put")); - base.insert("workspace".into(), json!(&self.workspace)); base.insert("key".into(), json!(&key)); base.insert("data".into(), json!(&data)); if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } @@ -225,7 +283,7 @@ impl WsSession { let mut conn = redis.lock().await; - redis_save(&mut *conn, &workspace, &key, &data, real_ttl, mode) + redis_save(&mut *conn, &key, &data, real_ttl, mode) .await .map_err(|e| e.to_string())?; @@ -243,11 +301,9 @@ impl WsSession { println!("DELETE {}", key); let redis = self.redis.clone(); - let workspace = self.workspace.clone(); let mut base = JsonMap::new(); base.insert("action".into(), json!("delete")); - base.insert("workspace".into(), json!(&self.workspace)); base.insert("key".into(), json!(&key)); if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } if let Some(x) = &if_match { base.insert("ifMatch".into(), json!(x)); } @@ -256,9 +312,7 @@ impl WsSession { let mut conn = redis.lock().await; - let deleted = redis_delete(&mut *conn, &workspace, &key) - .await - .map_err(|e| e.to_string())?; + let deleted = redis_delete(&mut *conn, &key).await.map_err(|e| e.to_string())?; if deleted { let mut extra = JsonMap::new(); @@ -277,11 +331,9 @@ impl WsSession { println!("GET {}{:?}", key, correlation); let redis = self.redis.clone(); - let workspace = self.workspace.clone(); let mut base = JsonMap::new(); base.insert("action".into(), json!("get")); - base.insert("workspace".into(), json!(&self.workspace)); base.insert("key".into(), json!(&key)); if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } @@ -289,7 +341,7 @@ impl WsSession { let mut conn = redis.lock().await; - let data_opt = redis_read(&mut *conn, &workspace, &key) + let data_opt = redis_read(&mut *conn, &key) .await .map_err(|e| e.to_string())?; @@ -311,21 +363,17 @@ impl WsSession { println!("LIST {:?}{:?}", key, correlation); let redis = self.redis.clone(); - let workspace = self.workspace.clone(); let mut base = JsonMap::new(); base.insert("action".into(), json!("get")); - base.insert("workspace".into(), json!(&self.workspace)); - if let Some(x) = &key { base.insert("key".into(), json!(x)); } + base.insert("key".into(), json!(&key)); if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } let fut = async move { let mut conn = redis.lock().await; - let data = redis_list(&mut *conn, &workspace, key.as_deref()) - .await - .map_err(|e| e.to_string())?; + let data = redis_list(&mut *conn, &key).await.map_err(|e| e.to_string())?; let mut extra = JsonMap::new(); let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; @@ -336,19 +384,69 @@ impl WsSession { self.wait_and_send(ctx, fut, base); } - WsCommand::Sub { key, correlation } => { - println!("SUB {}{:?}", key, correlation); - 
ctx.text(format!("OK SUB {}", key)); - // TODO - } + // TODO + WsCommand::Sub { key, correlation } => { + println!("SUB {}{:?}", key, correlation); - WsCommand::Unsub { key, correlation } => { - println!("UNSUB {}{:?}", key, correlation); - ctx.text(format!("OK UNSUB {}", key)); - // TODO - } + let mut obj = JsonMap::new(); + obj.insert("action".into(), json!("sub")); + obj.insert("key".into(), json!(key)); + if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } + + if deprecated_symbol(&key) { + obj.insert("error".into(), json!("Deprecated symbol in key")); + } else { + let added = self.subscriptions.insert(key.clone()); + obj.insert("sub_count".into(), json!( self.subscriptions.len() )); + if !added { obj.insert("warning".into(), json!("Subscribe already exist")); } + } + ctx.text(Value::Object(obj).to_string()); + } + + WsCommand::Unsub { key, correlation } => { + println!("UNSUB {}{:?}", key, correlation); + let mut obj = JsonMap::new(); + obj.insert("action".into(), json!("unsub")); + obj.insert("key".into(), json!(key)); + if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } + + + let removed = if key == "*" { + if !self.subscriptions.is_empty() { + self.subscriptions.clear(); + true + } else { + false + } + } else { + if deprecated_symbol(&key) { + obj.insert("error".into(), json!("Deprecated symbol in key")); + true + } else { + self.subscriptions.remove(&key) + } + }; + obj.insert("sub_count".into(), json!( self.subscriptions.len() )); + if !removed { obj.insert("warning".into(), json!("Subscribe already deleted")); } + + ctx.text(Value::Object(obj).to_string()); + } + + WsCommand::Sublist { correlation } => { + println!("SUBLIST {:?}", correlation); + let mut obj = JsonMap::new(); + obj.insert("action".into(), json!("sublist")); + if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } + + obj.insert("response".into(), json!(self.subscriptions.iter().cloned().collect::>())); + obj.insert("sub_count".into(), json!( self.subscriptions.len() )); + + ctx.text(Value::Object(obj).to_string()); + } + + // End of commands } } @@ -358,14 +456,15 @@ impl WsSession { pub async fn handler( req: HttpRequest, stream: web::Payload, - path: web::Path, redis: web::Data>>, + hub: web::Data>, // NEW ) -> Result { - let workspace = path.into_inner(); let session = WsSession { - workspace, subscriptions: HashSet::new(), redis: redis.get_ref().clone(), + hub: hub.get_ref().clone(), // NEW + id: None, // NEW }; ws::start(session, &req, stream) } + diff --git a/src/main.rs b/src/main.rs index c43c9b7caeb..f5a7a0953e2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -15,10 +15,6 @@ #![allow(unused_imports)] -// === ping === -mod ws_ping; -// === /ping === - use std::pin::Pin; use actix_cors::Cors; @@ -38,13 +34,21 @@ use tracing::info; mod config; mod handlers_http; mod handlers_ws; - use crate::handlers_ws::{WsSession, handler}; mod redis; use crate::redis::redis_connect; +// use ::redis::cmd as redis_cmd; // redis_cmd для GET в таске + +mod workspace_owner; + +// == =hub === +// mod redis_events; +mod ws_hub; +use actix::prelude::*; +use crate::ws_hub::{WsHub, Broadcast, Count, ServerMessage, Join, Leave}; +// === /hub === -mod ws_owner; use config::CONFIG; @@ -80,7 +84,8 @@ async fn interceptor( } -#[tokio::main] +// #[tokio::main] +#[actix_web::main] async fn main() -> anyhow::Result<()> { initialize_tracing(tracing::Level::DEBUG); @@ -90,6 +95,68 @@ async fn main() -> anyhow::Result<()> { let redis = 
std::sync::Arc::new(tokio::sync::Mutex::new(redis)); let redis_data = web::Data::new(redis.clone()); + +// ====================================== + +let hub_addr = WsHub::default().start(); // NEW +let hub_data = web::Data::new(hub_addr.clone()); // NEW + +// let hub_addr: Addr = WsHub::default().start(); +// let hub_data: web::Data> = web::Data::new(hub_addr.clone()); + +/* +// === HUB: общий реестр WS-подключений === + + +// === Redis Pub/Sub listener (отдельное соединение) === +let mut pubsub_conn = redis_connect().await?; // NEW: отдельный коннект только для Pub/Sub +// (опционально) пробуем включить нотификации +let _ = crate::redis_events::try_enable_keyspace_notifications(&mut pubsub_conn).await; // можно игнорить ошибку +let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub_conn); // NEW +// поток, который читает события из Redis и шлёт их всем WS +{ + let hub_for_task = hub_addr.clone(); + let redis_for_task = redis.clone(); + tokio::spawn(async move { + use crate::redis_events::RedisEventKind; + while let Some(ev) = rx.recv().await { + let payload_json = match ev.kind { + RedisEventKind::Set => { + let mut conn = redis_for_task.lock().await; + let val: Option = redis_cmd("GET") + .arg(&ev.key) + .query_async(&mut *conn) + .await + .ok() + .flatten(); + serde_json::json!({ + "type": "redis", + "event": "set", + "key": ev.key, + "value": val + }) + } + RedisEventKind::Del => serde_json::json!({ + "type": "redis", + "event": "del", + "key": ev.key + }), + RedisEventKind::Expired => serde_json::json!({ + "type": "redis", + "event": "expired", + "key": ev.key + }), + }; + hub_for_task.do_send(Broadcast { text: payload_json.to_string() }); + } + }); +} + +*/ + +// ============================================ + + let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); @@ -104,19 +171,30 @@ async fn main() -> anyhow::Result<()> { App::new() .app_data(payload_config.clone()) .app_data(redis_data.clone()) + .app_data(hub_data.clone()) // ← ЭТО ОБЯЗАТЕЛЬНО + .wrap(middleware::Logger::default()) .wrap(cors) .service( web::scope("/api") .wrap(middleware::from_fn(interceptor)) - .route("/{workspace}/", web::get().to(handlers_http::list)) - .route("/{workspace}/{key:.+/}", web::get().to(handlers_http::list)) - .route("/{workspace}/{key:.+}", web::get().to(handlers_http::get)) - .route("/{workspace}/{key:.+}", web::put().to(handlers_http::put)) - .route("/{workspace}/{key:.+}", web::delete().to(handlers_http::delete)) + .route("/{key:.+/}", web::get().to(handlers_http::list)) + .route("/{key:.+}", web::get().to(handlers_http::get)) + .route("/{key:.+}", web::put().to(handlers_http::put)) + .route("/{key:.+}", web::delete().to(handlers_http::delete)) ) .route("/status", web::get().to(async || "ok")) - .route("/ws/{workspace}", web::get().to(handlers_ws::handler)) // WebSocket + + .route("/stat", web::get().to(|hub: web::Data>| async move { + // let count = hub.send(Count).await.unwrap_or(0); + let count = hub.send(crate::ws_hub::Count).await.unwrap_or(0); + HttpResponse::Ok().json(serde_json::json!({ + "status": "ok", + "connections": count + })) + })) + + .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket }) .bind(socket)? 
.run(); diff --git a/src/redis.rs b/src/redis.rs index 9c8edfcbd49..c5a59f55dd5 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -19,14 +19,14 @@ pub enum SaveMode { use redis::{ AsyncCommands, RedisResult, ToRedisArgs, - Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection }; + Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection +}; use url::Url; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize)] pub struct RedisArray { - pub workspace: String, pub key: String, pub data: String, pub expires_at: u64, // sec to expire TTL @@ -42,133 +42,123 @@ pub fn error(code: u16, msg: impl Into) -> redis::RedisResult { /// Check for redis-deprecated symbols -pub fn redis_deprecate_symbols(s: &str) -> redis::RedisResult<()> { - if s.chars().any(|c| matches!( c, + +pub fn deprecated_symbol(s: &str) -> bool { + s.chars().any(|c| matches!( + c, '*' | '?' | '[' | ']' | '\\' | '\0'..='\x1F' | '\x7F' | - '"' | '\'' // | ' ' - )) { - error(412, "Deprecated symbols in workspace or key") + '"' | '\'' + )) +} + +pub fn deprecated_symbol_error(s: &str) -> redis::RedisResult<()> { + if deprecated_symbol(s) { + error(412, "Deprecated symbol in key") } else { Ok(()) } } - -/// redis_list(&connection,workspace,prefix) +/// redis_list(&connection,prefix) pub async fn redis_list( conn: &mut MultiplexedConnection, - workspace: &str, - key: Option<&str>, + key: &str, ) -> redis::RedisResult> { - let pattern = if let Some(k) = key { - if !k.ends_with('/') { return error(412, "Key must end with slash"); } - Some(format!("{k}*")) - } else { - None - }; + deprecated_symbol_error(key)?; + if !key.ends_with('/') { return error(412, "Key must end with slash"); } + let pattern = format!("{key}*"); - redis_deprecate_symbols(&workspace)?; - if let Some(k) = key { redis_deprecate_symbols(k)?; } - - let mut cursor = 0; + let mut cursor = 0u64; let mut results = Vec::new(); loop { - let mut cmd = redis::cmd("HSCAN"); - cmd.arg(workspace).arg(cursor); - if let Some(ref p) = pattern { - cmd.arg("MATCH").arg(p); - } - - // cmd.arg("COUNT").arg(100); + let mut cmd = redis::cmd("SCAN"); + cmd.arg(cursor); + cmd.arg("MATCH").arg(&pattern); + // cmd.arg("COUNT").arg(100); // Optionally adjust batch size - let (next_cursor, items): (u64, Vec<(String, String)>) = cmd.query_async(conn).await?; + let (next_cursor, keys): (u64, Vec) = cmd.query_async(conn).await?; - for (k, v) in items { + for k in keys { // Check for $-security path - if let Some(prefix) = key { - if k[prefix.len()..].contains('$') { continue; } - } - - // TTL - let ttl_vec: Vec = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(&k).query_async(conn).await?; - let ttl = ttl_vec.get(0).copied().unwrap_or(-3); - if ttl >= 0 { - results.push(RedisArray { - workspace: workspace.to_string(), - key: k, - data: v.clone(), - expires_at: ttl as u64, - etag: hex::encode(md5::compute(&v).0), - }); - } - } + if k.strip_prefix(key).map_or(false, |s| s.contains('$')) { continue; } + + // Get value + let value: Option = redis::cmd("GET").arg(&k).query_async(conn).await?; + let Some(value) = value else { continue; }; // Old and deleted + + // Get TTL + let ttl: i64 = redis::cmd("TTL").arg(&k).query_async(conn).await?; + if ttl >= 0 { + results.push(RedisArray { + key: k, + data: value.clone(), + expires_at: ttl as u64, + etag: hex::encode(md5::compute(&value).0), + }); + } + } - if next_cursor == 0 { break; } - cursor = next_cursor; + if next_cursor == 0 { break;} + cursor = next_cursor; } Ok(results) 
} -/// redis_read(&connection,workspace,key) +/// redis_read(&connection,key) #[allow(dead_code)] pub async fn redis_read( conn: &mut MultiplexedConnection, - workspace: &str, key: &str, ) -> redis::RedisResult> { - redis_deprecate_symbols(&workspace)?; - redis_deprecate_symbols(&key)?; + deprecated_symbol_error(key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } - let data: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; - let Some(data) = data else { return Ok(None); }; + let data: Option = redis::cmd("GET").arg(key).query_async(conn).await?; - let ttl_vec: Vec = redis::cmd("HTTL").arg(workspace).arg("FIELDS").arg(1).arg(key).query_async(conn).await?; - let ttl = ttl_vec.get(0).copied().unwrap_or(-3); // -3 unknown error + let Some(data) = data else { return Ok(None); }; + let ttl: i64 = redis::cmd("TTL").arg(key).query_async(conn).await?; if ttl == -1 { return error(500, "TTL not set"); } if ttl == -2 { return error(500, "Key not found"); } if ttl < 0 { return error(500, "Unknown TTL error"); } Ok(Some(RedisArray { - workspace: workspace.to_string(), key: key.to_string(), data: data.clone(), - expires_at: ttl as u64, + expires_at: ttl as u64, etag: hex::encode(md5::compute(&data).0), })) } - /// TTL sec -/// redis_save(&mut conn, "workspace", "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; +/// redis_save(&mut conn, "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; /// /// TTL at /// let at_unixtime: u64 = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs() + 600; -/// redis_save(&mut conn, "workspace", "key", "val", Some(Ttl::At(at_unixtime)), Some(SaveMode::Update)).await?; +/// redis_save(&mut conn, "key", "val", Some(Ttl::At(at_unixtime)), Some(SaveMode::Update)).await?; /// /// w/o TTL (CONFIG.max_ttl) -/// redis_save(&mut conn, "workspace", "key", "val", None, None).await?; +/// redis_save(&mut conn, "key", "val", None, None).await?; #[allow(dead_code)] pub async fn redis_save( conn: &mut MultiplexedConnection, - workspace: &str, key: &str, value: T, ttl: Option, mode: Option, ) -> RedisResult<()> { - redis_deprecate_symbols(&workspace)?; - redis_deprecate_symbols(&key)?; + deprecated_symbol_error(&key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } // TTL logic @@ -186,61 +176,57 @@ pub async fn redis_save( if sec == 0 { return error(400, "TTL must be > 0"); } if sec > CONFIG.max_ttl { return error(412, "TTL exceeds MAX_TTL"); } - let mut cmd = redis::cmd("HSET"); - cmd.arg(workspace).arg(key).arg(value); + let mut cmd = redis::cmd("SET"); + cmd.arg(key).arg(value).arg("EX").arg(sec); // Mode variants - match mode.unwrap_or(SaveMode::Upsert) { + let mode = mode.unwrap_or(SaveMode::Upsert); + + match mode { SaveMode::Upsert => {} // none - SaveMode::Insert => { - let exists: bool = redis::cmd("HEXISTS").arg(workspace).arg(key).query_async(conn).await?; - if exists { return error(412, "Insert: key already exists"); } - } + SaveMode::Insert => { cmd.arg("NX"); } // if NOT Exist - SaveMode::Update => { - let exists: bool = redis::cmd("HEXISTS").arg(workspace).arg(key).query_async(conn).await?; - if !exists { return error(404, "Update: key does not exist"); } - } + SaveMode::Update => { cmd.arg("XX"); } // if Exist - SaveMode::Equal(md5) => { - let current_value: Option = redis::cmd("HGET").arg(workspace).arg(key).query_async(conn).await?; - if let Some(existing) = current_value { - let actual_md5 = hex::encode(md5::compute(&existing).0); 
- if actual_md5 != md5 { return error(412, format!("md5 mismatch, current: {} expected: {}", actual_md5, md5)); } - } else { return error(404, "Equal: key does not exist"); } + SaveMode::Equal(ref expected_md5) => { // if md5 === actual_md5 + let current_value: Option = redis::cmd("GET").arg(key).query_async(conn).await?; + if let Some(existing) = current_value { + let actual_md5 = hex::encode(md5::compute(&existing).0); + if &actual_md5 != expected_md5 { return error(412, format!("md5 mismatch, current: {}, expected: {}", actual_md5, expected_md5)); } + } else { return error(404, "Equal: key does not exist"); } } - } - // 1) HSET execute - cmd.query_async::(&mut *conn).await?; + let result: Option = cmd.query_async(conn).await?; +// // execute +// cmd.query_async::(&mut *conn).await?; - // 2) HEXPIRE execute - let res: Vec = redis::cmd("HEXPIRE").arg(workspace).arg(sec).arg("FIELDS").arg(1).arg(key).query_async(&mut *conn).await?; - if res.get(0).copied().unwrap_or(0) == 0 { - return error(404, "HEXPIRE field not found or TTL not set"); + if result.is_none() { + match mode { + SaveMode::Insert => return error(412, "Insert: key already exists"), + SaveMode::Update => return error(404, "Update: key does not exist"), + _ => return error(500, "Unexpected Redis SET failure"), + } } Ok(()) } -/// redis_delete(&connection,workspace,key) +/// redis_delete(&connection,key) #[allow(dead_code)] pub async fn redis_delete( conn: &mut MultiplexedConnection, - workspace: &str, key: &str, ) -> redis::RedisResult { - redis_deprecate_symbols(&workspace)?; - redis_deprecate_symbols(&key)?; + deprecated_symbol_error(key)?; + if key.ends_with('/') { return error(412, "Key must not end with a slash"); } - let deleted: i32 = redis::cmd("HDEL") - .arg(workspace) + let deleted: i32 = redis::cmd("DEL") .arg(key) .query_async(conn) .await?; @@ -249,6 +235,7 @@ pub async fn redis_delete( } + /// redis_connect() pub async fn redis_connect() -> anyhow::Result { let default_port = match CONFIG.redis_mode { @@ -307,4 +294,3 @@ pub async fn redis_connect() -> anyhow::Result { Ok(conn) } - diff --git a/src/redis_events.rs b/src/redis_events.rs new file mode 100644 index 00000000000..6fc223774ac --- /dev/null +++ b/src/redis_events.rs @@ -0,0 +1,87 @@ +use redis::aio::MultiplexedConnection; + +use redis::Connection; + + +use tokio::sync::mpsc; +use tokio::task::JoinHandle; +use tokio_stream::StreamExt; + +#[derive(Debug, Clone)] +pub enum RedisEventKind { Set, Del, Expired } + +#[derive(Debug, Clone)] +pub struct RedisEvent { + pub kind: RedisEventKind, + pub key: String, +} + +/// Попытаться включить keyspace notifications (необязательно, но полезно). +/// Если Redis управляемый и CONFIG запрещён, просто вернёт Err — это не критично, +/// слушатель всё равно можно запускать (если они уже включены конфигом). +pub async fn try_enable_keyspace_notifications( + client: &redis::Client, +) -> redis::RedisResult<()> { + let mut conn = client.get_tokio_connection().await?; + let _: redis::Value = redis::cmd("CONFIG") + .arg("SET") + .arg("notify-keyspace-events") + .arg("KExg") + .query_async(&mut conn) + .await?; + Ok(()) +} + + +/// Внутренняя функция: крутит pubsub и шлёт события в tx. +/// Завершается при ошибке соединения (см. spawn_* для обёртки). +/// Запускаем слушатель keyevents, используя redis::Client (сам внутри делает into_pubsub()). 
+pub async fn run_keyevent_listener_with_client( + client: redis::Client, + tx: tokio::sync::mpsc::UnboundedSender, +) -> redis::RedisResult<()> { + let conn = client.get_tokio_connection().await?; + let mut pubsub = conn.into_pubsub(); + + pubsub.subscribe("__keyevent@0__:set").await?; + pubsub.subscribe("__keyevent@0__:del").await?; + pubsub.subscribe("__keyevent@0__:expired").await?; + + let mut stream = pubsub.on_message(); + + while let Some(msg) = stream.next().await { + let channel: String = match msg.get_channel() { Ok(c) => c, Err(_) => continue }; + let key: String = match msg.get_payload() { Ok(p) => p, Err(_) => continue }; + + let kind = if channel.ends_with(":set") { + RedisEventKind::Set + } else if channel.ends_with(":del") { + RedisEventKind::Del + } else if channel.ends_with(":expired") { + RedisEventKind::Expired + } else { + continue; + }; + + let _ = tx.send(RedisEvent { kind, key }); + } + + Ok(()) +} + + + +/// Удобная обёртка: создаёт канал, спаунит таск и возвращает (rx, handle). +/// Ошибки изнутри логируйте в таске, чтобы handle::<()> не падал наружу. +pub fn start_keyevent_listener( + client: redis::Client, +) -> (tokio::sync::mpsc::UnboundedReceiver, tokio::task::JoinHandle<()>) { + let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); + let handle = tokio::spawn(async move { + if let Err(e) = run_keyevent_listener_with_client(client, tx).await { + eprintln!("[redis_events] listener stopped: {e}"); + } + }); + (rx, handle) +} + diff --git a/src/ws_owner.rs b/src/workspace_owner.rs similarity index 60% rename from src/ws_owner.rs rename to src/workspace_owner.rs index d97035802a5..24fade1ebec 100644 --- a/src/ws_owner.rs +++ b/src/workspace_owner.rs @@ -3,9 +3,21 @@ use uuid::Uuid; use actix_web::{ Error, HttpMessage, HttpRequest, error }; /// Checking workspace in Authorization -pub fn workspace_owner(req: &HttpRequest) -> Result<(), Error> { +pub fn workspace_check(req: &HttpRequest) -> Result<(), Error> { let extensions = req.extensions(); + // Get key + let key = req + .match_info() + .get("key") + .ok_or_else(|| error::ErrorBadRequest("Missing key in URL path"))?; + + // Get workspace + let path_ws = match key.find('/') { + Some(x) if x > 0 => &key[..x], + _ => return Err(error::ErrorBadRequest("Invalid key: missing workspace")), + }; + let claims = extensions .get::() .ok_or_else(|| error::ErrorUnauthorized("Missing auth claims"))?; @@ -21,13 +33,7 @@ pub fn workspace_owner(req: &HttpRequest) -> Result<(), Error> { .as_ref() .ok_or_else(|| error::ErrorForbidden("Missing workspace in token"))?; - let path_ws = req - .match_info() - .get("workspace") - .ok_or_else(|| error::ErrorBadRequest("Missing workspace in URL path"))?; - - let path_ws_uuid = - Uuid::parse_str(path_ws).map_err(|_| error::ErrorBadRequest("Invalid workspace UUID"))?; + let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| error::ErrorBadRequest("Invalid workspace UUID"))?; if jwt_workspace != &path_ws_uuid { return Err(error::ErrorForbidden("Workspace mismatch")); diff --git a/src/ws_hub.rs b/src/ws_hub.rs new file mode 100644 index 00000000000..5939f79cc09 --- /dev/null +++ b/src/ws_hub.rs @@ -0,0 +1,90 @@ +use actix::prelude::*; +use std::collections::HashMap; + +/// Сообщение от хаба к сессии: просто текст (готовая JSON-строка) +#[derive(Message)] +#[rtype(result = "()")] +pub struct ServerMessage(pub String); + +/// Присоединить сессию. Возвращает присвоенный id. 
+#[derive(Message)] +#[rtype(result = "usize")] +pub struct Join { + pub addr: Recipient, +} + +/// Отключить сессию по id +#[derive(Message)] +#[rtype(result = "()")] +pub struct Leave { + pub id: usize, +} + +/// Отправить всем +#[derive(Message)] +#[rtype(result = "()")] +pub struct Broadcast { + pub text: String, +} + +/// Количество активных сессий +#[derive(Message)] +#[rtype(result = "usize")] +pub struct Count; + +pub struct WsHub { + sessions: HashMap>, + next_id: usize, +} + +impl Default for WsHub { + fn default() -> Self { + Self { + sessions: HashMap::new(), + next_id: 1, + } + } +} + +impl Actor for WsHub { + type Context = Context; +} + +impl Handler for WsHub { + type Result = usize; + + fn handle(&mut self, msg: Join, _: &mut Context) -> Self::Result { + let id = self.next_id; + self.next_id += 1; + self.sessions.insert(id, msg.addr); + id + } +} + +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: Leave, _: &mut Context) { + self.sessions.remove(&msg.id); + } +} + +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: Broadcast, _: &mut Context) { + let Broadcast { text } = msg; + // рассылаем всем; если какая-то сессия отвалилась — игнорируем ошибку + for (_, recp) in self.sessions.iter() { + let _ = recp.do_send(ServerMessage(text.clone())); + } + } +} + +impl Handler for WsHub { + type Result = usize; + + fn handle(&mut self, _: Count, _: &mut Context) -> Self::Result { + self.sessions.len() + } +} diff --git a/src/ws_ping.rs b/src/ws_ping.rs deleted file mode 100644 index 651f9271f2b..00000000000 --- a/src/ws_ping.rs +++ /dev/null @@ -1,15 +0,0 @@ -use std::time::Duration; -use actix::Actor; -use actix::AsyncContext; -use actix_web_actors::ws; - -pub fn test_message
(ctx: &mut ws::WebsocketContext) -where - A: Actor> + 'static, -{ - println!("-- INSTALL test message ---"); - ctx.run_interval(Duration::from_secs(5), |_, ctx| { - println!("-- sending test message ---"); - ctx.text("test message from server"); - }); -} From 43b25c9af4f999e5e9cb1f62e11ba54cdb95d9d8 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Thu, 14 Aug 2025 11:59:41 +0300 Subject: [PATCH 156/636] Redis events: testing only --- scripts/TEST.html | 57 +++-------- src/.#handlers_ws.rs | 1 + src/GOT.sh | 8 ++ src/handlers_http.rs | 2 +- src/handlers_ws.rs | 2 +- src/main.rs | 136 +++++++++++++------------- src/redis_events.rs | 173 +++++++++++++++++++++------------ src/{redis.rs => redis_lib.rs} | 0 src/ws_hub.rs | 15 +++ 9 files changed, 220 insertions(+), 174 deletions(-) create mode 120000 src/.#handlers_ws.rs create mode 100755 src/GOT.sh rename src/{redis.rs => redis_lib.rs} (100%) diff --git a/scripts/TEST.html b/scripts/TEST.html index 3aed28e5251..bfdb3d28420 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -65,58 +65,25 @@

WebSocket JSON Tester

-

- - - - - - - - - - + + - + + - + + - + + - + + + +

Waiting for server response...
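
The GOT.sh script added a little further below simply pokes Redis from the CLI (a plain SET, a DEL, and a SET with EX 2) so that the keyevent listener introduced in the previous patch has something to report. The following is a condensed, runnable sketch of that listening side, using the same redis-crate async pub/sub calls (get_async_pubsub, psubscribe, on_message) the project relies on; the connection URL and the subscription patterns here are illustrative, and notify-keyspace-events must already be enabled on the server.

use redis::Client;
use tokio_stream::StreamExt;

#[tokio::main]
async fn main() -> redis::RedisResult<()> {
    // Illustrative URL; the real service builds its connection from CONFIG.
    let client = Client::open("redis://127.0.0.1/")?;

    // Needs the redis crate's aio/tokio-comp features (enabled in Cargo.toml above).
    let mut pubsub = client.get_async_pubsub().await?;
    pubsub.psubscribe("__keyevent@*__:set").await?;
    pubsub.psubscribe("__keyevent@*__:del").await?;
    pubsub.psubscribe("__keyevent@*__:expired").await?;

    let mut stream = pubsub.on_message();
    while let Some(msg) = stream.next().await {
        let channel: String = msg.get_channel()?; // e.g. "__keyevent@0__:set"
        let key: String = msg.get_payload()?;     // payload is the affected key
        println!("{channel} -> {key}");
    }
    Ok(())
}

With notifications enabled, running GOT.sh should print a set and then a del event for the key lleo, and roughly two seconds later an expired event for ttlkey.
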
diff --git a/src/.#handlers_ws.rs b/src/.#handlers_ws.rs new file mode 120000 index 00000000000..5a49031bd82 --- /dev/null +++ b/src/.#handlers_ws.rs @@ -0,0 +1 @@ +lleo@lleonuc3.10483 \ No newline at end of file diff --git a/src/GOT.sh b/src/GOT.sh new file mode 100755 index 00000000000..9497816776b --- /dev/null +++ b/src/GOT.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +clear + +redis-cli set lleo value +redis-cli del lleo +redis-cli set ttlkey 1 EX 2 +# подожди ~2 сек → должно прийти expired diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 51c06eaba73..38463dc9343 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -8,7 +8,7 @@ use tracing::{error, trace}; use uuid::Uuid; use crate::workspace_owner::workspace_check; -use crate::redis::{ +use crate::redis_lib::{ Ttl, SaveMode, RedisArray, redis_save, diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 26095ba6f42..dcded0909f1 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -21,7 +21,7 @@ use actix_web_actors::ws; use serde::Deserialize; use std::collections::HashSet; -use crate::redis::{ +use crate::redis_lib::{ Ttl, SaveMode, RedisArray, redis_save, diff --git a/src/main.rs b/src/main.rs index f5a7a0953e2..5123e60778a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -36,17 +36,25 @@ mod handlers_http; mod handlers_ws; use crate::handlers_ws::{WsSession, handler}; -mod redis; -use crate::redis::redis_connect; -// use ::redis::cmd as redis_cmd; // redis_cmd для GET в таске +mod redis_lib; +use crate::redis_lib::redis_connect; mod workspace_owner; // == =hub === -// mod redis_events; +mod redis_events; mod ws_hub; use actix::prelude::*; -use crate::ws_hub::{WsHub, Broadcast, Count, ServerMessage, Join, Leave}; +// use crate::ws_hub::{WsHub, Broadcast, Count, ServerMessage, Join, Leave}; +use crate::ws_hub::{WsHub, Broadcast, ServerMessage, Join, Leave}; + +// part 2 +// use redis::Client as RedisClient; +// use ::redis as redis_crate; +// use crate::redis::redis_connect; +// use ::redis::cmd as redis_cmd; // redis_cmd для GET в таске + + // === /hub === @@ -84,6 +92,53 @@ async fn interceptor( } + + + + + + + + + + + +// NEW +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +// ===================================================================================== +async fn start_redis_logger(redis_url: &str) { + let client = match redis::Client::open(redis_url) { + Ok(c) => c, + Err(e) => { eprintln!("[redis] bad url: {e}"); return; } + }; + + match crate::redis_events::make_pubsub_with_kea(&client).await { + Ok(pubsub) => { + let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); + tokio::spawn(async move { + use crate::redis_events::RedisEventKind::*; + while let Some(ev) = rx.recv().await { + match ev.kind { + Set => println!("[redis] db{} SET {}", ev.db, ev.key), + Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), + // Unlink => println!("[redis] db{} UNLINK {}", ev.db, ev.key), + Expired => 
println!("[redis] db{} EXPIRED {}", ev.db, ev.key), + Other(kind) => println!("[redis] db{} {} {}", ev.db, kind, ev.key), + } + } + }); + } + Err(e) => eprintln!("[redis] pubsub init error: {e}"), + } +} + + // #[tokio::main] #[actix_web::main] async fn main() -> anyhow::Result<()> { @@ -97,66 +152,18 @@ async fn main() -> anyhow::Result<()> { // ====================================== + // HUB-Connections + let hub_addr = WsHub::default().start(); + let hub_data = web::Data::new(hub_addr.clone()); -let hub_addr = WsHub::default().start(); // NEW -let hub_data = web::Data::new(hub_addr.clone()); // NEW - -// let hub_addr: Addr = WsHub::default().start(); -// let hub_data: web::Data> = web::Data::new(hub_addr.clone()); - -/* // === HUB: общий реестр WS-подключений === - - -// === Redis Pub/Sub listener (отдельное соединение) === -let mut pubsub_conn = redis_connect().await?; // NEW: отдельный коннект только для Pub/Sub -// (опционально) пробуем включить нотификации -let _ = crate::redis_events::try_enable_keyspace_notifications(&mut pubsub_conn).await; // можно игнорить ошибку -let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub_conn); // NEW -// поток, который читает события из Redis и шлёт их всем WS -{ - let hub_for_task = hub_addr.clone(); - let redis_for_task = redis.clone(); - tokio::spawn(async move { - use crate::redis_events::RedisEventKind; - while let Some(ev) = rx.recv().await { - let payload_json = match ev.kind { - RedisEventKind::Set => { - let mut conn = redis_for_task.lock().await; - let val: Option = redis_cmd("GET") - .arg(&ev.key) - .query_async(&mut *conn) - .await - .ok() - .flatten(); - serde_json::json!({ - "type": "redis", - "event": "set", - "key": ev.key, - "value": val - }) - } - RedisEventKind::Del => serde_json::json!({ - "type": "redis", - "event": "del", - "key": ev.key - }), - RedisEventKind::Expired => serde_json::json!({ - "type": "redis", - "event": "expired", - "key": ev.key - }), - }; - hub_for_task.do_send(Broadcast { text: payload_json.to_string() }); - } - }); -} - -*/ +// сразу после настройки логирования/перед запуском HttpServer +// start_redis_logger().await; +// Например, перед HttpServer::new(...) 
или сразу после .bind(...): +start_redis_logger("redis://127.0.0.1/").await; // ============================================ - let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); @@ -185,13 +192,10 @@ let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub_conn ) .route("/status", web::get().to(async || "ok")) - .route("/stat", web::get().to(|hub: web::Data>| async move { - // let count = hub.send(Count).await.unwrap_or(0); + // .route("/stat", web::get().to(ws_hub::stat)) + .route("/stat2", web::get().to(|hub: web::Data>| async move { let count = hub.send(crate::ws_hub::Count).await.unwrap_or(0); - HttpResponse::Ok().json(serde_json::json!({ - "status": "ok", - "connections": count - })) + HttpResponse::Ok().json(serde_json::json!({ "connections": count })) })) .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket diff --git a/src/redis_events.rs b/src/redis_events.rs index 6fc223774ac..8e1a4ba0ee3 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -1,87 +1,138 @@ -use redis::aio::MultiplexedConnection; - -use redis::Connection; - - use tokio::sync::mpsc; use tokio::task::JoinHandle; use tokio_stream::StreamExt; +use redis::{ + self, + AsyncCommands, + RedisResult, + Client, + aio::{PubSub, ConnectionLike}, +}; + #[derive(Debug, Clone)] -pub enum RedisEventKind { Set, Del, Expired } +pub enum RedisEventKind { + Set, // создание или перезапись (Redis не различает) + Del, // удаление + Unlink, // удаление (асинхронное) + Expired, // исчез по TTL + Other(String), +} #[derive(Debug, Clone)] pub struct RedisEvent { - pub kind: RedisEventKind, + pub db: u32, pub key: String, + pub kind: RedisEventKind, + pub raw_channel: String, + pub raw_payload: String, } -/// Попытаться включить keyspace notifications (необязательно, но полезно). -/// Если Redis управляемый и CONFIG запрещён, просто вернёт Err — это не критично, -/// слушатель всё равно можно запускать (если они уже включены конфигом). -pub async fn try_enable_keyspace_notifications( - client: &redis::Client, -) -> redis::RedisResult<()> { - let mut conn = client.get_tokio_connection().await?; - let _: redis::Value = redis::cmd("CONFIG") - .arg("SET") - .arg("notify-keyspace-events") - .arg("KExg") - .query_async(&mut conn) - .await?; - Ok(()) +fn parse_kind(event: &str) -> RedisEventKind { + match event { + "set" => RedisEventKind::Set, + "del" => RedisEventKind::Del, + "unlink" => RedisEventKind::Unlink, + "expired" => RedisEventKind::Expired, + other => RedisEventKind::Other(other.to_string()), + } } - -/// Внутренняя функция: крутит pubsub и шлёт события в tx. -/// Завершается при ошибке соединения (см. spawn_* для обёртки). -/// Запускаем слушатель keyevents, используя redis::Client (сам внутри делает into_pubsub()). 
-pub async fn run_keyevent_listener_with_client( - client: redis::Client, - tx: tokio::sync::mpsc::UnboundedSender, -) -> redis::RedisResult<()> { - let conn = client.get_tokio_connection().await?; - let mut pubsub = conn.into_pubsub(); - - pubsub.subscribe("__keyevent@0__:set").await?; - pubsub.subscribe("__keyevent@0__:del").await?; - pubsub.subscribe("__keyevent@0__:expired").await?; - - let mut stream = pubsub.on_message(); - - while let Some(msg) = stream.next().await { - let channel: String = match msg.get_channel() { Ok(c) => c, Err(_) => continue }; - let key: String = match msg.get_payload() { Ok(p) => p, Err(_) => continue }; - - let kind = if channel.ends_with(":set") { - RedisEventKind::Set - } else if channel.ends_with(":del") { - RedisEventKind::Del - } else if channel.ends_with(":expired") { - RedisEventKind::Expired - } else { - continue; - }; - - let _ = tx.send(RedisEvent { kind, key }); +fn parse_db_from_channel(ch: &str) -> u32 { + // "__keyevent@0__:set" → db = 0 + if let Some(at) = ch.find('@') { + if let Some(rest) = ch.get(at + 1..) { + if let Some(end) = rest.find("__:") { + return rest[..end].parse::().unwrap_or(0); + } + } } + 0 +} +/// Включаем только нужные нотификации: keyevent + generic + expired → "Egx". +/// Это отключит шум от многих классов, включая keyspace и т.п. +/// Если прав на CONFIG нет — это не фатально. +async fn try_enable_keyspace_notifications(conn: &mut C) -> RedisResult<()> +where + C: ConnectionLike + Send, +{ + let _: String = redis::cmd("CONFIG") + .arg("SET") + .arg("notify-keyspace-events") + // .arg("Egx") + .arg("E$gx") + .query_async(conn) + .await?; Ok(()) } +/// Создаём обычный async-коннект, пробуем включить KEA=Egx, +/// затем открываем отдельный PubSub-коннект. +pub async fn make_pubsub_with_kea(client: &Client) -> RedisResult { + let mut conn = client.get_multiplexed_async_connection().await?; + let _ = try_enable_keyspace_notifications(&mut conn).await; + drop(conn); + let pubsub = client.get_async_pubsub().await?; + Ok(pubsub) +} -/// Удобная обёртка: создаёт канал, спаунит таск и возвращает (rx, handle). -/// Ошибки изнутри логируйте в таске, чтобы handle::<()> не падал наружу. +/// Слушатель keyevent-событий. Подписываемся ТОЛЬКО на нужные типы, +/// чтобы даже не получать `expire` (команда установки TTL). 
pub fn start_keyevent_listener( - client: redis::Client, -) -> (tokio::sync::mpsc::UnboundedReceiver, tokio::task::JoinHandle<()>) { - let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); + mut pubsub: PubSub, +) -> (mpsc::UnboundedReceiver, JoinHandle<()>) { + let (tx, rx) = mpsc::unbounded_channel(); + let handle = tokio::spawn(async move { - if let Err(e) = run_keyevent_listener_with_client(client, tx).await { - eprintln!("[redis_events] listener stopped: {e}"); + // Подписываемся точечно + if let Err(e) = pubsub.psubscribe("__keyevent@*__:set").await { + eprintln!("[redis_events] psubscribe error (set): {e}"); + return; + } + if let Err(e) = pubsub.psubscribe("__keyevent@*__:del").await { + eprintln!("[redis_events] psubscribe error (del): {e}"); + return; + } + if let Err(e) = pubsub.psubscribe("__keyevent@*__:unlink").await { + eprintln!("[redis_events] psubscribe error (unlink): {e}"); + return; + } + if let Err(e) = pubsub.psubscribe("__keyevent@*__:expired").await { + eprintln!("[redis_events] psubscribe error (expired): {e}"); + return; + } + + let mut stream = pubsub.on_message(); + while let Some(msg) = stream.next().await { + let channel = match msg.get_channel::() { + Ok(c) => c, + Err(e) => { eprintln!("[redis_events] bad channel: {e}"); continue; } + }; + let payload = match msg.get_payload::() { + Ok(p) => p, + Err(e) => { eprintln!("[redis_events] bad payload: {e}"); continue; } + }; + + // "__keyevent@0__:set" → event="set", db=0; payload = ключ + let event = channel.rsplit(':').next().unwrap_or(""); + let kind = parse_kind(event); + let db = parse_db_from_channel(&channel); + + let ev = RedisEvent { + db, + key: payload.clone(), + kind, + raw_channel: channel, + raw_payload: payload, + }; + + if tx.send(ev).is_err() { + break; // приёмник закрыт + } } }); + (rx, handle) } - diff --git a/src/redis.rs b/src/redis_lib.rs similarity index 100% rename from src/redis.rs rename to src/redis_lib.rs diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 5939f79cc09..96fbff04173 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -88,3 +88,18 @@ impl Handler for WsHub { self.sessions.len() } } + + +/* +/// stat +use actix_web::{web, HttpResponse}; +use actix::Addr; +use serde_json::json; + +// use crate::ws_hub::{WsHub, Count}; + +pub async fn stat(hub: web::Data>) -> HttpResponse { + let count = hub.send(Count).await.unwrap_or(0); + HttpResponse::Ok().json(json!({ "connections": count })) +} +*/ \ No newline at end of file From 5e1fa19880795e8ada455154b7866c7ad392ec5c Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Thu, 14 Aug 2025 12:48:19 +0300 Subject: [PATCH 157/636] Redis events: sessions enumerate --- src/.#handlers_ws.rs | 1 - src/handlers_ws.rs | 67 +++++++++++++++++++---------------- src/main.rs | 2 +- src/redis_events.rs | 8 ++--- src/ws_hub.rs | 83 ++++++++++++++++++++++++++++++++++++-------- 5 files changed, 112 insertions(+), 49 deletions(-) delete mode 120000 src/.#handlers_ws.rs diff --git a/src/.#handlers_ws.rs b/src/.#handlers_ws.rs deleted file mode 120000 index 5a49031bd82..00000000000 --- a/src/.#handlers_ws.rs +++ /dev/null @@ -1 +0,0 @@ -lleo@lleonuc3.10483 \ No newline at end of file diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index dcded0909f1..8c1193dd7aa 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,4 +1,5 @@ -use crate::ws_hub::{WsHub, ServerMessage, Join, Leave}; // NEW +use actix::{prelude::*}; +use crate::ws_hub::{WsHub, ServerMessage, Connect, Disconnect, SessionId}; // ============= use redis::aio::MultiplexedConnection; @@ 
-107,12 +108,13 @@ pub struct WsSession { pub subscriptions: HashSet, // новые поля pub redis: Arc>, // вот он, тот же тип что и в HTTP API - pub hub: actix::Addr, // NEW - pub id: Option, // NEW +// pub hub: actix::Addr, // NEW +// pub id: Option, // NEW + pub id: SessionId, + pub hub: Addr, } - // ======= ping ======== // use crate::ws_ping::test_message; // ======= /ping ======== @@ -120,19 +122,38 @@ pub struct WsSession { - /// Actor External trait: must be in separate impl block impl Actor for WsSession { type Context = ws::WebsocketContext; + // type Context = actix_web_actors::ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { + // просим ID у хаба + + // let addr = ctx.address().recipient::(); + let addr = ctx.address(); + + let recipient = addr.recipient::(); println!("WebSocket connected"); - ctx.text("Connected"); + self.hub + .send(Connect { addr: recipient }) + .into_actor(self) + .map(|res, act, _ctx| { + match res { + Ok(id) => { + act.id = id; + println!("[ws_session] got id={id}"); + } + Err(e) => { + eprintln!("[ws_session] connect to hub failed: {e}"); + _ctx.stop(); + } + } + }) + .wait(ctx); // дождёмся присвоения ID, чтобы он точно был -// ======= ping ======== - // test_message(ctx); - // регистрируемся в хабе и получаем id - let addr = ctx.address().recipient::(); +/* + ctx.text("Connected"); let hub = self.hub.clone(); ctx.wait( hub.send(Join { addr }) @@ -143,21 +164,18 @@ impl Actor for WsSession { } }) ); -// ======= /ping ======== +*/ } -// ======= ping ======== fn stopped(&mut self, _ctx: &mut Self::Context) { - if let Some(id) = self.id.take() { - self.hub.do_send(Leave { id }); - } + // if let Some(id) = self.id.take() { self.hub.do_send(Leave { id }); } + if self.id != 0 { + self.hub.do_send(Disconnect { session_id: self.id }); + } println!("WebSocket disconnected"); } -// ======= /ping ======== -} - - +} // ======= ping ======== @@ -179,15 +197,6 @@ impl actix::Handler for WsSession { - - - - - - - - - /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { @@ -463,7 +472,7 @@ pub async fn handler( subscriptions: HashSet::new(), redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), // NEW - id: None, // NEW + id: 0, // NEW }; ws::start(session, &req, stream) } diff --git a/src/main.rs b/src/main.rs index 5123e60778a..58806c4ec6a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -46,7 +46,7 @@ mod redis_events; mod ws_hub; use actix::prelude::*; // use crate::ws_hub::{WsHub, Broadcast, Count, ServerMessage, Join, Leave}; -use crate::ws_hub::{WsHub, Broadcast, ServerMessage, Join, Leave}; +use crate::ws_hub::{WsHub, Broadcast, ServerMessage}; // , Join, Leave // part 2 // use redis::Client as RedisClient; diff --git a/src/redis_events.rs b/src/redis_events.rs index 8e1a4ba0ee3..574fb3a83c6 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -24,8 +24,8 @@ pub struct RedisEvent { pub db: u32, pub key: String, pub kind: RedisEventKind, - pub raw_channel: String, - pub raw_payload: String, +// pub raw_channel: String, +// pub raw_payload: String, } fn parse_kind(event: &str) -> RedisEventKind { @@ -124,8 +124,8 @@ pub fn start_keyevent_listener( db, key: payload.clone(), kind, - raw_channel: channel, - raw_payload: payload, + // raw_channel: channel, + // raw_payload: payload, }; if tx.send(ev).is_err() { diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 96fbff04173..ff546595693 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs 
@@ -1,11 +1,41 @@ use actix::prelude::*; use std::collections::HashMap; -/// Сообщение от хаба к сессии: просто текст (готовая JSON-строка) + + + +#[derive(Message)] +#[rtype(result = "SessionId")] +pub struct Connect { + pub addr: Recipient, +} + +#[derive(Message)] +#[rtype(result = "()")] +pub struct Disconnect { + pub session_id: SessionId, +} + + + + + + + + + + + + + + +/// Message from Hub to Session (JSON-string) #[derive(Message)] #[rtype(result = "()")] pub struct ServerMessage(pub String); + +/* /// Присоединить сессию. Возвращает присвоенный id. #[derive(Message)] #[rtype(result = "usize")] @@ -13,12 +43,14 @@ pub struct Join { pub addr: Recipient, } + /// Отключить сессию по id #[derive(Message)] #[rtype(result = "()")] pub struct Leave { pub id: usize, } +*/ /// Отправить всем #[derive(Message)] @@ -32,17 +64,15 @@ pub struct Broadcast { #[rtype(result = "usize")] pub struct Count; +pub type SessionId = u64; pub struct WsHub { - sessions: HashMap>, - next_id: usize, + sessions: HashMap>, + next_id: SessionId, } impl Default for WsHub { fn default() -> Self { - Self { - sessions: HashMap::new(), - next_id: 1, - } + Self { sessions: HashMap::new(), next_id: 1u64 } } } @@ -50,25 +80,50 @@ impl Actor for WsHub { type Context = Context; } -impl Handler for WsHub { - type Result = usize; - fn handle(&mut self, msg: Join, _: &mut Context) -> Self::Result { +impl Handler for WsHub { + type Result = SessionId; + + fn handle(&mut self, msg: Connect, _ctx: &mut Context) -> Self::Result { let id = self.next_id; - self.next_id += 1; + self.next_id = self.next_id.wrapping_add(1); self.sessions.insert(id, msg.addr); + println!("[ws_hub] session connected: id={id} (total={})", self.sessions.len()); id } } -impl Handler for WsHub { +impl Handler for WsHub { type Result = (); - fn handle(&mut self, msg: Leave, _: &mut Context) { - self.sessions.remove(&msg.id); + fn handle(&mut self, msg: Disconnect, _ctx: &mut Context) { + let existed = self.sessions.remove(&msg.session_id).is_some(); + if existed { + println!("[ws_hub] session disconnected: id={} (total={})", msg.session_id, self.sessions.len()); + } else { + println!("[ws_hub] disconnect for unknown id={}", msg.session_id); + } } } + + + + + + + + + + + + + + + + + + impl Handler for WsHub { type Result = (); From 1ceeb41d92ac18cc5a2e3a8ce65f6baf8c7686f9 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 15 Aug 2025 03:54:25 +0300 Subject: [PATCH 158/636] Redis events: done with WS --- src/handlers_ws.rs | 133 ++++++++------- src/main.rs | 74 +++++--- src/redis_events.rs | 88 +++++----- src/ws_hub.rs | 404 ++++++++++++++++++++++++++++++++++++++++---- 4 files changed, 535 insertions(+), 164 deletions(-) diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 8c1193dd7aa..ea1697b76b8 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,5 +1,10 @@ use actix::{prelude::*}; -use crate::ws_hub::{WsHub, ServerMessage, Connect, Disconnect, SessionId}; +use crate::ws_hub::{ + WsHub, ServerMessage, SessionId, + Connect, Disconnect, + Subscribe, Unsubscribe, UnsubscribeAll, + SubscribeList, +}; // ============= use redis::aio::MultiplexedConnection; @@ -105,32 +110,20 @@ pub enum WsCommand { /// Session condition #[allow(dead_code)] pub struct WsSession { - pub subscriptions: HashSet, // новые поля - pub redis: Arc>, // вот он, тот же тип что и в HTTP API - -// pub hub: actix::Addr, // NEW -// pub id: Option, // NEW +// pub subscriptions: HashSet, // новые поля + pub redis: Arc>, pub id: SessionId, pub hub: Addr, } -// ======= ping 
======== -// use crate::ws_ping::test_message; -// ======= /ping ======== - - - - /// Actor External trait: must be in separate impl block impl Actor for WsSession { type Context = ws::WebsocketContext; - // type Context = actix_web_actors::ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { // просим ID у хаба - // let addr = ctx.address().recipient::(); let addr = ctx.address(); let recipient = addr.recipient::(); @@ -151,34 +144,28 @@ impl Actor for WsSession { } }) .wait(ctx); // дождёмся присвоения ID, чтобы он точно был - -/* - ctx.text("Connected"); - let hub = self.hub.clone(); - ctx.wait( - hub.send(Join { addr }) - .into_actor(self) - .map(|res, actor, _ctx| { - if let Ok(id) = res { - actor.id = Some(id); - } - }) - ); -*/ } fn stopped(&mut self, _ctx: &mut Self::Context) { - // if let Some(id) = self.id.take() { self.hub.do_send(Leave { id }); } - if self.id != 0 { - self.hub.do_send(Disconnect { session_id: self.id }); - } + if self.id != 0 { self.hub.do_send(Disconnect { session_id: self.id }); } println!("WebSocket disconnected"); } } - // ======= ping ======== +impl actix::Handler for WsSession { + type Result = (); + + fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { + let json = serde_json::to_string(&msg.event) + .unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); + ctx.text(json); + } +} + +/* + impl actix::Handler for WsSession { type Result = (); @@ -186,6 +173,17 @@ impl actix::Handler for WsSession { ctx.text(msg.0); } } + +impl actix::Handler for WsSession { + type Result = (); + fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { + // Чтобы это работало, добавь #[derive(serde::Serialize)] в RedisEvent и RedisEventKind + let json = serde_json::to_string(&msg.0) + .unwrap_or_else(|_| "\"serialization error\"".into()); + ctx.text(json); + } +} +*/ // ======= /ping ======== @@ -393,9 +391,21 @@ impl WsSession { self.wait_and_send(ctx, fut, base); } - // TODO + + + + + + + + + + + + WsCommand::Sub { key, correlation } => { + // LEVENT 3 println!("SUB {}{:?}", key, correlation); let mut obj = JsonMap::new(); @@ -406,54 +416,53 @@ impl WsSession { if deprecated_symbol(&key) { obj.insert("error".into(), json!("Deprecated symbol in key")); } else { - let added = self.subscriptions.insert(key.clone()); - obj.insert("sub_count".into(), json!( self.subscriptions.len() )); - if !added { obj.insert("warning".into(), json!("Subscribe already exist")); } + self.hub.do_send(Subscribe { session_id: self.id, key: key.clone() }); + // obj.insert("sub_count".into(), json!( self.subscriptions.len() )); } + ctx.text(Value::Object(obj).to_string()); } WsCommand::Unsub { key, correlation } => { + // LEVENT 4 println!("UNSUB {}{:?}", key, correlation); let mut obj = JsonMap::new(); obj.insert("action".into(), json!("unsub")); obj.insert("key".into(), json!(key)); if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } - - let removed = if key == "*" { - if !self.subscriptions.is_empty() { - self.subscriptions.clear(); - true - } else { - false - } + if key == "*" { + self.hub.do_send(UnsubscribeAll { session_id: self.id }); } else { if deprecated_symbol(&key) { obj.insert("error".into(), json!("Deprecated symbol in key")); - true } else { - self.subscriptions.remove(&key) + self.hub.do_send(Unsubscribe { session_id: self.id, key: key.clone() }); } }; - obj.insert("sub_count".into(), json!( self.subscriptions.len() )); - if !removed { obj.insert("warning".into(), json!("Subscribe already deleted")); } - 
ctx.text(Value::Object(obj).to_string()); } WsCommand::Sublist { correlation } => { println!("SUBLIST {:?}", correlation); - let mut obj = JsonMap::new(); - obj.insert("action".into(), json!("sublist")); - if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } - obj.insert("response".into(), json!(self.subscriptions.iter().cloned().collect::>())); - obj.insert("sub_count".into(), json!( self.subscriptions.len() )); + let mut base = JsonMap::new(); + base.insert("action".into(), json!("sublist")); + if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + + let hub = self.hub.clone(); + let id = self.id; - ctx.text(Value::Object(obj).to_string()); - } + let fut = async move { + let keys = hub.send(SubscribeList { session_id: id }).await.unwrap_or_default(); + let mut extra = JsonMap::new(); + extra.insert("response".into(), serde_json::to_value(&keys).map_err(|e| e.to_string())? ); + Ok::(extra) + }; + + self.wait_and_send(ctx, fut, base); + } // End of commands } @@ -466,13 +475,13 @@ pub async fn handler( req: HttpRequest, stream: web::Payload, redis: web::Data>>, - hub: web::Data>, // NEW + hub: web::Data>, ) -> Result { let session = WsSession { - subscriptions: HashSet::new(), + // subscriptions: HashSet::new(), redis: redis.get_ref().clone(), - hub: hub.get_ref().clone(), // NEW - id: 0, // NEW + hub: hub.get_ref().clone(), + id: 0, }; ws::start(session, &req, stream) } diff --git a/src/main.rs b/src/main.rs index 58806c4ec6a..097437451bf 100644 --- a/src/main.rs +++ b/src/main.rs @@ -45,19 +45,12 @@ mod workspace_owner; mod redis_events; mod ws_hub; use actix::prelude::*; -// use crate::ws_hub::{WsHub, Broadcast, Count, ServerMessage, Join, Leave}; -use crate::ws_hub::{WsHub, Broadcast, ServerMessage}; // , Join, Leave - -// part 2 -// use redis::Client as RedisClient; -// use ::redis as redis_crate; -// use crate::redis::redis_connect; -// use ::redis::cmd as redis_cmd; // redis_cmd для GET в таске - +use crate::ws_hub::{WsHub, ServerMessage, + TestGetSubs, +}; // === /hub === - use config::CONFIG; use hulyrs::services::jwt::actix::ServiceRequestExt; @@ -112,6 +105,35 @@ async fn interceptor( // ===================================================================================== // ===================================================================================== // ===================================================================================== +use crate::redis_events::RedisEventKind::*; // Set, Del, Unlink, Expired, Other + +pub async fn start_redis_logger(redis_url: String, hub: Addr) { + let client = match redis::Client::open(redis_url) { + Ok(c) => c, + Err(e) => { eprintln!("[redis] bad url: {e}"); return; } + }; + + match crate::redis_events::make_pubsub_with_kea(&client).await { + Ok(pubsub) => { + let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); + + // Просто читаем события и шлём их в хаб. 
+ while let Some(ev) = rx.recv().await { + match ev.kind { + Set => println!("[redis] db{} SET {}", ev.db, ev.key), + Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), + Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), + Other(ref k) => println!("[redis] db{} {} {}", ev.db, k, ev.key), + } + hub.do_send(ev.clone()); // RedisEvent помечен #[derive(Message)] + } + } + Err(e) => eprintln!("[redis] pubsub init error: {e}"), + } +} + +/* + async fn start_redis_logger(redis_url: &str) { let client = match redis::Client::open(redis_url) { Ok(c) => c, @@ -122,22 +144,26 @@ async fn start_redis_logger(redis_url: &str) { Ok(pubsub) => { let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); tokio::spawn(async move { - use crate::redis_events::RedisEventKind::*; + while let Some(ev) = rx.recv().await { + // LEVENT 5,6 match ev.kind { Set => println!("[redis] db{} SET {}", ev.db, ev.key), Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), - // Unlink => println!("[redis] db{} UNLINK {}", ev.db, ev.key), Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), Other(kind) => println!("[redis] db{} {} {}", ev.db, kind, ev.key), } + + // TODO !!!!!!!!!!!!!!! + hub.do_send(ev.clone()); // ev: RedisEvent + } }); } Err(e) => eprintln!("[redis] pubsub init error: {e}"), } } - +*/ // #[tokio::main] #[actix_web::main] @@ -152,15 +178,14 @@ async fn main() -> anyhow::Result<()> { // ====================================== - // HUB-Connections - let hub_addr = WsHub::default().start(); - let hub_data = web::Data::new(hub_addr.clone()); + // стартуем хаб + let hub = WsHub::default().start(); + let hub_data = web::Data::new(hub.clone()); // Data> -// === HUB: общий реестр WS-подключений === -// сразу после настройки логирования/перед запуском HttpServer -// start_redis_logger().await; -// Например, перед HttpServer::new(...) или сразу после .bind(...): -start_redis_logger("redis://127.0.0.1/").await; + // запускаем логгер редиса в фоне + tokio::spawn(start_redis_logger("redis://127.0.0.1/".to_string(), hub.clone())); + // tokio::spawn(start_redis_logger(redis_url, hub.clone())); + // start_redis_logger("redis://127.0.0.1/").await; // ============================================ @@ -198,6 +223,13 @@ start_redis_logger("redis://127.0.0.1/").await; HttpResponse::Ok().json(serde_json::json!({ "connections": count })) })) + .route("/subs", web::get().to(|hub: web::Data>| async move { + match hub.send(TestGetSubs).await { + Ok(subs) => HttpResponse::Ok().json(subs), + Err(_) => HttpResponse::InternalServerError().body("Failed to get subscriptions"), + } + })) + .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket }) .bind(socket)? 
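The redis_events.rs hunk just below parses keyspace-notification channel names of the form `__keyevent@<db>__:<event>`: the event name sits after the last ':', the database index between '@' and '__:', and the message payload carries the affected key. A self-contained sketch of that convention, assuming nothing beyond the standard library (parse_keyevent_channel is a hypothetical helper; the patch performs the same parsing inline):

fn parse_keyevent_channel(channel: &str) -> (u32, String) {
    // Event name is everything after the last ':'.
    let event = channel.rsplit(':').next().unwrap_or("").to_string();
    // DB index sits between '@' and "__:"; malformed channels fall back to db 0.
    let db = channel
        .find('@')
        .and_then(|at| channel.get(at + 1..))
        .and_then(|rest| rest.find("__:").map(|end| rest[..end].to_string()))
        .and_then(|s| s.parse::<u32>().ok())
        .unwrap_or(0);
    (db, event)
}

#[cfg(test)]
mod keyevent_channel_tests {
    use super::parse_keyevent_channel;

    #[test]
    fn parses_db_and_event_name() {
        assert_eq!(parse_keyevent_channel("__keyevent@0__:set"), (0, "set".to_string()));
        assert_eq!(parse_keyevent_channel("__keyevent@3__:expired"), (3, "expired".to_string()));
        // Malformed input degrades to db 0 instead of failing.
        assert_eq!(parse_keyevent_channel("garbage"), (0, "garbage".to_string()));
    }
}

Further down in this patch, ws_hub.rs gains subscription_matches, which decides who receives a forwarded RedisEvent: a subscription matches on an exact key, or as a prefix when it ends with '/' and the remainder of the key contains no '$'. A small test sketch of that rule, assuming the subscription_matches function from the ws_hub.rs hunk (the test module name is hypothetical):

#[cfg(test)]
mod subscription_match_tests {
    use super::subscription_matches;

    #[test]
    fn exact_and_prefix_rules() {
        // Exact key match.
        assert!(subscription_matches("xz/value", "xz/value"));
        // A subscription ending in '/' matches keys under that prefix...
        assert!(subscription_matches("foo/", "foo/bar"));
        assert!(subscription_matches("foo/dir/", "foo/dir/item"));
        // ...unless the remainder after the prefix contains a '$'.
        assert!(!subscription_matches("foo/", "foo/bar$meta"));
        // No implicit prefix match without the trailing '/'.
        assert!(!subscription_matches("foo", "foo/bar"));
    }
}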
diff --git a/src/redis_events.rs b/src/redis_events.rs index 574fb3a83c6..b8cc3bcef5a 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -1,7 +1,18 @@ +/* +TODO: Со * + +Сперва по точному совпадению +Потом перебором по * +*/ + + use tokio::sync::mpsc; use tokio::task::JoinHandle; use tokio_stream::StreamExt; +use serde::Serialize; + + use redis::{ self, AsyncCommands, @@ -10,7 +21,7 @@ use redis::{ aio::{PubSub, ConnectionLike}, }; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Serialize)] pub enum RedisEventKind { Set, // создание или перезапись (Redis не различает) Del, // удаление @@ -19,36 +30,23 @@ pub enum RedisEventKind { Other(String), } -#[derive(Debug, Clone)] +use actix::Message; +// use serde::Serialize; + + +#[derive(Debug, Clone, Serialize, Message)] +#[rtype(result = "()")] pub struct RedisEvent { pub db: u32, pub key: String, pub kind: RedisEventKind, -// pub raw_channel: String, -// pub raw_payload: String, } -fn parse_kind(event: &str) -> RedisEventKind { - match event { - "set" => RedisEventKind::Set, - "del" => RedisEventKind::Del, - "unlink" => RedisEventKind::Unlink, - "expired" => RedisEventKind::Expired, - other => RedisEventKind::Other(other.to_string()), - } -} -fn parse_db_from_channel(ch: &str) -> u32 { - // "__keyevent@0__:set" → db = 0 - if let Some(at) = ch.find('@') { - if let Some(rest) = ch.get(at + 1..) { - if let Some(end) = rest.find("__:") { - return rest[..end].parse::().unwrap_or(0); - } - } - } - 0 -} + + + + /// Включаем только нужные нотификации: keyevent + generic + expired → "Egx". /// Это отключит шум от многих классов, включая keyspace и т.п. @@ -57,13 +55,7 @@ async fn try_enable_keyspace_notifications(conn: &mut C) -> RedisResult<()> where C: ConnectionLike + Send, { - let _: String = redis::cmd("CONFIG") - .arg("SET") - .arg("notify-keyspace-events") - // .arg("Egx") - .arg("E$gx") - .query_async(conn) - .await?; + let _: String = redis::cmd("CONFIG").arg("SET").arg("notify-keyspace-events").arg("E$gx").query_async(conn).await?; Ok(()) } @@ -78,15 +70,14 @@ pub async fn make_pubsub_with_kea(client: &Client) -> RedisResult { Ok(pubsub) } -/// Слушатель keyevent-событий. Подписываемся ТОЛЬКО на нужные типы, -/// чтобы даже не получать `expire` (команда установки TTL). 
+/// Listener keyevents pub fn start_keyevent_listener( mut pubsub: PubSub, ) -> (mpsc::UnboundedReceiver, JoinHandle<()>) { let (tx, rx) = mpsc::unbounded_channel(); let handle = tokio::spawn(async move { - // Подписываемся точечно + // Subscribe to events if let Err(e) = pubsub.psubscribe("__keyevent@*__:set").await { eprintln!("[redis_events] psubscribe error (set): {e}"); return; @@ -117,20 +108,23 @@ pub fn start_keyevent_listener( // "__keyevent@0__:set" → event="set", db=0; payload = ключ let event = channel.rsplit(':').next().unwrap_or(""); - let kind = parse_kind(event); - let db = parse_db_from_channel(&channel); - - let ev = RedisEvent { - db, - key: payload.clone(), - kind, - // raw_channel: channel, - // raw_payload: payload, - }; + let kind = match event { + "set" => RedisEventKind::Set, + "del" => RedisEventKind::Del, + "unlink" => RedisEventKind::Unlink, + "expired" => RedisEventKind::Expired, + other => RedisEventKind::Other(other.to_string()), + }; + + let db = channel.find('@') + .and_then(|at| channel.get(at + 1..)) + .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + let ev = RedisEvent { db, key: payload.clone(), kind }; - if tx.send(ev).is_err() { - break; // приёмник закрыт - } + if tx.send(ev).is_err() { break; } // closed } }); diff --git a/src/ws_hub.rs b/src/ws_hub.rs index ff546595693..41d11cb4c38 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -1,63 +1,104 @@ -use actix::prelude::*; -use std::collections::HashMap; +/* +У нас имеется 6 событий: +1) WS сессия подключилась +2) WS сессия отключилась +3) Подписку добавили +4) Подписку отменили +И информация от Redis: +5) Ключ появился/изменился +6) Ключ удалился -#[derive(Message)] -#[rtype(result = "SessionId")] -pub struct Connect { - pub addr: Recipient, -} +Итак, у нас есть таблица A, где хранятся имена подписок и номера подписчиков, которые ее оформили: +foo/ 1,2,5,88 +foo/dir/ 3,50 +xz/value 2,3,4,88 -#[derive(Message)] -#[rtype(result = "()")] -pub struct Disconnect { - pub session_id: SessionId, -} +Соответственно она меняется так: +[DONE] В случае 2 - обходим таблицу и удаляем подписчика номер 5; если не осталось подписчиков, то удаляем и саму строку подписки. +[DONE] В случае 3 - если подписка не существовала, то создать; добавить подписчика. +[DONE] В случае 4 - убрать подписчика, если подписка опустела, то удалить и ее. +В случаях 5 и 6 мы обходим каждый раз ВСЮ таблицу A, сравниваем с каждой +подпиской, и выясняем список подписчиков. +-- Если ключ таблицы === ключу - разослать по всем ID +-- Если ключ таблицы заканчивается на "/" и является началом ключа и его остальные символы не содержат "$" - разослать по всем ID +Соответственно есть второй вариант - оптимизация этого процесса. Хранить +таблицу Б, где перечисляются все существующие ключи, а к каждому +привязаны соответствующие им строки таблицы А. Так что обход таблицы А +происходит только для каждого новопоявившегося ключа. Но поскольку +появление и исчезновение ключа это, я так понимаю, самое частое событие +(пользователь набирает текст - пользователь перестал набирать текст), то +смысла делать таблицу Б я не вижу, она все равно будет точно так же +требовать вычислений и обхода таблицы А каждый раз. 
+*/ + +use std::collections::HashSet; +// ------ +use actix::prelude::*; +use std::collections::HashMap; +fn subscription_matches(sub_key: &str, key: &str) -> bool { + if sub_key == key { return true; } + if sub_key.ends_with('/') && key.starts_with(sub_key) { + let rest = &key[sub_key.len()..]; + return !rest.contains('$'); + } + false +} + + + /// Message from Hub to Session (JSON-string) + +/* #[derive(Message)] #[rtype(result = "()")] pub struct ServerMessage(pub String); +*/ - -/* -/// Присоединить сессию. Возвращает присвоенный id. -#[derive(Message)] -#[rtype(result = "usize")] -pub struct Join { - pub addr: Recipient, +#[derive(Message, Clone, Debug)] +#[rtype(result = "()")] +pub struct ServerMessage { + pub event: RedisEvent, } -/// Отключить сессию по id -#[derive(Message)] + +use crate::redis_events::RedisEvent; +// Redis(RedisEvent), // 👈 новый вариант + +/* +#[derive(Message, Clone)] #[rtype(result = "()")] -pub struct Leave { - pub id: usize, +pub enum ServerMessage { + Text(String), + KeyEvent { db: u32, key: String, kind: RedisEvent }, } */ +/* /// Отправить всем #[derive(Message)] #[rtype(result = "()")] pub struct Broadcast { pub text: String, } +*/ /// Количество активных сессий #[derive(Message)] @@ -65,14 +106,20 @@ pub struct Broadcast { pub struct Count; pub type SessionId = u64; + pub struct WsHub { sessions: HashMap>, + subs: HashMap>, // Массив моих подписок: key -> {id, id, id ...} next_id: SessionId, } impl Default for WsHub { fn default() -> Self { - Self { sessions: HashMap::new(), next_id: 1u64 } + Self { + sessions: HashMap::new(), + subs: HashMap::new(), + next_id: 1u64, + } } } @@ -81,10 +128,19 @@ impl Actor for WsHub { } + + +#[derive(Message)] +#[rtype(result = "SessionId")] +pub struct Connect { + pub addr: Recipient, +} + impl Handler for WsHub { type Result = SessionId; fn handle(&mut self, msg: Connect, _ctx: &mut Context) -> Self::Result { + // LEVENT 1 let id = self.next_id; self.next_id = self.next_id.wrapping_add(1); self.sessions.insert(id, msg.addr); @@ -93,10 +149,25 @@ impl Handler for WsHub { } } + +#[derive(Message)] +#[rtype(result = "()")] +pub struct Disconnect { + pub session_id: SessionId, +} + impl Handler for WsHub { type Result = (); fn handle(&mut self, msg: Disconnect, _ctx: &mut Context) { + // LEVENT 2 + + // Delete all subscribes + self.subs.retain(|_key, session_ids| { + session_ids.remove(&msg.session_id); + !session_ids.is_empty() + }); + let existed = self.sessions.remove(&msg.session_id).is_some(); if existed { println!("[ws_hub] session disconnected: id={} (total={})", msg.session_id, self.sessions.len()); @@ -106,6 +177,48 @@ impl Handler for WsHub { } } +/* +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: Broadcast, _: &mut Context) { + let Broadcast { text } = msg; + // рассылаем всем; если какая-то сессия отвалилась — игнорируем ошибку + for (_, recp) in self.sessions.iter() { + let _ = recp.do_send(ServerMessage(text.clone())); + } + } +} +*/ + + + +#[derive(Message)] +#[rtype(result = "Vec")] +pub struct SubscribeList { + pub session_id: SessionId, +} + +impl Handler for WsHub { + type Result = MessageResult; + + fn handle(&mut self, msg: SubscribeList, _ctx: &mut Context) -> Self::Result { + // Собираем все ключи, где session_id присутствует + let list = self.subs + .iter() + .filter_map(|(key, sessions)| { + if sessions.contains(&msg.session_id) { + Some(key.clone()) + } else { + None + } + }) + .collect::>(); + + MessageResult(list) + } +} + @@ -124,17 +237,7 @@ impl Handler for WsHub 
{ -impl Handler for WsHub { - type Result = (); - fn handle(&mut self, msg: Broadcast, _: &mut Context) { - let Broadcast { text } = msg; - // рассылаем всем; если какая-то сессия отвалилась — игнорируем ошибку - for (_, recp) in self.sessions.iter() { - let _ = recp.do_send(ServerMessage(text.clone())); - } - } -} impl Handler for WsHub { type Result = usize; @@ -145,6 +248,239 @@ impl Handler for WsHub { } + +// Городим массив подписок + + +#[derive(Message)] +#[rtype(result = "()")] +pub struct Subscribe { + pub session_id: SessionId, + pub key: String, +} + +impl Handler for WsHub { + type Result = (); + fn handle(&mut self, msg: Subscribe, _ctx: &mut Context) { + self.subs.entry(msg.key).or_default().insert(msg.session_id); + } +} + +/* +#[derive(Message)] +#[rtype(result = "bool")] +pub struct Subscribe { + pub session_id: SessionId, + pub key: String, +} + +impl Handler for WsHub { + type Result = MessageResult; + fn handle(&mut self, msg: Subscribe, _ctx: &mut Context) -> Self::Result { + let subs = self.subs.entry(msg.key).or_default(); + let added = subs.insert(msg.session_id); // true + MessageResult(added) + } +} +*/ + +#[derive(Message)] +#[rtype(result = "()")] +pub struct Unsubscribe { + pub session_id: SessionId, + pub key: String, +} + +impl Handler for WsHub { + type Result = (); + fn handle(&mut self, msg: Unsubscribe, _ctx: &mut Context) { + if let Some(set) = self.subs.get_mut(&msg.key) { + set.remove(&msg.session_id); + if set.is_empty() { self.subs.remove(&msg.key); } + } + } +} + +/* +#[derive(Message)] +#[rtype(result = "bool")] +pub struct Unsubscribe { + pub session_id: SessionId, + pub key: String, +} + +impl Handler for WsHub { + type Result = MessageResult; + fn handle(&mut self, msg: Unsubscribe, _ctx: &mut Context) -> Self::Result { + let mut removed = false; + if let Some(set) = self.subs.get_mut(&msg.key) { + removed = set.remove(&msg.session_id); // true + if set.is_empty() { self.subs.remove(&msg.key); } + } + MessageResult(removed) + } +} +*/ + +#[derive(Message)] +#[rtype(result = "()")] +pub struct UnsubscribeAll { + pub session_id: SessionId, +} + +impl Handler for WsHub { + type Result = (); + fn handle(&mut self, msg: UnsubscribeAll, _ctx: &mut Context) { + self.subs.retain(|_key, session_ids| { + session_ids.remove(&msg.session_id); + !session_ids.is_empty() + }); + } +} + +/* +#[derive(Message)] +#[rtype(result = "bool")] +pub struct UnsubscribeAll { + pub session_id: SessionId, +} + +impl Handler for WsHub { + type Result = MessageResult; + fn handle(&mut self, msg: UnsubscribeAll, _ctx: &mut Context) -> Self::Result { + let mut x = false; + self.subs.retain(|_key, session_ids| { + if session_ids.remove(&msg.session_id) { x = true; } + !session_ids.is_empty() + }); + MessageResult(x) + } +} +*/ + + +#[derive(Message)] +#[rtype(result = "HashMap>")] +pub struct TestGetSubs; + +impl Handler for WsHub { + type Result = MessageResult; + + fn handle(&mut self, _msg: TestGetSubs, _ctx: &mut Context) -> Self::Result { + // Преобразуем HashSet → Vec для сериализации + let s: HashMap> = self.subs + .iter() + .map(|(key, ids)| (key.clone(), ids.iter().copied().collect())) + .collect(); + + MessageResult(s) + } +} + + + +// .. 
================================== + +/* +// Сообщение для WsHub, чтобы сделать рассылку по подписчикам +#[derive(Message, Clone)] +#[rtype(result = "()")] +pub struct FanoutKeyEvent { + pub db: u32, + pub key: String, + pub kind: RedisEvent, +} +*/ + +// Собираем список подписчиков по правилу выше +impl WsHub { + fn subscribers_for(&self, key: &str) -> HashSet { + let mut out = HashSet::new(); + for (sub_key, set) in &self.subs { + if subscription_matches(sub_key, key) { + out.extend(set.iter().copied()); + } + } + out + } +} + + + + + + + +// use actix::prelude::*; +// use crate::redis_events::RedisEvent; + +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: RedisEvent, _ctx: &mut Context) { + let targets = self.subscribers_for(&msg.key); + if targets.is_empty() { return; } + + let payload = ServerMessage { event: msg.clone() }; + + for sid in targets { + if let Some(rcpt) = self.sessions.get(&sid) { + let _ = rcpt.do_send(payload.clone()); + } + } + } +} + +/* + +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: FanoutKeyEvent, _ctx: &mut Context) { + let targets = self.subscribers_for(&msg.event.key); + if targets.is_empty() { return; } + + let payload = ServerMessage { event: msg.event.clone() }; + for sid in targets { + if let Some(rcpt) = self.sessions.get(&sid) { + let _ = rcpt.do_send(payload.clone()); + } + } + } +} + + +// Обработчик, который рассылает ServerMessage всем, кто подписан +impl Handler for WsHub { + type Result = (); + + fn handle(&mut self, msg: FanoutKeyEvent, _ctx: &mut Context) { + let targets = self.subscribers_for(&msg.key); + if targets.is_empty() { return; } + + // Сформируй payload под свой тип ServerMessage + // Пример: добавь вариант KeyEvent в твой ServerMessage +// let payload = ServerMessage::KeyEvent { +// db: msg.db, +// key: msg.key.clone(), +// kind: msg.kind.clone(), +// }; + +// let payload = ServerMessage { event: ev.clone() }; + let payload = ServerMessage { event: msg.event.clone() }; + + for sid in targets { + if let Some(rcpt) = self.sessions.get(&sid) { let _ = rcpt.do_send(payload.clone()); } + } + } +} + +*/ + + + + + /* /// stat use actix_web::{web, HttpResponse}; From 78e57cd12c3e4126d006b7df66627a943928db09 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 15 Aug 2025 09:21:11 +0300 Subject: [PATCH 159/636] Fix national comments in code --- .gitignore | 1 + src/handlers_ws.rs | 54 +-------- src/main.rs | 18 +-- src/redis_events.rs | 24 ++-- src/redis_lib.rs | 2 - src/ws_hub.rs | 277 +------------------------------------------- 6 files changed, 23 insertions(+), 353 deletions(-) diff --git a/.gitignore b/.gitignore index dc110e3e5b1..a4911831667 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ Justfile commit.sh /src/GO.sh +/src/GOT.sh GO.sh DROP_DB.sh TODO.txt diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index ea1697b76b8..c6a093f356f 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,4 +1,5 @@ use actix::{prelude::*}; + use crate::ws_hub::{ WsHub, ServerMessage, SessionId, Connect, Disconnect, @@ -6,7 +7,6 @@ use crate::ws_hub::{ SubscribeList, }; -// ============= use redis::aio::MultiplexedConnection; use std::sync::Arc; use tokio::sync::Mutex; @@ -18,9 +18,7 @@ use actix::{ ActorContext, fut, ActorFutureExt, - - Handler, WrapFuture // добавили Handler, WrapFuture - + Handler, WrapFuture }; use actix_web::{web, HttpRequest, HttpResponse, Error}; use actix_web_actors::ws; @@ -110,7 +108,6 @@ pub enum WsCommand { /// Session condition 
#[allow(dead_code)] pub struct WsSession { -// pub subscriptions: HashSet, // новые поля pub redis: Arc>, pub id: SessionId, pub hub: Addr, @@ -122,8 +119,7 @@ impl Actor for WsSession { type Context = ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { - // просим ID у хаба - + // ask ID from Hub let addr = ctx.address(); let recipient = addr.recipient::(); @@ -143,7 +139,7 @@ impl Actor for WsSession { } } }) - .wait(ctx); // дождёмся присвоения ID, чтобы он точно был + .wait(ctx); // waiting for ID } fn stopped(&mut self, _ctx: &mut Self::Context) { @@ -163,38 +159,8 @@ impl actix::Handler for WsSession { ctx.text(json); } } - -/* - -impl actix::Handler for WsSession { - type Result = (); - - fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - ctx.text(msg.0); - } -} - -impl actix::Handler for WsSession { - type Result = (); - fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - // Чтобы это работало, добавь #[derive(serde::Serialize)] в RedisEvent и RedisEventKind - let json = serde_json::to_string(&msg.0) - .unwrap_or_else(|_| "\"serialization error\"".into()); - ctx.text(json); - } -} -*/ // ======= /ping ======== - - - - - - - - - /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { @@ -240,7 +206,6 @@ impl WsSession { ); } - /// When valid JSON recieved for WsSession fn handle_command(&mut self, cmd: WsCommand, ctx: &mut ws::WebsocketContext) { match cmd { @@ -395,15 +360,6 @@ impl WsSession { - - - - - - - - - WsCommand::Sub { key, correlation } => { // LEVENT 3 println!("SUB {}{:?}", key, correlation); @@ -417,7 +373,6 @@ impl WsSession { obj.insert("error".into(), json!("Deprecated symbol in key")); } else { self.hub.do_send(Subscribe { session_id: self.id, key: key.clone() }); - // obj.insert("sub_count".into(), json!( self.subscriptions.len() )); } ctx.text(Value::Object(obj).to_string()); @@ -478,7 +433,6 @@ pub async fn handler( hub: web::Data>, ) -> Result { let session = WsSession { - // subscriptions: HashSet::new(), redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), id: 0, diff --git a/src/main.rs b/src/main.rs index 097437451bf..ffd4171748d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -176,18 +176,13 @@ async fn main() -> anyhow::Result<()> { let redis = std::sync::Arc::new(tokio::sync::Mutex::new(redis)); let redis_data = web::Data::new(redis.clone()); - -// ====================================== - // стартуем хаб + // ====================================== + // starting Hub let hub = WsHub::default().start(); - let hub_data = web::Data::new(hub.clone()); // Data> - - // запускаем логгер редиса в фоне + let hub_data = web::Data::new(hub.clone()); + // starting Logger tokio::spawn(start_redis_logger("redis://127.0.0.1/".to_string(), hub.clone())); - // tokio::spawn(start_redis_logger(redis_url, hub.clone())); - // start_redis_logger("redis://127.0.0.1/").await; - -// ============================================ + // ============================================ let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); @@ -203,8 +198,7 @@ async fn main() -> anyhow::Result<()> { App::new() .app_data(payload_config.clone()) .app_data(redis_data.clone()) - .app_data(hub_data.clone()) // ← ЭТО ОБЯЗАТЕЛЬНО - + .app_data(hub_data.clone()) // Important! 
.wrap(middleware::Logger::default()) .wrap(cors) .service( diff --git a/src/redis_events.rs b/src/redis_events.rs index b8cc3bcef5a..1abaac1f78b 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -23,16 +23,14 @@ use redis::{ #[derive(Debug, Clone, Serialize)] pub enum RedisEventKind { - Set, // создание или перезапись (Redis не различает) - Del, // удаление - Unlink, // удаление (асинхронное) - Expired, // исчез по TTL + Set, // Insert or Update + Del, // Delete + Unlink, // async Delete + Expired, // TTL Delete Other(String), } use actix::Message; -// use serde::Serialize; - #[derive(Debug, Clone, Serialize, Message)] #[rtype(result = "()")] @@ -43,14 +41,7 @@ pub struct RedisEvent { } - - - - - -/// Включаем только нужные нотификации: keyevent + generic + expired → "Egx". -/// Это отключит шум от многих классов, включая keyspace и т.п. -/// Если прав на CONFIG нет — это не фатально. +/// Notifications: keyevent + generic + expired = "Egx" (no keyspace) async fn try_enable_keyspace_notifications(conn: &mut C) -> RedisResult<()> where C: ConnectionLike + Send, @@ -59,8 +50,7 @@ where Ok(()) } -/// Создаём обычный async-коннект, пробуем включить KEA=Egx, -/// затем открываем отдельный PubSub-коннект. +/// Create async-connect, try to enable KEA=Egx, open PubSub-connect pub async fn make_pubsub_with_kea(client: &Client) -> RedisResult { let mut conn = client.get_multiplexed_async_connection().await?; let _ = try_enable_keyspace_notifications(&mut conn).await; @@ -106,7 +96,7 @@ pub fn start_keyevent_listener( Err(e) => { eprintln!("[redis_events] bad payload: {e}"); continue; } }; - // "__keyevent@0__:set" → event="set", db=0; payload = ключ + // "__keyevent@0__:set" → event="set", db=0; payload = key let event = channel.rsplit(':').next().unwrap_or(""); let kind = match event { "set" => RedisEventKind::Set, diff --git a/src/redis_lib.rs b/src/redis_lib.rs index c5a59f55dd5..44d7b3e3d28 100644 --- a/src/redis_lib.rs +++ b/src/redis_lib.rs @@ -40,9 +40,7 @@ pub fn error(code: u16, msg: impl Into) -> redis::RedisResult { Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, "", full ))) } - /// Check for redis-deprecated symbols - pub fn deprecated_symbol(s: &str) -> bool { s.chars().any(|c| matches!( c, diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 41d11cb4c38..8ec468a858a 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -1,56 +1,8 @@ -/* - - -У нас имеется 6 событий: - -1) WS сессия подключилась -2) WS сессия отключилась -3) Подписку добавили -4) Подписку отменили -И информация от Redis: -5) Ключ появился/изменился -6) Ключ удалился - -Итак, у нас есть таблица A, где хранятся имена подписок и номера подписчиков, которые ее оформили: -foo/ 1,2,5,88 -foo/dir/ 3,50 -xz/value 2,3,4,88 - -Соответственно она меняется так: - -[DONE] В случае 2 - обходим таблицу и удаляем подписчика номер 5; если не осталось подписчиков, то удаляем и саму строку подписки. - -[DONE] В случае 3 - если подписка не существовала, то создать; добавить подписчика. - -[DONE] В случае 4 - убрать подписчика, если подписка опустела, то удалить и ее. - -В случаях 5 и 6 мы обходим каждый раз ВСЮ таблицу A, сравниваем с каждой -подпиской, и выясняем список подписчиков. --- Если ключ таблицы === ключу - разослать по всем ID --- Если ключ таблицы заканчивается на "/" и является началом ключа и его остальные символы не содержат "$" - разослать по всем ID - - -Соответственно есть второй вариант - оптимизация этого процесса. 
Хранить -таблицу Б, где перечисляются все существующие ключи, а к каждому -привязаны соответствующие им строки таблицы А. Так что обход таблицы А -происходит только для каждого новопоявившегося ключа. Но поскольку -появление и исчезновение ключа это, я так понимаю, самое частое событие -(пользователь набирает текст - пользователь перестал набирать текст), то -смысла делать таблицу Б я не вижу, она все равно будет точно так же -требовать вычислений и обхода таблицы А каждый раз. - -*/ - use std::collections::HashSet; -// ------ - use actix::prelude::*; use std::collections::HashMap; - - - fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { return true; } if sub_key.ends_with('/') && key.starts_with(sub_key) { @@ -60,47 +12,17 @@ fn subscription_matches(sub_key: &str, key: &str) -> bool { false } - - - /// Message from Hub to Session (JSON-string) -/* -#[derive(Message)] -#[rtype(result = "()")] -pub struct ServerMessage(pub String); -*/ - #[derive(Message, Clone, Debug)] #[rtype(result = "()")] pub struct ServerMessage { pub event: RedisEvent, } - - use crate::redis_events::RedisEvent; -// Redis(RedisEvent), // 👈 новый вариант -/* -#[derive(Message, Clone)] -#[rtype(result = "()")] -pub enum ServerMessage { - Text(String), - KeyEvent { db: u32, key: String, kind: RedisEvent }, -} -*/ - -/* -/// Отправить всем -#[derive(Message)] -#[rtype(result = "()")] -pub struct Broadcast { - pub text: String, -} -*/ - -/// Количество активных сессий +/// Count of active sessions #[derive(Message)] #[rtype(result = "usize")] pub struct Count; @@ -109,7 +31,7 @@ pub type SessionId = u64; pub struct WsHub { sessions: HashMap>, - subs: HashMap>, // Массив моих подписок: key -> {id, id, id ...} + subs: HashMap>, // Subscriptions array: key -> {id, id, id ...} next_id: SessionId, } @@ -177,22 +99,6 @@ impl Handler for WsHub { } } -/* -impl Handler for WsHub { - type Result = (); - - fn handle(&mut self, msg: Broadcast, _: &mut Context) { - let Broadcast { text } = msg; - // рассылаем всем; если какая-то сессия отвалилась — игнорируем ошибку - for (_, recp) in self.sessions.iter() { - let _ = recp.do_send(ServerMessage(text.clone())); - } - } -} -*/ - - - #[derive(Message)] #[rtype(result = "Vec")] pub struct SubscribeList { @@ -203,7 +109,7 @@ impl Handler for WsHub { type Result = MessageResult; fn handle(&mut self, msg: SubscribeList, _ctx: &mut Context) -> Self::Result { - // Собираем все ключи, где session_id присутствует + // Collect all keys with my session_id let list = self.subs .iter() .filter_map(|(key, sessions)| { @@ -218,26 +124,6 @@ impl Handler for WsHub { MessageResult(list) } } - - - - - - - - - - - - - - - - - - - - impl Handler for WsHub { type Result = usize; @@ -247,10 +133,7 @@ impl Handler for WsHub { } } - - -// Городим массив подписок - +// Subscriptions #[derive(Message)] #[rtype(result = "()")] @@ -266,24 +149,6 @@ impl Handler for WsHub { } } -/* -#[derive(Message)] -#[rtype(result = "bool")] -pub struct Subscribe { - pub session_id: SessionId, - pub key: String, -} - -impl Handler for WsHub { - type Result = MessageResult; - fn handle(&mut self, msg: Subscribe, _ctx: &mut Context) -> Self::Result { - let subs = self.subs.entry(msg.key).or_default(); - let added = subs.insert(msg.session_id); // true - MessageResult(added) - } -} -*/ - #[derive(Message)] #[rtype(result = "()")] pub struct Unsubscribe { @@ -301,27 +166,6 @@ impl Handler for WsHub { } } -/* -#[derive(Message)] -#[rtype(result = "bool")] -pub struct Unsubscribe { - pub session_id: 
SessionId, - pub key: String, -} - -impl Handler for WsHub { - type Result = MessageResult; - fn handle(&mut self, msg: Unsubscribe, _ctx: &mut Context) -> Self::Result { - let mut removed = false; - if let Some(set) = self.subs.get_mut(&msg.key) { - removed = set.remove(&msg.session_id); // true - if set.is_empty() { self.subs.remove(&msg.key); } - } - MessageResult(removed) - } -} -*/ - #[derive(Message)] #[rtype(result = "()")] pub struct UnsubscribeAll { @@ -338,27 +182,6 @@ impl Handler for WsHub { } } -/* -#[derive(Message)] -#[rtype(result = "bool")] -pub struct UnsubscribeAll { - pub session_id: SessionId, -} - -impl Handler for WsHub { - type Result = MessageResult; - fn handle(&mut self, msg: UnsubscribeAll, _ctx: &mut Context) -> Self::Result { - let mut x = false; - self.subs.retain(|_key, session_ids| { - if session_ids.remove(&msg.session_id) { x = true; } - !session_ids.is_empty() - }); - MessageResult(x) - } -} -*/ - - #[derive(Message)] #[rtype(result = "HashMap>")] pub struct TestGetSubs; @@ -367,32 +190,15 @@ impl Handler for WsHub { type Result = MessageResult; fn handle(&mut self, _msg: TestGetSubs, _ctx: &mut Context) -> Self::Result { - // Преобразуем HashSet → Vec для сериализации let s: HashMap> = self.subs .iter() .map(|(key, ids)| (key.clone(), ids.iter().copied().collect())) .collect(); - MessageResult(s) } } - - -// .. ================================== - -/* -// Сообщение для WsHub, чтобы сделать рассылку по подписчикам -#[derive(Message, Clone)] -#[rtype(result = "()")] -pub struct FanoutKeyEvent { - pub db: u32, - pub key: String, - pub kind: RedisEvent, -} -*/ - -// Собираем список подписчиков по правилу выше +// List of subscribers impl WsHub { fn subscribers_for(&self, key: &str) -> HashSet { let mut out = HashSet::new(); @@ -405,15 +211,6 @@ impl WsHub { } } - - - - - - -// use actix::prelude::*; -// use crate::redis_events::RedisEvent; - impl Handler for WsHub { type Result = (); @@ -430,67 +227,3 @@ impl Handler for WsHub { } } } - -/* - -impl Handler for WsHub { - type Result = (); - - fn handle(&mut self, msg: FanoutKeyEvent, _ctx: &mut Context) { - let targets = self.subscribers_for(&msg.event.key); - if targets.is_empty() { return; } - - let payload = ServerMessage { event: msg.event.clone() }; - for sid in targets { - if let Some(rcpt) = self.sessions.get(&sid) { - let _ = rcpt.do_send(payload.clone()); - } - } - } -} - - -// Обработчик, который рассылает ServerMessage всем, кто подписан -impl Handler for WsHub { - type Result = (); - - fn handle(&mut self, msg: FanoutKeyEvent, _ctx: &mut Context) { - let targets = self.subscribers_for(&msg.key); - if targets.is_empty() { return; } - - // Сформируй payload под свой тип ServerMessage - // Пример: добавь вариант KeyEvent в твой ServerMessage -// let payload = ServerMessage::KeyEvent { -// db: msg.db, -// key: msg.key.clone(), -// kind: msg.kind.clone(), -// }; - -// let payload = ServerMessage { event: ev.clone() }; - let payload = ServerMessage { event: msg.event.clone() }; - - for sid in targets { - if let Some(rcpt) = self.sessions.get(&sid) { let _ = rcpt.do_send(payload.clone()); } - } - } -} - -*/ - - - - - -/* -/// stat -use actix_web::{web, HttpResponse}; -use actix::Addr; -use serde_json::json; - -// use crate::ws_hub::{WsHub, Count}; - -pub async fn stat(hub: web::Data>) -> HttpResponse { - let count = hub.send(Count).await.unwrap_or(0); - HttpResponse::Ok().json(json!({ "connections": count })) -} -*/ \ No newline at end of file From 0a4f3fe1f159adce860ead0fb5bd5b6b6782ddf6 Mon Sep 17 
00:00:00 2001 From: Leonid Kaganov Date: Mon, 18 Aug 2025 18:35:57 +0300 Subject: [PATCH 160/636] Fix workspace security for WebSocket --- scripts/TEST.html | 27 ++++++---- scripts/TEST_HTTP_API.sh | 4 ++ src/handlers_ws.rs | 83 +++++++++++++++++++++++++++-- src/main.rs | 109 ++++++++++++++++++++++----------------- src/redis_events.rs | 28 ++++------ src/redis_lib.rs | 1 + src/ws_hub.rs | 24 ++++++--- 7 files changed, 190 insertions(+), 86 deletions(-) diff --git a/scripts/TEST.html b/scripts/TEST.html index bfdb3d28420..2070094d09f 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -67,20 +67,20 @@

[scripts/TEST.html markup hunk: the HTML tags were lost in extraction, leaving only bare diff markers; the recoverable content is the page title "WebSocket JSON Tester". The surviving script changes follow.]
const output = document.getElementById("output"); const textarea = document.getElementById("jsonInput"); - let ws = new WebSocket("ws://localhost:8095/ws"); +// const workspace="00000000-0000-0000-0000-000000000001"; + + let token="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhY2NvdW50IjoiYWFhYWFhYWEtYmJiYi1jY2NjLWRkZGQtZWVlZWVlZWVlZWVlIiwiZXh0cmEiOnsic2VydmljZSI6ImFjY291bnQifSwid29ya3NwYWNlIjoiMDAwMDAwMDAtMDAwMC0wMDAwLTAwMDAtMDAwMDAwMDAwMDAxIn0.ZrwMvv_0CjuKeF2CkmHyMK2vHd9Ro4M3kHZcZBrBxZQ"; + let ws = new WebSocket(`ws://localhost:8095/ws?token=${token}`); + +// let ws = new WebSocket(`ws://localhost:8095/ws`); ws.onopen = () => { output.textContent = "✅ WebSocket connected."; diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 050093fcf70..563d21de2f4 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -4,6 +4,10 @@ clear source ./pulse_lib.sh TOKEN=$(./token.sh claims.json) +#echo ${TOKEN} +#exit + + ZP="00000000-0000-0000-0000-000000000001/TESTS" diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index c6a093f356f..ae36e457ac7 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,3 +1,8 @@ + +use uuid::Uuid; + +// ------------------- + use actix::{prelude::*}; use crate::ws_hub::{ @@ -105,12 +110,15 @@ pub enum WsCommand { }, } +use hulyrs::services::jwt::Claims; + /// Session condition #[allow(dead_code)] pub struct WsSession { pub redis: Arc>, pub id: SessionId, pub hub: Addr, + pub claims: Option, } @@ -186,6 +194,21 @@ impl StreamHandler> for WsSession { /// All logic in one impl impl WsSession { + fn ws_error(&self, ctx: &mut ws::WebsocketContext, msg: &str) { + ctx.text(format!(r#"{{"error":"{}"}}"#, msg)); + } + + fn workspace_check_ws(&self, key: &str) -> Result<(), &'static str> { + let claims = self.claims.as_ref().ok_or("Missing auth claims")?; + if claims.is_system() { return Ok(()); } + let jwt_workspace = claims.workspace.as_ref().ok_or("Missing workspace in token")?; + let path_ws = key.split('/').next().ok_or("Invalid key: missing workspace")?; + if path_ws.is_empty() { return Err("Invalid key: missing workspace"); } + let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| "Invalid workspace UUID in key")?; + if jwt_workspace != &path_ws_uuid { return Err("Workspace mismatch"); } + Ok(()) + } + fn wait_and_send( &mut self, ctx: &mut ws::WebsocketContext, @@ -214,6 +237,9 @@ impl WsSession { println!("PUT {} = {} (expires_at: {:?}) (ttl: {:?})", key, data, expires_at, ttl); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let redis = self.redis.clone(); let mut base = JsonMap::new(); @@ -272,6 +298,9 @@ impl WsSession { WsCommand::Delete { key, correlation, if_match } => { println!("DELETE {}", key); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let redis = self.redis.clone(); let mut base = JsonMap::new(); @@ -302,6 +331,9 @@ impl WsSession { WsCommand::Get { key, correlation } => { println!("GET {}{:?}", key, correlation); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let redis = self.redis.clone(); let mut base = JsonMap::new(); @@ -334,6 +366,9 @@ impl WsSession { WsCommand::List { key, correlation } => { println!("LIST {:?}{:?}", key, correlation); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let redis = self.redis.clone(); let mut base = JsonMap::new(); @@ -357,13 +392,13 @@ impl 
WsSession { } - - - WsCommand::Sub { key, correlation } => { // LEVENT 3 println!("SUB {}{:?}", key, correlation); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let mut obj = JsonMap::new(); obj.insert("action".into(), json!("sub")); obj.insert("key".into(), json!(key)); @@ -381,6 +416,10 @@ impl WsSession { WsCommand::Unsub { key, correlation } => { // LEVENT 4 println!("UNSUB {}{:?}", key, correlation); + + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let mut obj = JsonMap::new(); obj.insert("action".into(), json!("unsub")); obj.insert("key".into(), json!(key)); @@ -402,6 +441,8 @@ impl WsSession { WsCommand::Sublist { correlation } => { println!("SUBLIST {:?}", correlation); + // w/o Check workspace! + let mut base = JsonMap::new(); base.insert("action".into(), json!("sublist")); if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } @@ -425,18 +466,50 @@ impl WsSession { } +// ---- auth + +use actix_web::{HttpMessage,error}; +use url::form_urlencoded; +use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm}; +use crate::CONFIG; pub async fn handler( req: HttpRequest, stream: web::Payload, redis: web::Data>>, - hub: web::Data>, + hub: web::Data>, ) -> Result { + + let token_opt = req.uri().query().and_then(|q| { + form_urlencoded::parse(q.as_bytes()) + .find(|(k, _)| k == "token") + .map(|(_, v)| v.into_owned()) + }); + + let claims = match token_opt { + Some(t) if !t.is_empty() => { + + let mut validation = Validation::new(Algorithm::HS256); + validation.required_spec_claims = HashSet::new(); // no: exp/iat/nbf + + let c = decode::(&t, &DecodingKey::from_secret(CONFIG.token_secret.as_bytes()), &validation ) + .map(|td| td.claims) + .map_err(|_e| error::ErrorUnauthorized("Invalid token"))?; + + + Some(c) + } + _ => None, + }; + + // println!("claims={:?}",&claims); + let session = WsSession { redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), id: 0, + claims, }; + ws::start(session, &req, stream) } - diff --git a/src/main.rs b/src/main.rs index ffd4171748d..122289a7152 100644 --- a/src/main.rs +++ b/src/main.rs @@ -86,17 +86,6 @@ async fn interceptor( - - - - - - - - - - -// NEW // ===================================================================================== // ===================================================================================== // ===================================================================================== @@ -105,7 +94,7 @@ async fn interceptor( // ===================================================================================== // ===================================================================================== // ===================================================================================== -use crate::redis_events::RedisEventKind::*; // Set, Del, Unlink, Expired, Other +use crate::redis_events::RedisEventAction::*; // Set, Del, Unlink, Expired, Other pub async fn start_redis_logger(redis_url: String, hub: Addr) { let client = match redis::Client::open(redis_url) { @@ -116,54 +105,29 @@ pub async fn start_redis_logger(redis_url: String, hub: Addr) { match crate::redis_events::make_pubsub_with_kea(&client).await { Ok(pubsub) => { let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); - - // Просто читаем события и шлём их в хаб. 
while let Some(ev) = rx.recv().await { - match ev.kind { + + match ev.action { Set => println!("[redis] db{} SET {}", ev.db, ev.key), Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), Other(ref k) => println!("[redis] db{} {} {}", ev.db, k, ev.key), } - hub.do_send(ev.clone()); // RedisEvent помечен #[derive(Message)] + + hub.do_send(ev.clone()); } } Err(e) => eprintln!("[redis] pubsub init error: {e}"), } } -/* - -async fn start_redis_logger(redis_url: &str) { - let client = match redis::Client::open(redis_url) { - Ok(c) => c, - Err(e) => { eprintln!("[redis] bad url: {e}"); return; } - }; - match crate::redis_events::make_pubsub_with_kea(&client).await { - Ok(pubsub) => { - let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); - tokio::spawn(async move { - while let Some(ev) = rx.recv().await { - // LEVENT 5,6 - match ev.kind { - Set => println!("[redis] db{} SET {}", ev.db, ev.key), - Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), - Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), - Other(kind) => println!("[redis] db{} {} {}", ev.db, kind, ev.key), - } +// use actix_web::http::header; +// use actix_web::http::header::HeaderValue; +// use actix_web::body::BoxBody; +// use url::form_urlencoded; - // TODO !!!!!!!!!!!!!!! - hub.do_send(ev.clone()); // ev: RedisEvent - - } - }); - } - Err(e) => eprintln!("[redis] pubsub init error: {e}"), - } -} -*/ // #[tokio::main] #[actix_web::main] @@ -176,13 +140,11 @@ async fn main() -> anyhow::Result<()> { let redis = std::sync::Arc::new(tokio::sync::Mutex::new(redis)); let redis_data = web::Data::new(redis.clone()); - // ====================================== // starting Hub let hub = WsHub::default().start(); let hub_data = web::Data::new(hub.clone()); // starting Logger tokio::spawn(start_redis_logger("redis://127.0.0.1/".to_string(), hub.clone())); - // ============================================ let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); @@ -225,6 +187,59 @@ async fn main() -> anyhow::Result<()> { })) .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket + + +/* + +.service( + web::resource("/ws") + + +.wrap(middleware::from_fn(|mut req: ServiceRequest, next: Next| async move { + // Уже есть Authorization? + let has_auth = req.headers().contains_key(header::AUTHORIZATION); + + if !has_auth { + // ?token=... + if let Some(token) = form_urlencoded::parse(req.query_string().as_bytes()) + .find(|(k, _)| k == "token") + .map(|(_, v)| v.into_owned()) + { + if !token.is_empty() { + let value = format!("Bearer {}", token); + req.headers_mut().insert( + header::AUTHORIZATION, + HeaderValue::from_str(&value) + .map_err(|_| actix_web::error::ErrorBadRequest("Invalid token header"))?, + ); + } + } + } + + next.call(req).await +})) + // затем твой interceptor: +// .wrap(middleware::from_fn(interceptor)) + .route(web::get().to(handlers_ws::handler)) + +) + +*/ + + + + + + + + + + + + + + + }) .bind(socket)? 
.run(); diff --git a/src/redis_events.rs b/src/redis_events.rs index 1abaac1f78b..88a89ca8591 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -1,11 +1,3 @@ -/* -TODO: Со * - -Сперва по точному совпадению -Потом перебором по * -*/ - - use tokio::sync::mpsc; use tokio::task::JoinHandle; use tokio_stream::StreamExt; @@ -22,7 +14,7 @@ use redis::{ }; #[derive(Debug, Clone, Serialize)] -pub enum RedisEventKind { +pub enum RedisEventAction { Set, // Insert or Update Del, // Delete Unlink, // async Delete @@ -37,7 +29,8 @@ use actix::Message; pub struct RedisEvent { pub db: u32, pub key: String, - pub kind: RedisEventKind, +// pub value: String, + pub action: RedisEventAction, } @@ -86,6 +79,7 @@ pub fn start_keyevent_listener( } let mut stream = pubsub.on_message(); + while let Some(msg) = stream.next().await { let channel = match msg.get_channel::() { Ok(c) => c, @@ -98,12 +92,12 @@ pub fn start_keyevent_listener( // "__keyevent@0__:set" → event="set", db=0; payload = key let event = channel.rsplit(':').next().unwrap_or(""); - let kind = match event { - "set" => RedisEventKind::Set, - "del" => RedisEventKind::Del, - "unlink" => RedisEventKind::Unlink, - "expired" => RedisEventKind::Expired, - other => RedisEventKind::Other(other.to_string()), + let action = match event { + "set" => RedisEventAction::Set, + "del" => RedisEventAction::Del, + "unlink" => RedisEventAction::Unlink, + "expired" => RedisEventAction::Expired, + other => RedisEventAction::Other(other.to_string()), }; let db = channel.find('@') @@ -112,7 +106,7 @@ pub fn start_keyevent_listener( .and_then(|s| s.parse::().ok()) .unwrap_or(0); - let ev = RedisEvent { db, key: payload.clone(), kind }; + let ev = RedisEvent { db, key: payload.clone(), action }; if tx.send(ev).is_err() { break; } // closed } diff --git a/src/redis_lib.rs b/src/redis_lib.rs index 44d7b3e3d28..eb587706f6e 100644 --- a/src/redis_lib.rs +++ b/src/redis_lib.rs @@ -136,6 +136,7 @@ pub async fn redis_read( })) } + /// TTL sec /// redis_save(&mut conn, "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; /// diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 8ec468a858a..bc8f5b94e8e 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -14,14 +14,14 @@ fn subscription_matches(sub_key: &str, key: &str) -> bool { /// Message from Hub to Session (JSON-string) +use crate::redis_events::RedisEvent; + #[derive(Message, Clone, Debug)] #[rtype(result = "()")] pub struct ServerMessage { pub event: RedisEvent, } -use crate::redis_events::RedisEvent; - /// Count of active sessions #[derive(Message)] #[rtype(result = "usize")] @@ -35,6 +35,7 @@ pub struct WsHub { next_id: SessionId, } +/// Init WsHub impl Default for WsHub { fn default() -> Self { Self { @@ -51,7 +52,7 @@ impl Actor for WsHub { - +/// Connect #[derive(Message)] #[rtype(result = "SessionId")] pub struct Connect { @@ -71,7 +72,7 @@ impl Handler for WsHub { } } - +/// Disconnect #[derive(Message)] #[rtype(result = "()")] pub struct Disconnect { @@ -99,6 +100,7 @@ impl Handler for WsHub { } } +/// SubscribeList #[derive(Message)] #[rtype(result = "Vec")] pub struct SubscribeList { @@ -125,6 +127,7 @@ impl Handler for WsHub { } } +/// Count of IDs impl Handler for WsHub { type Result = usize; @@ -133,8 +136,7 @@ impl Handler for WsHub { } } -// Subscriptions - +/// Subscribe #[derive(Message)] #[rtype(result = "()")] pub struct Subscribe { @@ -149,6 +151,7 @@ impl Handler for WsHub { } } +/// Unsubscribe #[derive(Message)] #[rtype(result = "()")] pub struct Unsubscribe { @@ -182,6 +185,9 @@ impl 
Handler for WsHub { } } + + + #[derive(Message)] #[rtype(result = "HashMap>")] pub struct TestGetSubs; @@ -211,6 +217,7 @@ impl WsHub { } } +/// Send Messages impl Handler for WsHub { type Result = (); @@ -218,6 +225,11 @@ impl Handler for WsHub { let targets = self.subscribers_for(&msg.key); if targets.is_empty() { return; } + // TODO: redis_read + // conn: &mut MultiplexedConnection, + let value = redis::cmd("GET").arg(&msg.key).query_async(conn).await?; + + let payload = ServerMessage { event: msg.clone() }; for sid in targets { From f7d1b98df46f277396ba9b22b084adf28203ac84 Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 18 Aug 2025 19:59:32 +0400 Subject: [PATCH 161/636] Fix contexts sql (#94) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/db/notification.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cockroach/src/db/notification.ts b/packages/cockroach/src/db/notification.ts index 649ed646a8f..3bec891b565 100644 --- a/packages/cockroach/src/db/notification.ts +++ b/packages/cockroach/src/db/notification.ts @@ -463,8 +463,8 @@ export class NotificationsDb extends BaseDb { 'message_creator', m.creator,` : '' } - 'patches', pj.patches, - 'attachments', aj.attachments + 'message_patches', pj.patches, + 'message_attachments', aj.attachments ) ORDER BY n.created ${notificationOrder} ), '[]'::jsonb From dc68a27f5fc40874b613026f735972802c01f258 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Tue, 19 Aug 2025 03:21:28 +0300 Subject: [PATCH 162/636] Getting value from redis before event message --- src/handlers_ws.rs | 37 +++++++------------ src/main.rs | 12 ++++-- src/ws_hub.rs | 91 ++++++++++++++++++++++++++++++---------------- 3 files changed, 82 insertions(+), 58 deletions(-) diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index ae36e457ac7..88dccb75164 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,8 +1,4 @@ - use uuid::Uuid; - -// ------------------- - use actix::{prelude::*}; use crate::ws_hub::{ @@ -131,7 +127,7 @@ impl Actor for WsSession { let addr = ctx.address(); let recipient = addr.recipient::(); - println!("WebSocket connected"); + // println!("WebSocket connected"); self.hub .send(Connect { addr: recipient }) .into_actor(self) @@ -139,10 +135,10 @@ impl Actor for WsSession { match res { Ok(id) => { act.id = id; - println!("[ws_session] got id={id}"); + tracing::info!("WebSocket connected: {id}"); } Err(e) => { - eprintln!("[ws_session] connect to hub failed: {e}"); + tracing::error!("WebSocket failed connect to hub: {e}"); _ctx.stop(); } } @@ -152,29 +148,26 @@ impl Actor for WsSession { fn stopped(&mut self, _ctx: &mut Self::Context) { if self.id != 0 { self.hub.do_send(Disconnect { session_id: self.id }); } - println!("WebSocket disconnected"); + tracing::info!("WebSocket disconnected: {:?}",&self.id); } } -// ======= ping ======== impl actix::Handler for WsSession { type Result = (); fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - let json = serde_json::to_string(&msg.event) - .unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); + let json = serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); ctx.text(json); } } -// ======= /ping ======== /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { match msg { Ok(ws::Message::Text(text)) => { - println!("Message: {}", text); + // println!("Message: {}", text); match 
serde_json::from_str::(&text) { Ok(cmd) => self.handle_command(cmd, ctx), Err(err) => ctx.text(format!("Invalid JSON: {}", err)), @@ -182,7 +175,7 @@ impl StreamHandler> for WsSession { } Ok(ws::Message::Ping(msg)) => ctx.pong(&msg), Ok(ws::Message::Close(reason)) => { - println!("Closing WS: {:?}", reason); + // println!("Closing WS: {:?}", reason); ctx.close(reason); ctx.stop(); } @@ -235,7 +228,7 @@ impl WsSession { WsCommand::Put { key, data, expires_at, ttl, if_match, if_none_match, correlation } => { - println!("PUT {} = {} (expires_at: {:?}) (ttl: {:?})", key, data, expires_at, ttl); + tracing::info!("PUT {} = {} (expires_at: {:?}) (ttl: {:?}) correlation: {:?}", &key, &data, &expires_at, &ttl, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -296,7 +289,7 @@ impl WsSession { WsCommand::Delete { key, correlation, if_match } => { - println!("DELETE {}", key); + tracing::info!("DELETE {} correlation:{:?}", &key, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -329,7 +322,7 @@ impl WsSession { } WsCommand::Get { key, correlation } => { - println!("GET {}{:?}", key, correlation); + tracing::info!("GET {} correlation:{:?}", &key, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -364,7 +357,7 @@ impl WsSession { } WsCommand::List { key, correlation } => { - println!("LIST {:?}{:?}", key, correlation); + tracing::info!("LIST {:?} correlation: {:?}", &key, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -394,7 +387,7 @@ impl WsSession { WsCommand::Sub { key, correlation } => { // LEVENT 3 - println!("SUB {}{:?}", key, correlation); + tracing::info!("SUB {} correlation: {:?}", &key, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -415,7 +408,7 @@ impl WsSession { WsCommand::Unsub { key, correlation } => { // LEVENT 4 - println!("UNSUB {}{:?}", key, correlation); + tracing::info!("UNSUB {} correlation: {:?}", &key, &correlation); // Check workspace if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } @@ -439,7 +432,7 @@ impl WsSession { } WsCommand::Sublist { correlation } => { - println!("SUBLIST {:?}", correlation); + tracing::info!("SUBLIST correlation: {:?}", &correlation); // w/o Check workspace! 
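The replies built above echo the request's "action", "key" and "correlation" fields and add either a "response" payload or an "error" string (a later refactoring commit in this series renames "response" to "result"). A minimal client-side sketch, not part of this patch, of how that correlation echo can be used to pair replies with pending requests; it assumes only the reply keys visible in handle_command, and the key and correlation values are placeholders:

use serde_json::Value;
use std::collections::HashMap;

// Sketch only: route an incoming WS frame by its echoed "correlation" value.
// Reply shape assumed from handle_command above, e.g.
// {"action":"get","key":"...","correlation":"req-1","response":{...}}
// or the same base fields with an "error" string instead.
fn route_reply(pending: &mut HashMap<String, String>, raw: &str) {
    let reply: Value = match serde_json::from_str(raw) {
        Ok(v) => v,
        Err(e) => return eprintln!("bad frame: {e}"),
    };
    let corr = reply
        .get("correlation")
        .and_then(Value::as_str)
        .unwrap_or("");
    match pending.remove(corr) {
        Some(action) if reply.get("error").is_none() => {
            println!("{action} ({corr}) ok: {:?}", reply.get("response"))
        }
        Some(action) => println!("{action} ({corr}) failed: {:?}", reply.get("error")),
        None => println!("unsolicited or push frame: {raw}"), // e.g. hub notifications
    }
}

fn main() {
    let mut pending = HashMap::new();
    pending.insert("req-1".to_string(), "get".to_string());
    route_reply(
        &mut pending,
        r#"{"action":"get","key":"ws/TESTS","correlation":"req-1","response":{"data":"Value_1"}}"#,
    );
}
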
@@ -502,8 +495,6 @@ pub async fn handler( _ => None, }; - // println!("claims={:?}",&claims); - let session = WsSession { redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), diff --git a/src/main.rs b/src/main.rs index 122289a7152..db6be5c8467 100644 --- a/src/main.rs +++ b/src/main.rs @@ -75,12 +75,17 @@ async fn interceptor( request: ServiceRequest, next: Next, ) -> Result, Error> { + let secret = SecretString::new(CONFIG.token_secret.clone().into_boxed_str()); let claims = request.extract_claims(&secret)?; + // TODO: сделать это здесь + request.extensions_mut().insert(claims.to_owned()); + // TODO потом исправить hulyrs: extract_claims + next.call(request).await } @@ -141,7 +146,9 @@ async fn main() -> anyhow::Result<()> { let redis_data = web::Data::new(redis.clone()); // starting Hub - let hub = WsHub::default().start(); + // let hub = WsHub::default().start(); + let hub = WsHub::new(redis.clone()).start(); + let hub_data = web::Data::new(hub.clone()); // starting Logger tokio::spawn(start_redis_logger("redis://127.0.0.1/".to_string(), hub.clone())); @@ -160,7 +167,7 @@ async fn main() -> anyhow::Result<()> { App::new() .app_data(payload_config.clone()) .app_data(redis_data.clone()) - .app_data(hub_data.clone()) // Important! + .app_data(hub_data.clone()) .wrap(middleware::Logger::default()) .wrap(cors) .service( @@ -188,7 +195,6 @@ async fn main() -> anyhow::Result<()> { .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket - /* .service( diff --git a/src/ws_hub.rs b/src/ws_hub.rs index bc8f5b94e8e..e9611ac5054 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -1,7 +1,4 @@ -use std::collections::HashSet; - -use actix::prelude::*; -use std::collections::HashMap; +use std::collections::{ HashMap, HashSet }; fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { return true; } @@ -12,14 +9,16 @@ fn subscription_matches(sub_key: &str, key: &str) -> bool { false } -/// Message from Hub to Session (JSON-string) +use crate::redis_events::{ RedisEvent, RedisEventAction }; +use serde::Serialize; -use crate::redis_events::RedisEvent; - -#[derive(Message, Clone, Debug)] +#[derive(Message, Clone, Serialize, Debug)] #[rtype(result = "()")] pub struct ServerMessage { - pub event: RedisEvent, + #[serde(flatten)] + pub event: RedisEvent, // поля RedisEvent «вливаются» в корень JSON + #[serde(skip_serializing_if = "Option::is_none")] + pub value: Option, // будет только при Set } /// Count of active sessions @@ -33,16 +32,17 @@ pub struct WsHub { sessions: HashMap>, subs: HashMap>, // Subscriptions array: key -> {id, id, id ...} next_id: SessionId, + redis: Arc>, } -/// Init WsHub -impl Default for WsHub { - fn default() -> Self { +impl WsHub { + pub fn new(redis: Arc>) -> Self { Self { - sessions: HashMap::new(), + sessions: HashMap::new(), subs: HashMap::new(), - next_id: 1u64, - } + next_id: 1, + redis, + } } } @@ -50,8 +50,6 @@ impl Actor for WsHub { type Context = Context; } - - /// Connect #[derive(Message)] #[rtype(result = "SessionId")] @@ -67,7 +65,7 @@ impl Handler for WsHub { let id = self.next_id; self.next_id = self.next_id.wrapping_add(1); self.sessions.insert(id, msg.addr); - println!("[ws_hub] session connected: id={id} (total={})", self.sessions.len()); + // tracing::info!("session connected: id={id} (total={})", self.sessions.len()); id } } @@ -93,9 +91,9 @@ impl Handler for WsHub { let existed = self.sessions.remove(&msg.session_id).is_some(); if existed { - println!("[ws_hub] session disconnected: id={} (total={})", msg.session_id, 
self.sessions.len()); + // tracing::info!("session disconnected: id={} (total={})", msg.session_id, self.sessions.len()); } else { - println!("[ws_hub] disconnect for unknown id={}", msg.session_id); + tracing::warn!("disconnect for unknown id={}", msg.session_id); } } } @@ -217,25 +215,54 @@ impl WsHub { } } -/// Send Messages +use actix::prelude::*; +use actix::ActorFutureExt; +use actix::fut::ready; +use std::sync::Arc; +use tokio::sync::Mutex; +use redis::aio::MultiplexedConnection; + impl Handler for WsHub { - type Result = (); + type Result = ResponseActFuture; - fn handle(&mut self, msg: RedisEvent, _ctx: &mut Context) { + fn handle(&mut self, msg: RedisEvent, _ctx: &mut Context) -> Self::Result { let targets = self.subscribers_for(&msg.key); - if targets.is_empty() { return; } + if targets.is_empty() { + return Box::pin(actix::fut::ready(()).into_actor(self)); + } - // TODO: redis_read - // conn: &mut MultiplexedConnection, - let value = redis::cmd("GET").arg(&msg.key).query_async(conn).await?; + let recipients: Vec> = targets.into_iter() + .filter_map(|sid| self.sessions.get(&sid).cloned()) + .collect(); + let redis = self.redis.clone(); + let event = msg.clone(); + let need_get = matches!(msg.action, RedisEventAction::Set); + + Box::pin( + async move { + let value = if need_get { + + let mut conn = redis.lock().await; + match redis::cmd("GET").arg(&event.key).query_async::>(&mut *conn).await + { + Ok(v) => v, + Err(e) => { + tracing::warn!("redis GET {} failed: {}", &event.key, e); + None + } + } + } else { + None + }; - let payload = ServerMessage { event: msg.clone() }; + let payload = ServerMessage { event, value }; - for sid in targets { - if let Some(rcpt) = self.sessions.get(&sid) { - let _ = rcpt.do_send(payload.clone()); + for rcpt in recipients { + let _ = rcpt.do_send(payload.clone()); + } } - } + .into_actor(self) + ) } } From df44eab1e391d06f1cb3d64e2b3b3d6a38bbe14e Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Tue, 19 Aug 2025 13:41:22 +0300 Subject: [PATCH 163/636] Refactoring code --- scripts/TEST.html | 2 + scripts/TEST_HTTP_API.sh | 113 +------ src/handlers_http.rs | 169 ++++++---- src/handlers_ws.rs | 646 ++++++++++++++++++++++----------------- src/main.rs | 170 ++++------- src/redis_events.rs | 96 +++--- src/redis_lib.rs | 244 ++++++++++----- src/workspace_owner.rs | 73 +++-- src/ws_hub.rs | 59 ++-- 9 files changed, 853 insertions(+), 719 deletions(-) diff --git a/scripts/TEST.html b/scripts/TEST.html index 2070094d09f..8ecf37a3c88 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -99,6 +99,7 @@

WebSocket JSON Tester

// let ws = new WebSocket(`ws://localhost:8095/ws`); + ws.onopen = () => { output.textContent = "✅ WebSocket connected."; }; @@ -122,6 +123,7 @@

WebSocket JSON Tester

} catch (e) { output.textContent += "\n\n⚠️ Invalid JSON:\n" + e.message; } + } function place(event) { textarea.value = (event || window.event).target.getAttribute("data"); sendMessage(); } diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 563d21de2f4..b04a1fa74fb 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -11,6 +11,19 @@ TOKEN=$(./token.sh claims.json) ZP="00000000-0000-0000-0000-000000000001/TESTS" +echo "--------- authorization_test ----------" +TOKEN="" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims_system.json) + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims_wrong_ws.json) + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims.json) + put "00000000-0000-0000-0000-000000000002/TESTS" "Value_1" "HULY-TTL: 2" +exit + + + echo "--------- if-match ----------" put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" @@ -23,106 +36,6 @@ echo "--------- if-match ----------" get "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/" -exit -Key - - - - - -Key is a string that consists of one or multiple segments separated by ‘/’. Example: foo/bar/baz. - - - - - -Segment may not contain special characters (‘$’, ‘*’, ‘?’) - - - -Key may not end with ‘/’ - - - -Segment may not be empty - - - -Key segment may be private (prefixed with ‘$’) - - - -Query - - - - - -May not contain special characters (‘*’, ‘?’) - - - -It is possible to use prefix, for listings / subscriptions (prefix ends with segment separator ‘/’) - - - - - -GET/SUBSCRIBE/.. a/b → single key - - - -GET/SUBSCRIBE/.. a/b/c/ → multiple - - - -If multiple - - - - - -select all keys starting with prefix - - - -skip keys, containing private segments to the right from the prefix - - - -example - - - - - - 1. /a/b/$c/$d, 2. /a/b/c, 3. /a/b/$c, 4. /a/b/$c/$d/e - - - -/ → [2] - - - -/a/b/ → [2] - - - -/a/b/$c/ → [3] - - - -/a/b/$c/$d/ → [4] - - - -/a/b/$c/$d → (1) - - - - - - exit diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 38463dc9343..16ff8ace0e2 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -1,55 +1,61 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +use crate::workspace_owner::workspace_check; +use anyhow::anyhow; use redis::aio::MultiplexedConnection; -use std::sync::Arc; use std::collections::HashMap; -use tokio::sync::Mutex; -use anyhow::anyhow; +use std::sync::Arc; use std::time::{SystemTime, UNIX_EPOCH}; +use tokio::sync::Mutex; use tracing::{error, trace}; use uuid::Uuid; -use crate::workspace_owner::workspace_check; use crate::redis_lib::{ - Ttl, SaveMode, - RedisArray, - redis_save, - redis_read, - redis_delete, - redis_list, + RedisArray, SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save, }; use actix_web::{ - HttpRequest, HttpResponse, error, Error, + Error, HttpRequest, HttpResponse, error, web::{self, Data, Json, Query}, }; - pub fn map_handler_error(err: impl std::fmt::Display) -> Error { - let msg = err.to_string(); if let Some(detail) = msg.split(" - ExtensionError: ").nth(1) { if let Some((code, text)) = detail.split_once(": ") { - let text = format!("{} {}", code, text); + let text = format!("{} {}", code, text); return match code { "400" => actix_web::error::ErrorBadRequest(text), "404" => actix_web::error::ErrorNotFound(text), "412" => actix_web::error::ErrorPreconditionFailed(text), "500" => actix_web::error::ErrorInternalServerError(text), - _ => actix_web::error::ErrorInternalServerError("unexpected error"), + _ => actix_web::error::ErrorInternalServerError("unexpected error"), }; } } actix_web::error::ErrorInternalServerError("internal error") } - /// list pub async fn list( req: HttpRequest, path: web::Path, redis: web::Data>>, ) -> Result { - workspace_check(&req)?; // Check workspace let key = path.into_inner(); @@ -57,24 +63,22 @@ pub async fn list( trace!(key, "list request"); async move || -> anyhow::Result { - let mut conn = redis.lock().await; - let entries = redis_list(&mut *conn, &key).await?; + let entries = redis_list(&mut *conn, &key).await?; Ok(HttpResponse::Ok().json(entries)) - - }().await.map_err(map_handler_error) + }() + .await + .map_err(map_handler_error) } - /// get pub async fn get( req: HttpRequest, path: web::Path, redis: web::Data>>, ) -> Result { - workspace_check(&req)?; // Check workspace let key = path.into_inner(); @@ -82,21 +86,21 @@ pub async fn get( // trace!(key, "get request"); async move || -> anyhow::Result { - let mut conn = redis.lock().await; - Ok( - redis_read(&mut *conn, &key).await? - .map(|entry| HttpResponse::Ok() + Ok(redis_read(&mut *conn, &key) + .await? 
+ .map(|entry| { + HttpResponse::Ok() .insert_header(("ETag", &*entry.etag)) - .json(entry)) - .unwrap_or_else(|| HttpResponse::NotFound().body("empty")) - ) - - }().await.map_err(map_handler_error) + .json(entry) + }) + .unwrap_or_else(|| HttpResponse::NotFound().body("empty"))) + }() + .await + .map_err(map_handler_error) } - /// put pub async fn put( req: HttpRequest, @@ -104,54 +108,72 @@ pub async fn put( body: web::Bytes, redis: web::Data>>, ) -> Result { - workspace_check(&req)?; // Check workspace let key: String = path.into_inner(); async move || -> anyhow::Result { - - if !req.query_string().is_empty() { return Err(anyhow!("Query parameters are not allowed")); } + if !req.query_string().is_empty() { + return Err(anyhow!("Query parameters are not allowed")); + } let mut conn = redis.lock().await; - // TTL logic - let mut ttl = None; - if let Some(x) = req.headers().get("HULY-TTL") { - let s = x.to_str().map_err(|_| anyhow!("Invalid HULY-TTL header"))?; - let secs = s.parse::().map_err(|_| anyhow!("Invalid TTL value in HULY-TTL header"))?; - ttl = Some(Ttl::Sec(secs)); - } else if let Some(x) = req.headers().get("HULY-EXPIRE-AT") { - let s = x.to_str().map_err(|_| anyhow!("Invalid HULY-EXPIRE-AT header"))?; - let ts = s.parse::().map_err(|_| anyhow!("Invalid EXPIRE-AT value in HULY-EXPIRE-AT header"))?; - ttl = Some(Ttl::At(ts)); - } - - // MODE logic - let mut mode = Some(SaveMode::Upsert); - if let Some(h) = req.headers().get("If-Match") { // `If-Match: *` - update only if the key exists - let s = h.to_str().map_err(|_| anyhow!("Invalid If-Match header"))?; - if s == "*" { mode = Some(SaveMode::Update); } // `If-Match: *` — update only if exist - else { mode = Some(SaveMode::Equal(s.to_string())); } // `If-Match: ` — update only if current - } else if let Some(h) = req.headers().get("If-None-Match") { // `If-None-Match: *` — insert only if does not exist - let s = h.to_str().map_err(|_| anyhow!("Invalid If-None-Match header"))?; - if s == "*" { mode = Some(SaveMode::Insert); } else { return Err(anyhow!("If-None-Match must be '*'")); } - } + // TTL logic + let mut ttl = None; + if let Some(x) = req.headers().get("HULY-TTL") { + let s = x.to_str().map_err(|_| anyhow!("Invalid HULY-TTL header"))?; + let secs = s + .parse::() + .map_err(|_| anyhow!("Invalid TTL value in HULY-TTL header"))?; + ttl = Some(Ttl::Sec(secs)); + } else if let Some(x) = req.headers().get("HULY-EXPIRE-AT") { + let s = x + .to_str() + .map_err(|_| anyhow!("Invalid HULY-EXPIRE-AT header"))?; + let ts = s + .parse::() + .map_err(|_| anyhow!("Invalid EXPIRE-AT value in HULY-EXPIRE-AT header"))?; + ttl = Some(Ttl::At(ts)); + } + + // MODE logic + let mut mode = Some(SaveMode::Upsert); + if let Some(h) = req.headers().get("If-Match") { + // `If-Match: *` - update only if the key exists + let s = h.to_str().map_err(|_| anyhow!("Invalid If-Match header"))?; + if s == "*" { + mode = Some(SaveMode::Update); + } + // `If-Match: *` — update only if exist + else { + mode = Some(SaveMode::Equal(s.to_string())); + } // `If-Match: ` — update only if current + } else if let Some(h) = req.headers().get("If-None-Match") { + // `If-None-Match: *` — insert only if does not exist + let s = h + .to_str() + .map_err(|_| anyhow!("Invalid If-None-Match header"))?; + if s == "*" { + mode = Some(SaveMode::Insert); + } else { + return Err(anyhow!("If-None-Match must be '*'")); + } + } redis_save(&mut *conn, &key, &body[..], ttl, mode).await?; - return Ok(HttpResponse::Ok().body("DONE")); - - }().await.map_err(map_handler_error) + return 
Ok(HttpResponse::Ok().body("DONE")); + }() + .await + .map_err(map_handler_error) } - /// delete pub async fn delete( req: HttpRequest, path: web::Path, redis: web::Data>>, ) -> Result { - workspace_check(&req)?; // Check workspace let key: String = path.into_inner(); @@ -161,7 +183,21 @@ pub async fn delete( async move || -> anyhow::Result { let mut conn = redis.lock().await; - let deleted = redis_delete(&mut *conn, &key).await?; + // MODE logic + let mut mode = Some(SaveMode::Upsert); + if let Some(h) = req.headers().get("If-Match") { + // `If-Match: *` - delete only if the key exists + let s = h.to_str().map_err(|_| anyhow!("Invalid If-Match header"))?; + if s == "*" { + mode = Some(SaveMode::Update); + } + // `If-Match: *` — return error if not exist + else { + mode = Some(SaveMode::Equal(s.to_string())); + } // `If-Match: ` — delete only if current + } + + let deleted = redis_delete(&mut *conn, &key, mode).await?; let response = match deleted { true => HttpResponse::NoContent().finish(), @@ -169,6 +205,7 @@ pub async fn delete( }; Ok(response) - }().await.map_err(map_handler_error) + }() + .await + .map_err(map_handler_error) } - diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 88dccb75164..bbadf6513e1 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -1,43 +1,70 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +use actix::prelude::*; use uuid::Uuid; -use actix::{prelude::*}; use crate::ws_hub::{ - WsHub, ServerMessage, SessionId, - Connect, Disconnect, - Subscribe, Unsubscribe, UnsubscribeAll, - SubscribeList, + Connect, Disconnect, ServerMessage, SessionId, Subscribe, SubscribeList, Unsubscribe, + UnsubscribeAll, WsHub, }; -use redis::aio::MultiplexedConnection; -use std::sync::Arc; -use tokio::sync::Mutex; -use serde_json::{Value, Map, json}; use actix::{ - Actor, - StreamHandler, - AsyncContext, - ActorContext, - fut, - ActorFutureExt, - Handler, WrapFuture + Actor, ActorContext, ActorFutureExt, AsyncContext, Handler, StreamHandler, WrapFuture, fut, }; -use actix_web::{web, HttpRequest, HttpResponse, Error}; +use actix_web::{Error, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; +use redis::aio::MultiplexedConnection; use serde::Deserialize; +use serde_json::{Map, Value, json}; use std::collections::HashSet; +use std::sync::Arc; +use tokio::sync::Mutex; use crate::redis_lib::{ - Ttl, SaveMode, - RedisArray, + RedisArray, SaveMode, Ttl, deprecated_symbol, error, redis_delete, redis_list, redis_read, redis_save, - redis_read, - redis_delete, - redis_list, - error, - deprecated_symbol, }; -type JsonMap = Map; +use serde::Serialize; + +#[derive(Serialize, Default)] +struct ReturnBase<'a> { + action: &'a str, + + #[serde(skip_serializing_if = "Option::is_none")] + key: Option<&'a str>, + + #[serde(skip_serializing_if = "Option::is_none")] + data: Option<&'a str>, + + #[serde(skip_serializing_if = "Option::is_none")] + correlation: Option<&'a str>, + + #[serde(rename = "TTL", skip_serializing_if = "Option::is_none")] + ttl: Option, + + #[serde(rename = "expiresAt", skip_serializing_if = "Option::is_none")] + expires_at: Option, + + #[serde(rename = "ifMatch", skip_serializing_if = "Option::is_none")] + if_match: Option<&'a str>, + + #[serde(rename = "ifNoneMatch", skip_serializing_if = "Option::is_none")] + if_none_match: Option<&'a str>, +} /// WsCommand - commands enum (put, delete, sub, unsub) #[derive(Deserialize, Debug)] @@ -66,26 +93,26 @@ pub enum WsCommand { if_none_match: Option, }, - Get { + Delete { #[serde(default)] correlation: Option, key: String, + + #[serde(rename = "ifMatch")] + #[serde(default)] + if_match: Option, }, - List { + Get { #[serde(default)] correlation: Option, key: String, }, - Delete { + List { #[serde(default)] correlation: Option, key: String, - - #[serde(rename = "ifMatch")] - #[serde(default)] - if_match: Option, }, Sub { @@ -117,7 +144,6 @@ pub struct WsSession { pub claims: Option, } - /// Actor External trait: must be in separate impl block impl Actor for WsSession { type Context = ws::WebsocketContext; @@ -131,33 +157,35 @@ impl Actor for WsSession { self.hub .send(Connect { addr: recipient }) .into_actor(self) - .map(|res, act, _ctx| { - match res { - Ok(id) => { - act.id = id; - tracing::info!("WebSocket connected: {id}"); - } - Err(e) => { - tracing::error!("WebSocket failed connect to hub: {e}"); - _ctx.stop(); - } + .map(|res, act, _ctx| match res { + Ok(id) => { + act.id = id; + tracing::info!("WebSocket connected: {id}"); + } + Err(e) => { + tracing::error!("WebSocket failed connect to hub: {e}"); + _ctx.stop(); } }) .wait(ctx); // waiting for ID } fn stopped(&mut self, _ctx: &mut Self::Context) { - if self.id != 0 { self.hub.do_send(Disconnect { session_id: self.id }); } - tracing::info!("WebSocket disconnected: {:?}",&self.id); + if self.id != 0 { + self.hub.do_send(Disconnect { + session_id: self.id, + }); + } + 
tracing::info!("WebSocket disconnected: {:?}", &self.id); } - } impl actix::Handler for WsSession { type Result = (); fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - let json = serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); + let json = + serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); ctx.text(json); } } @@ -166,16 +194,12 @@ impl actix::Handler for WsSession { impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { match msg { - Ok(ws::Message::Text(text)) => { - // println!("Message: {}", text); - match serde_json::from_str::(&text) { - Ok(cmd) => self.handle_command(cmd, ctx), - Err(err) => ctx.text(format!("Invalid JSON: {}", err)), - } - } + Ok(ws::Message::Text(text)) => match serde_json::from_str::(&text) { + Ok(cmd) => self.handle_command(cmd, ctx), + Err(err) => ctx.text(format!("Invalid JSON: {}", err)), + }, Ok(ws::Message::Ping(msg)) => ctx.pong(&msg), Ok(ws::Message::Close(reason)) => { - // println!("Closing WS: {:?}", reason); ctx.close(reason); ctx.stop(); } @@ -184,287 +208,352 @@ impl StreamHandler> for WsSession { } } +use crate::workspace_owner::check_workspace_core; + /// All logic in one impl impl WsSession { - fn ws_error(&self, ctx: &mut ws::WebsocketContext, msg: &str) { ctx.text(format!(r#"{{"error":"{}"}}"#, msg)); } fn workspace_check_ws(&self, key: &str) -> Result<(), &'static str> { let claims = self.claims.as_ref().ok_or("Missing auth claims")?; - if claims.is_system() { return Ok(()); } - let jwt_workspace = claims.workspace.as_ref().ok_or("Missing workspace in token")?; - let path_ws = key.split('/').next().ok_or("Invalid key: missing workspace")?; - if path_ws.is_empty() { return Err("Invalid key: missing workspace"); } - let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| "Invalid workspace UUID in key")?; - if jwt_workspace != &path_ws_uuid { return Err("Workspace mismatch"); } - Ok(()) + check_workspace_core(claims, key) } - fn wait_and_send( + fn fut_send( &mut self, ctx: &mut ws::WebsocketContext, - fut: F, - mut base: JsonMap, - ) - where - F: std::future::Future> + 'static, - { + fut: impl Future> + 'static, + mut base: Value, + ) { ctx.wait( fut::wrap_future(fut).map(move |res, _actor: &mut Self, ctx| { + // if !base.is_object() { base = json!({ "base": base }); } + let obj = base.as_object_mut().unwrap(); match res { - Ok(extra) => { base.extend(extra); } - Err(err) => { base.insert("error".into(), json!(err)); } + Ok(Value::Object(extra)) => { + obj.extend(extra); + } + Ok(v) => { + obj.insert("extra".into(), v); + } + Err(err) => { + obj.insert("error".into(), json!(err)); + } } - ctx.text(Value::Object(base).to_string()); - }) + ctx.text(base.to_string()); + }), ); } /// When valid JSON recieved for WsSession fn handle_command(&mut self, cmd: WsCommand, ctx: &mut ws::WebsocketContext) { match cmd { + WsCommand::Put { + key, + data, + expires_at, + ttl, + if_match, + if_none_match, + correlation, + } => { + tracing::info!( + "PUT {} = {} (expires_at: {:?}) (ttl: {:?}) correlation: {:?}", + &key, + &data, + &expires_at, + &ttl, + &correlation + ); + + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - WsCommand::Put { key, data, expires_at, ttl, if_match, if_none_match, correlation } => { - - tracing::info!("PUT {} = {} (expires_at: {:?}) (ttl: {:?}) correlation: {:?}", &key, &data, &expires_at, &ttl, &correlation); + let redis = 
self.redis.clone(); + + let base = serde_json::json!(ReturnBase { + action: "put", + key: Some(key.as_str()), + data: Some(data.as_str()), + correlation: correlation.as_deref(), + ttl, + expires_at, + if_match: if_match.as_deref(), + if_none_match: if_none_match.as_deref(), + }); + + let fut = async move { + // TTL logic + let real_ttl = if let Some(secs) = ttl { + Some(Ttl::Sec(secs as usize)) + } else if let Some(timestamp) = expires_at { + Some(Ttl::At(timestamp)) + } else { + None + }; + + // MODE logic + let mut mode = Some(SaveMode::Upsert); + if let Some(s) = if_match { + // `If-Match: *` - update only if the key exists + if s == "*" { + // `If-Match: *` — update only if exist + mode = Some(SaveMode::Update); + } else { + // `If-Match: ` — update only if current + mode = Some(SaveMode::Equal(s.to_string())); + } + } else if let Some(s) = if_none_match { + // `If-None-Match: *` — insert only if does not exist + if s == "*" { + mode = Some(SaveMode::Insert); + } else { + return Err("ifNoneMatch must contain only '*'".into()); + } + } - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let mut conn = redis.lock().await; - let redis = self.redis.clone(); + redis_save(&mut *conn, &key, &data, real_ttl, mode) + .await + .map_err(|e| e.to_string())?; - let mut base = JsonMap::new(); - base.insert("action".into(), json!("put")); - base.insert("key".into(), json!(&key)); - base.insert("data".into(), json!(&data)); - if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } - if let Some(x) = &expires_at { base.insert("expiresAt".into(), json!(x)); } - if let Some(x) = &ttl { base.insert("TTL".into(), json!(x)); } - if let Some(x) = &if_match { base.insert("ifMatch".into(), json!(x)); } - if let Some(x) = &if_none_match { base.insert("ifNoneMatch".into(),json!(x)); } + Ok(json!({"result": "OK"})) + }; - let fut = async move { + self.fut_send(ctx, fut, base); + } - // TTL logic - let real_ttl = if let Some(secs) = ttl { - Some(Ttl::Sec(secs as usize)) - } else if let Some(timestamp) = expires_at { - Some(Ttl::At(timestamp)) - } else { - None - }; - - // MODE logic - let mut mode = Some(SaveMode::Upsert); - if let Some(s) = if_match { // `If-Match: *` - update only if the key exists - if s == "*" { // `If-Match: *` — update only if exist - mode = Some(SaveMode::Update); - } else { // `If-Match: ` — update only if current - mode = Some(SaveMode::Equal(s.to_string())); - } - } else if let Some(s) = if_none_match { // `If-None-Match: *` — insert only if does not exist - if s == "*" { - mode = Some(SaveMode::Insert); - } else { - return Err::("ifNoneMatch must contain only '*'".into()); - } - } - - let mut conn = redis.lock().await; - - redis_save(&mut *conn, &key, &data, real_ttl, mode) - .await - .map_err(|e| e.to_string())?; - - let mut extra = JsonMap::new(); - extra.insert("response".into(), json!("OK")); - Ok::(extra) - - }; - - self.wait_and_send(ctx, fut, base); - } - - - WsCommand::Delete { key, correlation, if_match } => { + WsCommand::Delete { + key, + correlation, + if_match, + } => { tracing::info!("DELETE {} correlation:{:?}", &key, &correlation); - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } - - let redis = self.redis.clone(); - - let mut base = JsonMap::new(); - base.insert("action".into(), json!("delete")); - base.insert("key".into(), json!(&key)); - if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } - if let Some(x) = 
&if_match { base.insert("ifMatch".into(), json!(x)); } - - let fut = async move { - - let mut conn = redis.lock().await; + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - let deleted = redis_delete(&mut *conn, &key).await.map_err(|e| e.to_string())?; + let redis = self.redis.clone(); + + let base = serde_json::json!(ReturnBase { + action: "delete", + key: Some(key.as_str()), + correlation: correlation.as_deref(), + if_match: if_match.as_deref(), + ..Default::default() + }); + + let fut = async move { + let mut conn = redis.lock().await; + + // MODE logic + let mut mode = Some(SaveMode::Upsert); + if let Some(s) = if_match { + // `If-Match: *` - delete only if the key exists + if s == "*" { + // `If-Match: *` — return error if not exist + mode = Some(SaveMode::Update); + } else { + // `If-Match: ` — update only if current + mode = Some(SaveMode::Equal(s.to_string())); + } + } - if deleted { - let mut extra = JsonMap::new(); - extra.insert("response".into(), json!("OK")); - Ok::(extra) - } else { - Err::("not found".into()) - } + let deleted = redis_delete(&mut *conn, &key, mode) + .await + .map_err(|e| e.to_string())?; - }; + if deleted { + Ok(json!({"result": "OK"})) + } else { + Err("not found".into()) + } + }; - self.wait_and_send(ctx, fut, base); + self.fut_send(ctx, fut, base); } WsCommand::Get { key, correlation } => { tracing::info!("GET {} correlation:{:?}", &key, &correlation); - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } - - let redis = self.redis.clone(); - - let mut base = JsonMap::new(); - base.insert("action".into(), json!("get")); - base.insert("key".into(), json!(&key)); - if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } - - let fut = async move { - - let mut conn = redis.lock().await; - - let data_opt = redis_read(&mut *conn, &key) - .await - .map_err(|e| e.to_string())?; + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - match data_opt { - Some(data) => { - let mut extra = JsonMap::new(); - let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; - extra.insert("response".into(), data_value); - Ok::(extra) - } - None => Err::("not found".into()) - } - }; + let redis = self.redis.clone(); + + let base = serde_json::json!(ReturnBase { + action: "get", + key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() + }); + + let fut = async move { + let mut conn = redis.lock().await; + + let data_opt = redis_read(&mut *conn, &key) + .await + .map_err(|e| e.to_string())?; + + match data_opt { + Some(data) => { + let data_value = + serde_json::to_value(&data).map_err(|e| e.to_string())?; + Ok(json!({"result": data_value})) + } + None => Err("not found".into()), + } + }; - self.wait_and_send(ctx, fut, base); + self.fut_send(ctx, fut, base); } WsCommand::List { key, correlation } => { tracing::info!("LIST {:?} correlation: {:?}", &key, &correlation); - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } - - let redis = self.redis.clone(); - - let mut base = JsonMap::new(); - base.insert("action".into(), json!("get")); - base.insert("key".into(), json!(&key)); - if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } - - let fut = async move { + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - let 
mut conn = redis.lock().await; + let redis = self.redis.clone(); - let data = redis_list(&mut *conn, &key).await.map_err(|e| e.to_string())?; + let base = serde_json::json!(ReturnBase { + action: "list", + key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() + }); - let mut extra = JsonMap::new(); - let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; - extra.insert("response".into(), data_value); - Ok::(extra) - }; + let fut = async move { + let mut conn = redis.lock().await; + let data = redis_list(&mut *conn, &key) + .await + .map_err(|e| e.to_string())?; + Ok(json!({ "result": data })) + }; - self.wait_and_send(ctx, fut, base); + self.fut_send(ctx, fut, base); } + WsCommand::Sub { key, correlation } => { + // LEVENT 3 + tracing::info!("SUB {} correlation: {:?}", &key, &correlation); - WsCommand::Sub { key, correlation } => { - // LEVENT 3 - tracing::info!("SUB {} correlation: {:?}", &key, &correlation); - - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } - - let mut obj = JsonMap::new(); - obj.insert("action".into(), json!("sub")); - obj.insert("key".into(), json!(key)); - if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } - - if deprecated_symbol(&key) { - obj.insert("error".into(), json!("Deprecated symbol in key")); - } else { - self.hub.do_send(Subscribe { session_id: self.id, key: key.clone() }); - } - - ctx.text(Value::Object(obj).to_string()); - } - - WsCommand::Unsub { key, correlation } => { - // LEVENT 4 - tracing::info!("UNSUB {} correlation: {:?}", &key, &correlation); + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { self.ws_error(ctx, e); return; } + let mut obj = serde_json::json!(ReturnBase { + action: "sub", + key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() + }); + + let map = obj.as_object_mut().unwrap(); + + if deprecated_symbol(&key) { + map.insert("error".into(), json!("Deprecated symbol in key")); + } else { + self.hub.do_send(Subscribe { + session_id: self.id, + key: key.clone(), + }); + map.insert("result".into(), json!("OK")); + } - let mut obj = JsonMap::new(); - obj.insert("action".into(), json!("unsub")); - obj.insert("key".into(), json!(key)); - if let Some(c) = correlation { obj.insert("correlation".into(), json!(c)); } + ctx.text(obj.to_string()); + } - if key == "*" { - self.hub.do_send(UnsubscribeAll { session_id: self.id }); - } else { - if deprecated_symbol(&key) { - obj.insert("error".into(), json!("Deprecated symbol in key")); - } else { - self.hub.do_send(Unsubscribe { session_id: self.id, key: key.clone() }); - } - }; + WsCommand::Unsub { key, correlation } => { + // LEVENT 4 + tracing::info!("UNSUB {} correlation: {:?}", &key, &correlation); + + let mut obj = serde_json::json!(ReturnBase { + action: "unsub", + key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() + }); + + let map = obj.as_object_mut().unwrap(); + + if key == "*" { + self.hub.do_send(UnsubscribeAll { + session_id: self.id, + }); + map.insert("result".into(), json!("OK")); + } else { + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } - ctx.text(Value::Object(obj).to_string()); - } + if deprecated_symbol(&key) { + map.insert("error".into(), json!("Deprecated symbol in key")); + } else { + 
map.insert("result".into(), json!("OK")); + self.hub.do_send(Unsubscribe { + session_id: self.id, + key: key.clone(), + }); + } + }; - WsCommand::Sublist { correlation } => { - tracing::info!("SUBLIST correlation: {:?}", &correlation); + ctx.text(obj.to_string()); + } - // w/o Check workspace! + WsCommand::Sublist { correlation } => { + tracing::info!("SUBLIST correlation: {:?}", &correlation); - let mut base = JsonMap::new(); - base.insert("action".into(), json!("sublist")); - if let Some(x) = &correlation { base.insert("correlation".into(), json!(x)); } + // w/o Check workspace! - let hub = self.hub.clone(); - let id = self.id; + let base = serde_json::json!(ReturnBase { + action: "list", + correlation: correlation.as_deref(), + ..Default::default() + }); - let fut = async move { - let keys = hub.send(SubscribeList { session_id: id }).await.unwrap_or_default(); - let mut extra = JsonMap::new(); - extra.insert("response".into(), serde_json::to_value(&keys).map_err(|e| e.to_string())? ); - Ok::(extra) - }; + let hub = self.hub.clone(); + let id = self.id; - self.wait_and_send(ctx, fut, base); - } + let fut = async move { + let keys = hub + .send(SubscribeList { session_id: id }) + .await + .unwrap_or_default(); + Ok(json!({ "result": keys })) + }; - // End of commands + self.fut_send(ctx, fut, base); + } // End of commands } } - } // ---- auth -use actix_web::{HttpMessage,error}; -use url::form_urlencoded; -use jsonwebtoken::{decode, DecodingKey, Validation, Algorithm}; use crate::CONFIG; +use actix_web::{HttpMessage, error}; +use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode}; +use url::form_urlencoded; pub async fn handler( req: HttpRequest, @@ -472,23 +561,24 @@ pub async fn handler( redis: web::Data>>, hub: web::Data>, ) -> Result { - let token_opt = req.uri().query().and_then(|q| { - form_urlencoded::parse(q.as_bytes()) - .find(|(k, _)| k == "token") - .map(|(_, v)| v.into_owned()) - }); + form_urlencoded::parse(q.as_bytes()) + .find(|(k, _)| k == "token") + .map(|(_, v)| v.into_owned()) + }); let claims = match token_opt { Some(t) if !t.is_empty() => { - - let mut validation = Validation::new(Algorithm::HS256); - validation.required_spec_claims = HashSet::new(); // no: exp/iat/nbf - - let c = decode::(&t, &DecodingKey::from_secret(CONFIG.token_secret.as_bytes()), &validation ) - .map(|td| td.claims) - .map_err(|_e| error::ErrorUnauthorized("Invalid token"))?; - + let mut validation = Validation::new(Algorithm::HS256); + validation.required_spec_claims = HashSet::new(); // no: exp/iat/nbf + + let c = decode::( + &t, + &DecodingKey::from_secret(CONFIG.token_secret.as_bytes()), + &validation, + ) + .map(|td| td.claims) + .map_err(|_e| error::ErrorUnauthorized("Invalid token"))?; Some(c) } diff --git a/src/main.rs b/src/main.rs index db6be5c8467..b66a84b544a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -23,10 +23,14 @@ use actix_web::{ App, Error, HttpMessage, HttpRequest, HttpResponse, HttpServer, body::MessageBody, dev::{ServiceRequest, ServiceResponse}, + error::ErrorBadRequest, + http::header::{AUTHORIZATION, HeaderValue}, middleware::{self, Next}, web::{self, Data, PayloadConfig}, }; +use url::form_urlencoded; + use actix_web_actors::ws; use tracing::info; @@ -41,15 +45,10 @@ use crate::redis_lib::redis_connect; mod workspace_owner; -// == =hub === mod redis_events; mod ws_hub; +use crate::ws_hub::{ServerMessage, TestGetSubs, WsHub}; use actix::prelude::*; -use crate::ws_hub::{WsHub, ServerMessage, - TestGetSubs, -}; - -// === /hub === use config::CONFIG; @@ -70,54 
+69,55 @@ fn initialize_tracing(level: tracing::Level) { .init(); } -// #[allow(dead_code)] async fn interceptor( - request: ServiceRequest, + mut request: ServiceRequest, next: Next, ) -> Result, Error> { + // Authorization/token patch + if request.headers().get(AUTHORIZATION).is_none() { + if let Some(qs) = request.uri().query() { + if let Some(token) = form_urlencoded::parse(qs.as_bytes()) + .find(|(k, _)| k == "token") + .map(|(_, v)| v.into_owned()) + { + let auth_value = HeaderValue::from_str(&format!("Bearer {}", token)) + .map_err(|_| ErrorBadRequest("Malformed token"))?; + request.headers_mut().insert(AUTHORIZATION, auth_value); + } + } + } let secret = SecretString::new(CONFIG.token_secret.clone().into_boxed_str()); - let claims = request.extract_claims(&secret)?; - // TODO: сделать это здесь - request.extensions_mut().insert(claims.to_owned()); - // TODO потом исправить hulyrs: extract_claims - next.call(request).await } - - -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== -// ===================================================================================== use crate::redis_events::RedisEventAction::*; // Set, Del, Unlink, Expired, Other pub async fn start_redis_logger(redis_url: String, hub: Addr) { let client = match redis::Client::open(redis_url) { Ok(c) => c, - Err(e) => { eprintln!("[redis] bad url: {e}"); return; } + Err(e) => { + eprintln!("[redis] bad url: {e}"); + return; + } }; match crate::redis_events::make_pubsub_with_kea(&client).await { Ok(pubsub) => { let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); while let Some(ev) = rx.recv().await { - - match ev.action { - Set => println!("[redis] db{} SET {}", ev.db, ev.key), - Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), - Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), - Other(ref k) => println!("[redis] db{} {} {}", ev.db, k, ev.key), - } + /* + match ev.action { + Set => println!("[redis] db{} SET {}", ev.db, ev.key), + Del | Unlink => println!("[redis] db{} DEL {}", ev.db, ev.key), + Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), + Other(ref k) => println!("[redis] db{} {} {}", ev.db, k, ev.key), + } + */ hub.do_send(ev.clone()); } @@ -126,14 +126,6 @@ pub async fn start_redis_logger(redis_url: String, hub: Addr) { } } - - -// use actix_web::http::header; -// use actix_web::http::header::HeaderValue; -// use actix_web::body::BoxBody; -// use url::form_urlencoded; - - // #[tokio::main] #[actix_web::main] async fn main() -> anyhow::Result<()> { @@ -146,12 +138,14 @@ async fn main() -> anyhow::Result<()> { let redis_data = web::Data::new(redis.clone()); // starting Hub - // let hub = WsHub::default().start(); let hub = WsHub::new(redis.clone()).start(); let hub_data = web::Data::new(hub.clone()); // starting Logger - tokio::spawn(start_redis_logger("redis://127.0.0.1/".to_string(), hub.clone())); + tokio::spawn(start_redis_logger( + "redis://127.0.0.1/".to_string(), + 
hub.clone(), + )); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); @@ -167,85 +161,37 @@ async fn main() -> anyhow::Result<()> { App::new() .app_data(payload_config.clone()) .app_data(redis_data.clone()) - .app_data(hub_data.clone()) + .app_data(hub_data.clone()) .wrap(middleware::Logger::default()) .wrap(cors) .service( web::scope("/api") .wrap(middleware::from_fn(interceptor)) - .route("/{key:.+/}", web::get().to(handlers_http::list)) + .route("/{key:.+/}", web::get().to(handlers_http::list)) .route("/{key:.+}", web::get().to(handlers_http::get)) - .route("/{key:.+}", web::put().to(handlers_http::put)) - .route("/{key:.+}", web::delete().to(handlers_http::delete)) + .route("/{key:.+}", web::put().to(handlers_http::put)) + .route("/{key:.+}", web::delete().to(handlers_http::delete)), ) .route("/status", web::get().to(async || "ok")) - - // .route("/stat", web::get().to(ws_hub::stat)) - .route("/stat2", web::get().to(|hub: web::Data>| async move { - let count = hub.send(crate::ws_hub::Count).await.unwrap_or(0); - HttpResponse::Ok().json(serde_json::json!({ "connections": count })) - })) - - .route("/subs", web::get().to(|hub: web::Data>| async move { - match hub.send(TestGetSubs).await { - Ok(subs) => HttpResponse::Ok().json(subs), - Err(_) => HttpResponse::InternalServerError().body("Failed to get subscriptions"), - } - })) - - .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket - -/* - -.service( - web::resource("/ws") - - -.wrap(middleware::from_fn(|mut req: ServiceRequest, next: Next| async move { - // Уже есть Authorization? - let has_auth = req.headers().contains_key(header::AUTHORIZATION); - - if !has_auth { - // ?token=... - if let Some(token) = form_urlencoded::parse(req.query_string().as_bytes()) - .find(|(k, _)| k == "token") - .map(|(_, v)| v.into_owned()) - { - if !token.is_empty() { - let value = format!("Bearer {}", token); - req.headers_mut().insert( - header::AUTHORIZATION, - HeaderValue::from_str(&value) - .map_err(|_| actix_web::error::ErrorBadRequest("Invalid token header"))?, - ); - } - } - } - - next.call(req).await -})) - // затем твой interceptor: -// .wrap(middleware::from_fn(interceptor)) - .route(web::get().to(handlers_ws::handler)) - -) - -*/ - - - - - - - - - - - - - - - + .route( + "/stat2", + web::get().to(|hub: web::Data>| async move { + let count = hub.send(crate::ws_hub::Count).await.unwrap_or(0); + HttpResponse::Ok().json(serde_json::json!({ "connections": count })) + }), + ) + .route( + "/subs", + web::get().to(|hub: web::Data>| async move { + match hub.send(TestGetSubs).await { + Ok(subs) => HttpResponse::Ok().json(subs), + Err(_) => { + HttpResponse::InternalServerError().body("Failed to get subscriptions") + } + } + }), + ) + .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket }) .bind(socket)? .run(); diff --git a/src/redis_events.rs b/src/redis_events.rs index 88a89ca8591..2d3c2150dfa 100644 --- a/src/redis_events.rs +++ b/src/redis_events.rs @@ -1,24 +1,35 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + use tokio::sync::mpsc; use tokio::task::JoinHandle; use tokio_stream::StreamExt; use serde::Serialize; - use redis::{ - self, - AsyncCommands, - RedisResult, - Client, - aio::{PubSub, ConnectionLike}, + self, AsyncCommands, Client, RedisResult, + aio::{ConnectionLike, PubSub}, }; #[derive(Debug, Clone, Serialize)] pub enum RedisEventAction { - Set, // Insert or Update - Del, // Delete - Unlink, // async Delete - Expired, // TTL Delete + Set, // Insert or Update + Del, // Delete + Unlink, // async Delete + Expired, // TTL Delete Other(String), } @@ -29,17 +40,21 @@ use actix::Message; pub struct RedisEvent { pub db: u32, pub key: String, -// pub value: String, + // pub value: String, pub action: RedisEventAction, } - /// Notifications: keyevent + generic + expired = "Egx" (no keyspace) async fn try_enable_keyspace_notifications(conn: &mut C) -> RedisResult<()> where C: ConnectionLike + Send, { - let _: String = redis::cmd("CONFIG").arg("SET").arg("notify-keyspace-events").arg("E$gx").query_async(conn).await?; + let _: String = redis::cmd("CONFIG") + .arg("SET") + .arg("notify-keyspace-events") + .arg("E$gx") + .query_async(conn) + .await?; Ok(()) } @@ -82,33 +97,46 @@ pub fn start_keyevent_listener( while let Some(msg) = stream.next().await { let channel = match msg.get_channel::() { - Ok(c) => c, - Err(e) => { eprintln!("[redis_events] bad channel: {e}"); continue; } + Ok(c) => c, + Err(e) => { + eprintln!("[redis_events] bad channel: {e}"); + continue; + } }; let payload = match msg.get_payload::() { - Ok(p) => p, - Err(e) => { eprintln!("[redis_events] bad payload: {e}"); continue; } + Ok(p) => p, + Err(e) => { + eprintln!("[redis_events] bad payload: {e}"); + continue; + } }; // "__keyevent@0__:set" → event="set", db=0; payload = key let event = channel.rsplit(':').next().unwrap_or(""); - let action = match event { - "set" => RedisEventAction::Set, - "del" => RedisEventAction::Del, - "unlink" => RedisEventAction::Unlink, - "expired" => RedisEventAction::Expired, - other => RedisEventAction::Other(other.to_string()), - }; - - let db = channel.find('@') - .and_then(|at| channel.get(at + 1..)) - .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) - .and_then(|s| s.parse::().ok()) - .unwrap_or(0); - - let ev = RedisEvent { db, key: payload.clone(), action }; - - if tx.send(ev).is_err() { break; } // closed + let action = match event { + "set" => RedisEventAction::Set, + "del" => RedisEventAction::Del, + "unlink" => RedisEventAction::Unlink, + "expired" => RedisEventAction::Expired, + other => RedisEventAction::Other(other.to_string()), + }; + + let db = channel + .find('@') + .and_then(|at| channel.get(at + 1..)) + .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + let ev = RedisEvent { + db, + key: payload.clone(), + action, + }; + + if tx.send(ev).is_err() { + break; + } // closed } }); diff --git a/src/redis_lib.rs b/src/redis_lib.rs index eb587706f6e..415d24bf776 100644 --- a/src/redis_lib.rs +++ b/src/redis_lib.rs @@ -1,3 +1,18 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. 
+// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + use crate::config::{CONFIG, RedisMode}; use std::time::{SystemTime, UNIX_EPOCH}; @@ -5,21 +20,20 @@ use std::time::{SystemTime, UNIX_EPOCH}; #[derive(serde::Serialize)] pub enum Ttl { Sec(usize), // EX - At(u64), // EXAT (timestamp in seconds) + At(u64), // EXAT (timestamp in seconds) } #[derive(Debug)] pub enum SaveMode { - Upsert, // default: set or overwrite - Insert, // only if not exists (NX) - Update, // only if exists (XX) + Upsert, // default: set or overwrite + Insert, // only if not exists (NX) + Update, // only if exists (XX) Equal(String), // only if md5 matches provided } use redis::{ - AsyncCommands, RedisResult, - ToRedisArgs, - Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, aio::MultiplexedConnection + AsyncCommands, Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, RedisResult, + ToRedisArgs, aio::MultiplexedConnection, }; use url::Url; @@ -30,24 +44,28 @@ pub struct RedisArray { pub key: String, pub data: String, pub expires_at: u64, // sec to expire TTL - pub etag: String, // md5 hash (data) + pub etag: String, // md5 hash (data) } /// return Error pub fn error(code: u16, msg: impl Into) -> redis::RedisResult { let msg = msg.into(); let full = format!("{}: {}", code, msg); - Err(redis::RedisError::from(( redis::ErrorKind::ExtensionError, "", full ))) + Err(redis::RedisError::from(( + redis::ErrorKind::ExtensionError, + "", + full, + ))) } /// Check for redis-deprecated symbols pub fn deprecated_symbol(s: &str) -> bool { - s.chars().any(|c| matches!( - c, - '*' | '?' | '[' | ']' | '\\' | - '\0'..='\x1F' | '\x7F' | - '"' | '\'' - )) + s.chars().any(|c| { + matches!( + c, + '*' | '?' 
| '[' | ']' | '\\' | '\0'..='\x1F' | '\x7F' | '"' | '\'' + ) + }) } pub fn deprecated_symbol_error(s: &str) -> redis::RedisResult<()> { @@ -63,70 +81,84 @@ pub async fn redis_list( conn: &mut MultiplexedConnection, key: &str, ) -> redis::RedisResult> { - deprecated_symbol_error(key)?; - if !key.ends_with('/') { return error(412, "Key must end with slash"); } + if !key.ends_with('/') { + return error(412, "Key must end with slash"); + } let pattern = format!("{key}*"); let mut cursor = 0u64; let mut results = Vec::new(); loop { - let mut cmd = redis::cmd("SCAN"); + let mut cmd = redis::cmd("SCAN"); cmd.arg(cursor); cmd.arg("MATCH").arg(&pattern); // cmd.arg("COUNT").arg(100); // Optionally adjust batch size let (next_cursor, keys): (u64, Vec) = cmd.query_async(conn).await?; - for k in keys { - - // Check for $-security path - if k.strip_prefix(key).map_or(false, |s| s.contains('$')) { continue; } - - // Get value - let value: Option = redis::cmd("GET").arg(&k).query_async(conn).await?; - let Some(value) = value else { continue; }; // Old and deleted - - // Get TTL - let ttl: i64 = redis::cmd("TTL").arg(&k).query_async(conn).await?; - if ttl >= 0 { - results.push(RedisArray { - key: k, - data: value.clone(), - expires_at: ttl as u64, - etag: hex::encode(md5::compute(&value).0), - }); - } - } - - if next_cursor == 0 { break;} - cursor = next_cursor; + for k in keys { + // Check for $-security path + if k.strip_prefix(key).map_or(false, |s| s.contains('$')) { + continue; + } + + // Get value + let value: Option = redis::cmd("GET").arg(&k).query_async(conn).await?; + let Some(value) = value else { + continue; + }; // Old and deleted + + // Get TTL + let ttl: i64 = redis::cmd("TTL").arg(&k).query_async(conn).await?; + if ttl >= 0 { + results.push(RedisArray { + key: k, + data: value.clone(), + expires_at: ttl as u64, + etag: hex::encode(md5::compute(&value).0), + }); + } + } + + if next_cursor == 0 { + break; + } + cursor = next_cursor; } Ok(results) } - /// redis_read(&connection,key) #[allow(dead_code)] pub async fn redis_read( conn: &mut MultiplexedConnection, key: &str, ) -> redis::RedisResult> { - deprecated_symbol_error(key)?; - if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } let data: Option = redis::cmd("GET").arg(key).query_async(conn).await?; - let Some(data) = data else { return Ok(None); }; + let Some(data) = data else { + return Ok(None); + }; let ttl: i64 = redis::cmd("TTL").arg(key).query_async(conn).await?; - if ttl == -1 { return error(500, "TTL not set"); } - if ttl == -2 { return error(500, "Key not found"); } - if ttl < 0 { return error(500, "Unknown TTL error"); } + if ttl == -1 { + return error(500, "TTL not set"); + } + if ttl == -2 { + return error(500, "Key not found"); + } + if ttl < 0 { + return error(500, "Unknown TTL error"); + } Ok(Some(RedisArray { key: key.to_string(), @@ -136,7 +168,6 @@ pub async fn redis_read( })) } - /// TTL sec /// redis_save(&mut conn, "key", "val", Some(Ttl::Sec(300)), Some(SaveMode::Insert)).await?; /// @@ -155,25 +186,33 @@ pub async fn redis_save( ttl: Option, mode: Option, ) -> RedisResult<()> { - deprecated_symbol_error(&key)?; - if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } // TTL logic let sec = match ttl { - Some(Ttl::Sec(secs)) => secs, - Some(Ttl::At(timestamp)) => { - let now = 
SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); - if timestamp <= now { - return error(400, "TTL timestamp exceeds MAX_TTL limit"); - } - (timestamp - now) as usize - } - None => CONFIG.max_ttl, + Some(Ttl::Sec(secs)) => secs, + Some(Ttl::At(timestamp)) => { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs(); + if timestamp <= now { + return error(400, "TTL timestamp exceeds MAX_TTL limit"); + } + (timestamp - now) as usize + } + None => CONFIG.max_ttl, }; - if sec == 0 { return error(400, "TTL must be > 0"); } - if sec > CONFIG.max_ttl { return error(412, "TTL exceeds MAX_TTL"); } + if sec == 0 { + return error(400, "TTL must be > 0"); + } + if sec > CONFIG.max_ttl { + return error(412, "TTL exceeds MAX_TTL"); + } let mut cmd = redis::cmd("SET"); cmd.arg(key).arg(value).arg("EX").arg(sec); @@ -182,25 +221,39 @@ pub async fn redis_save( let mode = mode.unwrap_or(SaveMode::Upsert); match mode { - SaveMode::Upsert => {} // none - SaveMode::Insert => { cmd.arg("NX"); } // if NOT Exist + SaveMode::Insert => { + cmd.arg("NX"); + } // if NOT Exist - SaveMode::Update => { cmd.arg("XX"); } // if Exist + SaveMode::Update => { + cmd.arg("XX"); + } // if Exist - SaveMode::Equal(ref expected_md5) => { // if md5 === actual_md5 - let current_value: Option<String> = redis::cmd("GET").arg(key).query_async(conn).await?; + SaveMode::Equal(ref expected_md5) => { + // if md5 === actual_md5 + let current_value: Option<String> = + redis::cmd("GET").arg(key).query_async(conn).await?; if let Some(existing) = current_value { let actual_md5 = hex::encode(md5::compute(&existing).0); - if &actual_md5 != expected_md5 { return error(412, format!("md5 mismatch, current: {}, expected: {}", actual_md5, expected_md5)); } - } else { return error(404, "Equal: key does not exist"); } + if &actual_md5 != expected_md5 { + return error( + 412, + format!( + "md5 mismatch, current: {}, expected: {}", + actual_md5, expected_md5 + ), + ); + } + } else { + return error(404, "Equal: key does not exist"); + } } } + // execute let result: Option = cmd.query_async(conn).await?; -// // execute -// cmd.query_async::(&mut *conn).await?; if result.is_none() { match mode { @@ -213,27 +266,53 @@ pub async fn redis_save( Ok(()) } - /// redis_delete(&connection,key) -#[allow(dead_code)] pub async fn redis_delete( conn: &mut MultiplexedConnection, key: &str, -) -> redis::RedisResult<bool> { - + mode: Option<SaveMode>, // <— added +) -> RedisResult<bool> { deprecated_symbol_error(key)?; - if key.ends_with('/') { return error(412, "Key must not end with a slash"); } + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } - let deleted: i32 = redis::cmd("DEL") - .arg(key) - .query_async(conn) - .await?; + let mode = mode.unwrap_or(SaveMode::Upsert); - Ok(deleted > 0) -} + match mode { + SaveMode::Equal(ref expected_md5) => { + let current: Option<String> = redis::cmd("GET").arg(key).query_async(conn).await?; + match current { + None => return error(404, "Equal: key does not exist"), + Some(val) => { + let actual_md5 = hex::encode(md5::compute(&val).0); + if &actual_md5 != expected_md5 { + return error( + 412, + format!( + "md5 mismatch, current: {}, expected: {}", + actual_md5, expected_md5 + ), + ); + } + } + } + } + SaveMode::Insert => { + return error(412, "Insert mode is not supported for delete"); + } + SaveMode::Update | SaveMode::Upsert => {} + } + let deleted: i32 = redis::cmd("DEL").arg(key).query_async(conn).await?; + if deleted == 0 && matches!(mode, SaveMode::Equal(_)) { + return error(404, "Delete: 
key does not exist"); + } + + Ok(deleted > 0) +} /// redis_connect() pub async fn redis_connect() -> anyhow::Result { @@ -292,4 +371,3 @@ pub async fn redis_connect() -> anyhow::Result { Ok(conn) } - diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 24fade1ebec..3f369c9cdc1 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -1,43 +1,62 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +use actix_web::{Error, HttpMessage, HttpRequest}; use hulyrs::services::jwt::Claims; use uuid::Uuid; -use actix_web::{ Error, HttpMessage, HttpRequest, error }; -/// Checking workspace in Authorization -pub fn workspace_check(req: &HttpRequest) -> Result<(), Error> { - let extensions = req.extensions(); - - // Get key - let key = req - .match_info() - .get("key") - .ok_or_else(|| error::ErrorBadRequest("Missing key in URL path"))?; - - // Get workspace - let path_ws = match key.find('/') { - Some(x) if x > 0 => &key[..x], - _ => return Err(error::ErrorBadRequest("Invalid key: missing workspace")), - }; - - let claims = extensions - .get::() - .ok_or_else(|| error::ErrorUnauthorized("Missing auth claims"))?; - - // is_system - allowed to all +// common checker +pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static str> { if claims.is_system() { return Ok(()); } - // else - check workplace let jwt_workspace = claims .workspace .as_ref() - .ok_or_else(|| error::ErrorForbidden("Missing workspace in token"))?; - - let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| error::ErrorBadRequest("Invalid workspace UUID"))?; + .ok_or("Missing workspace in token")?; + let path_ws = key + .split('/') + .next() + .ok_or("Invalid key: missing workspace")?; + if path_ws.is_empty() { + return Err("Invalid key: missing workspace"); + } + let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| "Invalid workspace UUID in key")?; if jwt_workspace != &path_ws_uuid { - return Err(error::ErrorForbidden("Workspace mismatch")); + return Err("Workspace mismatch"); } Ok(()) } + +/// HTTP API +pub fn workspace_check(req: &HttpRequest) -> Result<(), actix_web::Error> { + let key = req + .match_info() + .get("key") + .ok_or_else(|| actix_web::error::ErrorBadRequest("Missing key in URL path"))?; + let claims = req + .extensions() + .get::() + .cloned() + .ok_or_else(|| actix_web::error::ErrorUnauthorized("Missing auth claims"))?; + + match check_workspace_core(&claims, key) { + Ok(()) => Ok(()), + Err(msg) => Err(actix_web::error::ErrorUnauthorized(msg)), + } +} diff --git a/src/ws_hub.rs b/src/ws_hub.rs index e9611ac5054..21b3f71d449 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -1,7 +1,24 @@ -use std::collections::{ HashMap, HashSet }; +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +use std::collections::{HashMap, HashSet}; fn subscription_matches(sub_key: &str, key: &str) -> bool { - if sub_key == key { return true; } + if sub_key == key { + return true; + } if sub_key.ends_with('/') && key.starts_with(sub_key) { let rest = &key[sub_key.len()..]; return !rest.contains('$'); @@ -9,16 +26,16 @@ fn subscription_matches(sub_key: &str, key: &str) -> bool { false } -use crate::redis_events::{ RedisEvent, RedisEventAction }; +use crate::redis_events::{RedisEvent, RedisEventAction}; use serde::Serialize; #[derive(Message, Clone, Serialize, Debug)] #[rtype(result = "()")] pub struct ServerMessage { #[serde(flatten)] - pub event: RedisEvent, // поля RedisEvent «вливаются» в корень JSON + pub event: RedisEvent, #[serde(skip_serializing_if = "Option::is_none")] - pub value: Option, // будет только при Set + pub value: Option, } /// Count of active sessions @@ -61,7 +78,7 @@ impl Handler for WsHub { type Result = SessionId; fn handle(&mut self, msg: Connect, _ctx: &mut Context) -> Self::Result { - // LEVENT 1 + // LEVENT 1 let id = self.next_id; self.next_id = self.next_id.wrapping_add(1); self.sessions.insert(id, msg.addr); @@ -81,7 +98,7 @@ impl Handler for WsHub { type Result = (); fn handle(&mut self, msg: Disconnect, _ctx: &mut Context) { - // LEVENT 2 + // LEVENT 2 // Delete all subscribes self.subs.retain(|_key, session_ids| { @@ -110,7 +127,8 @@ impl Handler for WsHub { fn handle(&mut self, msg: SubscribeList, _ctx: &mut Context) -> Self::Result { // Collect all keys with my session_id - let list = self.subs + let list = self + .subs .iter() .filter_map(|(key, sessions)| { if sessions.contains(&msg.session_id) { @@ -162,7 +180,9 @@ impl Handler for WsHub { fn handle(&mut self, msg: Unsubscribe, _ctx: &mut Context) { if let Some(set) = self.subs.get_mut(&msg.key) { set.remove(&msg.session_id); - if set.is_empty() { self.subs.remove(&msg.key); } + if set.is_empty() { + self.subs.remove(&msg.key); + } } } } @@ -183,9 +203,6 @@ impl Handler for WsHub { } } - - - #[derive(Message)] #[rtype(result = "HashMap>")] pub struct TestGetSubs; @@ -194,7 +211,8 @@ impl Handler for WsHub { type Result = MessageResult; fn handle(&mut self, _msg: TestGetSubs, _ctx: &mut Context) -> Self::Result { - let s: HashMap> = self.subs + let s: HashMap> = self + .subs .iter() .map(|(key, ids)| (key.clone(), ids.iter().copied().collect())) .collect(); @@ -215,12 +233,12 @@ impl WsHub { } } -use actix::prelude::*; use actix::ActorFutureExt; use actix::fut::ready; +use actix::prelude::*; +use redis::aio::MultiplexedConnection; use std::sync::Arc; use tokio::sync::Mutex; -use redis::aio::MultiplexedConnection; impl Handler for WsHub { type Result = ResponseActFuture; @@ -231,7 +249,8 @@ impl Handler for WsHub { return Box::pin(actix::fut::ready(()).into_actor(self)); } - let recipients: Vec> = targets.into_iter() + let recipients: Vec> = targets + .into_iter() .filter_map(|sid| self.sessions.get(&sid).cloned()) .collect(); @@ -242,9 +261,11 @@ impl Handler for WsHub { Box::pin( async move { let value = if need_get { - let mut conn = redis.lock().await; - match 
redis::cmd("GET").arg(&event.key).query_async::>(&mut *conn).await + match redis::cmd("GET") + .arg(&event.key) + .query_async::>(&mut *conn) + .await { Ok(v) => v, Err(e) => { @@ -262,7 +283,7 @@ impl Handler for WsHub { let _ = rcpt.do_send(payload.clone()); } } - .into_actor(self) + .into_actor(self), ) } } From 36b59c78c98d56eaba8a238241723fd89f977d55 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Mon, 18 Aug 2025 17:04:39 +0200 Subject: [PATCH 164/636] initial (config, tracing) Signed-off-by: Alexey Aristov --- .gitignore | 1 + Cargo.lock | 5038 +++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 37 + src/config.rs | 77 + src/main.rs | 146 ++ 5 files changed, 5299 insertions(+) create mode 100644 .gitignore create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 src/config.rs create mode 100644 src/main.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000000..ea8c4bf7f35 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/target diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000000..540ea4f76f4 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,5038 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "actix-codec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" +dependencies = [ + "bitflags 2.9.2", + "bytes", + "futures-core", + "futures-sink", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "actix-cors" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec", +] + +[[package]] +name = "actix-http" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44dfe5c9e0004c623edc65391dfd51daa201e7e30ebd9c9bedf873048ec32bc2" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-tls", + "actix-utils", + "base64 0.22.1", + "bitflags 2.9.2", + "brotli", + "bytes", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "foldhash", + "futures-core", + "h2 0.3.27", + "http 0.2.12", + "httparse", + "httpdate", + "itoa", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite", + "rand 0.9.2", + "sha1", + "smallvec", + "tokio", + "tokio-util", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "actix-router" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" +dependencies = [ + "bytestring", + "cfg-if", + "http 0.2.12", + "regex", + "regex-lite", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" +dependencies = [ + "futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "actix-tls" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac453898d866cdbecdbc2334fe1738c747b4eba14a677261f2b768ba05329389" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "impl-more", + "pin-project-lite", + "tokio", + "tokio-rustls 0.23.4", + "tokio-util", + "tracing", + "webpki-roots 0.22.6", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite", +] + +[[package]] +name = "actix-web" +version = "4.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-tls", + "actix-utils", + "actix-web-codegen", + "bytes", + "bytestring", + "cfg-if", + "cookie", + "derive_more", + "encoding_rs", + "foldhash", + "futures-core", + "futures-util", + "impl-more", + "itoa", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite", + "regex", + "regex-lite", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2 0.5.10", + "time", + "tracing", + "url", +] + +[[package]] +name = "actix-web-codegen" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" +dependencies = [ + "actix-router", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.99" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" + +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-tungstenite" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee88b4c88ac8c9ea446ad43498955750a4bbe64c4392f21ccfe5d952865e318f" +dependencies = [ + "atomic-waker", + "futures-core", + "futures-io", + "futures-task", + "futures-util", + "log", + "pin-project-lite", + "tungstenite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "aws-config" +version = "1.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c478f5b10ce55c9a33f87ca3404ca92768b144fc1bfdede7c0121214a8283a25" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sdk-sso", + "aws-sdk-ssooidc", + "aws-sdk-sts", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "hex", + "http 1.3.1", + "ring 0.17.14", + "time", + "tokio", + "tracing", + "url", + "zeroize", +] + +[[package]] +name = "aws-credential-types" +version = "1.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1541072f81945fa1251f8795ef6c92c4282d74d59f88498ae7d4bf00f0ebdad9" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "zeroize", +] + +[[package]] +name = "aws-lc-rs" +version = "1.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c953fe1ba023e6b7730c0d4b031d06f267f23a46167dcbd40316644b10a17ba" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbfd150b5dbdb988bcc8fb1fe787eb6b7ee6180ca24da683b61ea5405f3d43ff" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "aws-runtime" +version = "1.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c034a1bc1d70e16e7f4e4caf7e9f7693e4c9c24cd91cf17c2a0b21abaebc7c8b" +dependencies = [ + "aws-credential-types", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "tracing", + "uuid", +] + +[[package]] +name = "aws-sdk-s3" +version = "1.103.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af040a86ae4378b7ed2f62c83b36be1848709bbbf5757ec850d0e08596a26be9" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-sigv4", + "aws-smithy-async", + "aws-smithy-checksums", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "bytes", + "fastrand", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "lru", + "percent-encoding", + "regex-lite", + "sha2", + "tracing", + "url", +] + +[[package]] +name = "aws-sdk-sso" +version = "1.81.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79ede098271e3471036c46957cba2ba30888f53bda2515bf04b560614a30a36e" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-ssooidc" +version = "1.82.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43326f724ba2cc957e6f3deac0ca1621a3e5d4146f5970c24c8a108dac33070f" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-types", + "bytes", + "fastrand", + "http 0.2.12", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sdk-sts" +version = "1.83.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5468593c47efc31fdbe6c902d1a5fde8d9c82f78a3f8ccfe907b1e9434748cb" +dependencies = [ + "aws-credential-types", + "aws-runtime", + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-json", + "aws-smithy-query", + "aws-smithy-runtime", + "aws-smithy-runtime-api", + "aws-smithy-types", + "aws-smithy-xml", + "aws-types", + "fastrand", + "http 0.2.12", + "regex-lite", + "tracing", +] + +[[package]] +name = "aws-sigv4" +version = "1.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "084c34162187d39e3740cb635acd73c4e3a551a36146ad6fe8883c929c9f876c" +dependencies = [ + "aws-credential-types", + "aws-smithy-eventstream", + "aws-smithy-http", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "crypto-bigint 0.5.5", + "form_urlencoded", + "hex", + "hmac", + "http 0.2.12", + "http 1.3.1", + "p256", + "percent-encoding", + "ring 0.17.14", + "sha2", + "subtle", + "time", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-async" +version = "1.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e190749ea56f8c42bf15dd76c65e14f8f765233e6df9b0506d9d934ebef867c" +dependencies = [ + "futures-util", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "aws-smithy-checksums" +version = "0.63.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dbef71cd3cf607deb5c407df52f7e589e6849b296874ee448977efbb6d0832b" +dependencies = [ + "aws-smithy-http", + "aws-smithy-types", + "bytes", + "crc-fast", + "hex", + "http 0.2.12", + "http-body 0.4.6", + "md-5", + "pin-project-lite", + "sha1", + "sha2", + "tracing", +] + +[[package]] +name = "aws-smithy-eventstream" +version = "0.60.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "182b03393e8c677347fb5705a04a9392695d47d20ef0a2f8cfe28c8e6b9b9778" +dependencies = [ + "aws-smithy-types", + "bytes", + "crc32fast", +] + +[[package]] +name = "aws-smithy-http" +version = "0.62.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c4dacf2d38996cf729f55e7a762b30918229917eca115de45dfa8dfb97796c9" +dependencies = [ + "aws-smithy-eventstream", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "percent-encoding", + "pin-project-lite", + "pin-utils", + "tracing", +] + +[[package]] +name = "aws-smithy-http-client" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fdbad9bd9dbcc6c5e68c311a841b54b70def3ca3b674c42fbebb265980539f8" +dependencies = [ + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "h2 0.3.27", + "h2 0.4.12", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "hyper 0.14.32", + "hyper 1.7.0", + "hyper-rustls 0.24.2", + "hyper-rustls 0.27.7", + "hyper-util", + "pin-project-lite", + "rustls 0.21.12", + "rustls 0.23.31", + "rustls-native-certs 0.8.1", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.2", + "tower", + "tracing", +] + +[[package]] +name = "aws-smithy-json" +version = "0.61.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a16e040799d29c17412943bdbf488fd75db04112d0c0d4b9290bacf5ae0014b9" +dependencies = [ + "aws-smithy-types", +] + +[[package]] +name = "aws-smithy-observability" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9364d5989ac4dd918e5cc4c4bdcc61c9be17dcd2586ea7f69e348fc7c6cab393" +dependencies = [ + "aws-smithy-runtime-api", +] + +[[package]] +name = "aws-smithy-query" +version = "0.60.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2fbd61ceb3fe8a1cb7352e42689cec5335833cd9f94103a61e98f9bb61c64bb" +dependencies = [ + "aws-smithy-types", + "urlencoding", +] + +[[package]] +name = "aws-smithy-runtime" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3d57c8b53a72d15c8e190475743acf34e4996685e346a3448dd54ef696fc6e0" +dependencies = [ + "aws-smithy-async", + "aws-smithy-http", + "aws-smithy-http-client", + "aws-smithy-observability", + "aws-smithy-runtime-api", + "aws-smithy-types", + "bytes", + "fastrand", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "pin-project-lite", + "pin-utils", + "tokio", + "tracing", +] + +[[package]] +name = "aws-smithy-runtime-api" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07f5e0fc8a6b3f2303f331b94504bbf754d85488f402d6f1dd7a6080f99afe56" +dependencies = [ + "aws-smithy-async", + "aws-smithy-types", + "bytes", + "http 0.2.12", + "http 1.3.1", + "pin-project-lite", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "aws-smithy-types" +version = "1.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d498595448e43de7f4296b7b7a18a8a02c61ec9349128c80a368f7c3b4ab11a8" +dependencies = [ + "base64-simd", + "bytes", + "bytes-utils", + "futures-core", + "http 0.2.12", + "http 1.3.1", + "http-body 0.4.6", + "http-body 1.0.1", + "http-body-util", + "itoa", + "num-integer", + "pin-project-lite", + "pin-utils", + "ryu", + "serde", + "time", + "tokio", + "tokio-util", +] + +[[package]] +name = "aws-smithy-xml" +version = "0.60.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db87b96cb1b16c024980f133968d52882ca0daaee3a086c6decc500f6c99728" +dependencies = [ + "xmlparser", +] + +[[package]] +name = "aws-types" +version = "1.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b069d19bf01e46298eaedd7c6f283fe565a59263e53eebec945f3e6398f42390" +dependencies = [ + "aws-credential-types", + "aws-smithy-async", + "aws-smithy-runtime-api", + "aws-smithy-types", + "rustc_version", + "tracing", +] + +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + +[[package]] +name = "base16ct" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339abbe78e73178762e23bea9dfd08e697eb3f3301cd4be981c0f78ba5859195" +dependencies = [ + "outref", + "vsimd", +] + +[[package]] +name = "base64ct" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" + +[[package]] +name = "bb8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d8b8e1a22743d9241575c6ba822cf9c8fef34771c86ab7e477a4fbfd254e5" +dependencies = [ + "futures-util", + "parking_lot 0.12.4", + "tokio", +] + +[[package]] +name = "bb8-postgres" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e570e6557cd0f88d28d32afa76644873271a70dc22656df565b2021c4036aa9c" +dependencies = [ + "bb8", + "tokio", + "tokio-postgres", +] + +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags 2.9.2", + "cexpr", + "clang-sys", + "itertools", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn", + "which", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + 
+[[package]] +name = "bitflags" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29" +dependencies = [ + "serde", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "bytes-utils" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dafe3a8757b027e2be6e4e5601ed563c55989fcf1546e933c66c8eb3a058d35" +dependencies = [ + "bytes", + "either", +] + +[[package]] +name = "bytestring" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f" +dependencies = [ + "bytes", +] + +[[package]] +name = "cc" +version = "1.2.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + 
+[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + +[[package]] +name = "config" +version = "0.15.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa4092bf3922a966e2bd74640b80f36c73eaa7251a4fd0fbcda1f8a4de401352" +dependencies = [ + "async-trait", + "convert_case", + "json5", + "pathdiff", + "ron", + "rust-ini", + "serde", + "serde-untagged", + "serde_json", + "toml 0.9.5", + "winnow", + "yaml-rust2", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom 0.2.16", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cookie" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc-fast" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bf62af4cc77d8fe1c22dde4e721d87f2f54056139d8c412e1366b740305f56f" +dependencies = [ + "crc", + "digest", + "libc", + "rand 0.9.2", + "regex", +] + +[[package]] 
+name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-bigint" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core 0.9.11", +] + +[[package]] +name = "data-encoding" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" + +[[package]] +name = "der" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = 
"derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "derive_more" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "ecdsa" +version = "0.14.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" +dependencies = [ + "der", + "elliptic-curve", + "rfc6979", + "signature", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "elliptic-curve" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" +dependencies = [ + "base16ct", + "crypto-bigint 0.4.9", + "der", + "digest", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "subtle", + "zeroize", +] + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = 
"0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +dependencies = [ + "serde", + "typeid", +] + +[[package]] +name = "errno" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "ff" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "governor" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444405bbb1a762387aa22dd569429533b54a1d8759d35d3b64cb39b0293eaa19" +dependencies = [ + "cfg-if", + "dashmap", + "futures-sink", + "futures-timer", + "futures-util", + "getrandom 0.3.3", + "hashbrown 0.15.5", + "nonzero_ext", + "parking_lot 0.12.4", + "portable-atomic", + "quanta", + "rand 0.9.2", + "smallvec", + "spinning_top", + "web-time", +] + +[[package]] +name = "group" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap 2.10.0", + "slab", + "tokio", + 
"tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.3.1", + "indexmap 2.10.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.3.1", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body 1.0.1", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hulylake" +version = "0.1.0" +dependencies = [ + "actix-cors", + "actix-web", + "anyhow", + "aws-config", + "aws-sdk-s3", + "bb8", + "bb8-postgres", + "config", + "futures-util", + "hex", + "hulyrs", + "md5", + "refinery", + "reqwest", + "secrecy", + "serde", + "serde_json", + "size", + "tokio", + "tokio-postgres", + "tokio-stream", + "tracing", + "tracing-actix-web", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "hulyrs" +version = "0.1.0" +source = "git+https://github.com/hcengineering/hulyrs.git#08a583761d532cbfaa188983f4e10c77bcba4c71" +dependencies = [ + "actix-web", + "bytes", + "chrono", + "config", + "derive_builder", + "futures", + "governor", + "itoa", + "jsonwebtoken", + "num-traits", + "rand 0.9.2", + "reqwest", + "reqwest-middleware", + "reqwest-ratelimit", + "reqwest-retry", + "reqwest-websocket", + "ryu", + "secrecy", + "serde", + "serde_json", + "serde_with", + "strum", + "thiserror 2.0.15", + "tokio", + "tokio-stream", + "tokio_with_wasm", + "tracing", + "url", + "uuid", + "wasmtimer", +] + +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.27", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.10", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2 0.4.12", + "http 1.3.1", + "http-body 1.0.1", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "log", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "tokio", + "tokio-rustls 0.24.1", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.3.1", + "hyper 1.7.0", + "hyper-util", + "rustls 0.23.31", + "rustls-native-certs 0.8.1", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.2", + "tower-service", + "webpki-roots 1.0.2", +] + +[[package]] +name = "hyper-util" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "hyper 1.7.0", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.0", + 
"tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "impl-more" +version = "0.1.9" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown 0.15.5", + "serde", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "io-uring" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +dependencies = [ + "bitflags 2.9.2", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem", + "ring 0.17.14", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets", +] + +[[package]] +name = "libredox" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +dependencies = [ + "bitflags 2.9.2", + "libc", + "redox_syscall 0.5.17", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "local-channel" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" +dependencies = [ + "futures-core", + "futures-sink", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" + +[[package]] +name = "lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "md5" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "log", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "mutually_exclusive_features" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94e1e6445d314f972ff7395df2de295fe51b71821694f0b0e1e79c4f12c8577" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + +[[package]] 
+name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "p256" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f44edd08f51e2ade572f141051021c5af22677e42b7dd28a88155151c33594" +dependencies = [ + "ecdsa", + "elliptic-curve", + "sha2", +] + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.11", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.17", + "smallvec", + "windows-targets", +] + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "pem" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +dependencies = [ + "base64 0.22.1", + "serde", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" +dependencies = [ + "memchr", + "thiserror 2.0.15", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5" +dependencies = [ + "pest", + "sha2", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "postgres-protocol" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ff0abab4a9b844b93ef7b81f1efc0a366062aaef2cd702c76256b5dc075c54" +dependencies = [ + "base64 0.22.1", + "byteorder", + "bytes", + "fallible-iterator", + "hmac", + "md-5", + "memchr", + "rand 0.9.2", + "sha2", + "stringprep", +] + +[[package]] +name = "postgres-types" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613283563cd90e1dfc3518d548caee47e0e725455ed619881f5cf21f36de4b48" +dependencies = [ + "bytes", + "fallible-iterator", + "postgres-protocol", + "uuid", +] + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", 
+] + +[[package]] +name = "proc-macro2" +version = "1.0.101" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi 0.11.1+wasi-snapshot-preview1", + "web-sys", + "winapi", +] + +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.1", + "rustls 0.23.31", + "socket2 0.5.10", + "thiserror 2.0.15", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring 0.17.14", + "rustc-hash 2.1.1", + "rustls 0.23.31", + "rustls-pki-types", + "slab", + "thiserror 2.0.15", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "raw-cpuid" +version = "11.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146" +dependencies = [ + "bitflags 2.9.2", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags 2.9.2", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "refinery" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba5d693abf62492c37268512ff35b77655d2e957ca53dab85bf993fe9172d15" +dependencies = [ + "refinery-core", + "refinery-macros", +] + +[[package]] +name = "refinery-core" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a83581f18c1a4c3a6ebd7a174bdc665f17f618d79f7edccb6a0ac67e660b319" +dependencies = [ + "async-trait", + "cfg-if", + "log", + "regex", + "serde", + "siphasher", + "thiserror 1.0.69", + "time", + "tokio", + "tokio-postgres", + "toml 0.8.23", + "url", + "walkdir", +] + +[[package]] +name = "refinery-macros" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72c225407d8e52ef8cf094393781ecda9a99d6544ec28d90a6915751de259264" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "refinery-core", + "regex", + "syn", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.12.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" +dependencies = [ + 
"base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "http-body-util", + "hyper 1.7.0", + "hyper-rustls 0.27.7", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.31", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls 0.26.2", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 1.0.2", +] + +[[package]] +name = "reqwest-middleware" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e" +dependencies = [ + "anyhow", + "async-trait", + "http 1.3.1", + "reqwest", + "serde", + "thiserror 1.0.69", + "tower-service", +] + +[[package]] +name = "reqwest-ratelimit" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b8fff0d8036f23dcad6c27605ca3baa8ae3867438d0a8b34072f40f6c8bf628" +dependencies = [ + "async-trait", + "http 1.3.1", + "reqwest", + "reqwest-middleware", +] + +[[package]] +name = "reqwest-retry" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c73e4195a6bfbcb174b790d9b3407ab90646976c55de58a6515da25d851178" +dependencies = [ + "anyhow", + "async-trait", + "futures", + "getrandom 0.2.16", + "http 1.3.1", + "hyper 1.7.0", + "parking_lot 0.11.2", + "reqwest", + "reqwest-middleware", + "retry-policies", + "thiserror 1.0.69", + "tokio", + "tracing", + "wasm-timer", +] + +[[package]] +name = "reqwest-websocket" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd5f79b25f7f17a62cc9337108974431a66ae5a723ac0d9fe78ac1cce2027720" +dependencies = [ + "async-tungstenite", + "bytes", + "futures-util", + "reqwest", + "serde", + "serde_json", + "thiserror 2.0.15", + "tokio", + "tokio-util", + "tracing", + "tungstenite", + "web-sys", +] + +[[package]] +name = "retry-policies" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5875471e6cab2871bc150ecb8c727db5113c9338cc3354dc5ee3425b6aa40a1c" +dependencies = [ + "rand 0.8.5", +] + +[[package]] +name = "rfc6979" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" +dependencies = [ + "crypto-bigint 0.4.9", + "hmac", + "zeroize", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "ron" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" +dependencies = [ + "base64 0.21.7", + "bitflags 2.9.2", + "serde", + "serde_derive", +] + +[[package]] +name = "rust-ini" +version = "0.21.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e310ef0e1b6eeb79169a1171daf9abcb87a2e17c03bee2c4bb100b55c75409f" +dependencies = [ + "cfg-if", + "ordered-multimap", + "trim-in-place", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags 2.9.2", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.14", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.23.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" +dependencies = [ + "aws-lc-rs", + "once_cell", + "ring 0.17.14", + "rustls-pki-types", + "rustls-webpki 0.103.4", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.3.0", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +dependencies = [ + "aws-lc-rs", + "ring 0.17.14", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + +[[package]] +name = "sec1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "subtle", + "zeroize", +] + +[[package]] +name = "secrecy" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" +dependencies = [ + "serde", + "zeroize", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.9.2", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" +dependencies = [ + "bitflags 2.9.2", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = 
"security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-untagged" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34836a629bcbc6f1afdf0907a744870039b1e14c0561cb26094fa683b158eff3" +dependencies = [ + "erased-serde", + "serde", + "typeid", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.143" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "1.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.15", + "time", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "size" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6709c7b6754dca1311b3c73e79fcce40dd414c782c66d88e8823030093b02b" +dependencies = [ + "serde", +] + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spinning_top" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96d2d1d716fb500937168cc09353ffdc7a012be8475ac7308e1bdf0e3923300" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" 
+dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d76d3f064b981389ecb4b6b7f45a0bf9fdac1d5b9204c7bd6714fecc302850" +dependencies = [ + "thiserror-impl 2.0.15", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d29feb33e986b6ea906bd9c3559a856983f92371b3eaa5e83782a351623de0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "time" +version = "0.3.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = 
"time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "parking_lot 0.12.4", + "pin-project-lite", + "signal-hook-registry", + "slab", + "socket2 0.6.0", + "tokio-macros", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-macros" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-postgres" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c95d533c83082bb6490e0189acaa0bbeef9084e60471b696ca6988cd0541fb0" +dependencies = [ + "async-trait", + "byteorder", + "bytes", + "fallible-iterator", + "futures-channel", + "futures-util", + "log", + "parking_lot 0.12.4", + "percent-encoding", + "phf", + "pin-project-lite", + "postgres-protocol", + "postgres-types", + "rand 0.9.2", + "socket2 0.5.10", + "tokio", + "tokio-util", + "whoami", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls 0.23.31", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio_with_wasm" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dfba9b946459940fb564dcf576631074cdfb0bfe4c962acd4c31f0dca7897e6" +dependencies = [ + "js-sys", + "tokio", + "tokio_with_wasm_proc", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "tokio_with_wasm_proc" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37e04c1865c281139e5ccf633cb9f76ffdaabeebfe53b703984cf82878e2aabb" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit", +] + +[[package]] +name = "toml" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" +dependencies = [ + "serde", + "serde_spanned 1.0.0", + "toml_datetime 0.7.0", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.10.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.2", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-actix-web" +version = "0.7.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5360edd490ec8dee9fedfc6a9fd83ac2f01b3e1996e3261b9ad18a61971fe064" +dependencies = [ + "actix-web", + "mutually_exclusive_features", + "pin-project", + "tracing", + "uuid", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "nu-ansi-term", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "trim-in-place" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +dependencies = [ + "bytes", + "data-encoding", + "http 1.3.1", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.15", + "utf-8", +] + +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + 
+[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" +dependencies = [ + "getrandom 0.3.3", + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasm-timer" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.11.2", + "pin-utils", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmtimer" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8d49b5d6c64e8558d9b1b065014426f35c18de636895d24893dbbd329743446" +dependencies = [ + "futures", + "js-sys", + "parking_lot 0.12.4", + "pin-utils", + "slab", + 
"wasm-bindgen", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.14", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" 
+dependencies = [ + "memchr", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags 2.9.2", +] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + +[[package]] +name = "yaml-rust2" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce2a4ff45552406d02501cea6c18d8a7e50228e7736a872951fe2fe75c91be7" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink", +] + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zstd" +version = 
"0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.15+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000000..6d4d4e22006 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "hulylake" +version = "0.1.0" +edition = "2024" + +[dependencies] +tokio = { version = "1", features = ["full"] } +tracing = "0.1.41" +tracing-subscriber = "0.3.19" +anyhow = "1.0.99" +config = "0.15.14" +serde = "1.0.219" +actix-web = "4.11.0" +actix-cors = "0.7.1" +refinery = { version = "0.8.16", features = ["tokio-postgres"] } +tokio-postgres = "0.7.13" +bb8 = "0.9.0" +bb8-postgres = { version = "0.9.0", features = ["with-uuid-1"] } +md5 = "0.8.0" +size = { version = "0.5.0", features = ["serde"] } +uuid = { version = "1.18", features = ["v4", "serde"] } +hex = "0.4.3" +serde_json = "1.0" +hulyrs = { git = "https://github.com/hcengineering/hulyrs.git", features = [ + "actix", +] } +secrecy = "0.10.3" +tracing-actix-web = "0.7.19" +aws-config = { version = "1.8.5" } +aws-sdk-s3 = "1.103.0" +reqwest = { version = "0.12.15", default-features = false, features = [ + "json", + "rustls-tls", + "stream", +] } +futures-util = "0.3.31" +tokio-stream = "0.1.17" diff --git a/src/config.rs b/src/config.rs new file mode 100644 index 00000000000..de46bc2f6ae --- /dev/null +++ b/src/config.rs @@ -0,0 +1,77 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +use std::{path::Path, sync::LazyLock}; + +use config::FileFormat; +use secrecy::SecretString; +use serde::Deserialize; + +#[derive(Deserialize, Debug)] +pub struct Config { + pub bind_port: u16, + pub bind_host: String, + + pub token_secret: SecretString, + + pub db_connection: String, + pub db_scheme: String, +} + +pub mod hulyrs { + use std::sync::LazyLock; + + pub static CONFIG: LazyLock = LazyLock::new(|| match hulyrs::Config::auto() { + Ok(config) => config, + Err(error) => { + eprintln!("configuration error: {}", error); + std::process::exit(1); + } + }); +} + +pub static CONFIG: LazyLock = LazyLock::new(|| { + const DEFAULTS: &str = r#" + bind_port = 8096 + bind_host = "0.0.0.0" + + token_secret = "secret" + + db_connection = "postgresql://root@huly.local:26257/defaultdb?sslmode=disable" + db_scheme = "hulylake" + "#; + + let mut builder = + config::Config::builder().add_source(config::File::from_str(DEFAULTS, FileFormat::Toml)); + + let path = Path::new("etc/config.toml"); + + if path.exists() { + builder = builder.add_source(config::File::with_name(path.as_os_str().to_str().unwrap())); + } + + let settings = builder + .add_source(config::Environment::with_prefix("HULY")) + .build() + .and_then(|c| c.try_deserialize::()); + + match settings { + Ok(settings) => settings, + Err(error) => { + eprintln!("configuration error: {}", error); + std::process::exit(1); + } + } +}); diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 00000000000..aac9c860afe --- /dev/null +++ b/src/main.rs @@ -0,0 +1,146 @@ +use std::net::SocketAddr; + +use actix_cors::Cors; +use actix_web::{ + App, Error, HttpMessage, HttpServer, + body::MessageBody, + dev::{ServiceRequest, ServiceResponse}, + middleware::{Next, from_fn}, + web::{Data, Path, delete, get, post, put, scope}, +}; +use tracing::*; +use tracing_actix_web::TracingLogger; +use uuid::Uuid; + +use hulyrs::services::jwt::actix::ServiceRequestExt; + +mod config; +mod handlers; +mod postgres; +mod s3; + +use config::CONFIG; + +fn initialize_tracing() { + use tracing_subscriber::{filter::targets::Targets, prelude::*}; + + let filter = Targets::default() + .with_target(env!("CARGO_BIN_NAME"), config::hulyrs::CONFIG.log) + .with_target("actix", Level::WARN); + let format = tracing_subscriber::fmt::layer().compact(); + + tracing_subscriber::registry() + .with(filter) + .with(format) + .init(); +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + initialize_tracing(); + + tracing::info!( + "{}/{} started", + env!("CARGO_PKG_NAME"), + env!("CARGO_PKG_VERSION") + ); + + let s3 = s3::client().await; + let postgres = postgres::pool().await?; + + let bind_to = SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); + + async fn auth( + mut request: ServiceRequest, + next: Next, + ) -> Result, Error> { + let claims = request + .extract_claims(&CONFIG.token_secret) + .map_err(|error| { + warn!(%error, "Unauthorized request"); + error + })?; + + let workspace = Uuid::parse_str(&request.extract::>().await?); + + if claims.is_system() || Ok(claims.workspace.clone()) == workspace.clone().map(Some) { + request.extensions_mut().insert(claims); + next.call(request).await + } else { + warn!( + expected = ?claims.workspace, + actual = ?workspace, + "Unauthorized request, workspace mismatch" + ); + Err(actix_web::error::ErrorUnauthorized("Unauthorized").into()) + } + } + + let server = HttpServer::new(move || { + let cors = Cors::default() + .allow_any_origin() + .allow_any_method() + .allow_any_header() + .supports_credentials() 
+ .max_age(3600); + + const KEY_PATH: &str = "/{key:.*}"; + + App::new() + .app_data(Data::new(postgres.clone())) + .app_data(Data::new(s3.clone())) + .wrap(TracingLogger::default()) + .wrap(cors) + .service( + scope("/api/{workspace}") + .wrap(from_fn(auth)) + .route(KEY_PATH, get().to(handlers::get)) + .route(KEY_PATH, put().to(handlers::put)) + .route(KEY_PATH, post().to(handlers::post)) + .route(KEY_PATH, delete().to(handlers::delete)), + ) + .route("/status", get().to(async || "ok")) + }) + .bind(bind_to)? + .run(); + + info!("http listener on {}", bind_to); + + server.await?; + + Ok(()) +} + +#[tokio::main] +async fn main_() -> anyhow::Result<()> { + use crate::{postgres::Pool, s3}; + use aws_sdk_s3::{presigning::PresigningConfig, primitives::ByteStream}; + + initialize_tracing(); + + let expires_in: std::time::Duration = std::time::Duration::from_secs(600); + let expires_in: aws_sdk_s3::presigning::PresigningConfig = + PresigningConfig::expires_in(expires_in).unwrap(); + + let s3 = s3::client().await; + + let presigned_request = s3 + .put_object() + .set_bucket(Some("hulylake".into())) + .set_key(Some("myobject".into())) + .presigned(expires_in) + .await + .unwrap(); + + let url = presigned_request.uri(); + + debug!(?url, "presigned request"); + + let client = reqwest::Client::new(); + let res = client.put(url).body("hello world").send().await.unwrap(); + + debug!(?res, "response"); + debug!("body: {:?}", res.text().await.unwrap()); + + Ok(()) +} From 6c8797071b961275e4755a21356766a39eeb591e Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Wed, 20 Aug 2025 19:54:48 +0200 Subject: [PATCH 165/636] cleanup & fixes --- src/config.rs | 3 +- src/handlers_http.rs | 61 +++++++++------------- src/handlers_ws.rs | 104 ++++++++++++------------------------- src/main.rs | 120 +++++++++++++++++++++++-------------------- src/ws_hub.rs | 49 +++++++++--------- 5 files changed, 149 insertions(+), 188 deletions(-) diff --git a/src/config.rs b/src/config.rs index 4bd18c305e7..a7a950d9ac7 100644 --- a/src/config.rs +++ b/src/config.rs @@ -13,6 +13,7 @@ // limitations under the License. // +use secrecy::SecretString; use serde::Deserialize; use serde_with::formats::CommaSeparator; use serde_with::{StringWithSeparator, serde_as}; @@ -39,7 +40,7 @@ pub struct Config { pub bind_host: String, pub payload_size_limit: size::Size, - pub token_secret: String, + pub token_secret: SecretString, #[serde_as(as = "StringWithSeparator::")] pub redis_urls: Vec, diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 16ff8ace0e2..fad3cf8d43a 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -13,25 +13,18 @@ // limitations under the License. 
// -use crate::workspace_owner::workspace_check; use anyhow::anyhow; use redis::aio::MultiplexedConnection; -use std::collections::HashMap; -use std::sync::Arc; -use std::time::{SystemTime, UNIX_EPOCH}; -use tokio::sync::Mutex; -use tracing::{error, trace}; -use uuid::Uuid; - -use crate::redis_lib::{ - RedisArray, SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save, -}; +use tracing::*; use actix_web::{ - Error, HttpRequest, HttpResponse, error, - web::{self, Data, Json, Query}, + Error, HttpRequest, HttpResponse, + web::{self}, }; +use crate::redis_lib::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; +use crate::workspace_owner::workspace_check; + pub fn map_handler_error(err: impl std::fmt::Display) -> Error { let msg = err.to_string(); @@ -52,20 +45,17 @@ pub fn map_handler_error(err: impl std::fmt::Display) -> Error { /// list pub async fn list( - req: HttpRequest, path: web::Path, - redis: web::Data>>, + redis: web::Data, ) -> Result { - workspace_check(&req)?; // Check workspace - let key = path.into_inner(); trace!(key, "list request"); async move || -> anyhow::Result { - let mut conn = redis.lock().await; + let mut redis = redis.get_ref().clone(); - let entries = redis_list(&mut *conn, &key).await?; + let entries = redis_list(&mut redis, &key).await?; Ok(HttpResponse::Ok().json(entries)) }() @@ -75,20 +65,17 @@ pub async fn list( /// get pub async fn get( - req: HttpRequest, path: web::Path, - redis: web::Data>>, + redis: web::Data, ) -> Result { - workspace_check(&req)?; // Check workspace - let key = path.into_inner(); // trace!(key, "get request"); async move || -> anyhow::Result { - let mut conn = redis.lock().await; + let mut redis = redis.get_ref().clone(); - Ok(redis_read(&mut *conn, &key) + Ok(redis_read(&mut redis, &key) .await? 
.map(|entry| { HttpResponse::Ok() @@ -101,23 +88,23 @@ pub async fn get( .map_err(map_handler_error) } +#[derive(serde::Deserialize)] +struct MyHeaders { + #[serde(rename = "HULY-TTL")] + ttl: Option, +} + /// put pub async fn put( req: HttpRequest, path: web::Path, body: web::Bytes, - redis: web::Data>>, + redis: web::Data, ) -> Result { - workspace_check(&req)?; // Check workspace - let key: String = path.into_inner(); async move || -> anyhow::Result { - if !req.query_string().is_empty() { - return Err(anyhow!("Query parameters are not allowed")); - } - - let mut conn = redis.lock().await; + let mut redis = redis.get_ref().clone(); // TTL logic let mut ttl = None; @@ -161,7 +148,7 @@ pub async fn put( } } - redis_save(&mut *conn, &key, &body[..], ttl, mode).await?; + redis_save(&mut redis, &key, &body[..], ttl, mode).await?; return Ok(HttpResponse::Ok().body("DONE")); }() .await @@ -172,7 +159,7 @@ pub async fn put( pub async fn delete( req: HttpRequest, path: web::Path, - redis: web::Data>>, + redis: web::Data, ) -> Result { workspace_check(&req)?; // Check workspace @@ -181,7 +168,7 @@ pub async fn delete( trace!(key, "delete request"); async move || -> anyhow::Result { - let mut conn = redis.lock().await; + let mut redis = redis.get_ref().clone(); // MODE logic let mut mode = Some(SaveMode::Upsert); @@ -197,7 +184,7 @@ pub async fn delete( } // `If-Match: ` — delete only if current } - let deleted = redis_delete(&mut *conn, &key, mode).await?; + let deleted = redis_delete(&mut redis, &key, mode).await?; let response = match deleted { true => HttpResponse::NoContent().finish(), diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index bbadf6513e1..33a43be9e88 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -13,32 +13,22 @@ // limitations under the License. 
// -use actix::prelude::*; -use uuid::Uuid; - -use crate::ws_hub::{ - Connect, Disconnect, ServerMessage, SessionId, Subscribe, SubscribeList, Unsubscribe, - UnsubscribeAll, WsHub, -}; - use actix::{ - Actor, ActorContext, ActorFutureExt, AsyncContext, Handler, StreamHandler, WrapFuture, fut, + Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, WrapFuture, fut, prelude::*, }; -use actix_web::{Error, HttpRequest, HttpResponse, web}; +use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; use redis::aio::MultiplexedConnection; -use serde::Deserialize; -use serde_json::{Map, Value, json}; -use std::collections::HashSet; -use std::sync::Arc; -use tokio::sync::Mutex; +use serde::{Deserialize, Serialize}; +use serde_json::{Value, json}; use crate::redis_lib::{ - RedisArray, SaveMode, Ttl, deprecated_symbol, error, redis_delete, redis_list, redis_read, - redis_save, + SaveMode, Ttl, deprecated_symbol, redis_delete, redis_list, redis_read, redis_save, +}; +use crate::ws_hub::{ + Connect, Disconnect, ServerMessage, SessionId, Subscribe, SubscribeList, Unsubscribe, + UnsubscribeAll, WsHub, }; - -use serde::Serialize; #[derive(Serialize, Default)] struct ReturnBase<'a> { @@ -138,10 +128,10 @@ use hulyrs::services::jwt::Claims; /// Session condition #[allow(dead_code)] pub struct WsSession { - pub redis: Arc>, + pub redis: MultiplexedConnection, pub id: SessionId, pub hub: Addr, - pub claims: Option, + pub claims: Claims, } /// Actor External trait: must be in separate impl block @@ -155,7 +145,10 @@ impl Actor for WsSession { let recipient = addr.recipient::(); // println!("WebSocket connected"); self.hub - .send(Connect { addr: recipient }) + .send(Connect { + addr: recipient, + session_id: self.id, + }) .into_actor(self) .map(|res, act, _ctx| match res { Ok(id) => { @@ -217,8 +210,7 @@ impl WsSession { } fn workspace_check_ws(&self, key: &str) -> Result<(), &'static str> { - let claims = self.claims.as_ref().ok_or("Missing auth claims")?; - check_workspace_core(claims, key) + check_workspace_core(&self.claims, key) } fn fut_send( @@ -274,7 +266,7 @@ impl WsSession { return; } - let redis = self.redis.clone(); + let mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { action: "put", @@ -317,9 +309,7 @@ impl WsSession { } } - let mut conn = redis.lock().await; - - redis_save(&mut *conn, &key, &data, real_ttl, mode) + redis_save(&mut redis, &key, &data, real_ttl, mode) .await .map_err(|e| e.to_string())?; @@ -342,7 +332,7 @@ impl WsSession { return; } - let redis = self.redis.clone(); + let mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { action: "delete", @@ -353,8 +343,6 @@ impl WsSession { }); let fut = async move { - let mut conn = redis.lock().await; - // MODE logic let mut mode = Some(SaveMode::Upsert); if let Some(s) = if_match { @@ -368,7 +356,7 @@ impl WsSession { } } - let deleted = redis_delete(&mut *conn, &key, mode) + let deleted = redis_delete(&mut redis, &key, mode) .await .map_err(|e| e.to_string())?; @@ -391,7 +379,7 @@ impl WsSession { return; } - let redis = self.redis.clone(); + let mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { action: "get", @@ -401,9 +389,7 @@ impl WsSession { }); let fut = async move { - let mut conn = redis.lock().await; - - let data_opt = redis_read(&mut *conn, &key) + let data_opt = redis_read(&mut redis, &key) .await .map_err(|e| e.to_string())?; @@ -429,7 +415,7 @@ impl WsSession { return; } - let redis = self.redis.clone(); + let 
mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { action: "list", @@ -439,8 +425,7 @@ impl WsSession { }); let fut = async move { - let mut conn = redis.lock().await; - let data = redis_list(&mut *conn, &key) + let data = redis_list(&mut redis, &key) .await .map_err(|e| e.to_string())?; Ok(json!({ "result": data })) @@ -550,47 +535,24 @@ impl WsSession { // ---- auth -use crate::CONFIG; -use actix_web::{HttpMessage, error}; -use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode}; -use url::form_urlencoded; - pub async fn handler( req: HttpRequest, - stream: web::Payload, - redis: web::Data>>, + payload: web::Payload, + redis: web::Data, hub: web::Data>, ) -> Result { - let token_opt = req.uri().query().and_then(|q| { - form_urlencoded::parse(q.as_bytes()) - .find(|(k, _)| k == "token") - .map(|(_, v)| v.into_owned()) - }); - - let claims = match token_opt { - Some(t) if !t.is_empty() => { - let mut validation = Validation::new(Algorithm::HS256); - validation.required_spec_claims = HashSet::new(); // no: exp/iat/nbf - - let c = decode::( - &t, - &DecodingKey::from_secret(CONFIG.token_secret.as_bytes()), - &validation, - ) - .map(|td| td.claims) - .map_err(|_e| error::ErrorUnauthorized("Invalid token"))?; - - Some(c) - } - _ => None, - }; + let claims = req + .extensions() + .get::() + .expect("Missing claims") + .to_owned(); let session = WsSession { redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), - id: 0, + id: crate::ws_hub::new_session_id(), claims, }; - ws::start(session, &req, stream) + ws::start(session, &req, payload) } diff --git a/src/main.rs b/src/main.rs index b66a84b544a..11c449b3ad1 100644 --- a/src/main.rs +++ b/src/main.rs @@ -13,47 +13,31 @@ // limitations under the License. // -#![allow(unused_imports)] - -use std::pin::Pin; - +use actix::prelude::*; use actix_cors::Cors; - use actix_web::{ - App, Error, HttpMessage, HttpRequest, HttpResponse, HttpServer, + App, Error, HttpMessage, HttpResponse, HttpServer, body::MessageBody, dev::{ServiceRequest, ServiceResponse}, - error::ErrorBadRequest, - http::header::{AUTHORIZATION, HeaderValue}, middleware::{self, Next}, - web::{self, Data, PayloadConfig}, + web::{self, Path, Query}, }; - -use url::form_urlencoded; - -use actix_web_actors::ws; - -use tracing::info; +use hulyrs::services::jwt::{Claims, actix::ServiceRequestExt}; +use secrecy::ExposeSecret; +use tracing::*; mod config; mod handlers_http; mod handlers_ws; -use crate::handlers_ws::{WsSession, handler}; - +mod redis_events; mod redis_lib; -use crate::redis_lib::redis_connect; - mod workspace_owner; - -mod redis_events; mod ws_hub; -use crate::ws_hub::{ServerMessage, TestGetSubs, WsHub}; -use actix::prelude::*; use config::CONFIG; - -use hulyrs::services::jwt::actix::ServiceRequestExt; -use secrecy::SecretString; +use redis_lib::redis_connect; +use uuid::Uuid; +use ws_hub::{TestGetSubs, WsHub}; fn initialize_tracing(level: tracing::Level) { use tracing_subscriber::{filter::targets::Targets, prelude::*}; @@ -69,33 +53,56 @@ fn initialize_tracing(level: tracing::Level) { .init(); } -async fn interceptor( +async fn extract_claims( mut request: ServiceRequest, next: Next, ) -> Result, Error> { - // Authorization/token patch - if request.headers().get(AUTHORIZATION).is_none() { - if let Some(qs) = request.uri().query() { - if let Some(token) = form_urlencoded::parse(qs.as_bytes()) - .find(|(k, _)| k == "token") - .map(|(_, v)| v.into_owned()) - { - let auth_value = HeaderValue::from_str(&format!("Bearer {}", token)) - .map_err(|_| 
ErrorBadRequest("Malformed token"))?; - request.headers_mut().insert(AUTHORIZATION, auth_value); - } - } + #[derive(serde::Deserialize)] + struct QueryString { + token: Option, } - let secret = SecretString::new(CONFIG.token_secret.clone().into_boxed_str()); - let claims = request.extract_claims(&secret)?; + let query = request.extract::>().await?.into_inner(); - request.extensions_mut().insert(claims.to_owned()); + let claims = if let Some(token) = query.token { + Claims::from_token(token, CONFIG.token_secret.expose_secret()).unwrap() + } else { + request.extract_claims(&CONFIG.token_secret)? + }; - next.call(request).await + let workspace = Uuid::parse_str(&request.extract::>().await?); + + if claims.is_system() || Ok(claims.workspace.clone()) == workspace.clone().map(Some) { + request.extensions_mut().insert(claims); + next.call(request).await + } else { + warn!( + expected = ?claims.workspace, + actual = ?workspace, + "Unauthorized request, workspace mismatch" + ); + Err(actix_web::error::ErrorUnauthorized("Unauthorized").into()) + } } -use crate::redis_events::RedisEventAction::*; // Set, Del, Unlink, Expired, Other +async fn check_workspace( + mut request: ServiceRequest, + next: Next, +) -> Result, Error> { + let workspace = Uuid::parse_str(&request.extract::>().await?); + let claims = request.extensions().get::().cloned().unwrap(); + + if claims.is_system() || Ok(claims.workspace.clone()) == workspace.clone().map(Some) { + next.call(request).await + } else { + warn!( + expected = ?claims.workspace, + actual = ?workspace, + "Unauthorized request, workspace mismatch" + ); + Err(actix_web::error::ErrorUnauthorized("Unauthorized").into()) + } +} pub async fn start_redis_logger(redis_url: String, hub: Addr) { let client = match redis::Client::open(redis_url) { @@ -119,14 +126,13 @@ pub async fn start_redis_logger(redis_url: String, hub: Addr) { } */ - hub.do_send(ev.clone()); + hub.do_send(ev); } } Err(e) => eprintln!("[redis] pubsub init error: {e}"), } } -// #[tokio::main] #[actix_web::main] async fn main() -> anyhow::Result<()> { initialize_tracing(tracing::Level::DEBUG); @@ -134,13 +140,10 @@ async fn main() -> anyhow::Result<()> { tracing::info!("{}/{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); let redis = redis_connect().await?; - let redis = std::sync::Arc::new(tokio::sync::Mutex::new(redis)); - let redis_data = web::Data::new(redis.clone()); // starting Hub let hub = WsHub::new(redis.clone()).start(); - let hub_data = web::Data::new(hub.clone()); // starting Logger tokio::spawn(start_redis_logger( "redis://127.0.0.1/".to_string(), @@ -148,7 +151,6 @@ async fn main() -> anyhow::Result<()> { )); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); - let payload_config = PayloadConfig::new(CONFIG.payload_size_limit.bytes() as usize); let server = HttpServer::new(move || { let cors = Cors::default() @@ -159,20 +161,27 @@ async fn main() -> anyhow::Result<()> { .max_age(3600); App::new() - .app_data(payload_config.clone()) - .app_data(redis_data.clone()) - .app_data(hub_data.clone()) + .app_data(web::Data::new(redis.clone())) + .app_data(web::Data::new(hub.clone())) .wrap(middleware::Logger::default()) .wrap(cors) .service( - web::scope("/api") - .wrap(middleware::from_fn(interceptor)) + web::scope("/api/{workspace}") + .wrap(middleware::from_fn(check_workspace)) + .wrap(middleware::from_fn(extract_claims)) .route("/{key:.+/}", web::get().to(handlers_http::list)) .route("/{key:.+}", web::get().to(handlers_http::get)) 
.route("/{key:.+}", web::put().to(handlers_http::put)) .route("/{key:.+}", web::delete().to(handlers_http::delete)), ) + .route( + "/ws", + web::get() + .to(handlers_ws::handler) + .wrap(middleware::from_fn(extract_claims)), + ) // WebSocket .route("/status", web::get().to(async || "ok")) + // .route( "/stat2", web::get().to(|hub: web::Data>| async move { @@ -191,7 +200,6 @@ async fn main() -> anyhow::Result<()> { } }), ) - .route("/ws", web::get().to(handlers_ws::handler)) // WebSocket }) .bind(socket)? .run(); diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 21b3f71d449..2652508bf90 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -13,7 +13,16 @@ // limitations under the License. // -use std::collections::{HashMap, HashSet}; +use std::{ + collections::{HashMap, HashSet}, + sync::atomic::AtomicU64, +}; + +use actix::prelude::*; +use redis::aio::MultiplexedConnection; +use serde::Serialize; + +use crate::redis_events::{RedisEvent, RedisEventAction}; fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { @@ -26,9 +35,6 @@ fn subscription_matches(sub_key: &str, key: &str) -> bool { false } -use crate::redis_events::{RedisEvent, RedisEventAction}; -use serde::Serialize; - #[derive(Message, Clone, Serialize, Debug)] #[rtype(result = "()")] pub struct ServerMessage { @@ -45,19 +51,19 @@ pub struct Count; pub type SessionId = u64; +static NEXT_ID: AtomicU64 = AtomicU64::new(1); + pub struct WsHub { sessions: HashMap>, subs: HashMap>, // Subscriptions array: key -> {id, id, id ...} - next_id: SessionId, - redis: Arc>, + redis: MultiplexedConnection, } impl WsHub { - pub fn new(redis: Arc>) -> Self { + pub fn new(redis: MultiplexedConnection) -> Self { Self { sessions: HashMap::new(), subs: HashMap::new(), - next_id: 1, redis, } } @@ -71,19 +77,24 @@ impl Actor for WsHub { #[derive(Message)] #[rtype(result = "SessionId")] pub struct Connect { + pub session_id: SessionId, pub addr: Recipient, } +pub fn new_session_id() -> SessionId { + NEXT_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst) +} + impl Handler for WsHub { type Result = SessionId; fn handle(&mut self, msg: Connect, _ctx: &mut Context) -> Self::Result { // LEVENT 1 - let id = self.next_id; - self.next_id = self.next_id.wrapping_add(1); - self.sessions.insert(id, msg.addr); + //let id = self.next_id; + //self.next_id = self.next_id.wrapping_add(1); + self.sessions.insert(msg.session_id, msg.addr); // tracing::info!("session connected: id={id} (total={})", self.sessions.len()); - id + msg.session_id } } @@ -137,7 +148,7 @@ impl Handler for WsHub { None } }) - .collect::>(); + .collect::>(); MessageResult(list) } @@ -233,13 +244,6 @@ impl WsHub { } } -use actix::ActorFutureExt; -use actix::fut::ready; -use actix::prelude::*; -use redis::aio::MultiplexedConnection; -use std::sync::Arc; -use tokio::sync::Mutex; - impl Handler for WsHub { type Result = ResponseActFuture; @@ -254,17 +258,16 @@ impl Handler for WsHub { .filter_map(|sid| self.sessions.get(&sid).cloned()) .collect(); - let redis = self.redis.clone(); + let mut redis = self.redis.clone(); let event = msg.clone(); let need_get = matches!(msg.action, RedisEventAction::Set); Box::pin( async move { let value = if need_get { - let mut conn = redis.lock().await; match redis::cmd("GET") .arg(&event.key) - .query_async::>(&mut *conn) + .query_async::>(&mut redis) .await { Ok(v) => v, From f391abad769e9d0adb4c65283afe400c4c529be2 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Thu, 21 Aug 2025 16:15:52 +0200 Subject: [PATCH 166/636] http api - fix 
extract_claims, misc corrections Signed-off-by: Alexey Aristov --- src/handlers_http.rs | 28 +++++++++++++++++++--------- src/main.rs | 21 +++++---------------- 2 files changed, 24 insertions(+), 25 deletions(-) diff --git a/src/handlers_http.rs b/src/handlers_http.rs index fad3cf8d43a..7e66e55cf99 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -15,12 +15,14 @@ use anyhow::anyhow; use redis::aio::MultiplexedConnection; +use serde::{Deserialize, de}; use tracing::*; use actix_web::{ Error, HttpRequest, HttpResponse, web::{self}, }; +use uuid::Uuid; use crate::redis_lib::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; use crate::workspace_owner::workspace_check; @@ -43,12 +45,18 @@ pub fn map_handler_error(err: impl std::fmt::Display) -> Error { actix_web::error::ErrorInternalServerError("internal error") } +#[derive(Deserialize, Debug)] +pub struct PathParams { + //workspace: Uuid, + key: String, +} + /// list pub async fn list( - path: web::Path, + path: web::Path, redis: web::Data, ) -> Result { - let key = path.into_inner(); + let key = path.into_inner().key; trace!(key, "list request"); @@ -65,12 +73,12 @@ pub async fn list( /// get pub async fn get( - path: web::Path, + path: web::Path, redis: web::Data, ) -> Result { - let key = path.into_inner(); + let key = path.into_inner().key; - // trace!(key, "get request"); + trace!(key, "get request"); async move || -> anyhow::Result { let mut redis = redis.get_ref().clone(); @@ -97,11 +105,13 @@ struct MyHeaders { /// put pub async fn put( req: HttpRequest, - path: web::Path, + path: web::Path, body: web::Bytes, redis: web::Data, ) -> Result { - let key: String = path.into_inner(); + let key: String = path.into_inner().key; + + trace!(key, "put request"); async move || -> anyhow::Result { let mut redis = redis.get_ref().clone(); @@ -158,12 +168,12 @@ pub async fn put( /// delete pub async fn delete( req: HttpRequest, - path: web::Path, + path: web::Path, redis: web::Data, ) -> Result { workspace_check(&req)?; // Check workspace - let key: String = path.into_inner(); + let key: String = path.into_inner().key; trace!(key, "delete request"); diff --git a/src/main.rs b/src/main.rs index 11c449b3ad1..45a9aa6be2f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -20,7 +20,7 @@ use actix_web::{ body::MessageBody, dev::{ServiceRequest, ServiceResponse}, middleware::{self, Next}, - web::{self, Path, Query}, + web::{self, Path, Query, trace}, }; use hulyrs::services::jwt::{Claims, actix::ServiceRequestExt}; use secrecy::ExposeSecret; @@ -44,7 +44,7 @@ fn initialize_tracing(level: tracing::Level) { let filter = Targets::default() .with_target(env!("CARGO_BIN_NAME"), level) - .with_target("actix", level); + .with_target("actix", tracing::Level::WARN); let format = tracing_subscriber::fmt::layer().compact(); tracing_subscriber::registry() @@ -70,19 +70,8 @@ async fn extract_claims( request.extract_claims(&CONFIG.token_secret)? 
}; - let workspace = Uuid::parse_str(&request.extract::>().await?); - - if claims.is_system() || Ok(claims.workspace.clone()) == workspace.clone().map(Some) { - request.extensions_mut().insert(claims); - next.call(request).await - } else { - warn!( - expected = ?claims.workspace, - actual = ?workspace, - "Unauthorized request, workspace mismatch" - ); - Err(actix_web::error::ErrorUnauthorized("Unauthorized").into()) - } + request.extensions_mut().insert(claims); + next.call(request).await } async fn check_workspace( @@ -135,7 +124,7 @@ pub async fn start_redis_logger(redis_url: String, hub: Addr) { #[actix_web::main] async fn main() -> anyhow::Result<()> { - initialize_tracing(tracing::Level::DEBUG); + initialize_tracing(tracing::Level::TRACE); tracing::info!("{}/{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); From 5bfe6f895e277b1b4ac8458ccef2786c2914be5b Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Thu, 21 Aug 2025 16:17:25 +0200 Subject: [PATCH 167/636] streamline redis handling Signed-off-by: Alexey Aristov --- src/handlers_http.rs | 2 +- src/handlers_ws.rs | 2 +- src/main.rs | 47 ++--------- src/{redis_lib.rs => redis.rs} | 108 ++++++++++++++++++++++--- src/redis_events.rs | 144 --------------------------------- src/ws_hub.rs | 21 ++++- 6 files changed, 123 insertions(+), 201 deletions(-) rename src/{redis_lib.rs => redis.rs} (79%) delete mode 100644 src/redis_events.rs diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 7e66e55cf99..709cd4787a4 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -24,7 +24,7 @@ use actix_web::{ }; use uuid::Uuid; -use crate::redis_lib::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; +use crate::redis::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; use crate::workspace_owner::workspace_check; pub fn map_handler_error(err: impl std::fmt::Display) -> Error { diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 33a43be9e88..dd1e74f69c8 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -22,7 +22,7 @@ use redis::aio::MultiplexedConnection; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; -use crate::redis_lib::{ +use crate::redis::{ SaveMode, Ttl, deprecated_symbol, redis_delete, redis_list, redis_read, redis_save, }; use crate::ws_hub::{ diff --git a/src/main.rs b/src/main.rs index 45a9aa6be2f..88d1455a62e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -20,7 +20,7 @@ use actix_web::{ body::MessageBody, dev::{ServiceRequest, ServiceResponse}, middleware::{self, Next}, - web::{self, Path, Query, trace}, + web::{self, Path, Query}, }; use hulyrs::services::jwt::{Claims, actix::ServiceRequestExt}; use secrecy::ExposeSecret; @@ -29,13 +29,11 @@ use tracing::*; mod config; mod handlers_http; mod handlers_ws; -mod redis_events; -mod redis_lib; +mod redis; mod workspace_owner; mod ws_hub; use config::CONFIG; -use redis_lib::redis_connect; use uuid::Uuid; use ws_hub::{TestGetSubs, WsHub}; @@ -93,51 +91,20 @@ async fn check_workspace( } } -pub async fn start_redis_logger(redis_url: String, hub: Addr) { - let client = match redis::Client::open(redis_url) { - Ok(c) => c, - Err(e) => { - eprintln!("[redis] bad url: {e}"); - return; - } - }; - - match crate::redis_events::make_pubsub_with_kea(&client).await { - Ok(pubsub) => { - let (mut rx, _handle) = crate::redis_events::start_keyevent_listener(pubsub); - while let Some(ev) = rx.recv().await { - /* - match ev.action { - Set => println!("[redis] db{} SET {}", ev.db, ev.key), - Del | Unlink => println!("[redis] 
db{} DEL {}", ev.db, ev.key), - Expired => println!("[redis] db{} EXPIRED {}", ev.db, ev.key), - Other(ref k) => println!("[redis] db{} {} {}", ev.db, k, ev.key), - } - */ - - hub.do_send(ev); - } - } - Err(e) => eprintln!("[redis] pubsub init error: {e}"), - } -} - #[actix_web::main] async fn main() -> anyhow::Result<()> { initialize_tracing(tracing::Level::TRACE); tracing::info!("{}/{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); - let redis = redis_connect().await?; + let redis_client = redis::client().await?; + let redis_connection = redis_client.get_multiplexed_async_connection().await?; // starting Hub - let hub = WsHub::new(redis.clone()).start(); + let hub = WsHub::new(redis_connection.clone()).start(); // starting Logger - tokio::spawn(start_redis_logger( - "redis://127.0.0.1/".to_string(), - hub.clone(), - )); + tokio::spawn(redis::receiver(redis_client)); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); @@ -150,7 +117,7 @@ async fn main() -> anyhow::Result<()> { .max_age(3600); App::new() - .app_data(web::Data::new(redis.clone())) + .app_data(web::Data::new(redis_connection.clone())) .app_data(web::Data::new(hub.clone())) .wrap(middleware::Logger::default()) .wrap(cors) diff --git a/src/redis_lib.rs b/src/redis.rs similarity index 79% rename from src/redis_lib.rs rename to src/redis.rs index 415d24bf776..1c076783e2b 100644 --- a/src/redis_lib.rs +++ b/src/redis.rs @@ -13,10 +13,15 @@ // limitations under the License. // -use crate::config::{CONFIG, RedisMode}; - use std::time::{SystemTime, UNIX_EPOCH}; +use ::redis::Msg; +use tokio_stream::StreamExt; +use tracing::*; + +use crate::config::{CONFIG, RedisMode}; +use crate::ws_hub::{RedisEvent, RedisEventAction}; + #[derive(serde::Serialize)] pub enum Ttl { Sec(usize), // EX @@ -32,12 +37,10 @@ pub enum SaveMode { } use redis::{ - AsyncCommands, Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, RedisResult, - ToRedisArgs, aio::MultiplexedConnection, + Client, ConnectionInfo, ProtocolVersion, RedisConnectionInfo, RedisResult, ToRedisArgs, + aio::MultiplexedConnection, }; -use url::Url; - -use serde::{Deserialize, Serialize}; +use serde::Serialize; #[derive(Debug, Serialize)] pub struct RedisArray { @@ -314,8 +317,86 @@ pub async fn redis_delete( Ok(deleted > 0) } +impl TryFrom for RedisEvent { + type Error = anyhow::Error; + + fn try_from(msg: Msg) -> Result { + let channel = match msg.get_channel::() { + Ok(c) => c, + Err(e) => { + anyhow::bail!("[redis_events] bad channel: {e}"); + } + }; + let payload = match msg.get_payload::() { + Ok(p) => p, + Err(e) => { + anyhow::bail!("[redis_events] bad payload: {e}"); + } + }; + + // "__keyevent@0__:set" → event="set", db=0; payload = key + let event = channel.rsplit(':').next().unwrap_or(""); + let action = match event { + "set" => RedisEventAction::Set, + "del" => RedisEventAction::Del, + "unlink" => RedisEventAction::Unlink, + "expired" => RedisEventAction::Expired, + other => RedisEventAction::Other(other.to_string()), + }; + + let db = channel + .find('@') + .and_then(|at| channel.get(at + 1..)) + .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) + .and_then(|s| s.parse::().ok()) + .unwrap_or(0); + + Ok(RedisEvent { + db, + key: payload.clone(), + action, + }) + } +} + +pub async fn receiver(redis_client: Client) -> anyhow::Result<()> { + let mut redis = redis_client.get_multiplexed_async_connection().await?; + let mut pubsub = redis_client.get_async_pubsub().await?; + + let _: String = ::redis::cmd("CONFIG") 
+ .arg("SET") + .arg("notify-keyspace-events") + .arg("E$gx") + .query_async(&mut redis) + .await?; + + for pattern in [ + "__keyevent@*__:set", + "__keyevent@*__:del", + "__keyevent@*__:unlink", + "__keyevent@*__:expired", + ] { + pubsub.psubscribe(pattern).await?; + } + + let mut messages = pubsub.on_message(); + + while let Some(message) = messages.next().await { + match RedisEvent::try_from(message) { + Ok(ev) => { + debug!("redis event: {ev:#?}"); + } + Err(e) => { + warn!("invalid redis message: {e}"); + } + } + } + + Ok(()) +} + /// redis_connect() -pub async fn redis_connect() -> anyhow::Result { +pub async fn client() -> anyhow::Result { let default_port = match CONFIG.redis_mode { RedisMode::Sentinel => 6379, RedisMode::Direct => 6380, @@ -332,7 +413,7 @@ pub async fn redis_connect() -> anyhow::Result { }) .collect::>(); - let conn = if CONFIG.redis_mode == RedisMode::Sentinel { + if CONFIG.redis_mode == RedisMode::Sentinel { use redis::sentinel::{SentinelClientBuilder, SentinelServerType}; let mut sentinel = SentinelClientBuilder::new( @@ -347,7 +428,9 @@ pub async fn redis_connect() -> anyhow::Result { .set_client_to_sentinel_password(CONFIG.redis_password.clone()) .build()?; - sentinel.get_async_connection().await? + let client = sentinel.async_get_client().await?; + + Ok(client) } else { let single = urls .first() @@ -366,8 +449,7 @@ pub async fn redis_connect() -> anyhow::Result { }; let client = Client::open(connection_info)?; - client.get_multiplexed_async_connection().await? - }; - Ok(conn) + Ok(client) + } } diff --git a/src/redis_events.rs b/src/redis_events.rs deleted file mode 100644 index 2d3c2150dfa..00000000000 --- a/src/redis_events.rs +++ /dev/null @@ -1,144 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -use tokio::sync::mpsc; -use tokio::task::JoinHandle; -use tokio_stream::StreamExt; - -use serde::Serialize; - -use redis::{ - self, AsyncCommands, Client, RedisResult, - aio::{ConnectionLike, PubSub}, -}; - -#[derive(Debug, Clone, Serialize)] -pub enum RedisEventAction { - Set, // Insert or Update - Del, // Delete - Unlink, // async Delete - Expired, // TTL Delete - Other(String), -} - -use actix::Message; - -#[derive(Debug, Clone, Serialize, Message)] -#[rtype(result = "()")] -pub struct RedisEvent { - pub db: u32, - pub key: String, - // pub value: String, - pub action: RedisEventAction, -} - -/// Notifications: keyevent + generic + expired = "Egx" (no keyspace) -async fn try_enable_keyspace_notifications(conn: &mut C) -> RedisResult<()> -where - C: ConnectionLike + Send, -{ - let _: String = redis::cmd("CONFIG") - .arg("SET") - .arg("notify-keyspace-events") - .arg("E$gx") - .query_async(conn) - .await?; - Ok(()) -} - -/// Create async-connect, try to enable KEA=Egx, open PubSub-connect -pub async fn make_pubsub_with_kea(client: &Client) -> RedisResult { - let mut conn = client.get_multiplexed_async_connection().await?; - let _ = try_enable_keyspace_notifications(&mut conn).await; - drop(conn); - - let pubsub = client.get_async_pubsub().await?; - Ok(pubsub) -} - -/// Listener keyevents -pub fn start_keyevent_listener( - mut pubsub: PubSub, -) -> (mpsc::UnboundedReceiver, JoinHandle<()>) { - let (tx, rx) = mpsc::unbounded_channel(); - - let handle = tokio::spawn(async move { - // Subscribe to events - if let Err(e) = pubsub.psubscribe("__keyevent@*__:set").await { - eprintln!("[redis_events] psubscribe error (set): {e}"); - return; - } - if let Err(e) = pubsub.psubscribe("__keyevent@*__:del").await { - eprintln!("[redis_events] psubscribe error (del): {e}"); - return; - } - if let Err(e) = pubsub.psubscribe("__keyevent@*__:unlink").await { - eprintln!("[redis_events] psubscribe error (unlink): {e}"); - return; - } - if let Err(e) = pubsub.psubscribe("__keyevent@*__:expired").await { - eprintln!("[redis_events] psubscribe error (expired): {e}"); - return; - } - - let mut stream = pubsub.on_message(); - - while let Some(msg) = stream.next().await { - let channel = match msg.get_channel::() { - Ok(c) => c, - Err(e) => { - eprintln!("[redis_events] bad channel: {e}"); - continue; - } - }; - let payload = match msg.get_payload::() { - Ok(p) => p, - Err(e) => { - eprintln!("[redis_events] bad payload: {e}"); - continue; - } - }; - - // "__keyevent@0__:set" → event="set", db=0; payload = key - let event = channel.rsplit(':').next().unwrap_or(""); - let action = match event { - "set" => RedisEventAction::Set, - "del" => RedisEventAction::Del, - "unlink" => RedisEventAction::Unlink, - "expired" => RedisEventAction::Expired, - other => RedisEventAction::Other(other.to_string()), - }; - - let db = channel - .find('@') - .and_then(|at| channel.get(at + 1..)) - .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) - .and_then(|s| s.parse::().ok()) - .unwrap_or(0); - - let ev = RedisEvent { - db, - key: payload.clone(), - action, - }; - - if tx.send(ev).is_err() { - break; - } // closed - } - }); - - (rx, handle) -} diff --git a/src/ws_hub.rs b/src/ws_hub.rs index 2652508bf90..84a96fc2151 100644 --- a/src/ws_hub.rs +++ b/src/ws_hub.rs @@ -22,8 +22,6 @@ use actix::prelude::*; use redis::aio::MultiplexedConnection; use serde::Serialize; -use crate::redis_events::{RedisEvent, RedisEventAction}; - fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { return true; @@ 
-244,6 +242,25 @@ impl WsHub { } } +#[derive(Debug, Clone, Serialize)] +pub enum RedisEventAction { + Set, // Insert or Update + Del, // Delete + Unlink, // async Delete + Expired, // TTL Delete + Other(String), +} + +use actix::Message; + +#[derive(Debug, Clone, Serialize, Message)] +#[rtype(result = "()")] +pub struct RedisEvent { + pub db: u32, + pub key: String, + pub action: RedisEventAction, +} + impl Handler for WsHub { type Result = ResponseActFuture; From 36135208583100293059c62410cb26bfca634226 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Thu, 21 Aug 2025 18:55:43 +0200 Subject: [PATCH 168/636] update deps Signed-off-by: Alexey Aristov --- Cargo.lock | 322 ++++++++++++++++++++++++++++++++++++----------------- 1 file changed, 222 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c806ce9bab4..d31124600e7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -11,7 +11,7 @@ dependencies = [ "actix-macros", "actix-rt", "actix_derive", - "bitflags 2.9.1", + "bitflags 2.9.2", "bytes", "crossbeam-channel", "futures-core", @@ -33,7 +33,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "bytes", "futures-core", "futures-sink", @@ -71,7 +71,7 @@ dependencies = [ "actix-tls", "actix-utils", "base64 0.22.1", - "bitflags 2.9.1", + "bitflags 2.9.2", "brotli", "bytes", "bytestring", @@ -337,9 +337,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" [[package]] name = "arraydeque" @@ -349,9 +349,9 @@ checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", @@ -360,9 +360,9 @@ dependencies = [ [[package]] name = "async-tungstenite" -version = "0.29.1" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef0f7efedeac57d9b26170f72965ecfd31473ca52ca7a64e925b0b6f5f079886" +checksum = "ee88b4c88ac8c9ea446ad43498955750a4bbe64c4392f21ccfe5d952865e318f" dependencies = [ "atomic-waker", "futures-core", @@ -398,7 +398,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -442,9 +442,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29" dependencies = [ "serde", ] @@ -460,9 +460,9 @@ dependencies = [ [[package]] name = "brotli" -version = "8.0.1" +version = "8.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" +checksum = 
"4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -508,9 +508,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.31" +version = "1.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3a42d84bb6b69d3a8b3eaacf0d88f179e1929695e1ad012b6cf64d9caaa5fd2" +checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f" dependencies = [ "jobserver", "libc", @@ -519,9 +519,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "cfg_aliases" @@ -560,9 +560,9 @@ dependencies = [ [[package]] name = "config" -version = "0.15.13" +version = "0.15.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b1eb4fb07bc7f012422df02766c7bd5971effb894f573865642f06fa3265440" +checksum = "aa4092bf3922a966e2bd74640b80f36c73eaa7251a4fd0fbcda1f8a4de401352" dependencies = [ "async-trait", "convert_case", @@ -571,6 +571,7 @@ dependencies = [ "ron", "rust-ini", "serde", + "serde-untagged", "serde_json", "toml 0.9.5", "winnow", @@ -841,6 +842,16 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +[[package]] +name = "erased-serde" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +dependencies = [ + "serde", + "typeid", +] + [[package]] name = "fallible-iterator" version = "0.2.0" @@ -871,9 +882,9 @@ checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -1018,9 +1029,9 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "governor" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbe789d04bf14543f03c4b60cd494148aa79438c8440ae7d81a7778147745c3" +checksum = "444405bbb1a762387aa22dd569429533b54a1d8759d35d3b64cb39b0293eaa19" dependencies = [ "cfg-if", "dashmap", @@ -1028,7 +1039,7 @@ dependencies = [ "futures-timer", "futures-util", "getrandom 0.3.3", - "hashbrown 0.15.4", + "hashbrown 0.15.5", "nonzero_ext", "parking_lot 0.12.4", "portable-atomic", @@ -1072,9 +1083,9 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" [[package]] name = "hashbrown" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", @@ -1087,7 +1098,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.15.4", + 
"hashbrown 0.15.5", ] [[package]] @@ -1202,7 +1213,7 @@ dependencies = [ [[package]] name = "hulyrs" version = "0.1.0" -source = "git+https://github.com/hcengineering/hulyrs.git#08a583761d532cbfaa188983f4e10c77bcba4c71" +source = "git+https://github.com/hcengineering/hulyrs.git#d03108c28f554d258a410a2d9c02f48c3826e435" dependencies = [ "actix-web", "bytes", @@ -1226,7 +1237,7 @@ dependencies = [ "serde_json", "serde_with", "strum", - "thiserror 2.0.12", + "thiserror 2.0.16", "tokio", "tokio-stream", "tokio_with_wasm", @@ -1238,18 +1249,20 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "http 1.3.1", "http-body", "httparse", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -1414,9 +1427,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1457,7 +1470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", - "hashbrown 0.15.4", + "hashbrown 0.15.5", "serde", ] @@ -1479,7 +1492,7 @@ version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "cfg-if", "libc", ] @@ -1566,9 +1579,20 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.174" +version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "libredox" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" +dependencies = [ + "bitflags 2.9.2", + "libc", + "redox_syscall 0.5.17", +] [[package]] name = "litemap" @@ -1790,7 +1814,7 @@ dependencies = [ "libc", "redox_syscall 0.5.17", "smallvec", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -1811,9 +1835,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" @@ -1822,7 +1846,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" dependencies = [ "memchr", - "thiserror 2.0.12", + "thiserror 2.0.16", "ucd-trie", ] @@ -1956,9 +1980,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.95" 
+version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -1992,7 +2016,7 @@ dependencies = [ "rustc-hash", "rustls 0.23.31", "socket2 0.5.10", - "thiserror 2.0.12", + "thiserror 2.0.16", "tokio", "tracing", "web-time", @@ -2013,7 +2037,7 @@ dependencies = [ "rustls 0.23.31", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.16", "tinyvec", "tracing", "web-time", @@ -2113,14 +2137,14 @@ version = "11.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df7ab838ed27997ba19a4664507e6f82b41fe6e20be42929332156e5e85146" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", ] [[package]] name = "redis" -version = "0.32.4" +version = "0.32.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f66bf4cac9733a23bcdf1e0e01effbaaad208567beba68be8f67e5f4af3ee1" +checksum = "7cd3650deebc68526b304898b192fa4102a4ef0b9ada24da096559cb60e0eef8" dependencies = [ "bytes", "cfg-if", @@ -2154,7 +2178,7 @@ version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", ] [[package]] @@ -2259,9 +2283,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.22" +version = "0.12.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", @@ -2346,9 +2370,9 @@ dependencies = [ [[package]] name = "reqwest-websocket" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f91a811daaa8b54faeaec9d507a336897a3d243834a4965254a17d39da8b5c9" +checksum = "cd5f79b25f7f17a62cc9337108974431a66ae5a723ac0d9fe78ac1cce2027720" dependencies = [ "async-tungstenite", "bytes", @@ -2356,7 +2380,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.16", "tokio", "tokio-util", "tracing", @@ -2409,19 +2433,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" dependencies = [ "base64 0.21.7", - "bitflags 2.9.1", + "bitflags 2.9.2", "serde", "serde_derive", ] [[package]] name = "rust-ini" -version = "0.21.2" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791" +checksum = "4e310ef0e1b6eeb79169a1171daf9abcb87a2e17c03bee2c4bb100b55c75409f" dependencies = [ "cfg-if", "ordered-multimap", + "trim-in-place", ] [[package]] @@ -2485,9 +2510,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "ryu" @@ -2563,6 +2588,17 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-untagged" +version = "0.1.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "34836a629bcbc6f1afdf0907a744870039b1e14c0561cb26094fa683b158eff3" +dependencies = [ + "erased-serde", + "serde", + "typeid", +] + [[package]] name = "serde_derive" version = "1.0.219" @@ -2576,9 +2612,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.143" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" dependencies = [ "itoa", "memchr", @@ -2708,7 +2744,7 @@ checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", "num-traits", - "thiserror 2.0.12", + "thiserror 2.0.16", "time", ] @@ -2729,9 +2765,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" @@ -2826,9 +2862,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.104" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -2866,11 +2902,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.16", ] [[package]] @@ -2886,9 +2922,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", @@ -2956,9 +2992,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -3190,7 +3226,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "bytes", "futures-util", "http 1.3.1", @@ -3272,6 +3308,12 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "trim-in-place" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" + [[package]] name = "try-lock" version = "0.2.5" @@ -3280,9 +3322,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" -version = "0.26.2" +version = "0.27.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" dependencies = [ "bytes", "data-encoding", @@ -3291,10 +3333,16 @@ dependencies = [ "log", "rand 0.9.2", "sha1", - "thiserror 2.0.12", + "thiserror 2.0.16", "utf-8", ] +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + [[package]] name = "typenum" version = "1.18.0" @@ -3384,9 +3432,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" dependencies = [ "getrandom 0.3.3", "js-sys", @@ -3596,11 +3644,11 @@ dependencies = [ [[package]] name = "whoami" -version = "1.6.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" dependencies = [ - "redox_syscall 0.5.17", + "libredox", "wasite", "web-sys", ] @@ -3623,11 +3671,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -3701,7 +3749,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -3710,7 +3758,16 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.3", ] [[package]] @@ -3719,14 +3776,31 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", ] [[package]] @@ -3735,48 +3809,96 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + [[package]] name = "winnow" version = "0.7.12" @@ -3792,7 +3914,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", ] [[package]] @@ -3896,9 +4018,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.3" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdbb9122ea75b11bf96e7492afb723e8a7fbe12c67417aa95e7e3d18144d37cd" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" dependencies = [ "yoke", "zerofrom", From c39aab015ce3a88bf38d755e4fc985ce36f8f3ee Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 22 Aug 2025 01:56:19 +0300 Subject: [PATCH 169/636] Make Hulypulse Great Again --- scripts/claims_exp.json | 8 ++++++++ src/config.rs | 3 --- src/config/default.toml | 1 - src/handlers_http.rs | 5 +++-- src/main.rs | 2 +- src/redis.rs | 11 +++++++---- src/workspace_owner.rs | 2 +- 7 files changed, 20 insertions(+), 12 deletions(-) create mode 100644 scripts/claims_exp.json diff --git a/scripts/claims_exp.json b/scripts/claims_exp.json new file mode 100644 index 00000000000..dcc90fe4b81 --- /dev/null +++ b/scripts/claims_exp.json @@ -0,0 +1,8 @@ +{ + "extra": { + "service": "account" + }, + "account": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee", + "workspace": "00000000-0000-0000-0000-000000000001", + "exp": 1924236800 +} diff --git a/src/config.rs b/src/config.rs index a7a950d9ac7..dfe5037cbaa 100644 --- a/src/config.rs +++ b/src/config.rs @@ -24,8 +24,6 @@ use std::{path::Path, sync::LazyLock}; use config::FileFormat; -use uuid::Uuid; - #[derive(Deserialize, Debug, PartialEq)] #[serde(rename_all = "lowercase")] pub enum RedisMode { @@ -38,7 +36,6 @@ pub enum RedisMode { pub struct Config { pub bind_port: u16, pub bind_host: String, - pub payload_size_limit: size::Size, pub token_secret: SecretString, diff --git a/src/config/default.toml b/src/config/default.toml index 00eb9c805b8..a570fff0c79 100644 --- a/src/config/default.toml +++ b/src/config/default.toml @@ -1,6 +1,5 @@ bind_port = 8095 bind_host = "0.0.0.0" -payload_size_limit = "2mb" token_secret = "secret" diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 709cd4787a4..55c1f04338c 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -15,14 +15,13 @@ use anyhow::anyhow; use redis::aio::MultiplexedConnection; -use serde::{Deserialize, de}; +use serde::{Deserialize}; use tracing::*; use actix_web::{ Error, HttpRequest, HttpResponse, web::{self}, }; -use uuid::Uuid; use crate::redis::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; use crate::workspace_owner::workspace_check; @@ -96,11 +95,13 @@ pub async fn get( .map_err(map_handler_error) } +/* #[derive(serde::Deserialize)] struct MyHeaders { #[serde(rename = "HULY-TTL")] ttl: Option, } +*/ /// put pub async fn put( diff --git a/src/main.rs b/src/main.rs index 88d1455a62e..5807a3169aa 100644 --- a/src/main.rs +++ b/src/main.rs @@ -104,7 +104,7 @@ async fn main() -> anyhow::Result<()> { let hub = WsHub::new(redis_connection.clone()).start(); // starting Logger - tokio::spawn(redis::receiver(redis_client)); + tokio::spawn(redis::receiver(redis_client, hub.clone())); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); 
diff --git a/src/redis.rs b/src/redis.rs index 1c076783e2b..4bd6ca252ba 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -15,12 +15,13 @@ use std::time::{SystemTime, UNIX_EPOCH}; +use actix::Addr; use ::redis::Msg; use tokio_stream::StreamExt; use tracing::*; use crate::config::{CONFIG, RedisMode}; -use crate::ws_hub::{RedisEvent, RedisEventAction}; +use crate::ws_hub::{RedisEvent, RedisEventAction, WsHub}; #[derive(serde::Serialize)] pub enum Ttl { @@ -135,7 +136,6 @@ pub async fn redis_list( } /// redis_read(&connection,key) -#[allow(dead_code)] pub async fn redis_read( conn: &mut MultiplexedConnection, key: &str, @@ -181,7 +181,6 @@ pub async fn redis_read( /// w/o TTL (CONFIG.max_ttl) /// redis_save(&mut conn, "key", "val", None, None).await?; -#[allow(dead_code)] pub async fn redis_save( conn: &mut MultiplexedConnection, key: &str, @@ -359,7 +358,7 @@ impl TryFrom for RedisEvent { } } -pub async fn receiver(redis_client: Client) -> anyhow::Result<()> { +pub async fn receiver(redis_client: Client, hub: Addr) -> anyhow::Result<()> { let mut redis = redis_client.get_multiplexed_async_connection().await?; let mut pubsub = redis_client.get_async_pubsub().await?; @@ -384,7 +383,11 @@ pub async fn receiver(redis_client: Client) -> anyhow::Result<()> { while let Some(message) = messages.next().await { match RedisEvent::try_from(message) { Ok(ev) => { + debug!("redis event: {ev:#?}"); + + hub.do_send(ev); + } Err(e) => { warn!("invalid redis message: {e}"); diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 3f369c9cdc1..2f36abbe17d 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -13,7 +13,7 @@ // limitations under the License. // -use actix_web::{Error, HttpMessage, HttpRequest}; +use actix_web::{HttpMessage, HttpRequest}; use hulyrs::services::jwt::Claims; use uuid::Uuid; From 468b140e8d67e6dd05e2cdec69cdb76ea326cb3c Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 22 Aug 2025 04:02:17 +0300 Subject: [PATCH 170/636] W/o Actor --- Cargo.lock | 2 +- Cargo.toml | 2 +- scripts/TEST_HTTP_API.sh | 9 +- scripts/pulse_lib.sh | 1 + src/handlers_http.rs | 11 -- src/handlers_ws.rs | 120 ++++++--------- src/hub_service.rs | 266 +++++++++++++++++++++++++++++++++ src/main.rs | 62 ++++---- src/redis.rs | 13 +- src/workspace_owner.rs | 22 +-- src/ws_hub.rs | 309 --------------------------------------- 11 files changed, 352 insertions(+), 465 deletions(-) create mode 100644 src/hub_service.rs delete mode 100644 src/ws_hub.rs diff --git a/Cargo.lock b/Cargo.lock index d31124600e7..a79b93081ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1181,7 +1181,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hulypulse" -version = "0.1.0" +version = "0.1.7" dependencies = [ "actix", "actix-cors", diff --git a/Cargo.toml b/Cargo.toml index 3ef483388b5..8bb882c4bc7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hulypulse" -version = "0.1.0" +version = "0.1.7" edition = "2024" [dependencies] diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index b04a1fa74fb..9ce7ee896dd 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -10,6 +10,11 @@ TOKEN=$(./token.sh claims.json) ZP="00000000-0000-0000-0000-000000000001/TESTS" + delete "0000000/TESTS" + delete ${ZP} + put ${ZP} "Value_1" "HULY-TTL: 2" + delete ${ZP} + echo "--------- authorization_test ----------" TOKEN="" @@ -20,7 +25,6 @@ TOKEN=$(./token.sh claims_wrong_ws.json) put "00000000-0000-0000-0000-000000000001/TESTS" 
"Value_1" "HULY-TTL: 2" TOKEN=$(./token.sh claims.json) put "00000000-0000-0000-0000-000000000002/TESTS" "Value_1" "HULY-TTL: 2" -exit @@ -36,9 +40,6 @@ echo "--------- if-match ----------" get "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/" - -exit - echo "--------- Deprecated symbols ----------" put "00000000-0000-0000-0000-000000000001/'TESTS" "Value_1" "HULY-TTL: 2" diff --git a/scripts/pulse_lib.sh b/scripts/pulse_lib.sh index 022d8cb87cc..e1512a3169c 100755 --- a/scripts/pulse_lib.sh +++ b/scripts/pulse_lib.sh @@ -72,5 +72,6 @@ delete() { local tmpfile tmpfile=$(mktemp) curl -i -s -X DELETE "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" +# curl -v -i -s -X DELETE "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" api ${tmpfile} } diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 55c1f04338c..dd31bed486e 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -24,7 +24,6 @@ use actix_web::{ }; use crate::redis::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; -use crate::workspace_owner::workspace_check; pub fn map_handler_error(err: impl std::fmt::Display) -> Error { let msg = err.to_string(); @@ -46,7 +45,6 @@ pub fn map_handler_error(err: impl std::fmt::Display) -> Error { #[derive(Deserialize, Debug)] pub struct PathParams { - //workspace: Uuid, key: String, } @@ -95,14 +93,6 @@ pub async fn get( .map_err(map_handler_error) } -/* -#[derive(serde::Deserialize)] -struct MyHeaders { - #[serde(rename = "HULY-TTL")] - ttl: Option, -} -*/ - /// put pub async fn put( req: HttpRequest, @@ -172,7 +162,6 @@ pub async fn delete( path: web::Path, redis: web::Data, ) -> Result { - workspace_check(&req)?; // Check workspace let key: String = path.into_inner().key; diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index dd1e74f69c8..154a838880d 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -13,8 +13,10 @@ // limitations under the License. 
// +// https://github.com/hcengineering/hulypulse/ + use actix::{ - Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, WrapFuture, fut, prelude::*, + Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut, }; use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; @@ -25,10 +27,10 @@ use serde_json::{Value, json}; use crate::redis::{ SaveMode, Ttl, deprecated_symbol, redis_delete, redis_list, redis_read, redis_save, }; -use crate::ws_hub::{ - Connect, Disconnect, ServerMessage, SessionId, Subscribe, SubscribeList, Unsubscribe, - UnsubscribeAll, WsHub, -}; + +use crate::hub_service::{HubServiceHandle, ServerMessage, SessionId, new_session_id}; +use crate::workspace_owner::check_workspace_core; + #[derive(Serialize, Default)] struct ReturnBase<'a> { @@ -130,7 +132,7 @@ use hulyrs::services::jwt::Claims; pub struct WsSession { pub redis: MultiplexedConnection, pub id: SessionId, - pub hub: Addr, + pub hub: HubServiceHandle, pub claims: Claims, } @@ -139,35 +141,16 @@ impl Actor for WsSession { type Context = ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { - // ask ID from Hub let addr = ctx.address(); - let recipient = addr.recipient::(); - // println!("WebSocket connected"); - self.hub - .send(Connect { - addr: recipient, - session_id: self.id, - }) - .into_actor(self) - .map(|res, act, _ctx| match res { - Ok(id) => { - act.id = id; - tracing::info!("WebSocket connected: {id}"); - } - Err(e) => { - tracing::error!("WebSocket failed connect to hub: {e}"); - _ctx.stop(); - } - }) - .wait(ctx); // waiting for ID + + self.hub.connect(self.id, recipient); + tracing::info!("WebSocket connected: {}", self.id); } fn stopped(&mut self, _ctx: &mut Self::Context) { if self.id != 0 { - self.hub.do_send(Disconnect { - session_id: self.id, - }); + self.hub.disconnect(self.id); } tracing::info!("WebSocket disconnected: {:?}", &self.id); } @@ -175,14 +158,13 @@ impl Actor for WsSession { impl actix::Handler for WsSession { type Result = (); - fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - let json = - serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); + let json = serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); ctx.text(json); } } + /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { @@ -201,8 +183,6 @@ impl StreamHandler> for WsSession { } } -use crate::workspace_owner::check_workspace_core; - /// All logic in one impl impl WsSession { fn ws_error(&self, ctx: &mut ws::WebsocketContext, msg: &str) { @@ -221,7 +201,6 @@ impl WsSession { ) { ctx.wait( fut::wrap_future(fut).map(move |res, _actor: &mut Self, ctx| { - // if !base.is_object() { base = json!({ "base": base }); } let obj = base.as_object_mut().unwrap(); match res { Ok(Value::Object(extra)) => { @@ -251,14 +230,7 @@ impl WsSession { if_none_match, correlation, } => { - tracing::info!( - "PUT {} = {} (expires_at: {:?}) (ttl: {:?}) correlation: {:?}", - &key, - &data, - &expires_at, - &ttl, - &correlation - ); + tracing::info!("PUT {} = {}", &key, &data); // (expires_at: {:?}) (ttl: {:?}) correlation: {:?} &expires_at, &ttl, &correlation // Check workspace if let Err(e) = self.workspace_check_ws(&key) { @@ -324,7 +296,7 @@ impl WsSession { correlation, if_match, } => { - tracing::info!("DELETE {} correlation:{:?}", &key, &correlation); + 
tracing::info!("DELETE {}", &key); // correlation:{:?} , &correlation // Check workspace if let Err(e) = self.workspace_check_ws(&key) { @@ -332,6 +304,8 @@ impl WsSession { return; } + tracing::info!("DELETE!!! {}", &key); + let mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { @@ -371,7 +345,7 @@ impl WsSession { } WsCommand::Get { key, correlation } => { - tracing::info!("GET {} correlation:{:?}", &key, &correlation); + tracing::info!("GET {}", &key); // correlation:{:?} , &correlation // Check workspace if let Err(e) = self.workspace_check_ws(&key) { @@ -407,7 +381,7 @@ impl WsSession { } WsCommand::List { key, correlation } => { - tracing::info!("LIST {:?} correlation: {:?}", &key, &correlation); + tracing::info!("LIST {:?}", &key); // correlation: {:?} , &correlation // Check workspace if let Err(e) = self.workspace_check_ws(&key) { @@ -434,9 +408,10 @@ impl WsSession { self.fut_send(ctx, fut, base); } + WsCommand::Sub { key, correlation } => { // LEVENT 3 - tracing::info!("SUB {} correlation: {:?}", &key, &correlation); + tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation // Check workspace if let Err(e) = self.workspace_check_ws(&key) { @@ -456,19 +431,16 @@ impl WsSession { if deprecated_symbol(&key) { map.insert("error".into(), json!("Deprecated symbol in key")); } else { - self.hub.do_send(Subscribe { - session_id: self.id, - key: key.clone(), - }); + self.hub.subscribe(self.id, key.clone()); map.insert("result".into(), json!("OK")); } - ctx.text(obj.to_string()); } + WsCommand::Unsub { key, correlation } => { // LEVENT 4 - tracing::info!("UNSUB {} correlation: {:?}", &key, &correlation); + tracing::info!("UNSUB {}", &key); // correlation: {:?} , &correlation let mut obj = serde_json::json!(ReturnBase { action: "unsub", @@ -480,9 +452,7 @@ impl WsSession { let map = obj.as_object_mut().unwrap(); if key == "*" { - self.hub.do_send(UnsubscribeAll { - session_id: self.id, - }); + self.hub.unsubscribe_all(self.id); map.insert("result".into(), json!("OK")); } else { // Check workspace @@ -490,26 +460,20 @@ impl WsSession { self.ws_error(ctx, e); return; } - if deprecated_symbol(&key) { map.insert("error".into(), json!("Deprecated symbol in key")); } else { + self.hub.unsubscribe(self.id, key.clone()); map.insert("result".into(), json!("OK")); - self.hub.do_send(Unsubscribe { - session_id: self.id, - key: key.clone(), - }); } - }; - + } ctx.text(obj.to_string()); } - WsCommand::Sublist { correlation } => { - tracing::info!("SUBLIST correlation: {:?}", &correlation); + WsCommand::Sublist { correlation } => { + tracing::info!("SUBLIST"); // correlation: {:?} , &correlation // w/o Check workspace! 
- let base = serde_json::json!(ReturnBase { action: "list", correlation: correlation.as_deref(), @@ -519,27 +483,27 @@ impl WsSession { let hub = self.hub.clone(); let id = self.id; - let fut = async move { - let keys = hub - .send(SubscribeList { session_id: id }) - .await - .unwrap_or_default(); - Ok(json!({ "result": keys })) - }; + self.fut_send( + ctx, + async move { + let keys = hub.subscribe_list(id).await; + Ok(json!({ "result": keys })) + }, + base, + ); + } - self.fut_send(ctx, fut, base); - } // End of commands + // End of commands } } } -// ---- auth pub async fn handler( req: HttpRequest, payload: web::Payload, redis: web::Data, - hub: web::Data>, + hub: web::Data, // <-- было Addr ) -> Result { let claims = req .extensions() @@ -550,7 +514,7 @@ pub async fn handler( let session = WsSession { redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), - id: crate::ws_hub::new_session_id(), + id: new_session_id(), claims, }; diff --git a/src/hub_service.rs b/src/hub_service.rs new file mode 100644 index 00000000000..2e79a468c4d --- /dev/null +++ b/src/hub_service.rs @@ -0,0 +1,266 @@ +use std::collections::{HashMap, HashSet}; +use std::sync::atomic::{AtomicU64, Ordering}; + +use actix::prelude::*; + +use redis::aio::MultiplexedConnection; +use serde::Serialize; +use tokio::sync::{mpsc, oneshot}; + +fn subscription_matches(sub_key: &str, key: &str) -> bool { + if sub_key == key { + return true; + } + if sub_key.ends_with('/') && key.starts_with(sub_key) { + let rest = &key[sub_key.len()..]; + return !rest.contains('$'); + } + false +} + +#[derive(Clone, Serialize, Debug, Message)] +#[rtype(result = "()")] +pub struct ServerMessage { + #[serde(flatten)] + pub event: RedisEvent, + #[serde(skip_serializing_if = "Option::is_none")] + pub value: Option, +} + +// ==== ID ==== + +pub type SessionId = u64; +static NEXT_ID: AtomicU64 = AtomicU64::new(1); +pub fn new_session_id() -> SessionId { + NEXT_ID.fetch_add(1, Ordering::SeqCst) +} + +// ==== Redis events ==== + +#[derive(Debug, Clone, Serialize)] +pub enum RedisEventAction { + Set, + Del, + Unlink, + Expired, + Other(String), +} + +#[derive(Debug, Clone, Serialize)] +pub struct RedisEvent { + pub db: u32, + pub key: String, + pub action: RedisEventAction, +} + +// ==== Commands for worker ==== + +enum Command { + Connect { + session_id: SessionId, + addr: Recipient, + }, + Disconnect { + session_id: SessionId, + }, + Subscribe { + session_id: SessionId, + key: String, + }, + Unsubscribe { + session_id: SessionId, + key: String, + }, + UnsubscribeAll { + session_id: SessionId, + }, + SubscribeList { + session_id: SessionId, + reply: oneshot::Sender>, + }, + Count { + reply: oneshot::Sender, + }, + DumpSubs { + reply: oneshot::Sender>>, + }, + RedisEvent(RedisEvent), +} + +// ==== Handle ==== + +#[derive(Clone)] +pub struct HubServiceHandle { + tx: mpsc::Sender, +} + +impl HubServiceHandle { + pub fn start(redis: MultiplexedConnection) -> Self { + let (tx, mut rx) = mpsc::channel::(1024); + + // Владелец состояния живёт внутри задачи + tokio::spawn(async move { + let mut sessions: HashMap> = HashMap::new(); + let mut subs: HashMap> = HashMap::new(); + let mut redis_conn = redis; + + fn subscribers_for( + subs: &HashMap>, + key: &str, + ) -> HashSet { + let mut out = HashSet::::new(); + for (sub_key, set) in subs.iter() { + if subscription_matches(sub_key, key) { + out.extend(set.iter().copied()); + } + } + out + } + + while let Some(cmd) = rx.recv().await { + match cmd { + + Command::Connect { session_id, addr } => { + 
sessions.insert(session_id, addr); + } + + Command::Disconnect { session_id } => { + subs.retain(|_, ids| { + ids.remove(&session_id); + !ids.is_empty() + }); + sessions.remove(&session_id); + } + + Command::Subscribe { session_id, key } => { + subs.entry(key).or_default().insert(session_id); + } + + Command::Unsubscribe { session_id, key } => { + if let Some(set) = subs.get_mut(&key) { + set.remove(&session_id); + if set.is_empty() { + subs.remove(&key); + } + } + } + + Command::UnsubscribeAll { session_id } => { + subs.retain(|_, ids| { + ids.remove(&session_id); + !ids.is_empty() + }); + } + + Command::SubscribeList { session_id, reply } => { + let list = subs + .iter() + .filter_map(|(key, ids)| { + if ids.contains(&session_id) { + Some(key.clone()) + } else { + None + } + }) + .collect::>(); + let _ = reply.send(list); + } + + Command::Count { reply } => { + let _ = reply.send(sessions.len()); + } + + Command::DumpSubs { reply } => { + let snapshot = subs + .iter() + .map(|(k, set)| (k.clone(), set.iter().copied().collect::>())) + .collect::>(); + let _ = reply.send(snapshot); + } + + Command::RedisEvent(event) => { + let targets = subscribers_for(&subs, &event.key); + if targets.is_empty() { + continue; + } + let recipients: Vec> = targets + .into_iter() + .filter_map(|sid| sessions.get(&sid).cloned()) + .collect(); + + // Inside: waiting GET + let need_get = matches!(event.action, RedisEventAction::Set); + let mut value: Option = None; + if need_get { + match redis::cmd("GET") + .arg(&event.key) + .query_async::>(&mut redis_conn) + .await + { + Ok(v) => value = v, + Err(e) => { + tracing::warn!("redis GET {} failed: {}", &event.key, e); + } + } + } + + let payload = ServerMessage { + event, + value, + }; + + for rcpt in recipients { + let _ = rcpt.do_send(payload.clone()); + } + } + } + } + }); + + Self { tx } + } + + // ---- API, ничего не выполняет параллельно внутри worker'а ---- + + pub fn connect(&self, session_id: SessionId, addr: Recipient) { + let _ = self.tx.try_send(Command::Connect { session_id, addr }); + } + + pub fn disconnect(&self, session_id: SessionId) { + let _ = self.tx.try_send(Command::Disconnect { session_id }); + } + + pub fn subscribe(&self, session_id: SessionId, key: String) { + let _ = self.tx.try_send(Command::Subscribe { session_id, key }); + } + + pub fn unsubscribe(&self, session_id: SessionId, key: String) { + let _ = self.tx.try_send(Command::Unsubscribe { session_id, key }); + } + + pub fn unsubscribe_all(&self, session_id: SessionId) { + let _ = self.tx.try_send(Command::UnsubscribeAll { session_id }); + } + + pub async fn subscribe_list(&self, session_id: SessionId) -> Vec { + let (tx, rx) = oneshot::channel(); + let _ = self.tx.send(Command::SubscribeList { session_id, reply: tx }).await; + rx.await.unwrap_or_default() + } + + pub async fn count(&self) -> usize { + let (tx, rx) = oneshot::channel(); + let _ = self.tx.send(Command::Count { reply: tx }).await; + rx.await.unwrap_or_default() + } + + pub async fn dump_subs(&self) -> std::collections::HashMap> { + let (tx, rx) = oneshot::channel(); + let _ = self.tx.send(Command::DumpSubs { reply: tx }).await; + rx.await.unwrap_or_default() + } + + pub fn push_event(&self, ev: RedisEvent) { + let _ = self.tx.try_send(Command::RedisEvent(ev)); + } +} diff --git a/src/main.rs b/src/main.rs index 5807a3169aa..7067a6358cd 100644 --- a/src/main.rs +++ b/src/main.rs @@ -13,29 +13,26 @@ // limitations under the License. 
// -use actix::prelude::*; use actix_cors::Cors; use actix_web::{ - App, Error, HttpMessage, HttpResponse, HttpServer, - body::MessageBody, - dev::{ServiceRequest, ServiceResponse}, - middleware::{self, Next}, - web::{self, Path, Query}, + body::MessageBody, dev::{ServiceRequest, ServiceResponse}, middleware::{self, Next}, web::{self, Path, Query}, App, Error, HttpMessage, HttpResponse, HttpServer }; use hulyrs::services::jwt::{Claims, actix::ServiceRequestExt}; use secrecy::ExposeSecret; +use serde_json::json; use tracing::*; +use uuid::Uuid; mod config; mod handlers_http; mod handlers_ws; mod redis; mod workspace_owner; -mod ws_hub; + +mod hub_service; +use hub_service::HubServiceHandle; use config::CONFIG; -use uuid::Uuid; -use ws_hub::{TestGetSubs, WsHub}; fn initialize_tracing(level: tracing::Level) { use tracing_subscriber::{filter::targets::Targets, prelude::*}; @@ -100,14 +97,22 @@ async fn main() -> anyhow::Result<()> { let redis_client = redis::client().await?; let redis_connection = redis_client.get_multiplexed_async_connection().await?; - // starting Hub - let hub = WsHub::new(redis_connection.clone()).start(); + // starting HubService + let hub = HubServiceHandle::start(redis_connection.clone()); // starting Logger tokio::spawn(redis::receiver(redis_client, hub.clone())); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); + let url = format!("http://{}:{}", &CONFIG.bind_host, &CONFIG.bind_port); + tracing::info!("Server running at {}", &url); + tracing::info!("HTTP API: {}/api", &url); + tracing::info!("WebSocket API: {}/ws", &url); + tracing::info!("Status: {}/status", &url); + tracing::info!("Stats: {}/stat", &url); + tracing::info!("Subscriptions: {}/subs", &url); + let server = HttpServer::new(move || { let cors = Cors::default() .allow_any_origin() @@ -130,32 +135,21 @@ async fn main() -> anyhow::Result<()> { .route("/{key:.+}", web::put().to(handlers_http::put)) .route("/{key:.+}", web::delete().to(handlers_http::delete)), ) - .route( - "/ws", - web::get() - .to(handlers_ws::handler) + .route("/ws", web::get().to(handlers_ws::handler) .wrap(middleware::from_fn(extract_claims)), ) // WebSocket .route("/status", web::get().to(async || "ok")) - // - .route( - "/stat2", - web::get().to(|hub: web::Data>| async move { - let count = hub.send(crate::ws_hub::Count).await.unwrap_or(0); - HttpResponse::Ok().json(serde_json::json!({ "connections": count })) - }), - ) - .route( - "/subs", - web::get().to(|hub: web::Data>| async move { - match hub.send(TestGetSubs).await { - Ok(subs) => HttpResponse::Ok().json(subs), - Err(_) => { - HttpResponse::InternalServerError().body("Failed to get subscriptions") - } - } - }), - ) + + .route("/stat", web::get().to(|hub: web::Data| async move { + let count = hub.count().await; + Ok::<_, actix_web::Error>(HttpResponse::Ok().json(json!({ "connections": count }))) + })) + + .route("/subs", web::get().to(|hub: web::Data| async move { + let subs = hub.dump_subs().await; + Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) + })) + }) .bind(socket)? 
.run(); diff --git a/src/redis.rs b/src/redis.rs index 4bd6ca252ba..f7f9ff32952 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -15,13 +15,11 @@ use std::time::{SystemTime, UNIX_EPOCH}; -use actix::Addr; use ::redis::Msg; use tokio_stream::StreamExt; use tracing::*; -use crate::config::{CONFIG, RedisMode}; -use crate::ws_hub::{RedisEvent, RedisEventAction, WsHub}; +use crate::{config::{RedisMode, CONFIG}, hub_service::{HubServiceHandle, RedisEvent, RedisEventAction}}; #[derive(serde::Serialize)] pub enum Ttl { @@ -358,7 +356,8 @@ impl TryFrom for RedisEvent { } } -pub async fn receiver(redis_client: Client, hub: Addr) -> anyhow::Result<()> { + +pub async fn receiver(redis_client: Client, hub: HubServiceHandle) -> anyhow::Result<()> { let mut redis = redis_client.get_multiplexed_async_connection().await?; let mut pubsub = redis_client.get_async_pubsub().await?; @@ -383,10 +382,10 @@ pub async fn receiver(redis_client: Client, hub: Addr) -> anyhow::Result< while let Some(message) = messages.next().await { match RedisEvent::try_from(message) { Ok(ev) => { - - debug!("redis event: {ev:#?}"); - hub.do_send(ev); + // debug!("redis event: {ev:#?}"); + + hub.push_event(ev); } Err(e) => { diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 2f36abbe17d..148b06740cc 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -13,12 +13,12 @@ // limitations under the License. // -use actix_web::{HttpMessage, HttpRequest}; use hulyrs::services::jwt::Claims; use uuid::Uuid; // common checker pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static str> { + if claims.is_system() { return Ok(()); } @@ -41,22 +41,4 @@ pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static s } Ok(()) -} - -/// HTTP API -pub fn workspace_check(req: &HttpRequest) -> Result<(), actix_web::Error> { - let key = req - .match_info() - .get("key") - .ok_or_else(|| actix_web::error::ErrorBadRequest("Missing key in URL path"))?; - let claims = req - .extensions() - .get::() - .cloned() - .ok_or_else(|| actix_web::error::ErrorUnauthorized("Missing auth claims"))?; - - match check_workspace_core(&claims, key) { - Ok(()) => Ok(()), - Err(msg) => Err(actix_web::error::ErrorUnauthorized(msg)), - } -} +} \ No newline at end of file diff --git a/src/ws_hub.rs b/src/ws_hub.rs deleted file mode 100644 index 84a96fc2151..00000000000 --- a/src/ws_hub.rs +++ /dev/null @@ -1,309 +0,0 @@ -// -// Copyright © 2025 Hardcore Engineering Inc. -// -// Licensed under the Eclipse Public License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. You may -// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -use std::{ - collections::{HashMap, HashSet}, - sync::atomic::AtomicU64, -}; - -use actix::prelude::*; -use redis::aio::MultiplexedConnection; -use serde::Serialize; - -fn subscription_matches(sub_key: &str, key: &str) -> bool { - if sub_key == key { - return true; - } - if sub_key.ends_with('/') && key.starts_with(sub_key) { - let rest = &key[sub_key.len()..]; - return !rest.contains('$'); - } - false -} - -#[derive(Message, Clone, Serialize, Debug)] -#[rtype(result = "()")] -pub struct ServerMessage { - #[serde(flatten)] - pub event: RedisEvent, - #[serde(skip_serializing_if = "Option::is_none")] - pub value: Option, -} - -/// Count of active sessions -#[derive(Message)] -#[rtype(result = "usize")] -pub struct Count; - -pub type SessionId = u64; - -static NEXT_ID: AtomicU64 = AtomicU64::new(1); - -pub struct WsHub { - sessions: HashMap>, - subs: HashMap>, // Subscriptions array: key -> {id, id, id ...} - redis: MultiplexedConnection, -} - -impl WsHub { - pub fn new(redis: MultiplexedConnection) -> Self { - Self { - sessions: HashMap::new(), - subs: HashMap::new(), - redis, - } - } -} - -impl Actor for WsHub { - type Context = Context; -} - -/// Connect -#[derive(Message)] -#[rtype(result = "SessionId")] -pub struct Connect { - pub session_id: SessionId, - pub addr: Recipient, -} - -pub fn new_session_id() -> SessionId { - NEXT_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst) -} - -impl Handler for WsHub { - type Result = SessionId; - - fn handle(&mut self, msg: Connect, _ctx: &mut Context) -> Self::Result { - // LEVENT 1 - //let id = self.next_id; - //self.next_id = self.next_id.wrapping_add(1); - self.sessions.insert(msg.session_id, msg.addr); - // tracing::info!("session connected: id={id} (total={})", self.sessions.len()); - msg.session_id - } -} - -/// Disconnect -#[derive(Message)] -#[rtype(result = "()")] -pub struct Disconnect { - pub session_id: SessionId, -} - -impl Handler for WsHub { - type Result = (); - - fn handle(&mut self, msg: Disconnect, _ctx: &mut Context) { - // LEVENT 2 - - // Delete all subscribes - self.subs.retain(|_key, session_ids| { - session_ids.remove(&msg.session_id); - !session_ids.is_empty() - }); - - let existed = self.sessions.remove(&msg.session_id).is_some(); - if existed { - // tracing::info!("session disconnected: id={} (total={})", msg.session_id, self.sessions.len()); - } else { - tracing::warn!("disconnect for unknown id={}", msg.session_id); - } - } -} - -/// SubscribeList -#[derive(Message)] -#[rtype(result = "Vec")] -pub struct SubscribeList { - pub session_id: SessionId, -} - -impl Handler for WsHub { - type Result = MessageResult; - - fn handle(&mut self, msg: SubscribeList, _ctx: &mut Context) -> Self::Result { - // Collect all keys with my session_id - let list = self - .subs - .iter() - .filter_map(|(key, sessions)| { - if sessions.contains(&msg.session_id) { - Some(key.clone()) - } else { - None - } - }) - .collect::>(); - - MessageResult(list) - } -} - -/// Count of IDs -impl Handler for WsHub { - type Result = usize; - - fn handle(&mut self, _: Count, _: &mut Context) -> Self::Result { - self.sessions.len() - } -} - -/// Subscribe -#[derive(Message)] -#[rtype(result = "()")] -pub struct Subscribe { - pub session_id: SessionId, - pub key: String, -} - -impl Handler for WsHub { - type Result = (); - fn handle(&mut self, msg: Subscribe, _ctx: &mut Context) { - self.subs.entry(msg.key).or_default().insert(msg.session_id); - } -} - -/// Unsubscribe -#[derive(Message)] -#[rtype(result = "()")] -pub struct Unsubscribe { - pub 
session_id: SessionId, - pub key: String, -} - -impl Handler for WsHub { - type Result = (); - fn handle(&mut self, msg: Unsubscribe, _ctx: &mut Context) { - if let Some(set) = self.subs.get_mut(&msg.key) { - set.remove(&msg.session_id); - if set.is_empty() { - self.subs.remove(&msg.key); - } - } - } -} - -#[derive(Message)] -#[rtype(result = "()")] -pub struct UnsubscribeAll { - pub session_id: SessionId, -} - -impl Handler for WsHub { - type Result = (); - fn handle(&mut self, msg: UnsubscribeAll, _ctx: &mut Context) { - self.subs.retain(|_key, session_ids| { - session_ids.remove(&msg.session_id); - !session_ids.is_empty() - }); - } -} - -#[derive(Message)] -#[rtype(result = "HashMap>")] -pub struct TestGetSubs; - -impl Handler for WsHub { - type Result = MessageResult; - - fn handle(&mut self, _msg: TestGetSubs, _ctx: &mut Context) -> Self::Result { - let s: HashMap> = self - .subs - .iter() - .map(|(key, ids)| (key.clone(), ids.iter().copied().collect())) - .collect(); - MessageResult(s) - } -} - -// List of subscribers -impl WsHub { - fn subscribers_for(&self, key: &str) -> HashSet { - let mut out = HashSet::new(); - for (sub_key, set) in &self.subs { - if subscription_matches(sub_key, key) { - out.extend(set.iter().copied()); - } - } - out - } -} - -#[derive(Debug, Clone, Serialize)] -pub enum RedisEventAction { - Set, // Insert or Update - Del, // Delete - Unlink, // async Delete - Expired, // TTL Delete - Other(String), -} - -use actix::Message; - -#[derive(Debug, Clone, Serialize, Message)] -#[rtype(result = "()")] -pub struct RedisEvent { - pub db: u32, - pub key: String, - pub action: RedisEventAction, -} - -impl Handler for WsHub { - type Result = ResponseActFuture; - - fn handle(&mut self, msg: RedisEvent, _ctx: &mut Context) -> Self::Result { - let targets = self.subscribers_for(&msg.key); - if targets.is_empty() { - return Box::pin(actix::fut::ready(()).into_actor(self)); - } - - let recipients: Vec> = targets - .into_iter() - .filter_map(|sid| self.sessions.get(&sid).cloned()) - .collect(); - - let mut redis = self.redis.clone(); - let event = msg.clone(); - let need_get = matches!(msg.action, RedisEventAction::Set); - - Box::pin( - async move { - let value = if need_get { - match redis::cmd("GET") - .arg(&event.key) - .query_async::>(&mut redis) - .await - { - Ok(v) => v, - Err(e) => { - tracing::warn!("redis GET {} failed: {}", &event.key, e); - None - } - } - } else { - None - }; - - let payload = ServerMessage { event, value }; - - for rcpt in recipients { - let _ = rcpt.do_send(payload.clone()); - } - } - .into_actor(self), - ) - } -} From 4c2a468689918e0aa3a10a841d48cb285e771bf8 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 22 Aug 2025 12:08:53 +0300 Subject: [PATCH 171/636] Readme updated --- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 171 +++++++++++++++++++++++---------------- scripts/TEST_HTTP_API.sh | 20 ++++- src/handlers_ws.rs | 44 +++++----- src/hub_service.rs | 32 ++++---- src/main.rs | 21 ++--- src/redis.rs | 14 ++-- 8 files changed, 175 insertions(+), 131 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a79b93081ec..ac96a96a2e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1181,7 +1181,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hulypulse" -version = "0.1.7" +version = "0.1.8" dependencies = [ "actix", "actix-cors", diff --git a/Cargo.toml b/Cargo.toml index 8bb882c4bc7..679318aeabc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hulypulse" -version = 
"0.1.7" +version = "0.1.8" edition = "2024" [dependencies] diff --git a/README.md b/README.md index 2132c064790..76edec749e5 100644 --- a/README.md +++ b/README.md @@ -24,8 +24,8 @@ Key segment may be private (prefixed with ‘$’) May not contain special characters (‘*’, ‘?’, ‘[’, ‘]’,‘\’,‘\x00..\xF1’,‘\x7F’,‘"’,‘'’) It is possible to use prefix, for listings / subscriptions (prefix ends with segment separator ‘/’) -GET/SUBSCRIBE/.. a/b → single key -GET/SUBSCRIBE/.. a/b/c/ → multiple +- GET/SUBSCRIBE/.. a/b → single key +- GET/SUBSCRIBE/.. a/b/c/ → multiple If multiple @@ -33,47 +33,34 @@ select all keys starting with prefix skip keys, containing private segments to the right from the prefix example -1. /a/b/$c/$d, 2. /a/b/c, 3. /a/b/$c, 4. /a/b/$c/$d/e -/ → [2] -/a/b/ → [2] -/a/b/$c/ → [3] -/a/b/$c/$d/ → [4] -/a/b/$c/$d → [1] + +- 1. /a/b/$c/$d, 2. /a/b/c, 3. /a/b/$c, 4. /a/b/$c/$d/e +- / → [2] +- /a/b/ → [2] +- /a/b/$c/ → [3] +- /a/b/$c/$d/ → [4] +- /a/b/$c/$d → [1] ## Data “Data” is an arbitrary JSON document. Size of data is limited to some reasonable size -## API -Methods - -GET - returns values of one key - -LIST - returns values with given prefix until the “sentinel” - -PUT - put value to the key -- Support CAS -- Support If-* headers - -DELETE - delete value of the key - -SUB - subscribe to key data + get initial state -Behavior identical to LIST +## HTTP API -UNSUB - unsubscribe to key data +```GET /status``` - server status and websockets count +- Answer: {"status":"OK","websockets":2} -## HTTP API - -```PUT /{workspace}/{key}``` +```PUT /{workspace}/{key}``` - Save key - Input Body - data - Content-Type: application/json (do we need something else?) + Content-Type: application/json Content-Length: optional Headers: TTL or absolute expiration time - HULY-TTL - HULY-EXPIRE-AT + - `HULY-TTL` — autodelete in N seconds + - or `HULY-EXPIRE-AT` — autodelete in UnixTime + - default max_ttl = 3600 (settings in config/default.toml) ** Conditional Headers If-*: ** - `If-Match: *` — update only if the key exists - `If-Match: ` — update only if current value's MD5 matches @@ -84,73 +71,118 @@ UNSUB - unsubscribe to key data - `204` on successful insert or update - `412` if the condition is not met - `400` if headers are invalid - - No body - -```PATCH /{workspace}/{key}``` -- TODO (not in v1) + - Body: `DONE` -```DELETE /{workspace}/{key}``` +```DELETE /{workspace}/{key}``` - Delete key - Output - Status: 204 + - Status: `204 No content`, no body + - `404 Not Found` if nothing to do -```GET /{workspace}/{key}``` +```GET /{workspace}/{key}``` - Read one key - Output - Status 200 - Content-type: application/json + - Header: `Etag: ` - Body: - - workspace - - key - - data - - expiresAt ? + - workspace (copy of input) + - key (copy of input) + - data (copy of input) + - expiresAt / TTL (copy of input, optional) + - etag -```GET /{workspace}?prefix={key}``` +```GET /{workspace}/{key}/``` - Read array of keys - Output - Status 200 - Content-type: application/json - Body (array): - - workspace - - key - - data - - expiresAt ? + - [{"key","data","expires_at","etag"}, ...] 
## WebSocket API **Client to Server** ```PUT``` - - correlation id (optional) - type: "put" - - key: “foo/bar“ + - correlation id (optional) + - key: + - “workspace/foo/bar“ - shared key + - “workspace/foo/bar/$/secret“ - secret key - data - - TTL / expiresAt + ** time control (optional) ** + - `TTL` — autodelete in N seconds + - `ExpireAt` — autodelete in UnixTime + - or default max_ttl = 3600 (settings in config/default.toml) + ** Conditional (optional) ** + - `ifMatch: *` — update only if the key exists + - `ifMatch: ` — update only if current value's MD5 matches + - `ifNoneMatch: *` — insert only if the key does not exist -```DELETE``` +- Answer: {"action":"put","correlation":"abc123","result":"OK"} + + +```GET``` + - type: "get" - correlation id (optional) + - key: + - “workspace/foo/bar“ - one shared key + - “workspace/foo/bar/$/secret“ - one secret key + +- Answer: {"action":"get","result":{"data":"hello","etag":"5d41402abc4b2a76b9719d911017c592","expires_at":3599,"key":"00000000-0000-0000-0000-000000000001/foo/bar"}} + + +```LIST``` + - type: "list" + - correlation id (optional) + - key: + - “workspace/foo/bar/“ - keys from public space + - “workspace/foo/bar/$/secret/“ - keys from secret space + +- Answer: {"action":"list","result":[{"data":"hello 1","etag":"df0649bc4f1be901c85b6183091c1d83","expires_at":3570,"key":"00000000-0000-0000-0000-000000000001/foo/bar1"},{"data":"hello 2","etag":"bb21ec8394b75795622f61613a777a8b","expires_at":3555,"key":"00000000-0000-0000-0000-000000000001/foo/bar2"}]} + + +```DELETE``` - type: "delete" - - key: “foo/bar“ + - correlation id (optional) + - key: “workspace/foo/bar“ + ** Conditional (optional) ** + - `ifMatch: ` — delete only if current value's MD5 matches + - `ifMatch: *` — return error if key does not exist + +- Answer: {"action":"delete","result":"OK"} -```SUB``` + +```SUBSCRIBE``` type: "sub" - key: “foo/bar“ + key: + - “workspace/foo/bar“ - subscribe one shared key + - “workspace/foo/bar/“ - subscribe all keys started with + - “workspace/foo/bar/$/my_secret“ - subscribe one secret key + - “workspace/foo/bar/$/my_secret/“ - subscribe all keys started with secret + +- Answer: {"action":"sub","result":"OK"} + -```UNSUB``` +```UNSUBSCRIBE``` - type: "unsub" - - key: “foo/bar“ + - key: + - “workspace/foo/bar“ - unsubscribe subscribed key + - “*“ - unsubscribe all -**Server to Client** +- Answer: {"action":"unsub","result":"OK"} -```PUT``` - - correlation id (optional) - - type: "put" - - ?? TODO: user? workspace: "11111111-2222-3333-4444-555555555555" - - key: “foo/bar“ - - data - - expiresAt +```MY SUBSCRIBES``` + - type: "sublist" -```DELETE``` - - correlation id (optional) - - type: "delete" - - key: “foo/bar“ +- Answer: {"action":"list","result":["00000000-0000-0000-0000-000000000001/foo/bar1","00000000-0000-0000-0000-000000000001/foo/bar2"]} + + +** Server to Client ** subscribed events: + + - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Set","value":"hello"} + + - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Expired"} + + - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Del"} ## Running @@ -179,22 +211,19 @@ Hulypulse uses bearer JWT token authetication. 
At the moment, it will accept any The following environment variables are used to configure hulypulse: - ```HULY_BIND_HOST```: host to bind the server to (default: 0.0.0.0) - ```HULY_BIND_PORT```: port to bind the server to (default: 8094) - - ```HULY_PAYLOAD_SIZE_LIMIT```: maximum size of the payload (default: 2Mb) - ```HULY_TOKEN_SECRET```: secret used to sign JWT tokens (default: secret) - ```HULY_REDIS_URLS```: redis connection string (default: redis://huly.local:6379) - ```HULY_REDIS_PASSWORD```: redis password (default: "<invalid>") - ```HULY_REDIS_MODE```: redis mode "direct" or "sentinel" (default: "direct") - ```HULY_REDIS_SERVICE```: redis service (default: "mymaster") - ```HULY_MAX_TTL```: maximum storage time (default: 3600) + - TODO: ```HULY_PAYLOAD_SIZE_LIMIT```: maximum size of the payload (default: 2Mb) ## Todo (in no particular order) - [ ] Optional value encryption -- [ ] HEAD request -- [ ] Conditional update (optimistic locking) - [ ] Support for open telemetry - [ ] Concurrency control for database migration (several instances of hulypulse are updated at the same time) - [ ] TLS support -- [ ] Namespacee based access control - [ ] Liveness/readiness probe endpoint ## Contributing diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 9ce7ee896dd..e0fc3918eb0 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -10,12 +10,30 @@ TOKEN=$(./token.sh claims.json) ZP="00000000-0000-0000-0000-000000000001/TESTS" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + +exit + delete "00000000-0000-0000-0000-000000000001/TESTS" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: *" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: dd358c74cb9cb897424838fbcb69c933" + +exit + + put "00000000-0000-0000-0000-000000000001/TESTS" "Value" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/1" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2" "Value_2" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2/$/secret" "Value_secret" "HULY-TTL: 2" + get "00000000-0000-0000-0000-000000000001/TESTS/" + +exit + delete "0000000/TESTS" delete ${ZP} put ${ZP} "Value_1" "HULY-TTL: 2" delete ${ZP} - echo "--------- authorization_test ----------" TOKEN="" put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 154a838880d..6eff5782910 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -13,8 +13,6 @@ // limitations under the License. 
// -// https://github.com/hcengineering/hulypulse/ - use actix::{ Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut, }; @@ -48,14 +46,14 @@ struct ReturnBase<'a> { #[serde(rename = "TTL", skip_serializing_if = "Option::is_none")] ttl: Option, - #[serde(rename = "expiresAt", skip_serializing_if = "Option::is_none")] - expires_at: Option, + // #[serde(rename = "expiresAt", skip_serializing_if = "Option::is_none")] + // expires_at: Option, - #[serde(rename = "ifMatch", skip_serializing_if = "Option::is_none")] - if_match: Option<&'a str>, + // #[serde(rename = "ifMatch", skip_serializing_if = "Option::is_none")] + // if_match: Option<&'a str>, - #[serde(rename = "ifNoneMatch", skip_serializing_if = "Option::is_none")] - if_none_match: Option<&'a str>, + // #[serde(rename = "ifNoneMatch", skip_serializing_if = "Option::is_none")] + // if_none_match: Option<&'a str>, } /// WsCommand - commands enum (put, delete, sub, unsub) @@ -242,13 +240,14 @@ impl WsSession { let base = serde_json::json!(ReturnBase { action: "put", - key: Some(key.as_str()), - data: Some(data.as_str()), + // key: Some(key.as_str()), + // data: Some(data.as_str()), correlation: correlation.as_deref(), - ttl, - expires_at, - if_match: if_match.as_deref(), - if_none_match: if_none_match.as_deref(), + // ttl, + // expires_at, + // if_match: if_match.as_deref(), + // if_none_match: if_none_match.as_deref(), + ..Default::default() }); let fut = async move { @@ -304,15 +303,13 @@ impl WsSession { return; } - tracing::info!("DELETE!!! {}", &key); - let mut redis = self.redis.clone(); let base = serde_json::json!(ReturnBase { action: "delete", - key: Some(key.as_str()), + // key: Some(key.as_str()), correlation: correlation.as_deref(), - if_match: if_match.as_deref(), + // if_match: if_match.as_deref(), ..Default::default() }); @@ -320,12 +317,11 @@ impl WsSession { // MODE logic let mut mode = Some(SaveMode::Upsert); if let Some(s) = if_match { - // `If-Match: *` - delete only if the key exists if s == "*" { // `If-Match: *` — return error if not exist mode = Some(SaveMode::Update); } else { - // `If-Match: ` — update only if current + // `If-Match: ` — delete only if current mode = Some(SaveMode::Equal(s.to_string())); } } @@ -357,7 +353,7 @@ impl WsSession { let base = serde_json::json!(ReturnBase { action: "get", - key: Some(key.as_str()), + // key: Some(key.as_str()), correlation: correlation.as_deref(), ..Default::default() }); @@ -393,7 +389,7 @@ impl WsSession { let base = serde_json::json!(ReturnBase { action: "list", - key: Some(key.as_str()), + // key: Some(key.as_str()), correlation: correlation.as_deref(), ..Default::default() }); @@ -421,7 +417,7 @@ impl WsSession { let mut obj = serde_json::json!(ReturnBase { action: "sub", - key: Some(key.as_str()), + // key: Some(key.as_str()), correlation: correlation.as_deref(), ..Default::default() }); @@ -444,7 +440,7 @@ impl WsSession { let mut obj = serde_json::json!(ReturnBase { action: "unsub", - key: Some(key.as_str()), + // key: Some(key.as_str()), correlation: correlation.as_deref(), ..Default::default() }); diff --git a/src/hub_service.rs b/src/hub_service.rs index 2e79a468c4d..39e0b71ffff 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -48,7 +48,7 @@ pub enum RedisEventAction { #[derive(Debug, Clone, Serialize)] pub struct RedisEvent { - pub db: u32, +// pub db: u32, pub key: String, pub action: RedisEventAction, } @@ -81,9 +81,9 @@ enum Command { Count { reply: oneshot::Sender, }, - DumpSubs { - reply: oneshot::Sender>>, - }, + // 
DumpSubs { + // reply: oneshot::Sender>>, + // }, RedisEvent(RedisEvent), } @@ -170,13 +170,13 @@ impl HubServiceHandle { let _ = reply.send(sessions.len()); } - Command::DumpSubs { reply } => { - let snapshot = subs - .iter() - .map(|(k, set)| (k.clone(), set.iter().copied().collect::>())) - .collect::>(); - let _ = reply.send(snapshot); - } + // Command::DumpSubs { reply } => { + // let snapshot = subs + // .iter() + // .map(|(k, set)| (k.clone(), set.iter().copied().collect::>())) + // .collect::>(); + // let _ = reply.send(snapshot); + // } Command::RedisEvent(event) => { let targets = subscribers_for(&subs, &event.key); @@ -254,11 +254,11 @@ impl HubServiceHandle { rx.await.unwrap_or_default() } - pub async fn dump_subs(&self) -> std::collections::HashMap> { - let (tx, rx) = oneshot::channel(); - let _ = self.tx.send(Command::DumpSubs { reply: tx }).await; - rx.await.unwrap_or_default() - } + // pub async fn dump_subs(&self) -> std::collections::HashMap> { + // let (tx, rx) = oneshot::channel(); + // let _ = self.tx.send(Command::DumpSubs { reply: tx }).await; + // rx.await.unwrap_or_default() + // } pub fn push_event(&self, ev: RedisEvent) { let _ = self.tx.try_send(Command::RedisEvent(ev)); diff --git a/src/main.rs b/src/main.rs index 7067a6358cd..13b8c9f2040 100644 --- a/src/main.rs +++ b/src/main.rs @@ -13,6 +13,8 @@ // limitations under the License. // +// https://github.com/hcengineering/hulypulse/ + use actix_cors::Cors; use actix_web::{ body::MessageBody, dev::{ServiceRequest, ServiceResponse}, middleware::{self, Next}, web::{self, Path, Query}, App, Error, HttpMessage, HttpResponse, HttpServer @@ -110,8 +112,6 @@ async fn main() -> anyhow::Result<()> { tracing::info!("HTTP API: {}/api", &url); tracing::info!("WebSocket API: {}/ws", &url); tracing::info!("Status: {}/status", &url); - tracing::info!("Stats: {}/stat", &url); - tracing::info!("Subscriptions: {}/subs", &url); let server = HttpServer::new(move || { let cors = Cors::default() @@ -138,18 +138,19 @@ async fn main() -> anyhow::Result<()> { .route("/ws", web::get().to(handlers_ws::handler) .wrap(middleware::from_fn(extract_claims)), ) // WebSocket - .route("/status", web::get().to(async || "ok")) + + // .route("/status", web::get().to(async || "ok")) - .route("/stat", web::get().to(|hub: web::Data| async move { + .route("/status", web::get().to(|hub: web::Data| async move { let count = hub.count().await; - Ok::<_, actix_web::Error>(HttpResponse::Ok().json(json!({ "connections": count }))) + Ok::<_, actix_web::Error>(HttpResponse::Ok().json(json!({ "websockets": count, "status": "OK" }))) })) - .route("/subs", web::get().to(|hub: web::Data| async move { - let subs = hub.dump_subs().await; - Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) - })) - + // .route("/subs", web::get().to(|hub: web::Data| async move { + // let subs = hub.dump_subs().await; + // Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) + // })) + }) .bind(socket)? 
.run(); diff --git a/src/redis.rs b/src/redis.rs index f7f9ff32952..bd090d6d4f7 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -341,15 +341,15 @@ impl TryFrom for RedisEvent { other => RedisEventAction::Other(other.to_string()), }; - let db = channel - .find('@') - .and_then(|at| channel.get(at + 1..)) - .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) - .and_then(|s| s.parse::().ok()) - .unwrap_or(0); + // let db = channel + // .find('@') + // .and_then(|at| channel.get(at + 1..)) + // .and_then(|rest| rest.find("__:").map(|end| &rest[..end])) + // .and_then(|s| s.parse::().ok()) + // .unwrap_or(0); Ok(RedisEvent { - db, + // db, key: payload.clone(), action, }) From d7711b837f16f650358ba28244c997684aeb7762 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Fri, 22 Aug 2025 12:23:14 +0300 Subject: [PATCH 172/636] Readme fix --- README.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 76edec749e5..2385a333c9b 100644 --- a/README.md +++ b/README.md @@ -49,19 +49,19 @@ Size of data is limited to some reasonable size ## HTTP API ```GET /status``` - server status and websockets count -- Answer: {"status":"OK","websockets":2} +- Answer: `{"status":"OK","websockets":2}` ```PUT /{workspace}/{key}``` - Save key - Input - Body - data - Content-Type: application/json - Content-Length: optional - Headers: TTL or absolute expiration time + - Body - data + - Content-Type: application/json + - Content-Length: optional + - Headers: TTL or absolute expiration time - `HULY-TTL` — autodelete in N seconds - or `HULY-EXPIRE-AT` — autodelete in UnixTime - default max_ttl = 3600 (settings in config/default.toml) - ** Conditional Headers If-*: ** + - Conditional Headers: - `If-Match: *` — update only if the key exists - `If-Match: ` — update only if current value's MD5 matches - `If-None-Match: *` — insert only if the key does not exist @@ -117,7 +117,7 @@ Size of data is limited to some reasonable size - `ifMatch: ` — update only if current value's MD5 matches - `ifNoneMatch: *` — insert only if the key does not exist -- Answer: {"action":"put","correlation":"abc123","result":"OK"} +- Answer: `{"action":"put","correlation":"abc123","result":"OK"}` ```GET``` @@ -127,7 +127,7 @@ Size of data is limited to some reasonable size - “workspace/foo/bar“ - one shared key - “workspace/foo/bar/$/secret“ - one secret key -- Answer: {"action":"get","result":{"data":"hello","etag":"5d41402abc4b2a76b9719d911017c592","expires_at":3599,"key":"00000000-0000-0000-0000-000000000001/foo/bar"}} +- Answer: `{"action":"get","result":{"data":"hello","etag":"5d41402abc4b2a76b9719d911017c592","expires_at":3599,"key":"00000000-0000-0000-0000-000000000001/foo/bar"}}` ```LIST``` @@ -137,7 +137,7 @@ Size of data is limited to some reasonable size - “workspace/foo/bar/“ - keys from public space - “workspace/foo/bar/$/secret/“ - keys from secret space -- Answer: {"action":"list","result":[{"data":"hello 1","etag":"df0649bc4f1be901c85b6183091c1d83","expires_at":3570,"key":"00000000-0000-0000-0000-000000000001/foo/bar1"},{"data":"hello 2","etag":"bb21ec8394b75795622f61613a777a8b","expires_at":3555,"key":"00000000-0000-0000-0000-000000000001/foo/bar2"}]} +- Answer: `{"action":"list","result":[{"data":"hello 1","etag":"df0649bc4f1be901c85b6183091c1d83","expires_at":3570,"key":"00000000-0000-0000-0000-000000000001/foo/bar1"},{"data":"hello 
2","etag":"bb21ec8394b75795622f61613a777a8b","expires_at":3555,"key":"00000000-0000-0000-0000-000000000001/foo/bar2"}]}` ```DELETE``` @@ -148,7 +148,7 @@ Size of data is limited to some reasonable size - `ifMatch: ` — delete only if current value's MD5 matches - `ifMatch: *` — return error if key does not exist -- Answer: {"action":"delete","result":"OK"} +- Answer: `{"action":"delete","result":"OK"}` ```SUBSCRIBE``` @@ -159,7 +159,7 @@ Size of data is limited to some reasonable size - “workspace/foo/bar/$/my_secret“ - subscribe one secret key - “workspace/foo/bar/$/my_secret/“ - subscribe all keys started with secret -- Answer: {"action":"sub","result":"OK"} +- Answer: `{"action":"sub","result":"OK"}` ```UNSUBSCRIBE``` @@ -168,21 +168,21 @@ Size of data is limited to some reasonable size - “workspace/foo/bar“ - unsubscribe subscribed key - “*“ - unsubscribe all -- Answer: {"action":"unsub","result":"OK"} +- Answer: `{"action":"unsub","result":"OK"}` ```MY SUBSCRIBES``` - type: "sublist" -- Answer: {"action":"list","result":["00000000-0000-0000-0000-000000000001/foo/bar1","00000000-0000-0000-0000-000000000001/foo/bar2"]} +- Answer: `{"action":"list","result":["00000000-0000-0000-0000-000000000001/foo/bar1","00000000-0000-0000-0000-000000000001/foo/bar2"]}` ** Server to Client ** subscribed events: - - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Set","value":"hello"} + - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Set","value":"hello"}` - - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Expired"} + - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Expired"}` - - {"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Del"} + - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Del"}` ## Running From 8860a10cea976e73ffc0fceb8a751d00d9641597 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Fri, 22 Aug 2025 12:23:26 +0200 Subject: [PATCH 173/636] code formatting Signed-off-by: Alexey Aristov --- src/handlers_http.rs | 3 +-- src/handlers_ws.rs | 18 ++++-------------- src/hub_service.rs | 17 +++++++++-------- src/main.rs | 39 +++++++++++++++++++++++---------------- src/redis.rs | 8 ++++---- src/workspace_owner.rs | 3 +-- 6 files changed, 42 insertions(+), 46 deletions(-) diff --git a/src/handlers_http.rs b/src/handlers_http.rs index dd31bed486e..74766e218ec 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -15,7 +15,7 @@ use anyhow::anyhow; use redis::aio::MultiplexedConnection; -use serde::{Deserialize}; +use serde::Deserialize; use tracing::*; use actix_web::{ @@ -162,7 +162,6 @@ pub async fn delete( path: web::Path, redis: web::Data, ) -> Result { - let key: String = path.into_inner().key; trace!(key, "delete request"); diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 6eff5782910..93410f1bb31 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -13,9 +13,7 @@ // limitations under the License. 
// -use actix::{ - Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut, -}; +use actix::{Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut}; use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; use redis::aio::MultiplexedConnection; @@ -29,7 +27,6 @@ use crate::redis::{ use crate::hub_service::{HubServiceHandle, ServerMessage, SessionId, new_session_id}; use crate::workspace_owner::check_workspace_core; - #[derive(Serialize, Default)] struct ReturnBase<'a> { action: &'a str, @@ -45,7 +42,6 @@ struct ReturnBase<'a> { #[serde(rename = "TTL", skip_serializing_if = "Option::is_none")] ttl: Option, - // #[serde(rename = "expiresAt", skip_serializing_if = "Option::is_none")] // expires_at: Option, @@ -157,12 +153,12 @@ impl Actor for WsSession { impl actix::Handler for WsSession { type Result = (); fn handle(&mut self, msg: ServerMessage, ctx: &mut Self::Context) { - let json = serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); + let json = + serde_json::to_string(&msg).unwrap_or_else(|_| "{\"error\":\"serialization\"}".into()); ctx.text(json); } } - /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { @@ -404,7 +400,6 @@ impl WsSession { self.fut_send(ctx, fut, base); } - WsCommand::Sub { key, correlation } => { // LEVENT 3 tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation @@ -433,7 +428,6 @@ impl WsSession { ctx.text(obj.to_string()); } - WsCommand::Unsub { key, correlation } => { // LEVENT 4 tracing::info!("UNSUB {}", &key); // correlation: {:?} , &correlation @@ -466,7 +460,6 @@ impl WsSession { ctx.text(obj.to_string()); } - WsCommand::Sublist { correlation } => { tracing::info!("SUBLIST"); // correlation: {:?} , &correlation // w/o Check workspace! @@ -487,14 +480,11 @@ impl WsSession { }, base, ); - } - - // End of commands + } // End of commands } } } - pub async fn handler( req: HttpRequest, payload: web::Payload, diff --git a/src/hub_service.rs b/src/hub_service.rs index 39e0b71ffff..859f5d5f2a1 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -48,7 +48,7 @@ pub enum RedisEventAction { #[derive(Debug, Clone, Serialize)] pub struct RedisEvent { -// pub db: u32, + // pub db: u32, pub key: String, pub action: RedisEventAction, } @@ -119,7 +119,6 @@ impl HubServiceHandle { while let Some(cmd) = rx.recv().await { match cmd { - Command::Connect { session_id, addr } => { sessions.insert(session_id, addr); } @@ -177,7 +176,6 @@ impl HubServiceHandle { // .collect::>(); // let _ = reply.send(snapshot); // } - Command::RedisEvent(event) => { let targets = subscribers_for(&subs, &event.key); if targets.is_empty() { @@ -204,10 +202,7 @@ impl HubServiceHandle { } } - let payload = ServerMessage { - event, - value, - }; + let payload = ServerMessage { event, value }; for rcpt in recipients { let _ = rcpt.do_send(payload.clone()); @@ -244,7 +239,13 @@ impl HubServiceHandle { pub async fn subscribe_list(&self, session_id: SessionId) -> Vec { let (tx, rx) = oneshot::channel(); - let _ = self.tx.send(Command::SubscribeList { session_id, reply: tx }).await; + let _ = self + .tx + .send(Command::SubscribeList { + session_id, + reply: tx, + }) + .await; rx.await.unwrap_or_default() } diff --git a/src/main.rs b/src/main.rs index 13b8c9f2040..d524e68e85e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -13,11 +13,13 @@ // limitations under the License. 
// -// https://github.com/hcengineering/hulypulse/ - use actix_cors::Cors; use actix_web::{ - body::MessageBody, dev::{ServiceRequest, ServiceResponse}, middleware::{self, Next}, web::{self, Path, Query}, App, Error, HttpMessage, HttpResponse, HttpServer + App, Error, HttpMessage, HttpResponse, HttpServer, + body::MessageBody, + dev::{ServiceRequest, ServiceResponse}, + middleware::{self, Next}, + web::{self, Path, Query}, }; use hulyrs::services::jwt::{Claims, actix::ServiceRequestExt}; use secrecy::ExposeSecret; @@ -109,7 +111,7 @@ async fn main() -> anyhow::Result<()> { let url = format!("http://{}:{}", &CONFIG.bind_host, &CONFIG.bind_port); tracing::info!("Server running at {}", &url); - tracing::info!("HTTP API: {}/api", &url); + tracing::info!("HTTP API: {}/api", &url); tracing::info!("WebSocket API: {}/ws", &url); tracing::info!("Status: {}/status", &url); @@ -135,22 +137,27 @@ async fn main() -> anyhow::Result<()> { .route("/{key:.+}", web::put().to(handlers_http::put)) .route("/{key:.+}", web::delete().to(handlers_http::delete)), ) - .route("/ws", web::get().to(handlers_ws::handler) + .route( + "/ws", + web::get() + .to(handlers_ws::handler) .wrap(middleware::from_fn(extract_claims)), ) // WebSocket - // .route("/status", web::get().to(async || "ok")) + .route( + "/status", + web::get().to(|hub: web::Data| async move { + let count = hub.count().await; + Ok::<_, actix_web::Error>( + HttpResponse::Ok().json(json!({ "websockets": count, "status": "OK" })), + ) + }), + ) - .route("/status", web::get().to(|hub: web::Data| async move { - let count = hub.count().await; - Ok::<_, actix_web::Error>(HttpResponse::Ok().json(json!({ "websockets": count, "status": "OK" }))) - })) - - // .route("/subs", web::get().to(|hub: web::Data| async move { - // let subs = hub.dump_subs().await; - // Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) - // })) - + // .route("/subs", web::get().to(|hub: web::Data| async move { + // let subs = hub.dump_subs().await; + // Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) + // })) }) .bind(socket)? 
.run(); diff --git a/src/redis.rs b/src/redis.rs index bd090d6d4f7..f030b7f1be2 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -19,7 +19,10 @@ use ::redis::Msg; use tokio_stream::StreamExt; use tracing::*; -use crate::{config::{RedisMode, CONFIG}, hub_service::{HubServiceHandle, RedisEvent, RedisEventAction}}; +use crate::{ + config::{CONFIG, RedisMode}, + hub_service::{HubServiceHandle, RedisEvent, RedisEventAction}, +}; #[derive(serde::Serialize)] pub enum Ttl { @@ -356,7 +359,6 @@ impl TryFrom for RedisEvent { } } - pub async fn receiver(redis_client: Client, hub: HubServiceHandle) -> anyhow::Result<()> { let mut redis = redis_client.get_multiplexed_async_connection().await?; let mut pubsub = redis_client.get_async_pubsub().await?; @@ -382,11 +384,9 @@ pub async fn receiver(redis_client: Client, hub: HubServiceHandle) -> anyhow::Re while let Some(message) = messages.next().await { match RedisEvent::try_from(message) { Ok(ev) => { - // debug!("redis event: {ev:#?}"); hub.push_event(ev); - } Err(e) => { warn!("invalid redis message: {e}"); diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 148b06740cc..0b427d9edaa 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -18,7 +18,6 @@ use uuid::Uuid; // common checker pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static str> { - if claims.is_system() { return Ok(()); } @@ -41,4 +40,4 @@ pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static s } Ok(()) -} \ No newline at end of file +} From e29ba95d508c633ad830c31decf52e46576fa2c3 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Fri, 22 Aug 2025 16:33:38 +0200 Subject: [PATCH 174/636] use global state for Hub Signed-off-by: Alexey Aristov --- src/handlers_ws.rs | 51 ++++++++++++++++++++++++++++++++++++++++++++++ src/hub_service.rs | 12 +++++++++++ src/main.rs | 11 ++++++++-- 3 files changed, 72 insertions(+), 2 deletions(-) diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 93410f1bb31..e140f5e03ee 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -127,6 +127,7 @@ pub struct WsSession { pub redis: MultiplexedConnection, pub id: SessionId, pub hub: HubServiceHandle, + hub_state: Arc>, pub claims: Claims, } @@ -400,6 +401,7 @@ impl WsSession { self.fut_send(ctx, fut, base); } + /* WsCommand::Sub { key, correlation } => { // LEVENT 3 tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation @@ -426,6 +428,41 @@ impl WsSession { map.insert("result".into(), json!("OK")); } ctx.text(obj.to_string()); + }*/ + WsCommand::Sub { key, correlation } => { + // LEVENT 3 + tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation + + // Check workspace + if let Err(e) = self.workspace_check_ws(&key) { + self.ws_error(ctx, e); + return; + } + + let mut obj = serde_json::json!(ReturnBase { + action: "sub", + // key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() + }); + + let map = obj.as_object_mut().unwrap(); + + if deprecated_symbol(&key) { + map.insert("error".into(), json!("Deprecated symbol in key")); + } else { + let fut = async move { + let mut hub_state = self.hub_state.write().await; + + hub_state.subscribe(self.id, key.clone()); + }; + + // spawn and respond when done + //ctx.spawn(fut); + + map.insert("result".into(), json!("OK")); + } + ctx.text(obj.to_string()); } WsCommand::Unsub { key, correlation } => { @@ -472,6 +509,14 @@ impl WsSession { let hub = self.hub.clone(); let id = self.id; + let fut = async move { + let hub_state = 
self.hub_state.read().await; + + //hub_state.subscribe(self.id, key.clone()); + + // + }; + self.fut_send( ctx, async move { @@ -485,11 +530,16 @@ impl WsSession { } } +use crate::hub_service::HubState; +use std::sync::Arc; +use tokio::sync::RwLock; + pub async fn handler( req: HttpRequest, payload: web::Payload, redis: web::Data, hub: web::Data, // <-- было Addr + hub_state: web::Data>>, ) -> Result { let claims = req .extensions() @@ -500,6 +550,7 @@ pub async fn handler( let session = WsSession { redis: redis.get_ref().clone(), hub: hub.get_ref().clone(), + hub_state: hub_state.get_ref().clone(), id: new_session_id(), claims, }; diff --git a/src/hub_service.rs b/src/hub_service.rs index 859f5d5f2a1..58975b74586 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -89,6 +89,18 @@ enum Command { // ==== Handle ==== +#[derive(Debug, Default)] +pub struct HubState { + sessions: HashMap>, + subs: HashMap>, +} + +impl HubState { + pub fn subscribe(&mut self, session_id: SessionId, key: String) { + self.subs.entry(key).or_default().insert(session_id); + } +} + #[derive(Clone)] pub struct HubServiceHandle { tx: mpsc::Sender, diff --git a/src/main.rs b/src/main.rs index d524e68e85e..de3d240fb1d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -34,7 +34,7 @@ mod redis; mod workspace_owner; mod hub_service; -use hub_service::HubServiceHandle; +use hub_service::{HubServiceHandle, HubState}; use config::CONFIG; @@ -104,6 +104,9 @@ async fn main() -> anyhow::Result<()> { // starting HubService let hub = HubServiceHandle::start(redis_connection.clone()); + let hub_state = HubState::default(); + let hub_state = Arc::new(RwLock::new(hub_state)); + // starting Logger tokio::spawn(redis::receiver(redis_client, hub.clone())); @@ -115,6 +118,9 @@ async fn main() -> anyhow::Result<()> { tracing::info!("WebSocket API: {}/ws", &url); tracing::info!("Status: {}/status", &url); + use std::sync::Arc; + use tokio::sync::RwLock; + let server = HttpServer::new(move || { let cors = Cors::default() .allow_any_origin() @@ -125,7 +131,8 @@ async fn main() -> anyhow::Result<()> { App::new() .app_data(web::Data::new(redis_connection.clone())) - .app_data(web::Data::new(hub.clone())) + //.app_data(web::Data::new(hub.clone())) + .app_data(web::Data::new(hub_state.clone())) .wrap(middleware::Logger::default()) .wrap(cors) .service( From 2774c0df637c61ca4e2d2779514aebafb286c79e Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Sat, 23 Aug 2025 02:35:02 +0300 Subject: [PATCH 175/636] Refactoring modules: Actor-2 removed --- README.md | 6 +- scripts/TEST.html | 21 +-- scripts/TEST_HTTP_API.sh | 6 +- src/handlers_ws.rs | 232 ++++++++++++------------------- src/hub_service.rs | 291 ++++++++++++--------------------------- src/main.rs | 40 ++---- src/redis.rs | 19 ++- src/workspace_owner.rs | 7 + 8 files changed, 229 insertions(+), 393 deletions(-) diff --git a/README.md b/README.md index 2385a333c9b..7cd4cff5644 100644 --- a/README.md +++ b/README.md @@ -178,11 +178,11 @@ Size of data is limited to some reasonable size ** Server to Client ** subscribed events: - - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Set","value":"hello"}` + - `{"message":"Set","key":"00000000-0000-0000-0000-000000000001/foo/bar","value":"hello"}` - - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Expired"}` + - `{"message":"Expired","key":"00000000-0000-0000-0000-000000000001/foo/bar"}` - - `{"key":"00000000-0000-0000-0000-000000000001/foo/bar","action":"Del"}` + - 
`{"message":"Del","key":"00000000-0000-0000-0000-000000000001/foo/bar"}` ## Running diff --git a/scripts/TEST.html b/scripts/TEST.html index 8ecf37a3c88..2649fbd0e39 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -63,7 +63,7 @@

[scripts/TEST.html hunks — the "WebSocket JSON Tester" page: the HTML/JS changes were stripped in this extract; only the page title and the "Waiting for server response..." placeholder text survive.]
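
The exchange that tester page drives can also be reproduced from a native client. The sketch below is hypothetical: it assumes the `tokio-tungstenite` and `futures-util` crates (not dependencies of this repository) and that the `/ws` handshake accepts an `Authorization: Bearer ...` header. It subscribes to a prefix and prints the acknowledgement plus the pushed Set/Del/Expired events documented in the README.

```rust
// Hypothetical WebSocket client sketch; `tokio-tungstenite` and `futures-util`
// are NOT dependencies of this repository, and bearer-header auth on the
// handshake is an assumption.
use futures_util::{SinkExt, StreamExt};
use tokio_tungstenite::{
    connect_async,
    tungstenite::{client::IntoClientRequest, Message},
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let token = std::env::var("TOKEN")?; // JWT issued elsewhere
    let mut request = "ws://localhost:8094/ws".into_client_request()?;
    request
        .headers_mut()
        .insert("Authorization", format!("Bearer {token}").parse()?);

    let (mut ws, _) = connect_async(request).await?;

    // Subscribe to every key under the prefix (note the trailing '/').
    let sub = r#"{"type":"sub","key":"00000000-0000-0000-0000-000000000001/foo/","correlation":"c1"}"#;
    ws.send(Message::text(sub.to_string())).await?;

    // First frame is the {"action":"sub", ...} acknowledgement; later frames
    // are the Set/Del/Expired events pushed for matching keys.
    while let Some(frame) = ws.next().await {
        if let Message::Text(text) = frame? {
            println!("{text}");
        }
    }
    Ok(())
}
```
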
diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index e0fc3918eb0..04383691a1c 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -12,14 +12,14 @@ ZP="00000000-0000-0000-0000-000000000001/TESTS" put "00000000-0000-0000-0000-000000000001/TESTS" "Value" -exit +#exit delete "00000000-0000-0000-0000-000000000001/TESTS" put "00000000-0000-0000-0000-000000000001/TESTS" "Value" delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: *" put "00000000-0000-0000-0000-000000000001/TESTS" "Value" delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: dd358c74cb9cb897424838fbcb69c933" -exit +#exit put "00000000-0000-0000-0000-000000000001/TESTS" "Value" "HULY-TTL: 2" put "00000000-0000-0000-0000-000000000001/TESTS/1" "Value_1" "HULY-TTL: 2" @@ -27,7 +27,7 @@ exit put "00000000-0000-0000-0000-000000000001/TESTS/2/$/secret" "Value_secret" "HULY-TTL: 2" get "00000000-0000-0000-0000-000000000001/TESTS/" -exit +#exit delete "0000000/TESTS" delete ${ZP} diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index e140f5e03ee..97385c5e7f3 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -13,6 +13,9 @@ // limitations under the License. // +use std::sync::Arc; + +use tokio::sync::RwLock; use actix::{Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut}; use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; @@ -20,12 +23,11 @@ use redis::aio::MultiplexedConnection; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; -use crate::redis::{ - SaveMode, Ttl, deprecated_symbol, redis_delete, redis_list, redis_read, redis_save, -}; - -use crate::hub_service::{HubServiceHandle, ServerMessage, SessionId, new_session_id}; +use crate::redis::{ SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save }; +use crate::hub_service::{ServerMessage, SessionId, new_session_id}; use crate::workspace_owner::check_workspace_core; +use crate::hub_service::HubState; + #[derive(Serialize, Default)] struct ReturnBase<'a> { @@ -122,11 +124,9 @@ pub enum WsCommand { use hulyrs::services::jwt::Claims; /// Session condition -#[allow(dead_code)] pub struct WsSession { pub redis: MultiplexedConnection, pub id: SessionId, - pub hub: HubServiceHandle, hub_state: Arc>, pub claims: Claims, } @@ -136,18 +136,24 @@ impl Actor for WsSession { type Context = ws::WebsocketContext; fn started(&mut self, ctx: &mut Self::Context) { - let addr = ctx.address(); - let recipient = addr.recipient::(); - - self.hub.connect(self.id, recipient); - tracing::info!("WebSocket connected: {}", self.id); + let id = self.id; + let recipient = ctx.address().recipient::(); + let hub_state = self.hub_state.clone(); + ctx.spawn( + actix::fut::wrap_future(async move { + hub_state.write().await.connect(id, recipient); + }).map(|_, _, _| ()) + ); + tracing::info!("WebSocket connected: {}", id); } fn stopped(&mut self, _ctx: &mut Self::Context) { - if self.id != 0 { - self.hub.disconnect(self.id); - } - tracing::info!("WebSocket disconnected: {:?}", &self.id); + let id = self.id; + let hub_state = self.hub_state.clone(); + actix::spawn(async move { + hub_state.write().await.disconnect(id); + }); + tracing::info!("WebSocket disconnected: {}", id); } } @@ -178,15 +184,8 @@ impl StreamHandler> for WsSession { } } -/// All logic in one impl +/// All logic impl WsSession { - fn ws_error(&self, ctx: &mut ws::WebsocketContext, msg: &str) { - ctx.text(format!(r#"{{"error":"{}"}}"#, msg)); - } - - fn workspace_check_ws(&self, key: &str) -> 
Result<(), &'static str> { - check_workspace_core(&self.claims, key) - } fn fut_send( &mut self, @@ -215,6 +214,8 @@ impl WsSession { /// When valid JSON recieved for WsSession fn handle_command(&mut self, cmd: WsCommand, ctx: &mut ws::WebsocketContext) { + + // PUT match cmd { WsCommand::Put { key, @@ -227,14 +228,6 @@ impl WsSession { } => { tracing::info!("PUT {} = {}", &key, &data); // (expires_at: {:?}) (ttl: {:?}) correlation: {:?} &expires_at, &ttl, &correlation - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - - let mut redis = self.redis.clone(); - let base = serde_json::json!(ReturnBase { action: "put", // key: Some(key.as_str()), @@ -247,7 +240,13 @@ impl WsSession { ..Default::default() }); + let mut redis = self.redis.clone(); + let claims = self.claims.clone(); + let fut = async move { + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + // TTL logic let real_ttl = if let Some(secs) = ttl { Some(Ttl::Sec(secs as usize)) @@ -294,13 +293,8 @@ impl WsSession { } => { tracing::info!("DELETE {}", &key); // correlation:{:?} , &correlation - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - let mut redis = self.redis.clone(); + let claims = self.claims.clone(); let base = serde_json::json!(ReturnBase { action: "delete", @@ -311,6 +305,9 @@ impl WsSession { }); let fut = async move { + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + // MODE logic let mut mode = Some(SaveMode::Upsert); if let Some(s) = if_match { @@ -340,14 +337,6 @@ impl WsSession { WsCommand::Get { key, correlation } => { tracing::info!("GET {}", &key); // correlation:{:?} , &correlation - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - - let mut redis = self.redis.clone(); - let base = serde_json::json!(ReturnBase { action: "get", // key: Some(key.as_str()), @@ -355,7 +344,13 @@ impl WsSession { ..Default::default() }); + let mut redis = self.redis.clone(); + let claims = self.claims.clone(); + let fut = async move { + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + let data_opt = redis_read(&mut redis, &key) .await .map_err(|e| e.to_string())?; @@ -376,14 +371,6 @@ impl WsSession { WsCommand::List { key, correlation } => { tracing::info!("LIST {:?}", &key); // correlation: {:?} , &correlation - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - - let mut redis = self.redis.clone(); - let base = serde_json::json!(ReturnBase { action: "list", // key: Some(key.as_str()), @@ -391,7 +378,13 @@ impl WsSession { ..Default::default() }); + let mut redis = self.redis.clone(); + let claims = self.claims.clone(); + let fut = async move { + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + let data = redis_list(&mut redis, &key) .await .map_err(|e| e.to_string())?; @@ -401,100 +394,60 @@ impl WsSession { self.fut_send(ctx, fut, base); } - /* WsCommand::Sub { key, correlation } => { // LEVENT 3 tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - - let mut obj = serde_json::json!(ReturnBase { - action: "sub", - // key: Some(key.as_str()), - correlation: 
correlation.as_deref(), - ..Default::default() - }); - - let map = obj.as_object_mut().unwrap(); - - if deprecated_symbol(&key) { - map.insert("error".into(), json!("Deprecated symbol in key")); - } else { - self.hub.subscribe(self.id, key.clone()); - map.insert("result".into(), json!("OK")); - } - ctx.text(obj.to_string()); - }*/ - WsCommand::Sub { key, correlation } => { - // LEVENT 3 - tracing::info!("SUB {}", &key); // correlation: {:?} , &correlation - - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - - let mut obj = serde_json::json!(ReturnBase { - action: "sub", - // key: Some(key.as_str()), - correlation: correlation.as_deref(), - ..Default::default() + let base = serde_json::json!(ReturnBase { + action: "sub", + // key: Some(key.as_str()), + correlation: correlation.as_deref(), + ..Default::default() }); - let map = obj.as_object_mut().unwrap(); - - if deprecated_symbol(&key) { - map.insert("error".into(), json!("Deprecated symbol in key")); - } else { - let fut = async move { - let mut hub_state = self.hub_state.write().await; + let hub_state = self.hub_state.clone(); + let id = self.id.clone(); + let claims = self.claims.clone(); - hub_state.subscribe(self.id, key.clone()); - }; + let fut = async move { + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } - // spawn and respond when done - //ctx.spawn(fut); + hub_state.write().await.subscribe(id, key); + Ok(json!({ "result": "OK" })) + }; - map.insert("result".into(), json!("OK")); - } - ctx.text(obj.to_string()); + self.fut_send(ctx, fut, base); } WsCommand::Unsub { key, correlation } => { // LEVENT 4 tracing::info!("UNSUB {}", &key); // correlation: {:?} , &correlation - let mut obj = serde_json::json!(ReturnBase { + let base = serde_json::json!(ReturnBase { action: "unsub", // key: Some(key.as_str()), correlation: correlation.as_deref(), ..Default::default() }); - let map = obj.as_object_mut().unwrap(); + let hub_state = self.hub_state.clone(); + let id = self.id.clone(); + let claims = self.claims.clone(); - if key == "*" { - self.hub.unsubscribe_all(self.id); - map.insert("result".into(), json!("OK")); - } else { - // Check workspace - if let Err(e) = self.workspace_check_ws(&key) { - self.ws_error(ctx, e); - return; - } - if deprecated_symbol(&key) { - map.insert("error".into(), json!("Deprecated symbol in key")); + let fut = async move { + if key == "*" { + hub_state.write().await.unsubscribe_all(id); + Ok(json!({ "result": "OK" })) } else { - self.hub.unsubscribe(self.id, key.clone()); - map.insert("result".into(), json!("OK")); + // Check workspace + if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + + hub_state.write().await.unsubscribe(id, key); + Ok(json!({ "result": "OK" })) } - } - ctx.text(obj.to_string()); + }; + self.fut_send(ctx, fut, base); } WsCommand::Sublist { correlation } => { @@ -506,39 +459,24 @@ impl WsSession { ..Default::default() }); - let hub = self.hub.clone(); - let id = self.id; + let hub_state = self.hub_state.clone(); + let id = self.id.clone(); let fut = async move { - let hub_state = self.hub_state.read().await; - - //hub_state.subscribe(self.id, key.clone()); - - // + let keys = hub_state.read().await.subscribe_list(id); + Ok(json!({ "result": keys })) }; - - self.fut_send( - ctx, - async move { - let keys = hub.subscribe_list(id).await; - Ok(json!({ "result": keys })) - }, - base, - ); - } // End of commands + self.fut_send(ctx, fut, base); + } + // End 
of commands } } } -use crate::hub_service::HubState; -use std::sync::Arc; -use tokio::sync::RwLock; - pub async fn handler( req: HttpRequest, payload: web::Payload, redis: web::Data, - hub: web::Data, // <-- было Addr hub_state: web::Data>>, ) -> Result { let claims = req @@ -549,7 +487,7 @@ pub async fn handler( let session = WsSession { redis: redis.get_ref().clone(), - hub: hub.get_ref().clone(), + // hub: hub.get_ref().clone(), hub_state: hub_state.get_ref().clone(), id: new_session_id(), claims, diff --git a/src/hub_service.rs b/src/hub_service.rs index 58975b74586..dc07686484e 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -1,11 +1,27 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + use std::collections::{HashMap, HashSet}; use std::sync::atomic::{AtomicU64, Ordering}; +use std::sync::Arc; use actix::prelude::*; use redis::aio::MultiplexedConnection; use serde::Serialize; -use tokio::sync::{mpsc, oneshot}; +use tokio::sync::{RwLock}; fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { @@ -49,45 +65,11 @@ pub enum RedisEventAction { #[derive(Debug, Clone, Serialize)] pub struct RedisEvent { // pub db: u32, + pub message: RedisEventAction, pub key: String, - pub action: RedisEventAction, -} - -// ==== Commands for worker ==== - -enum Command { - Connect { - session_id: SessionId, - addr: Recipient, - }, - Disconnect { - session_id: SessionId, - }, - Subscribe { - session_id: SessionId, - key: String, - }, - Unsubscribe { - session_id: SessionId, - key: String, - }, - UnsubscribeAll { - session_id: SessionId, - }, - SubscribeList { - session_id: SessionId, - reply: oneshot::Sender>, - }, - Count { - reply: oneshot::Sender, - }, - // DumpSubs { - // reply: oneshot::Sender>>, - // }, - RedisEvent(RedisEvent), } -// ==== Handle ==== +// ==== Handle ==== #[derive(Debug, Default)] pub struct HubState { @@ -96,184 +78,93 @@ pub struct HubState { } impl HubState { + pub fn connect(&mut self, session_id: SessionId, addr: Recipient) { + self.sessions.insert(session_id, addr); + } + pub fn disconnect(&mut self, session_id: SessionId) { + self.sessions.remove(&session_id); + self.subs.retain(|_, ids| { ids.remove(&session_id); !ids.is_empty() }); + } pub fn subscribe(&mut self, session_id: SessionId, key: String) { self.subs.entry(key).or_default().insert(session_id); } -} - -#[derive(Clone)] -pub struct HubServiceHandle { - tx: mpsc::Sender, -} - -impl HubServiceHandle { - pub fn start(redis: MultiplexedConnection) -> Self { - let (tx, mut rx) = mpsc::channel::(1024); - - // Владелец состояния живёт внутри задачи - tokio::spawn(async move { - let mut sessions: HashMap> = HashMap::new(); - let mut subs: HashMap> = HashMap::new(); - let mut redis_conn = redis; - - fn subscribers_for( - subs: &HashMap>, - key: &str, - ) -> HashSet { - let mut out = HashSet::::new(); - for (sub_key, set) in subs.iter() { - if subscription_matches(sub_key, key) { - out.extend(set.iter().copied()); - } - } - out - 
} - - while let Some(cmd) = rx.recv().await { - match cmd { - Command::Connect { session_id, addr } => { - sessions.insert(session_id, addr); - } - - Command::Disconnect { session_id } => { - subs.retain(|_, ids| { - ids.remove(&session_id); - !ids.is_empty() - }); - sessions.remove(&session_id); - } - - Command::Subscribe { session_id, key } => { - subs.entry(key).or_default().insert(session_id); - } - - Command::Unsubscribe { session_id, key } => { - if let Some(set) = subs.get_mut(&key) { - set.remove(&session_id); - if set.is_empty() { - subs.remove(&key); - } - } - } - - Command::UnsubscribeAll { session_id } => { - subs.retain(|_, ids| { - ids.remove(&session_id); - !ids.is_empty() - }); - } - - Command::SubscribeList { session_id, reply } => { - let list = subs - .iter() - .filter_map(|(key, ids)| { - if ids.contains(&session_id) { - Some(key.clone()) - } else { - None - } - }) - .collect::>(); - let _ = reply.send(list); - } - - Command::Count { reply } => { - let _ = reply.send(sessions.len()); - } - - // Command::DumpSubs { reply } => { - // let snapshot = subs - // .iter() - // .map(|(k, set)| (k.clone(), set.iter().copied().collect::>())) - // .collect::>(); - // let _ = reply.send(snapshot); - // } - Command::RedisEvent(event) => { - let targets = subscribers_for(&subs, &event.key); - if targets.is_empty() { - continue; - } - let recipients: Vec> = targets - .into_iter() - .filter_map(|sid| sessions.get(&sid).cloned()) - .collect(); - - // Inside: waiting GET - let need_get = matches!(event.action, RedisEventAction::Set); - let mut value: Option = None; - if need_get { - match redis::cmd("GET") - .arg(&event.key) - .query_async::>(&mut redis_conn) - .await - { - Ok(v) => value = v, - Err(e) => { - tracing::warn!("redis GET {} failed: {}", &event.key, e); - } - } - } - - let payload = ServerMessage { event, value }; - - for rcpt in recipients { - let _ = rcpt.do_send(payload.clone()); - } - } - } + pub fn unsubscribe(&mut self, session_id: SessionId, key: String) { + if let Some(set) = self.subs.get_mut(&key) { + set.remove(&session_id); + if set.is_empty() { + self.subs.remove(&key); } + } + } + pub fn unsubscribe_all(&mut self, session_id: SessionId) { + self.subs.retain(|_, ids| { + ids.remove(&session_id); + !ids.is_empty() }); - - Self { tx } } - - // ---- API, ничего не выполняет параллельно внутри worker'а ---- - - pub fn connect(&self, session_id: SessionId, addr: Recipient) { - let _ = self.tx.try_send(Command::Connect { session_id, addr }); + pub fn subscribe_list(&self, session_id: SessionId) -> Vec { + self.subs + .iter() + .filter_map(|(key, ids)| { + if ids.contains(&session_id) { + Some(key.clone()) + } else { + None + } + }) + .collect() } - - pub fn disconnect(&self, session_id: SessionId) { - let _ = self.tx.try_send(Command::Disconnect { session_id }); + pub fn count(&self) -> usize { + self.sessions.len() } - - pub fn subscribe(&self, session_id: SessionId, key: String) { - let _ = self.tx.try_send(Command::Subscribe { session_id, key }); + pub fn recipients_for_key(&self, key: &str) -> Vec> { + let mut out = Vec::new(); + for (sub_key, set) in &self.subs { + if subscription_matches(sub_key, key) { + for sid in set { + if let Some(r) = self.sessions.get(sid) { + out.push(r.clone()); + } + } + } + } + out } - pub fn unsubscribe(&self, session_id: SessionId, key: String) { - let _ = self.tx.try_send(Command::Unsubscribe { session_id, key }); - } +} - pub fn unsubscribe_all(&self, session_id: SessionId) { - let _ = self.tx.try_send(Command::UnsubscribeAll { 
session_id }); - } - pub async fn subscribe_list(&self, session_id: SessionId) -> Vec { - let (tx, rx) = oneshot::channel(); - let _ = self - .tx - .send(Command::SubscribeList { - session_id, - reply: tx, - }) - .await; - rx.await.unwrap_or_default() - } - pub async fn count(&self) -> usize { - let (tx, rx) = oneshot::channel(); - let _ = self.tx.send(Command::Count { reply: tx }).await; - rx.await.unwrap_or_default() +// Send messages about new Redis events +pub async fn push_event( + hub_state: &Arc>, + redis: &mut MultiplexedConnection, + ev: RedisEvent, +) { + // Collect Addresses + let recipients: Vec> = { + hub_state.read().await.recipients_for_key(&ev.key) + }; + if recipients.is_empty() { + return; } - // pub async fn dump_subs(&self) -> std::collections::HashMap> { - // let (tx, rx) = oneshot::channel(); - // let _ = self.tx.send(Command::DumpSubs { reply: tx }).await; - // rx.await.unwrap_or_default() - // } + // Get value from Redis (only for `Set` event, not for `Delete`, `Expire`) + let mut value: Option = None; + if matches!(ev.message, RedisEventAction::Set) { + match redis::cmd("GET") + .arg(&ev.key) + .query_async::>(redis) + .await + { + Ok(v) => value = v, + Err(e) => tracing::warn!("redis GET {} failed: {}", &ev.key, e), + } + } - pub fn push_event(&self, ev: RedisEvent) { - let _ = self.tx.try_send(Command::RedisEvent(ev)); + // Sending + let payload = ServerMessage { event: ev, value }; + for rcpt in recipients { + let _ = rcpt.do_send(payload.clone()); } } diff --git a/src/main.rs b/src/main.rs index de3d240fb1d..6a1ed21a397 100644 --- a/src/main.rs +++ b/src/main.rs @@ -34,7 +34,7 @@ mod redis; mod workspace_owner; mod hub_service; -use hub_service::{HubServiceHandle, HubState}; +use hub_service::{HubState}; use config::CONFIG; @@ -102,13 +102,10 @@ async fn main() -> anyhow::Result<()> { let redis_connection = redis_client.get_multiplexed_async_connection().await?; // starting HubService - let hub = HubServiceHandle::start(redis_connection.clone()); - - let hub_state = HubState::default(); - let hub_state = Arc::new(RwLock::new(hub_state)); + let hub_state = Arc::new(RwLock::new(HubState::default())); // starting Logger - tokio::spawn(redis::receiver(redis_client, hub.clone())); + tokio::spawn(redis::receiver(redis_client, hub_state.clone())); let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); @@ -144,27 +141,20 @@ async fn main() -> anyhow::Result<()> { .route("/{key:.+}", web::put().to(handlers_http::put)) .route("/{key:.+}", web::delete().to(handlers_http::delete)), ) - .route( - "/ws", - web::get() - .to(handlers_ws::handler) + .route("/ws",web::get().to(handlers_ws::handler) .wrap(middleware::from_fn(extract_claims)), ) // WebSocket - // .route("/status", web::get().to(async || "ok")) - .route( - "/status", - web::get().to(|hub: web::Data| async move { - let count = hub.count().await; - Ok::<_, actix_web::Error>( - HttpResponse::Ok().json(json!({ "websockets": count, "status": "OK" })), - ) - }), - ) - - // .route("/subs", web::get().to(|hub: web::Data| async move { - // let subs = hub.dump_subs().await; - // Ok::<_, actix_web::Error>(HttpResponse::Ok().json(subs)) - // })) + .route("/status", web::get().to({ + move |hub_state: web::Data>>| { + let hub_state = hub_state.clone(); + async move { + let count = hub_state.read().await.count(); + Ok::<_, actix_web::Error>( + HttpResponse::Ok().json(json!({ "websockets": count, "status": "OK" })), + ) + } + } + })) }) .bind(socket)? 
.run(); diff --git a/src/redis.rs b/src/redis.rs index f030b7f1be2..40c1564b1fe 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -13,15 +13,16 @@ // limitations under the License. // -use std::time::{SystemTime, UNIX_EPOCH}; +use std::{sync::Arc, time::{SystemTime, UNIX_EPOCH}}; use ::redis::Msg; +use tokio::sync::RwLock; use tokio_stream::StreamExt; use tracing::*; use crate::{ - config::{CONFIG, RedisMode}, - hub_service::{HubServiceHandle, RedisEvent, RedisEventAction}, + config::{RedisMode, CONFIG}, + hub_service::{push_event, HubState, RedisEvent, RedisEventAction}, }; #[derive(serde::Serialize)] @@ -336,7 +337,7 @@ impl TryFrom for RedisEvent { // "__keyevent@0__:set" → event="set", db=0; payload = key let event = channel.rsplit(':').next().unwrap_or(""); - let action = match event { + let message = match event { "set" => RedisEventAction::Set, "del" => RedisEventAction::Del, "unlink" => RedisEventAction::Unlink, @@ -354,12 +355,15 @@ impl TryFrom for RedisEvent { Ok(RedisEvent { // db, key: payload.clone(), - action, + message, }) } } -pub async fn receiver(redis_client: Client, hub: HubServiceHandle) -> anyhow::Result<()> { +pub async fn receiver(redis_client: Client, + // hub: HubServiceHandle + hub_state: Arc>, +) -> anyhow::Result<()> { let mut redis = redis_client.get_multiplexed_async_connection().await?; let mut pubsub = redis_client.get_async_pubsub().await?; @@ -386,7 +390,8 @@ pub async fn receiver(redis_client: Client, hub: HubServiceHandle) -> anyhow::Re Ok(ev) => { // debug!("redis event: {ev:#?}"); - hub.push_event(ev); + push_event(&hub_state, &mut redis, ev).await; + } Err(e) => { warn!("invalid redis message: {e}"); diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 0b427d9edaa..2014953921b 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -16,8 +16,15 @@ use hulyrs::services::jwt::Claims; use uuid::Uuid; +use crate::redis::deprecated_symbol; + // common checker pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static str> { + + if deprecated_symbol(key) { + return Err("Invalid key: deprecated symbols"); + } + if claims.is_system() { return Ok(()); } From 7ac978c6454333a54ca4b99534677c8095676bce Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Sun, 24 Aug 2025 05:58:54 +0300 Subject: [PATCH 176/636] New config options: (Native memory instead Redis), (authirization not required) --- Cargo.lock | 2 +- Cargo.toml | 2 +- scripts/TEST_HTTP_API.sh | 20 ++- scripts/TEST_lleo.html | 140 +++++++++++++++++ scripts/TEST_no_auth.html | 141 +++++++++++++++++ scripts/lleo_TEST_HTTP_API.sh | 149 ++++++++++++++++++ scripts/pulse_lib_lleo.sh | 79 ++++++++++ src/config.rs | 3 + src/config/default.toml | 3 + src/db.rs | 101 +++++++++++++ src/handlers_http.rs | 46 +++--- src/handlers_ws.rs | 72 ++++----- src/hub_service.rs | 39 ++--- src/main.rs | 35 ++++- src/memory.rs | 277 ++++++++++++++++++++++++++++++++++ src/workspace_owner.rs | 15 +- 16 files changed, 1026 insertions(+), 98 deletions(-) create mode 100644 scripts/TEST_lleo.html create mode 100644 scripts/TEST_no_auth.html create mode 100755 scripts/lleo_TEST_HTTP_API.sh create mode 100755 scripts/pulse_lib_lleo.sh create mode 100644 src/db.rs create mode 100644 src/memory.rs diff --git a/Cargo.lock b/Cargo.lock index ac96a96a2e5..7f17d70d928 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1181,7 +1181,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hulypulse" -version = "0.1.8" +version = "0.1.10" dependencies = [ 
"actix", "actix-cors", diff --git a/Cargo.toml b/Cargo.toml index 679318aeabc..c79af425b2a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hulypulse" -version = "0.1.8" +version = "0.1.10" edition = "2024" [dependencies] diff --git a/scripts/TEST_HTTP_API.sh b/scripts/TEST_HTTP_API.sh index 04383691a1c..a07394a6948 100755 --- a/scripts/TEST_HTTP_API.sh +++ b/scripts/TEST_HTTP_API.sh @@ -6,9 +6,27 @@ source ./pulse_lib.sh TOKEN=$(./token.sh claims.json) #echo ${TOKEN} #exit +ZP="00000000-0000-0000-0000-000000000001/TESTS" + + + +put "00000000-0000-0000-0000-000000000001/TESTS/val1" "value" "HULY-TTL: 1" +put "00000000-0000-0000-0000-000000000001/TESTS/val2" "value" "HULY-TTL: 12" +put "00000000-0000-0000-0000-000000000001/TESTS/val3" "value" "HULY-TTL: 1" + +get "00000000-0000-0000-0000-000000000001/TESTS/" +sleep 2 +# get "00000000-0000-0000-0000-000000000001/TESTS/val2" +get "00000000-0000-0000-0000-000000000001/TESTS/" + + + +exit + + + -ZP="00000000-0000-0000-0000-000000000001/TESTS" put "00000000-0000-0000-0000-000000000001/TESTS" "Value" diff --git a/scripts/TEST_lleo.html b/scripts/TEST_lleo.html new file mode 100644 index 00000000000..706cc56619c --- /dev/null +++ b/scripts/TEST_lleo.html @@ -0,0 +1,140 @@ + + + + + WebSocket JSON Tester + + + + +

WebSocket JSON Tester

+ + + +
+ + + +

+ + + + + + + + + + + + + + + + + + + + +

Waiting for server response...
+ + + + + diff --git a/scripts/TEST_no_auth.html b/scripts/TEST_no_auth.html new file mode 100644 index 00000000000..26b34faca79 --- /dev/null +++ b/scripts/TEST_no_auth.html @@ -0,0 +1,141 @@ + + + + + WebSocket JSON Tester + + + + +

WebSocket JSON Tester

+ + + +
+ + + +

+ + + + + + + + + + + + + + + + + + + + +

Waiting for server response...
+ + + + + diff --git a/scripts/lleo_TEST_HTTP_API.sh b/scripts/lleo_TEST_HTTP_API.sh new file mode 100755 index 00000000000..a30faf56084 --- /dev/null +++ b/scripts/lleo_TEST_HTTP_API.sh @@ -0,0 +1,149 @@ +#!/bin/bash + +clear +source ./pulse_lib_lleo.sh + +#TOKEN=$(./token.sh claims.json) +#echo ${TOKEN} + + + +put "dnevnik/onlline/admin" "oki" "HULY-TTL: 3" + + + +exit +ZP="00000000-0000-0000-0000-000000000001/TESTS" + +put "00000000-0000-0000-0000-000000000001/TESTS/val1" "value" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS/val2" "value" "HULY-TTL: 12" +put "00000000-0000-0000-0000-000000000001/TESTS/val3" "value" "HULY-TTL: 3" + +get "00000000-0000-0000-0000-000000000001/TESTS/" +sleep 4 +# get "00000000-0000-0000-0000-000000000001/TESTS/val2" +get "00000000-0000-0000-0000-000000000001/TESTS/" + + + +exit + + + + + + +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + +#exit + delete "00000000-0000-0000-0000-000000000001/TESTS" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: *" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value" + delete "00000000-0000-0000-0000-000000000001/TESTS" "If-Match: dd358c74cb9cb897424838fbcb69c933" + +#exit + + put "00000000-0000-0000-0000-000000000001/TESTS" "Value" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/1" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2" "Value_2" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2/$/secret" "Value_secret" "HULY-TTL: 2" + get "00000000-0000-0000-0000-000000000001/TESTS/" + +#exit + + delete "0000000/TESTS" + delete ${ZP} + put ${ZP} "Value_1" "HULY-TTL: 2" + delete ${ZP} + +echo "--------- authorization_test ----------" +TOKEN="" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims_system.json) + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims_wrong_ws.json) + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" +TOKEN=$(./token.sh claims.json) + put "00000000-0000-0000-0000-000000000002/TESTS" "Value_1" "HULY-TTL: 2" + + + +echo "--------- if-match ----------" + + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/1" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/2" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/3$" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/4" "Value_1" "HULY-TTL: 2" + get "00000000-0000-0000-0000-000000000001/TESTS" + get "00000000-0000-0000-0000-000000000001/TESTS/" + get "00000000-0000-0000-0000-000000000001/TESTS/3/secret$/" + + +echo "--------- Deprecated symbols ----------" + + put "00000000-0000-0000-0000-000000000001/'TESTS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TES?TS" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS*" "Value_1" "HULY-TTL: 2" + put "00000000-0000-0000-0000-000000000001/TESTS/" "Value_1" "HULY-TTL: 2" + +echo "--------- if-match ----------" + + delete ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 1" "If-Match: *" + get ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 1" + get ${ZP} + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_3" "HULY-TTL: 1" "If-Match: dd358c74cb9cb897424838fbcb69c933" + put 
"00000000-0000-0000-0000-000000000001/TESTS" "Value_4" "HULY-TTL: 1" "If-Match: *" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_5" "HULY-TTL: 1" "If-Match: c7bcabf6b98a220f2f4888a18d01568d" + put "00000000-0000-0000-0000-000000000001/TESTS" "Value_6" "HULY-TTL: 1" "If-None-Match: *" + +echo "-- Expected OK: 201 Created (key was not exist)" + + put ${ZP} "enother text" "If-None-Match" "*" + + put ${ZP} "some text" + echo "-- Expected Error: 412 Precondition Failed (key was exist)" + put ${ZP} "enother text" "If-None-Match" "*" + +echo "================> UPDATE PUT If-Match" + + get ${ZP} + + echo "-- Expected OK: 204 No Content (right hash)" + put ${ZP} "some text" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + get ${ZP} + + echo "-- Expected OK: 204 No Content (hash still right)" + put ${ZP} "enother version" "If-Match" "552e21cd4cd9918678e3c1a0df491bc3" + + + + + + +put "00000000-0000-0000-0000-000000000001/TESTS" "Value_1" "HULY-TTL: 3" +echo "sleep 1 sec" +sleep 1 +get "00000000-0000-0000-0000-000000000001/TESTS" +echo "sleep 3 sec" +sleep 2 +get "00000000-0000-0000-0000-000000000001/TESTS" + +echo "--------- delete ----------" +put "00000000-0000-0000-0000-000000000001/TESTS" "Value_2" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001/TESTS" +delete "00000000-0000-0000-0000-000000000001/TESTS" +get "00000000-0000-0000-0000-000000000001/TESTS" + +echo "--------- prefix ----------" +put "00000000-0000-0000-0000-000000000001/TESTS1" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/TESTS2" "Value_1" "HULY-TTL: 3" +put "00000000-0000-0000-0000-000000000001/HREST2" "Value_1" "HULY-TTL: 3" +get "00000000-0000-0000-0000-000000000001?prefix=TES" +sleep 1 +get "00000000-0000-0000-0000-000000000001?prefix=" + +exit diff --git a/scripts/pulse_lib_lleo.sh b/scripts/pulse_lib_lleo.sh new file mode 100755 index 00000000000..4cf8832fcab --- /dev/null +++ b/scripts/pulse_lib_lleo.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +clear + +# URL="http://localhost:8095/api" +URL="https://hulypulse_mem.lleo.me/api" + +R='\033[0;31m' # Color red +G='\033[0;32m' # Color green +W='\033[0;33m' # Color ? 
+S='\033[0;34m' # Color Blue +F='\033[0;35m' # Color Fiolet +L='\033[0;36m' # Color LightBlue +N='\033[0m' # No Color +GRAY='\033[90m' # bright black + +api() { + local tmpfile + tmpfile=$1 + local status + status=$(head -n 1 "$tmpfile") + local status_code + status_code=$(echo "$status" | awk '{print $2}') + local etag + etag=$(grep -i "^ETag:" "${tmpfile}") + local body + body=$(awk 'found { print; next } NF == 0 { found = 1 }' "$tmpfile") + case "$status_code" in + 2*) echo -en "${G}${status}${N}" ;; + 3*) echo -en "${F}${status}${N}" ;; + 4*) echo -en "${R}${status}${N}" ;; + 5*) echo -en "${R}${status}${N}" ;; + *) echo -en "${GRAY}${status}${N}" ;; + esac + if [ -n "$etag" ]; then echo -n -e " ${F}${etag}${N}" ; fi + + body=$(echo "$body" | sed 's/{/\\n{/g') + + if [ -n "$body" ]; then echo -e "\n ${GRAY}[${body}]${N}" ; else echo -e " ${L}(no body)${N}" ; fi + rm -f "$tmpfile" +} + +get() { + echo -n -e "📥 ${L}GET ${W}$1${N} > " + local tmpfile + tmpfile=$(mktemp) + curl -i -s -X GET "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} + +put() { # If-None-Match If-Match + local match + local match_prn +# if [ -n "$3" ]; then match=(-H "$3: $4") ; else match=() ; fi +# if [ -n "$3" ]; then match_prn=" ${F}$3:$4${N}" ; else match_prn="" ; fi +# echo -n -e "📥 ${L}PUT ${W}$1${N}${match_prn} > " + + if [ -n "$3" ]; then match1=(-H "$3") ; else match1=() ; fi + if [ -n "$3" ]; then match1_prn=" ${F}$3${N}" ; else match1_prn="" ; fi + if [ -n "$4" ]; then match2=(-H "$4") ; else match2=() ; fi + if [ -n "$4" ]; then match2_prn=" ${F}$4${N}" ; else match2_prn="" ; fi + echo -n -e "📥 ${L}PUT ${W}$1${N}${match1_prn}${match2_prn} > " + + local tmpfile + tmpfile=$(mktemp) +# curl -v -i -s -X PUT "$URL/$1" "${match1[@]}" "${match2[@]}" -H "Content-Type: application/json" -d "$2" | tr -d '\r' > "$tmpfile" +# curl -v -i -s -X PUT "$URL/$1" -H "Authorization: Bearer ${TOKEN}" "${match1[@]}" "${match2[@]}" -H "Content-Type: application/json" -d "$2" | tr -d '\r' > "$tmpfile" + curl -i -s -X PUT "$URL/$1" -H "Authorization: Bearer ${TOKEN}" "${match1[@]}" "${match2[@]}" -H "Content-Type: application/json" -d "$2" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} + +delete() { + echo -n -e "📥 ${L}DELETE ${W}$1${N} > " + local tmpfile + tmpfile=$(mktemp) + curl -i -s -X DELETE "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" +# curl -v -i -s -X DELETE "$URL/$1" -H "Authorization: Bearer ${TOKEN}" | tr -d '\r' > "$tmpfile" + api ${tmpfile} +} diff --git a/src/config.rs b/src/config.rs index dfe5037cbaa..feea94aa941 100644 --- a/src/config.rs +++ b/src/config.rs @@ -46,6 +46,9 @@ pub struct Config { pub redis_service: String, pub max_ttl: usize, + + pub memory_mode: Option, + pub no_authorization: Option, } pub static CONFIG: LazyLock = LazyLock::new(|| { diff --git a/src/config/default.toml b/src/config/default.toml index a570fff0c79..b4ad885aed2 100644 --- a/src/config/default.toml +++ b/src/config/default.toml @@ -9,3 +9,6 @@ redis_mode = "direct" redis_service = "mymaster" max_ttl = 3600 + +memory_mode = true +no_authorization = true \ No newline at end of file diff --git a/src/db.rs b/src/db.rs new file mode 100644 index 00000000000..71f84a2bc4f --- /dev/null +++ b/src/db.rs @@ -0,0 +1,101 @@ +use std::sync::Arc; + +use crate::{ + hub_service::{broadcast_event, HubState, RedisEvent, RedisEventAction}, + memory::{memory_delete, memory_list, memory_read, memory_save, MemoryBackend}, + redis::{redis_delete, redis_list, redis_read, redis_save, 
RedisArray, SaveMode, Ttl} +}; +use ::redis::aio::MultiplexedConnection; +use tokio::sync::RwLock; + +#[derive(Clone)] +pub struct Db { + inner: DbInner, + hub: Arc>, +} + +#[derive(Clone)] +enum DbInner { + Memory(MemoryBackend), + Redis(MultiplexedConnection), +} + +impl Db { + + pub fn new_memory(m: MemoryBackend, hub: Arc>) -> Self { + Self { inner: DbInner::Memory(m), hub } + } + pub fn new_redis(c: MultiplexedConnection, hub: Arc>) -> Self { + Self { inner: DbInner::Redis(c), hub } + } + + + pub async fn list(&self, key: &str) -> redis::RedisResult> { + match &self.inner { + DbInner::Memory(m) => memory_list(m, key).await, + DbInner::Redis(conn) => { + let mut c = conn.clone(); + redis_list(&mut c, key).await + } + } + } + + pub async fn read(&self, key: &str) -> redis::RedisResult> { + match &self.inner { + DbInner::Memory(m) => memory_read(m, key).await, + DbInner::Redis(conn) => { + let mut c = conn.clone(); + redis_read(&mut c, key).await + } + } + } + + + pub async fn save>( + &self, + key: &str, + value: V, + ttl: Option, + mode: Option, + ) -> redis::RedisResult<()> { + match &self.inner { + DbInner::Memory(m) => { + memory_save(m, key, value.as_ref(), ttl, mode).await?; + // Send events + let value_str = std::str::from_utf8(value.as_ref()).ok().map(|s| s.to_string()); + broadcast_event( + &self.hub, + RedisEvent { message: RedisEventAction::Set, key: key.to_string() }, + value_str, + ).await; + Ok(()) + } + DbInner::Redis(conn) => { + let mut c = conn.clone(); + redis_save(&mut c, key, value.as_ref(), ttl, mode).await + } + } + } + + + pub async fn delete(&self, key: &str, mode: Option) -> redis::RedisResult { + match &self.inner { + DbInner::Memory(m) => { + let deleted = memory_delete(m, key, mode).await?; + if deleted { + broadcast_event( + &self.hub, + RedisEvent { message: RedisEventAction::Del, key: key.to_string() }, + None, + ).await; + } + Ok(deleted) + } + DbInner::Redis(conn) => { + let mut c = conn.clone(); + redis_delete(&mut c, key, mode).await + } + } + } + +} \ No newline at end of file diff --git a/src/handlers_http.rs b/src/handlers_http.rs index 74766e218ec..a577709184d 100644 --- a/src/handlers_http.rs +++ b/src/handlers_http.rs @@ -14,7 +14,6 @@ // use anyhow::anyhow; -use redis::aio::MultiplexedConnection; use serde::Deserialize; use tracing::*; @@ -23,7 +22,10 @@ use actix_web::{ web::{self}, }; -use crate::redis::{SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save}; +use crate::{ + db::Db, + redis::{SaveMode, Ttl} +}; pub fn map_handler_error(err: impl std::fmt::Display) -> Error { let msg = err.to_string(); @@ -51,17 +53,14 @@ pub struct PathParams { /// list pub async fn list( path: web::Path, - redis: web::Data, + db: web::Data, ) -> Result { let key = path.into_inner().key; trace!(key, "list request"); async move || -> anyhow::Result { - let mut redis = redis.get_ref().clone(); - - let entries = redis_list(&mut redis, &key).await?; - + let entries = db.list(&key).await?; Ok(HttpResponse::Ok().json(entries)) }() .await @@ -71,23 +70,21 @@ pub async fn list( /// get pub async fn get( path: web::Path, - redis: web::Data, + db: web::Data, ) -> Result { let key = path.into_inner().key; trace!(key, "get request"); async move || -> anyhow::Result { - let mut redis = redis.get_ref().clone(); - - Ok(redis_read(&mut redis, &key) - .await? 
- .map(|entry| { - HttpResponse::Ok() - .insert_header(("ETag", &*entry.etag)) - .json(entry) - }) - .unwrap_or_else(|| HttpResponse::NotFound().body("empty"))) + let entry_opt = db.read(&key).await?; + let resp = match entry_opt { + Some(entry) => HttpResponse::Ok() + .insert_header(("ETag", entry.etag.clone())) + .json(entry), + None => HttpResponse::NotFound().body("empty"), + }; + Ok(resp) }() .await .map_err(map_handler_error) @@ -98,14 +95,13 @@ pub async fn put( req: HttpRequest, path: web::Path, body: web::Bytes, - redis: web::Data, + db: web::Data, ) -> Result { let key: String = path.into_inner().key; trace!(key, "put request"); async move || -> anyhow::Result { - let mut redis = redis.get_ref().clone(); // TTL logic let mut ttl = None; @@ -149,8 +145,8 @@ pub async fn put( } } - redis_save(&mut redis, &key, &body[..], ttl, mode).await?; - return Ok(HttpResponse::Ok().body("DONE")); + db.save(&key, &body[..], ttl, mode).await?; + Ok(HttpResponse::Ok().body("DONE")) }() .await .map_err(map_handler_error) @@ -160,14 +156,13 @@ pub async fn put( pub async fn delete( req: HttpRequest, path: web::Path, - redis: web::Data, + db: web::Data, ) -> Result { let key: String = path.into_inner().key; trace!(key, "delete request"); async move || -> anyhow::Result { - let mut redis = redis.get_ref().clone(); // MODE logic let mut mode = Some(SaveMode::Upsert); @@ -183,8 +178,7 @@ pub async fn delete( } // `If-Match: ` — delete only if current } - let deleted = redis_delete(&mut redis, &key, mode).await?; - + let deleted = db.delete(&key, mode).await?; let response = match deleted { true => HttpResponse::NoContent().finish(), false => HttpResponse::NotFound().body("not found"), diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 97385c5e7f3..26ff558fe02 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -19,15 +19,12 @@ use tokio::sync::RwLock; use actix::{Actor, ActorContext, ActorFutureExt, AsyncContext, StreamHandler, fut}; use actix_web::{Error, HttpMessage, HttpRequest, HttpResponse, web}; use actix_web_actors::ws; -use redis::aio::MultiplexedConnection; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; -use crate::redis::{ SaveMode, Ttl, redis_delete, redis_list, redis_read, redis_save }; -use crate::hub_service::{ServerMessage, SessionId, new_session_id}; -use crate::workspace_owner::check_workspace_core; -use crate::hub_service::HubState; - +use crate::{ + config::CONFIG, db::Db, hub_service::{new_session_id, HubState, ServerMessage, SessionId}, redis::{ SaveMode, Ttl }, workspace_owner::check_workspace_core +}; #[derive(Serialize, Default)] struct ReturnBase<'a> { @@ -125,10 +122,10 @@ use hulyrs::services::jwt::Claims; /// Session condition pub struct WsSession { - pub redis: MultiplexedConnection, + pub db: Db, pub id: SessionId, hub_state: Arc>, - pub claims: Claims, + pub claims: Option, } /// Actor External trait: must be in separate impl block @@ -240,12 +237,12 @@ impl WsSession { ..Default::default() }); - let mut redis = self.redis.clone(); let claims = self.claims.clone(); + let db = self.db.clone(); let fut = async move { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } // TTL logic let real_ttl = if let Some(secs) = ttl { @@ -276,7 +273,8 @@ impl WsSession { } } - redis_save(&mut redis, &key, &data, real_ttl, mode) + // Save + db.save(&key, &data, real_ttl, mode) .await .map_err(|e| e.to_string())?; @@ -293,7 
+291,7 @@ impl WsSession { } => { tracing::info!("DELETE {}", &key); // correlation:{:?} , &correlation - let mut redis = self.redis.clone(); + let db = self.db.clone(); let claims = self.claims.clone(); let base = serde_json::json!(ReturnBase { @@ -306,7 +304,7 @@ impl WsSession { let fut = async move { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } // MODE logic let mut mode = Some(SaveMode::Upsert); @@ -320,7 +318,8 @@ impl WsSession { } } - let deleted = redis_delete(&mut redis, &key, mode) + // Delete + let deleted = db.delete(&key, mode) .await .map_err(|e| e.to_string())?; @@ -344,21 +343,18 @@ impl WsSession { ..Default::default() }); - let mut redis = self.redis.clone(); + let db = self.db.clone(); let claims = self.claims.clone(); let fut = async move { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } - - let data_opt = redis_read(&mut redis, &key) - .await - .map_err(|e| e.to_string())?; + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } + // Read + let data_opt = db.read(&key).await.map_err(|e| e.to_string())?; match data_opt { Some(data) => { - let data_value = - serde_json::to_value(&data).map_err(|e| e.to_string())?; + let data_value = serde_json::to_value(&data).map_err(|e| e.to_string())?; Ok(json!({"result": data_value})) } None => Err("not found".into()), @@ -378,16 +374,14 @@ impl WsSession { ..Default::default() }); - let mut redis = self.redis.clone(); + let db = self.db.clone(); let claims = self.claims.clone(); let fut = async move { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } - - let data = redis_list(&mut redis, &key) - .await - .map_err(|e| e.to_string())?; + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } + // List + let data = db.list(&key).await.map_err(|e| e.to_string())?; Ok(json!({ "result": data })) }; @@ -411,7 +405,7 @@ impl WsSession { let fut = async move { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } hub_state.write().await.subscribe(id, key); Ok(json!({ "result": "OK" })) @@ -441,7 +435,7 @@ impl WsSession { Ok(json!({ "result": "OK" })) } else { // Check workspace - if let Err(e) = check_workspace_core(&claims, &key) { return Err(e.into()); } + if let Err(e) = check_workspace_core(claims, &key) { return Err(e.into()); } hub_state.write().await.unsubscribe(id, key); Ok(json!({ "result": "OK" })) @@ -476,21 +470,21 @@ impl WsSession { pub async fn handler( req: HttpRequest, payload: web::Payload, - redis: web::Data, + db: web::Data, hub_state: web::Data>>, ) -> Result { - let claims = req - .extensions() - .get::() - .expect("Missing claims") - .to_owned(); + + let claims = if CONFIG.no_authorization == Some(true) { + None + } else { + Some(req.extensions().get::().expect("Missing claims").to_owned()) + }; let session = WsSession { - redis: redis.get_ref().clone(), - // hub: hub.get_ref().clone(), + db: db.get_ref().clone(), hub_state: hub_state.get_ref().clone(), id: new_session_id(), - claims, + claims: claims, }; ws::start(session, &req, payload) diff --git a/src/hub_service.rs b/src/hub_service.rs index dc07686484e..f44b9a05111 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -43,16 +43,12 @@ pub struct ServerMessage { 
pub value: Option, } -// ==== ID ==== - pub type SessionId = u64; static NEXT_ID: AtomicU64 = AtomicU64::new(1); pub fn new_session_id() -> SessionId { NEXT_ID.fetch_add(1, Ordering::SeqCst) } -// ==== Redis events ==== - #[derive(Debug, Clone, Serialize)] pub enum RedisEventAction { Set, @@ -69,8 +65,6 @@ pub struct RedisEvent { pub key: String, } -// ==== Handle ==== - #[derive(Debug, Default)] pub struct HubState { sessions: HashMap>, @@ -134,14 +128,13 @@ impl HubState { } - -// Send messages about new Redis events -pub async fn push_event( +// Send messages about new db events +pub async fn broadcast_event( hub_state: &Arc>, - redis: &mut MultiplexedConnection, ev: RedisEvent, + value: Option, ) { - // Collect Addresses + // Collect let recipients: Vec> = { hub_state.read().await.recipients_for_key(&ev.key) }; @@ -149,10 +142,22 @@ pub async fn push_event( return; } - // Get value from Redis (only for `Set` event, not for `Delete`, `Expire`) + // Send + let payload = ServerMessage { event: ev, value }; + for rcpt in recipients { + let _ = rcpt.do_send(payload.clone()); + } +} + +pub async fn push_event( + hub_state: &Arc>, + redis: &mut MultiplexedConnection, + ev: RedisEvent, +) { + // Value only for Set let mut value: Option = None; if matches!(ev.message, RedisEventAction::Set) { - match redis::cmd("GET") + match ::redis::cmd("GET") .arg(&ev.key) .query_async::>(redis) .await @@ -162,9 +167,5 @@ pub async fn push_event( } } - // Sending - let payload = ServerMessage { event: ev, value }; - for rcpt in recipients { - let _ = rcpt.do_send(payload.clone()); - } -} + broadcast_event(hub_state, ev, value).await; +} \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 6a1ed21a397..6ab19514dfb 100644 --- a/src/main.rs +++ b/src/main.rs @@ -38,6 +38,12 @@ use hub_service::{HubState}; use config::CONFIG; +mod db; +mod memory; + +use crate::db::Db; +use crate::memory::MemoryBackend; + fn initialize_tracing(level: tracing::Level) { use tracing_subscriber::{filter::targets::Targets, prelude::*}; @@ -61,6 +67,10 @@ async fn extract_claims( token: Option, } + if CONFIG.no_authorization == Some(true) { + return next.call(request).await; + } + let query = request.extract::>().await?.into_inner(); let claims = if let Some(token) = query.token { @@ -77,6 +87,11 @@ async fn check_workspace( mut request: ServiceRequest, next: Next, ) -> Result, Error> { + + if CONFIG.no_authorization.unwrap_or(false) { + return next.call(request).await; + } + let workspace = Uuid::parse_str(&request.extract::>().await?); let claims = request.extensions().get::().cloned().unwrap(); @@ -98,14 +113,21 @@ async fn main() -> anyhow::Result<()> { tracing::info!("{}/{}", env!("CARGO_BIN_NAME"), env!("CARGO_PKG_VERSION")); - let redis_client = redis::client().await?; - let redis_connection = redis_client.get_multiplexed_async_connection().await?; - // starting HubService let hub_state = Arc::new(RwLock::new(HubState::default())); - // starting Logger - tokio::spawn(redis::receiver(redis_client, hub_state.clone())); + let db_backend = if CONFIG.memory_mode == Some(true) { + let memory = MemoryBackend::new(); + memory.spawn_ticker(hub_state.clone()); + tracing::info!("Memory mode enabled"); + Db::new_memory(memory, hub_state.clone()) + } else { + let redis_client = redis::client().await?; + let redis_connection = redis_client.get_multiplexed_async_connection().await?; + tokio::spawn(crate::redis::receiver(redis_client, hub_state.clone())); + tracing::info!("Redis mode enabled"); + 
Db::new_redis(redis_connection, hub_state.clone()) + }; let socket = std::net::SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); @@ -127,8 +149,7 @@ async fn main() -> anyhow::Result<()> { .max_age(3600); App::new() - .app_data(web::Data::new(redis_connection.clone())) - //.app_data(web::Data::new(hub.clone())) + .app_data(web::Data::new(db_backend.clone())) .app_data(web::Data::new(hub_state.clone())) .wrap(middleware::Logger::default()) .wrap(cors) diff --git a/src/memory.rs b/src/memory.rs new file mode 100644 index 00000000000..f375de329b5 --- /dev/null +++ b/src/memory.rs @@ -0,0 +1,277 @@ + +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +use std::{collections::HashMap, sync::Arc, time::{SystemTime, UNIX_EPOCH}}; +use tokio::{sync::RwLock, time::{self, Duration}}; +use crate::{ + config::CONFIG, + hub_service::{broadcast_event, HubState, RedisEvent, RedisEventAction}, + redis::{deprecated_symbol_error, error, RedisArray, SaveMode, Ttl}, +}; + +#[derive(Debug, Clone)] +struct Entry { + data: String, + ttl: u8, +} + +#[derive(Clone, Default)] +pub struct MemoryBackend { + inner: Arc>>, +} + +impl MemoryBackend { + + pub fn new() -> Self { + Self { inner: Arc::new(RwLock::new(HashMap::new())) } + } + + pub fn spawn_ticker(&self, hub_state: Arc>) { + let inner = self.inner.clone(); + tokio::spawn(async move { + let mut ticker = time::interval(Duration::from_secs(1)); + loop { + ticker.tick().await; + + // lock + let expired_keys: Vec = { + let mut map = inner.write().await; + + let mut expired: Vec = Vec::new(); + for (k, v) in map.iter_mut() { + if v.ttl > 0 { + v.ttl = v.ttl.saturating_sub(1); + if v.ttl == 0 { + expired.push(k.clone()); + } + } else { + expired.push(k.clone()); + } + } + + for k in &expired { + map.remove(k); + } + + expired + }; // write-lock free + + for k in expired_keys { + broadcast_event(&hub_state,RedisEvent { message: RedisEventAction::Expired, key: k },None).await; + } + } + }); + } +} + + +/// memory_list(&backend, "prefix/") → Vec +pub async fn memory_list( + backend: &MemoryBackend, + key_prefix: &str, +) -> redis::RedisResult> { + deprecated_symbol_error(key_prefix)?; + if !key_prefix.ends_with('/') { + return error(412, "Key must end with slash"); + } + + let map = backend.inner.read().await; + + let mut results = Vec::new(); + for (k, v) in map.iter() { + if !k.starts_with(key_prefix) { + continue; + } + + if k.strip_prefix(key_prefix).map_or(false, |s| s.contains('$')) { + continue; + } + + if v.ttl == 0 { + continue; + } + + results.push(RedisArray { + key: k.clone(), + data: v.data.clone(), + expires_at: v.ttl as u64, + etag: hex::encode(md5::compute(&v.data).0), + }); + } + + Ok(results) +} + +/// memory_read(&backend, "key") +pub async fn memory_read( + backend: &MemoryBackend, + key: &str, +) -> redis::RedisResult> { + deprecated_symbol_error(key)?; + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } + + let map = 
backend.inner.read().await; + + match map.get(key) { + None => Ok(None), + Some(entry) => { + let data = entry.data.clone(); + let ttl = entry.ttl as u64; + + Ok(Some(RedisArray { + key: key.to_string(), + data: data.clone(), + expires_at: ttl, + etag: hex::encode(md5::compute(&data).0), + })) + } + } +} + +/// TTL in sec +fn compute_ttl_u8(ttl: Option) -> redis::RedisResult { + + let sec_usize = match ttl { + Some(Ttl::Sec(secs)) => secs, + Some(Ttl::At(timestamp)) => { + let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + if timestamp <= now { + return error(400, "TTL timestamp exceeds MAX_TTL limit"); + } + (timestamp - now) as usize + } + None => CONFIG.max_ttl, + }; + + if sec_usize == 0 { + return error(400, "TTL must be > 0"); + } + + if sec_usize > CONFIG.max_ttl { + return error(412, "TTL exceeds MAX_TTL"); + } + + let capped = sec_usize.min(u8::MAX as usize); + Ok(capped as u8) +} + +/// memory_save(&backend, "key", value, ttl, mode) +pub async fn memory_save>( + backend: &MemoryBackend, + key: &str, + bytes_value: V, + ttl: Option, + mode: Option, +) -> redis::RedisResult<()> { + + // u8 - String + let value = match std::str::from_utf8(bytes_value.as_ref()) { + Ok(s) => s.to_string(), + Err(_) => return error(400, "Value must be valid UTF-8"), + }; + + deprecated_symbol_error(key)?; + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } + + let sec_u8 = compute_ttl_u8(ttl)?; + let val = value.to_string(); + + let mut map = backend.inner.write().await; + + let mode = mode.unwrap_or(SaveMode::Upsert); + + match mode { + SaveMode::Upsert => { + map.insert(key.to_string(), Entry { data: val, ttl: sec_u8 }); + } + SaveMode::Insert => { + if map.contains_key(key) { + return error(412, "Insert: key already exists"); + } + map.insert(key.to_string(), Entry { data: val, ttl: sec_u8 }); + } + SaveMode::Update => { + let Some(existing) = map.get_mut(key) else { + return error(404, "Update: key does not exist"); + }; + *existing = Entry { data: val, ttl: sec_u8 }; + } + SaveMode::Equal(ref expected_md5) => { + let Some(existing) = map.get_mut(key) else { + return error(404, "Equal: key does not exist"); + }; + let actual_md5 = hex::encode(md5::compute(&existing.data).0); + if &actual_md5 != expected_md5 { + return error( + 412, + format!("md5 mismatch, current: {}, expected: {}", actual_md5, expected_md5), + ); + } + *existing = Entry { data: val, ttl: sec_u8 }; + } + } + + Ok(()) +} + +/// memory_delete(&backend, "key", mode) +pub async fn memory_delete( + backend: &MemoryBackend, + key: &str, + mode: Option, +) -> redis::RedisResult { + deprecated_symbol_error(key)?; + if key.ends_with('/') { + return error(412, "Key must not end with a slash"); + } + + let mut map = backend.inner.write().await; + let mode = mode.unwrap_or(SaveMode::Upsert); + + match mode { + SaveMode::Insert => { + return error(412, "Insert mode is not supported for delete"); + } + SaveMode::Update | SaveMode::Upsert => { + let existed = map.remove(key).is_some(); + Ok(existed) + } + SaveMode::Equal(ref expected_md5) => { + match map.get(key) { + None => return error(404, "Equal: key does not exist"), + Some(existing) => { + let actual_md5 = hex::encode(md5::compute(&existing.data).0); + if &actual_md5 != expected_md5 { + return error( + 412, + format!("md5 mismatch, current: {}, expected: {}", actual_md5, expected_md5), + ); + } + } + } + let existed = map.remove(key).is_some(); + if !existed { + // WHF?! 
+ return error(404, "Delete: key does not exist"); + } + Ok(true) + } + } +} diff --git a/src/workspace_owner.rs b/src/workspace_owner.rs index 2014953921b..42423db1545 100644 --- a/src/workspace_owner.rs +++ b/src/workspace_owner.rs @@ -16,15 +16,21 @@ use hulyrs::services::jwt::Claims; use uuid::Uuid; -use crate::redis::deprecated_symbol; +use crate::{config::CONFIG, redis::deprecated_symbol}; // common checker -pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static str> { +pub fn check_workspace_core(claims_opt: Option, key: &str) -> Result<(), &'static str> { if deprecated_symbol(key) { return Err("Invalid key: deprecated symbols"); } + if CONFIG.no_authorization == Some(true) { + return Ok(()); + } + + let claims = claims_opt.ok_or("Missing authorization")?; + if claims.is_system() { return Ok(()); } @@ -40,11 +46,12 @@ pub fn check_workspace_core(claims: &Claims, key: &str) -> Result<(), &'static s if path_ws.is_empty() { return Err("Invalid key: missing workspace"); } - + let path_ws_uuid = Uuid::parse_str(path_ws).map_err(|_| "Invalid workspace UUID in key")?; if jwt_workspace != &path_ws_uuid { return Err("Workspace mismatch"); } - + Ok(()) + } From 8f58dbf578fc7844eb580d181f91d0f5a3b8e28b Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Sun, 24 Aug 2025 06:25:35 +0300 Subject: [PATCH 177/636] New config options: max_size --- README.md | 5 +++++ src/config.rs | 1 + src/config/default.toml | 5 ++++- src/memory.rs | 6 ++++++ src/redis.rs | 6 ++++++ 5 files changed, 22 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 7cd4cff5644..ead3fac26e3 100644 --- a/README.md +++ b/README.md @@ -184,6 +184,11 @@ Size of data is limited to some reasonable size - `{"message":"Del","key":"00000000-0000-0000-0000-000000000001/foo/bar"}` +## Special options in config/default.toml + - ```memory_mode = true``` Use native memory storage instead Redis + - ```no_authorization = true``` Don't check authorization + - ```max_size = 100``` Max value size in bytes + ## Running diff --git a/src/config.rs b/src/config.rs index feea94aa941..181a96df5e8 100644 --- a/src/config.rs +++ b/src/config.rs @@ -46,6 +46,7 @@ pub struct Config { pub redis_service: String, pub max_ttl: usize, + pub max_size: Option, pub memory_mode: Option, pub no_authorization: Option, diff --git a/src/config/default.toml b/src/config/default.toml index b4ad885aed2..c3ee02713d5 100644 --- a/src/config/default.toml +++ b/src/config/default.toml @@ -10,5 +10,8 @@ redis_service = "mymaster" max_ttl = 3600 +# optional settings + memory_mode = true -no_authorization = true \ No newline at end of file +no_authorization = true +max_size = 100 \ No newline at end of file diff --git a/src/memory.rs b/src/memory.rs index f375de329b5..9034cfd85aa 100644 --- a/src/memory.rs +++ b/src/memory.rs @@ -185,6 +185,12 @@ pub async fn memory_save>( Err(_) => return error(400, "Value must be valid UTF-8"), }; + // If max_size != 0 and value size > max_size, return error + let max_size = CONFIG.max_size.unwrap_or(0); + if max_size != 0 && value.len() > max_size { + return error(400, format!("Value in memory mode must be less than {} bytes", max_size)); + } + deprecated_symbol_error(key)?; if key.ends_with('/') { return error(412, "Key must not end with a slash"); diff --git a/src/redis.rs b/src/redis.rs index 40c1564b1fe..567cc075de9 100644 --- a/src/redis.rs +++ b/src/redis.rs @@ -196,6 +196,12 @@ pub async fn redis_save( return error(412, "Key must not end with a slash"); } + // If max_size != 0 and value size > 
max_size, return error + let max_size = CONFIG.max_size.unwrap_or(0); + if max_size != 0 && value.to_redis_args().iter().map(|a| a.len()).sum::() > max_size { + return error(400, format!("Value in memory mode must be less than {} bytes", max_size)); + } + // TTL logic let sec = match ttl { Some(Ttl::Sec(secs)) => secs, From 407c52ca437011ab2bb988c2cf7d103d6b83ff8a Mon Sep 17 00:00:00 2001 From: Kristina Date: Mon, 25 Aug 2025 09:50:39 +0400 Subject: [PATCH 178/636] Init directs (#95) Signed-off-by: Kristina Fefelova --- packages/cockroach/src/adapter.ts | 45 +++++- packages/cockroach/src/db/mapping.ts | 33 ++++- packages/cockroach/src/db/message.ts | 62 ++++++-- packages/cockroach/src/db/peer.ts | 135 ++++++++++++++++++ packages/cockroach/src/init.ts | 52 +++++-- packages/cockroach/src/schema.ts | 22 ++- packages/sdk-types/src/db.ts | 21 ++- packages/sdk-types/src/domain.ts | 3 +- packages/sdk-types/src/events/card.ts | 2 - packages/sdk-types/src/events/event.ts | 6 +- packages/sdk-types/src/events/peer.ts | 44 ++++++ packages/sdk-types/src/serverApi.ts | 6 +- packages/server/src/index.ts | 14 +- packages/server/src/middleware/base.ts | 28 +++- packages/server/src/middleware/broadcast.ts | 4 + packages/server/src/middleware/date.ts | 2 + packages/server/src/middleware/db.ts | 37 ++++- packages/server/src/middleware/peer.ts | 60 ++++++++ packages/server/src/middleware/permissions.ts | 3 + packages/server/src/middleware/triggers.ts | 3 + packages/server/src/middleware/validate.ts | 32 ++++- packages/server/src/middlewares.ts | 26 +++- .../server/src/notification/notification.ts | 2 + packages/server/src/triggers/card.ts | 2 +- packages/server/src/triggers/message.ts | 55 ++++++- packages/server/src/types.ts | 15 +- packages/types/src/index.ts | 1 + packages/types/src/peer.ts | 43 ++++++ packages/types/src/query.ts | 16 ++- 29 files changed, 712 insertions(+), 62 deletions(-) create mode 100644 packages/cockroach/src/db/peer.ts create mode 100644 packages/sdk-types/src/events/peer.ts create mode 100644 packages/server/src/middleware/peer.ts create mode 100644 packages/types/src/peer.ts diff --git a/packages/cockroach/src/adapter.ts b/packages/cockroach/src/adapter.ts index 0b83dbaefe5..7a2b3ade0ac 100644 --- a/packages/cockroach/src/adapter.ts +++ b/packages/cockroach/src/adapter.ts @@ -45,7 +45,8 @@ import { type BlobID, type AttachmentData, type AttachmentID, - type AttachmentUpdateData, WithTotal + type AttachmentUpdateData, WithTotal, PeerKind, PeerExtra, + FindPeersParams, Peer, FindThreadParams } from '@hcengineering/communication-types' import type { DbAdapter, @@ -66,11 +67,13 @@ import { formatName } from './utils' import { initSchema } from './init' import { LabelsDb } from './db/label' import { SqlClient } from './client' +import { PeersDb } from './db/peer' export class CockroachAdapter implements DbAdapter { private readonly message: MessagesDb private readonly notification: NotificationsDb private readonly label: LabelsDb + private readonly peer: PeersDb constructor ( private readonly sql: SqlClient, @@ -81,6 +84,7 @@ export class CockroachAdapter implements DbAdapter { this.message = new MessagesDb(this.sql, this.workspace, logger, options) this.notification = new NotificationsDb(this.sql, this.workspace, logger, options) this.label = new LabelsDb(this.sql, this.workspace, logger, options) + this.peer = new PeersDb(this.sql, this.workspace, logger, options) } async createMessage ( @@ -201,8 +205,8 @@ export class CockroachAdapter implements DbAdapter { return await 
this.message.findMessagesGroups(params) } - async findThread (thread: CardID): Promise { - return await this.message.findThread(thread) + async findThreads (params: FindThreadParams): Promise { + return await this.message.findThreads(params) } async addCollaborators ( @@ -316,6 +320,28 @@ export class CockroachAdapter implements DbAdapter { return this.label.updateLabels(card, updates) } + async createPeer ( + workspaceId: WorkspaceID, + cardId: CardID, + kind: PeerKind, + value: string, + extra: PeerExtra, + date: Date + ): Promise { + await this.peer.createPeer(workspaceId, cardId, kind, value, extra, date) + } + + async removePeer (workspaceId: WorkspaceID, + cardId: CardID, + kind: PeerKind, + value: string): Promise { + await this.peer.removePeer(workspaceId, cardId, kind, value) + } + + findPeers (params: FindPeersParams): Promise { + return this.peer.findPeers(params) + } + async getAccountsByPersonIds (ids: string[]): Promise { if (ids.length === 0) return [] const sql = `SELECT data ->> 'personUuid' AS "personUuid" @@ -350,6 +376,19 @@ export class CockroachAdapter implements DbAdapter { return result[0]?.title } + // TODO: remove later + async getCardSpaceMembers (cardId: CardID): Promise { + const sql = `SELECT s.members +FROM public.space AS s +JOIN public.card AS c ON c.space = s._id + WHERE c."workspaceId" = $1::uuid + AND c."_id" = $2::text + LIMIT 1` + + const result = await this.sql.execute(sql, [this.workspace, cardId]) + return result[0]?.members ?? [] + } + async getMessageCreated (cardId: CardID, messageId: MessageID): Promise { return await this.message.getMessageCreated(cardId, messageId) } diff --git a/packages/cockroach/src/db/mapping.ts b/packages/cockroach/src/db/mapping.ts index 34cf72e2c46..c7a98b6391d 100644 --- a/packages/cockroach/src/db/mapping.ts +++ b/packages/cockroach/src/db/mapping.ts @@ -35,7 +35,10 @@ import { type AccountID, type MessageExtra, AttachmentID, - Attachment + Attachment, + Peer, + WorkspaceID, + PeerExtra } from '@hcengineering/communication-types' import { Domain } from '@hcengineering/communication-sdk-types' import { applyPatches } from '@hcengineering/communication-shared' @@ -296,3 +299,31 @@ export function toLabel (raw: DbModel): Label { created: new Date(raw.created) } } + +export function toPeer ( + raw: DbModel & { members?: { workspace_id: WorkspaceID, card_id: CardID, extra?: PeerExtra }[] } +): Peer { + const peer: Peer = { + workspaceId: raw.workspace_id, + cardId: raw.card_id, + kind: raw.kind, + value: raw.value, + extra: raw.extra, + created: new Date(raw.created) + } + + if (peer.kind === 'card') { + return { + ...peer, + kind: 'card', + members: + raw.members?.map((it) => ({ + workspaceId: it.workspace_id, + cardId: it.card_id, + extra: it.extra ?? {} + })) ?? 
[] + } + } + + return peer +} diff --git a/packages/cockroach/src/db/message.ts b/packages/cockroach/src/db/message.ts index d0cb09218af..4c5c16361c0 100644 --- a/packages/cockroach/src/db/message.ts +++ b/packages/cockroach/src/db/message.ts @@ -25,6 +25,7 @@ import { type CardType, type FindMessagesGroupsParams, type FindMessagesParams, + FindThreadParams, type Markdown, type Message, type MessageExtra, @@ -753,21 +754,52 @@ export class MessagesDb extends BaseDb { return { where: `WHERE ${where.join(' AND ')}`, values } } - // Find thread - async findThread (thread: CardID): Promise { - const sql = `SELECT t.card_id, - t.message_id::text, - t.thread_id, - t.thread_type, - t.replies_count::int, - t.last_reply - FROM ${Domain.Thread} t - WHERE t.workspace_id = $1::uuid - AND t.thread_id = $2::varchar - LIMIT 1;` - - const result = await this.execute(sql, [this.workspace, thread], 'find thread') - return result.map((it: any) => toThread(it))[0] + // Find threads + async findThreads (params: FindThreadParams): Promise { + const { where, values } = this.buildThreadWhere(params) + const select = ` + SELECT * + FROM ${Domain.Thread} t + ` + + const limit = params.limit != null ? ` LIMIT ${params.limit}` : '' + const orderBy = + params.order != null ? `ORDER BY t.date ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + + const sql = [select, where, orderBy, limit].join(' ') + const result = await this.execute(sql, values, 'find threads') + + return result.map((it: any) => toThread(it)) + } + + private buildThreadWhere ( + params: FindThreadParams, + startIndex: number = 0, + prefix: string = 't.' + ): { where: string, values: any[] } { + const where: string[] = [] + const values: any[] = [] + let index = startIndex + 1 + + where.push(`${prefix}workspace_id = $${index++}::uuid`) + values.push(this.workspace) + + if (params.cardId != null) { + where.push(`${prefix}card_id = $${index++}::varchar`) + values.push(params.cardId) + } + + if (params.messageId != null) { + where.push(`${prefix}message_id = $${index++}::varchar`) + values.push(params.messageId) + } + + if (params.threadId != null) { + where.push(`${prefix}thread_id = $${index++}::varchar`) + values.push(params.threadId) + } + + return { where: `WHERE ${where.join(' AND ')}`, values } } // Find messages groups diff --git a/packages/cockroach/src/db/peer.ts b/packages/cockroach/src/db/peer.ts new file mode 100644 index 00000000000..d4a7b3869d2 --- /dev/null +++ b/packages/cockroach/src/db/peer.ts @@ -0,0 +1,135 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import { + WorkspaceID, + type CardID, + PeerKind, + PeerExtra, + FindPeersParams, + SortingOrder, + Peer +} from '@hcengineering/communication-types' +import { Domain } from '@hcengineering/communication-sdk-types' + +import { BaseDb } from './base' +import { DbModel, DbModelFilter } from '../schema' +import { toPeer } from './mapping' + +export class PeersDb extends BaseDb { + async createPeer ( + workspaceId: WorkspaceID, + cardId: CardID, + kind: PeerKind, + value: string, + extra: PeerExtra, + date: Date + ): Promise { + const db: DbModel = { + workspace_id: workspaceId, + card_id: cardId, + kind, + value, + extra, + created: date + } + const { sql, values } = this.getInsertSql(Domain.Peer, db, []) + await this.execute(sql, values, 'insert peer') + } + + async removePeer (workspaceId: WorkspaceID, cardId: CardID, kind: PeerKind, value: string): Promise { + const filter: DbModelFilter = [ + { + column: 'workspace_id', + value: workspaceId + }, + { + column: 'card_id', + value: cardId + }, + { + column: 'kind', + value: kind + }, + { + column: 'value', + value + } + ] + + if (filter.length === 0) return + + const { sql, values } = this.getDeleteSql(Domain.Peer, filter) + + await this.execute(sql, values, 'remove peer') + } + + async findPeers (params: FindPeersParams): Promise { + const select = `SELECT *, COALESCE(members.members, '[]') AS members + FROM ${Domain.Peer} p` + + const { where, values } = this.buildWhere(params) + + const limit = params.limit != null ? `LIMIT ${params.limit}` : '' + const orderBy = + params.order != null ? `ORDER BY p.created ${params.order === SortingOrder.Ascending ? 'ASC' : 'DESC'}` : '' + const join = `LEFT JOIN LATERAL ( + SELECT json_agg( + json_build_object( + 'workspace_id', p2.workspace_id, + 'card_id', p2.card_id, + 'extra', p2.extra + ) + ) AS members + FROM ${Domain.Peer} AS p2 + WHERE p2.value = p.value + AND p2.kind = 'card' + AND NOT (p2.workspace_id = p.workspace_id AND p2.card_id = p.card_id) + ) members ON true` + const sql = [select, join, where, orderBy, limit].join(' ') + + const result = await this.execute(sql, values, 'find peers') + + return result.map((it: any) => toPeer(it)) + } + + buildWhere (params: FindPeersParams, startIndex: number = 0, prefix = 'p.'): { where: string, values: any[] } { + const where: string[] = [] + const values: any[] = [] + let index = startIndex + 1 + + if (params.workspaceId != null) { + where.push(`${prefix}workspace_id = $${index++}::uuid`) + values.push(params.workspaceId) + } + + if (params.cardId != null) { + where.push(`${prefix}card_id = $${index++}::varchar`) + values.push(params.cardId) + } + + if (params.kind != null) { + where.push(`${prefix}kind = $${index++}::varchar`) + values.push(params.kind) + } + + if (params.value != null) { + where.push(`${prefix}value = $${index++}::varchar`) + values.push(params.value) + } + + return { where: where.length > 0 ? 
`WHERE ${where.join(' AND ')}` : '', values } + } +} diff --git a/packages/cockroach/src/init.ts b/packages/cockroach/src/init.ts index b23a0252252..b7af31c51f8 100644 --- a/packages/cockroach/src/init.ts +++ b/packages/cockroach/src/init.ts @@ -129,7 +129,8 @@ function getMigrations (): [string, string][] { migrationV7_3(), migrationV8_1(), migrationV8_2(), - migrationV8_3() + migrationV8_3(), + migrationV9_1() ] } @@ -497,7 +498,8 @@ function migrationV6_7 (): [string, string] { CREATE INDEX IF NOT EXISTS idx_reactions_workspace_card_message ON communication.reactions (workspace_id, card_id, message_id); - ALTER TABLE communication.thread ADD CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id); + ALTER TABLE communication.thread + ADD CONSTRAINT thread_unique_constraint UNIQUE (workspace_id, card_id, message_id); CREATE INDEX IF NOT EXISTS idx_thread_workspace_card_message ON communication.thread (workspace_id, card_id, message_id); @@ -541,9 +543,8 @@ function migrationV7_2 (): [string, string] { FROM communication.notification_context AS nc JOIN communication.messages_groups AS mg ON mg.workspace_id = nc.workspace_id - AND mg.card_id = nc.card_id - WHERE - n.context_id = nc.id + AND mg.card_id = nc.card_id + WHERE n.context_id = nc.id AND n.message_created BETWEEN mg.from_date AND mg.to_date AND n.blob_id IS NULL; ` @@ -552,12 +553,12 @@ function migrationV7_2 (): [string, string] { function migrationV7_3 (): [string, string] { const sql = ` - UPDATE communication.notification_context - SET last_notify = last_update - WHERE last_notify IS NULL; + UPDATE communication.notification_context + SET last_notify = last_update + WHERE last_notify IS NULL; - ALTER TABLE communication.notification_context - ALTER COLUMN last_notify SET NOT NULL; + ALTER TABLE communication.notification_context + ALTER COLUMN last_notify SET NOT NULL; ` return ['make_last_notify_not_null-v7_3', sql] } @@ -600,7 +601,34 @@ function migrationV8_2 (): [string, string] { function migrationV8_3 (): [string, string] { const sql = ` - CREATE INDEX IF NOT EXISTS attachment_workspace_card_message_idx ON ${Domain.Attachment} (workspace_id, card_id, message_id) - ` + CREATE INDEX IF NOT EXISTS attachment_workspace_card_message_idx ON ${Domain.Attachment} (workspace_id, card_id, message_id) + ` return ['add_attachment_indexes-v8_3', sql] } + +// CREATE TABLE ${Domain.CardPeerGroup} +// ( +// group_id UUID NOT NULL, +// workspace_id UUID NOT NULL, +// card_id VARCHAR(255) NOT NULL, +// created TIMESTAMPTZ NOT NULL DEFAULT now(), +// PRIMARY KEY (group_id,workspace_id, card_id) +// ); + +function migrationV9_1 (): [string, string] { + const sql = ` + CREATE TABLE IF NOT EXISTS ${Domain.Peer} + ( + workspace_id UUID NOT NULL, + card_id VARCHAR(255) NOT NULL, + kind TEXT NOT NULL, + value TEXT NOT NULL, + extra JSONB NOT NULL DEFAULT '{}', + created TIMESTAMPTZ NOT NULL DEFAULT now(), + PRIMARY KEY (workspace_id, card_id, kind, value) + ); + + CREATE INDEX IF NOT EXISTS peer_workspace_card_kind ON ${Domain.Peer} (workspace_id, card_id, kind); + CREATE INDEX IF NOT EXISTS peer_kind_value ON ${Domain.Peer} (kind, value);` + return ['init_peer_tables-v9_1', sql] +} diff --git a/packages/cockroach/src/schema.ts b/packages/cockroach/src/schema.ts index 3e850acd149..761c5f71ca5 100644 --- a/packages/cockroach/src/schema.ts +++ b/packages/cockroach/src/schema.ts @@ -28,7 +28,8 @@ import { type LabelID, type CardType, NotificationContent, - NotificationType, AttachmentID + NotificationType, AttachmentID, + 
PeerKind, PeerExtra } from '@hcengineering/communication-types' import { Domain } from '@hcengineering/communication-sdk-types' @@ -130,6 +131,14 @@ export const schemas = { last_view: 'timestamptz', last_update: 'timestamptz', last_notify: 'timestamptz' + }, + [Domain.Peer]: { + workspace_id: 'uuid', + card_id: 'varchar', + kind: 'varchar', + value: 'varchar', + extra: 'jsonb', + created: 'timestamptz' } } as const @@ -141,11 +150,11 @@ export interface DomainDbModel { [Domain.Reaction]: ReactionDbModel [Domain.Thread]: ThreadDbModel [Domain.Attachment]: AttachmentDbModel - // [Domain.LinkPreview]: LinkPreviewDbModel [Domain.Notification]: NotificationDbModel [Domain.NotificationContext]: ContextDbModel [Domain.Collaborator]: CollaboratorDbModel [Domain.Label]: LabelDbModel + [Domain.Peer]: PeerDbModel } export type DbModel = DomainDbModel[D] @@ -274,3 +283,12 @@ interface LabelDbModel { account: AccountID created: Date } + +interface PeerDbModel { + workspace_id: WorkspaceID + card_id: CardID + kind: PeerKind + value: string + extra: PeerExtra + created: Date +} diff --git a/packages/sdk-types/src/db.ts b/packages/sdk-types/src/db.ts index 23c53e9d6aa..73f4178aaf9 100644 --- a/packages/sdk-types/src/db.ts +++ b/packages/sdk-types/src/db.ts @@ -44,7 +44,7 @@ import { NotificationType, AttachmentData, AttachmentID, - AttachmentUpdateData, WithTotal + AttachmentUpdateData, WithTotal, WorkspaceID, PeerKind, PeerExtra, FindPeersParams, Peer, FindThreadParams } from '@hcengineering/communication-types' export interface DbAdapter { @@ -82,9 +82,25 @@ export interface DbAdapter { removeThreads: (query: ThreadQuery) => Promise updateThread: (cardId: CardID, messageId: MessageID, thread: CardID, update: ThreadUpdates, socialId: SocialID, date: Date) => Promise + createPeer: ( + workspaceId: WorkspaceID, + cardId: CardID, + kind: PeerKind, + value: string, + extra: PeerExtra, + date: Date + ) => Promise + + removePeer: (workspaceId: WorkspaceID, + cardId: CardID, + kind: PeerKind, + value: string) => Promise + + findPeers: (params: FindPeersParams) => Promise + findMessages: (params: FindMessagesParams) => Promise findMessagesGroups: (params: FindMessagesGroupsParams) => Promise - findThread: (threadId: CardID) => Promise + findThreads: (params: FindThreadParams) => Promise addCollaborators: (cardId: CardID, cardType: CardType, collaborators: AccountID[], date: Date) => Promise removeCollaborators: (cardId: CardID, accounts: AccountID[], unsafe?: boolean) => Promise @@ -126,6 +142,7 @@ export interface DbAdapter { updateLabels: (cardId: CardID, update: LabelUpdates) => Promise getCardTitle: (cardId: CardID) => Promise + getCardSpaceMembers: (cardId: CardID) => Promise getAccountsByPersonIds: (ids: string[]) => Promise getNameByAccount: (id: AccountID) => Promise getMessageCreated: (cardId: CardID, messageId: MessageID) => Promise diff --git a/packages/sdk-types/src/domain.ts b/packages/sdk-types/src/domain.ts index bf9ca8510e9..86e4f34ab72 100644 --- a/packages/sdk-types/src/domain.ts +++ b/packages/sdk-types/src/domain.ts @@ -26,7 +26,8 @@ export enum Domain { Collaborator = 'communication.collaborator', Label = 'communication.label', - // LinkPreview = 'communication.link_preview' + + Peer = 'communication.peer' } export const Domains = Object.values(Domain) diff --git a/packages/sdk-types/src/events/card.ts b/packages/sdk-types/src/events/card.ts index d96c8627908..46d5a794cd8 100644 --- a/packages/sdk-types/src/events/card.ts +++ b/packages/sdk-types/src/events/card.ts @@ -18,14 +18,12 @@ 
import type { CardID, CardType, SocialID } from '@hcengineering/communication-ty import type { BaseEvent } from './common' export enum CardEventType { - // Internal UpdateCardType = 'updateCardType', RemoveCard = 'removeCard' } export type CardEvent = UpdateCardTypeEvent | RemoveCardEvent -// Internal export interface UpdateCardTypeEvent extends BaseEvent { type: CardEventType.UpdateCardType cardId: CardID diff --git a/packages/sdk-types/src/events/event.ts b/packages/sdk-types/src/events/event.ts index a92d51b4029..506d4df6225 100644 --- a/packages/sdk-types/src/events/event.ts +++ b/packages/sdk-types/src/events/event.ts @@ -15,13 +15,15 @@ import type { LabelEvent, LabelEventType } from './label' import type { MessageEventResult, MessageEventType, MessageEvent } from './message' import type { NotificationEventResult, NotificationEvent, NotificationEventType } from './notification' import type { CardEvent, CardEventType } from './card' +import { PeerEvent, PeerEventType } from './peer' export * from './message' export * from './notification' export * from './label' export * from './card' +export * from './peer' -export type EventType = MessageEventType | NotificationEventType | LabelEventType | CardEventType -export type Event = MessageEvent | NotificationEvent | LabelEvent | CardEvent +export type EventType = MessageEventType | NotificationEventType | LabelEventType | CardEventType | PeerEventType +export type Event = MessageEvent | NotificationEvent | LabelEvent | CardEvent | PeerEvent // eslint-disable-next-line @typescript-eslint/ban-types export type EventResult = MessageEventResult | NotificationEventResult | {} diff --git a/packages/sdk-types/src/events/peer.ts b/packages/sdk-types/src/events/peer.ts new file mode 100644 index 00000000000..331afa41dc4 --- /dev/null +++ b/packages/sdk-types/src/events/peer.ts @@ -0,0 +1,44 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +import type { BaseEvent } from './common' +import { CardID, PeerKind, PeerExtra, WorkspaceID } from '@hcengineering/communication-types' + +// Peer events only for system +export enum PeerEventType { + CreatePeer = 'createPeer', + RemovePeer = 'removePeer' +} + +export type PeerEvent = CreatePeerEvent | RemovePeerEvent + +export interface CreatePeerEvent extends BaseEvent { + type: PeerEventType.CreatePeer + workspaceId: WorkspaceID + cardId: CardID + kind: PeerKind + value: string + extra?: PeerExtra + date?: Date +} + +export interface RemovePeerEvent extends BaseEvent { + type: PeerEventType.RemovePeer + workspaceId: WorkspaceID + cardId: CardID + kind: PeerKind + value: string + date?: Date +} diff --git a/packages/sdk-types/src/serverApi.ts b/packages/sdk-types/src/serverApi.ts index ec1dde33719..848a0140ea0 100644 --- a/packages/sdk-types/src/serverApi.ts +++ b/packages/sdk-types/src/serverApi.ts @@ -25,7 +25,8 @@ import type { FindLabelsParams, Label, FindCollaboratorsParams, - Collaborator + Collaborator, + FindThreadParams, Thread, FindPeersParams, Peer } from '@hcengineering/communication-types' import type { Account, MeasureContext } from '@hcengineering/core' @@ -61,6 +62,9 @@ export interface ServerApi { findLabels: (session: SessionData, params: FindLabelsParams) => Promise findCollaborators: (session: SessionData, params: FindCollaboratorsParams) => Promise + findThreads: (session: SessionData, params: FindThreadParams) => Promise + findPeers: (session: SessionData, params: FindPeersParams) => Promise + event: (session: SessionData, event: Event) => Promise unsubscribeQuery: (session: SessionData, id: number) => Promise diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 46c481f4fd0..82dc68a1beb 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -27,7 +27,8 @@ import type { FindLabelsParams, Label, FindCollaboratorsParams, - Collaborator + Collaborator, FindPeersParams, Peer, Thread, + FindThreadParams } from '@hcengineering/communication-types' import { createDbAdapter } from '@hcengineering/communication-cockroach' import type { EventResult, Event, ServerApi, SessionData } from '@hcengineering/communication-sdk-types' @@ -52,8 +53,9 @@ export class Api implements ServerApi { withLogs: process.env.COMMUNICATION_TIME_LOGGING_ENABLED === 'true' }) + const peers = await db.findPeers({ workspaceId: workspace }) const metadata = getMetadata() - const middleware = await buildMiddlewares(ctx, workspace, metadata, db, callbacks) + const middleware = await buildMiddlewares(ctx, workspace, metadata, db, callbacks, peers) return new Api(ctx, middleware) } @@ -90,6 +92,14 @@ export class Api implements ServerApi { return await this.middlewares.findCollaborators(session, params) } + async findPeers (session: SessionData, params: FindPeersParams): Promise { + return await this.middlewares.findPeers(session, params) + } + + async findThreads (session: SessionData, params: FindThreadParams): Promise { + return await this.middlewares.findThreads(session, params) + } + async unsubscribeQuery (session: SessionData, id: number): Promise { await this.middlewares.unsubscribeQuery(session, id) } diff --git a/packages/server/src/middleware/base.ts b/packages/server/src/middleware/base.ts index 7233adf33cf..00209b02c8b 100644 --- a/packages/server/src/middleware/base.ts +++ b/packages/server/src/middleware/base.ts @@ -26,7 +26,11 @@ import type { FindLabelsParams, Label, FindCollaboratorsParams, - Collaborator + Collaborator, + 
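A minimal sketch of shaping a `CreatePeerEvent` as defined above, assuming an existing `ServerApi` instance and a system session; the helper and placeholder values are illustrative only:

```ts
import { PeerEventType, type CreatePeerEvent, type ServerApi, type SessionData } from '@hcengineering/communication-sdk-types'
import type { CardID, WorkspaceID } from '@hcengineering/communication-types'

async function addCardPeer (
  api: ServerApi,
  session: SessionData,
  workspaceId: WorkspaceID,
  cardId: CardID,
  room: string
): Promise<void> {
  const event: CreatePeerEvent = {
    type: PeerEventType.CreatePeer,
    workspaceId,
    cardId,
    kind: 'card',
    value: room,
    extra: {}, // optional; the database layer falls back to {} anyway
    date: new Date() // optional; filled server-side when omitted
  }
  // Peer events pass PermissionsMiddleware only for the system account
  await api.event(session, event)
}
```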
FindPeersParams, + Peer, + FindThreadParams, + Thread } from '@hcengineering/communication-types' import type { Enriched, Middleware, MiddlewareContext, QueryId } from '../types' @@ -73,6 +77,14 @@ export class BaseMiddleware implements Middleware { return await this.provideFindCollaborators(session, params) } + async findPeers (session: SessionData, params: FindPeersParams): Promise { + return await this.provideFindPeers(session, params) + } + + async findThreads (session: SessionData, params: FindThreadParams): Promise { + return await this.provideFindThreads(session, params) + } + async event (session: SessionData, event: Enriched, derived: boolean): Promise { return await this.provideEvent(session, event, derived) } @@ -162,6 +174,20 @@ export class BaseMiddleware implements Middleware { return [] } + protected async provideFindPeers (session: SessionData, params: FindPeersParams): Promise { + if (this.next !== undefined) { + return await this.next.findPeers(session, params) + } + return [] + } + + protected async provideFindThreads (session: SessionData, params: FindThreadParams): Promise { + if (this.next !== undefined) { + return await this.next.findThreads(session, params) + } + return [] + } + protected provideHandleBroadcast (session: SessionData, events: Enriched[]): void { if (this.next !== undefined) { this.next.handleBroadcast(session, events) diff --git a/packages/server/src/middleware/broadcast.ts b/packages/server/src/middleware/broadcast.ts index 40879be3209..8824ed7d0a0 100644 --- a/packages/server/src/middleware/broadcast.ts +++ b/packages/server/src/middleware/broadcast.ts @@ -20,6 +20,7 @@ import { LabelEventType, MessageEventType, NotificationEventType, + PeerEventType, type SessionData } from '@hcengineering/communication-sdk-types' import type { @@ -223,6 +224,9 @@ export class BroadcastMiddleware extends BaseMiddleware implements Middleware { case CardEventType.UpdateCardType: case CardEventType.RemoveCard: return true + case PeerEventType.RemovePeer: + case PeerEventType.CreatePeer: + return false } } diff --git a/packages/server/src/middleware/date.ts b/packages/server/src/middleware/date.ts index 726f7948cff..c6ffb213d1a 100644 --- a/packages/server/src/middleware/date.ts +++ b/packages/server/src/middleware/date.ts @@ -34,6 +34,8 @@ export class DateMiddleware extends BaseMiddleware implements Middleware { event.date = new Date() } + event._eventExtra = {} + return await this.provideEvent(session, event, derived) } diff --git a/packages/server/src/middleware/db.ts b/packages/server/src/middleware/db.ts index 1abe45d8346..c75d436ecfd 100644 --- a/packages/server/src/middleware/db.ts +++ b/packages/server/src/middleware/db.ts @@ -23,6 +23,8 @@ import { type FindMessagesParams, type FindNotificationContextParams, type FindNotificationsParams, + FindPeersParams, + FindThreadParams, type Label, type Message, MessageID, @@ -30,7 +32,9 @@ import { type Notification, type NotificationContext, PatchType, + Peer, SocialID, + Thread, UpdatePatchData } from '@hcengineering/communication-types' import { @@ -62,7 +66,10 @@ import { ThreadPatchEvent, EventResult, AttachmentPatchEvent, - BlobPatchEvent + BlobPatchEvent, + PeerEventType, + CreatePeerEvent, + RemovePeerEvent } from '@hcengineering/communication-sdk-types' import type { Enriched, Middleware, MiddlewareContext } from '../types' @@ -109,11 +116,21 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return await this.db.findCollaborators(params) } - async event (session: SessionData, 
event: Enriched): Promise { + async findPeers (_: SessionData, params: FindPeersParams): Promise { + return await this.db.findPeers(params) + } + + async findThreads (_: SessionData, params: FindThreadParams): Promise { + return await this.db.findThreads(params) + } + + async event (session: SessionData, event: Enriched, derived: boolean): Promise { const result = await this.processEvent(session, event) if (result.skipPropagate === true) { event.skipPropagate = true + } else { + await this.provideEvent(session, event, derived) } return result.result ?? {} @@ -153,6 +170,12 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { case CardEventType.RemoveCard: return await this.removeCard(event) + // Peers + case PeerEventType.RemovePeer: + return await this.removePeer(event) + case PeerEventType.CreatePeer: + return await this.createPeer(event) + // Collaborators case NotificationEventType.AddCollaborators: return await this.addCollaborators(event) @@ -460,6 +483,16 @@ export class DatabaseMiddleware extends BaseMiddleware implements Middleware { return {} } + private async createPeer (event: Enriched): Promise { + await this.db.createPeer(event.workspaceId, event.cardId, event.kind, event.value, event.extra ?? {}, event.date) + return {} + } + + private async removePeer (event: Enriched): Promise { + await this.db.removePeer(event.workspaceId, event.cardId, event.kind, event.value) + return {} + } + private async updateCardType (event: Enriched): Promise { return {} } diff --git a/packages/server/src/middleware/peer.ts b/packages/server/src/middleware/peer.ts new file mode 100644 index 00000000000..b9efe1777aa --- /dev/null +++ b/packages/server/src/middleware/peer.ts @@ -0,0 +1,60 @@ +// +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// + +import { + type Event, + EventResult, + MessageEventType, + PeerEventType, + type SessionData +} from '@hcengineering/communication-sdk-types' + +import type { Enriched, Middleware, MiddlewareContext } from '../types' +import { BaseMiddleware } from './base' + +export class PeerMiddleware extends BaseMiddleware implements Middleware { + constructor ( + readonly context: MiddlewareContext, + next?: Middleware + ) { + super(context, next) + } + + async event (session: SessionData, event: Enriched, derived: boolean): Promise { + switch (event.type) { + case PeerEventType.CreatePeer: + this.context.cadsWithPeers.add(event.cardId) + break + case MessageEventType.CreateMessage: + case MessageEventType.UpdatePatch: + case MessageEventType.RemovePatch: + case MessageEventType.AttachmentPatch: + case MessageEventType.ReactionPatch: + case MessageEventType.ThreadPatch: + case MessageEventType.BlobPatch: { + if (this.context.cadsWithPeers.has(event.cardId)) { + event._eventExtra.peers = + (await this.context.head?.findPeers(session, { + workspaceId: this.context.workspace, + cardId: event.cardId + })) ?? 
[] + } + break + } + } + + return await this.provideEvent(session, event, derived) + } +} diff --git a/packages/server/src/middleware/permissions.ts b/packages/server/src/middleware/permissions.ts index a9d46a3a962..9401a952fe3 100644 --- a/packages/server/src/middleware/permissions.ts +++ b/packages/server/src/middleware/permissions.ts @@ -19,6 +19,7 @@ import { EventResult, MessageEventType, NotificationEventType, + PeerEventType, type SessionData } from '@hcengineering/communication-sdk-types' import { AccountRole, systemAccountUuid } from '@hcengineering/core' @@ -66,6 +67,8 @@ export class PermissionsMiddleware extends BaseMiddleware implements Middleware this.checkAccount(session, event.account) break } + case PeerEventType.CreatePeer: + case PeerEventType.RemovePeer: case MessageEventType.CreateMessagesGroup: case MessageEventType.RemoveMessagesGroup: { this.onlySystemAccount(session) diff --git a/packages/server/src/middleware/triggers.ts b/packages/server/src/middleware/triggers.ts index 6c7b9f3cbd8..5c3174c5303 100644 --- a/packages/server/src/middleware/triggers.ts +++ b/packages/server/src/middleware/triggers.ts @@ -23,6 +23,7 @@ import { notify } from '../notification/notification' export class TriggersMiddleware extends BaseMiddleware implements Middleware { private ctx: MeasureContext + private processedPeersEvents = new Set() constructor ( private readonly callbacks: CommunicationCallbacks, @@ -60,6 +61,7 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { registeredCards: this.context.registeredCards, accountBySocialID: this.context.accountBySocialID, removedContexts: this.context.removedContexts, + processedPeersEvents: this.processedPeersEvents, derived, execute: async (event: Event) => { // Will be enriched in head @@ -90,6 +92,7 @@ export class TriggersMiddleware extends BaseMiddleware implements Middleware { (session.asyncData as Enriched[]).sort((a, b) => a.date.getTime() - b.date.getTime()) ) session.asyncData = [] + this.processedPeersEvents = new Set() } } } diff --git a/packages/server/src/middleware/validate.ts b/packages/server/src/middleware/validate.ts index 18c0b825c58..1547dea4e48 100644 --- a/packages/server/src/middleware/validate.ts +++ b/packages/server/src/middleware/validate.ts @@ -18,7 +18,8 @@ import { MessageEventType, NotificationEventType, type Event, - type SessionData + type SessionData, + PeerEventType } from '@hcengineering/communication-sdk-types' import { type Collaborator, @@ -151,11 +152,18 @@ export class ValidateMiddleware extends BaseMiddleware implements Middleware { case NotificationEventType.UpdateNotificationContext: this.validate(event, UpdateNotificationContextEventSchema) break + case PeerEventType.CreatePeer: + this.validate(event, CreatePeerEventSchema) + break + case PeerEventType.RemovePeer: + this.validate(event, RemovePeerEventSchema) + break } return await this.provideEvent(session, deserializeEvent(event), derived) } } +const WorkspaceIDSchema = z.string().uuid() const AccountIDSchema = z.string() const BlobIDSchema = z.string().uuid() const AttachmentIDSchema = z.string().uuid() @@ -290,7 +298,8 @@ const FindCollaboratorsParamsSchema = FindParamsSchema.extend({ const BaseEventSchema = z .object({ - _id: z.string().optional() + _id: z.string().optional(), + _eventExtra: z.record(z.any()).optional() }) .strict() @@ -472,6 +481,25 @@ const RemoveCollaboratorsEventSchema = BaseEventSchema.extend({ date: DateSchema }).strict() +const CreatePeerEventSchema = BaseEventSchema.extend({ + type: 
z.literal(PeerEventType.CreatePeer), + workspaceId: WorkspaceIDSchema, + cardId: CardIDSchema, + kind: z.string().nonempty(), + value: z.string().nonempty(), + extra: z.record(z.any()).optional(), + date: DateSchema +}).strict() + +const RemovePeerEventSchema = BaseEventSchema.extend({ + type: z.literal(PeerEventType.RemovePeer), + workspaceId: WorkspaceIDSchema, + cardId: CardIDSchema, + kind: z.string().nonempty(), + value: z.string().nonempty(), + date: DateSchema +}).strict() + function deserializeEvent (event: Enriched): Enriched { switch (event.type) { case MessageEventType.CreateMessagesGroup: diff --git a/packages/server/src/middlewares.ts b/packages/server/src/middlewares.ts index 3617a9f7b44..a95bbb98b8c 100644 --- a/packages/server/src/middlewares.ts +++ b/packages/server/src/middlewares.ts @@ -13,7 +13,7 @@ // limitations under the License. // -import type { MeasureContext } from '@hcengineering/core' +import { MeasureContext } from '@hcengineering/core' import type { DbAdapter, EventResult, Event, SessionData } from '@hcengineering/communication-sdk-types' import type { Collaborator, @@ -23,11 +23,14 @@ import type { FindMessagesParams, FindNotificationContextParams, FindNotificationsParams, + FindPeersParams, + FindThreadParams, Label, Message, MessagesGroup, Notification, - NotificationContext, + NotificationContext, Peer, + Thread, WorkspaceID } from '@hcengineering/communication-types' @@ -48,13 +51,14 @@ import { ValidateMiddleware } from './middleware/validate' import { DateMiddleware } from './middleware/date' import { IdentityMiddleware } from './middleware/indentity' import { IdMiddleware } from './middleware/id' +import { PeerMiddleware } from './middleware/peer' export async function buildMiddlewares ( ctx: MeasureContext, workspace: WorkspaceID, metadata: Metadata, db: DbAdapter, - callbacks: CommunicationCallbacks + callbacks: CommunicationCallbacks, peers: Peer[] ): Promise { const createFns: MiddlewareCreateFn[] = [ // Enrich events @@ -69,7 +73,8 @@ export async function buildMiddlewares ( // Process events async (context, next) => new TriggersMiddleware(callbacks, db, context, next), async (context, next) => new BroadcastMiddleware(callbacks, context, next), - async (context, next) => new DatabaseMiddleware(db, context, next) + async (context, next) => new DatabaseMiddleware(db, context, next), + async (context, next) => new PeerMiddleware(context, next) ] const context: MiddlewareContext = { @@ -78,7 +83,8 @@ export async function buildMiddlewares ( workspace, registeredCards: new Set(), accountBySocialID: new Map(), - removedContexts: new Map() + removedContexts: new Map(), + cadsWithPeers: new Set(peers.map(it => it.cardId)) } return await Middlewares.create(ctx, context, createFns) @@ -169,6 +175,16 @@ export class Middlewares { return await this.head.findCollaborators(session, params) } + async findPeers (session: SessionData, params: FindPeersParams): Promise { + if (this.head === undefined) return [] + return await this.head.findPeers(session, params) + } + + async findThreads (session: SessionData, params: FindThreadParams): Promise { + if (this.head === undefined) return [] + return await this.head.findThreads(session, params) + } + async unsubscribeQuery (session: SessionData, id: number): Promise { if (this.head === undefined) return this.head?.unsubscribeQuery(session, id) diff --git a/packages/server/src/notification/notification.ts b/packages/server/src/notification/notification.ts index 0ce78b8cd5b..35fb4853e2e 100644 --- 
a/packages/server/src/notification/notification.ts +++ b/packages/server/src/notification/notification.ts @@ -220,6 +220,7 @@ async function notifyMessage ( date: Date ): Promise { const cursor = ctx.db.getCollaboratorsCursor(cardId, date, BATCH_SIZE) + const spaceMembers = await ctx.db.getCardSpaceMembers(cardId) const creatorAccount = await findAccount(ctx, socialId) const result: Event[] = [] @@ -235,6 +236,7 @@ async function notifyMessage ( }) for (const collaborator of collaborators) { + if (!spaceMembers.includes(collaborator)) continue try { const context = contexts.find((it) => it.account === collaborator) const res = await processCollaborator( diff --git a/packages/server/src/triggers/card.ts b/packages/server/src/triggers/card.ts index 296a7b96437..43b0c51bc1e 100644 --- a/packages/server/src/triggers/card.ts +++ b/packages/server/src/triggers/card.ts @@ -52,7 +52,7 @@ async function onCardTypeUpdates (ctx: TriggerCtx, event: Enriched): Promise { const { cardId } = event - const thread = await ctx.db.findThread(cardId) + const thread = (await ctx.db.findThreads({ threadId: cardId, limit: 1 }))[0] if (thread === undefined) return [] return [ @@ -126,7 +126,7 @@ async function addThreadReply (ctx: TriggerCtx, event: Enriched): Promise { + if (ctx.processedPeersEvents.has(event._id)) return [] + if (event.type === MessageEventType.CreateMessage) { + if (event.messageType === MessageType.Activity) { + return [] + } + } + + if (event.type === MessageEventType.ThreadPatch) { + return [] + } + + const cardPeers = new Set( + (((event._eventExtra.peers ?? []) as Peer[]).filter((it) => it.kind === 'card') as CardPeer[]) + .flatMap((it) => it.members) + .filter((it) => it.workspaceId === ctx.workspace && it.cardId !== event.cardId) + .map((it) => it.cardId) + ) + + if (cardPeers.size === 0) return [] + const res: Event[] = [] + + for (const peer of cardPeers) { + const ev = { + ...event, + _id: generateId(), + cardId: peer + } + + ctx.processedPeersEvents.add(ev._id) + + res.push(ev) + } + + return res +} + const triggers: Triggers = [ ['add_collaborators_on_message_created', MessageEventType.CreateMessage, addCollaborators as TriggerFn], ['add_thread_reply_on_message_created', MessageEventType.CreateMessage, addThreadReply as TriggerFn], @@ -214,7 +251,15 @@ const triggers: Triggers = [ ['on_messages_group_created', MessageEventType.CreateMessagesGroup, onMessagesGroupCreated as TriggerFn], ['remove_reply_on_messages_removed', MessageEventType.RemovePatch, onMessageRemoved as TriggerFn], - ['on_thread_created', MessageEventType.ThreadPatch, onThreadAttached as TriggerFn] + ['on_thread_created', MessageEventType.ThreadPatch, onThreadAttached as TriggerFn], + + ['check_peers_on_message_created', MessageEventType.CreateMessage, checkPeers as TriggerFn], + ['check_peers_on_update_patch', MessageEventType.UpdatePatch, checkPeers as TriggerFn], + ['check_peers_on_remove_patch', MessageEventType.RemovePatch, checkPeers as TriggerFn], + ['check_peers_on_reaction_patch', MessageEventType.ReactionPatch, checkPeers as TriggerFn], + ['check_peers_on_blob_patch', MessageEventType.BlobPatch, checkPeers as TriggerFn], + ['check_peers_on_attachment_patch', MessageEventType.AttachmentPatch, checkPeers as TriggerFn], + ['check_peers_on_thread_patch', MessageEventType.ThreadPatch, checkPeers as TriggerFn] ] export default triggers diff --git a/packages/server/src/types.ts b/packages/server/src/types.ts index 5c9f98893e4..97338a9421a 100644 --- a/packages/server/src/types.ts +++ 
b/packages/server/src/types.ts @@ -23,19 +23,23 @@ import type { import type { AccountID, CardID, - Collaborator, ContextID, + Collaborator, + ContextID, FindCollaboratorsParams, FindLabelsParams, FindMessagesGroupsParams, FindMessagesParams, FindNotificationContextParams, FindNotificationsParams, + FindPeersParams, + FindThreadParams, Label, Message, MessagesGroup, Notification, NotificationContext, - SocialID, + Peer, + SocialID, Thread, WorkspaceID } from '@hcengineering/communication-types' @@ -69,6 +73,8 @@ export interface Middleware { findLabels: (session: SessionData, params: FindLabelsParams, queryId?: QueryId) => Promise findCollaborators: (session: SessionData, params: FindCollaboratorsParams) => Promise + findPeers: (session: SessionData, params: FindPeersParams) => Promise + findThreads: (session: SessionData, params: FindThreadParams) => Promise event: (session: SessionData, event: Enriched, derived: boolean) => Promise @@ -88,6 +94,8 @@ export interface MiddlewareContext { accountBySocialID: Map removedContexts: Map + cadsWithPeers: Set + derived?: Middleware head?: Middleware } @@ -110,6 +118,7 @@ export interface TriggerCtx { accountBySocialID: Map removedContexts: Map derived: boolean + processedPeersEvents: Set execute: (event: Event) => Promise } @@ -117,6 +126,8 @@ export type TriggerFn = (ctx: TriggerCtx, event: Enriched) => Promise = T & { + _id: string skipPropagate?: boolean date: Date + _eventExtra: Record } diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 84cdd620685..a6d3ee6fdfb 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -20,3 +20,4 @@ export * from './notification' export * from './query' export * from './label' export * from './patch' +export * from './peer' diff --git a/packages/types/src/peer.ts b/packages/types/src/peer.ts new file mode 100644 index 00000000000..d74755ada15 --- /dev/null +++ b/packages/types/src/peer.ts @@ -0,0 +1,43 @@ +// Copyright © 2025 Hardcore Engineering Inc. +// +// Licensed under the Eclipse Public License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. You may +// obtain a copy of the License at https://www.eclipse.org/legal/epl-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. 
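A rough sketch of the `Enriched` envelope a peer event carries through the middleware chain, assuming `IdMiddleware` assigns `_id`; the concrete values are placeholders:

```ts
// What DatabaseMiddleware receives for a createPeer event, roughly:
const enriched = {
  type: 'createPeer',                                  // PeerEventType.CreatePeer
  workspaceId: '00000000-0000-0000-0000-000000000001', // placeholder
  cardId: 'card-1',                                    // placeholder
  kind: 'card',
  value: 'room-1',
  _id: 'evt-1',        // assumed to be assigned by IdMiddleware
  date: new Date(),    // set by DateMiddleware
  _eventExtra: {}      // initialized by DateMiddleware; PeerMiddleware later stores found peers here
}
console.log(enriched)
```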
+ +import { CardID, WorkspaceID } from './core' + +export type PeerKind = 'card' | string +export type PeerExtra = Record + +interface BasePeer { + workspaceId: WorkspaceID + cardId: CardID + kind: PeerKind + value: string + extra: PeerExtra + created: Date +} + +export interface CardPeer extends BasePeer { + kind: 'card' + members: CardPeerMember[] +} + +export interface ExternalPeer extends BasePeer { + kind: string +} + +export type Peer = CardPeer | ExternalPeer + +export interface CardPeerMember { + workspaceId: WorkspaceID + cardId: CardID + extra: PeerExtra +} diff --git a/packages/types/src/query.ts b/packages/types/src/query.ts index d2b739af0ef..2789601d302 100644 --- a/packages/types/src/query.ts +++ b/packages/types/src/query.ts @@ -17,8 +17,9 @@ import { SortingOrder } from '@hcengineering/core' import type { MessageID } from './message' import type { ContextID, NotificationID, NotificationType } from './notification' -import type { AccountID, BlobID, CardID, CardType } from './core' +import type { AccountID, BlobID, CardID, CardType, WorkspaceID } from './core' import type { LabelID } from './label' +import { PeerKind } from './peer' export { SortingOrder } @@ -101,4 +102,17 @@ export interface FindLabelsParams extends FindParams { account?: AccountID } +export interface FindThreadParams extends FindParams { + cardId?: CardID + messageId?: MessageID + threadId?: CardID +} + +export interface FindPeersParams extends FindParams { + workspaceId?: WorkspaceID + cardId?: CardID + kind?: PeerKind + value?: string +} + export type WithTotal = T[] & { total: number } From deb27df0cfcb063e7b08d55fbd59cc185c7a4292 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Wed, 20 Aug 2025 12:01:32 +0200 Subject: [PATCH 179/636] database support Signed-off-by: Alexey Aristov --- Cargo.lock | 2 + Cargo.toml | 5 +- etc/migrations/V1__initial.sql | 17 +++ src/main.rs | 52 ++-------- src/postgres.rs | 184 +++++++++++++++++++++++++++++++++ 5 files changed, 213 insertions(+), 47 deletions(-) create mode 100644 etc/migrations/V1__initial.sql create mode 100644 src/postgres.rs diff --git a/Cargo.lock b/Cargo.lock index 540ea4f76f4..0f7b341ab17 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2765,6 +2765,8 @@ dependencies = [ "bytes", "fallible-iterator", "postgres-protocol", + "serde", + "serde_json", "uuid", ] diff --git a/Cargo.toml b/Cargo.toml index 6d4d4e22006..607377de37e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,7 +13,10 @@ serde = "1.0.219" actix-web = "4.11.0" actix-cors = "0.7.1" refinery = { version = "0.8.16", features = ["tokio-postgres"] } -tokio-postgres = "0.7.13" +tokio-postgres = { version = "0.7.13", features = [ + "with-uuid-1", + "with-serde_json-1", +] } bb8 = "0.9.0" bb8-postgres = { version = "0.9.0", features = ["with-uuid-1"] } md5 = "0.8.0" diff --git a/etc/migrations/V1__initial.sql b/etc/migrations/V1__initial.sql new file mode 100644 index 00000000000..8d8b0976790 --- /dev/null +++ b/etc/migrations/V1__initial.sql @@ -0,0 +1,17 @@ +create table blob( + key text not null, + hash text not null +); + +create unique index blob_key on blob(key); +create unique index blob_hash on blob(hash); + + +create table object( + workspace uuid not null, + key text not null, + part int not null, + data jsonb not null, + + primary key (workspace, key, part) +) \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index aac9c860afe..17edb0f0140 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,7 +6,7 @@ use actix_web::{ body::MessageBody, dev::{ServiceRequest, 
ServiceResponse}, middleware::{Next, from_fn}, - web::{Data, Path, delete, get, post, put, scope}, + web::{Data, Path, get, scope}, }; use tracing::*; use tracing_actix_web::TracingLogger; @@ -15,9 +15,7 @@ use uuid::Uuid; use hulyrs::services::jwt::actix::ServiceRequestExt; mod config; -mod handlers; mod postgres; -mod s3; use config::CONFIG; @@ -45,7 +43,6 @@ async fn main() -> anyhow::Result<()> { env!("CARGO_PKG_VERSION") ); - let s3 = s3::client().await; let postgres = postgres::pool().await?; let bind_to = SocketAddr::new(CONFIG.bind_host.as_str().parse()?, CONFIG.bind_port); @@ -84,20 +81,17 @@ async fn main() -> anyhow::Result<()> { .supports_credentials() .max_age(3600); - const KEY_PATH: &str = "/{key:.*}"; + //const KEY_PATH: &str = "/{key:.*}"; App::new() .app_data(Data::new(postgres.clone())) - .app_data(Data::new(s3.clone())) .wrap(TracingLogger::default()) .wrap(cors) .service( - scope("/api/{workspace}") - .wrap(from_fn(auth)) - .route(KEY_PATH, get().to(handlers::get)) - .route(KEY_PATH, put().to(handlers::put)) - .route(KEY_PATH, post().to(handlers::post)) - .route(KEY_PATH, delete().to(handlers::delete)), + scope("/api/{workspace}").wrap(from_fn(auth)), //.route(KEY_PATH, get().to(handlers::get)) + //.route(KEY_PATH, put().to(handlers::put)) + //.route(KEY_PATH, post().to(handlers::post)) + //.route(KEY_PATH, delete().to(handlers::delete)), ) .route("/status", get().to(async || "ok")) }) @@ -110,37 +104,3 @@ async fn main() -> anyhow::Result<()> { Ok(()) } - -#[tokio::main] -async fn main_() -> anyhow::Result<()> { - use crate::{postgres::Pool, s3}; - use aws_sdk_s3::{presigning::PresigningConfig, primitives::ByteStream}; - - initialize_tracing(); - - let expires_in: std::time::Duration = std::time::Duration::from_secs(600); - let expires_in: aws_sdk_s3::presigning::PresigningConfig = - PresigningConfig::expires_in(expires_in).unwrap(); - - let s3 = s3::client().await; - - let presigned_request = s3 - .put_object() - .set_bucket(Some("hulylake".into())) - .set_key(Some("myobject".into())) - .presigned(expires_in) - .await - .unwrap(); - - let url = presigned_request.uri(); - - debug!(?url, "presigned request"); - - let client = reqwest::Client::new(); - let res = client.put(url).body("hello world").send().await.unwrap(); - - debug!(?res, "response"); - debug!("body: {:?}", res.text().await.unwrap()); - - Ok(()) -} diff --git a/src/postgres.rs b/src/postgres.rs new file mode 100644 index 00000000000..408e3897650 --- /dev/null +++ b/src/postgres.rs @@ -0,0 +1,184 @@ +use std::pin::Pin; + +use bb8_postgres::PostgresConnectionManager; +use tokio_postgres::NoTls; +use tokio_postgres::{self as pg}; +use tracing::*; + +use crate::config::CONFIG; + +pub type Pool = bb8::Pool>; + +pub async fn pool() -> anyhow::Result { + tracing::debug!( + connection = CONFIG.db_connection, + "database connection string" + ); + + let manager = bb8_postgres::PostgresConnectionManager::new_from_stringlike( + &CONFIG.db_connection, + tokio_postgres::NoTls, + )?; + + #[derive(Debug)] + struct ConnectionCustomizer; + + impl bb8::CustomizeConnection for ConnectionCustomizer { + fn on_acquire<'a>( + &'a self, + client: &'a mut pg::Client, + ) -> Pin> + Send + 'a>> { + Box::pin(async { + client + .execute("set search_path to $1", &[&CONFIG.db_scheme]) + .await + .unwrap(); + Ok(()) + }) + } + } + + let pool = bb8::Pool::builder() + .max_size(15) + .connection_customizer(Box::new(ConnectionCustomizer)) + .build(manager) + .await?; + + { + let mut connection = pool.dedicated_connection().await?; + + // 
query params cannot be bound in ddl statements + connection + .execute( + &format!("create schema if not exists {}", CONFIG.db_scheme), + &[], + ) + .await?; + + refinery::embed_migrations!("etc/migrations"); + + let report = migrations::runner() + .set_migration_table_name("migrations") + .run_async(&mut connection) + .await?; + + for m in report.applied_migrations().iter() { + info!(migration = m.to_string(), "apply migration"); + } + } + + Ok(pool) +} + +#[instrument(level = "debug", skip_all)] +pub async fn find_blob_by_hash(pool: &Pool, hash: &str) -> anyhow::Result> { + let connection = pool.get().await?; + + let blob = connection + .query("select key from blob where hash = $1", &[&hash]) + .await?; + + Ok(match blob.as_slice() { + [found] => Some(found.get::<_, String>("key")), + [] => None, + + _ => panic!(), + }) +} + +#[instrument(level = "debug", skip_all)] +pub async fn insert_blob(pool: &Pool, key: &str, hash: &str) -> anyhow::Result<()> { + let connection = pool.get().await?; + + connection + .execute( + "insert into blob (key, hash) values ($1, $2)", + &[&key, &hash], + ) + .await?; + + Ok(()) +} + +pub struct Object { + part: u32, + data: serde_json::Value, +} + +pub async fn find_parts( + pool: &Pool, + workspace: uuid::Uuid, + key: &str, +) -> anyhow::Result> { + let connection = pool.get().await?; + + let parts = connection + .query( + "select part, data from object where workspace = $1 and key = $1 order by part", + &[&workspace, &key], + ) + .await?; + + let parts = parts + .into_iter() + .map(|row| { + let part = row.get::<_, u32>("part"); + let data = row.get::<_, serde_json::Value>("data"); + Object { part, data } + }) + .collect(); + + Ok(parts) +} + +pub async fn insert_part( + pool: &Pool, + workspace: uuid::Uuid, + key: &str, + part: u32, + data: D, +) -> anyhow::Result<()> { + let connection = pool.get().await?; + + let data = serde_json::to_value(data)?; + + connection + .execute( + "insert into object (workspace, key, part, data) values ($1, $2, $3, $4)", + &[&workspace, &key, &part, &data], + ) + .await?; + + Ok(()) +} + +pub async fn shrink( + pool: &Pool, + workspace: uuid::Uuid, + key: &str, + data: D, +) -> anyhow::Result<()> { + let mut connection = pool.get().await?; + + let transaction = connection.transaction().await?; + + transaction + .execute( + "delete from object where workspace = $1 and key = $2 and part > 0", + &[&workspace, &key], + ) + .await?; + + let data = serde_json::to_value(data)?; + + transaction + .execute( + "update object set data=$1 where workspace = $2 and key = $3 and part = 0", + &[&data, &workspace, &key], + ) + .await?; + + transaction.commit().await?; + + Ok(()) +} From c27bd0da2d3f0db082ad38a143c363510f2f2c16 Mon Sep 17 00:00:00 2001 From: Alexey Aristov Date: Wed, 20 Aug 2025 12:00:56 +0200 Subject: [PATCH 180/636] upload endpoint Signed-off-by: Alexey Aristov --- Justfile | 5 +++ src/handlers.rs | 105 ++++++++++++++++++++++++++++++++++++++++++++++++ src/s3.rs | 26 ++++++++++++ 3 files changed, 136 insertions(+) create mode 100644 Justfile create mode 100644 src/handlers.rs create mode 100644 src/s3.rs diff --git a/Justfile b/Justfile new file mode 100644 index 00000000000..b05268d796b --- /dev/null +++ b/Justfile @@ -0,0 +1,5 @@ +workspace := "4cd5a9d5-7c74-47b1-ac93-265f0bcc73af" + + +put: + curl -X PUT -H "Authorization: Bearer $(cat _hidden/token.txt)" --data @_hidden/data.bin http://localhost:8096/api/{{workspace}}/abcd diff --git a/src/handlers.rs b/src/handlers.rs new file mode 100644 index 00000000000..2d86c11ed69 
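The same upload the Justfile `put` recipe performs with curl, sketched with `fetch`; host, port and the token location come from the Justfile and describe a local dev setup only, not a documented API contract:

```ts
// Mirrors: curl -X PUT -H "Authorization: Bearer $TOKEN" --data @file http://localhost:8096/api/<workspace>/<key>
async function putObject (workspace: string, key: string, body: Uint8Array, token: string): Promise<number> {
  const res = await fetch(`http://localhost:8096/api/${workspace}/${key}`, {
    method: 'PUT',
    headers: { Authorization: `Bearer ${token}` },
    body
  })
  return res.status
}

// e.g. await putObject('4cd5a9d5-7c74-47b1-ac93-265f0bcc73af', 'abcd', new Uint8Array([1, 2, 3]), token)
```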
--- /dev/null +++ b/src/handlers.rs @@ -0,0 +1,105 @@ +use actix_web::{ + HttpResponse, error, + web::{self, Data, Json, Path, Payload, Query}, +}; +use futures_util::{StreamExt, TryStreamExt}; +use serde::de; +use tokio_stream::wrappers::ReceiverStream; +use tracing::*; + +use crate::s3::S3Client; +use crate::{postgres::Pool, s3}; +use aws_sdk_s3::{presigning::PresigningConfig, primitives::ByteStream}; + +pub type ObjectPath = Path<(String, String)>; + +pub async fn get( + path: ObjectPath, + _pool: Data, + _body: web::Bytes, +) -> Result { + debug!(?path, "GET request"); + + unimplemented!() // +} + +use aws_sdk_s3::presigning::PresignedRequest; + +pub async fn put( + path: ObjectPath, + pool: Data, + s3: Data, + req: actix_web::HttpRequest, + mut payload: Payload, +) -> Result { + debug!(?path, "PUT request"); + + let client = reqwest::Client::new(); + + let (sender, receiver) = tokio::sync::mpsc::channel::>>(1); + + let length = req + .headers() + .get("Content-Length") + .unwrap() + .to_str() + .unwrap() + .to_owned(); + + tokio::spawn(async move { + let expires_in: std::time::Duration = std::time::Duration::from_secs(600); + let expires_in: aws_sdk_s3::presigning::PresigningConfig = + PresigningConfig::expires_in(expires_in).unwrap(); + + let presigned_request = s3 + .put_object() + .bucket("hulylake") + .key("test") + .presigned(expires_in) + .await + .unwrap(); + + let url = presigned_request.uri(); + + debug!(?url, "presigned request"); + + let res = client + .put(url) + .body(reqwest::Body::wrap_stream(ReceiverStream::new(receiver))) + // .header("Content-Length", length) + .send() + .await + .unwrap(); + + debug!(?res, "response"); + debug!("body: {:?}", res.text().await.unwrap()); + }); + + while let Some(item) = payload.next().await { + if let Ok(item) = item { + sender.send(Ok(item.to_vec())).await.unwrap(); + } + } + + //s3.put_object().body() + + unimplemented!() // +} + +pub async fn post( + path: ObjectPath, + _pool: Data, + _body: web::Bytes, +) -> Result { + debug!(?path, "POST request"); + unimplemented!() // +} + +pub async fn delete( + path: ObjectPath, + _pool: Data, + _body: web::Bytes, +) -> Result { + debug!(?path, "DELETE request"); + unimplemented!() // +} diff --git a/src/s3.rs b/src/s3.rs new file mode 100644 index 00000000000..652c6b25bdd --- /dev/null +++ b/src/s3.rs @@ -0,0 +1,26 @@ +use aws_config::{BehaviorVersion, Region}; +use aws_sdk_s3::Config; + +pub type S3Client = aws_sdk_s3::Client; + +pub async fn client() -> S3Client { + //let config = aws_sdk_s3::Config::builder() + // .behavior_version_latest() + // .force_path_style(true) + // .region(Region::new("us-west-4")) + // .build(); + + let ref config = aws_config::defaults(BehaviorVersion::latest()) + .load() + .await + .into_builder() + .build(); + + //let mut config = config.into::(); + let s3config = Config::from(config) + .to_builder() + .force_path_style(true) + .build(); + + S3Client::from_conf(s3config) +} From a5d4d0e3a8b7c95cfd0a816ae23b940f874511c9 Mon Sep 17 00:00:00 2001 From: Leonid Kaganov Date: Mon, 25 Aug 2025 21:50:11 +0300 Subject: [PATCH 181/636] Add option info --- .gitignore | 1 + Cargo.lock | 2 +- Cargo.toml | 2 +- DOCER.sh | 14 +++++++ README.md | 5 +++ scripts/TEST.html | 2 + scripts/TEST_lleo.html | 21 +++------- src/Dockerfile | 35 ++++++++++++++++ src/db.rs | 56 +++++++++++++++++-------- src/handlers_http.rs | 4 +- src/handlers_ws.rs | 88 +++++++++++++++++++++++++++++----------- src/hub_service.rs | 20 ++++----- src/main.rs | 45 +++++++++++++++------ src/memory.rs | 92 
+++++++++++++++++++++++++++++++++--------- src/redis.rs | 62 +++++++++++++++++++++++++--- src/workspace_owner.rs | 6 +-- 16 files changed, 341 insertions(+), 114 deletions(-) create mode 100755 DOCER.sh create mode 100644 src/Dockerfile diff --git a/.gitignore b/.gitignore index a4911831667..6ba67b2193f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ commit.sh GO.sh DROP_DB.sh TODO.txt +DOCKER.sh diff --git a/Cargo.lock b/Cargo.lock index 7f17d70d928..02e61846e81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1181,7 +1181,7 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hulypulse" -version = "0.1.10" +version = "0.1.12" dependencies = [ "actix", "actix-cors", diff --git a/Cargo.toml b/Cargo.toml index c79af425b2a..89852eb21b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hulypulse" -version = "0.1.10" +version = "0.1.12" edition = "2024" [dependencies] diff --git a/DOCER.sh b/DOCER.sh new file mode 100755 index 00000000000..7b5b1059c83 --- /dev/null +++ b/DOCER.sh @@ -0,0 +1,14 @@ +clear + +#cd hulykvs_server && + +# docker buildx build --tag=hardcoreeng/hulykvs:latest --platform=linux/amd64 --load . + + +docker buildx build --tag=hardcoreeng/hulypulse:latest --platform=linux/amd64 . + +# linux/arm64 + +# docker buildx build --tag=hardcoreeng/hulykvs:latest --platform=linux/amd64 . + +# --load diff --git a/README.md b/README.md index ead3fac26e3..ee218aebd1a 100644 --- a/README.md +++ b/README.md @@ -175,6 +175,11 @@ Size of data is limited to some reasonable size - Answer: `{"action":"list","result":["00000000-0000-0000-0000-000000000001/foo/bar1","00000000-0000-0000-0000-000000000001/foo/bar2"]}` +```INFO``` + - type: "info" + +- Answer: `{"db_mode":"memory","memory_info":"1231 keys, 80345 bytes","status":"OK","websockets":164}` + ** Server to Client ** subscribed events: diff --git a/scripts/TEST.html b/scripts/TEST.html index 2649fbd0e39..d87d6187e30 100644 --- a/scripts/TEST.html +++ b/scripts/TEST.html @@ -67,6 +67,8 @@

WebSocket JSON Tester

+ + diff --git a/scripts/TEST_lleo.html b/scripts/TEST_lleo.html index 706cc56619c..3a9608a5e10 100644 --- a/scripts/TEST_lleo.html +++ b/scripts/TEST_lleo.html @@ -67,25 +67,14 @@

WebSocket JSON Tester

- - - - - - - - - - - - - - + + + + + - -

Waiting for server response...
- + \ No newline at end of file diff --git a/src/config.rs b/src/config.rs index e9ca6186ce5..3a0e84ed98f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -56,6 +56,8 @@ pub struct Config { pub backend: BackendType, pub no_authorization: bool, + + pub heartbeat_timeout: u64, } pub static CONFIG: LazyLock = LazyLock::new(|| { diff --git a/src/config/default.toml b/src/config/default.toml index 2e7980fb5b1..bd5944e8b17 100644 --- a/src/config/default.toml +++ b/src/config/default.toml @@ -12,5 +12,7 @@ max_ttl = 3600 backend = "redis" no_authorization = false +heartbeat_timeout = 30 + # optional settings # max_size = 100 diff --git a/src/handlers_ws.rs b/src/handlers_ws.rs index 61a894a3d50..ac823c80318 100644 --- a/src/handlers_ws.rs +++ b/src/handlers_ws.rs @@ -143,11 +143,13 @@ impl Actor for WsSession { fn started(&mut self, ctx: &mut Self::Context) { let id = self.id; - let recipient = ctx.address().recipient::(); + // let recipient = ctx.address().recipient::(); + let addr = ctx.address(); + let hub_state = self.hub_state.clone(); ctx.spawn( actix::fut::wrap_future(async move { - hub_state.write().await.connect(id, recipient); + hub_state.write().await.connect(id, addr); }) .map(|_, _, _| ()), ); @@ -173,10 +175,37 @@ impl actix::Handler for WsSession { } } +// Disconecting +#[derive(actix::Message)] +#[rtype(result = "()")] +pub struct ForceDisconnect; + +impl actix::Handler for WsSession { + type Result = (); + + fn handle(&mut self, _msg: ForceDisconnect, ctx: &mut Self::Context) { + ctx.close(Some(ws::CloseReason { + code: ws::CloseCode::Normal, + description: Some("Disconnected by server".to_string()), + })); + ctx.stop(); + } +} + /// StreamHandler External trait: must be in separate impl block impl StreamHandler> for WsSession { fn handle(&mut self, msg: Result, ctx: &mut Self::Context) { + tracing::debug!("WebSocket message: {:?}", msg); match msg { + // String "ping" - answer "pong" + Ok(ws::Message::Text(text)) if text == "ping" => { + // renew heartbeat to unixtime + let hub_state = self.hub_state.clone(); + let id = self.id.clone(); + let fut = async move { hub_state.write().await.renew_heartbeat(id) }; + ctx.wait(fut::wrap_future(fut).map(|_, _, _| ())); + ctx.text("pong"); + } Ok(ws::Message::Text(text)) => match serde_json::from_str::(&text) { Ok(cmd) => self.handle_command(cmd, ctx), Err(err) => ctx.text(format!("Invalid JSON: {}", err)), @@ -195,11 +224,6 @@ impl StreamHandler> for WsSession { } } -// fn finished(&mut self, ctx: &mut Self::Context) { -// tracing::info!("Stream finished, stopping session"); -// ctx.stop(); // 🔑 если стрим оборван → закрыть -// } - /// All logic impl WsSession { fn fut_send( @@ -492,7 +516,7 @@ impl WsSession { tracing::info!("SUBLIST"); // correlation: {:?} , &correlation // w/o Check workspace! 
let base = serde_json::json!(ReturnBase { - action: "list", + action: "sublist", correlation: correlation.as_deref(), ..Default::default() }); diff --git a/src/hub_service.rs b/src/hub_service.rs index 5b44813a098..3ed0775dd1a 100644 --- a/src/hub_service.rs +++ b/src/hub_service.rs @@ -23,6 +23,9 @@ use redis::aio::MultiplexedConnection; use serde::Serialize; use tokio::sync::RwLock; +use crate::config::CONFIG; +use crate::handlers_ws::WsSession; + fn subscription_matches(sub_key: &str, key: &str) -> bool { if sub_key == key { return true; @@ -67,16 +70,26 @@ pub struct RedisEvent { #[derive(Debug, Default)] pub struct HubState { - sessions: HashMap>, + sessions: HashMap>, subs: HashMap>, + heartbeats: HashMap, } impl HubState { - pub fn connect(&mut self, session_id: SessionId, addr: Recipient) { + pub fn renew_heartbeat(&mut self, session_id: SessionId) { + if self.sessions.contains_key(&session_id) { + let now = std::time::Instant::now(); + self.heartbeats.insert(session_id, now); + } + } + pub fn connect(&mut self, session_id: SessionId, addr: Addr) { self.sessions.insert(session_id, addr); + let now = std::time::Instant::now(); + self.heartbeats.insert(session_id, now); } pub fn disconnect(&mut self, session_id: SessionId) { self.sessions.remove(&session_id); + self.heartbeats.remove(&session_id); self.subs.retain(|_, ids| { ids.remove(&session_id); !ids.is_empty() @@ -114,7 +127,7 @@ impl HubState { pub fn count(&self) -> usize { self.sessions.len() } - pub fn recipients_for_key(&self, key: &str) -> Vec> { + pub fn recipients_for_key(&self, key: &str) -> Vec> { let mut out = Vec::new(); for (sub_key, set) in &self.subs { if subscription_matches(sub_key, key) { @@ -136,8 +149,7 @@ pub async fn broadcast_event( value: Option, ) { // Collect - let recipients: Vec> = - { hub_state.read().await.recipients_for_key(&ev.key) }; + let recipients: Vec> = { hub_state.read().await.recipients_for_key(&ev.key) }; if recipients.is_empty() { return; } @@ -169,3 +181,37 @@ pub async fn push_event( broadcast_event(hub_state, ev, value).await; } + +pub fn check_heartbeat(hub_state: Arc>) { + tokio::spawn(async move { + let mut ticker = tokio::time::interval(std::time::Duration::from_secs(2)); + loop { + ticker.tick().await; + + let now = std::time::Instant::now(); + let timeout = std::time::Duration::from_secs(CONFIG.heartbeat_timeout); + let timelimit = now - timeout; + + let hub = hub_state.read().await; + let expired: Vec> = hub + .heartbeats + .iter() + .filter_map(|(&sid, &last_beat)| { + if last_beat < timelimit { + hub.sessions.get(&sid).cloned() + } else { + None + } + }) + .collect(); + + drop(hub); + + if !expired.is_empty() { + for addr in &expired { + addr.do_send(crate::handlers_ws::ForceDisconnect); + } + } + } + }); +} diff --git a/src/main.rs b/src/main.rs index 4de348e7242..e132eeac6c5 100644 --- a/src/main.rs +++ b/src/main.rs @@ -41,8 +41,8 @@ use config::CONFIG; mod db; mod memory; -use crate::db::Db; use crate::memory::MemoryBackend; +use crate::{db::Db, hub_service::check_heartbeat}; fn initialize_tracing(level: tracing::Level) { use tracing_subscriber::{filter::targets::Targets, prelude::*}; @@ -114,6 +114,9 @@ async fn main() -> anyhow::Result<()> { // starting HubService let hub_state = Arc::new(RwLock::new(HubState::default())); + // starting heartbeat checker + check_heartbeat(hub_state.clone()); + let db_backend = match CONFIG.backend { config::BackendType::Memory => { let memory = MemoryBackend::new(); From 72774956e100438e1d38a47dd27fb7c22db00fec Mon Sep 17 00:00:00 2001 
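A minimal client sketch for the heartbeat and info behaviour above, following the README contract ("ping" -> "pong", `{"type":"info"}` request) and the 30-second `heartbeat_timeout` default; the endpoint URL is an assumption for a local setup:

```ts
import WebSocket from 'ws'

const ws = new WebSocket('ws://localhost:8095/ws') // assumed local endpoint

ws.on('open', () => {
  // Keep the session alive: sessions whose last heartbeat is older than
  // heartbeat_timeout (30 s by default) are force-disconnected.
  setInterval(() => ws.send('ping'), 10_000)
  // Ask the server for runtime info
  ws.send(JSON.stringify({ type: 'info' }))
})

ws.on('message', (data) => {
  const text = data.toString()
  if (text === 'pong') return // heartbeat acknowledged
  console.log('server says:', text) // e.g. {"db_mode":"memory","status":"OK","websockets":164}
})
```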
From: Andrey Sobolev Date: Thu, 11 Sep 2025 22:53:12 +0700 Subject: [PATCH 259/636] Fixes in timeouts and an initial network-tool pod --- .vscode/launch.json | 16 + README.md | 2 +- common/config/rush/pnpm-lock.yaml | 321 +++++------------- packages/backrpc/src/__test__/backrpc.spec.ts | 2 +- packages/backrpc/src/client.ts | 5 + packages/backrpc/src/server.ts | 28 +- packages/backrpc/src/types.ts | 3 +- packages/client/src/agent.ts | 2 +- packages/client/src/client.ts | 3 +- packages/client/src/index.ts | 36 +- .../core/src/__test__/alive-checkins.spec.ts | 2 +- packages/core/src/__test__/network.spec.ts | 2 +- packages/core/src/agent.ts | 2 +- packages/core/src/api/client.ts | 2 +- .../core/src/api/{server.ts => network.ts} | 6 +- packages/core/src/index.ts | 4 +- packages/core/src/{server.ts => network.ts} | 72 ++-- packages/server/src/server.ts | 29 +- pods/network-pod/package.json | 2 - pods/network-tool/.eslintrc.js | 7 + pods/network-tool/.npmignore | 4 + pods/network-tool/Dockerfile | 11 + pods/network-tool/config/rig.json | 5 + pods/network-tool/jest.config.js | 7 + pods/network-tool/package.json | 71 ++++ pods/network-tool/src/agents.ts | 26 ++ pods/network-tool/src/benchmark.ts | 116 +++++++ pods/network-tool/src/index.ts | 27 ++ pods/network-tool/src/request.ts | 56 +++ pods/network-tool/src/utils.ts | 42 +++ pods/network-tool/tsconfig.json | 12 + rush.json | 5 + tests/src/__tests__/client.test.ts | 2 +- 33 files changed, 634 insertions(+), 296 deletions(-) rename packages/core/src/api/{server.ts => network.ts} (87%) rename packages/core/src/{server.ts => network.ts} (89%) create mode 100644 pods/network-tool/.eslintrc.js create mode 100644 pods/network-tool/.npmignore create mode 100644 pods/network-tool/Dockerfile create mode 100644 pods/network-tool/config/rig.json create mode 100644 pods/network-tool/jest.config.js create mode 100644 pods/network-tool/package.json create mode 100644 pods/network-tool/src/agents.ts create mode 100644 pods/network-tool/src/benchmark.ts create mode 100644 pods/network-tool/src/index.ts create mode 100644 pods/network-tool/src/request.ts create mode 100644 pods/network-tool/src/utils.ts create mode 100644 pods/network-tool/tsconfig.json diff --git a/.vscode/launch.json b/.vscode/launch.json index eda5540bab4..1dbdd7ed624 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -29,6 +29,22 @@ "sourceMaps": true, "cwd": "${workspaceRoot}/pods/network-pod", "protocol": "inspector" + }, + { + "name": "Debug Tool", + "type": "node", + "request": "launch", + "args": ["src/index.ts", "list-agents"], + "env": { + "NETWORK_HOST": "localhost:37371" + }, + "runtimeArgs": ["--nolazy", "-r", "ts-node/register"], + "runtimeVersion": "22", + "showAsyncStacks": true, + "outputCapture": "std", + "sourceMaps": true, + "cwd": "${workspaceRoot}/pods/network-tool", + "protocol": "inspector" } ] } diff --git a/README.md b/README.md index 69e29652c08..7cc5346f218 100644 --- a/README.md +++ b/README.md @@ -184,7 +184,7 @@ await networkClient.register(agent) import { createNetworkClient } from '@hcengineering/network-client' // Connect to network -const client = await createNetworkClient('localhost', 3737) +const client = await createNetworkClient('localhost:3737') // Get or create a container const containerRef = await client.get('user-session-1', { diff --git a/common/config/rush/pnpm-lock.yaml b/common/config/rush/pnpm-lock.yaml index 193022f10f4..36c8ed098bf 100644 --- a/common/config/rush/pnpm-lock.yaml +++ b/common/config/rush/pnpm-lock.yaml @@ -259,9 +259,6 
@@ importers: '@hcengineering/network-server': specifier: workspace:^0.7.0 version: link:../../packages/server - dockerode: - specifier: ^3.3.5 - version: 3.3.5 uuid: specifier: ^8.3.2 version: 8.3.2 @@ -269,9 +266,79 @@ importers: '@hcengineering/platform-rig': specifier: ^0.7.4 version: 0.7.4(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint-config-standard-with-typescript@40.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(eslint-plugin-import@2.32.0(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)(typescript@5.9.2))(eslint-plugin-import@2.32.0(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2) - '@types/dockerode': - specifier: ^3.3.19 - version: 3.3.43 + '@types/jest': + specifier: ^29.5.5 + version: 29.5.14 + '@types/node': + specifier: ^22.15.29 + version: 22.18.1 + '@types/uuid': + specifier: ^8.3.1 + version: 8.3.4 + '@typescript-eslint/eslint-plugin': + specifier: ^6.11.0 + version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2) + '@typescript-eslint/parser': + specifier: ^6.11.0 + version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) + esbuild: + specifier: ^0.24.2 + version: 0.24.2 + eslint: + specifier: ^8.54.0 + version: 8.57.1 + eslint-config-standard-with-typescript: + specifier: ^40.0.0 + version: 40.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(eslint-plugin-import@2.32.0(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)(typescript@5.9.2) + eslint-plugin-import: + specifier: ^2.26.0 + version: 2.32.0(eslint@8.57.1) + eslint-plugin-n: + specifier: ^15.4.0 + version: 15.7.0(eslint@8.57.1) + eslint-plugin-promise: + specifier: ^6.1.1 + version: 6.6.0(eslint@8.57.1) + jest: + specifier: ^29.7.0 + version: 29.7.0(@types/node@22.18.1)(ts-node@10.9.2(@types/node@22.18.1)(typescript@5.9.2)) + prettier: + specifier: ^3.1.0 + version: 3.6.2 + simplytyped: + specifier: ^3.3.0 + version: 3.3.0(typescript@5.9.2) + ts-jest: + specifier: ^29.1.1 + version: 29.4.1(@babel/core@7.28.4)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.28.4))(esbuild@0.24.2)(jest-util@29.7.0)(jest@29.7.0(@types/node@22.18.1)(ts-node@10.9.2(@types/node@22.18.1)(typescript@5.9.2)))(typescript@5.9.2) + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@22.18.1)(typescript@5.9.2) + typescript: + specifier: ^5.8.3 + version: 5.9.2 + + ../../pods/network-tool: + dependencies: + '@hcengineering/network-client': + specifier: workspace:^0.7.0 + version: link:../../packages/client + '@hcengineering/network-core': + specifier: workspace:^0.7.0 + version: link:../../packages/core + commander: + specifier: ^14.0.0 + version: 14.0.0 + uuid: + specifier: ^8.3.2 + version: 8.3.2 + zeromq: + specifier: ^6.5.0 + version: 6.5.0 + devDependencies: + '@hcengineering/platform-rig': + specifier: ^0.7.4 + version: 
0.7.4(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint-config-standard-with-typescript@40.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(eslint-plugin-import@2.32.0(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)(typescript@5.9.2))(eslint-plugin-import@2.32.0(eslint@8.57.1))(eslint-plugin-n@15.7.0(eslint@8.57.1))(eslint-plugin-promise@6.6.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2) '@types/jest': specifier: ^29.5.5 version: 29.5.14 @@ -564,9 +631,6 @@ packages: resolution: {integrity: sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==} engines: {node: '>=6.9.0'} - '@balena/dockerignore@1.0.2': - resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} - '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} @@ -1077,12 +1141,6 @@ packages: '@types/cors@2.8.19': resolution: {integrity: sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==} - '@types/docker-modem@3.0.6': - resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} - - '@types/dockerode@3.3.43': - resolution: {integrity: sha512-YCi0aKKpKeC9dhKTbuglvsWDnAyuIITd6CCJSTKiAdbDzPH4RWu0P9IK2XkJHdyplH6mzYtDYO+gB06JlzcPxg==} - '@types/express-serve-static-core@4.19.6': resolution: {integrity: sha512-N4LZ2xG7DatVqhCZzOGb1Yi5lMbXSZcmdLDe9EzSndPV2HpWYWzRbaerl2n27irrm94EPpprqa8KpskPT085+A==} @@ -1119,9 +1177,6 @@ packages: '@types/morgan@1.9.10': resolution: {integrity: sha512-sS4A1zheMvsADRVfT0lYbJ4S9lmsey8Zo2F7cnbYjWHP67Q0AwMYuuzLlkIM2N8gAbb9cubhIVFwcIN2XyYCkA==} - '@types/node@18.19.124': - resolution: {integrity: sha512-hY4YWZFLs3ku6D2Gqo3RchTd9VRCcrjqp/I0mmohYeUVA5Y8eCXKJEasHxLAJVZRJuQogfd1GiJ9lgogBgKeuQ==} - '@types/node@22.18.1': resolution: {integrity: sha512-rzSDyhn4cYznVG+PCzGe1lwuMYJrcBS1fc3JqSa2PvtABwWo+dZ1ij5OVok3tqfpEBCBoaR4d7upFJk73HRJDw==} @@ -1140,9 +1195,6 @@ packages: '@types/serve-static@1.15.8': resolution: {integrity: sha512-roei0UY3LhpOJvjbIP6ZZFngyLKl5dskOtDhxY5THRSpO+ZI+nzJ+m5yUMzGrp89YRa7lvknKkMYjqQFGwA7Sg==} - '@types/ssh2@1.15.5': - resolution: {integrity: sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==} - '@types/stack-utils@2.0.3': resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==} @@ -1293,9 +1345,6 @@ packages: resolution: {integrity: sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==} engines: {node: '>= 0.4'} - asn1@0.2.6: - resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} - async-function@1.0.0: resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} engines: {node: '>= 0.4'} @@ -1332,19 +1381,10 @@ packages: balanced-match@1.0.2: resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} - base64-js@1.5.1: - resolution: 
{integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - - bcrypt-pbkdf@1.0.2: - resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} - binary-extensions@2.3.0: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - bl@4.1.0: - resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} - brace-expansion@1.1.12: resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} @@ -1370,13 +1410,6 @@ packages: buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} - buffer@5.7.1: - resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} - - buildcheck@0.0.6: - resolution: {integrity: sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==} - engines: {node: '>=10.0.0'} - builtins@5.1.0: resolution: {integrity: sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==} @@ -1419,9 +1452,6 @@ packages: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} engines: {node: '>= 8.10.0'} - chownr@1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} - ci-info@3.9.0: resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} engines: {node: '>=8'} @@ -1451,16 +1481,16 @@ packages: color-name@1.1.4: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + commander@14.0.0: + resolution: {integrity: sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA==} + engines: {node: '>=20'} + concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} - cpu-features@0.0.10: - resolution: {integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} - engines: {node: '>=10.0.0'} - create-jest@29.7.0: resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -1536,14 +1566,6 @@ packages: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} - docker-modem@3.0.8: - resolution: {integrity: sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ==} - engines: {node: '>= 8.0'} - - dockerode@3.3.5: - resolution: {integrity: sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA==} - engines: {node: '>= 8.0'} - doctrine@2.1.0: resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} engines: {node: '>=0.10.0'} @@ -1566,9 +1588,6 @@ packages: 
emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} - end-of-stream@1.4.5: - resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} - error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -1798,9 +1817,6 @@ packages: resolution: {integrity: sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==} engines: {node: '>= 0.4'} - fs-constants@1.0.0: - resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} - fs-extra@10.1.0: resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} engines: {node: '>=12'} @@ -1924,9 +1940,6 @@ packages: resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} engines: {node: '>=10.17.0'} - ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -2351,15 +2364,9 @@ packages: minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - mkdirp-classic@0.5.3: - resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} - ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} - nan@2.23.0: - resolution: {integrity: sha512-1UxuyYGdoQHcGg87Lkqm3FzefucTa0NAiOcuRsDmysep3c1LVCRK2krrUDafMWtjSG04htvAmvg96+SDknOmgQ==} - natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -2506,9 +2513,6 @@ packages: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} - pump@3.0.3: - resolution: {integrity: sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==} - punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -2522,10 +2526,6 @@ packages: react-is@18.3.1: resolution: {integrity: sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==} - readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -2583,9 +2583,6 @@ packages: resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==} engines: {node: '>=0.4'} - safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - safe-push-apply@1.0.0: resolution: {integrity: 
sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==} engines: {node: '>= 0.4'} @@ -2594,9 +2591,6 @@ packages: resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==} engines: {node: '>= 0.4'} - safer-buffer@2.1.2: - resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} - semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -2664,16 +2658,9 @@ packages: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} - split-ca@1.0.1: - resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} - sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - ssh2@1.17.0: - resolution: {integrity: sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==} - engines: {node: '>=10.16.0'} - stack-utils@2.0.6: resolution: {integrity: sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==} engines: {node: '>=10'} @@ -2702,9 +2689,6 @@ packages: resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} engines: {node: '>= 0.4'} - string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} - strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -2737,13 +2721,6 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - tar-fs@2.0.1: - resolution: {integrity: sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==} - - tar-stream@2.2.0: - resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} - engines: {node: '>=6'} - test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} @@ -2808,9 +2785,6 @@ packages: tsconfig-paths@3.15.0: resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} - tweetnacl@0.14.5: - resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} - type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -2861,9 +2835,6 @@ packages: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} - undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} @@ -2880,9 +2851,6 @@ packages: uri-js@4.4.1: 
resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} - util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true @@ -3152,8 +3120,6 @@ snapshots: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 - '@balena/dockerignore@1.0.2': {} - '@bcoe/v8-coverage@0.2.3': {} '@cspotcode/source-map-support@0.8.1': @@ -3627,17 +3593,6 @@ snapshots: dependencies: '@types/node': 22.18.1 - '@types/docker-modem@3.0.6': - dependencies: - '@types/node': 22.18.1 - '@types/ssh2': 1.15.5 - - '@types/dockerode@3.3.43': - dependencies: - '@types/docker-modem': 3.0.6 - '@types/node': 22.18.1 - '@types/ssh2': 1.15.5 - '@types/express-serve-static-core@4.19.6': dependencies: '@types/node': 22.18.1 @@ -3683,10 +3638,6 @@ snapshots: dependencies: '@types/node': 22.18.1 - '@types/node@18.19.124': - dependencies: - undici-types: 5.26.5 - '@types/node@22.18.1': dependencies: undici-types: 6.21.0 @@ -3708,10 +3659,6 @@ snapshots: '@types/node': 22.18.1 '@types/send': 0.17.5 - '@types/ssh2@1.15.5': - dependencies: - '@types/node': 18.19.124 - '@types/stack-utils@2.0.3': {} '@types/uuid@8.3.4': {} @@ -3908,10 +3855,6 @@ snapshots: get-intrinsic: 1.3.0 is-array-buffer: 3.0.5 - asn1@0.2.6: - dependencies: - safer-buffer: 2.1.2 - async-function@1.0.0: {} available-typed-arrays@1.0.7: @@ -3975,20 +3918,8 @@ snapshots: balanced-match@1.0.2: {} - base64-js@1.5.1: {} - - bcrypt-pbkdf@1.0.2: - dependencies: - tweetnacl: 0.14.5 - binary-extensions@2.3.0: {} - bl@4.1.0: - dependencies: - buffer: 5.7.1 - inherits: 2.0.4 - readable-stream: 3.6.2 - brace-expansion@1.1.12: dependencies: balanced-match: 1.0.2 @@ -4019,14 +3950,6 @@ snapshots: buffer-from@1.1.2: {} - buffer@5.7.1: - dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - - buildcheck@0.0.6: - optional: true - builtins@5.1.0: dependencies: semver: 7.7.2 @@ -4075,8 +3998,6 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - chownr@1.1.4: {} - ci-info@3.9.0: {} cjs-module-lexer@1.4.3: {} @@ -4099,16 +4020,12 @@ snapshots: color-name@1.1.4: {} + commander@14.0.0: {} + concat-map@0.0.1: {} convert-source-map@2.0.0: {} - cpu-features@0.0.10: - dependencies: - buildcheck: 0.0.6 - nan: 2.23.0 - optional: true - create-jest@29.7.0(@types/node@22.18.1)(ts-node@10.9.2(@types/node@22.18.1)(typescript@5.9.2)): dependencies: '@jest/types': 29.6.3 @@ -4186,23 +4103,6 @@ snapshots: dependencies: path-type: 4.0.0 - docker-modem@3.0.8: - dependencies: - debug: 4.4.1 - readable-stream: 3.6.2 - split-ca: 1.0.1 - ssh2: 1.17.0 - transitivePeerDependencies: - - supports-color - - dockerode@3.3.5: - dependencies: - '@balena/dockerignore': 1.0.2 - docker-modem: 3.0.8 - tar-fs: 2.0.1 - transitivePeerDependencies: - - supports-color - doctrine@2.1.0: dependencies: esutils: 2.0.3 @@ -4223,10 +4123,6 @@ snapshots: emoji-regex@8.0.0: {} - end-of-stream@1.4.5: - dependencies: - once: 1.4.0 - error-ex@1.3.2: dependencies: is-arrayish: 0.2.1 @@ -4618,8 +4514,6 @@ snapshots: dependencies: is-callable: 1.2.7 - fs-constants@1.0.0: {} - fs-extra@10.1.0: dependencies: graceful-fs: 4.2.11 @@ -4752,8 +4646,6 @@ snapshots: human-signals@2.1.0: {} - ieee754@1.2.1: {} - ignore@5.3.2: {} import-fresh@3.3.1: @@ -5353,13 +5245,8 @@ snapshots: minimist@1.2.8: {} - mkdirp-classic@0.5.3: {} - 
ms@2.1.3: {} - nan@2.23.0: - optional: true - natural-compare@1.4.0: {} neo-async@2.6.2: {} @@ -5498,11 +5385,6 @@ snapshots: kleur: 3.0.3 sisteransi: 1.0.5 - pump@3.0.3: - dependencies: - end-of-stream: 1.4.5 - once: 1.4.0 - punycode@2.3.1: {} pure-rand@6.1.0: {} @@ -5511,12 +5393,6 @@ snapshots: react-is@18.3.1: {} - readable-stream@3.6.2: - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - readdirp@3.6.0: dependencies: picomatch: 2.3.1 @@ -5579,8 +5455,6 @@ snapshots: has-symbols: 1.1.0 isarray: 2.0.5 - safe-buffer@5.2.1: {} - safe-push-apply@1.0.0: dependencies: es-errors: 1.3.0 @@ -5592,8 +5466,6 @@ snapshots: es-errors: 1.3.0 is-regex: 1.2.1 - safer-buffer@2.1.2: {} - semver@6.3.1: {} semver@7.7.2: {} @@ -5671,18 +5543,8 @@ snapshots: source-map@0.6.1: {} - split-ca@1.0.1: {} - sprintf-js@1.0.3: {} - ssh2@1.17.0: - dependencies: - asn1: 0.2.6 - bcrypt-pbkdf: 1.0.2 - optionalDependencies: - cpu-features: 0.0.10 - nan: 2.23.0 - stack-utils@2.0.6: dependencies: escape-string-regexp: 2.0.0 @@ -5726,10 +5588,6 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 - string_decoder@1.3.0: - dependencies: - safe-buffer: 5.2.1 - strip-ansi@6.0.1: dependencies: ansi-regex: 5.0.1 @@ -5752,21 +5610,6 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - tar-fs@2.0.1: - dependencies: - chownr: 1.1.4 - mkdirp-classic: 0.5.3 - pump: 3.0.3 - tar-stream: 2.2.0 - - tar-stream@2.2.0: - dependencies: - bl: 4.1.0 - end-of-stream: 1.4.5 - fs-constants: 1.0.0 - inherits: 2.0.4 - readable-stream: 3.6.2 - test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 @@ -5831,8 +5674,6 @@ snapshots: minimist: 1.2.8 strip-bom: 3.0.0 - tweetnacl@0.14.5: {} - type-check@0.4.0: dependencies: prelude-ls: 1.2.1 @@ -5890,8 +5731,6 @@ snapshots: has-symbols: 1.1.0 which-boxed-primitive: 1.1.1 - undici-types@5.26.5: {} - undici-types@6.21.0: {} universalify@2.0.1: {} @@ -5906,8 +5745,6 @@ snapshots: dependencies: punycode: 2.3.1 - util-deprecate@1.0.2: {} - uuid@8.3.2: {} v8-compile-cache-lib@3.0.1: {} diff --git a/packages/backrpc/src/__test__/backrpc.spec.ts b/packages/backrpc/src/__test__/backrpc.spec.ts index 46be46d31c6..e6d1fa038e6 100644 --- a/packages/backrpc/src/__test__/backrpc.spec.ts +++ b/packages/backrpc/src/__test__/backrpc.spec.ts @@ -452,7 +452,7 @@ describe('backrpc', () => { helloHandler: async (clientId) => { console.log(`Client ${clientId} connected`) }, - handleTimeout: async (clientId) => { + closeHandler: async (clientId) => { timeoutHandlerCalled = true console.log(`Timeout for client ${clientId}`) } diff --git a/packages/backrpc/src/client.ts b/packages/backrpc/src/client.ts index 445faea772a..81042ffec96 100644 --- a/packages/backrpc/src/client.ts +++ b/packages/backrpc/src/client.ts @@ -234,6 +234,11 @@ export class BackRPCClient { } close (): void { + if (!this.closed) { + void this.doSend([backrpcOperations.close, '', '']).catch((err) => { + console.error('Failed to send close', err) + }) + } this.closed = true this.stopTick?.() this.dealer.close() diff --git a/packages/backrpc/src/server.ts b/packages/backrpc/src/server.ts index 3e2893c43d7..7def73442e7 100644 --- a/packages/backrpc/src/server.ts +++ b/packages/backrpc/src/server.ts @@ -28,7 +28,8 @@ export interface BackRPCServerHandler { send: (response: any) => Promise ) => Promise helloHandler?: (client: ClientT) => Promise - handleTimeout?: (client: ClientT) => Promise + closeHandler?: (client: ClientT, timeout: boolean) => Promise + onPing?: (client: ClientT) => void } interface 
RPCClientInfo { @@ -79,7 +80,6 @@ export class BackRPCServer { } async checkAlive (): Promise { - console.log('check alive:', this.revClientMapping.size, JSON.stringify(this.stats)) this.stats.hellos = 0 this.stats.pings = 0 this.stats.requests = 0 @@ -93,15 +93,19 @@ export class BackRPCServer { console.warn( `Client ${clientId} has been inactive for ${Math.round(timeSinceLastSeen / 1000)}s, marking as dead` ) - void this.handlers.handleTimeout?.(clientRecord.id).catch((err) => { - console.error('Error in handleTimeout', err) - }) - this.revClientMapping.delete(clientId) - this.clientMapping.delete(clientRecord.id) + this.handleClose(clientRecord.id, clientId, true) } } } + private handleClose (clientRecordId: ClientT, clientId: string, timeout: boolean): void { + void this.handlers.closeHandler?.(clientRecordId, timeout).catch((err) => { + console.error('Error in handleTimeout', err) + }) + this.revClientMapping.delete(clientId) + this.clientMapping.delete(clientRecordId) + } + async getPort (): Promise { await this.bound const reqEndpoint = this.router.lastEndpoint @@ -190,10 +194,18 @@ export class BackRPCServer { } break } + case backrpcOperations.close: { + if (client !== undefined) { + this.handleClose(client.id, clientIdText, false) + } + break + } case backrpcOperations.ping: { this.stats.pings++ void this.doSend([clientId, backrpcOperations.pong, this.uuid, '']) - // console.log('ping:' + clientIdText) + if (client !== undefined) { + this.handlers.onPing?.(client?.id) + } break } case backrpcOperations.request: diff --git a/packages/backrpc/src/types.ts b/packages/backrpc/src/types.ts index c336a9134de..4b1210c936e 100644 --- a/packages/backrpc/src/types.ts +++ b/packages/backrpc/src/types.ts @@ -6,7 +6,8 @@ export const backrpcOperations = { event: 4, ping: 5, pong: 6, - retry: 7 + retry: 7, + close: 8 } export type ClientId = string & { __clientId: string } diff --git a/packages/client/src/agent.ts b/packages/client/src/agent.ts index 9b32bf36736..711c93deb70 100644 --- a/packages/client/src/agent.ts +++ b/packages/client/src/agent.ts @@ -108,7 +108,7 @@ export class NetworkAgentServer implements BackRPCServerHandler { console.log(`Client ${clientId} connected`) } - async handleTimeout (client: ClientUuid): Promise { + async closeHandler (client: ClientUuid): Promise { console.log(`Client ${client} timed out`) } } diff --git a/packages/client/src/client.ts b/packages/client/src/client.ts index 401a93f9261..cfa0c95afe6 100644 --- a/packages/client/src/client.ts +++ b/packages/client/src/client.ts @@ -136,7 +136,6 @@ export class NetworkClientImpl implements NetworkClient { uuid: agent.agent.uuid }) } - this.client.close() } @@ -340,7 +339,7 @@ export class NetworkClientImpl implements NetworkClient { await this.client.request(opNames.releaseContainer, { uuid }) } - async list (kind: ContainerKind): Promise { + async list (kind?: ContainerKind): Promise { return await this.client.request(opNames.listContainers, { kind }) diff --git a/packages/client/src/index.ts b/packages/client/src/index.ts index 219cf139d39..6ea38503e49 100644 --- a/packages/client/src/index.ts +++ b/packages/client/src/index.ts @@ -1,9 +1,20 @@ -import { TickManagerImpl, timeouts, type NetworkClient } from '@hcengineering/network-core' +import { + AgentImpl, + TickManagerImpl, + timeouts, + type AgentUuid, + type ContainerFactory, + type ContainerKind, + type NetworkAgent, + type NetworkClient +} from '@hcengineering/network-core' +import { v4 as uuidv4 } from 'uuid' +import { NetworkAgentServer } from 
'./agent' import { NetworkClientImpl } from './client' -export * from './types' -export * from './client' export * from './agent' +export * from './client' +export * from './types' const tickMgr = new TickManagerImpl(timeouts.pingInterval * 2) tickMgr.start() @@ -16,6 +27,23 @@ process.on('exit', () => { shutdownNetworkTickMgr() }) -export function createNetworkClient (host: string, port: number): NetworkClient { +export function createNetworkClient (url: string): NetworkClient { + const [host, portStr] = url.split(':') + const port = portStr != null ? parseInt(portStr, 10) : 3737 return new NetworkClientImpl(host, port, tickMgr) } + +export async function createAgent ( + endpointUrl: string, + factory: Record +): Promise<{ agent: NetworkAgent, server: NetworkAgentServer }> { + const agent = new AgentImpl(uuidv4() as AgentUuid, factory) + + const [host, portStr] = endpointUrl.split(':') + const port = portStr != null ? parseInt(portStr, 10) : 3738 + + const server = new NetworkAgentServer(tickMgr, host, '*', port) + + await server.start(agent) + return { agent, server } +} diff --git a/packages/core/src/__test__/alive-checkins.spec.ts b/packages/core/src/__test__/alive-checkins.spec.ts index edc78e15b66..fde4a30cc27 100644 --- a/packages/core/src/__test__/alive-checkins.spec.ts +++ b/packages/core/src/__test__/alive-checkins.spec.ts @@ -2,7 +2,7 @@ import type { AgentUuid } from '../api/types' import { timeouts } from '../api/timeouts' import { AgentImpl } from '../agent' import { TickManagerImpl } from '../utils' -import { NetworkImpl } from '../server' +import { NetworkImpl } from '../network' class FakeTickManager extends TickManagerImpl { time: number = 0 diff --git a/packages/core/src/__test__/network.spec.ts b/packages/core/src/__test__/network.spec.ts index 21f97b9ea1d..5f71e696947 100644 --- a/packages/core/src/__test__/network.spec.ts +++ b/packages/core/src/__test__/network.spec.ts @@ -1,7 +1,7 @@ import { AgentImpl } from '../agent' import type { AgentUuid, ClientUuid, ContainerEndpointRef, ContainerKind } from '../api/types' import type { Container } from '../containers' -import { NetworkImpl } from '../server' +import { NetworkImpl } from '../network' import { TickManagerImpl } from '../utils' // class DummyConnectionManager implements ConnectionManager { diff --git a/packages/core/src/agent.ts b/packages/core/src/agent.ts index a3164dcd3a4..b1a0b3d2d54 100644 --- a/packages/core/src/agent.ts +++ b/packages/core/src/agent.ts @@ -1,5 +1,5 @@ import type { NetworkAgent } from './api/agent' -import type { Network } from './api/server' +import type { Network } from './api/network' import type { AgentEndpointRef, AgentUuid, diff --git a/packages/core/src/api/client.ts b/packages/core/src/api/client.ts index c755805f9c4..5da3bccdb6f 100644 --- a/packages/core/src/api/client.ts +++ b/packages/core/src/api/client.ts @@ -35,7 +35,7 @@ export interface NetworkClient { */ get: (uuid: ContainerUuid, request: ContainerRequest) => Promise - list: (kind: ContainerKind) => Promise + list: (kind?: ContainerKind) => Promise // Send some data to container, using proxy connection. 
request: (target: ContainerUuid, operation: string, data?: any) => Promise diff --git a/packages/core/src/api/server.ts b/packages/core/src/api/network.ts similarity index 87% rename from packages/core/src/api/server.ts rename to packages/core/src/api/network.ts index 2b7480bb4ce..c19adab485e 100644 --- a/packages/core/src/api/server.ts +++ b/packages/core/src/api/network.ts @@ -43,7 +43,7 @@ export interface Network { */ release: (client: ClientUuid, uuid: ContainerUuid) => Promise - list: (kind: ContainerKind) => Promise + list: (kind?: ContainerKind) => Promise // Send some data to container, using proxy connection. request: (target: ContainerUuid, operation: string, data?: any) => Promise @@ -54,4 +54,8 @@ export interface Network { export interface NetworkWithClients { addClient: (clientUuid: ClientUuid, onContainer?: (event: ContainerEvent) => Promise) => void removeClient: (clientUuid: ClientUuid) => void + + // When client is registering agent. + mapAgent: (clientUuid: ClientUuid, agentUuid: AgentUuid) => void + unmapAgent: (clientUuid: ClientUuid, agentUuid: AgentUuid) => void } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 98829a2e422..0dd79078551 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -2,10 +2,10 @@ export * from './api/types' export * from './api/utils' export * from './api/agent' export * from './api/client' -export * from './api/server' +export * from './api/network' export * from './api/timeouts' export * from './utils' export * from './endpoints' export * from './agent' export * from './containers' -export * from './server' +export * from './network' diff --git a/packages/core/src/server.ts b/packages/core/src/network.ts similarity index 89% rename from packages/core/src/server.ts rename to packages/core/src/network.ts index 82ff2697e2e..e4beb1165b6 100644 --- a/packages/core/src/server.ts +++ b/packages/core/src/network.ts @@ -1,3 +1,6 @@ +import type { AgentRecord, NetworkAgent } from './api/agent' +import type { Network, NetworkWithClients } from './api/network' +import { timeouts } from './api/timeouts' import type { AgentEndpointRef, AgentUuid, @@ -9,11 +12,8 @@ import type { ContainerRequest, ContainerUuid } from './api/types' -import { timeouts } from './api/timeouts' import type { TickManager } from './api/utils' import type { ContainerRecordImpl } from './containers' -import type { AgentRecord, NetworkAgent } from './api/agent' -import type { Network, NetworkWithClients } from './api/server' interface AgentRecordImpl { api: NetworkAgent @@ -28,6 +28,8 @@ interface ClientRecordImpl { lastSeen: number containers: Set onContainer?: (event: ContainerEvent) => Promise + + agents: Set } /** * Server network implementation. 
@@ -68,7 +70,9 @@ export class NetworkImpl implements Network, NetworkWithClients { agentId: api.uuid, endpoint: api.endpoint, kinds: api.kinds, - containers: Object.values(containers).map(({ record }) => record) + containers: Array.from(containers.values()) + .filter((it) => !(it.endpoint instanceof Promise)) + .map(({ record, endpoint }) => ({ ...record, endpoint: endpoint as ContainerEndpointRef })) })) ) } @@ -82,10 +86,10 @@ export class NetworkImpl implements Network, NetworkWithClients { ) } - async list (kind: ContainerKind): Promise { + async list (kind?: ContainerKind): Promise { return Array.from(this._agents.values()) .flatMap((it) => Array.from(it.containers.values())) - .filter((it) => it.record.kind === kind) + .filter((it) => kind === undefined || it.record.kind === kind) .map((it) => it.record) } @@ -223,19 +227,47 @@ export class NetworkImpl implements Network, NetworkWithClients { } addClient (clientUuid: ClientUuid, onContainer?: (event: ContainerEvent) => Promise): void { - this._clients.set(clientUuid, { lastSeen: this.tickManager.now(), containers: new Set(), onContainer }) + const info = this._clients.get(clientUuid) ?? { + lastSeen: this.tickManager.now(), + containers: new Set(), + onContainer, + agents: new Set() + } + info.onContainer = onContainer + this._clients.set(clientUuid, info) } - removeClient (clientUuid: ClientUuid): void { - this._clients.delete(clientUuid) + removeClient (client: ClientUuid): void { + // Handle outdated clients + const clientRecord = this._clients.get(client) + if (clientRecord !== undefined) { + for (const uuid of clientRecord.containers) { + this.release(client, uuid).catch((err) => { + console.error(`Error releasing container ${uuid} for client ${client}:`, err) + }) + } + } + this._clients.delete(client) } - async get (clientUuid: ClientUuid, uuid: ContainerUuid, request: ContainerRequest): Promise { - this.ping(clientUuid) + mapAgent (clientUuid: ClientUuid, agentUuid: AgentUuid): void { + const client = this._clients.get(clientUuid) + if (client !== undefined) { + client.agents.add(agentUuid) + } + } + + unmapAgent (clientUuid: ClientUuid, agentUuid: AgentUuid): void { + const client = this._clients.get(clientUuid) + if (client !== undefined) { + client.agents.delete(agentUuid) + } + } + async get (clientUuid: ClientUuid, uuid: ContainerUuid, request: ContainerRequest): Promise { let client = this._clients.get(clientUuid) if (client === undefined) { - client = { lastSeen: this.tickManager.now(), containers: new Set() } + client = { lastSeen: this.tickManager.now(), containers: new Set(), agents: new Set() } this._clients.set(clientUuid, client) } client.containers.add(uuid) @@ -266,6 +298,7 @@ export class NetworkImpl implements Network, NetworkWithClients { return containerImpl } } + // Select agent using round/robin and register it in agent const suitableAgents = Array.from(this._agents.values().filter((it) => it.kinds.includes(request.kind))) if (suitableAgents.length === 0) { @@ -352,18 +385,13 @@ export class NetworkImpl implements Network, NetworkWithClients { const client = this._clients.get(id as ClientUuid) if (client != null) { client.lastSeen = this.tickManager.now() - } - } - - async handleTimeout (client: ClientUuid): Promise { - // Handle outdated clients - const clientRecord = this._clients.get(client) - if (clientRecord !== undefined) { - for (const uuid of clientRecord.containers) { - await this.release(client, uuid) + for (const agent of client.agents) { + const ag = this._agents.get(agent) + if (ag != null) 
{ + ag.lastSeen = this.tickManager.now() + } } } - this._clients.delete(client) } /** diff --git a/packages/server/src/server.ts b/packages/server/src/server.ts index a1c06754e27..9d7e40780ee 100644 --- a/packages/server/src/server.ts +++ b/packages/server/src/server.ts @@ -55,6 +55,15 @@ export class NetworkServer implements BackRPCServerHandler { port: number = 3737 ) { this.rpcServer = new BackRPCServer(this, tickMgr, host, port) + + this.tickMgr.register(async () => { + console.log( + 'check alive:', + this.clients.size, + (await this.network.agents()).length, + (await this.network.list()).length + ) + }, 5) } async close (): Promise { @@ -71,7 +80,7 @@ export class NetworkServer implements BackRPCServerHandler { break } case opNames.unregister: { - await this.handleUnregister(params, send) + await this.handleUnregister(params, client, send) break } case opNames.getAgents: { @@ -123,10 +132,16 @@ export class NetworkServer implements BackRPCServerHandler { }) } - async handleTimeout (client: ClientUuid): Promise { + onPing (client: ClientUuid): void { + this.network.ping(client) + } + + async closeHandler (client: ClientUuid, timeout: boolean = false): Promise { this.clients.delete(client) this.network.removeClient(client) - console.log(`Client ${client} timed out ${this.clients.size}`) + if (timeout) { + console.log(`Client ${client} timed out ${this.clients.size}`) + } } private async handleRegister ( @@ -148,12 +163,18 @@ export class NetworkServer implements BackRPCServerHandler { }, new AgentCallbackHandler(server, agentUuid, endpoint, kinds, client) ) + this.network.mapAgent(client, agentUuid) await send(res) } - private async handleUnregister (params: any, send: (response: any) => Promise): Promise { + private async handleUnregister ( + params: any, + client: ClientUuid, + send: (response: any) => Promise + ): Promise { const agentUuid: AgentUuid = params.uuid await this.network.unregister(agentUuid) + this.network.unmapAgent(client, agentUuid) await send('ok') } } diff --git a/pods/network-pod/package.json b/pods/network-pod/package.json index d9942029a41..521943fb045 100644 --- a/pods/network-pod/package.json +++ b/pods/network-pod/package.json @@ -47,14 +47,12 @@ "@types/jest": "^29.5.5", "@types/node": "^22.15.29", "@types/uuid": "^8.3.1", - "@types/dockerode": "^3.3.19", "esbuild": "^0.24.2", "ts-node": "^10.9.2" }, "dependencies": { "@hcengineering/network-core": "workspace:^0.7.0", "@hcengineering/network-server": "workspace:^0.7.0", - "dockerode": "^3.3.5", "uuid": "^8.3.2" }, "repository": "https://github.com/hcengineering/platform", diff --git a/pods/network-tool/.eslintrc.js b/pods/network-tool/.eslintrc.js new file mode 100644 index 00000000000..ce90fb9646f --- /dev/null +++ b/pods/network-tool/.eslintrc.js @@ -0,0 +1,7 @@ +module.exports = { + extends: ['./node_modules/@hcengineering/platform-rig/profiles/node/eslint.config.json'], + parserOptions: { + tsconfigRootDir: __dirname, + project: './tsconfig.json' + } +} diff --git a/pods/network-tool/.npmignore b/pods/network-tool/.npmignore new file mode 100644 index 00000000000..e3ec093c383 --- /dev/null +++ b/pods/network-tool/.npmignore @@ -0,0 +1,4 @@ +* +!/lib/** +!CHANGELOG.md +/lib/**/__tests__/ diff --git a/pods/network-tool/Dockerfile b/pods/network-tool/Dockerfile new file mode 100644 index 00000000000..379738a6a5a --- /dev/null +++ b/pods/network-tool/Dockerfile @@ -0,0 +1,11 @@ +FROM hardcoreeng/base:v20250310 + +WORKDIR /usr/src/app + +RUN npm install --ignore-scripts=false --verbose zeromq --unsafe-perm + 
+COPY bundle/bundle.js ./ +COPY bundle/bundle.js.map ./ + +EXPOSE 8080 +CMD [ "node", "./bundle.js" ] diff --git a/pods/network-tool/config/rig.json b/pods/network-tool/config/rig.json new file mode 100644 index 00000000000..b94bbb0650a --- /dev/null +++ b/pods/network-tool/config/rig.json @@ -0,0 +1,5 @@ +{ + "$schema": "https://developer.microsoft.com/json-schemas/rig-package/rig.schema.json", + "rigPackageName": "@hcengineering/platform-rig", + "rigProfile": "node" +} diff --git a/pods/network-tool/jest.config.js b/pods/network-tool/jest.config.js new file mode 100644 index 00000000000..6479224b481 --- /dev/null +++ b/pods/network-tool/jest.config.js @@ -0,0 +1,7 @@ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'], + roots: ['./src'], + coverageReporters: ['text-summary', 'html'] +} diff --git a/pods/network-tool/package.json b/pods/network-tool/package.json new file mode 100644 index 00000000000..4390fd45588 --- /dev/null +++ b/pods/network-tool/package.json @@ -0,0 +1,71 @@ +{ + "name": "@hcengineering/network-tool", + "version": "0.7.0", + "main": "lib/index.js", + "svelte": "src/index.ts", + "types": "types/index.d.ts", + "author": "Huly Platform Contributors", + "template": "@hcengineering/node-package", + "license": "EPL-2.0", + "scripts": { + "start": "rush bundle --to @hcengineering/network-tool && cross-env NODE_ENV=production MODEL_VERSION=$(node ../../common/scripts/show_version.js) ACCOUNTS_URL=http://localhost:3000 REKONI_URL=http://localhost:4004 MONGO_URL=mongodb://localhost:27017 DB_URL=mongodb://localhost:27017 FRONT_URL=http://localhost:8087 UPLOAD_URL=/upload MINIO_ENDPOINT=localhost MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin METRICS_CONSOLE=true SERVER_SECRET=secret OPERATION_PROFILING=false MODEL_JSON=../../models/all/bundle/model.json STATS_URL=http://huly.local:4900 node --inspect bundle/bundle.js", + "start-cr": "rush bundle --to @hcengineering/network-tool && cross-env NODE_ENV=production MODEL_VERSION=$(node ../../common/scripts/show_version.js) ACCOUNTS_URL=http://localhost:3000 REKONI_URL=http://localhost:4004 DB_URL=postgresql://root@huly.local:26257/defaultdb?sslmode=disable FRONT_URL=http://localhost:8087 UPLOAD_URL=/upload MINIO_ENDPOINT=localhost MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin METRICS_CONSOLE=true SERVER_SECRET=secret OPERATION_PROFILING=false MODEL_JSON=../../models/all/bundle/model.json STATS_URL=http://huly.local:4900 FULLTEXT_URL=http://huly.local:4702 SERVER_PORT=3332 node --inspect bundle/bundle.js", + "start-flame": "rush bundle --to @hcengineering/network-tool && cross-env NODE_ENV=production MODEL_VERSION=$(node ../../common/scripts/show_version.js) ACCOUNTS_URL=http://localhost:3000 REKONI_URL=http://localhost:4004 MONGO_URL=mongodb://localhost:27017 FRONT_URL=http://localhost:8087 UPLOAD_URL=/upload MINIO_ENDPOINT=localhost MINIO_ACCESS_KEY=minioadmin MINIO_SECRET_KEY=minioadmin METRICS_CONSOLE=true SERVER_SECRET=secret MODEL_JSON=../../models/all/bundle/model.json clinic flame --dest ./out -- node --nolazy -r ts-node/register --enable-source-maps src/__start.ts", + "start-raw": "ts-node src/__start.ts", + "build": "compile", + "_phase:bundle": "rushx bundle", + "_phase:docker-build": "rushx docker:build", + "_phase:docker-staging": "rushx docker:staging", + "bundle": "node ../../common/scripts/esbuild.js --entry=src/index.ts --keep-names=true --bundle=true --sourcemap=external --external=zeromq", + "docker:build": 
"../../common/scripts/docker_build.sh hardcoreeng/network-tool", + "docker:tbuild": "docker build -t hardcoreeng/network-tool . --platform=linux/amd64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/network-tool", + "docker:abuild": "docker build -t hardcoreeng/network-tool . --platform=linux/arm64 && ../../common/scripts/docker_tag_push.sh hardcoreeng/network-tool", + "docker:staging": "../../common/scripts/docker_tag.sh hardcoreeng/network-tool staging", + "docker:push": "../../common/scripts/docker_tag.sh hardcoreeng/network-tool", + "build:watch": "compile", + "format": "format src", + "test": "jest --passWithNoTests --silent --forceExit", + "_phase:build": "compile transpile src", + "_phase:test": "jest --passWithNoTests --silent --forceExit", + "_phase:format": "format src", + "_phase:validate": "compile validate" + }, + "devDependencies": { + "@hcengineering/platform-rig": "^0.7.4", + "@typescript-eslint/eslint-plugin": "^6.11.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-promise": "^6.1.1", + "eslint-plugin-n": "^15.4.0", + "eslint": "^8.54.0", + "simplytyped": "^3.3.0", + "@typescript-eslint/parser": "^6.11.0", + "eslint-config-standard-with-typescript": "^40.0.0", + "prettier": "^3.1.0", + "typescript": "^5.8.3", + "jest": "^29.7.0", + "ts-jest": "^29.1.1", + "@types/jest": "^29.5.5", + "@types/node": "^22.15.29", + "@types/uuid": "^8.3.1", + "esbuild": "^0.24.2", + "ts-node": "^10.9.2" + }, + "dependencies": { + "@hcengineering/network-core": "workspace:^0.7.0", + "@hcengineering/network-client": "workspace:^0.7.0", + "uuid": "^8.3.2", + "commander": "^14.0.0", + "zeromq": "^6.5.0" + }, + "repository": "https://github.com/hcengineering/platform", + "exports": { + ".": { + "types": "./types/index.d.ts", + "require": "./lib/index.js", + "import": "./lib/index.js" + } + }, + "publishConfig": { + "access": "public" + } +} diff --git a/pods/network-tool/src/agents.ts b/pods/network-tool/src/agents.ts new file mode 100644 index 00000000000..57cc72df1ef --- /dev/null +++ b/pods/network-tool/src/agents.ts @@ -0,0 +1,26 @@ +import { createNetworkClient } from '@hcengineering/network-client' +import { program } from 'commander' + +export function registerAgentOperations (): void { + program + .command('list-agents') + .description('Connect to network and list active agents') + .option('-n, --network ', 'Network address', 'localhost:3737') + .action(async (cmd: { network: string }) => { + const network = process.env.NETWORK_HOST ?? 
cmd.network + const client = createNetworkClient(network) + + const agents = await client.agents() + console.log(`Active agents: ${agents.length}`) + for (const agent of agents) { + console.log(` - Agent: ${agent.agentId} at ${agent.endpoint} with ${agent.containers.length} containers\n`) + for (const container of agent.containers) { + console.log(` - Container: ${container.uuid} kind: ${container.kind} endpoint: ${container.endpoint}\n`) + } + } + + await client.close() + // required to call exit handlers to terminate + process.exit(0) + }) +} diff --git a/pods/network-tool/src/benchmark.ts b/pods/network-tool/src/benchmark.ts new file mode 100644 index 00000000000..1e03e0b2f06 --- /dev/null +++ b/pods/network-tool/src/benchmark.ts @@ -0,0 +1,116 @@ +import { createAgent, createNetworkClient } from '@hcengineering/network-client' +import { + containerOnAgentEndpointRef, + type AgentEndpointRef, + type ClientUuid, + type Container, + type ContainerKind, + type ContainerReference, + type ContainerUuid +} from '@hcengineering/network-core' +import { program } from 'commander' +import { addShutdownHandler, tickManager } from './utils' + +const benchmarkContainer = 'benchmark' as ContainerKind + +class BenchmarkContainer implements Container { + constructor (readonly uuid: ContainerUuid) {} + + async request (operation: string, data?: any, clientId?: ClientUuid): Promise { + // Just send data back + return data + } + + async ping (): Promise {} + + async terminate (): Promise { + console.log('Stopping bench container') + } + + async connect (clientId: ClientUuid, broadcast: (data: any) => Promise): Promise {} + + async disconnect (clientId: ClientUuid): Promise {} +} + +export function registerBenchmark (): void { + program + .command('bench-agent') + .description('Register a benchmark container agent') + .option('-n, --network ', 'Network address', 'localhost:3737') + .option('-e, --endpoint ', 'Agent endpoint address', 'localhost:3738') + .option( + '-l, --label